repo_name (stringlengths 7-94) | repo_path (stringlengths 4-237) | repo_head_hexsha (stringlengths 40-40) | content (stringlengths 10-680k) | apis (stringlengths 2-840k) |
---|---|---|---|---|
rs9899/Parsing-R-CNN | utils/data/dataset_catalog.py | a0c9ed8850abe740eedf8bfc6e1577cc0aa3fc7b | import os.path as osp
# Root directory of project
ROOT_DIR = osp.abspath(osp.join(osp.dirname(__file__), '..', '..'))
# Path to data dir
_DATA_DIR = osp.abspath(osp.join(ROOT_DIR, 'data'))
# Required dataset entry keys
_IM_DIR = 'image_directory'
_ANN_FN = 'annotation_file'
# Available datasets
COMMON_DATASETS = {
'coco_2017_train': {
_IM_DIR:
_DATA_DIR + '/coco/images/train2017',
_ANN_FN:
_DATA_DIR + '/coco/annotations/instances_train2017.json',
},
'coco_2017_val': {
_IM_DIR:
_DATA_DIR + '/coco/images/val2017',
_ANN_FN:
_DATA_DIR + '/coco/annotations/instances_val2017.json',
},
'coco_2017_test': {
_IM_DIR:
_DATA_DIR + '/coco/images/test2017',
_ANN_FN:
_DATA_DIR + '/coco/annotations/image_info_test2017.json',
},
'coco_2017_test-dev': {
_IM_DIR:
_DATA_DIR + '/coco/images/test2017',
_ANN_FN:
_DATA_DIR + '/coco/annotations/image_info_test-dev2017.json',
},
'keypoints_coco_2017_train': {
_IM_DIR:
_DATA_DIR + '/coco/images/train2017',
_ANN_FN:
_DATA_DIR + '/coco/annotations/person_keypoints_train2017.json'
},
'keypoints_coco_2017_val': {
_IM_DIR:
_DATA_DIR + '/coco/images/val2017',
_ANN_FN:
_DATA_DIR + '/coco/annotations/person_keypoints_val2017.json'
},
'keypoints_coco_2017_test': {
_IM_DIR:
_DATA_DIR + '/coco/images/test2017',
_ANN_FN:
_DATA_DIR + '/coco/annotations/image_info_test2017.json'
},
'keypoints_coco_2017_test-dev': {
_IM_DIR:
_DATA_DIR + '/coco/images/test2017',
_ANN_FN:
_DATA_DIR + '/coco/annotations/image_info_test-dev2017.json',
},
'dense_coco_2017_train': {
_IM_DIR:
_DATA_DIR + '/coco/images/train2017',
_ANN_FN:
_DATA_DIR + '/coco/annotations/DensePoseData/densepose_coco_train2017.json',
},
'dense_coco_2017_val': {
_IM_DIR:
_DATA_DIR + '/coco/images/val2017',
_ANN_FN:
_DATA_DIR + '/coco/annotations/DensePoseData/densepose_coco_val2017.json',
},
'dense_coco_2017_test': {
_IM_DIR:
_DATA_DIR + '/coco/images/test2017',
_ANN_FN:
_DATA_DIR + '/coco/annotations/DensePoseData/densepose_coco_test.json',
},
'CIHP_train': { # new addition by wzh
_IM_DIR:
_DATA_DIR + '/CIHP/train_img',
_ANN_FN:
_DATA_DIR + '/CIHP/annotations/CIHP_train.json',
},
'CIHP_val': { # new addition by wzh
_IM_DIR:
_DATA_DIR + '/CIHP/val_img',
_ANN_FN:
_DATA_DIR + '/CIHP/annotations/CIHP_val.json',
},
'CIHP_test': { # new addition by wzh
_IM_DIR:
_DATA_DIR + '/CIHP/test_img',
_ANN_FN:
_DATA_DIR + '/CIHP/annotations/CIHP_test.json',
},
'MHP-v2_train': { # new addition by wzh
_IM_DIR:
_DATA_DIR + '/MHP-v2/train_img',
_ANN_FN:
_DATA_DIR + '/MHP-v2/annotations/MHP-v2_train.json',
},
'MHP-v2_val': { # new addition by wzh
_IM_DIR:
_DATA_DIR + '/MHP-v2/val_img',
_ANN_FN:
_DATA_DIR + '/MHP-v2/annotations/MHP-v2_val.json',
},
'MHP-v2_test': { # new addition by wzh
_IM_DIR:
_DATA_DIR + '/MHP-v2/test_img',
_ANN_FN:
_DATA_DIR + '/MHP-v2/annotations/MHP-v2_test_all.json',
},
'MHP-v2_test_inter_top10': { # new addition by wzh
_IM_DIR:
_DATA_DIR + '/MHP-v2/test_img',
_ANN_FN:
_DATA_DIR + '/MHP-v2/annotations/MHP-v2_test_inter_top10.json',
},
'MHP-v2_test_inter_top20': { # new addition by wzh
_IM_DIR:
_DATA_DIR + '/MHP-v2/test_img',
_ANN_FN:
_DATA_DIR + '/MHP-v2/annotations/MHP-v2_test_inter_top20.json',
},
'PASCAL-Person-Part_train': { # new addition by soeaver
_IM_DIR:
_DATA_DIR + '/PASCAL-Person-Part/train_img',
_ANN_FN:
_DATA_DIR + '/PASCAL-Person-Part/annotations/pascal_person_part_train.json',
},
'PASCAL-Person-Part_test': { # new addition by soeaver
_IM_DIR:
_DATA_DIR + '/PASCAL-Person-Part/test_img',
_ANN_FN:
_DATA_DIR + '/PASCAL-Person-Part/annotations/pascal_person_part_test.json',
}
}
| [((7, 24, 7, 50), 'os.path.join', 'osp.join', ({(7, 33, 7, 41): 'ROOT_DIR', (7, 43, 7, 49): '"""data"""'}, {}), "(ROOT_DIR, 'data')", True, 'import os.path as osp\n'), ((4, 32, 4, 53), 'os.path.dirname', 'osp.dirname', ({(4, 44, 4, 52): '__file__'}, {}), '(__file__)', True, 'import os.path as osp\n')] |
Dakewe-DS1000/LapRSNet | prepareDataSet.py | 47e630acd3f0523ee5ac698566ff45e645681b23 | # Prepare my dataset for Digital Pathology
import os
import math
import cv2
import pdb
rootFolder = r"F:\DataBase\LymphnodePathology"
trainFolder = rootFolder + "\\trainDataSet"
testFolder = rootFolder + "\\testDataSet"
srcTrainFilePath = trainFolder + "\\20X\\"
dstTrainFilePath = trainFolder + "\\5X\\"
srcTestFilePath = testFolder + "\\20X\\"
dstTestFilePath = testFolder + "\\5X\\"
factor = 4
if __name__ == '__main__':
srcTrainFileNameList = os.listdir(srcTrainFilePath)
srcTestFileNameList = os.listdir(srcTestFilePath)
for srcTrainFileName in srcTrainFileNameList:
srcTrainImage = cv2.imread(srcTrainFilePath + srcTrainFileName)
imgHeight, imgWidth, _ = srcTrainImage.shape
newWidth = int(imgWidth / factor)
newHeight = int(imgHeight / factor)
newSize = (newWidth, newHeight)
dstTrainImage = cv2.resize(srcTrainImage, newSize, interpolation=cv2.INTER_AREA)
print("Train File Name : %s, (%d, %d) => (%d, %d)" %(srcTrainFileName, imgWidth, imgHeight, newSize[0], newSize[1]))
cv2.imwrite(dstTrainFilePath + srcTrainFileName, dstTrainImage)
for srcTestFileName in srcTestFileNameList:
srcTestImage = cv2.imread(srcTestFilePath + srcTestFileName)
imgHeight, imgWidth, _ = srcTestImage.shape
newWidth = int(imgWidth / factor)
newHeight = int(imgHeight / factor)
newSize = (newWidth, newHeight)
dstTestImage = cv2.resize(srcTestImage, newSize, interpolation=cv2.INTER_AREA)
print("Test File Name : %s, (%d, %d) => (%d, %d)" %(srcTestFileName, imgWidth, imgHeight, newSize[0], newSize[1]))
cv2.imwrite(dstTestFilePath + srcTestFileName, dstTestImage)
| [((21, 27, 21, 55), 'os.listdir', 'os.listdir', ({(21, 38, 21, 54): 'srcTrainFilePath'}, {}), '(srcTrainFilePath)', False, 'import os\n'), ((22, 27, 22, 54), 'os.listdir', 'os.listdir', ({(22, 38, 22, 53): 'srcTestFilePath'}, {}), '(srcTestFilePath)', False, 'import os\n'), ((25, 24, 25, 71), 'cv2.imread', 'cv2.imread', ({(25, 35, 25, 70): 'srcTrainFilePath + srcTrainFileName'}, {}), '(srcTrainFilePath + srcTrainFileName)', False, 'import cv2\n'), ((32, 24, 32, 88), 'cv2.resize', 'cv2.resize', (), '', False, 'import cv2\n'), ((35, 8, 35, 71), 'cv2.imwrite', 'cv2.imwrite', ({(35, 20, 35, 55): '(dstTrainFilePath + srcTrainFileName)', (35, 57, 35, 70): 'dstTrainImage'}, {}), '(dstTrainFilePath + srcTrainFileName, dstTrainImage)', False, 'import cv2\n'), ((38, 23, 38, 68), 'cv2.imread', 'cv2.imread', ({(38, 34, 38, 67): 'srcTestFilePath + srcTestFileName'}, {}), '(srcTestFilePath + srcTestFileName)', False, 'import cv2\n'), ((45, 23, 45, 86), 'cv2.resize', 'cv2.resize', (), '', False, 'import cv2\n'), ((48, 8, 48, 68), 'cv2.imwrite', 'cv2.imwrite', ({(48, 20, 48, 53): '(dstTestFilePath + srcTestFileName)', (48, 55, 48, 67): 'dstTestImage'}, {}), '(dstTestFilePath + srcTestFileName, dstTestImage)', False, 'import cv2\n')] |
zentrumnawi/solid-backend | sample_project/sample_content/serializers.py | 0a6ac51608d4c713903856bb9b0cbf0068aa472c | from rest_framework import serializers
from solid_backend.photograph.serializers import PhotographSerializer
from solid_backend.media_object.serializers import MediaObjectSerializer
from .models import SampleProfile
class SampleProfileSerializer(serializers.ModelSerializer):
media_objects = MediaObjectSerializer(many=True)
class Meta:
model = SampleProfile
fields = "__all__"
depth = 1
| [((9, 20, 9, 52), 'solid_backend.media_object.serializers.MediaObjectSerializer', 'MediaObjectSerializer', (), '', False, 'from solid_backend.media_object.serializers import MediaObjectSerializer\n')] |
wchsieh/utensor_cgen | tests/reshape_4/generate_pb.py | 1774f0dfc0eb98b274271e7a67457dc3593b2593 | # -*- coding: utf8 -*-
import os
from utensor_cgen.utils import save_consts, save_graph, save_idx
import numpy as np
import tensorflow as tf
def generate():
test_dir = os.path.dirname(__file__)
graph = tf.Graph()
with graph.as_default():
x = tf.constant(np.random.randn(10),
dtype=tf.float32,
name='x')
output_x = tf.reshape(x, [5, 2], name="output_x")
with tf.Session(graph=graph) as sess:
save_consts(sess, test_dir)
save_graph(graph, 'test_reshape_4', test_dir)
np_output = output_x.eval()
save_idx(np_output, os.path.join(test_dir, 'output_x.idx'))
# test_reshape_4.pb is the same as test_quant_reshape_4.pb
# hack, since we do not have QuantizedReshape yet
if __name__ == "__main__":
generate()
| [((9, 13, 9, 38), 'os.path.dirname', 'os.path.dirname', ({(9, 29, 9, 37): '__file__'}, {}), '(__file__)', False, 'import os\n'), ((10, 10, 10, 20), 'tensorflow.Graph', 'tf.Graph', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((15, 15, 15, 53), 'tensorflow.reshape', 'tf.reshape', (), '', True, 'import tensorflow as tf\n'), ((17, 7, 17, 30), 'tensorflow.Session', 'tf.Session', (), '', True, 'import tensorflow as tf\n'), ((18, 4, 18, 31), 'utensor_cgen.utils.save_consts', 'save_consts', ({(18, 16, 18, 20): 'sess', (18, 22, 18, 30): 'test_dir'}, {}), '(sess, test_dir)', False, 'from utensor_cgen.utils import save_consts, save_graph, save_idx\n'), ((19, 4, 19, 49), 'utensor_cgen.utils.save_graph', 'save_graph', ({(19, 15, 19, 20): 'graph', (19, 22, 19, 38): '"""test_reshape_4"""', (19, 40, 19, 48): 'test_dir'}, {}), "(graph, 'test_reshape_4', test_dir)", False, 'from utensor_cgen.utils import save_consts, save_graph, save_idx\n'), ((12, 20, 12, 39), 'numpy.random.randn', 'np.random.randn', ({(12, 36, 12, 38): '10'}, {}), '(10)', True, 'import numpy as np\n'), ((21, 24, 21, 62), 'os.path.join', 'os.path.join', ({(21, 37, 21, 45): 'test_dir', (21, 47, 21, 61): '"""output_x.idx"""'}, {}), "(test_dir, 'output_x.idx')", False, 'import os\n')] |
modsim/junn | junn-predict/junn_predict/common/logging.py | a40423b98c6a3739dd0b2ba02d546a5db91f9215 | """Logging helpers."""
import logging
import sys
import colorlog
import tqdm
class TqdmLoggingHandler(logging.StreamHandler):
"""TqdmLoggingHandler, outputs log messages to the console compatible with tqdm."""
def emit(self, record): # noqa: D102
message = self.format(record)
tqdm.tqdm.write(message)
class DelayedFileLog(logging.StreamHandler):
"""DelayedFileLog will cache messages till it can write them to a specified file."""
def __init__(self): # noqa: D107
super().__init__()
self.file_name = None
self.buffer = []
def emit(self, record): # noqa: D102
if self.file_name is None:
message = self.format(record)
self.buffer.append(message)
else:
super().emit(record)
def setFilename(self, file_name, mode='a'):
"""
Set the filename to write the log messages to.
:param file_name: File name to use.
:param mode: File open mode, by default 'a'.
:return: None
"""
self.file_name = file_name
stream = open(file_name, mode)
for old_message in self.buffer:
stream.write(old_message + self.terminator)
self.setStream(stream)
def setup_logging(level):
"""
Set the logging up to the specified level.
:param level: Log level
:return: None
"""
name_to_log_level = get_name_to_log_level_dict()
if level in name_to_log_level:
level = name_to_log_level[level]
tqdm_log_handler = TqdmLoggingHandler()
log_format = (
"%(asctime)-15s.%(msecs)03d %(process)d %(levelname)s %(name)s %(message)s"
)
log_datefmt = '%Y-%m-%d %H:%M:%S'
tqdm_log_handler.setFormatter(
colorlog.TTYColoredFormatter(
fmt='%(log_color)s' + log_format, datefmt=log_datefmt, stream=sys.stdout
)
)
buffer = DelayedFileLog()
log_handlers = [tqdm_log_handler, buffer]
# noinspection PyArgumentList
logging.basicConfig(
level=level, format=log_format, datefmt=log_datefmt, handlers=log_handlers
)
def get_name_to_log_level_dict():
"""
Return a dict with a mapping of log levels.
:return: The dict
"""
# noinspection PyProtectedMember
name_to_log_level = logging._nameToLevel.copy()
return name_to_log_level
def get_log_levels():
"""
Return supported log levels.
:return: List of log levels
"""
log_levels = [
k for k, v in sorted(get_name_to_log_level_dict().items(), key=lambda ab: ab[1])
]
log_levels.remove('NOTSET')
return log_levels
| [((74, 4, 76, 5), 'logging.basicConfig', 'logging.basicConfig', (), '', False, 'import logging\n'), ((86, 24, 86, 51), 'logging._nameToLevel.copy', 'logging._nameToLevel.copy', ({}, {}), '()', False, 'import logging\n'), ((14, 8, 14, 32), 'tqdm.tqdm.write', 'tqdm.tqdm.write', ({(14, 24, 14, 31): 'message'}, {}), '(message)', False, 'import tqdm\n'), ((67, 8, 69, 9), 'colorlog.TTYColoredFormatter', 'colorlog.TTYColoredFormatter', (), '', False, 'import colorlog\n')] |
LesterYHZ/Automated-Bridge-Inspection-Robot-Project | subpartcode/ultrasonic_basic_code.py | c3f4e12f9b60a8a6b041bf2b6d0461a1bb39c726 | #Basic Ultrasonic sensor (HC-SR04) code
import RPi.GPIO as GPIO #GPIO RPI library
import time # makes sure Pi waits between steps
GPIO.setmode(GPIO.BCM) #sets GPIO pin numbering
#GPIO.setmode(GPIO.BOARD)
#Remove warnings
GPIO.setwarnings(False)
#Create loop variable
#loop = 1
#BCM
TRIG = 23 #output pin - triggers the sensor
ECHO = 24 #input pin - reads the return signal from the sensor
#BOARD
#TRIG=16
#ECHO=18
#Looping not necessary
#Print a message to let the user know that distance measurement is in progress
print ("Distance Measurement In Progress")
#Set two GPIO ports as inputs/outputs
GPIO.setup(TRIG,GPIO.OUT)
GPIO.setup(ECHO,GPIO.IN)
#while loop == 1: #Looping forever
while True: #Looping forever
#Ensure the trigger pin is set low
GPIO.output(TRIG, False)
#Give the sensor a second to settle
print ("Waiting for Sensor to Settle")
#time.sleep(2)
time.sleep(1)
#Create trigger pulse
GPIO.output(TRIG,True)
#Set trigger pin high for 10uS
time.sleep(0.00001)
#Set it low again
GPIO.output(TRIG,False)
#Record the last low timestamp for ECHO (just before the return signal is received and the pin goes high)
while GPIO.input(ECHO)==0:
pulse_start = time.time()
#Once a signal is received, the value changes from low to high, and the signal will remain high for the duration of the echo pulse
while GPIO.input(ECHO)==1:
pulse_end = time.time()
#speed=distance/time
#speed of sound at sea level = 343m/s
#34300 = distance/(time/2)
#17150 = distance/time
#17150*time = distance
#Calculating...
pulse_duration = pulse_end - pulse_start
distance_cm = pulse_duration*17150
#distance_cm = pulse_duration*0.034/2;
distance_cm = round(distance_cm,2)
distance_inch = distance_cm/2.54 #2.54 cm in 1 inch
#distance_inch = pulse_duration*0.0133/2
distance_inch = round(distance_inch,2)
distance_feet = distance_inch/12
distance_feet = round(distance_feet,2)
#Print distance
#print ("Distance:",distance_cm,"cm")
#print ("Distance:",distance_inch,"in")
print ("Distance:",distance_feet,"ft")
#Delay
time.sleep(2)
#Clean GPIO pins to ensure all inputs/outputs are reset
GPIO.cleanup()
| [((5, 0, 5, 22), 'RPi.GPIO.setmode', 'GPIO.setmode', ({(5, 13, 5, 21): 'GPIO.BCM'}, {}), '(GPIO.BCM)', True, 'import RPi.GPIO as GPIO\n'), ((9, 0, 9, 23), 'RPi.GPIO.setwarnings', 'GPIO.setwarnings', ({(9, 17, 9, 22): '(False)'}, {}), '(False)', True, 'import RPi.GPIO as GPIO\n'), ((27, 0, 27, 25), 'RPi.GPIO.setup', 'GPIO.setup', ({(27, 11, 27, 15): 'TRIG', (27, 16, 27, 24): 'GPIO.OUT'}, {}), '(TRIG, GPIO.OUT)', True, 'import RPi.GPIO as GPIO\n'), ((28, 0, 28, 24), 'RPi.GPIO.setup', 'GPIO.setup', ({(28, 11, 28, 15): 'ECHO', (28, 16, 28, 23): 'GPIO.IN'}, {}), '(ECHO, GPIO.IN)', True, 'import RPi.GPIO as GPIO\n'), ((84, 0, 84, 14), 'RPi.GPIO.cleanup', 'GPIO.cleanup', ({}, {}), '()', True, 'import RPi.GPIO as GPIO\n'), ((33, 4, 33, 28), 'RPi.GPIO.output', 'GPIO.output', ({(33, 16, 33, 20): 'TRIG', (33, 22, 33, 27): '(False)'}, {}), '(TRIG, False)', True, 'import RPi.GPIO as GPIO\n'), ((38, 4, 38, 17), 'time.sleep', 'time.sleep', ({(38, 15, 38, 16): '(1)'}, {}), '(1)', False, 'import time\n'), ((41, 4, 41, 26), 'RPi.GPIO.output', 'GPIO.output', ({(41, 16, 41, 20): 'TRIG', (41, 21, 41, 25): '(True)'}, {}), '(TRIG, True)', True, 'import RPi.GPIO as GPIO\n'), ((44, 4, 44, 23), 'time.sleep', 'time.sleep', ({(44, 15, 44, 22): '(1e-05)'}, {}), '(1e-05)', False, 'import time\n'), ((47, 4, 47, 27), 'RPi.GPIO.output', 'GPIO.output', ({(47, 16, 47, 20): 'TRIG', (47, 21, 47, 26): '(False)'}, {}), '(TRIG, False)', True, 'import RPi.GPIO as GPIO\n'), ((81, 4, 81, 17), 'time.sleep', 'time.sleep', ({(81, 15, 81, 16): '(2)'}, {}), '(2)', False, 'import time\n'), ((50, 10, 50, 26), 'RPi.GPIO.input', 'GPIO.input', ({(50, 21, 50, 25): 'ECHO'}, {}), '(ECHO)', True, 'import RPi.GPIO as GPIO\n'), ((51, 22, 51, 33), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((54, 10, 54, 26), 'RPi.GPIO.input', 'GPIO.input', ({(54, 21, 54, 25): 'ECHO'}, {}), '(ECHO)', True, 'import RPi.GPIO as GPIO\n'), ((55, 20, 55, 31), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n')] |
MOURAIGOR/python | Mentorama/Modulo 3 - POO/Retangulo.py | b267f8ef277a385e3e315e88a22390512bf1e101 | class Retangulo:
    # Attributes
def __init__(self, comprimento, altura):
self.setcomprimento(comprimento)
self.setAltura(altura)
    # Methods
def setcomprimento(self, comprimento):
self.comprimento = comprimento
def getcomprimento(self):
return self.comprimento
def setAltura(self, altura):
self.altura = altura
def getAltura(self):
return self.altura
def calculaArea(self):
return self.comprimento * self.altura
def calculaPerimetro(self):
return 2 * self.comprimento + 2 * self.altura
# Executing
comprimento = int(input('Valor do comprimento: '))
altura = int(input('Valor da altura: '))
retangulo = Retangulo(comprimento, altura)
print('A area do retangulo é: %d' % retangulo.calculaArea())
print('O perimetro do retangulo é : %d' % retangulo.calculaPerimetro())
| [] |
PeterFogh/digital_elevation_model_use_cases | DEMs/denmark/download_dk_dem.py | 0e72cc6238ca5217a73d06dc3e8c3229024112c3 | """
Fetch all files from Kortforsyningen FTP server folder.
Copyright (c) 2021 Peter Fogh
See also command line alternative in `download_dk_dem.sh`
"""
from ftplib import FTP, error_perm
import os
from pathlib import Path
import time
import operator
import functools
import shutil
# TODO: use logging to std instead of print(time.ctime())
from environs import Env
# Functions
def download_FTP_tree(ftp, remote_dir, local_dir):
"""
Download FTP directory and all content to local directory.
Inspired by https://stackoverflow.com/a/55127679/7796217.
Parameters:
ftp : ftplib.FTP
Established FTP connection after login.
remote_dir : pathlib.Path
FTP directory to download.
local_dir : pathlib.Path
Local directory to store downloaded content.
"""
# Set up empty local dir and FTP current work dir before tree traversal.
shutil.rmtree(local_dir)
ftp.cwd(remote_dir.parent.as_posix())
local_dir.mkdir(parents=True, exist_ok=True)
return _recursive_download_FTP_tree(ftp, remote_dir, local_dir)
def _is_ftp_dir(ftp, name):
"""
Check if FTP entry is a directory.
Modified from here https://www.daniweb.com/programming/software-development/threads/243712/ftplib-isdir-or-isfile
to accommodate not necessarily being in the top-level directory.
Parameters:
ftp : ftplib.FTP
Established FTP connection after login.
name: str
Name of FTP file system entry to check if directory or not.
"""
try:
current_dir = ftp.pwd()
ftp.cwd(name)
#print(f'File system entry "{name=}" is a directory.')
ftp.cwd(current_dir)
return True
except error_perm as e:
#print(f'File system entry "{name=}" is a file.')
return False
def _recursive_download_FTP_tree(ftp, remote_dir, local_dir):
"""
Download FTP directory and all content to local directory.
Inspired by https://stackoverflow.com/a/55127679/7796217.
Parameters:
ftp : ftplib.FTP
Established FTP connection after login.
remote_dir : pathlib.Path
FTP directory to download.
local_dir : pathlib.Path
Local directory to store downloaded content.
"""
print(f'{remote_dir=}')
print(f'{local_dir=}')
ftp.cwd(remote_dir.name)
local_dir.mkdir(exist_ok=True)
print(f'{time.ctime()}: Fetching file & directory names within "{remote_dir}".')
dir_entries = ftp.nlst()
print(f'{time.ctime()}: Fetched file & directory names within "{remote_dir}".')
dirs = []
for filename in sorted(dir_entries)[-5:]: # TODO: remove restriction on downloaded of entries
if _is_ftp_dir(ftp, filename):
dirs.append(filename)
else:
local_file = local_dir/filename
print(f'{time.ctime()}: Downloading "{local_file}".')
ftp.retrbinary(
cmd=f'RETR {filename}',
callback=local_file.open('wb').write)
print(f'{time.ctime()}: Downloaded "{local_file}".')
print(f'Traverse dir tree to "{dirs=}"')
map_download_FTP_tree = map(lambda dir: _recursive_download_FTP_tree(
ftp, remote_dir/dir, local_dir/dir), dirs)
return functools.reduce(operator.iand, map_download_FTP_tree, True)
if __name__ == '__main__':
# Load environment variables from local `.env` file.
env = Env()
env.read_env()
# Set up server and source/destination paths.
ftp_host = 'ftp.kortforsyningen.dk'
dem_ftp_dir = Path('dhm_danmarks_hoejdemodel/DTM')
local_ftp_dir = env.path('LOCAL_FTP_DIR', './')
local_dem_ftp_dir = local_ftp_dir/'kortforsyningen'/dem_ftp_dir
# Perform FTP download.
print(f'{time.ctime()}: Connect to {ftp_host}')
ftp = FTP(ftp_host)
ftp.login(env('KORTFORSYNING_USERNAME'), env('KORTFORSYNING_PASSWORD'))
download_FTP_tree(ftp, dem_ftp_dir, local_dem_ftp_dir)
ftp.close()
print(f'{time.ctime()}: Finished')
| [((37, 4, 37, 28), 'shutil.rmtree', 'shutil.rmtree', ({(37, 18, 37, 27): 'local_dir'}, {}), '(local_dir)', False, 'import shutil\n'), ((105, 11, 105, 71), 'functools.reduce', 'functools.reduce', ({(105, 28, 105, 41): 'operator.iand', (105, 43, 105, 64): 'map_download_FTP_tree', (105, 66, 105, 70): '(True)'}, {}), '(operator.iand, map_download_FTP_tree, True)', False, 'import functools\n'), ((110, 10, 110, 15), 'environs.Env', 'Env', ({}, {}), '()', False, 'from environs import Env\n'), ((115, 18, 115, 54), 'pathlib.Path', 'Path', ({(115, 23, 115, 53): '"""dhm_danmarks_hoejdemodel/DTM"""'}, {}), "('dhm_danmarks_hoejdemodel/DTM')", False, 'from pathlib import Path\n'), ((121, 10, 121, 23), 'ftplib.FTP', 'FTP', ({(121, 14, 121, 22): 'ftp_host'}, {}), '(ftp_host)', False, 'from ftplib import FTP, error_perm\n'), ((87, 13, 87, 25), 'time.ctime', 'time.ctime', ({}, {}), '()', False, 'import time\n'), ((89, 13, 89, 25), 'time.ctime', 'time.ctime', ({}, {}), '()', False, 'import time\n'), ((120, 13, 120, 25), 'time.ctime', 'time.ctime', ({}, {}), '()', False, 'import time\n'), ((125, 13, 125, 25), 'time.ctime', 'time.ctime', ({}, {}), '()', False, 'import time\n'), ((96, 21, 96, 33), 'time.ctime', 'time.ctime', ({}, {}), '()', False, 'import time\n'), ((100, 21, 100, 33), 'time.ctime', 'time.ctime', ({}, {}), '()', False, 'import time\n')] |
jiaxinjiang2919/Refinance-Calculator | 6_refin_widgets.py | f4bb0c536b88692ef90f504fdb2d9bed85588b7c | # -*- coding: utf-8 -*-
"""
Created on Sun Mar 24 15:02:37 2019
@author: Matt Macarty
"""
from tkinter import *
import numpy as np
class LoanCalculator:
def __init__(self):
window = Tk()
window.title("Loan Calculator")
Label(window, text="Loan Amount").grid(row=1, column=1, sticky=W)
Label(window, text="Interest rate").grid(row=2, column=1, sticky=W)
Label(window, text="Term (years)").grid(row=3, column=1, sticky=W)
Label(window, text=None).grid(row=4,column=1) # space between inputs and outputs
Label(window, text="Payment:").grid(row=5, column=1, sticky=W)
Label(window, text="Total Payments:").grid(row=6, column=1, sticky=W)
# variables to store loan inputs
self.pv = StringVar()
self.interest_rate = StringVar()
self.term = StringVar()
# varianbles for loan outputs
self.pmt = StringVar()
self.total = StringVar()
# text boxes to hold inputs and outputs
Entry(window, textvariable = self.pv,
justify=RIGHT).grid(row=1,column=2, padx=(0,5))
Entry(window, textvariable = self.interest_rate,
justify=RIGHT).grid(row=2,column=2, padx=(0,5))
Entry(window, textvariable = self.term,
justify=RIGHT).grid(row=3,column=2, padx=(0,5))
Label(window, textvariable = self.pmt,
font="Helvetica 12 bold",
justify=RIGHT).grid(row=5,column=2,sticky= E )
Label(window, textvariable = self.total,
font="Helvetica 12 bold",
justify=RIGHT).grid(row=6,column=2, sticky= E)
Button(window, text="Calculate Payment", command=self.calcPayment).grid(row=7,column=2, padx= (60,5), pady=5)
# Refinance variables
self.old_pmt = StringVar()
self.time_left = StringVar()
self.refi_cost = StringVar()
# Refinance widgets
Label(window, text="Current Payment").grid(row=8,column=1)
Label(window, text="Time Left").grid(row=9,column=1)
Label(window, text="Cost of Refi").grid(row=10,column=1)
Entry(window, textvariable=self.old_pmt, justify=RIGHT).grid(row=8,column=2, padx=(0,5))
Entry(window, textvariable=self.time_left, justify=RIGHT).grid(row=9,column=2, padx=(0,5))
Entry(window, textvariable=self.refi_cost, justify=RIGHT).grid(row=10,column=2, padx=(0,5))
# Refi output variables
self.monthly_savings = StringVar()
self.payback = StringVar()
self.overall_savings = StringVar()
Label(window, text="Payback Months:").grid(row=11,column=1)
Label(window, text="Monthly Savings:").grid(row=12,column=1)
Label(window, text="Overall Savings:").grid(row=13,column=1)
Button(window, text="Evaluate Refi", command=self.evalRefi).grid(row=14,column=2, padx= (100,5), pady=5)
window.mainloop()
def calcPayment(self):
pv = float(self.pv.get())
rate = float(self.interest_rate.get())
term = int(self.term.get())
pmt = np.pmt(rate / 1200, term * 12, -pv,0)
total = pmt * term * 12
self.pmt.set("$" + format(pmt, "5,.2f"))
self.total.set("$" + format(total, "8,.2f"))
    def evalRefi(self):
pass
LoanCalculator()
| [((85, 14, 85, 51), 'numpy.pmt', 'np.pmt', ({(85, 21, 85, 32): 'rate / 1200', (85, 34, 85, 43): 'term * 12', (85, 45, 85, 48): '-pv', (85, 49, 85, 50): '0'}, {}), '(rate / 1200, term * 12, -pv, 0)', True, 'import numpy as np\n')] |
davelarsen58/pmemtool | ndctl.py | a7acb0991cbcd683f761d4b108d018d7d2d10aeb | #!/usr/bin/python3
#
# PMTOOL NDCTL Python Module
# Copyright (C) David P Larsen
# Released under MIT License
import os
import json
from common import message, get_linenumber, pretty_print
from common import V0, V1, V2, V3, V4, V5, D0, D1, D2, D3, D4, D5
import common as c
import time
DEFAULT_FSTAB_FILE = "/etc/fstab"
DEFAULT_NDCTL_FILE = "/tmp/ndctl_list_NDRH.txt"
DEBUG = 0
VERBOSE = c.VERBOSE
tmp_dir = '/tmp'
timers = []
# If working in a test sandbox, change paths
# to start with path to sandbox
#
if not os.getenv('SANDBOX'):
SANDBOX = ''
else:
SANDBOX = os.environ['SANDBOX']
print('Enabling Sandbox at:', SANDBOX)
# FSTAB = SANDBOX + '/etc/fstab'
DEVDIR = SANDBOX + '/dev'
DEV_UUID = DEVDIR + '/disk/by-uuid/'
NDCTL_FILE = SANDBOX + "/tmp/ndctl_list_NDRH.txt"
ndctl = {}
# ---------------------------------------------------------------------
def clean_up():
    '''clean up all tmp files associated with this module'''
name = 'clean_up()'
tic = time.perf_counter()
status = False
file_name = '/tmp/ndctl*.txt'
status = c.clean_up(file_name)
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return status
def get_nmem_dev_list(node):
''' returns list of nmems['nmem0' 'nmem1' 'nmem2' 'nmem3' 'nmem4' 'nmem5']
ndctl list -D -U 0
{
"dev":"nmem2",
"id":"8089-a2-1836-00002716",
"handle":33,
"phys_id":42,
"flag_failed_flush":true,
"flag_smart_event":true,
"security":"disabled"
}
'''
name = 'get_nmem_dev_list()'
tic = time.perf_counter()
file_name = '/tmp/ndctl_list_-D_-U_node' + str(node) + '.txt'
cmd = "/usr/bin/ndctl list -D -U " + str(node) + " > " + file_name
if not os.path.exists(file_name):
os.system(cmd)
tmp = {}
my_list = []
with open(file_name, 'r') as f:
tmp = json.load(f)
for t in range(len(tmp)):
        my_list.append(tmp[t]['dev'])
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return my_list
# ---------------------------------------------------------------------
def get_region_dev_list(node):
''' returns list of regions devices, ie: "region0"
ndctl list -U 0
[
{
"dev":"region0",
"size":1623497637888,
"available_size":0,
"max_available_extent":0,
"type":"pmem",
"iset_id":-7155516910447809332,
"persistence_domain":"memory_controller"
}
]
'''
name = 'get_region_dev_list()'
tic = time.perf_counter()
file_name = '/tmp/ndctl_list_-R_-U_node' + str(node) + '.txt'
cmd = "/usr/bin/ndctl list -R -U " + str(node) + " > " + file_name
if not os.path.exists(file_name):
os.system(cmd)
#
tmp = {}
with open(file_name, 'r') as f:
tmp = json.load(f)
my_list = []
for t in range(len(tmp)):
        my_list.append(tmp[t]['dev'])
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return my_list
# ---------------------------------------------------------------------
def get_ns_dev(node):
''' returns list of namespace names, ie: "namespace0.0"
ndctl list -U 0
[
{
"dev":"namespace0.0",
"mode":"fsdax",
"map":"dev",
"size":1598128390144,
"uuid":"115ff8e8-bd52-47b8-a678-9b200902d864",
"sector_size":512,
"align":2097152,
"blockdev":"pmem0"
}
]
'''
name = 'get_ns_dev()'
tic = time.perf_counter()
file_name = '/tmp/ndctl_list_-N_-U' + str(node) + '.txt'
cmd = "/usr/bin/ndctl list -N -U " + str(node) + " > " + file_name
os.system(cmd)
#
tmp = {}
with open(file_name, 'r') as f:
tmp = json.load(f)
#
my_list = []
for t in range(len(tmp)):
        my_list.append(tmp[t]['dev'])
#
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return my_list
# ---------------------------------------------------------------------
def get_ns_block_dev(node):
''' returns list of ns blockdevs, ie: "pmem0"
ndctl list -U 0
[
{
"dev":"namespace0.0",
"mode":"fsdax",
"map":"dev",
"size":1598128390144,
"uuid":"115ff8e8-bd52-47b8-a678-9b200902d864",
"sector_size":512,
"align":2097152,
"blockdev":"pmem0"
}
]
'''
name = 'get_ns_block_dev()'
tic = time.perf_counter()
file_name = '/tmp/ndctl_list_-N_-U' + str(node) + '.txt'
cmd = "/usr/bin/ndctl list -N -U " + str(node) + " > " + file_name
os.system(cmd)
#
tmp = {}
with open(file_name, 'r') as f:
tmp = json.load(f)
#
my_list = []
for t in range(len(tmp)):
        my_list.append(tmp[t]['blockdev'])
#
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return my_list
# ---------------------------------------------------------------------
def dump(file_name = NDCTL_FILE):
"""
dump the config to a file to parse
"""
name = 'dump()'
tic = time.perf_counter()
# message("Function:", __name__, "File:", file_name )
# if VERBOSE: print(' Querying ndctl data:', file_name, end="...")
# ndctl list -NDRH
cmd = "/usr/bin/ndctl list -NDRH > " + file_name
os.system(cmd)
# if VERBOSE: print('Done')
def parse(file_name = NDCTL_FILE):
"""
parse ndctl dump file into dict: ndctl
"""
name = 'parse()'
tic = time.perf_counter()
global ndctl
# if DEBUG: print("DEBUG: Function:", __name__, "File:", file_name )
# if VERBOSE: print(' Parsing ndctl data:', file_name, end="...")
with open(file_name, 'r') as f:
ndctl = json.load(f)
# if VERBOSE: print('Done')
# if DEBUG: print("Debug:", __name__, ":", ndctl)
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return ndctl
# - +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +
# Accessor Functions
#
def get_region_dimm_list(region):
"""
    returns list of pmem dimms associated with pmem region
"""
name = 'get_region_dimm_list()'
tic = time.perf_counter()
global ndctl
dimm_list = []
# if DEBUG: print("DEBUG: Function:", __name__, "Region:", region )
# if VERBOSE: print(' getting:', __name__, end="...")
for r in range(len(ndctl['regions'])):
# if this region matches arg, get DIMM mappings
if ndctl['regions'][r]['dev'] == region:
for d in range(len(ndctl['regions'][r]['mappings'])):
if DEBUG: print(' ndctl[regions][r]mappings', ndctl['regions'][r]['mappings'][d]['dimm'])
dimm_list.append(ndctl['regions'][r]['mappings'][d]['dimm'])
continue
# if VERBOSE: print('Done')
# if DEBUG: print("Debug:", __name__, region, "DIMMS", dimm_list)
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return dimm_list
def get_region_list():
"""
Region List
returns list of all pmem regions
"""
name = 'get_region_list()'
tic = time.perf_counter()
global ndctl
region_list = []
# if DEBUG: print("DEBUG: Function:", __name__ )
# if VERBOSE: print(' getting:', __name__, end="...")
for r in range(len(ndctl['regions'])):
region_list.append(ndctl['regions'][r]['dev'])
# if VERBOSE: print('Done')
# if DEBUG: print("Debug:", __name__, ":", region_list)
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return region_list
def get_region_ns_device_list(region):
"""
Region Namespace Device List
returns list of all pmem namespaces names associated w/ pmem region
"""
name = 'get_region_ns_device_list()'
tic = time.perf_counter()
ns_list = []
# if DEBUG: print("DEBUG: Function:", __name__, "Region:", region )
# if VERBOSE: print(' getting:', __name__, end="...")
for r in range(len(ndctl['regions'])):
# if this region matches arg, get DIMM mappings
if ndctl['regions'][r]['dev'] == region:
for d in range(len(ndctl['regions'][r]['namespaces'])):
if DEBUG: print(' ndctl[regions][r]mappings', ndctl['regions'][r]['mappings'][d]['dimm'])
ns_list.append(ndctl['regions'][r]['namespaces'][d]['blockdev'])
continue
# if VERBOSE: print('Done')
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return ns_list
def get_region_ns_name_list(region):
"""
Region Namespace List
returns list of all pmem namespaces names associated w/ pmem region
"""
name = 'get_region_ns_name_list()'
tic = time.perf_counter()
ns_list = []
# if DEBUG: print("DEBUG: Function:", __name__, "Region:", region )
# if VERBOSE: print(' getting:', __name__, end="...")
for r in range(len(ndctl['regions'])):
# if this region matches arg, get DIMM mappings
if ndctl['regions'][r]['dev'] == region:
for d in range(len(ndctl['regions'][r]['namespaces'])):
if DEBUG: print(' ndctl[regions][r]mappings', ndctl['regions'][r]['mappings'][d]['dimm'])
ns_list.append(ndctl['regions'][r]['namespaces'][d]['dev'])
continue
# if VERBOSE: print('Done')
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return ns_list
def get_dimm_status(dimm):
"""
DIMM List
returns status of given dimm
"""
name = 'get_dimm_status()'
tic = time.perf_counter()
# dimm_list = []
# if DEBUG: print("DEBUG: Function:", __name__ )
# if VERBOSE: print(' getting:', __name__, end="...")
for d in range(len(ndctl['dimms'])):
if DEBUG: print(ndctl['dimms'][d]['dev'], ndctl['dimms'][d]['health']['health_state'])
if ndctl['dimms'][d]['dev'] == dimm:
status = ndctl['dimms'][d]['health']['health_state']
break
# if VERBOSE: print('Done')
# if DEBUG: print("Debug:", __name__, ":", dimmList)
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return status
def get_dimm_list():
"""
DIMM List
returns list of all pmem devices in system
"""
name = 'get_dimm_list()'
tic = time.perf_counter()
dimm_list = []
# if DEBUG: print("DEBUG: Function:", __name__ )
# if VERBOSE: print(' getting:', __name__, end="...")
for d in range(len(ndctl['dimms'])):
dimm_list.append(ndctl['dimms'][d]['dev'])
# if VERBOSE: print('Done')
# if DEBUG: print("Debug:", __name__, ":", dimmList)
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return dimm_list
def get_region_by_dimm(dimm):
"""
Get Region by DIMM
returns region associated with PMEM device
"""
name = 'get_region_by_dimm()'
tic = time.perf_counter()
region = "regionX"
# if DEBUG: print("DEBUG: Function:", __name__ )
# if VERBOSE: print(' getting:', __name__, end="...")
# loop through regions, get dimmList for each, check if match
for r in range(len(ndctl['regions'])):
region = ndctl['regions'][r]['dev']
dimmList = get_region_dimm_list(region)
# print("get_region_by_dimm.r", r, region, dimmList )
if dimm in dimmList: break
# if VERBOSE: print('Done')
# if DEBUG: print("Debug:", __name__, ":", region)
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return region
def get_ns_name_list_by_dimm(dimm):
"""
Get PMEM Namespace name by DIMM
    returns list of pmem namespaces associated with the given DIMM
"""
name = 'get_ns_name_list_by_dimm()'
tic = time.perf_counter()
nsNameList = []
# if DEBUG: print("DEBUG: Function:", __name__ )
# if VERBOSE: print(' getting:', __name__, end="...")
# loop through regions, get dimmList for each, check if match
for r in range(len(ndctl['regions'])):
region = ndctl['regions'][r]['dev']
dimmList = get_region_dimm_list(region)
# we should have a region to lookup namespaces
nsNameList = get_region_ns_name_list(region)
if dimm in dimmList: break
# if VERBOSE: print('Done')
# if DEBUG: print("Debug:", __name__, ":", nsNameList)
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return nsNameList
def get_ns_device_list_by_dimm(dimm):
"""
Get Namespace Devices by DIMM
returns pmem namespace device for given DIMM
"""
name = 'get_ns_device_list_by_dimm()'
tic = time.perf_counter()
ns_device_list = []
dimm_list = []
# if DEBUG: print("DEBUG: Function:", __name__ )
# if VERBOSE: print(' getting:', __name__, end="...")
# loop through regions, get dimmList for each, check if match
for r in range(len(ndctl['regions'])):
region = ndctl['regions'][r]['dev']
dimm_list = get_region_dimm_list(region)
# we should have a region to lookup namespaces
ns_device_list = get_region_ns_device_list(region)
if dimm in dimm_list: break
# if VERBOSE: print('Done')
# if DEBUG: print("Debug:", __name__, ":", ns_device_list)
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return ns_device_list
def list_dimm_table():
name = 'list_dimm_table()'
tic = time.perf_counter()
print()
print("Optane Persistent Memory DIMM Status")
print()
print("%-7s %-21s %-6s %-6s %-6s %-6s" % ("Linux", "DIMM", "DIMM", "DIMM", "Cntrl", "Remaining") )
print("%-7s %-21s %-6s %-6s %-6s %-6s" % ("Device", "UID", "Health", "Temp", "Temp", "Life") )
print("%-7s %-21s %-6s %-6s %-6s %-6s" % ("-------", "--------------------", "------", "------", "------", "----") )
for x in range(len(ndctl['dimms'])):
print("%-7s %-21s %6s %-6s %-6s %-6s" % (
ndctl['dimms'][x]['dev'], \
ndctl['dimms'][x]['id'], \
ndctl['dimms'][x]['health']['health_state'], \
ndctl['dimms'][x]['health']['temperature_celsius'], \
ndctl['dimms'][x]['health']['controller_temperature_celsius'], \
ndctl['dimms'][x]['health']['spares_percentage'] \
))
def module_test():
name = 'module_test()'
tic = time.perf_counter()
import sys
import os
global VERBOSE
global DEBUG
VERBOSE = 0
DEBUG = 0
# Dicts
ndctl = {}
# Lists
regionList = []
dimmList = []
nsList = []
nsDeviceList = []
nsNameList = []
region = "region1"
dimm = "nmem0"
print("Module: ndctl.py: Testing Functions")
dump()
ndctl = parse()
# OK
dimmList = get_dimm_list()
print(" MAIN:get_dimm_list:dimmList:", dimmList)
# OK
regionList = get_region_list()
print(" MAIN:get_region_list:regionList:", regionList)
# OK
dimmList = get_region_dimm_list(region)
print(" MAIN:get_region_dimm_list:dimmList", " Region:", region, "DIMM's", dimmList)
# OK
region = "region0"
nsList = get_region_ns_name_list(region)
print(" MAIN:get_region_ns_name_list:nsList", " Region:", region, "NS", nsList)
# OK
region = "region1"
nsList = get_region_ns_device_list(region)
print(" MAIN:get_region_ns_device_list:nsList", " Region:", region, "NS", nsList)
dimm = "nmem1"
region = get_region_by_dimm(dimm)
print(" MAIN:get_region_by_dimm:region", " DIMM:", dimm, "Region:", region)
nsDeviceList = get_ns_device_list_by_dimm(dimm)
print(" MAIN:get_ns_device_list_by_dimm:nsDeviceList", nsDeviceList)
nsNameList = get_ns_name_list_by_dimm(dimm)
print(" MAIN:get_ns_name_list_by_dimm:nsNameList", nsNameList)
dimm = "nmem8"
dimmStatus = get_dimm_status(dimm)
print(" MAIN:get_dimm_status:dimmStatus", dimm, dimmStatus)
print(" MAIN:listDimmsFull")
list_dimm_table()
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
def print_timers(t = timers):
'''
------------ndctl function timers---------------------
Function Elapsed Start End
-------------------- --------- ----------- ------------
show_socket() 0.5140 941291.4208 941291.9348
parse_socket() 0.0004 941291.9348 941291.9352
show_dimm() 2.0074 941291.9352 941293.9426
parse_dimm() 0.0068 941293.9426 941293.9494
show_region() 3.8237 941293.9494 941297.7731
parse_region() 0.0006 941297.7732 941297.7737
show_dimm() 2.5911 941297.7781 941300.3692
parse_dimm() 0.0051 941300.3692 941300.3743
get_dimms() 2.5962 941297.7781 941300.3744
list_dimms() 0.0004 941300.3744 941300.3748
'''
print('------------Start ndctl function timers---------------')
print('%30s %8s %11s %11s' % ('Function', 'Elapsed', 'Start', 'End') )
print('%30s %8s %11s %11s' % ('------------------------------', '---------', '-----------', '------------') )
first = t[0]['tic']
last = t[len(t) -1]['toc']
for i in t:
print('%30s %9.4f %11.4f %11.4f' % (i['name'], i['elapsed'], i['tic'], i['toc']) )
print('%30s %9.4f %11.4f %11.4f' % ('NDCTL Overall', last - first, first, last) )
print()
print('------------End ndctl function timers-----------------')
def main():
name = 'main()'
tic = time.perf_counter()
print("This module is not intended to run standalone")
print("import this module into your script to use or use")
print("Persistent Memory Tool, pmt")
module_test()
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
print_timers()
if __name__ == "__main__":
main()
| [((27, 7, 27, 27), 'os.getenv', 'os.getenv', ({(27, 17, 27, 26): '"""SANDBOX"""'}, {}), "('SANDBOX')", False, 'import os\n'), ((44, 10, 44, 29), 'time.perf_counter', 'time.perf_counter', ({}, {}), '()', False, 'import time\n'), ((49, 13, 49, 34), 'common.clean_up', 'c.clean_up', ({(49, 24, 49, 33): 'file_name'}, {}), '(file_name)', True, 'import common as c\n'), ((51, 10, 51, 29), 'time.perf_counter', 'time.perf_counter', ({}, {}), '()', False, 'import time\n'), ((73, 10, 73, 29), 'time.perf_counter', 'time.perf_counter', ({}, {}), '()', False, 'import time\n'), ((90, 10, 90, 29), 'time.perf_counter', 'time.perf_counter', ({}, {}), '()', False, 'import time\n'), ((114, 10, 114, 29), 'time.perf_counter', 'time.perf_counter', ({}, {}), '()', False, 'import time\n'), ((130, 10, 130, 29), 'time.perf_counter', 'time.perf_counter', ({}, {}), '()', False, 'import time\n'), ((156, 10, 156, 29), 'time.perf_counter', 'time.perf_counter', ({}, {}), '()', False, 'import time\n'), ((160, 4, 160, 18), 'os.system', 'os.system', ({(160, 14, 160, 17): 'cmd'}, {}), '(cmd)', False, 'import os\n'), ((170, 10, 170, 29), 'time.perf_counter', 'time.perf_counter', ({}, {}), '()', False, 'import time\n'), ((196, 10, 196, 29), 'time.perf_counter', 'time.perf_counter', ({}, {}), '()', False, 'import time\n'), ((201, 4, 201, 18), 'os.system', 'os.system', ({(201, 14, 201, 17): 'cmd'}, {}), '(cmd)', False, 'import os\n'), ((211, 10, 211, 29), 'time.perf_counter', 'time.perf_counter', ({}, {}), '()', False, 'import time\n'), ((224, 10, 224, 29), 'time.perf_counter', 'time.perf_counter', ({}, {}), '()', False, 'import time\n'), ((232, 4, 232, 18), 'os.system', 'os.system', ({(232, 14, 232, 17): 'cmd'}, {}), '(cmd)', False, 'import os\n'), ((241, 10, 241, 29), 'time.perf_counter', 'time.perf_counter', ({}, {}), '()', False, 'import time\n'), ((255, 10, 255, 29), 'time.perf_counter', 'time.perf_counter', ({}, {}), '()', False, 'import time\n'), ((270, 10, 270, 29), 'time.perf_counter', 'time.perf_counter', ({}, {}), '()', False, 'import time\n'), ((289, 10, 289, 29), 'time.perf_counter', 'time.perf_counter', ({}, {}), '()', False, 'import time\n'), ((302, 10, 302, 29), 'time.perf_counter', 'time.perf_counter', ({}, {}), '()', False, 'import time\n'), ((316, 10, 316, 29), 'time.perf_counter', 'time.perf_counter', ({}, {}), '()', False, 'import time\n'), ((330, 10, 330, 29), 'time.perf_counter', 'time.perf_counter', ({}, {}), '()', False, 'import time\n'), ((347, 10, 347, 29), 'time.perf_counter', 'time.perf_counter', ({}, {}), '()', False, 'import time\n'), ((361, 10, 361, 29), 'time.perf_counter', 'time.perf_counter', ({}, {}), '()', False, 'import time\n'), ((378, 10, 378, 29), 'time.perf_counter', 'time.perf_counter', ({}, {}), '()', False, 'import time\n'), ((392, 10, 392, 29), 'time.perf_counter', 'time.perf_counter', ({}, {}), '()', False, 'import time\n'), ((410, 10, 410, 29), 'time.perf_counter', 'time.perf_counter', ({}, {}), '()', False, 'import time\n'), ((424, 10, 424, 29), 'time.perf_counter', 'time.perf_counter', ({}, {}), '()', False, 'import time\n'), ((438, 10, 438, 29), 'time.perf_counter', 'time.perf_counter', ({}, {}), '()', False, 'import time\n'), ((452, 10, 452, 29), 'time.perf_counter', 'time.perf_counter', ({}, {}), '()', False, 'import time\n'), ((469, 10, 469, 29), 'time.perf_counter', 'time.perf_counter', ({}, {}), '()', False, 'import time\n'), ((483, 10, 483, 29), 'time.perf_counter', 'time.perf_counter', ({}, {}), '()', False, 'import time\n'), ((503, 10, 503, 29), 'time.perf_counter', 
'time.perf_counter', ({}, {}), '()', False, 'import time\n'), ((517, 10, 517, 29), 'time.perf_counter', 'time.perf_counter', ({}, {}), '()', False, 'import time\n'), ((538, 10, 538, 29), 'time.perf_counter', 'time.perf_counter', ({}, {}), '()', False, 'import time\n'), ((547, 10, 547, 29), 'time.perf_counter', 'time.perf_counter', ({}, {}), '()', False, 'import time\n'), ((570, 10, 570, 29), 'time.perf_counter', 'time.perf_counter', ({}, {}), '()', False, 'import time\n'), ((636, 10, 636, 29), 'time.perf_counter', 'time.perf_counter', ({}, {}), '()', False, 'import time\n'), ((676, 10, 676, 29), 'time.perf_counter', 'time.perf_counter', ({}, {}), '()', False, 'import time\n'), ((684, 10, 684, 29), 'time.perf_counter', 'time.perf_counter', ({}, {}), '()', False, 'import time\n'), ((78, 11, 78, 36), 'os.path.exists', 'os.path.exists', ({(78, 26, 78, 35): 'file_name'}, {}), '(file_name)', False, 'import os\n'), ((79, 8, 79, 22), 'os.system', 'os.system', ({(79, 18, 79, 21): 'cmd'}, {}), '(cmd)', False, 'import os\n'), ((85, 16, 85, 28), 'json.load', 'json.load', ({(85, 26, 85, 27): 'f'}, {}), '(f)', False, 'import json\n'), ((118, 11, 118, 36), 'os.path.exists', 'os.path.exists', ({(118, 26, 118, 35): 'file_name'}, {}), '(file_name)', False, 'import os\n'), ((119, 8, 119, 22), 'os.system', 'os.system', ({(119, 18, 119, 21): 'cmd'}, {}), '(cmd)', False, 'import os\n'), ((124, 16, 124, 28), 'json.load', 'json.load', ({(124, 26, 124, 27): 'f'}, {}), '(f)', False, 'import json\n'), ((164, 16, 164, 28), 'json.load', 'json.load', ({(164, 26, 164, 27): 'f'}, {}), '(f)', False, 'import json\n'), ((205, 16, 205, 28), 'json.load', 'json.load', ({(205, 26, 205, 27): 'f'}, {}), '(f)', False, 'import json\n'), ((250, 18, 250, 30), 'json.load', 'json.load', ({(250, 28, 250, 29): 'f'}, {}), '(f)', False, 'import json\n')] |
DronMDF/manabot | tb/sources/__init__.py | b412e8cb9b5247f05487bed4cbf4967f7b58327f | from .admin import ReviewListAdmin, SoAdminReviewIsOut, SoReviewForAdmin
from .admin_commands import (
AdminCommands,
AdminFilteredCommands,
ReviewListByCommands,
SoIgnoreReview,
SoSubmitReview
)
from .gerrit import ReviewOnServer, SoNewReview, SoOutReview, SoUpdateReview
from .reaction import (
ReactionAlways,
ReactionChoiced,
ReactionRestrict,
ReactionReview
)
from .review_list import (
ReviewDifference,
ReviewForUpdate,
ReviewIgnored,
ReviewIsNeed,
ReviewOne,
ReviewUnderControl,
ReviewVerified
)
from .telegram import (
SoNoTelegramTimeout,
SoTelegram,
TelegramBot,
TelegramOffsetFromDb
)
from .utility import SoJoin, SoSafe
| [] |
sisisin/pulumi-gcp | sdk/python/pulumi_gcp/accesscontextmanager/service_perimeter.py | af6681d70ea457843409110c1324817fe55f68ad | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['ServicePerimeterArgs', 'ServicePerimeter']
@pulumi.input_type
class ServicePerimeterArgs:
def __init__(__self__, *,
parent: pulumi.Input[str],
title: pulumi.Input[str],
description: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
perimeter_type: Optional[pulumi.Input[str]] = None,
spec: Optional[pulumi.Input['ServicePerimeterSpecArgs']] = None,
status: Optional[pulumi.Input['ServicePerimeterStatusArgs']] = None,
use_explicit_dry_run_spec: Optional[pulumi.Input[bool]] = None):
"""
The set of arguments for constructing a ServicePerimeter resource.
:param pulumi.Input[str] parent: The AccessPolicy this ServicePerimeter lives in.
Format: accessPolicies/{policy_id}
:param pulumi.Input[str] title: Human readable title. Must be unique within the Policy.
:param pulumi.Input[str] description: Description of the ServicePerimeter and its use. Does not affect
behavior.
:param pulumi.Input[str] name: Resource name for the ServicePerimeter. The short_name component must
begin with a letter and only include alphanumeric and '_'.
Format: accessPolicies/{policy_id}/servicePerimeters/{short_name}
:param pulumi.Input[str] perimeter_type: Specifies the type of the Perimeter. There are two types: regular and
bridge. Regular Service Perimeter contains resources, access levels,
and restricted services. Every resource can be in at most
ONE regular Service Perimeter.
In addition to being in a regular service perimeter, a resource can also
be in zero or more perimeter bridges. A perimeter bridge only contains
resources. Cross project operations are permitted if all effected
resources share some perimeter (whether bridge or regular). Perimeter
Bridge does not contain access levels or services: those are governed
entirely by the regular perimeter that resource is in.
Perimeter Bridges are typically useful when building more complex
topologies with many independent perimeters that need to share some data
with a common perimeter, but should not be able to share data among
themselves.
Default value is `PERIMETER_TYPE_REGULAR`.
Possible values are `PERIMETER_TYPE_REGULAR` and `PERIMETER_TYPE_BRIDGE`.
:param pulumi.Input['ServicePerimeterSpecArgs'] spec: Proposed (or dry run) ServicePerimeter configuration.
This configuration allows to specify and test ServicePerimeter configuration
without enforcing actual access restrictions. Only allowed to be set when
the `useExplicitDryRunSpec` flag is set.
Structure is documented below.
:param pulumi.Input['ServicePerimeterStatusArgs'] status: ServicePerimeter configuration. Specifies sets of resources,
restricted services and access levels that determine
perimeter content and boundaries.
Structure is documented below.
:param pulumi.Input[bool] use_explicit_dry_run_spec: Use explicit dry run spec flag. Ordinarily, a dry-run spec implicitly exists
for all Service Perimeters, and that spec is identical to the status for those
Service Perimeters. When this flag is set, it inhibits the generation of the
implicit spec, thereby allowing the user to explicitly provide a
configuration ("spec") to use in a dry-run version of the Service Perimeter.
This allows the user to test changes to the enforced config ("status") without
actually enforcing them. This testing is done through analyzing the differences
between currently enforced and suggested restrictions. useExplicitDryRunSpec must
bet set to True if any of the fields in the spec are set to non-default values.
"""
pulumi.set(__self__, "parent", parent)
pulumi.set(__self__, "title", title)
if description is not None:
pulumi.set(__self__, "description", description)
if name is not None:
pulumi.set(__self__, "name", name)
if perimeter_type is not None:
pulumi.set(__self__, "perimeter_type", perimeter_type)
if spec is not None:
pulumi.set(__self__, "spec", spec)
if status is not None:
pulumi.set(__self__, "status", status)
if use_explicit_dry_run_spec is not None:
pulumi.set(__self__, "use_explicit_dry_run_spec", use_explicit_dry_run_spec)
@property
@pulumi.getter
def parent(self) -> pulumi.Input[str]:
"""
The AccessPolicy this ServicePerimeter lives in.
Format: accessPolicies/{policy_id}
"""
return pulumi.get(self, "parent")
@parent.setter
def parent(self, value: pulumi.Input[str]):
pulumi.set(self, "parent", value)
@property
@pulumi.getter
def title(self) -> pulumi.Input[str]:
"""
Human readable title. Must be unique within the Policy.
"""
return pulumi.get(self, "title")
@title.setter
def title(self, value: pulumi.Input[str]):
pulumi.set(self, "title", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
Description of the ServicePerimeter and its use. Does not affect
behavior.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Resource name for the ServicePerimeter. The short_name component must
begin with a letter and only include alphanumeric and '_'.
Format: accessPolicies/{policy_id}/servicePerimeters/{short_name}
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="perimeterType")
def perimeter_type(self) -> Optional[pulumi.Input[str]]:
"""
Specifies the type of the Perimeter. There are two types: regular and
bridge. Regular Service Perimeter contains resources, access levels,
and restricted services. Every resource can be in at most
ONE regular Service Perimeter.
In addition to being in a regular service perimeter, a resource can also
be in zero or more perimeter bridges. A perimeter bridge only contains
resources. Cross project operations are permitted if all effected
resources share some perimeter (whether bridge or regular). Perimeter
Bridge does not contain access levels or services: those are governed
entirely by the regular perimeter that resource is in.
Perimeter Bridges are typically useful when building more complex
topologies with many independent perimeters that need to share some data
with a common perimeter, but should not be able to share data among
themselves.
Default value is `PERIMETER_TYPE_REGULAR`.
Possible values are `PERIMETER_TYPE_REGULAR` and `PERIMETER_TYPE_BRIDGE`.
"""
return pulumi.get(self, "perimeter_type")
@perimeter_type.setter
def perimeter_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "perimeter_type", value)
@property
@pulumi.getter
def spec(self) -> Optional[pulumi.Input['ServicePerimeterSpecArgs']]:
"""
Proposed (or dry run) ServicePerimeter configuration.
This configuration allows to specify and test ServicePerimeter configuration
without enforcing actual access restrictions. Only allowed to be set when
the `useExplicitDryRunSpec` flag is set.
Structure is documented below.
"""
return pulumi.get(self, "spec")
@spec.setter
def spec(self, value: Optional[pulumi.Input['ServicePerimeterSpecArgs']]):
pulumi.set(self, "spec", value)
@property
@pulumi.getter
def status(self) -> Optional[pulumi.Input['ServicePerimeterStatusArgs']]:
"""
ServicePerimeter configuration. Specifies sets of resources,
restricted services and access levels that determine
perimeter content and boundaries.
Structure is documented below.
"""
return pulumi.get(self, "status")
@status.setter
def status(self, value: Optional[pulumi.Input['ServicePerimeterStatusArgs']]):
pulumi.set(self, "status", value)
@property
@pulumi.getter(name="useExplicitDryRunSpec")
def use_explicit_dry_run_spec(self) -> Optional[pulumi.Input[bool]]:
"""
Use explicit dry run spec flag. Ordinarily, a dry-run spec implicitly exists
for all Service Perimeters, and that spec is identical to the status for those
Service Perimeters. When this flag is set, it inhibits the generation of the
implicit spec, thereby allowing the user to explicitly provide a
configuration ("spec") to use in a dry-run version of the Service Perimeter.
This allows the user to test changes to the enforced config ("status") without
actually enforcing them. This testing is done through analyzing the differences
between currently enforced and suggested restrictions. useExplicitDryRunSpec must
be set to True if any of the fields in the spec are set to non-default values.
"""
return pulumi.get(self, "use_explicit_dry_run_spec")
@use_explicit_dry_run_spec.setter
def use_explicit_dry_run_spec(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "use_explicit_dry_run_spec", value)
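# Illustrative sketch (not part of the generated SDK): building a
# `ServicePerimeterArgs` value that carries an explicit dry-run spec, mirroring the
# `use_explicit_dry_run_spec` flag documented above. It assumes
# `ServicePerimeterSpecArgs` from this package's `_inputs` module is in scope; the
# policy id, title and restricted service are placeholders.
def _example_dry_run_args():
    return ServicePerimeterArgs(
        parent="accessPolicies/123456789",
        title="restrict_storage_dry_run",
        spec=ServicePerimeterSpecArgs(restricted_services=["storage.googleapis.com"]),
        use_explicit_dry_run_spec=True,
    )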
@pulumi.input_type
class _ServicePerimeterState:
def __init__(__self__, *,
create_time: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
parent: Optional[pulumi.Input[str]] = None,
perimeter_type: Optional[pulumi.Input[str]] = None,
spec: Optional[pulumi.Input['ServicePerimeterSpecArgs']] = None,
status: Optional[pulumi.Input['ServicePerimeterStatusArgs']] = None,
title: Optional[pulumi.Input[str]] = None,
update_time: Optional[pulumi.Input[str]] = None,
use_explicit_dry_run_spec: Optional[pulumi.Input[bool]] = None):
"""
Input properties used for looking up and filtering ServicePerimeter resources.
:param pulumi.Input[str] create_time: Time the AccessPolicy was created in UTC.
:param pulumi.Input[str] description: Description of the ServicePerimeter and its use. Does not affect
behavior.
:param pulumi.Input[str] name: Resource name for the ServicePerimeter. The short_name component must
begin with a letter and only include alphanumeric and '_'.
Format: accessPolicies/{policy_id}/servicePerimeters/{short_name}
:param pulumi.Input[str] parent: The AccessPolicy this ServicePerimeter lives in.
Format: accessPolicies/{policy_id}
:param pulumi.Input[str] perimeter_type: Specifies the type of the Perimeter. There are two types: regular and
bridge. Regular Service Perimeter contains resources, access levels,
and restricted services. Every resource can be in at most
ONE regular Service Perimeter.
In addition to being in a regular service perimeter, a resource can also
be in zero or more perimeter bridges. A perimeter bridge only contains
resources. Cross-project operations are permitted if all affected
resources share some perimeter (whether bridge or regular). Perimeter
Bridge does not contain access levels or services: those are governed
entirely by the regular perimeter that resource is in.
Perimeter Bridges are typically useful when building more complex
topologies with many independent perimeters that need to share some data
with a common perimeter, but should not be able to share data among
themselves.
Default value is `PERIMETER_TYPE_REGULAR`.
Possible values are `PERIMETER_TYPE_REGULAR` and `PERIMETER_TYPE_BRIDGE`.
:param pulumi.Input['ServicePerimeterSpecArgs'] spec: Proposed (or dry run) ServicePerimeter configuration.
This configuration allows you to specify and test ServicePerimeter configuration
without enforcing actual access restrictions. Only allowed to be set when
the `useExplicitDryRunSpec` flag is set.
Structure is documented below.
:param pulumi.Input['ServicePerimeterStatusArgs'] status: ServicePerimeter configuration. Specifies sets of resources,
restricted services and access levels that determine
perimeter content and boundaries.
Structure is documented below.
:param pulumi.Input[str] title: Human readable title. Must be unique within the Policy.
:param pulumi.Input[str] update_time: Time the AccessPolicy was updated in UTC.
:param pulumi.Input[bool] use_explicit_dry_run_spec: Use explicit dry run spec flag. Ordinarily, a dry-run spec implicitly exists
for all Service Perimeters, and that spec is identical to the status for those
Service Perimeters. When this flag is set, it inhibits the generation of the
implicit spec, thereby allowing the user to explicitly provide a
configuration ("spec") to use in a dry-run version of the Service Perimeter.
This allows the user to test changes to the enforced config ("status") without
actually enforcing them. This testing is done through analyzing the differences
between currently enforced and suggested restrictions. useExplicitDryRunSpec must
be set to True if any of the fields in the spec are set to non-default values.
"""
if create_time is not None:
pulumi.set(__self__, "create_time", create_time)
if description is not None:
pulumi.set(__self__, "description", description)
if name is not None:
pulumi.set(__self__, "name", name)
if parent is not None:
pulumi.set(__self__, "parent", parent)
if perimeter_type is not None:
pulumi.set(__self__, "perimeter_type", perimeter_type)
if spec is not None:
pulumi.set(__self__, "spec", spec)
if status is not None:
pulumi.set(__self__, "status", status)
if title is not None:
pulumi.set(__self__, "title", title)
if update_time is not None:
pulumi.set(__self__, "update_time", update_time)
if use_explicit_dry_run_spec is not None:
pulumi.set(__self__, "use_explicit_dry_run_spec", use_explicit_dry_run_spec)
@property
@pulumi.getter(name="createTime")
def create_time(self) -> Optional[pulumi.Input[str]]:
"""
Time the AccessPolicy was created in UTC.
"""
return pulumi.get(self, "create_time")
@create_time.setter
def create_time(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "create_time", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
Description of the ServicePerimeter and its use. Does not affect
behavior.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Resource name for the ServicePerimeter. The short_name component must
begin with a letter and only include alphanumeric and '_'.
Format: accessPolicies/{policy_id}/servicePerimeters/{short_name}
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def parent(self) -> Optional[pulumi.Input[str]]:
"""
The AccessPolicy this ServicePerimeter lives in.
Format: accessPolicies/{policy_id}
"""
return pulumi.get(self, "parent")
@parent.setter
def parent(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "parent", value)
@property
@pulumi.getter(name="perimeterType")
def perimeter_type(self) -> Optional[pulumi.Input[str]]:
"""
Specifies the type of the Perimeter. There are two types: regular and
bridge. Regular Service Perimeter contains resources, access levels,
and restricted services. Every resource can be in at most
ONE regular Service Perimeter.
In addition to being in a regular service perimeter, a resource can also
be in zero or more perimeter bridges. A perimeter bridge only contains
resources. Cross-project operations are permitted if all affected
resources share some perimeter (whether bridge or regular). Perimeter
Bridge does not contain access levels or services: those are governed
entirely by the regular perimeter that resource is in.
Perimeter Bridges are typically useful when building more complex
topologies with many independent perimeters that need to share some data
with a common perimeter, but should not be able to share data among
themselves.
Default value is `PERIMETER_TYPE_REGULAR`.
Possible values are `PERIMETER_TYPE_REGULAR` and `PERIMETER_TYPE_BRIDGE`.
"""
return pulumi.get(self, "perimeter_type")
@perimeter_type.setter
def perimeter_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "perimeter_type", value)
@property
@pulumi.getter
def spec(self) -> Optional[pulumi.Input['ServicePerimeterSpecArgs']]:
"""
Proposed (or dry run) ServicePerimeter configuration.
This configuration allows you to specify and test ServicePerimeter configuration
without enforcing actual access restrictions. Only allowed to be set when
the `useExplicitDryRunSpec` flag is set.
Structure is documented below.
"""
return pulumi.get(self, "spec")
@spec.setter
def spec(self, value: Optional[pulumi.Input['ServicePerimeterSpecArgs']]):
pulumi.set(self, "spec", value)
@property
@pulumi.getter
def status(self) -> Optional[pulumi.Input['ServicePerimeterStatusArgs']]:
"""
ServicePerimeter configuration. Specifies sets of resources,
restricted services and access levels that determine
perimeter content and boundaries.
Structure is documented below.
"""
return pulumi.get(self, "status")
@status.setter
def status(self, value: Optional[pulumi.Input['ServicePerimeterStatusArgs']]):
pulumi.set(self, "status", value)
@property
@pulumi.getter
def title(self) -> Optional[pulumi.Input[str]]:
"""
Human readable title. Must be unique within the Policy.
"""
return pulumi.get(self, "title")
@title.setter
def title(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "title", value)
@property
@pulumi.getter(name="updateTime")
def update_time(self) -> Optional[pulumi.Input[str]]:
"""
Time the AccessPolicy was updated in UTC.
"""
return pulumi.get(self, "update_time")
@update_time.setter
def update_time(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "update_time", value)
@property
@pulumi.getter(name="useExplicitDryRunSpec")
def use_explicit_dry_run_spec(self) -> Optional[pulumi.Input[bool]]:
"""
Use explicit dry run spec flag. Ordinarily, a dry-run spec implicitly exists
for all Service Perimeters, and that spec is identical to the status for those
Service Perimeters. When this flag is set, it inhibits the generation of the
implicit spec, thereby allowing the user to explicitly provide a
configuration ("spec") to use in a dry-run version of the Service Perimeter.
This allows the user to test changes to the enforced config ("status") without
actually enforcing them. This testing is done through analyzing the differences
between currently enforced and suggested restrictions. useExplicitDryRunSpec must
be set to True if any of the fields in the spec are set to non-default values.
"""
return pulumi.get(self, "use_explicit_dry_run_spec")
@use_explicit_dry_run_spec.setter
def use_explicit_dry_run_spec(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "use_explicit_dry_run_spec", value)
class ServicePerimeter(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
description: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
parent: Optional[pulumi.Input[str]] = None,
perimeter_type: Optional[pulumi.Input[str]] = None,
spec: Optional[pulumi.Input[pulumi.InputType['ServicePerimeterSpecArgs']]] = None,
status: Optional[pulumi.Input[pulumi.InputType['ServicePerimeterStatusArgs']]] = None,
title: Optional[pulumi.Input[str]] = None,
use_explicit_dry_run_spec: Optional[pulumi.Input[bool]] = None,
__props__=None):
"""
ServicePerimeter describes a set of GCP resources which can freely import
and export data amongst themselves, but not export outside of the
ServicePerimeter. If a request with a source within this ServicePerimeter
has a target outside of the ServicePerimeter, the request will be blocked.
Otherwise the request is allowed. There are two types of Service Perimeter:
Regular and Bridge. Regular Service Perimeters cannot overlap; a single
GCP project can only belong to a single regular Service Perimeter. Service
Perimeter Bridges can contain only GCP projects as members; a single GCP
project may belong to multiple Service Perimeter Bridges.
To get more information about ServicePerimeter, see:
* [API documentation](https://cloud.google.com/access-context-manager/docs/reference/rest/v1/accessPolicies.servicePerimeters)
* How-to Guides
* [Service Perimeter Quickstart](https://cloud.google.com/vpc-service-controls/docs/quickstart)
> **Warning:** If you are using User ADCs (Application Default Credentials) with this resource,
you must specify a `billing_project` and set `user_project_override` to true
in the provider configuration. Otherwise the ACM API will return a 403 error.
Your account must have the `serviceusage.services.use` permission on the
`billing_project` you defined.
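If that applies, the provider-level settings can be supplied roughly as in the
sketch below (the billing project id is a placeholder) and attached to resources
via `pulumi.ResourceOptions(provider=...)`:
```python
import pulumi_gcp as gcp
acm_provider = gcp.Provider("acm",
    billing_project="my-billing-project",
    user_project_override=True)
```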
## Example Usage
### Access Context Manager Service Perimeter Basic
```python
import pulumi
import pulumi_gcp as gcp
access_policy = gcp.accesscontextmanager.AccessPolicy("access-policy",
parent="organizations/123456789",
title="my policy")
service_perimeter = gcp.accesscontextmanager.ServicePerimeter("service-perimeter",
parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"),
status=gcp.accesscontextmanager.ServicePerimeterStatusArgs(
restricted_services=["storage.googleapis.com"],
),
title="restrict_storage")
access_level = gcp.accesscontextmanager.AccessLevel("access-level",
basic=gcp.accesscontextmanager.AccessLevelBasicArgs(
conditions=[gcp.accesscontextmanager.AccessLevelBasicConditionArgs(
device_policy=gcp.accesscontextmanager.AccessLevelBasicConditionDevicePolicyArgs(
os_constraints=[gcp.accesscontextmanager.AccessLevelBasicConditionDevicePolicyOsConstraintArgs(
os_type="DESKTOP_CHROME_OS",
)],
require_screen_lock=False,
),
regions=[
"CH",
"IT",
"US",
],
)],
),
parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"),
title="chromeos_no_lock")
```
### Access Context Manager Service Perimeter Secure Data Exchange
```python
import pulumi
import pulumi_gcp as gcp
access_policy = gcp.accesscontextmanager.AccessPolicy("access-policy",
parent="organizations/123456789",
title="my policy")
secure_data_exchange = gcp.accesscontextmanager.ServicePerimeters("secure-data-exchange",
parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"),
service_perimeters=[
gcp.accesscontextmanager.ServicePerimetersServicePerimeterArgs(
name=access_policy.name.apply(lambda name: f"accessPolicies/{name}/servicePerimeters/"),
title="",
status=gcp.accesscontextmanager.ServicePerimetersServicePerimeterStatusArgs(
restricted_services=["storage.googleapis.com"],
),
),
gcp.accesscontextmanager.ServicePerimetersServicePerimeterArgs(
name=access_policy.name.apply(lambda name: f"accessPolicies/{name}/servicePerimeters/"),
title="",
status=gcp.accesscontextmanager.ServicePerimetersServicePerimeterStatusArgs(
restricted_services=["bigtable.googleapis.com"],
vpc_accessible_services=gcp.accesscontextmanager.ServicePerimetersServicePerimeterStatusVpcAccessibleServicesArgs(
enable_restriction=True,
allowed_services=["bigquery.googleapis.com"],
),
),
),
])
access_level = gcp.accesscontextmanager.AccessLevel("access-level",
parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"),
title="secure_data_exchange",
basic=gcp.accesscontextmanager.AccessLevelBasicArgs(
conditions=[gcp.accesscontextmanager.AccessLevelBasicConditionArgs(
device_policy=gcp.accesscontextmanager.AccessLevelBasicConditionDevicePolicyArgs(
require_screen_lock=False,
os_constraints=[gcp.accesscontextmanager.AccessLevelBasicConditionDevicePolicyOsConstraintArgs(
os_type="DESKTOP_CHROME_OS",
)],
),
regions=[
"CH",
"IT",
"US",
],
)],
))
test_access = gcp.accesscontextmanager.ServicePerimeter("test-access",
parent=f"accessPolicies/{google_access_context_manager_access_policy['test-access']['name']}",
title="%s",
perimeter_type="PERIMETER_TYPE_REGULAR",
status=gcp.accesscontextmanager.ServicePerimeterStatusArgs(
restricted_services=[
"bigquery.googleapis.com",
"storage.googleapis.com",
],
access_levels=[access_level.name],
vpc_accessible_services=gcp.accesscontextmanager.ServicePerimeterStatusVpcAccessibleServicesArgs(
enable_restriction=True,
allowed_services=[
"bigquery.googleapis.com",
"storage.googleapis.com",
],
),
ingress_policies=[gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyArgs(
ingress_from=gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressFromArgs(
sources=[gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressFromSourceArgs(
                    access_level=access_level.name,
)],
identity_type="ANY_IDENTITY",
),
ingress_to=gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToArgs(
resources=["*"],
operations=[
gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationArgs(
service_name="bigquery.googleapis.com",
method_selectors=[
gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationMethodSelectorArgs(
method="BigQueryStorage.ReadRows",
),
gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationMethodSelectorArgs(
method="TableService.ListTables",
),
gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationMethodSelectorArgs(
permission="bigquery.jobs.get",
),
],
),
gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationArgs(
service_name="storage.googleapis.com",
method_selectors=[gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationMethodSelectorArgs(
method="google.storage.objects.create",
)],
),
],
),
)],
egress_policies=[gcp.accesscontextmanager.ServicePerimeterStatusEgressPolicyArgs(
egress_from=gcp.accesscontextmanager.ServicePerimeterStatusEgressPolicyEgressFromArgs(
identity_type="ANY_USER_ACCOUNT",
),
)],
))
```
### Access Context Manager Service Perimeter Dry Run
```python
import pulumi
import pulumi_gcp as gcp
access_policy = gcp.accesscontextmanager.AccessPolicy("access-policy",
parent="organizations/123456789",
title="my policy")
service_perimeter = gcp.accesscontextmanager.ServicePerimeter("service-perimeter",
parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"),
spec=gcp.accesscontextmanager.ServicePerimeterSpecArgs(
restricted_services=["storage.googleapis.com"],
),
status=gcp.accesscontextmanager.ServicePerimeterStatusArgs(
restricted_services=["bigquery.googleapis.com"],
),
title="restrict_bigquery_dryrun_storage",
use_explicit_dry_run_spec=True)
```
## Import
ServicePerimeter can be imported using any of these accepted formats
```sh
$ pulumi import gcp:accesscontextmanager/servicePerimeter:ServicePerimeter default {{name}}
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] description: Description of the ServicePerimeter and its use. Does not affect
behavior.
:param pulumi.Input[str] name: Resource name for the ServicePerimeter. The short_name component must
begin with a letter and only include alphanumeric and '_'.
Format: accessPolicies/{policy_id}/servicePerimeters/{short_name}
:param pulumi.Input[str] parent: The AccessPolicy this ServicePerimeter lives in.
Format: accessPolicies/{policy_id}
:param pulumi.Input[str] perimeter_type: Specifies the type of the Perimeter. There are two types: regular and
bridge. Regular Service Perimeter contains resources, access levels,
and restricted services. Every resource can be in at most
ONE regular Service Perimeter.
In addition to being in a regular service perimeter, a resource can also
be in zero or more perimeter bridges. A perimeter bridge only contains
resources. Cross-project operations are permitted if all affected
resources share some perimeter (whether bridge or regular). Perimeter
Bridge does not contain access levels or services: those are governed
entirely by the regular perimeter that resource is in.
Perimeter Bridges are typically useful when building more complex
topologies with many independent perimeters that need to share some data
with a common perimeter, but should not be able to share data among
themselves.
Default value is `PERIMETER_TYPE_REGULAR`.
Possible values are `PERIMETER_TYPE_REGULAR` and `PERIMETER_TYPE_BRIDGE`.
:param pulumi.Input[pulumi.InputType['ServicePerimeterSpecArgs']] spec: Proposed (or dry run) ServicePerimeter configuration.
This configuration allows you to specify and test ServicePerimeter configuration
without enforcing actual access restrictions. Only allowed to be set when
the `useExplicitDryRunSpec` flag is set.
Structure is documented below.
:param pulumi.Input[pulumi.InputType['ServicePerimeterStatusArgs']] status: ServicePerimeter configuration. Specifies sets of resources,
restricted services and access levels that determine
perimeter content and boundaries.
Structure is documented below.
:param pulumi.Input[str] title: Human readable title. Must be unique within the Policy.
:param pulumi.Input[bool] use_explicit_dry_run_spec: Use explicit dry run spec flag. Ordinarily, a dry-run spec implicitly exists
for all Service Perimeters, and that spec is identical to the status for those
Service Perimeters. When this flag is set, it inhibits the generation of the
implicit spec, thereby allowing the user to explicitly provide a
configuration ("spec") to use in a dry-run version of the Service Perimeter.
This allows the user to test changes to the enforced config ("status") without
actually enforcing them. This testing is done through analyzing the differences
between currently enforced and suggested restrictions. useExplicitDryRunSpec must
be set to True if any of the fields in the spec are set to non-default values.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: ServicePerimeterArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
ServicePerimeter describes a set of GCP resources which can freely import
and export data amongst themselves, but not export outside of the
ServicePerimeter. If a request with a source within this ServicePerimeter
has a target outside of the ServicePerimeter, the request will be blocked.
Otherwise the request is allowed. There are two types of Service Perimeter:
Regular and Bridge. Regular Service Perimeters cannot overlap; a single
GCP project can only belong to a single regular Service Perimeter. Service
Perimeter Bridges can contain only GCP projects as members; a single GCP
project may belong to multiple Service Perimeter Bridges.
To get more information about ServicePerimeter, see:
* [API documentation](https://cloud.google.com/access-context-manager/docs/reference/rest/v1/accessPolicies.servicePerimeters)
* How-to Guides
* [Service Perimeter Quickstart](https://cloud.google.com/vpc-service-controls/docs/quickstart)
> **Warning:** If you are using User ADCs (Application Default Credentials) with this resource,
you must specify a `billing_project` and set `user_project_override` to true
in the provider configuration. Otherwise the ACM API will return a 403 error.
Your account must have the `serviceusage.services.use` permission on the
`billing_project` you defined.
## Example Usage
### Access Context Manager Service Perimeter Basic
```python
import pulumi
import pulumi_gcp as gcp
access_policy = gcp.accesscontextmanager.AccessPolicy("access-policy",
parent="organizations/123456789",
title="my policy")
service_perimeter = gcp.accesscontextmanager.ServicePerimeter("service-perimeter",
parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"),
status=gcp.accesscontextmanager.ServicePerimeterStatusArgs(
restricted_services=["storage.googleapis.com"],
),
title="restrict_storage")
access_level = gcp.accesscontextmanager.AccessLevel("access-level",
basic=gcp.accesscontextmanager.AccessLevelBasicArgs(
conditions=[gcp.accesscontextmanager.AccessLevelBasicConditionArgs(
device_policy=gcp.accesscontextmanager.AccessLevelBasicConditionDevicePolicyArgs(
os_constraints=[gcp.accesscontextmanager.AccessLevelBasicConditionDevicePolicyOsConstraintArgs(
os_type="DESKTOP_CHROME_OS",
)],
require_screen_lock=False,
),
regions=[
"CH",
"IT",
"US",
],
)],
),
parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"),
title="chromeos_no_lock")
```
### Access Context Manager Service Perimeter Secure Data Exchange
```python
import pulumi
import pulumi_gcp as gcp
access_policy = gcp.accesscontextmanager.AccessPolicy("access-policy",
parent="organizations/123456789",
title="my policy")
secure_data_exchange = gcp.accesscontextmanager.ServicePerimeters("secure-data-exchange",
parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"),
service_perimeters=[
gcp.accesscontextmanager.ServicePerimetersServicePerimeterArgs(
name=access_policy.name.apply(lambda name: f"accessPolicies/{name}/servicePerimeters/"),
title="",
status=gcp.accesscontextmanager.ServicePerimetersServicePerimeterStatusArgs(
restricted_services=["storage.googleapis.com"],
),
),
gcp.accesscontextmanager.ServicePerimetersServicePerimeterArgs(
name=access_policy.name.apply(lambda name: f"accessPolicies/{name}/servicePerimeters/"),
title="",
status=gcp.accesscontextmanager.ServicePerimetersServicePerimeterStatusArgs(
restricted_services=["bigtable.googleapis.com"],
vpc_accessible_services=gcp.accesscontextmanager.ServicePerimetersServicePerimeterStatusVpcAccessibleServicesArgs(
enable_restriction=True,
allowed_services=["bigquery.googleapis.com"],
),
),
),
])
access_level = gcp.accesscontextmanager.AccessLevel("access-level",
parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"),
title="secure_data_exchange",
basic=gcp.accesscontextmanager.AccessLevelBasicArgs(
conditions=[gcp.accesscontextmanager.AccessLevelBasicConditionArgs(
device_policy=gcp.accesscontextmanager.AccessLevelBasicConditionDevicePolicyArgs(
require_screen_lock=False,
os_constraints=[gcp.accesscontextmanager.AccessLevelBasicConditionDevicePolicyOsConstraintArgs(
os_type="DESKTOP_CHROME_OS",
)],
),
regions=[
"CH",
"IT",
"US",
],
)],
))
test_access = gcp.accesscontextmanager.ServicePerimeter("test-access",
parent=f"accessPolicies/{google_access_context_manager_access_policy['test-access']['name']}",
title="%s",
perimeter_type="PERIMETER_TYPE_REGULAR",
status=gcp.accesscontextmanager.ServicePerimeterStatusArgs(
restricted_services=[
"bigquery.googleapis.com",
"storage.googleapis.com",
],
access_levels=[access_level.name],
vpc_accessible_services=gcp.accesscontextmanager.ServicePerimeterStatusVpcAccessibleServicesArgs(
enable_restriction=True,
allowed_services=[
"bigquery.googleapis.com",
"storage.googleapis.com",
],
),
ingress_policies=[gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyArgs(
ingress_from=gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressFromArgs(
sources=[gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressFromSourceArgs(
                    access_level=access_level.name,
)],
identity_type="ANY_IDENTITY",
),
ingress_to=gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToArgs(
resources=["*"],
operations=[
gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationArgs(
service_name="bigquery.googleapis.com",
method_selectors=[
gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationMethodSelectorArgs(
method="BigQueryStorage.ReadRows",
),
gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationMethodSelectorArgs(
method="TableService.ListTables",
),
gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationMethodSelectorArgs(
permission="bigquery.jobs.get",
),
],
),
gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationArgs(
service_name="storage.googleapis.com",
method_selectors=[gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationMethodSelectorArgs(
method="google.storage.objects.create",
)],
),
],
),
)],
egress_policies=[gcp.accesscontextmanager.ServicePerimeterStatusEgressPolicyArgs(
egress_from=gcp.accesscontextmanager.ServicePerimeterStatusEgressPolicyEgressFromArgs(
identity_type="ANY_USER_ACCOUNT",
),
)],
))
```
### Access Context Manager Service Perimeter Dry Run
```python
import pulumi
import pulumi_gcp as gcp
access_policy = gcp.accesscontextmanager.AccessPolicy("access-policy",
parent="organizations/123456789",
title="my policy")
service_perimeter = gcp.accesscontextmanager.ServicePerimeter("service-perimeter",
parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"),
spec=gcp.accesscontextmanager.ServicePerimeterSpecArgs(
restricted_services=["storage.googleapis.com"],
),
status=gcp.accesscontextmanager.ServicePerimeterStatusArgs(
restricted_services=["bigquery.googleapis.com"],
),
title="restrict_bigquery_dryrun_storage",
use_explicit_dry_run_spec=True)
```
## Import
ServicePerimeter can be imported using any of these accepted formats
```sh
$ pulumi import gcp:accesscontextmanager/servicePerimeter:ServicePerimeter default {{name}}
```
:param str resource_name: The name of the resource.
:param ServicePerimeterArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(ServicePerimeterArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
description: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
parent: Optional[pulumi.Input[str]] = None,
perimeter_type: Optional[pulumi.Input[str]] = None,
spec: Optional[pulumi.Input[pulumi.InputType['ServicePerimeterSpecArgs']]] = None,
status: Optional[pulumi.Input[pulumi.InputType['ServicePerimeterStatusArgs']]] = None,
title: Optional[pulumi.Input[str]] = None,
use_explicit_dry_run_spec: Optional[pulumi.Input[bool]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = ServicePerimeterArgs.__new__(ServicePerimeterArgs)
__props__.__dict__["description"] = description
__props__.__dict__["name"] = name
if parent is None and not opts.urn:
raise TypeError("Missing required property 'parent'")
__props__.__dict__["parent"] = parent
__props__.__dict__["perimeter_type"] = perimeter_type
__props__.__dict__["spec"] = spec
__props__.__dict__["status"] = status
if title is None and not opts.urn:
raise TypeError("Missing required property 'title'")
__props__.__dict__["title"] = title
__props__.__dict__["use_explicit_dry_run_spec"] = use_explicit_dry_run_spec
__props__.__dict__["create_time"] = None
__props__.__dict__["update_time"] = None
super(ServicePerimeter, __self__).__init__(
'gcp:accesscontextmanager/servicePerimeter:ServicePerimeter',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
create_time: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
parent: Optional[pulumi.Input[str]] = None,
perimeter_type: Optional[pulumi.Input[str]] = None,
spec: Optional[pulumi.Input[pulumi.InputType['ServicePerimeterSpecArgs']]] = None,
status: Optional[pulumi.Input[pulumi.InputType['ServicePerimeterStatusArgs']]] = None,
title: Optional[pulumi.Input[str]] = None,
update_time: Optional[pulumi.Input[str]] = None,
use_explicit_dry_run_spec: Optional[pulumi.Input[bool]] = None) -> 'ServicePerimeter':
"""
Get an existing ServicePerimeter resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] create_time: Time the AccessPolicy was created in UTC.
:param pulumi.Input[str] description: Description of the ServicePerimeter and its use. Does not affect
behavior.
:param pulumi.Input[str] name: Resource name for the ServicePerimeter. The short_name component must
begin with a letter and only include alphanumeric and '_'.
Format: accessPolicies/{policy_id}/servicePerimeters/{short_name}
:param pulumi.Input[str] parent: The AccessPolicy this ServicePerimeter lives in.
Format: accessPolicies/{policy_id}
:param pulumi.Input[str] perimeter_type: Specifies the type of the Perimeter. There are two types: regular and
bridge. Regular Service Perimeter contains resources, access levels,
and restricted services. Every resource can be in at most
ONE regular Service Perimeter.
In addition to being in a regular service perimeter, a resource can also
be in zero or more perimeter bridges. A perimeter bridge only contains
resources. Cross-project operations are permitted if all affected
resources share some perimeter (whether bridge or regular). Perimeter
Bridge does not contain access levels or services: those are governed
entirely by the regular perimeter that resource is in.
Perimeter Bridges are typically useful when building more complex
topologies with many independent perimeters that need to share some data
with a common perimeter, but should not be able to share data among
themselves.
Default value is `PERIMETER_TYPE_REGULAR`.
Possible values are `PERIMETER_TYPE_REGULAR` and `PERIMETER_TYPE_BRIDGE`.
:param pulumi.Input[pulumi.InputType['ServicePerimeterSpecArgs']] spec: Proposed (or dry run) ServicePerimeter configuration.
This configuration allows you to specify and test ServicePerimeter configuration
without enforcing actual access restrictions. Only allowed to be set when
the `useExplicitDryRunSpec` flag is set.
Structure is documented below.
:param pulumi.Input[pulumi.InputType['ServicePerimeterStatusArgs']] status: ServicePerimeter configuration. Specifies sets of resources,
restricted services and access levels that determine
perimeter content and boundaries.
Structure is documented below.
:param pulumi.Input[str] title: Human readable title. Must be unique within the Policy.
:param pulumi.Input[str] update_time: Time the AccessPolicy was updated in UTC.
:param pulumi.Input[bool] use_explicit_dry_run_spec: Use explicit dry run spec flag. Ordinarily, a dry-run spec implicitly exists
for all Service Perimeters, and that spec is identical to the status for those
Service Perimeters. When this flag is set, it inhibits the generation of the
implicit spec, thereby allowing the user to explicitly provide a
configuration ("spec") to use in a dry-run version of the Service Perimeter.
This allows the user to test changes to the enforced config ("status") without
actually enforcing them. This testing is done through analyzing the differences
between currently enforced and suggested restrictions. useExplicitDryRunSpec must
be set to True if any of the fields in the spec are set to non-default values.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _ServicePerimeterState.__new__(_ServicePerimeterState)
__props__.__dict__["create_time"] = create_time
__props__.__dict__["description"] = description
__props__.__dict__["name"] = name
__props__.__dict__["parent"] = parent
__props__.__dict__["perimeter_type"] = perimeter_type
__props__.__dict__["spec"] = spec
__props__.__dict__["status"] = status
__props__.__dict__["title"] = title
__props__.__dict__["update_time"] = update_time
__props__.__dict__["use_explicit_dry_run_spec"] = use_explicit_dry_run_spec
return ServicePerimeter(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="createTime")
def create_time(self) -> pulumi.Output[str]:
"""
Time the AccessPolicy was created in UTC.
"""
return pulumi.get(self, "create_time")
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[str]]:
"""
Description of the ServicePerimeter and its use. Does not affect
behavior.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Resource name for the ServicePerimeter. The short_name component must
begin with a letter and only include alphanumeric and '_'.
Format: accessPolicies/{policy_id}/servicePerimeters/{short_name}
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def parent(self) -> pulumi.Output[str]:
"""
The AccessPolicy this ServicePerimeter lives in.
Format: accessPolicies/{policy_id}
"""
return pulumi.get(self, "parent")
@property
@pulumi.getter(name="perimeterType")
def perimeter_type(self) -> pulumi.Output[Optional[str]]:
"""
Specifies the type of the Perimeter. There are two types: regular and
bridge. Regular Service Perimeter contains resources, access levels,
and restricted services. Every resource can be in at most
ONE regular Service Perimeter.
In addition to being in a regular service perimeter, a resource can also
be in zero or more perimeter bridges. A perimeter bridge only contains
resources. Cross-project operations are permitted if all affected
resources share some perimeter (whether bridge or regular). Perimeter
Bridge does not contain access levels or services: those are governed
entirely by the regular perimeter that resource is in.
Perimeter Bridges are typically useful when building more complex
topologies with many independent perimeters that need to share some data
with a common perimeter, but should not be able to share data among
themselves.
Default value is `PERIMETER_TYPE_REGULAR`.
Possible values are `PERIMETER_TYPE_REGULAR` and `PERIMETER_TYPE_BRIDGE`.
"""
return pulumi.get(self, "perimeter_type")
@property
@pulumi.getter
def spec(self) -> pulumi.Output[Optional['outputs.ServicePerimeterSpec']]:
"""
Proposed (or dry run) ServicePerimeter configuration.
This configuration allows you to specify and test ServicePerimeter configuration
without enforcing actual access restrictions. Only allowed to be set when
the `useExplicitDryRunSpec` flag is set.
Structure is documented below.
"""
return pulumi.get(self, "spec")
@property
@pulumi.getter
def status(self) -> pulumi.Output[Optional['outputs.ServicePerimeterStatus']]:
"""
ServicePerimeter configuration. Specifies sets of resources,
restricted services and access levels that determine
perimeter content and boundaries.
Structure is documented below.
"""
return pulumi.get(self, "status")
@property
@pulumi.getter
def title(self) -> pulumi.Output[str]:
"""
Human readable title. Must be unique within the Policy.
"""
return pulumi.get(self, "title")
@property
@pulumi.getter(name="updateTime")
def update_time(self) -> pulumi.Output[str]:
"""
Time the AccessPolicy was updated in UTC.
"""
return pulumi.get(self, "update_time")
@property
@pulumi.getter(name="useExplicitDryRunSpec")
def use_explicit_dry_run_spec(self) -> pulumi.Output[Optional[bool]]:
"""
Use explicit dry run spec flag. Ordinarily, a dry-run spec implicitly exists
for all Service Perimeters, and that spec is identical to the status for those
Service Perimeters. When this flag is set, it inhibits the generation of the
implicit spec, thereby allowing the user to explicitly provide a
configuration ("spec") to use in a dry-run version of the Service Perimeter.
This allows the user to test changes to the enforced config ("status") without
actually enforcing them. This testing is done through analyzing the differences
between currently enforced and suggested restrictions. useExplicitDryRunSpec must
be set to True if any of the fields in the spec are set to non-default values.
"""
return pulumi.get(self, "use_explicit_dry_run_spec")
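# Illustrative usage sketch (not part of the generated SDK): adopting an already
# existing perimeter into a Pulumi program with `ServicePerimeter.get`; the
# resource name below is a placeholder.
def _example_lookup_existing_perimeter():
    return ServicePerimeter.get(
        "existing-perimeter",
        id="accessPolicies/123456789/servicePerimeters/restrict_storage")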
| [((139, 5, 139, 40), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((197, 5, 197, 48), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((299, 5, 299, 37), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((351, 5, 351, 40), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((421, 5, 421, 37), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((433, 5, 433, 48), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((1042, 5, 1042, 37), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((1078, 5, 1078, 40), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((1132, 5, 1132, 37), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((1140, 5, 1140, 48), 'pulumi.getter', 'pulumi.getter', (), '', False, 'import pulumi\n'), ((71, 8, 71, 46), 'pulumi.set', 'pulumi.set', ({(71, 19, 71, 27): '__self__', (71, 29, 71, 37): '"""parent"""', (71, 39, 71, 45): 'parent'}, {}), "(__self__, 'parent', parent)", False, 'import pulumi\n'), ((72, 8, 72, 44), 'pulumi.set', 'pulumi.set', ({(72, 19, 72, 27): '__self__', (72, 29, 72, 36): '"""title"""', (72, 38, 72, 43): 'title'}, {}), "(__self__, 'title', title)", False, 'import pulumi\n'), ((93, 15, 93, 41), 'pulumi.get', 'pulumi.get', ({(93, 26, 93, 30): 'self', (93, 32, 93, 40): '"""parent"""'}, {}), "(self, 'parent')", False, 'import pulumi\n'), ((97, 8, 97, 41), 'pulumi.set', 'pulumi.set', ({(97, 19, 97, 23): 'self', (97, 25, 97, 33): '"""parent"""', (97, 35, 97, 40): 'value'}, {}), "(self, 'parent', value)", False, 'import pulumi\n'), ((105, 15, 105, 40), 'pulumi.get', 'pulumi.get', ({(105, 26, 105, 30): 'self', (105, 32, 105, 39): '"""title"""'}, {}), "(self, 'title')", False, 'import pulumi\n'), ((109, 8, 109, 40), 'pulumi.set', 'pulumi.set', ({(109, 19, 109, 23): 'self', (109, 25, 109, 32): '"""title"""', (109, 34, 109, 39): 'value'}, {}), "(self, 'title', value)", False, 'import pulumi\n'), ((118, 15, 118, 46), 'pulumi.get', 'pulumi.get', ({(118, 26, 118, 30): 'self', (118, 32, 118, 45): '"""description"""'}, {}), "(self, 'description')", False, 'import pulumi\n'), ((122, 8, 122, 46), 'pulumi.set', 'pulumi.set', ({(122, 19, 122, 23): 'self', (122, 25, 122, 38): '"""description"""', (122, 40, 122, 45): 'value'}, {}), "(self, 'description', value)", False, 'import pulumi\n'), ((132, 15, 132, 39), 'pulumi.get', 'pulumi.get', ({(132, 26, 132, 30): 'self', (132, 32, 132, 38): '"""name"""'}, {}), "(self, 'name')", False, 'import pulumi\n'), ((136, 8, 136, 39), 'pulumi.set', 'pulumi.set', ({(136, 19, 136, 23): 'self', (136, 25, 136, 31): '"""name"""', (136, 33, 136, 38): 'value'}, {}), "(self, 'name', value)", False, 'import pulumi\n'), ((159, 15, 159, 49), 'pulumi.get', 'pulumi.get', ({(159, 26, 159, 30): 'self', (159, 32, 159, 48): '"""perimeter_type"""'}, {}), "(self, 'perimeter_type')", False, 'import pulumi\n'), ((163, 8, 163, 49), 'pulumi.set', 'pulumi.set', ({(163, 19, 163, 23): 'self', (163, 25, 163, 41): '"""perimeter_type"""', (163, 43, 163, 48): 'value'}, {}), "(self, 'perimeter_type', value)", False, 'import pulumi\n'), ((175, 15, 175, 39), 'pulumi.get', 'pulumi.get', ({(175, 26, 175, 30): 'self', (175, 32, 175, 38): '"""spec"""'}, {}), "(self, 'spec')", False, 'import pulumi\n'), ((179, 8, 179, 39), 'pulumi.set', 'pulumi.set', ({(179, 19, 179, 23): 'self', (179, 25, 179, 31): '"""spec"""', (179, 33, 179, 38): 'value'}, {}), "(self, 'spec', value)", False, 'import pulumi\n'), 
((190, 15, 190, 41), 'pulumi.get', 'pulumi.get', ({(190, 26, 190, 30): 'self', (190, 32, 190, 40): '"""status"""'}, {}), "(self, 'status')", False, 'import pulumi\n'), ((194, 8, 194, 41), 'pulumi.set', 'pulumi.set', ({(194, 19, 194, 23): 'self', (194, 25, 194, 33): '"""status"""', (194, 35, 194, 40): 'value'}, {}), "(self, 'status', value)", False, 'import pulumi\n'), ((210, 15, 210, 60), 'pulumi.get', 'pulumi.get', ({(210, 26, 210, 30): 'self', (210, 32, 210, 59): '"""use_explicit_dry_run_spec"""'}, {}), "(self, 'use_explicit_dry_run_spec')", False, 'import pulumi\n'), ((214, 8, 214, 60), 'pulumi.set', 'pulumi.set', ({(214, 19, 214, 23): 'self', (214, 25, 214, 52): '"""use_explicit_dry_run_spec"""', (214, 54, 214, 59): 'value'}, {}), "(self, 'use_explicit_dry_run_spec', value)", False, 'import pulumi\n'), ((304, 15, 304, 46), 'pulumi.get', 'pulumi.get', ({(304, 26, 304, 30): 'self', (304, 32, 304, 45): '"""create_time"""'}, {}), "(self, 'create_time')", False, 'import pulumi\n'), ((308, 8, 308, 46), 'pulumi.set', 'pulumi.set', ({(308, 19, 308, 23): 'self', (308, 25, 308, 38): '"""create_time"""', (308, 40, 308, 45): 'value'}, {}), "(self, 'create_time', value)", False, 'import pulumi\n'), ((317, 15, 317, 46), 'pulumi.get', 'pulumi.get', ({(317, 26, 317, 30): 'self', (317, 32, 317, 45): '"""description"""'}, {}), "(self, 'description')", False, 'import pulumi\n'), ((321, 8, 321, 46), 'pulumi.set', 'pulumi.set', ({(321, 19, 321, 23): 'self', (321, 25, 321, 38): '"""description"""', (321, 40, 321, 45): 'value'}, {}), "(self, 'description', value)", False, 'import pulumi\n'), ((331, 15, 331, 39), 'pulumi.get', 'pulumi.get', ({(331, 26, 331, 30): 'self', (331, 32, 331, 38): '"""name"""'}, {}), "(self, 'name')", False, 'import pulumi\n'), ((335, 8, 335, 39), 'pulumi.set', 'pulumi.set', ({(335, 19, 335, 23): 'self', (335, 25, 335, 31): '"""name"""', (335, 33, 335, 38): 'value'}, {}), "(self, 'name', value)", False, 'import pulumi\n'), ((344, 15, 344, 41), 'pulumi.get', 'pulumi.get', ({(344, 26, 344, 30): 'self', (344, 32, 344, 40): '"""parent"""'}, {}), "(self, 'parent')", False, 'import pulumi\n'), ((348, 8, 348, 41), 'pulumi.set', 'pulumi.set', ({(348, 19, 348, 23): 'self', (348, 25, 348, 33): '"""parent"""', (348, 35, 348, 40): 'value'}, {}), "(self, 'parent', value)", False, 'import pulumi\n'), ((371, 15, 371, 49), 'pulumi.get', 'pulumi.get', ({(371, 26, 371, 30): 'self', (371, 32, 371, 48): '"""perimeter_type"""'}, {}), "(self, 'perimeter_type')", False, 'import pulumi\n'), ((375, 8, 375, 49), 'pulumi.set', 'pulumi.set', ({(375, 19, 375, 23): 'self', (375, 25, 375, 41): '"""perimeter_type"""', (375, 43, 375, 48): 'value'}, {}), "(self, 'perimeter_type', value)", False, 'import pulumi\n'), ((387, 15, 387, 39), 'pulumi.get', 'pulumi.get', ({(387, 26, 387, 30): 'self', (387, 32, 387, 38): '"""spec"""'}, {}), "(self, 'spec')", False, 'import pulumi\n'), ((391, 8, 391, 39), 'pulumi.set', 'pulumi.set', ({(391, 19, 391, 23): 'self', (391, 25, 391, 31): '"""spec"""', (391, 33, 391, 38): 'value'}, {}), "(self, 'spec', value)", False, 'import pulumi\n'), ((402, 15, 402, 41), 'pulumi.get', 'pulumi.get', ({(402, 26, 402, 30): 'self', (402, 32, 402, 40): '"""status"""'}, {}), "(self, 'status')", False, 'import pulumi\n'), ((406, 8, 406, 41), 'pulumi.set', 'pulumi.set', ({(406, 19, 406, 23): 'self', (406, 25, 406, 33): '"""status"""', (406, 35, 406, 40): 'value'}, {}), "(self, 'status', value)", False, 'import pulumi\n'), ((414, 15, 414, 40), 'pulumi.get', 'pulumi.get', ({(414, 26, 414, 30): 'self', 
(414, 32, 414, 39): '"""title"""'}, {}), "(self, 'title')", False, 'import pulumi\n'), ((418, 8, 418, 40), 'pulumi.set', 'pulumi.set', ({(418, 19, 418, 23): 'self', (418, 25, 418, 32): '"""title"""', (418, 34, 418, 39): 'value'}, {}), "(self, 'title', value)", False, 'import pulumi\n'), ((426, 15, 426, 46), 'pulumi.get', 'pulumi.get', ({(426, 26, 426, 30): 'self', (426, 32, 426, 45): '"""update_time"""'}, {}), "(self, 'update_time')", False, 'import pulumi\n'), ((430, 8, 430, 46), 'pulumi.set', 'pulumi.set', ({(430, 19, 430, 23): 'self', (430, 25, 430, 38): '"""update_time"""', (430, 40, 430, 45): 'value'}, {}), "(self, 'update_time', value)", False, 'import pulumi\n'), ((446, 15, 446, 60), 'pulumi.get', 'pulumi.get', ({(446, 26, 446, 30): 'self', (446, 32, 446, 59): '"""use_explicit_dry_run_spec"""'}, {}), "(self, 'use_explicit_dry_run_spec')", False, 'import pulumi\n'), ((450, 8, 450, 60), 'pulumi.set', 'pulumi.set', ({(450, 19, 450, 23): 'self', (450, 25, 450, 52): '"""use_explicit_dry_run_spec"""', (450, 54, 450, 59): 'value'}, {}), "(self, 'use_explicit_dry_run_spec', value)", False, 'import pulumi\n'), ((1047, 15, 1047, 46), 'pulumi.get', 'pulumi.get', ({(1047, 26, 1047, 30): 'self', (1047, 32, 1047, 45): '"""create_time"""'}, {}), "(self, 'create_time')", False, 'import pulumi\n'), ((1056, 15, 1056, 46), 'pulumi.get', 'pulumi.get', ({(1056, 26, 1056, 30): 'self', (1056, 32, 1056, 45): '"""description"""'}, {}), "(self, 'description')", False, 'import pulumi\n'), ((1066, 15, 1066, 39), 'pulumi.get', 'pulumi.get', ({(1066, 26, 1066, 30): 'self', (1066, 32, 1066, 38): '"""name"""'}, {}), "(self, 'name')", False, 'import pulumi\n'), ((1075, 15, 1075, 41), 'pulumi.get', 'pulumi.get', ({(1075, 26, 1075, 30): 'self', (1075, 32, 1075, 40): '"""parent"""'}, {}), "(self, 'parent')", False, 'import pulumi\n'), ((1098, 15, 1098, 49), 'pulumi.get', 'pulumi.get', ({(1098, 26, 1098, 30): 'self', (1098, 32, 1098, 48): '"""perimeter_type"""'}, {}), "(self, 'perimeter_type')", False, 'import pulumi\n'), ((1110, 15, 1110, 39), 'pulumi.get', 'pulumi.get', ({(1110, 26, 1110, 30): 'self', (1110, 32, 1110, 38): '"""spec"""'}, {}), "(self, 'spec')", False, 'import pulumi\n'), ((1121, 15, 1121, 41), 'pulumi.get', 'pulumi.get', ({(1121, 26, 1121, 30): 'self', (1121, 32, 1121, 40): '"""status"""'}, {}), "(self, 'status')", False, 'import pulumi\n'), ((1129, 15, 1129, 40), 'pulumi.get', 'pulumi.get', ({(1129, 26, 1129, 30): 'self', (1129, 32, 1129, 39): '"""title"""'}, {}), "(self, 'title')", False, 'import pulumi\n'), ((1137, 15, 1137, 46), 'pulumi.get', 'pulumi.get', ({(1137, 26, 1137, 30): 'self', (1137, 32, 1137, 45): '"""update_time"""'}, {}), "(self, 'update_time')", False, 'import pulumi\n'), ((1153, 15, 1153, 60), 'pulumi.get', 'pulumi.get', ({(1153, 26, 1153, 30): 'self', (1153, 32, 1153, 59): '"""use_explicit_dry_run_spec"""'}, {}), "(self, 'use_explicit_dry_run_spec')", False, 'import pulumi\n'), ((74, 12, 74, 60), 'pulumi.set', 'pulumi.set', ({(74, 23, 74, 31): '__self__', (74, 33, 74, 46): '"""description"""', (74, 48, 74, 59): 'description'}, {}), "(__self__, 'description', description)", False, 'import pulumi\n'), ((76, 12, 76, 46), 'pulumi.set', 'pulumi.set', ({(76, 23, 76, 31): '__self__', (76, 33, 76, 39): '"""name"""', (76, 41, 76, 45): 'name'}, {}), "(__self__, 'name', name)", False, 'import pulumi\n'), ((78, 12, 78, 66), 'pulumi.set', 'pulumi.set', ({(78, 23, 78, 31): '__self__', (78, 33, 78, 49): '"""perimeter_type"""', (78, 51, 78, 65): 'perimeter_type'}, {}), "(__self__, 
'perimeter_type', perimeter_type)", False, 'import pulumi\n'), ((80, 12, 80, 46), 'pulumi.set', 'pulumi.set', ({(80, 23, 80, 31): '__self__', (80, 33, 80, 39): '"""spec"""', (80, 41, 80, 45): 'spec'}, {}), "(__self__, 'spec', spec)", False, 'import pulumi\n'), ((82, 12, 82, 50), 'pulumi.set', 'pulumi.set', ({(82, 23, 82, 31): '__self__', (82, 33, 82, 41): '"""status"""', (82, 43, 82, 49): 'status'}, {}), "(__self__, 'status', status)", False, 'import pulumi\n'), ((84, 12, 84, 88), 'pulumi.set', 'pulumi.set', ({(84, 23, 84, 31): '__self__', (84, 33, 84, 60): '"""use_explicit_dry_run_spec"""', (84, 62, 84, 87): 'use_explicit_dry_run_spec'}, {}), "(__self__, 'use_explicit_dry_run_spec', use_explicit_dry_run_spec)", False, 'import pulumi\n'), ((278, 12, 278, 60), 'pulumi.set', 'pulumi.set', ({(278, 23, 278, 31): '__self__', (278, 33, 278, 46): '"""create_time"""', (278, 48, 278, 59): 'create_time'}, {}), "(__self__, 'create_time', create_time)", False, 'import pulumi\n'), ((280, 12, 280, 60), 'pulumi.set', 'pulumi.set', ({(280, 23, 280, 31): '__self__', (280, 33, 280, 46): '"""description"""', (280, 48, 280, 59): 'description'}, {}), "(__self__, 'description', description)", False, 'import pulumi\n'), ((282, 12, 282, 46), 'pulumi.set', 'pulumi.set', ({(282, 23, 282, 31): '__self__', (282, 33, 282, 39): '"""name"""', (282, 41, 282, 45): 'name'}, {}), "(__self__, 'name', name)", False, 'import pulumi\n'), ((284, 12, 284, 50), 'pulumi.set', 'pulumi.set', ({(284, 23, 284, 31): '__self__', (284, 33, 284, 41): '"""parent"""', (284, 43, 284, 49): 'parent'}, {}), "(__self__, 'parent', parent)", False, 'import pulumi\n'), ((286, 12, 286, 66), 'pulumi.set', 'pulumi.set', ({(286, 23, 286, 31): '__self__', (286, 33, 286, 49): '"""perimeter_type"""', (286, 51, 286, 65): 'perimeter_type'}, {}), "(__self__, 'perimeter_type', perimeter_type)", False, 'import pulumi\n'), ((288, 12, 288, 46), 'pulumi.set', 'pulumi.set', ({(288, 23, 288, 31): '__self__', (288, 33, 288, 39): '"""spec"""', (288, 41, 288, 45): 'spec'}, {}), "(__self__, 'spec', spec)", False, 'import pulumi\n'), ((290, 12, 290, 50), 'pulumi.set', 'pulumi.set', ({(290, 23, 290, 31): '__self__', (290, 33, 290, 41): '"""status"""', (290, 43, 290, 49): 'status'}, {}), "(__self__, 'status', status)", False, 'import pulumi\n'), ((292, 12, 292, 48), 'pulumi.set', 'pulumi.set', ({(292, 23, 292, 31): '__self__', (292, 33, 292, 40): '"""title"""', (292, 42, 292, 47): 'title'}, {}), "(__self__, 'title', title)", False, 'import pulumi\n'), ((294, 12, 294, 60), 'pulumi.set', 'pulumi.set', ({(294, 23, 294, 31): '__self__', (294, 33, 294, 46): '"""update_time"""', (294, 48, 294, 59): 'update_time'}, {}), "(__self__, 'update_time', update_time)", False, 'import pulumi\n'), ((296, 12, 296, 88), 'pulumi.set', 'pulumi.set', ({(296, 23, 296, 31): '__self__', (296, 33, 296, 60): '"""use_explicit_dry_run_spec"""', (296, 62, 296, 87): 'use_explicit_dry_run_spec'}, {}), "(__self__, 'use_explicit_dry_run_spec', use_explicit_dry_run_spec)", False, 'import pulumi\n'), ((929, 19, 929, 43), 'pulumi.ResourceOptions', 'pulumi.ResourceOptions', ({}, {}), '()', False, 'import pulumi\n'), ((1025, 50, 1025, 79), 'pulumi.ResourceOptions', 'pulumi.ResourceOptions', (), '', False, 'import pulumi\n')] |
ianpartridge/incubator-openwhisk-runtime-swift | core/swift3.1.1Action/swift3runner.py | 5aacba1435f46b13cbb0a70874afb4b53c1a78bc | """Python proxy to run Swift action.
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
"""
import os
import glob
import sys
import subprocess
import codecs
import json
sys.path.append('../actionProxy')
from actionproxy import ActionRunner, main, setRunner # noqa
SRC_EPILOGUE_FILE = '/swift3Action/epilogue.swift'
DEST_SCRIPT_FILE = '/swift3Action/spm-build/main.swift'
DEST_SCRIPT_DIR = '/swift3Action/spm-build'
DEST_BIN_FILE = '/swift3Action/spm-build/.build/release/Action'
BUILD_PROCESS = ['./swiftbuildandlink.sh']
class Swift3Runner(ActionRunner):
def __init__(self):
ActionRunner.__init__(self, DEST_SCRIPT_FILE, DEST_BIN_FILE)
# remove pre-existing binary before receiving a new binary
def preinit(self):
try:
os.remove(self.binary)
except: pass
def epilogue(self, init_message):
# skip if executable already exists (was unzipped)
if os.path.isfile(self.binary):
return
if 'main' in init_message:
main_function = init_message['main']
else:
main_function = 'main'
# make sure there is a main.swift file
open(DEST_SCRIPT_FILE, 'a').close()
with codecs.open(DEST_SCRIPT_FILE, 'a', 'utf-8') as fp:
os.chdir(DEST_SCRIPT_DIR)
for file in glob.glob("*.swift"):
if file not in ["Package.swift", "main.swift", "_WhiskJSONUtils.swift", "_Whisk.swift"]:
with codecs.open(file, 'r', 'utf-8') as f:
fp.write(f.read())
with codecs.open(SRC_EPILOGUE_FILE, 'r', 'utf-8') as ep:
fp.write(ep.read())
fp.write('_run_main(mainFunction: %s)\n' % main_function)
def build(self, init_message):
        # short-circuit the build if a binary already exists from the zip file
if os.path.isfile(self.binary):
# file may not have executable permission, set it
os.chmod(self.binary, 0o555)
return
p = subprocess.Popen(
BUILD_PROCESS,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
cwd=DEST_SCRIPT_DIR)
# run the process and wait until it completes.
# stdout/stderr will not be None because we passed PIPEs to Popen
(o, e) = p.communicate()
# stdout/stderr may be either text or bytes, depending on Python
# version, so if bytes, decode to text. Note that in Python 2
# a string will match both types; so also skip decoding in that case
if isinstance(o, bytes) and not isinstance(o, str):
o = o.decode('utf-8')
if isinstance(e, bytes) and not isinstance(e, str):
e = e.decode('utf-8')
if o:
sys.stdout.write(o)
sys.stdout.flush()
if e:
sys.stderr.write(e)
sys.stderr.flush()
def env(self, message):
env = ActionRunner.env(self, message)
args = message.get('value', {}) if message else {}
env['WHISK_INPUT'] = json.dumps(args)
return env
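# Illustrative sketch (not part of the proxy): run-time arguments reach the Swift
# process through the WHISK_INPUT environment variable built by env() above. The
# payload below is a placeholder, and constructing the runner assumes the action
# container's filesystem layout.
def _example_whisk_input_env():
    runner = Swift3Runner()
    env = runner.env({"value": {"name": "world"}})
    # WHISK_INPUT holds the JSON-encoded action arguments, e.g. '{"name": "world"}'
    return env["WHISK_INPUT"]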
if __name__ == '__main__':
setRunner(Swift3Runner())
main()
| [((26, 0, 26, 33), 'sys.path.append', 'sys.path.append', ({(26, 16, 26, 32): '"""../actionProxy"""'}, {}), "('../actionProxy')", False, 'import sys\n'), ((114, 4, 114, 10), 'actionproxy.main', 'main', ({}, {}), '()', False, 'from actionproxy import ActionRunner, main, setRunner\n'), ((40, 8, 40, 68), 'actionproxy.ActionRunner.__init__', 'ActionRunner.__init__', ({(40, 30, 40, 34): 'self', (40, 36, 40, 52): 'DEST_SCRIPT_FILE', (40, 54, 40, 67): 'DEST_BIN_FILE'}, {}), '(self, DEST_SCRIPT_FILE, DEST_BIN_FILE)', False, 'from actionproxy import ActionRunner, main, setRunner\n'), ((50, 11, 50, 38), 'os.path.isfile', 'os.path.isfile', ({(50, 26, 50, 37): 'self.binary'}, {}), '(self.binary)', False, 'import os\n'), ((74, 11, 74, 38), 'os.path.isfile', 'os.path.isfile', ({(74, 26, 74, 37): 'self.binary'}, {}), '(self.binary)', False, 'import os\n'), ((79, 12, 83, 32), 'subprocess.Popen', 'subprocess.Popen', (), '', False, 'import subprocess\n'), ((106, 14, 106, 45), 'actionproxy.ActionRunner.env', 'ActionRunner.env', ({(106, 31, 106, 35): 'self', (106, 37, 106, 44): 'message'}, {}), '(self, message)', False, 'from actionproxy import ActionRunner, main, setRunner\n'), ((108, 29, 108, 45), 'json.dumps', 'json.dumps', ({(108, 40, 108, 44): 'args'}, {}), '(args)', False, 'import json\n'), ((45, 12, 45, 34), 'os.remove', 'os.remove', ({(45, 22, 45, 33): 'self.binary'}, {}), '(self.binary)', False, 'import os\n'), ((60, 13, 60, 56), 'codecs.open', 'codecs.open', ({(60, 25, 60, 41): 'DEST_SCRIPT_FILE', (60, 43, 60, 46): '"""a"""', (60, 48, 60, 55): '"""utf-8"""'}, {}), "(DEST_SCRIPT_FILE, 'a', 'utf-8')", False, 'import codecs\n'), ((61, 12, 61, 37), 'os.chdir', 'os.chdir', ({(61, 21, 61, 36): 'DEST_SCRIPT_DIR'}, {}), '(DEST_SCRIPT_DIR)', False, 'import os\n'), ((62, 24, 62, 44), 'glob.glob', 'glob.glob', ({(62, 34, 62, 43): '"""*.swift"""'}, {}), "('*.swift')", False, 'import glob\n'), ((76, 12, 76, 40), 'os.chmod', 'os.chmod', ({(76, 21, 76, 32): 'self.binary', (76, 34, 76, 39): '(365)'}, {}), '(self.binary, 365)', False, 'import os\n'), ((98, 12, 98, 31), 'sys.stdout.write', 'sys.stdout.write', ({(98, 29, 98, 30): 'o'}, {}), '(o)', False, 'import sys\n'), ((99, 12, 99, 30), 'sys.stdout.flush', 'sys.stdout.flush', ({}, {}), '()', False, 'import sys\n'), ((102, 12, 102, 31), 'sys.stderr.write', 'sys.stderr.write', ({(102, 29, 102, 30): 'e'}, {}), '(e)', False, 'import sys\n'), ((103, 12, 103, 30), 'sys.stderr.flush', 'sys.stderr.flush', ({}, {}), '()', False, 'import sys\n'), ((66, 17, 66, 61), 'codecs.open', 'codecs.open', ({(66, 29, 66, 46): 'SRC_EPILOGUE_FILE', (66, 48, 66, 51): '"""r"""', (66, 53, 66, 60): '"""utf-8"""'}, {}), "(SRC_EPILOGUE_FILE, 'r', 'utf-8')", False, 'import codecs\n'), ((64, 25, 64, 56), 'codecs.open', 'codecs.open', ({(64, 37, 64, 41): 'file', (64, 43, 64, 46): '"""r"""', (64, 48, 64, 55): '"""utf-8"""'}, {}), "(file, 'r', 'utf-8')", False, 'import codecs\n')] |
vanderh0ff/ychaos | src/ychaos/utils/types.py | 5148c889912b744ee73907e4dd30c9ddb851aeb3 | from typing import Dict, List, TypeVar, Union
JsonTypeVar = TypeVar("JsonTypeVar")
JsonPrimitive = Union[str, float, int, bool, None]
JsonDict = Dict[str, JsonTypeVar]
JsonArray = List[JsonTypeVar]
Json = Union[JsonPrimitive, JsonDict, JsonArray]
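# Illustrative usage sketch, not part of the original module: the aliases above can
# annotate anything produced by json.loads, whose return shapes match Json exactly.
if __name__ == "__main__":
    import json
    def _load_payload(raw: str) -> Json:
        return json.loads(raw)
    print(_load_payload('{"retries": 3, "targets": ["host-a", "host-b"]}'))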
| [((3, 14, 3, 36), 'typing.TypeVar', 'TypeVar', ({(3, 22, 3, 35): '"""JsonTypeVar"""'}, {}), "('JsonTypeVar')", False, 'from typing import Dict, List, TypeVar, Union\n')] |
stefanitsky/yandex_market_language | yandex_market_language/models/promo.py | e17595b556fc55e183cf366227b2739c5c6178dc | import typing as t
from yandex_market_language import models
from yandex_market_language.models.abstract import XMLElement, XMLSubElement
class Promo(models.AbstractModel):
"""
Docs: https://yandex.ru/support/partnermarket/elements/promo-gift.html
"""
MAPPING = {
"start-date": "start_date",
"end-date": "end_date",
"description": "description",
"url": "url",
}
__slots__ = [
'promo_id',
'promo_type',
'purchase',
'promo_gifts',
'start_date',
'end_date',
'description',
'url'
]
def __init__(
self,
promo_id: str,
promo_type: str,
purchase: "Purchase",
promo_gifts: t.List["PromoGift"],
start_date=None,
end_date=None,
description=None,
url=None,
):
self.promo_id = promo_id
self.promo_type = promo_type
self.start_date = start_date
self.end_date = end_date
self.description = description
self.url = url
self.purchase = purchase
self.promo_gifts = promo_gifts
def create_dict(self, **kwargs) -> dict:
return dict(
promo_id=self.promo_id,
promo_type=self.promo_type,
start_date=self.start_date,
end_date=self.end_date,
description=self.description,
url=self.url,
purchase=self.purchase.to_dict(),
promo_gifts=[pg.to_dict() for pg in self.promo_gifts],
)
def create_xml(self, **kwargs) -> XMLElement:
attribs = {"id": self.promo_id, "type": self.promo_type}
promo_el = XMLElement("promo", attribs)
for tag, attr in self.MAPPING.items():
v = getattr(self, attr)
if v:
el = XMLSubElement(promo_el, tag)
el.text = v
# Add purchase el
self.purchase.to_xml(promo_el)
# Add promo gifts
promo_gifts_el = XMLSubElement(promo_el, "promo-gifts")
for pg in self.promo_gifts:
pg.to_xml(promo_gifts_el)
return promo_el
@classmethod
def from_xml(cls, promo_el: XMLElement) -> "Promo":
kwargs = dict(
promo_id=promo_el.attrib.get("id"),
promo_type=promo_el.attrib.get("type"),
promo_gifts=[]
)
for el in promo_el:
if el.tag in cls.MAPPING:
kwargs[cls.MAPPING[el.tag]] = el.text
elif el.tag == "purchase":
kwargs["purchase"] = Purchase.from_xml(el)
elif el.tag == "promo-gifts":
for pg_el in el:
kwargs["promo_gifts"].append(PromoGift.from_xml(pg_el))
return Promo(**kwargs)
class Purchase(models.AbstractModel):
"""
Docs: https://yandex.ru/support/partnermarket/elements/promo-gift.html
"""
__slots__ = [
'products',
'required_quantity'
]
def __init__(self, products: t.List["Product"], required_quantity="1"):
self.required_quantity = required_quantity
self.products = products
def create_dict(self, **kwargs) -> dict:
return dict(
required_quantity=self.required_quantity,
products=[p.to_dict() for p in self.products]
)
def create_xml(self, **kwargs) -> XMLElement:
purchase_el = XMLElement("purchase")
# Add required quantity el
required_quantity_el = XMLSubElement(purchase_el, "required-quantity")
required_quantity_el.text = self.required_quantity
# Add products el
for p in self.products:
p.to_xml(purchase_el)
return purchase_el
@staticmethod
def from_xml(purchase_el: XMLElement) -> "Purchase":
kwargs = {"products": []}
for el in purchase_el:
if el.tag == "required-quantity":
kwargs["required_quantity"] = el.text
elif el.tag == "product":
kwargs["products"].append(Product.from_xml(el))
return Purchase(**kwargs)
class Product(models.AbstractModel):
"""
Docs: https://yandex.ru/support/partnermarket/elements/promo-gift.html
"""
__slots__ = [
'offer_id',
'category_id'
]
def __init__(self, offer_id: str = None, category_id: str = None):
self.offer_id = offer_id
self.category_id = category_id
def create_dict(self, **kwargs) -> dict:
return dict(
offer_id=self.offer_id,
category_id=self.category_id,
)
def create_xml(self, **kwargs) -> XMLElement:
attribs = {}
if self.offer_id:
attribs["offer-id"] = self.offer_id
if self.category_id:
attribs["category-id"] = self.category_id
return XMLElement("product", attribs)
@staticmethod
def from_xml(product_el: XMLElement) -> "Product":
return Product(
offer_id=product_el.attrib.get("offer-id"),
category_id=product_el.attrib.get("category-id")
)
class PromoGift(models.AbstractModel):
"""
Docs:
https://yandex.ru/support/partnermarket/elements/promo-gift.html
"""
__slots__ = [
'offer_id',
'gift_id'
]
def __init__(self, offer_id: str = None, gift_id: str = None):
self.offer_id = offer_id
self.gift_id = gift_id
def create_dict(self, **kwargs) -> dict:
return dict(offer_id=self.offer_id, gift_id=self.gift_id)
def create_xml(self, **kwargs) -> XMLElement:
attribs = {}
if self.offer_id:
attribs["offer-id"] = self.offer_id
elif self.gift_id:
attribs["gift-id"] = self.gift_id
return XMLElement("promo-gift", attribs)
@staticmethod
def from_xml(el: XMLElement) -> "PromoGift":
return PromoGift(
offer_id=el.attrib.get("offer-id"),
gift_id=el.attrib.get("gift-id")
)
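# Illustrative usage sketch, not part of the original module; the ids and the promo type
# below are made up. It assembles a promo and serializes it with the to_dict() helper
# provided by models.AbstractModel (the same helper the classes above call internally).
if __name__ == "__main__":
    _promo = Promo(
        promo_id="PROMO-1",
        promo_type="gift with purchase",
        purchase=Purchase(products=[Product(offer_id="1234")], required_quantity="2"),
        promo_gifts=[PromoGift(gift_id="5678")],
        url="http://example.com/promos/1",
    )
    print(_promo.to_dict())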
| [((63, 19, 63, 47), 'yandex_market_language.models.abstract.XMLElement', 'XMLElement', ({(63, 30, 63, 37): '"""promo"""', (63, 39, 63, 46): 'attribs'}, {}), "('promo', attribs)", False, 'from yandex_market_language.models.abstract import XMLElement, XMLSubElement\n'), ((75, 25, 75, 63), 'yandex_market_language.models.abstract.XMLSubElement', 'XMLSubElement', ({(75, 39, 75, 47): 'promo_el', (75, 49, 75, 62): '"""promo-gifts"""'}, {}), "(promo_el, 'promo-gifts')", False, 'from yandex_market_language.models.abstract import XMLElement, XMLSubElement\n'), ((122, 22, 122, 44), 'yandex_market_language.models.abstract.XMLElement', 'XMLElement', ({(122, 33, 122, 43): '"""purchase"""'}, {}), "('purchase')", False, 'from yandex_market_language.models.abstract import XMLElement, XMLSubElement\n'), ((125, 31, 125, 78), 'yandex_market_language.models.abstract.XMLSubElement', 'XMLSubElement', ({(125, 45, 125, 56): 'purchase_el', (125, 58, 125, 77): '"""required-quantity"""'}, {}), "(purchase_el, 'required-quantity')", False, 'from yandex_market_language.models.abstract import XMLElement, XMLSubElement\n'), ((173, 15, 173, 45), 'yandex_market_language.models.abstract.XMLElement', 'XMLElement', ({(173, 26, 173, 35): '"""product"""', (173, 37, 173, 44): 'attribs'}, {}), "('product', attribs)", False, 'from yandex_market_language.models.abstract import XMLElement, XMLSubElement\n'), ((207, 15, 207, 48), 'yandex_market_language.models.abstract.XMLElement', 'XMLElement', ({(207, 26, 207, 38): '"""promo-gift"""', (207, 40, 207, 47): 'attribs'}, {}), "('promo-gift', attribs)", False, 'from yandex_market_language.models.abstract import XMLElement, XMLSubElement\n'), ((68, 21, 68, 49), 'yandex_market_language.models.abstract.XMLSubElement', 'XMLSubElement', ({(68, 35, 68, 43): 'promo_el', (68, 45, 68, 48): 'tag'}, {}), '(promo_el, tag)', False, 'from yandex_market_language.models.abstract import XMLElement, XMLSubElement\n')] |
skogsbaer/check-assignments | src/testCmd.py | cda8208c10644eecfe0bb988bee61098485aa6c4 | import shell
from dataclasses import dataclass
from utils import *
from ownLogging import *
from typing import *
from ansi import *
import re
import os
import testHaskell
import testPython
import testJava
@dataclass
class TestArgs:
dirs: List[str]
assignments: List[str] # take all if empty
interactive: bool
startAt: str
INSPECT_COMMAND = 'inspect'
RERUN_COMMAND = 'rerun'
CONTINUE_COMMAND = 'continue'
HELP_COMMAND = 'help'
def readCommand(cfg, args, studentDir, assignment):
f = assignment.getMainFile(studentDir)
commands = [('h', HELP_COMMAND, 'Print this help message')]
if f:
commands.append( ('i', INSPECT_COMMAND, f'Inspect file {f}') )
commands.append( ('r', RERUN_COMMAND, f'Re-run tests') )
commands.append( ('c', CONTINUE_COMMAND, f'Continue with next assignment/student') )
def printHelp():
for char, cmd, help in commands:
print(f' {char}: {help}')
shortcutHelp = [x[0] for x in commands]
while True:
try:
c = input(f'What to do next? {"/".join(shortcutHelp)} ')
except EOFError:
raise KeyboardInterrupt()
for chars, cmd, help in commands:
if c in chars:
if cmd == HELP_COMMAND:
printHelp()
else:
return cmd
break
else:
print(f'Unknown command {c}.')
printHelp()
def inspectFile(cfg, args, studentDir, assignment):
f = assignment.getMainFile(studentDir)
editor = cfg.editor()
os.system(f"{editor} '{f}'")
TEST_DICT = {
'python': testPython.runPythonTests,
'java': testJava.runJavaTests,
'haskell': testHaskell.runHaskellTests
}
def prettyStudent(cfg, studentDir):
try:
(name, matrikel) = parseSubmissionDir(cfg, studentDir)
return f'{name} ({matrikel})'
except ValueError:
x = shell.basename(studentDir)
if not x:
x = studentDir
x = stripLeadingSlash(x)
x = stripTrailingSlash(x)
return x
def runTestsForAssignment(cfg, args, studentDir, assignment):
print(blue(f'Checking assignment {assignment.id} for student {prettyStudent(cfg, studentDir)}'))
k = assignment.kind
if k in TEST_DICT:
fun = TEST_DICT[k]
fun(cfg, args, studentDir, assignment)
else:
abort(f"Don't know how to run tests for assignment kind {k}")
def interactiveLoop(cfg, args, studentDir, a):
runTestsForAssignment(cfg, args, studentDir, a)
if args.interactive:
while True:
print()
print(blue(f'Just checked assignment {a.id} for student {prettyStudent(cfg, studentDir)}'))
cmd = readCommand(cfg, args, studentDir, a)
if cmd == INSPECT_COMMAND:
inspectFile(cfg, args, studentDir, a)
elif cmd == RERUN_COMMAND:
runTestsForAssignment(cfg, args, studentDir, a)
elif cmd == CONTINUE_COMMAND:
return
def runTests(cfg, args):
dirs = args.dirs
if not dirs:
dirs = collectSubmissionDirs(cfg)
dirs = sorted(dirs)
if args.startAt:
l = dirs
dirs = []
for x in l:
if shell.basename(x) >= args.startAt:
dirs.append(x)
else:
print(f'Skipping {x} as requested')
for d in dirs:
assignments = cfg.assignments
if args.assignments:
assignments = []
for a in cfg.assignments:
if a.id in args.assignments:
assignments.append(a)
if not assignments:
print(f'No assignments found or selected!')
for i, a in enumerate(assignments):
interactiveLoop(cfg, args, d, a)
if i > 0:
print()
| [((55, 4, 55, 32), 'os.system', 'os.system', ({(55, 14, 55, 31): 'f"""{editor} \'{f}\'"""'}, {}), '(f"{editor} \'{f}\'")', False, 'import os\n'), ((68, 12, 68, 38), 'shell.basename', 'shell.basename', ({(68, 27, 68, 37): 'studentDir'}, {}), '(studentDir)', False, 'import shell\n'), ((107, 15, 107, 32), 'shell.basename', 'shell.basename', ({(107, 30, 107, 31): 'x'}, {}), '(x)', False, 'import shell\n')] |
kipoi/kipoi-containers | kipoi_containers/singularityhelper.py | 5978cf1563dcc1072170f28a0a956cc28aa3c406 | from collections import Counter
from datetime import datetime
import os
import requests
from subprocess import Popen, PIPE
from pathlib import Path
import json
from typing import Dict, Union, TYPE_CHECKING
from kipoi_utils.external.torchvision.dataset_utils import download_url
if TYPE_CHECKING:
import zenodoclient
ZENODO_BASE = "https://zenodo.org"
ZENODO_DEPOSITION = f"{ZENODO_BASE}/api/deposit/depositions"
PathType = Union[str, Path]
def cleanup(singularity_file_path: PathType) -> None:
"""
Deletes the singularity image that was created by build_singularity_image
"""
if isinstance(singularity_file_path, str):
singularity_file_path = Path(singularity_file_path)
if singularity_file_path.exists():
singularity_file_path.unlink()
def build_singularity_image(
name_of_docker_image: str,
singularity_image_name: str,
singularity_image_folder: PathType,
) -> PathType:
"""
This function builds a singularity image from a dockerhub image
using singularity pull. The resulting .sif is stored in <singularity_image_folder> and
the filepath is returned.
"""
if isinstance(singularity_image_folder, Path):
singularity_image_folder = str(singularity_image_folder)
pull_cmd = [
"singularity",
"pull",
"--name",
f"{singularity_image_folder}/{singularity_image_name}",
"--force",
f"docker://{name_of_docker_image}",
]
print(f"Building {singularity_image_name} - {' '.join(pull_cmd)}")
process = Popen(pull_cmd, stdout=PIPE, stderr=PIPE)
stdout, stderr = process.communicate()
if process.returncode != 0:
print(stderr)
print(stdout)
raise ValueError(
f"Singularity image {singularity_image_name} can not be built"
)
singularity_image_path = (
f"{singularity_image_folder}/{singularity_image_name}"
)
return singularity_image_path
def test_singularity_image(
singularity_image_folder: PathType, singularity_image_name: str, model: str
) -> None:
"""Tests a singularity image residing in singularity_image_folder
with kipoi test <model> --source=kipoi
Raises:
        ValueError: Raised if the test is not successful"""
print(
f"Testing {model} with {singularity_image_folder}/{singularity_image_name}"
)
if model == "Basenji":
test_cmd = [
"kipoi",
"test",
f"{model}",
"--source=kipoi",
"--batch_size=2",
]
else:
test_cmd = ["kipoi", "test", f"{model}", "--source=kipoi"]
if isinstance(singularity_image_folder, str):
singularity_image_folder = Path(singularity_image_folder)
if isinstance(singularity_image_name, str):
singularity_image_name = Path(singularity_image_name)
exec_cmd = [
"singularity",
"exec",
f"{singularity_image_folder}/{singularity_image_name}",
]
exec_cmd.extend(test_cmd)
process = Popen(exec_cmd, stdout=PIPE, stderr=PIPE)
stdout, stderr = process.communicate()
if process.returncode != 0:
print(stdout)
print(stderr)
raise ValueError(
f"Singularity image {singularity_image_name} for {model} did not pass relevant tests"
)
def create_new_deposition(
zenodo_client: "zenodoclient.Client", deposition_id: str
) -> str:
"""Creates a new version of an existing depsosition on zenodo and returns the
corresponding id"""
status_code, response = zenodo_client.post_content(
f"{ZENODO_DEPOSITION}/{deposition_id}/actions/newversion"
)
return response["links"]["latest_draft"].split("/")[-1]
def get_deposit(
zenodo_client: "zenodoclient.Client", deposition_id: str
) -> Dict:
"""Returns the response body of a get request for an existing deposition"""
response = zenodo_client.get_content(
f"{ZENODO_DEPOSITION}/{deposition_id}"
)
return response
def upload_file(
zenodo_client: "zenodoclient.Client",
url: str,
singularity_image_folder: PathType,
filename: str,
) -> None:
"""Upload singularity_image_folder/filename to a url"""
path = Path(singularity_image_folder) / Path(filename)
zenodo_client.put_content(url, data=path)
def upload_metadata(
zenodo_client: "zenodoclient.Client",
url: str,
model_group: str = "",
shared_env: str = "",
) -> None:
"""Upload metadata for a model group to a given url"""
if not model_group and not shared_env:
raise ValueError(
"You need to provide atlease a shared env name or a model group name"
)
if model_group:
data = {
"metadata": {
"title": f"{model_group} singularity container",
"upload_type": "physicalobject",
"description": "This is a singularity container for models "
f"under https://kipoi.org/models/{model_group}/",
"creators": [
{"name": "Haimasree, Bhattacharya", "affiliation": "EMBL"}
],
"publication_date": datetime.today().strftime("%Y-%m-%d"),
"license": "MIT",
}
}
elif shared_env:
if "shared" in shared_env:
data = {
"metadata": {
"title": f"{shared_env} singularity container",
"upload_type": "physicalobject",
"description": "Singularity container with conda environment "
f"https://github.com/kipoi/kipoi-containers/blob/main/envfiles/{shared_env}.yml",
"creators": [
{
"name": "Haimasree, Bhattacharya",
"affiliation": "EMBL",
}
],
"publication_date": datetime.today().strftime("%Y-%m-%d"),
"license": "MIT",
}
}
elif shared_env == "mmsplice":
data = {
"metadata": {
"title": "MMSplice singularity container except mtsplice",
"upload_type": "physicalobject",
"description": "Singularity container for MMSplice models except mtsplice "
"under http://kipoi.org/models/MMSplice/",
"creators": [
{
"name": "Haimasree, Bhattacharya",
"affiliation": "EMBL",
}
],
"publication_date": datetime.today().strftime("%Y-%m-%d"),
"license": "MIT",
}
}
else:
raise ValueError(
"Available options are - mmsplice, sharedpy3keras2tf1, sharedpy3keras2tf2, sharedpy3keras1.2"
)
zenodo_client.put_content(url, data=data)
def push_deposition(
zenodo_client: "zenodoclient.Client", deposition_id: str
) -> Dict:
"""Pushes a deposition to zenodo. An additional get request is made to the newy pushed
deposition and a response body is returned"""
status_code, response = zenodo_client.post_content(
f"{ZENODO_DEPOSITION}/{deposition_id}/actions/publish"
)
response = get_deposit(zenodo_client, deposition_id)
return response
def update_existing_singularity_container(
zenodo_client: "zenodoclient.Client",
singularity_dict: Dict,
singularity_image_folder: PathType,
model_group: str,
file_to_upload: str = "",
push: bool = True,
) -> None:
"""This function creates a new draft version of an existing image's zenodo entry with updated
metadata and file after deleting the old file. If push is True, the draft version is finalized
    and the url, name and md5 fields are updated and the new deposition id and file id are added to
    the singularity dict, which contains information about the existing image. Otherwise, only
    the new deposition id and file id are added to the dictionary. This modified dictionary is
    returned"""
# Create a new version of an existing deposition
deposition_id = singularity_dict["url"].split("/")[4]
new_deposition_id = create_new_deposition(zenodo_client, deposition_id)
response = get_deposit(zenodo_client, new_deposition_id)
bucket_url = response["links"]["bucket"]
filename = (
file_to_upload if file_to_upload else f"{singularity_dict['name']}.sif"
)
file_id = ""
for fileobj in response["files"]:
if fileobj["filename"] == filename:
file_id = fileobj["id"] # Assuming only 1 version is added
# Delete existing file from this new version
if file_id:
zenodo_client.delete_content(
f"{ZENODO_DEPOSITION}/{new_deposition_id}/files/{file_id}"
)
# Add a new file to this new version
upload_file(
zenodo_client,
f"{bucket_url}/{filename}",
singularity_image_folder,
filename,
)
url = f"{ZENODO_DEPOSITION}/{new_deposition_id}"
if (
"shared" in singularity_dict["name"]
or singularity_dict["name"] == "kipoi-docker_mmsplice-slim"
):
shared_env_name = (
singularity_dict["name"]
.replace("kipoi-docker_", "")
.replace("-slim", "")
)
upload_metadata(zenodo_client, url, shared_env=shared_env_name)
else:
upload_metadata(zenodo_client, url, model_group=model_group)
# publish the newly created revision
if push:
response = push_deposition(zenodo_client, new_deposition_id)
record_id = response["metadata"]["prereserve_doi"]["recid"]
file_id, file_name, file_md5 = "", "", ""
for fileobj in response["files"]:
if fileobj["filename"] == filename:
file_id = fileobj["id"] # Assuming only 1 version is added
file_name = fileobj["filename"].replace(".sif", "")
file_md5 = fileobj["checksum"]
return {
"new_deposition_id": new_deposition_id,
"file_id": file_id,
"url": f"{ZENODO_BASE}/record/{record_id}/files/{filename}?download=1",
"name": file_name,
"md5": file_md5,
}
else:
return singularity_dict | {
"new_deposition_id": new_deposition_id,
"file_id": "",
}
def push_new_singularity_image(
zenodo_client: "zenodoclient.Client",
singularity_image_folder: PathType,
singularity_dict: Dict,
model_group: str,
file_to_upload: str = "",
path: str = "",
push: bool = True,
) -> None:
"""This function creates a draft version of a new zenodo entry with the
metadata and singularity image. If push is True, the draft version is finalized
    and the url, name and md5 fields are updated and the new deposition id and file id are added to
    the singularity dict, which contains empty strings as url and md5. Otherwise, only
    the new deposition id and file id are added to the dictionary. This modified dictionary is
    returned"""
status_code, response = zenodo_client.post_content(f"{ZENODO_DEPOSITION}")
deposition_id = response["id"]
bucket_url = response["links"]["bucket"]
filename = (
file_to_upload if file_to_upload else f"{singularity_dict['name']}.sif"
)
upload_file(
zenodo_client,
f"{bucket_url}/{filename}",
singularity_image_folder,
filename,
)
url = f"{ZENODO_DEPOSITION}/{deposition_id}"
if "shared" in singularity_dict["name"]:
shared_env_name = (
singularity_dict["name"]
.replace("kipoi-docker_", "")
.replace("-slim", "")
)
upload_metadata(zenodo_client, url, shared_env=shared_env_name)
else:
upload_metadata(zenodo_client, url, model_group=model_group)
if push:
push_deposition(zenodo_client, deposition_id)
response = get_deposit(zenodo_client, deposition_id)
record_id = response["metadata"]["prereserve_doi"]["recid"]
return {
"new_deposition_id": deposition_id,
"file_id": response["files"][0]["id"],
"url": f"{ZENODO_BASE}/record/{record_id}/files/{filename}?download=1",
"name": response["files"][0]["filename"].replace(".sif", ""),
"md5": response["files"][0]["checksum"],
}
else:
return singularity_dict | {
"new_deposition_id": deposition_id,
"file_id": "",
}
def get_singularity_image(
singularity_image_folder: PathType,
singularity_image_dict: Dict,
model_or_model_group: str,
) -> PathType:
"""This function downloads the singularity image corresponding to the given model or
model group from zenodo to singularity_image_folder and returns the name of the image"""
if (
model_or_model_group in singularity_image_dict
): # Special case for MMSPlice/mtsplice, APARENT/veff
image_name = (
f"{singularity_image_dict[model_or_model_group]['name']}.sif"
)
image_url = f"{singularity_image_dict[model_or_model_group]['url']}"
image_md5 = f"{singularity_image_dict[model_or_model_group]['md5']}"
else:
model_group = model_or_model_group.split("/")[0]
image_name = f"{singularity_image_dict[model_group]['name']}.sif"
image_url = f"{singularity_image_dict[model_group]['url']}"
image_md5 = f"{singularity_image_dict[model_group]['md5']}"
if isinstance(singularity_image_folder, str):
singularity_image_folder = Path(singularity_image_folder)
if isinstance(image_name, str):
image_name = Path(image_name)
if not (singularity_image_folder / image_name).exists():
download_url(
url=image_url,
root=singularity_image_folder,
filename=image_name,
md5=image_md5,
)
return image_name
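# Illustrative usage sketch, not part of the original module: building and testing a
# container image end to end. The docker tag and scratch folder are assumptions, and
# running this needs the singularity CLI, network access and a kipoi installation,
# so treat it as documentation rather than a definitive test harness.
if __name__ == "__main__":
    _folder = "/tmp/kipoi-sifs"  # assumed scratch folder
    os.makedirs(_folder, exist_ok=True)
    _sif_name = "kipoi-docker_sharedpy3keras2tf2-slim.sif"  # assumed image name
    _sif_path = build_singularity_image(
        name_of_docker_image="kipoi/kipoi-docker:sharedpy3keras2tf2-slim",  # assumed tag
        singularity_image_name=_sif_name,
        singularity_image_folder=_folder,
    )
    test_singularity_image(_folder, _sif_name, "Basset")  # "Basset" used as an example model
    cleanup(_sif_path)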
| [((53, 14, 53, 55), 'subprocess.Popen', 'Popen', (), '', False, 'from subprocess import Popen, PIPE\n'), ((98, 14, 98, 55), 'subprocess.Popen', 'Popen', (), '', False, 'from subprocess import Popen, PIPE\n'), ((27, 32, 27, 59), 'pathlib.Path', 'Path', ({(27, 37, 27, 58): 'singularity_file_path'}, {}), '(singularity_file_path)', False, 'from pathlib import Path\n'), ((89, 35, 89, 65), 'pathlib.Path', 'Path', ({(89, 40, 89, 64): 'singularity_image_folder'}, {}), '(singularity_image_folder)', False, 'from pathlib import Path\n'), ((91, 33, 91, 61), 'pathlib.Path', 'Path', ({(91, 38, 91, 60): 'singularity_image_name'}, {}), '(singularity_image_name)', False, 'from pathlib import Path\n'), ((136, 11, 136, 41), 'pathlib.Path', 'Path', ({(136, 16, 136, 40): 'singularity_image_folder'}, {}), '(singularity_image_folder)', False, 'from pathlib import Path\n'), ((136, 44, 136, 58), 'pathlib.Path', 'Path', ({(136, 49, 136, 57): 'filename'}, {}), '(filename)', False, 'from pathlib import Path\n'), ((378, 35, 378, 65), 'pathlib.Path', 'Path', ({(378, 40, 378, 64): 'singularity_image_folder'}, {}), '(singularity_image_folder)', False, 'from pathlib import Path\n'), ((380, 21, 380, 37), 'pathlib.Path', 'Path', ({(380, 26, 380, 36): 'image_name'}, {}), '(image_name)', False, 'from pathlib import Path\n'), ((383, 8, 388, 9), 'kipoi_utils.external.torchvision.dataset_utils.download_url', 'download_url', (), '', False, 'from kipoi_utils.external.torchvision.dataset_utils import download_url\n'), ((161, 36, 161, 52), 'datetime.datetime.today', 'datetime.today', ({}, {}), '()', False, 'from datetime import datetime\n'), ((179, 40, 179, 56), 'datetime.datetime.today', 'datetime.today', ({}, {}), '()', False, 'from datetime import datetime\n'), ((196, 40, 196, 56), 'datetime.datetime.today', 'datetime.today', ({}, {}), '()', False, 'from datetime import datetime\n')] |
garenchan/policy | policy/_cache.py | fbd056c0474e62252d1fe986fe029cacde6845d8 | # -*- coding: utf-8 -*-
"""
policy._cache
~~~~~~~~~~~~~~~
Cache for policy file.
"""
import os
import logging
LOG = logging.getLogger(__name__)
# Global file cache
CACHE = {}
def read_file(filename: str, force_reload=False):
"""Read a file if it has been modified.
    :param filename: File name to read from.
    :param force_reload: Whether to force re-reading the file, bypassing the cache.
    :returns: A (reloaded, data) tuple; reloaded is True when the file was (re-)read from disk.
"""
if force_reload:
_delete_cached_file(filename)
reloaded = False
mtime = os.path.getmtime(filename)
cache_info = CACHE.setdefault(filename, {})
if not cache_info or mtime > cache_info.get('mtime', 0):
LOG.debug('Reloading cached file %s', filename)
with open(filename) as fp:
cache_info['data'] = fp.read()
cache_info['mtime'] = mtime
reloaded = True
return reloaded, cache_info['data']
def _delete_cached_file(filename: str):
"""Delete cached file if present.
:param filename: Filename to delete
"""
try:
del CACHE[filename]
except KeyError:
pass
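# Illustrative usage sketch, not part of the original module: the first call reads the
# file from disk (reloaded=True); a second call on an unmodified file is answered from
# CACHE (reloaded=False) with the same data. The sample policy content is made up.
if __name__ == "__main__":
    import tempfile
    with tempfile.NamedTemporaryFile("w", suffix=".json", delete=False) as tmp:
        tmp.write('{"default": "rule:admin_or_owner"}')
    print(read_file(tmp.name))  # (True, '{"default": "rule:admin_or_owner"}')
    print(read_file(tmp.name))  # (False, '{"default": "rule:admin_or_owner"}')
    os.remove(tmp.name)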
| [((13, 6, 13, 33), 'logging.getLogger', 'logging.getLogger', ({(13, 24, 13, 32): '__name__'}, {}), '(__name__)', False, 'import logging\n'), ((31, 12, 31, 38), 'os.path.getmtime', 'os.path.getmtime', ({(31, 29, 31, 37): 'filename'}, {}), '(filename)', False, 'import os\n')] |
samn/opencensus-python | contrib/opencensus-ext-django/opencensus/ext/django/middleware.py | d8709f141b67f7f5ba011c440b8ba8fb9cbc419a | # Copyright 2017, OpenCensus Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Django middleware helper to capture and trace a request."""
import logging
from opencensus.ext.django.config import (settings, convert_to_import)
from opencensus.trace import attributes_helper
from opencensus.trace import execution_context
from opencensus.trace import span as span_module
from opencensus.trace import tracer as tracer_module
from opencensus.trace import utils
from opencensus.trace.samplers import probability
try:
from django.utils.deprecation import MiddlewareMixin
except ImportError: # pragma: NO COVER
MiddlewareMixin = object
HTTP_METHOD = attributes_helper.COMMON_ATTRIBUTES['HTTP_METHOD']
HTTP_URL = attributes_helper.COMMON_ATTRIBUTES['HTTP_URL']
HTTP_STATUS_CODE = attributes_helper.COMMON_ATTRIBUTES['HTTP_STATUS_CODE']
REQUEST_THREAD_LOCAL_KEY = 'django_request'
SPAN_THREAD_LOCAL_KEY = 'django_span'
BLACKLIST_PATHS = 'BLACKLIST_PATHS'
GCP_EXPORTER_PROJECT = 'GCP_EXPORTER_PROJECT'
SAMPLING_RATE = 'SAMPLING_RATE'
TRANSPORT = 'TRANSPORT'
SERVICE_NAME = 'SERVICE_NAME'
ZIPKIN_EXPORTER_SERVICE_NAME = 'ZIPKIN_EXPORTER_SERVICE_NAME'
ZIPKIN_EXPORTER_HOST_NAME = 'ZIPKIN_EXPORTER_HOST_NAME'
ZIPKIN_EXPORTER_PORT = 'ZIPKIN_EXPORTER_PORT'
ZIPKIN_EXPORTER_PROTOCOL = 'ZIPKIN_EXPORTER_PROTOCOL'
JAEGER_EXPORTER_HOST_NAME = 'JAEGER_EXPORTER_HOST_NAME'
JAEGER_EXPORTER_PORT = 'JAEGER_EXPORTER_PORT'
JAEGER_EXPORTER_AGENT_HOST_NAME = 'JAEGER_EXPORTER_AGENT_HOST_NAME'
JAEGER_EXPORTER_AGENT_PORT = 'JAEGER_EXPORTER_AGENT_PORT'
JAEGER_EXPORTER_SERVICE_NAME = 'JAEGER_EXPORTER_SERVICE_NAME'
OCAGENT_TRACE_EXPORTER_ENDPOINT = 'OCAGENT_TRACE_EXPORTER_ENDPOINT'
BLACKLIST_HOSTNAMES = 'BLACKLIST_HOSTNAMES'
log = logging.getLogger(__name__)
class _DjangoMetaWrapper(object):
"""
    Wrapper class which takes an HTTP header name and retrieves the value from
Django request.META
"""
def __init__(self, meta=None):
self.meta = meta or _get_django_request().META
def get(self, key):
return self.meta.get('HTTP_' + key.upper().replace('-', '_'))
def _get_django_request():
"""Get Django request from thread local.
:rtype: str
:returns: Django request.
"""
return execution_context.get_opencensus_attr(REQUEST_THREAD_LOCAL_KEY)
def _get_django_span():
"""Get Django span from thread local.
:rtype: str
    :returns: Django span.
"""
return execution_context.get_opencensus_attr(SPAN_THREAD_LOCAL_KEY)
def _get_current_tracer():
"""Get the current request tracer."""
return execution_context.get_opencensus_tracer()
def _set_django_attributes(span, request):
"""Set the django related attributes."""
django_user = getattr(request, 'user', None)
if django_user is None:
return
user_id = django_user.pk
try:
user_name = django_user.get_username()
except AttributeError:
# AnonymousUser in some older versions of Django doesn't implement
# get_username
return
# User id is the django autofield for User model as the primary key
if user_id is not None:
span.add_attribute('django.user.id', str(user_id))
if user_name is not None:
span.add_attribute('django.user.name', str(user_name))
class OpencensusMiddleware(MiddlewareMixin):
"""Saves the request in thread local"""
def __init__(self, get_response=None):
# One-time configuration and initialization.
self.get_response = get_response
self._sampler = settings.SAMPLER
self._exporter = settings.EXPORTER
self._propagator = settings.PROPAGATOR
self._blacklist_paths = settings.params.get(BLACKLIST_PATHS)
# Initialize the sampler
if self._sampler.__name__ == 'ProbabilitySampler':
_rate = settings.params.get(
SAMPLING_RATE, probability.DEFAULT_SAMPLING_RATE)
self.sampler = self._sampler(_rate)
else:
self.sampler = self._sampler()
# Initialize the exporter
transport = convert_to_import(settings.params.get(TRANSPORT))
if self._exporter.__name__ == 'GoogleCloudExporter':
_project_id = settings.params.get(GCP_EXPORTER_PROJECT, None)
self.exporter = self._exporter(
project_id=_project_id,
transport=transport)
elif self._exporter.__name__ == 'ZipkinExporter':
_service_name = self._get_service_name(settings.params)
_zipkin_host_name = settings.params.get(
ZIPKIN_EXPORTER_HOST_NAME, 'localhost')
_zipkin_port = settings.params.get(
ZIPKIN_EXPORTER_PORT, 9411)
_zipkin_protocol = settings.params.get(
ZIPKIN_EXPORTER_PROTOCOL, 'http')
self.exporter = self._exporter(
service_name=_service_name,
host_name=_zipkin_host_name,
port=_zipkin_port,
protocol=_zipkin_protocol,
transport=transport)
elif self._exporter.__name__ == 'TraceExporter':
_service_name = self._get_service_name(settings.params)
_endpoint = settings.params.get(
OCAGENT_TRACE_EXPORTER_ENDPOINT, None)
self.exporter = self._exporter(
service_name=_service_name,
endpoint=_endpoint,
transport=transport)
elif self._exporter.__name__ == 'JaegerExporter':
_service_name = settings.params.get(
JAEGER_EXPORTER_SERVICE_NAME,
self._get_service_name(settings.params))
_jaeger_host_name = settings.params.get(
JAEGER_EXPORTER_HOST_NAME, None)
_jaeger_port = settings.params.get(
JAEGER_EXPORTER_PORT, None)
_jaeger_agent_host_name = settings.params.get(
JAEGER_EXPORTER_AGENT_HOST_NAME, 'localhost')
_jaeger_agent_port = settings.params.get(
JAEGER_EXPORTER_AGENT_PORT, 6831)
self.exporter = self._exporter(
service_name=_service_name,
host_name=_jaeger_host_name,
port=_jaeger_port,
agent_host_name=_jaeger_agent_host_name,
agent_port=_jaeger_agent_port,
transport=transport)
else:
self.exporter = self._exporter(transport=transport)
self.blacklist_hostnames = settings.params.get(
BLACKLIST_HOSTNAMES, None)
# Initialize the propagator
self.propagator = self._propagator()
def process_request(self, request):
"""Called on each request, before Django decides which view to execute.
:type request: :class:`~django.http.request.HttpRequest`
:param request: Django http request.
"""
# Do not trace if the url is blacklisted
if utils.disable_tracing_url(request.path, self._blacklist_paths):
return
# Add the request to thread local
execution_context.set_opencensus_attr(
REQUEST_THREAD_LOCAL_KEY,
request)
execution_context.set_opencensus_attr(
'blacklist_hostnames',
self.blacklist_hostnames)
try:
# Start tracing this request
span_context = self.propagator.from_headers(
_DjangoMetaWrapper(_get_django_request().META))
# Reload the tracer with the new span context
tracer = tracer_module.Tracer(
span_context=span_context,
sampler=self.sampler,
exporter=self.exporter,
propagator=self.propagator)
# Span name is being set at process_view
span = tracer.start_span()
span.span_kind = span_module.SpanKind.SERVER
tracer.add_attribute_to_current_span(
attribute_key=HTTP_METHOD,
attribute_value=request.method)
tracer.add_attribute_to_current_span(
attribute_key=HTTP_URL,
attribute_value=str(request.path))
# Add the span to thread local
            # In some cases (exceptions, timeouts) the current span in the
            # response event will be one of its child spans, so keep a
            # reference to the 'django' span and use it in the response event.
execution_context.set_opencensus_attr(
SPAN_THREAD_LOCAL_KEY,
span)
except Exception: # pragma: NO COVER
log.error('Failed to trace request', exc_info=True)
def process_view(self, request, view_func, *args, **kwargs):
"""Process view is executed before the view function, here we get the
function name add set it as the span name.
"""
# Do not trace if the url is blacklisted
if utils.disable_tracing_url(request.path, self._blacklist_paths):
return
try:
# Get the current span and set the span name to the current
# function name of the request.
tracer = _get_current_tracer()
span = tracer.current_span()
span.name = utils.get_func_name(view_func)
except Exception: # pragma: NO COVER
log.error('Failed to trace request', exc_info=True)
def process_response(self, request, response):
# Do not trace if the url is blacklisted
if utils.disable_tracing_url(request.path, self._blacklist_paths):
return response
try:
span = _get_django_span()
span.add_attribute(
attribute_key=HTTP_STATUS_CODE,
attribute_value=str(response.status_code))
_set_django_attributes(span, request)
tracer = _get_current_tracer()
tracer.end_span()
tracer.finish()
except Exception: # pragma: NO COVER
log.error('Failed to trace request', exc_info=True)
finally:
return response
def _get_service_name(self, params):
_service_name = params.get(
SERVICE_NAME, None)
if _service_name is None:
_service_name = params.get(
ZIPKIN_EXPORTER_SERVICE_NAME, 'my_service')
return _service_name
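# Illustrative note, not part of the original module: the middleware is enabled by adding
# its dotted path to a Django project's MIDDLEWARE setting, e.g.
#     MIDDLEWARE = [..., "opencensus.ext.django.middleware.OpencensusMiddleware"]
# The sampler/exporter/propagator classes and the params read above (SAMPLING_RATE,
# BLACKLIST_PATHS, exporter hosts/ports, ...) are supplied through the settings object in
# opencensus.ext.django.config; the exact Django setting names live in that module (not shown here).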
| [((55, 6, 55, 33), 'logging.getLogger', 'logging.getLogger', ({(55, 24, 55, 32): '__name__'}, {}), '(__name__)', False, 'import logging\n'), ((77, 11, 77, 74), 'opencensus.trace.execution_context.get_opencensus_attr', 'execution_context.get_opencensus_attr', ({(77, 49, 77, 73): 'REQUEST_THREAD_LOCAL_KEY'}, {}), '(REQUEST_THREAD_LOCAL_KEY)', False, 'from opencensus.trace import execution_context\n'), ((86, 11, 86, 71), 'opencensus.trace.execution_context.get_opencensus_attr', 'execution_context.get_opencensus_attr', ({(86, 49, 86, 70): 'SPAN_THREAD_LOCAL_KEY'}, {}), '(SPAN_THREAD_LOCAL_KEY)', False, 'from opencensus.trace import execution_context\n'), ((91, 11, 91, 52), 'opencensus.trace.execution_context.get_opencensus_tracer', 'execution_context.get_opencensus_tracer', ({}, {}), '()', False, 'from opencensus.trace import execution_context\n'), ((127, 32, 127, 68), 'opencensus.ext.django.config.settings.params.get', 'settings.params.get', ({(127, 52, 127, 67): 'BLACKLIST_PATHS'}, {}), '(BLACKLIST_PATHS)', False, 'from opencensus.ext.django.config import settings, convert_to_import\n'), ((189, 35, 190, 38), 'opencensus.ext.django.config.settings.params.get', 'settings.params.get', ({(190, 12, 190, 31): 'BLACKLIST_HOSTNAMES', (190, 33, 190, 37): 'None'}, {}), '(BLACKLIST_HOSTNAMES, None)', False, 'from opencensus.ext.django.config import settings, convert_to_import\n'), ((202, 11, 202, 73), 'opencensus.trace.utils.disable_tracing_url', 'utils.disable_tracing_url', ({(202, 37, 202, 49): 'request.path', (202, 51, 202, 72): 'self._blacklist_paths'}, {}), '(request.path, self._blacklist_paths)', False, 'from opencensus.trace import utils\n'), ((206, 8, 208, 20), 'opencensus.trace.execution_context.set_opencensus_attr', 'execution_context.set_opencensus_attr', ({(207, 12, 207, 36): 'REQUEST_THREAD_LOCAL_KEY', (208, 12, 208, 19): 'request'}, {}), '(REQUEST_THREAD_LOCAL_KEY, request)', False, 'from opencensus.trace import execution_context\n'), ((210, 8, 212, 37), 'opencensus.trace.execution_context.set_opencensus_attr', 'execution_context.set_opencensus_attr', ({(211, 12, 211, 33): '"""blacklist_hostnames"""', (212, 12, 212, 36): 'self.blacklist_hostnames'}, {}), "('blacklist_hostnames', self.\n blacklist_hostnames)", False, 'from opencensus.trace import execution_context\n'), ((254, 11, 254, 73), 'opencensus.trace.utils.disable_tracing_url', 'utils.disable_tracing_url', ({(254, 37, 254, 49): 'request.path', (254, 51, 254, 72): 'self._blacklist_paths'}, {}), '(request.path, self._blacklist_paths)', False, 'from opencensus.trace import utils\n'), ((268, 11, 268, 73), 'opencensus.trace.utils.disable_tracing_url', 'utils.disable_tracing_url', ({(268, 37, 268, 49): 'request.path', (268, 51, 268, 72): 'self._blacklist_paths'}, {}), '(request.path, self._blacklist_paths)', False, 'from opencensus.trace import utils\n'), ((131, 20, 132, 65), 'opencensus.ext.django.config.settings.params.get', 'settings.params.get', ({(132, 16, 132, 29): 'SAMPLING_RATE', (132, 31, 132, 64): 'probability.DEFAULT_SAMPLING_RATE'}, {}), '(SAMPLING_RATE, probability.DEFAULT_SAMPLING_RATE)', False, 'from opencensus.ext.django.config import settings, convert_to_import\n'), ((138, 38, 138, 68), 'opencensus.ext.django.config.settings.params.get', 'settings.params.get', ({(138, 58, 138, 67): 'TRANSPORT'}, {}), '(TRANSPORT)', False, 'from opencensus.ext.django.config import settings, convert_to_import\n'), ((141, 26, 141, 73), 'opencensus.ext.django.config.settings.params.get', 'settings.params.get', ({(141, 46, 141, 66): 
'GCP_EXPORTER_PROJECT', (141, 68, 141, 72): 'None'}, {}), '(GCP_EXPORTER_PROJECT, None)', False, 'from opencensus.ext.django.config import settings, convert_to_import\n'), ((220, 21, 224, 43), 'opencensus.trace.tracer.Tracer', 'tracer_module.Tracer', (), '', True, 'from opencensus.trace import tracer as tracer_module\n'), ((241, 12, 243, 21), 'opencensus.trace.execution_context.set_opencensus_attr', 'execution_context.set_opencensus_attr', ({(242, 16, 242, 37): 'SPAN_THREAD_LOCAL_KEY', (243, 16, 243, 20): 'span'}, {}), '(SPAN_THREAD_LOCAL_KEY, span)', False, 'from opencensus.trace import execution_context\n'), ((262, 24, 262, 54), 'opencensus.trace.utils.get_func_name', 'utils.get_func_name', ({(262, 44, 262, 53): 'view_func'}, {}), '(view_func)', False, 'from opencensus.trace import utils\n'), ((147, 32, 148, 55), 'opencensus.ext.django.config.settings.params.get', 'settings.params.get', ({(148, 16, 148, 41): 'ZIPKIN_EXPORTER_HOST_NAME', (148, 43, 148, 54): '"""localhost"""'}, {}), "(ZIPKIN_EXPORTER_HOST_NAME, 'localhost')", False, 'from opencensus.ext.django.config import settings, convert_to_import\n'), ((149, 27, 150, 43), 'opencensus.ext.django.config.settings.params.get', 'settings.params.get', ({(150, 16, 150, 36): 'ZIPKIN_EXPORTER_PORT', (150, 38, 150, 42): '9411'}, {}), '(ZIPKIN_EXPORTER_PORT, 9411)', False, 'from opencensus.ext.django.config import settings, convert_to_import\n'), ((151, 31, 152, 49), 'opencensus.ext.django.config.settings.params.get', 'settings.params.get', ({(152, 16, 152, 40): 'ZIPKIN_EXPORTER_PROTOCOL', (152, 42, 152, 48): '"""http"""'}, {}), "(ZIPKIN_EXPORTER_PROTOCOL, 'http')", False, 'from opencensus.ext.django.config import settings, convert_to_import\n'), ((161, 24, 162, 54), 'opencensus.ext.django.config.settings.params.get', 'settings.params.get', ({(162, 16, 162, 47): 'OCAGENT_TRACE_EXPORTER_ENDPOINT', (162, 49, 162, 53): 'None'}, {}), '(OCAGENT_TRACE_EXPORTER_ENDPOINT, None)', False, 'from opencensus.ext.django.config import settings, convert_to_import\n'), ((171, 32, 172, 48), 'opencensus.ext.django.config.settings.params.get', 'settings.params.get', ({(172, 16, 172, 41): 'JAEGER_EXPORTER_HOST_NAME', (172, 43, 172, 47): 'None'}, {}), '(JAEGER_EXPORTER_HOST_NAME, None)', False, 'from opencensus.ext.django.config import settings, convert_to_import\n'), ((173, 27, 174, 43), 'opencensus.ext.django.config.settings.params.get', 'settings.params.get', ({(174, 16, 174, 36): 'JAEGER_EXPORTER_PORT', (174, 38, 174, 42): 'None'}, {}), '(JAEGER_EXPORTER_PORT, None)', False, 'from opencensus.ext.django.config import settings, convert_to_import\n'), ((175, 38, 176, 61), 'opencensus.ext.django.config.settings.params.get', 'settings.params.get', ({(176, 16, 176, 47): 'JAEGER_EXPORTER_AGENT_HOST_NAME', (176, 49, 176, 60): '"""localhost"""'}, {}), "(JAEGER_EXPORTER_AGENT_HOST_NAME, 'localhost')", False, 'from opencensus.ext.django.config import settings, convert_to_import\n'), ((177, 33, 178, 49), 'opencensus.ext.django.config.settings.params.get', 'settings.params.get', ({(178, 16, 178, 42): 'JAEGER_EXPORTER_AGENT_PORT', (178, 44, 178, 48): '6831'}, {}), '(JAEGER_EXPORTER_AGENT_PORT, 6831)', False, 'from opencensus.ext.django.config import settings, convert_to_import\n')] |
ICT2x01-p2-4/ICT2x01-p2-4 | codeblockCar/codingPage/tests.py | 6249c0a807354b33db80f367344fe14cb5512840 | from typing import Reversible
from django.test import TestCase, Client
from challenge.models import Challenge
from codingPage.models import Command, Log
from django.core.exceptions import ValidationError
from django.urls import reverse
class CodingPageTest(TestCase):
def setUp(self) -> None:
self.client = Client(HTTP_USER_AGENT='Mozilla/5.0')
self.challenge = Challenge.objects.create(name='abc', map='0,0,0,0,0,0,0,0,0', size=3, difficulty='Easy')
self.command = Command.objects.create(action='Dodo', code=1)
self.log = Log.objects.create(data='123', challenge = self.challenge)
return super().setUp()
def test_validation(self):
"""Test if validation works for creating new command"""
Command.objects.create(action='asd', code=5)
self.assertRaises(ValidationError)
def test_check_code(self):
"""Test if code checkers dont upload to database if log false is given"""
response = self.client.post(
reverse('ajax_view'),
data = {
'code': '1\n2\n3\n',
'log': False,
'challenge_id': 1
},
HTTP_X_REQUESTED_WITH='XMLHttpRequest',
url = '/codingPage/test_code'
)
        # Assumes the view echoes the expected payload in the response body.
        self.assertEqual(response.content.decode(), '123')
| [((10, 22, 10, 59), 'django.test.Client', 'Client', (), '', False, 'from django.test import TestCase, Client\n'), ((11, 25, 11, 113), 'challenge.models.Challenge.objects.create', 'Challenge.objects.create', (), '', False, 'from challenge.models import Challenge\n'), ((12, 23, 12, 68), 'codingPage.models.Command.objects.create', 'Command.objects.create', (), '', False, 'from codingPage.models import Command, Log\n'), ((13, 19, 13, 77), 'codingPage.models.Log.objects.create', 'Log.objects.create', (), '', False, 'from codingPage.models import Command, Log\n'), ((18, 8, 18, 52), 'codingPage.models.Command.objects.create', 'Command.objects.create', (), '', False, 'from codingPage.models import Command, Log\n'), ((24, 12, 24, 32), 'django.urls.reverse', 'reverse', ({(24, 20, 24, 31): '"""ajax_view"""'}, {}), "('ajax_view')", False, 'from django.urls import reverse\n')] |
vigov5/oshougatsu2015 | app/hint/models.py | 38cbf325675ee2c08a6965b8689fad8308eb84eb | import os
import datetime
from app import app, db
class Hint(db.Model):
__tablename__ = 'hints'
id = db.Column(db.Integer, primary_key=True)
description = db.Column(db.Text)
is_open = db.Column(db.Boolean)
problem_id = db.Column(db.Integer, db.ForeignKey('problems.id'))
def __repr__(self):
return '<Hint %r>' % (self.description)
def __init__(self, description='', is_open=False, problem=None):
self.description = description
self.is_open = is_open
self.problem = problem | [((9, 9, 9, 48), 'app.db.Column', 'db.Column', (), '', False, 'from app import app, db\n'), ((10, 18, 10, 36), 'app.db.Column', 'db.Column', ({(10, 28, 10, 35): 'db.Text'}, {}), '(db.Text)', False, 'from app import app, db\n'), ((11, 14, 11, 35), 'app.db.Column', 'db.Column', ({(11, 24, 11, 34): 'db.Boolean'}, {}), '(db.Boolean)', False, 'from app import app, db\n'), ((12, 39, 12, 67), 'app.db.ForeignKey', 'db.ForeignKey', ({(12, 53, 12, 66): '"""problems.id"""'}, {}), "('problems.id')", False, 'from app import app, db\n')] |
almustafa-noureddin/Portfolio-website | base/urls.py | 67462c98fec65e74183ae057e8b31b5bdff1402c | from django.urls import path
from . import views
app_name = "base"
urlpatterns = [
path('', views.IndexView.as_view(), name="home"),
    path('contact/', views.ContactView.as_view(), name="contact"),
]
cds-snc/notification-admin | app/main/views/templates.py | d4056798bf889ad29893667bbb67ead2f8e466e4 | from datetime import datetime, timedelta
from string import ascii_uppercase
from dateutil.parser import parse
from flask import abort, flash, jsonify, redirect, render_template, request, url_for
from flask_babel import _
from flask_babel import lazy_gettext as _l
from flask_login import current_user
from markupsafe import Markup
from notifications_python_client.errors import HTTPError
from notifications_utils.formatters import nl2br
from notifications_utils.recipients import first_column_headings
from app import (
current_service,
service_api_client,
template_api_prefill_client,
template_folder_api_client,
template_statistics_client,
)
from app.main import main
from app.main.forms import (
CreateTemplateForm,
EmailTemplateForm,
LetterTemplateForm,
LetterTemplatePostageForm,
SearchByNameForm,
SetTemplateSenderForm,
SMSTemplateForm,
TemplateAndFoldersSelectionForm,
TemplateFolderForm,
)
from app.main.views.send import get_example_csv_rows, get_sender_details
from app.models.service import Service
from app.models.template_list import TemplateList, TemplateLists
from app.template_previews import TemplatePreview, get_page_count_for_letter
from app.utils import (
email_or_sms_not_enabled,
get_template,
should_skip_template_page,
user_has_permissions,
user_is_platform_admin,
)
form_objects = {
"email": EmailTemplateForm,
"sms": SMSTemplateForm,
"letter": LetterTemplateForm,
}
def get_email_preview_template(template, template_id, service_id):
email_preview_template = get_template(
template,
current_service,
letter_preview_url=url_for(
".view_letter_template_preview",
service_id=service_id,
template_id=template_id,
filetype="png",
),
show_recipient=True,
page_count=get_page_count_for_letter(template),
)
return email_preview_template
@main.route("/services/<service_id>/templates/<uuid:template_id>")
@user_has_permissions()
def view_template(service_id, template_id):
template = current_service.get_template(template_id)
template_folder = current_service.get_template_folder(template["folder"])
user_has_template_permission = current_user.has_template_folder_permission(template_folder)
if should_skip_template_page(template["template_type"]):
return redirect(url_for(".send_one_off", service_id=service_id, template_id=template_id))
return render_template(
"views/templates/template.html",
template=get_email_preview_template(template, template_id, service_id),
template_postage=template["postage"],
user_has_template_permission=user_has_template_permission,
)
@main.route("/services/<service_id>/start-tour/<uuid:template_id>")
@user_has_permissions("view_activity")
def start_tour(service_id, template_id):
template = current_service.get_template(template_id)
if template["template_type"] != "email":
abort(404)
return render_template(
"views/templates/start-tour.html",
template=get_template(
template,
current_service,
show_recipient=True,
),
help="1",
)
@main.route("/services/<service_id>/templates", methods=["GET", "POST"])
@main.route(
"/services/<service_id>/templates/folders/<template_folder_id>",
methods=["GET", "POST"],
)
@main.route("/services/<service_id>/templates/<template_type>", methods=["GET", "POST"])
@main.route(
"/services/<service_id>/templates/<template_type>/folders/<template_folder_id>",
methods=["GET", "POST"],
)
@user_has_permissions()
def choose_template(service_id, template_type="all", template_folder_id=None):
template_folder = current_service.get_template_folder(template_folder_id)
user_has_template_folder_permission = current_user.has_template_folder_permission(template_folder)
template_list = TemplateList(current_service, template_type, template_folder_id, current_user)
templates_and_folders_form = TemplateAndFoldersSelectionForm(
all_template_folders=current_service.get_user_template_folders(current_user),
template_list=template_list,
template_type=template_type,
allow_adding_letter_template=current_service.has_permission("letter"),
allow_adding_copy_of_template=(current_service.all_templates or len(current_user.service_ids) > 1),
)
option_hints = {template_folder_id: "current folder"}
if request.method == "POST" and templates_and_folders_form.validate_on_submit():
if not current_user.has_permissions("manage_templates"):
abort(403)
try:
return process_folder_management_form(templates_and_folders_form, template_folder_id)
except HTTPError as e:
flash(e.message)
if "templates_and_folders" in templates_and_folders_form.errors:
flash(_("Select at least one template or folder"))
initial_state = request.args.get("initial_state")
if request.method == "GET" and initial_state:
templates_and_folders_form.op = initial_state
sending_view = request.args.get("view") == "sending"
return render_template(
"views/templates/choose.html",
current_template_folder_id=template_folder_id,
template_folder_path=current_service.get_template_folder_path(template_folder_id),
template_list=template_list,
show_search_box=current_service.count_of_templates_and_folders > 7,
show_template_nav=(current_service.has_multiple_template_types and (len(current_service.all_templates) > 2)),
sending_view=sending_view,
template_nav_items=get_template_nav_items(template_folder_id, sending_view),
template_type=template_type,
search_form=SearchByNameForm(),
templates_and_folders_form=templates_and_folders_form,
move_to_children=templates_and_folders_form.move_to.children(),
user_has_template_folder_permission=user_has_template_folder_permission,
option_hints=option_hints,
)
def process_folder_management_form(form, current_folder_id):
current_service.get_template_folder_with_user_permission_or_403(current_folder_id, current_user)
new_folder_id = None
if form.is_add_folder_op:
new_folder_id = template_folder_api_client.create_template_folder(
current_service.id, name=form.get_folder_name(), parent_id=current_folder_id
)
if form.is_move_op:
# if we've just made a folder, we also want to move there
move_to_id = new_folder_id or form.move_to.data
current_service.move_to_folder(ids_to_move=form.templates_and_folders.data, move_to=move_to_id)
return redirect(request.url)
def get_template_nav_label(value):
return {
"all": _l("All"),
"sms": _l("Text message"),
"email": _l("Email"),
"letter": _l("Letter"),
}[value]
def get_template_nav_items(template_folder_id, sending_view):
return [
(
get_template_nav_label(key),
key,
url_for(
".choose_template",
service_id=current_service.id,
template_type=key,
template_folder_id=template_folder_id,
view="sending" if sending_view else None,
),
"",
)
for key in ["all"] + current_service.available_template_types
]
@main.route("/services/<service_id>/templates/<template_id>.<filetype>")
@user_has_permissions()
def view_letter_template_preview(service_id, template_id, filetype):
if filetype not in ("pdf", "png"):
abort(404)
db_template = current_service.get_template(template_id)
return TemplatePreview.from_database_object(db_template, filetype, page=request.args.get("page"))
@main.route("/templates/letter-preview-image/<filename>")
@user_is_platform_admin
def letter_branding_preview_image(filename):
template = {
"subject": "An example letter",
"content": (
"Lorem Ipsum is simply dummy text of the printing and typesetting "
"industry.\n\nLorem Ipsum has been the industry’s standard dummy "
"text ever since the 1500s, when an unknown printer took a galley "
"of type and scrambled it to make a type specimen book.\n\n"
"# History\n\nIt has survived not only\n\n"
"* five centuries\n"
"* but also the leap into electronic typesetting\n\n"
"It was popularised in the 1960s with the release of Letraset "
"sheets containing Lorem Ipsum passages, and more recently with "
"desktop publishing software like Aldus PageMaker including "
"versions of Lorem Ipsum.\n\n"
"The point of using Lorem Ipsum is that it has a more-or-less "
"normal distribution of letters, as opposed to using ‘Content "
"here, content here’, making it look like readable English."
),
}
filename = None if filename == "no-branding" else filename
return TemplatePreview.from_example_template(template, filename)
def _view_template_version(service_id, template_id, version, letters_as_pdf=False):
return dict(
template=get_template(
current_service.get_template(template_id, version=version),
current_service,
letter_preview_url=url_for(
".view_template_version_preview",
service_id=service_id,
template_id=template_id,
version=version,
filetype="png",
)
if not letters_as_pdf
else None,
)
)
@main.route("/services/<service_id>/templates/<template_id>/version/<int:version>")
@user_has_permissions()
def view_template_version(service_id, template_id, version):
return render_template(
"views/templates/template_history.html",
**_view_template_version(service_id=service_id, template_id=template_id, version=version),
)
@main.route("/services/<service_id>/templates/<template_id>/version/<int:version>.<filetype>")
@user_has_permissions()
def view_template_version_preview(service_id, template_id, version, filetype):
db_template = current_service.get_template(template_id, version=version)
return TemplatePreview.from_database_object(db_template, filetype)
def _add_template_by_type(template_type, template_folder_id):
if template_type == "copy-existing":
return redirect(
url_for(
".choose_template_to_copy",
service_id=current_service.id,
)
)
if template_type == "letter":
blank_letter = service_api_client.create_service_template(
"New letter template",
"letter",
"Body",
current_service.id,
"Main heading",
"normal",
template_folder_id,
)
return redirect(
url_for(
".view_template",
service_id=current_service.id,
template_id=blank_letter["data"]["id"],
)
)
if email_or_sms_not_enabled(template_type, current_service.permissions):
return redirect(
url_for(
".action_blocked",
service_id=current_service.id,
notification_type=template_type,
return_to="add_new_template",
template_id="0",
)
)
else:
return redirect(
url_for(
".add_service_template",
service_id=current_service.id,
template_type=template_type,
template_folder_id=template_folder_id,
)
)
@main.route("/services/<service_id>/templates/create", methods=["GET", "POST"])
@main.route("/services/<service_id>/templates/folders/<template_folder_id>/create", methods=["GET", "POST"])
@main.route("/services/<service_id>/templates/<template_type>/create", methods=["GET", "POST"])
@main.route("/services/<service_id>/templates/<template_type>/folders/<template_folder_id>/create", methods=["GET", "POST"])
@user_has_permissions("manage_templates")
def create_template(service_id, template_type="all", template_folder_id=None):
form = CreateTemplateForm()
if request.method == "POST" and form.validate_on_submit():
try:
return _add_template_by_type(
form.what_type.data,
template_folder_id,
)
except HTTPError as e:
flash(e.message)
return render_template(
"views/templates/create.html",
service_id=service_id,
template_folder_id=template_folder_id,
template_type=template_type,
form=form,
disabled_options={},
option_hints={},
)
@main.route("/services/<service_id>/templates/copy")
@main.route("/services/<service_id>/templates/all/copy")
@main.route("/services/<service_id>/templates/email/copy")
@main.route("/services/<service_id>/templates/sms/copy")
@main.route("/services/<service_id>/templates/copy/from-folder/<uuid:from_folder>")
@main.route("/services/<service_id>/templates/copy/from-service/<uuid:from_service>")
@main.route("/services/<service_id>/templates/copy/from-service/<uuid:from_service>/from-folder/<uuid:from_folder>")
@main.route("/services/<service_id>/templates/all/folders/<uuid:from_folder>/copy")
@user_has_permissions("manage_templates")
def choose_template_to_copy(
service_id,
from_service=None,
from_folder=None,
):
if from_folder and from_service is None:
from_service = service_id
if from_service:
current_user.belongs_to_service_or_403(from_service)
service = Service(service_api_client.get_service(from_service)["data"])
return render_template(
"views/templates/copy.html",
services_templates_and_folders=TemplateList(service, template_folder_id=from_folder, user=current_user),
template_folder_path=service.get_template_folder_path(from_folder),
from_service=service,
search_form=SearchByNameForm(),
)
else:
return render_template(
"views/templates/copy.html",
services_templates_and_folders=TemplateLists(current_user),
search_form=SearchByNameForm(),
)
@main.route("/services/<service_id>/templates/copy/<uuid:template_id>", methods=["GET", "POST"])
@user_has_permissions("manage_templates")
def copy_template(service_id, template_id):
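    """Copy a template from another service: show it pre-filled in the edit form, and save it on POST."""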
from_service = request.args.get("from_service")
current_user.belongs_to_service_or_403(from_service)
template = service_api_client.get_service_template(from_service, str(template_id))["data"]
template_folder = template_folder_api_client.get_template_folder(from_service, template["folder"])
if not current_user.has_template_folder_permission(template_folder):
abort(403)
if request.method == "POST":
return add_service_template(
service_id,
template["template_type"],
template_folder_id=template_folder.get("id"),
)
template["template_content"] = template["content"]
template["name"] = _get_template_copy_name(template, current_service.all_templates)
form = form_objects[template["template_type"]](**template)
return render_template(
f"views/edit-{template['template_type']}-template.html",
form=form,
template=template,
heading=_l("Copy email template") if template["template_type"] == "email" else _l("Copy text message template"),
service_id=service_id,
services=current_user.service_ids,
)
def _get_template_copy_name(template, existing_templates):
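    """Pick a name for the copy that doesn't clash with existing templates: 'name (copy)', then 'name (copy 2)', and so on."""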
template_names = [existing["name"] for existing in existing_templates]
for index in reversed(range(1, 10)):
if "{} (copy {})".format(template["name"], index) in template_names:
return "{} (copy {})".format(template["name"], index + 1)
if "{} (copy)".format(template["name"]) in template_names:
return "{} (copy 2)".format(template["name"])
return "{} (copy)".format(template["name"])
@main.route("/services/<service_id>/templates/action-blocked/<notification_type>/<return_to>/<template_id>")
@user_has_permissions("manage_templates")
def action_blocked(service_id, notification_type, return_to, template_id):
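    """Show the page explaining why an action on this notification type is blocked."""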
if notification_type == "sms":
notification_type = "text messages"
elif notification_type == "email":
notification_type = "emails"
return render_template(
"views/templates/action_blocked.html",
service_id=service_id,
notification_type=notification_type,
return_to=return_to,
template_id=template_id,
)
@main.route(
"/services/<service_id>/templates/folders/<template_folder_id>/manage",
methods=["GET", "POST"],
)
@user_has_permissions("manage_templates")
def manage_template_folder(service_id, template_folder_id):
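    """Rename a template folder and choose which team members can access it."""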
template_folder = current_service.get_template_folder_with_user_permission_or_403(template_folder_id, current_user)
form = TemplateFolderForm(
name=template_folder["name"],
users_with_permission=template_folder.get("users_with_permission", None),
all_service_users=[user for user in current_service.active_users if user.id != current_user.id],
)
if form.validate_on_submit():
if current_user.has_permissions("manage_service") and form.users_with_permission.all_service_users:
users_with_permission = form.users_with_permission.data + [current_user.id]
else:
users_with_permission = None
template_folder_api_client.update_template_folder(
current_service.id,
template_folder_id,
name=form.name.data,
users_with_permission=users_with_permission,
)
return redirect(
url_for(
".choose_template",
service_id=service_id,
template_folder_id=template_folder_id,
)
)
return render_template(
"views/templates/manage-template-folder.html",
form=form,
template_folder_path=current_service.get_template_folder_path(template_folder_id),
current_service_id=current_service.id,
template_folder_id=template_folder_id,
template_type="all",
)
@main.route(
"/services/<service_id>/templates/folders/<template_folder_id>/delete",
methods=["GET", "POST"],
)
@user_has_permissions("manage_templates")
def delete_template_folder(service_id, template_folder_id):
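    """Delete a template folder, but only when it is empty; asks for confirmation on GET."""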
template_folder = current_service.get_template_folder_with_user_permission_or_403(template_folder_id, current_user)
if len(current_service.get_template_folders_and_templates(template_type="all", template_folder_id=template_folder_id)) > 0:
flash(_l("You must empty this folder before you can delete it"), "info")
return redirect(
url_for(
".choose_template",
service_id=service_id,
template_type="all",
template_folder_id=template_folder_id,
)
)
if request.method == "POST":
try:
template_folder_api_client.delete_template_folder(current_service.id, template_folder_id)
return redirect(
url_for(
".choose_template",
service_id=service_id,
template_folder_id=template_folder["parent_id"],
)
)
except HTTPError as e:
msg = _l("Folder is not empty")
if e.status_code == 400 and msg in e.message:
flash(_("You must empty this folder before you can delete it"), "info")
return redirect(
url_for(
".choose_template",
service_id=service_id,
template_type="all",
template_folder_id=template_folder_id,
)
)
else:
abort(500, e)
else:
flash(
"{} ‘{}’ {}".format(
_l("Are you sure you want to delete the"),
template_folder["name"],
_l("folder?"),
),
"delete",
)
return manage_template_folder(service_id, template_folder_id)
@main.route("/services/templates/<template_id>/get-data", methods=["POST"])
def get_template_data(template_id):
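    """Return pre-fill data for a template as JSON."""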
data = template_api_prefill_client.get_template(template_id)
return jsonify({"result": data})
@main.route("/services/<service_id>/templates/add-<template_type>", methods=["GET", "POST"])
@main.route(
"/services/<service_id>/templates/folders/<template_folder_id>/add-<template_type>",
methods=["GET", "POST"],
)
@user_has_permissions("manage_templates")
def add_service_template(service_id, template_type, template_folder_id=None):
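    """Create a new email, SMS or letter template, optionally inside a folder."""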
if template_type not in ["sms", "email", "letter"]:
abort(404)
if not current_service.has_permission("letter") and template_type == "letter":
abort(403)
form = form_objects[template_type]()
if form.validate_on_submit():
if form.process_type.data != "normal":
abort_403_if_not_admin_user()
try:
new_template = service_api_client.create_service_template(
form.name.data,
template_type,
form.template_content.data,
service_id,
form.subject.data if hasattr(form, "subject") else None,
form.process_type.data,
template_folder_id,
)
except HTTPError as e:
if (
e.status_code == 400
and "content" in e.message
and any(["character count greater than" in x for x in e.message["content"]])
):
form.template_content.errors.extend(e.message["content"])
else:
raise e
else:
flash(_("'{}' template saved").format(form.name.data), "default_with_tick")
return redirect(
url_for(
".view_template",
service_id=service_id,
template_id=new_template["data"]["id"],
)
)
if email_or_sms_not_enabled(template_type, current_service.permissions):
return redirect(
url_for(
".action_blocked",
service_id=service_id,
notification_type=template_type,
template_folder_id=template_folder_id,
return_to="templates",
template_id="0",
)
)
else:
return render_template(
f"views/edit-{template_type}-template.html",
form=form,
template_type=template_type,
template_folder_id=template_folder_id,
service_id=service_id,
heading=_l("New email template") if template_type == "email" else _l("New text message template"),
)
def abort_403_if_not_admin_user():
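    """Abort with 403 unless the current user is a platform admin."""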
if not current_user.platform_admin:
abort(403)
@main.route("/services/<service_id>/templates/<template_id>/edit", methods=["GET", "POST"])
@user_has_permissions("manage_templates")
def edit_service_template(service_id, template_id):
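    """Edit a template; if the changes add new placeholders, show a breaking-change confirmation page before saving."""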
template = current_service.get_template_with_user_permission_or_403(template_id, current_user)
template["template_content"] = template["content"]
form = form_objects[template["template_type"]](**template)
if form.validate_on_submit():
if form.process_type.data != template["process_type"]:
abort_403_if_not_admin_user()
subject = form.subject.data if hasattr(form, "subject") else None
new_template_data = {
"name": form.name.data,
"content": form.template_content.data,
"subject": subject,
"template_type": template["template_type"],
"id": template["id"],
"process_type": form.process_type.data,
"reply_to_text": template["reply_to_text"],
}
new_template = get_template(new_template_data, current_service)
template_change = get_template(template, current_service).compare_to(new_template)
if template_change.placeholders_added and not request.form.get("confirm"):
example_column_headings = first_column_headings[new_template.template_type] + list(new_template.placeholders)
return render_template(
"views/templates/breaking-change.html",
template_change=template_change,
new_template=new_template,
column_headings=list(ascii_uppercase[: len(example_column_headings)]),
example_rows=[
example_column_headings,
get_example_csv_rows(new_template),
get_example_csv_rows(new_template),
],
form=form,
)
try:
service_api_client.update_service_template(
template_id,
form.name.data,
template["template_type"],
form.template_content.data,
service_id,
subject,
form.process_type.data,
)
except HTTPError as e:
if e.status_code == 400:
if "content" in e.message and any(["character count greater than" in x for x in e.message["content"]]):
form.template_content.errors.extend(e.message["content"])
else:
raise e
else:
raise e
else:
flash(_("'{}' template saved").format(form.name.data), "default_with_tick")
return redirect(url_for(".view_template", service_id=service_id, template_id=template_id))
if email_or_sms_not_enabled(template["template_type"], current_service.permissions):
return redirect(
url_for(
".action_blocked",
service_id=service_id,
notification_type=template["template_type"],
return_to="view_template",
template_id=template_id,
)
)
else:
return render_template(
f"views/edit-{template['template_type']}-template.html",
form=form,
template=template,
heading=_l("Edit email template") if template["template_type"] == "email" else _l("Edit text message template"),
)
@main.route("/services/<service_id>/templates/<template_id>/delete", methods=["GET", "POST"])
@user_has_permissions("manage_templates")
def delete_service_template(service_id, template_id):
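    """Ask for confirmation (showing when the template was last used) and delete the template on POST."""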
template = current_service.get_template_with_user_permission_or_403(template_id, current_user)
if request.method == "POST":
service_api_client.delete_service_template(service_id, template_id)
return redirect(
url_for(
".choose_template",
service_id=service_id,
template_folder_id=template["folder"],
)
)
try:
last_used_notification = template_statistics_client.get_template_statistics_for_template(service_id, template["id"])
last_used_text = ""
if not last_used_notification:
last_used_text = _l("more than seven days")
else:
last_used_date = parse(last_used_notification["created_at"]).replace(tzinfo=None)
last_used_text = get_human_readable_delta(last_used_date, datetime.utcnow())
message = "{} {} {}".format(_l("This template was last used"), last_used_text, _l("ago."))
except HTTPError as e:
if e.status_code == 404:
message = None
else:
raise e
flash(
[
"{} ‘{}’?".format(_l("Are you sure you want to delete"), template["name"]),
message,
],
"delete",
)
return render_template(
"views/templates/template.html",
template=get_email_preview_template(template, template["id"], service_id),
user_has_template_permission=True,
)
@main.route("/services/<service_id>/templates/<template_id>/redact", methods=["GET"])
@user_has_permissions("manage_templates")
def confirm_redact_template(service_id, template_id):
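    """Show the template page with the redaction confirmation message."""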
template = current_service.get_template_with_user_permission_or_403(template_id, current_user)
return render_template(
"views/templates/template.html",
template=get_email_preview_template(template, template["id"], service_id),
user_has_template_permission=True,
show_redaction_message=True,
)
@main.route("/services/<service_id>/templates/<template_id>/redact", methods=["POST"])
@user_has_permissions("manage_templates")
def redact_template(service_id, template_id):
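    """Redact the template so personalised content is hidden for messages sent with it."""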
service_api_client.redact_service_template(service_id, template_id)
flash(
_("Personalised content will be hidden for messages sent with this template"),
"default_with_tick",
)
return redirect(
url_for(
".view_template",
service_id=service_id,
template_id=template_id,
)
)
@main.route("/services/<service_id>/templates/<template_id>/versions")
@user_has_permissions("view_activity")
def view_template_versions(service_id, template_id):
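    """List every previous version of a template, with preview links."""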
return render_template(
"views/templates/choose_history.html",
versions=[
get_template(
template,
current_service,
letter_preview_url=url_for(
".view_template_version_preview",
service_id=service_id,
template_id=template_id,
version=template["version"],
filetype="png",
),
)
for template in service_api_client.get_service_template_versions(service_id, template_id)["data"]
],
)
@main.route(
"/services/<service_id>/templates/<template_id>/set-template-sender",
methods=["GET", "POST"],
)
@user_has_permissions("manage_templates")
def set_template_sender(service_id, template_id):
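    """Let the user pick which of the service's senders this template should use."""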
template = current_service.get_template_with_user_permission_or_403(template_id, current_user)
sender_details = get_template_sender_form_dict(service_id, template)
no_senders = sender_details.get("no_senders", False)
form = SetTemplateSenderForm(
sender=sender_details["current_choice"],
sender_choices=sender_details["value_and_label"],
)
option_hints = {sender_details["default_sender"]: "(Default)"}
if form.validate_on_submit():
service_api_client.update_service_template_sender(
service_id,
template_id,
form.sender.data if form.sender.data else None,
)
return redirect(url_for(".view_template", service_id=service_id, template_id=template_id))
return render_template(
"views/templates/set-template-sender.html",
form=form,
template_id=template_id,
no_senders=no_senders,
option_hints=option_hints,
)
@main.route(
"/services/<service_id>/templates/<template_id>/edit-postage",
methods=["GET", "POST"],
)
@user_has_permissions("manage_templates")
def edit_template_postage(service_id, template_id):
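    """Change the postage class of a letter template (404 for other template types)."""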
template = current_service.get_template_with_user_permission_or_403(template_id, current_user)
if template["template_type"] != "letter":
abort(404)
form = LetterTemplatePostageForm(**template)
if form.validate_on_submit():
postage = form.postage.data
service_api_client.update_service_template_postage(service_id, template_id, postage)
return redirect(url_for(".view_template", service_id=service_id, template_id=template_id))
return render_template(
"views/templates/edit-template-postage.html",
form=form,
service_id=service_id,
template_id=template_id,
template_postage=template["postage"],
)
def get_template_sender_form_dict(service_id, template):
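    """Build the sender choices and current selection for the set-template-sender form, keyed on the template's channel."""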
context = {
"email": {"field_name": "email_address"},
"letter": {"field_name": "contact_block"},
"sms": {"field_name": "sms_sender"},
}[template["template_type"]]
sender_format = context["field_name"]
service_senders = get_sender_details(service_id, template["template_type"])
context["default_sender"] = next((x["id"] for x in service_senders if x["is_default"]), "Not set")
if not service_senders:
context["no_senders"] = True
context["value_and_label"] = [(sender["id"], Markup(nl2br(sender[sender_format]))) for sender in service_senders]
context["value_and_label"].insert(0, ("", "Blank")) # Add blank option to start of list
context["current_choice"] = template["service_letter_contact"] if template["service_letter_contact"] else ""
return context
def get_human_readable_delta(from_time, until_time):
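    """Describe the gap between two datetimes as 'under a minute', 'N minutes', 'N hours' or 'N days'."""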
delta = until_time - from_time
if delta < timedelta(seconds=60):
return "under a minute"
elif delta < timedelta(hours=1):
minutes = int(delta.seconds / 60)
return "{} minute{}".format(minutes, "" if minutes == 1 else "s")
elif delta < timedelta(days=1):
hours = int(delta.seconds / 3600)
return "{} hour{}".format(hours, "" if hours == 1 else "s")
else:
days = delta.days
return "{} day{}".format(days, "" if days == 1 else "s")
MarcelloVendruscolo/DeepLearningForImageAnalysis | linearRegression_gradientDescent/linearRegression_gradientDescent.py | 0f57d63510d0f7b2729d214b3729a21a663794b5 | import numpy as np
from load_auto import load_auto
import matplotlib.pyplot as plt
import math
def initialize_parameters(observation_dimension):
# observation_dimension: number of features taken into consideration of the input
# returns weights as a vector and offset as a scalar
weights = np.zeros((observation_dimension, 1))
offset_b = 0
return weights, offset_b
def model_forward(train_dataset, weights, offset_b):
# train_dataset: input data points
# weights and offset_b: model parameters
# returns the output predictions as a vector corresponding to each input data point
number_observations = np.size(train_dataset, axis = 1)
predictions = np.zeros((1, number_observations))
for observation in range(0, number_observations):
with np.errstate(over='raise', invalid='raise'):
try:
predictions[0, observation] = weights.T @ train_dataset[:, observation] + offset_b
except:
predictions[0, observation] = np.inf
return predictions
def compute_cost(predictions, train_labels):
# predictions: computed output values
# train_labels: true output values (ground truth)
# returns the cost function value
number_observations = np.size(predictions, axis = 1)
sum = 0
with np.errstate(over='raise', invalid='raise'):
try:
for observation in range(0, number_observations):
sum += (train_labels[observation, 0] - predictions[0, observation])**2
except:
return np.inf
return sum/number_observations
def model_backward(observation_dimension, train_dataset, predictions, train_labels):
# observation_dimension: number of features taken into consideration of the input
# train_dataset: input data points
# predictions: computed output values
# train_labels: true output values (ground truth)
# returns the gradient of the cost function with respect to all parameters
number_observations = np.size(train_dataset, axis = 1)
sum_weights = np.zeros((observation_dimension, 1))
sum_offset = 0
for observation in range(0, number_observations):
diff = predictions[0, observation] - train_labels[observation, 0]
with np.errstate(over='raise', invalid='raise'):
try:
sum_weights += train_dataset[:, observation].reshape(observation_dimension,-1) * diff
sum_offset += diff
except:
return np.full(sum_weights.shape, np.inf), np.inf
gradient_weights = sum_weights * (2/number_observations)
gradient_offset = sum_offset * (2/number_observations)
return gradient_weights, gradient_offset
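# Editor's note (not part of the original script): the functions above implement the
# least-squares cost and its gradients. With predictions y_hat_i = w.T x_i + b over N
# observations, the code computes
#   cost             = (1/N) * sum_i (y_i - y_hat_i)**2
#   gradient_weights = (2/N) * sum_i x_i * (y_hat_i - y_i)
#   gradient_offset  = (2/N) * sum_i (y_hat_i - y_i)
# which update_parameters below uses for the gradient-descent step
#   w <- w - learning_rate * gradient_weights,   b <- b - learning_rate * gradient_offset.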
def update_parameters(weights, offset_b, gradient_weights, gradient_offset, learning_rate):
# weights and offset_b: parameters computed (or initialised) in this iteration
# gradient_weights and gradient_offset: gradients of the cost function
# learning_rate: step size
# returns the updated parameters for the next iteration
updated_weights = weights - (learning_rate * gradient_weights)
updated_offset = offset_b - (learning_rate * gradient_offset)
return updated_weights, updated_offset
def predict(train_dataset, weights, offset_b):
return model_forward(train_dataset, weights, offset_b)
def train_linear_model(train_dataset, train_labels, number_iterations, learning_rate):
# train_dataset: input data points
# train_labels: true output values (ground truth)
# number_iterations and learning_rate: user-defined hyperparameters
# returns the model parameters and cost function values as a vector
cost = []
observation_dimension = np.size(train_dataset, axis = 0)
weights, offset_b = initialize_parameters(observation_dimension)
while number_iterations > 0:
predictions = predict(train_dataset, weights, offset_b)
cost.append(compute_cost(predictions, train_labels))
gradient_weights, gradient_offset = model_backward(observation_dimension, train_dataset, predictions, train_labels)
weights, offset_b = update_parameters(weights, offset_b, gradient_weights, gradient_offset, learning_rate)
number_iterations -= 1
return weights, offset_b, cost
def plotting_cost_iteration(learning_rates, cost_consolidated):
for counter in range(0, cost_consolidated.shape[0]):
plt.plot(np.arange(start=1, stop = (cost_consolidated.shape[1] + 1), step= 1), cost_consolidated[counter,:], label=r'$\alpha = $' + str(learning_rates[counter]))
plt.xlabel('Iteration')
plt.ylabel('Cost')
plt.title('Cost per Iteration')
plt.ylim(0,720)
plt.legend()
plt.show()
def plotting_horsepower_mpg(train_dataset, train_labels, weights, offset_b):
plt.scatter(train_dataset[0,:], train_labels[:,0], label='Data points')
plt.plot(train_dataset[0,:], np.array(train_dataset[0,:]*weights + offset_b).reshape(train_labels.shape),'r-', label='Linear Regression')
plt.xlabel('(normalised) Horsepower')
plt.ylabel('MPG')
plt.title('MPG vs (normalised) Horsepower')
plt.legend()
plt.show()
PATH_DATASET = '/Users/marcellovendruscolo/Documents/vscode-workspace/DeepLearningForImageAnalysis/linearRegression_gradientDescent/Auto.csv'
train_dataset, train_labels = load_auto(PATH_DATASET)
train_dataset = np.array(train_dataset)
non_normalised_dataset = np.array(np.transpose(train_dataset))
non_normalised_horsepower = non_normalised_dataset[2,:].reshape(1,-1)
train_labels = np.array(train_labels)
mean = np.mean(train_dataset, axis=0)
sd = np.std(train_dataset, axis=0)
for col in range(0, train_dataset.shape[1]):
train_dataset[:,col] = (train_dataset[:,col] - mean[col])/sd[col]
normalised_dataset = np.transpose(train_dataset)
horsepower_dataset = normalised_dataset[2,:].reshape(1,-1)
# Exercise 1.4.1 and Exercise 1.4.2:
# learning_rate = 0.1
# number_iterations = 1000
# print('\nChoice of input dataset: (i) Only horsepower feature.')
# weights, offset_b, cost_function_value = train_linear_model(horsepower_dataset, train_labels, number_iterations, learning_rate)
# print('Number of iterations: ' +str(number_iterations))
# print('Learning rate: ' +str(learning_rate))
# print('Cost function value: ' +str(cost_function_value[len(cost_function_value) - 1]))
# print('Weights: ' +str(weights))
# print('Offset: ' +str(offset_b))
# print('\nChoice of input dataset: (ii) All features except name.')
# weights, offset_b, cost_function_value = train_linear_model(normalised_dataset, train_labels, number_iterations, learning_rate)
# print('Number of iterations: ' +str(number_iterations))
# print('Learning rate: ' +str(learning_rate))
# print('Cost function value: ' +str(cost_function_value[len(cost_function_value) - 1]))
# print('Weights: ' +str(weights))
# print('Offset: ' +str(offset_b) + '\n')
# Exercise 1.4.3:
# learning_rates = [1, 1e-1, 1e-2, 1e-3, 1e-4]
# number_iterations = 1000
# cost_consolidated = np.ndarray(shape=(len(learning_rates), number_iterations))
# for counter in range(0, len(learning_rates)):
# weights, offset_b, cost_consolidated[counter,:] = train_linear_model(normalised_dataset, train_labels, number_iterations, learning_rates[counter])
# plotting_cost_iteration(learning_rates, cost_consolidated)
# Exercise 1.4.4:
# learning_rate = [1, 1e-1, 1e-2, 1e-3, 1e-4]
# number_iterations = 1000
# cost_consolidated = np.ndarray(shape=(len(learning_rate), number_iterations))
# for counter in range(0, len(learning_rate)):
# weights, offset_b, cost_consolidated[counter,:] = train_linear_model(non_normalised_dataset, train_labels, number_iterations, learning_rate[counter])
# plotting_cost_iteration(learning_rate, cost_consolidated)
# Exercise 1.4.5:
# learning_rate = 0.1
# number_iterations = 1000
# weights, offset_b, cost_function_value = train_linear_model(horsepower_dataset, train_labels, number_iterations, learning_rate)
# plotting_horsepower_mpg(horsepower_dataset, train_labels, weights, offset_b) | [((110, 30, 110, 53), 'load_auto.load_auto', 'load_auto', ({(110, 40, 110, 52): 'PATH_DATASET'}, {}), '(PATH_DATASET)', False, 'from load_auto import load_auto\n'), ((111, 16, 111, 39), 'numpy.array', 'np.array', ({(111, 25, 111, 38): 'train_dataset'}, {}), '(train_dataset)', True, 'import numpy as np\n'), ((114, 15, 114, 37), 'numpy.array', 'np.array', ({(114, 24, 114, 36): 'train_labels'}, {}), '(train_labels)', True, 'import numpy as np\n'), ((116, 7, 116, 37), 'numpy.mean', 'np.mean', (), '', True, 'import numpy as np\n'), ((117, 5, 117, 34), 'numpy.std', 'np.std', (), '', True, 'import numpy as np\n'), ((120, 21, 120, 48), 'numpy.transpose', 'np.transpose', ({(120, 34, 120, 47): 'train_dataset'}, {}), '(train_dataset)', True, 'import numpy as np\n'), ((9, 14, 9, 50), 'numpy.zeros', 'np.zeros', ({(9, 23, 9, 49): '(observation_dimension, 1)'}, {}), '((observation_dimension, 1))', True, 'import numpy as np\n'), ((17, 26, 17, 58), 'numpy.size', 'np.size', (), '', True, 'import numpy as np\n'), ((18, 18, 18, 52), 'numpy.zeros', 'np.zeros', ({(18, 27, 18, 51): '(1, number_observations)'}, {}), '((1, number_observations))', True, 'import numpy as np\n'), ((31, 26, 31, 56), 'numpy.size', 'np.size', (), '', True, 'import numpy as np\n'), ((47, 26, 47, 58), 'numpy.size', 'np.size', (), '', True, 'import numpy as np\n'), ((48, 18, 48, 54), 'numpy.zeros', 'np.zeros', ({(48, 27, 48, 53): '(observation_dimension, 1)'}, {}), '((observation_dimension, 1))', True, 'import numpy as np\n'), ((80, 28, 80, 60), 'numpy.size', 'np.size', (), '', True, 'import numpy as np\n'), ((93, 4, 93, 27), 'matplotlib.pyplot.xlabel', 'plt.xlabel', ({(93, 15, 93, 26): '"""Iteration"""'}, {}), "('Iteration')", True, 'import matplotlib.pyplot as plt\n'), ((94, 4, 94, 22), 'matplotlib.pyplot.ylabel', 'plt.ylabel', ({(94, 15, 94, 21): '"""Cost"""'}, {}), "('Cost')", True, 'import matplotlib.pyplot as plt\n'), ((95, 4, 95, 35), 'matplotlib.pyplot.title', 'plt.title', ({(95, 14, 95, 34): '"""Cost per Iteration"""'}, {}), "('Cost per Iteration')", True, 'import matplotlib.pyplot as plt\n'), ((96, 4, 96, 19), 'matplotlib.pyplot.ylim', 'plt.ylim', ({(96, 13, 96, 14): '(0)', (96, 15, 96, 18): '(720)'}, {}), '(0, 720)', True, 'import matplotlib.pyplot as plt\n'), ((97, 4, 97, 16), 'matplotlib.pyplot.legend', 'plt.legend', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((98, 4, 98, 14), 'matplotlib.pyplot.show', 'plt.show', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((101, 4, 101, 75), 'matplotlib.pyplot.scatter', 'plt.scatter', (), '', True, 'import matplotlib.pyplot as plt\n'), ((103, 4, 103, 41), 'matplotlib.pyplot.xlabel', 'plt.xlabel', ({(103, 15, 103, 40): '"""(normalised) Horsepower"""'}, {}), "('(normalised) Horsepower')", True, 'import matplotlib.pyplot as plt\n'), ((104, 4, 104, 21), 'matplotlib.pyplot.ylabel', 'plt.ylabel', ({(104, 15, 104, 20): '"""MPG"""'}, {}), "('MPG')", True, 'import matplotlib.pyplot as plt\n'), ((105, 4, 105, 47), 'matplotlib.pyplot.title', 'plt.title', ({(105, 14, 105, 46): '"""MPG vs (normalised) Horsepower"""'}, {}), "('MPG vs (normalised) Horsepower')", True, 'import matplotlib.pyplot as plt\n'), ((106, 4, 106, 16), 'matplotlib.pyplot.legend', 'plt.legend', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((107, 4, 107, 14), 'matplotlib.pyplot.show', 'plt.show', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((112, 34, 112, 61), 'numpy.transpose', 
'np.transpose', ({(112, 47, 112, 60): 'train_dataset'}, {}), '(train_dataset)', True, 'import numpy as np\n'), ((33, 9, 33, 51), 'numpy.errstate', 'np.errstate', (), '', True, 'import numpy as np\n'), ((20, 13, 20, 55), 'numpy.errstate', 'np.errstate', (), '', True, 'import numpy as np\n'), ((52, 13, 52, 55), 'numpy.errstate', 'np.errstate', (), '', True, 'import numpy as np\n'), ((92, 17, 92, 85), 'numpy.arange', 'np.arange', (), '', True, 'import numpy as np\n'), ((102, 33, 102, 80), 'numpy.array', 'np.array', ({(102, 42, 102, 79): '(train_dataset[(0), :] * weights + offset_b)'}, {}), '(train_dataset[(0), :] * weights + offset_b)', True, 'import numpy as np\n'), ((57, 23, 57, 57), 'numpy.full', 'np.full', ({(57, 31, 57, 48): 'sum_weights.shape', (57, 50, 57, 56): 'np.inf'}, {}), '(sum_weights.shape, np.inf)', True, 'import numpy as np\n')] |
mdholbrook/heart_rate_sentinel_server | unit_tests/test_hr_calculations.py | 927b59ad6d2078bd6e3491014fdebbc610d25e63 | import pytest
from functions.hr_calculations import *
from datetime import datetime  # needed by test_get_date_as_numeric; imported explicitly instead of relying on the star import above
@pytest.mark.parametrize("candidate, database, expected", [
('jack', [{'patient_id': 'jump'}, {'patient_id': 'jack'}], 1),
('jungle', [{'patient_id': 'jungle'}, {'patient_id': 'jack'}], 0),
('bo', [{'patient_id': 'james'}, {'patient_id': 'boo'},
{'patient_id': 'bo'}], 2)])
def test_find_id_ind(candidate, database, expected):
# Run the test
assert find_id_ind(candidate, database) == expected
@pytest.mark.parametrize("candidate, database, expected", [
('jump', [{'patient_id': 'jump', 'heart_rate': [50, 60, 70]},
{'patient_id': 'jack', 'heart_rate': [120, 112, 131]}],
[50, 60, 70]),
('jack', [{'patient_id': 'jump', 'heart_rate': [50, 60, 70]},
{'patient_id': 'jack', 'heart_rate': [120, 112, 131]}],
[120, 112, 131])
])
def test_get_heart_rates(candidate, database, expected):
# Run the test
assert get_heart_rates(candidate, database) == expected
@pytest.mark.parametrize("candidate, expected", [
([50, 60, 70], 60),
([50, 55, 56], 53.66),
([0, 50, 100], 50),
])
def test_average_heart_rate(candidate, expected):
# Run the test
assert pytest.approx(average_heart_rate(candidate), 1e-2) == expected
@pytest.mark.parametrize("candidate", [
'2018-03-09 11:00:36.372339',
'2017-10-19 15:11:36.167854',
])
def test_get_date_as_numeric(candidate):
# Run the test
result = pytest.approx(get_date_as_numeric(candidate), rel=1e-6)
# Generate expected result
expression = "%Y-%m-%d %H:%M:%S.%f"
expected = datetime.strptime(candidate, expression).timestamp()
assert result == expected
@pytest.mark.parametrize("candidate, expected", [
('Mark', ['2018-03-09 11:00:36.372339', '2017-10-19 15:11:36.167854']),
('Matt', ['2018-03-10 11:00:32.372339', '2017-10-19 35:11:36.167854'])
])
def test_get_times(candidate, expected):
database = [{'patient_id': 'Mark', 'time':
['2018-03-09 11:00:36.372339', '2017-10-19 15:11:36.167854']},
{'patient_id': 'Matt', 'time':
['2018-03-10 11:00:32.372339', '2017-10-19 35:11:36.167854']}]
# Run the test
assert get_times(candidate, database) == expected
@pytest.mark.parametrize("ref_time, times, hr, expected", [
('2018-03-09 11:00:36.372339',
['2018-03-09 11:00:34.372339',
'2018-03-09 11:00:35.372339',
'2018-03-09 11:00:36.872339'], [0, 0, 0], [0]),
('2018-03-09 11:00:36.372339',
['2018-03-09 11:00:35.372339',
'2018-03-09 11:00:36.372359',
'2018-03-09 11:00:37.372339'], [0, 0, 0], [0, 0])])
def test_hr_after_time(ref_time, times, hr, expected):
# Run the test
assert hr_after_time(ref_time, times, hr) == expected
@pytest.mark.parametrize("times, ref_time, expected", [
([0, 1, 2, 3, 4], 3, 4),
([0, 1, 2, 3, 4], 2.5, 3),
([0, 1, 2, 3, 4], 1.5, 2)
])
def test_find_index_larger(ref_time, times, expected):
# Run the test
assert find_index_larger(times, ref_time) == expected
@pytest.mark.parametrize("times, ref_time, expected", [
([0, 1, 2, 3, 4], 3, True),
([0, 1, 2, 3, 4], 4, True),
([0, 1, 2, 3, 4], 4.5, False),
([0, 1, 2, 3, 4], 0, True)
])
def test_check_recent_timestamps(ref_time, times, expected):
# Run the test
assert check_recent_timestamps(times, ref_time) == expected
| [((5, 1, 9, 39), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(5, 25, 5, 56): '"""candidate, database, expected"""', (5, 58, 9, 38): "[('jack', [{'patient_id': 'jump'}, {'patient_id': 'jack'}], 1), ('jungle',\n [{'patient_id': 'jungle'}, {'patient_id': 'jack'}], 0), ('bo', [{\n 'patient_id': 'james'}, {'patient_id': 'boo'}, {'patient_id': 'bo'}], 2)]"}, {}), "('candidate, database, expected', [('jack', [{\n 'patient_id': 'jump'}, {'patient_id': 'jack'}], 1), ('jungle', [{\n 'patient_id': 'jungle'}, {'patient_id': 'jack'}], 0), ('bo', [{\n 'patient_id': 'james'}, {'patient_id': 'boo'}, {'patient_id': 'bo'}], 2)])", False, 'import pytest\n'), ((16, 1, 23, 7), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(16, 25, 16, 56): '"""candidate, database, expected"""', (16, 58, 23, 6): "[('jump', [{'patient_id': 'jump', 'heart_rate': [50, 60, 70]}, {\n 'patient_id': 'jack', 'heart_rate': [120, 112, 131]}], [50, 60, 70]), (\n 'jack', [{'patient_id': 'jump', 'heart_rate': [50, 60, 70]}, {\n 'patient_id': 'jack', 'heart_rate': [120, 112, 131]}], [120, 112, 131])]"}, {}), "('candidate, database, expected', [('jump', [{\n 'patient_id': 'jump', 'heart_rate': [50, 60, 70]}, {'patient_id':\n 'jack', 'heart_rate': [120, 112, 131]}], [50, 60, 70]), ('jack', [{\n 'patient_id': 'jump', 'heart_rate': [50, 60, 70]}, {'patient_id':\n 'jack', 'heart_rate': [120, 112, 131]}], [120, 112, 131])])", False, 'import pytest\n'), ((30, 1, 34, 6), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(30, 25, 30, 46): '"""candidate, expected"""', (30, 48, 34, 5): '[([50, 60, 70], 60), ([50, 55, 56], 53.66), ([0, 50, 100], 50)]'}, {}), "('candidate, expected', [([50, 60, 70], 60), ([50, \n 55, 56], 53.66), ([0, 50, 100], 50)])", False, 'import pytest\n'), ((41, 1, 44, 6), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(41, 25, 41, 36): '"""candidate"""', (41, 38, 44, 5): "['2018-03-09 11:00:36.372339', '2017-10-19 15:11:36.167854']"}, {}), "('candidate', ['2018-03-09 11:00:36.372339',\n '2017-10-19 15:11:36.167854'])", False, 'import pytest\n'), ((57, 1, 60, 6), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(57, 25, 57, 46): '"""candidate, expected"""', (57, 48, 60, 5): "[('Mark', ['2018-03-09 11:00:36.372339', '2017-10-19 15:11:36.167854']), (\n 'Matt', ['2018-03-10 11:00:32.372339', '2017-10-19 35:11:36.167854'])]"}, {}), "('candidate, expected', [('Mark', [\n '2018-03-09 11:00:36.372339', '2017-10-19 15:11:36.167854']), ('Matt',\n ['2018-03-10 11:00:32.372339', '2017-10-19 35:11:36.167854'])])", False, 'import pytest\n'), ((71, 1, 79, 57), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(71, 25, 71, 56): '"""ref_time, times, hr, expected"""', (71, 58, 79, 56): "[('2018-03-09 11:00:36.372339', ['2018-03-09 11:00:34.372339',\n '2018-03-09 11:00:35.372339', '2018-03-09 11:00:36.872339'], [0, 0, 0],\n [0]), ('2018-03-09 11:00:36.372339', ['2018-03-09 11:00:35.372339',\n '2018-03-09 11:00:36.372359', '2018-03-09 11:00:37.372339'], [0, 0, 0],\n [0, 0])]"}, {}), "('ref_time, times, hr, expected', [(\n '2018-03-09 11:00:36.372339', ['2018-03-09 11:00:34.372339',\n '2018-03-09 11:00:35.372339', '2018-03-09 11:00:36.872339'], [0, 0, 0],\n [0]), ('2018-03-09 11:00:36.372339', ['2018-03-09 11:00:35.372339',\n '2018-03-09 11:00:36.372359', '2018-03-09 11:00:37.372339'], [0, 0, 0],\n [0, 0])])", False, 'import pytest\n'), ((86, 1, 90, 6), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(86, 25, 86, 52): '"""times, ref_time, expected"""', (86, 54, 90, 5): '[([0, 1, 2, 3, 4], 3, 
4), ([0, 1, 2, 3, 4], 2.5, 3), ([0, 1, 2, 3, 4], 1.5, 2)]'}, {}), "('times, ref_time, expected', [([0, 1, 2, 3, 4], 3, \n 4), ([0, 1, 2, 3, 4], 2.5, 3), ([0, 1, 2, 3, 4], 1.5, 2)])", False, 'import pytest\n'), ((97, 1, 102, 6), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(97, 25, 97, 52): '"""times, ref_time, expected"""', (97, 54, 102, 5): '[([0, 1, 2, 3, 4], 3, True), ([0, 1, 2, 3, 4], 4, True), ([0, 1, 2, 3, 4], \n 4.5, False), ([0, 1, 2, 3, 4], 0, True)]'}, {}), "('times, ref_time, expected', [([0, 1, 2, 3, 4], 3, \n True), ([0, 1, 2, 3, 4], 4, True), ([0, 1, 2, 3, 4], 4.5, False), ([0, \n 1, 2, 3, 4], 0, True)])", False, 'import pytest\n')] |
919bot/Tessa | selfdrive/boardd/tests/test_boardd_api.py | 9b48ff9020e8fb6992fc78271f2720fd19e01093 | import random
import numpy as np
import selfdrive.boardd.tests.boardd_old as boardd_old
import selfdrive.boardd.boardd as boardd
from common.realtime import sec_since_boot
from cereal import log
import unittest
def generate_random_can_data_list():
can_list = []
cnt = random.randint(1, 64)
for j in range(cnt):
can_data = np.random.bytes(random.randint(1, 8))
can_list.append([random.randint(0, 128), random.randint(0, 128), can_data, random.randint(0, 128)])
return can_list, cnt
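# Editor's note (added comment): each generated entry is a 4-item list, presumably in the
# order expected by can_list_to_can_capnp: [address, busTime, dat, src] -- the same fields
# that are compared attribute-by-attribute in the tests below.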
class TestBoarddApiMethods(unittest.TestCase):
def test_correctness(self):
for i in range(1000):
can_list, _ = generate_random_can_data_list()
# Sendcan
# Old API
m_old = boardd_old.can_list_to_can_capnp(can_list, 'sendcan').to_bytes()
# new API
m = boardd.can_list_to_can_capnp(can_list, 'sendcan')
ev_old = log.Event.from_bytes(m_old)
ev = log.Event.from_bytes(m)
self.assertEqual(ev_old.which(), ev.which())
self.assertEqual(len(ev.sendcan), len(ev_old.sendcan))
for i in range(len(ev.sendcan)):
attrs = ['address', 'busTime', 'dat', 'src']
for attr in attrs:
self.assertEqual(getattr(ev.sendcan[i], attr, 'new'), getattr(ev_old.sendcan[i], attr, 'old'))
# Can
m_old = boardd_old.can_list_to_can_capnp(can_list, 'can').to_bytes()
# new API
m = boardd.can_list_to_can_capnp(can_list, 'can')
ev_old = log.Event.from_bytes(m_old)
ev = log.Event.from_bytes(m)
self.assertEqual(ev_old.which(), ev.which())
self.assertEqual(len(ev.can), len(ev_old.can))
for i in range(len(ev.can)):
attrs = ['address', 'busTime', 'dat', 'src']
for attr in attrs:
self.assertEqual(getattr(ev.can[i], attr, 'new'), getattr(ev_old.can[i], attr, 'old'))
def test_performance(self):
can_list, cnt = generate_random_can_data_list()
recursions = 1000
n1 = sec_since_boot()
for i in range(recursions):
boardd_old.can_list_to_can_capnp(can_list, 'sendcan').to_bytes()
n2 = sec_since_boot()
elapsed_old = n2 - n1
# print('Old API, elapsed time: {} secs'.format(elapsed_old))
n1 = sec_since_boot()
for i in range(recursions):
boardd.can_list_to_can_capnp(can_list)
n2 = sec_since_boot()
elapsed_new = n2 - n1
# print('New API, elapsed time: {} secs'.format(elapsed_new))
self.assertTrue(elapsed_new < elapsed_old / 2)
if __name__ == '__main__':
unittest.main()
| [((14, 8, 14, 29), 'random.randint', 'random.randint', ({(14, 23, 14, 24): '1', (14, 26, 14, 28): '64'}, {}), '(1, 64)', False, 'import random\n'), ((77, 4, 77, 19), 'unittest.main', 'unittest.main', ({}, {}), '()', False, 'import unittest\n'), ((60, 9, 60, 25), 'common.realtime.sec_since_boot', 'sec_since_boot', ({}, {}), '()', False, 'from common.realtime import sec_since_boot\n'), ((63, 9, 63, 25), 'common.realtime.sec_since_boot', 'sec_since_boot', ({}, {}), '()', False, 'from common.realtime import sec_since_boot\n'), ((67, 9, 67, 25), 'common.realtime.sec_since_boot', 'sec_since_boot', ({}, {}), '()', False, 'from common.realtime import sec_since_boot\n'), ((70, 9, 70, 25), 'common.realtime.sec_since_boot', 'sec_since_boot', ({}, {}), '()', False, 'from common.realtime import sec_since_boot\n'), ((16, 31, 16, 51), 'random.randint', 'random.randint', ({(16, 46, 16, 47): '1', (16, 49, 16, 50): '8'}, {}), '(1, 8)', False, 'import random\n'), ((30, 10, 30, 59), 'selfdrive.boardd.boardd.can_list_to_can_capnp', 'boardd.can_list_to_can_capnp', ({(30, 39, 30, 47): 'can_list', (30, 49, 30, 58): '"""sendcan"""'}, {}), "(can_list, 'sendcan')", True, 'import selfdrive.boardd.boardd as boardd\n'), ((32, 15, 32, 42), 'cereal.log.Event.from_bytes', 'log.Event.from_bytes', ({(32, 36, 32, 41): 'm_old'}, {}), '(m_old)', False, 'from cereal import log\n'), ((33, 11, 33, 34), 'cereal.log.Event.from_bytes', 'log.Event.from_bytes', ({(33, 32, 33, 33): 'm'}, {}), '(m)', False, 'from cereal import log\n'), ((45, 10, 45, 55), 'selfdrive.boardd.boardd.can_list_to_can_capnp', 'boardd.can_list_to_can_capnp', ({(45, 39, 45, 47): 'can_list', (45, 49, 45, 54): '"""can"""'}, {}), "(can_list, 'can')", True, 'import selfdrive.boardd.boardd as boardd\n'), ((47, 15, 47, 42), 'cereal.log.Event.from_bytes', 'log.Event.from_bytes', ({(47, 36, 47, 41): 'm_old'}, {}), '(m_old)', False, 'from cereal import log\n'), ((48, 11, 48, 34), 'cereal.log.Event.from_bytes', 'log.Event.from_bytes', ({(48, 32, 48, 33): 'm'}, {}), '(m)', False, 'from cereal import log\n'), ((69, 6, 69, 44), 'selfdrive.boardd.boardd.can_list_to_can_capnp', 'boardd.can_list_to_can_capnp', ({(69, 35, 69, 43): 'can_list'}, {}), '(can_list)', True, 'import selfdrive.boardd.boardd as boardd\n'), ((17, 21, 17, 43), 'random.randint', 'random.randint', ({(17, 36, 17, 37): '(0)', (17, 39, 17, 42): '(128)'}, {}), '(0, 128)', False, 'import random\n'), ((17, 45, 17, 67), 'random.randint', 'random.randint', ({(17, 60, 17, 61): '(0)', (17, 63, 17, 66): '(128)'}, {}), '(0, 128)', False, 'import random\n'), ((17, 79, 17, 101), 'random.randint', 'random.randint', ({(17, 94, 17, 95): '(0)', (17, 97, 17, 100): '(128)'}, {}), '(0, 128)', False, 'import random\n'), ((28, 14, 28, 67), 'selfdrive.boardd.tests.boardd_old.can_list_to_can_capnp', 'boardd_old.can_list_to_can_capnp', ({(28, 47, 28, 55): 'can_list', (28, 57, 28, 66): '"""sendcan"""'}, {}), "(can_list, 'sendcan')", True, 'import selfdrive.boardd.tests.boardd_old as boardd_old\n'), ((43, 14, 43, 63), 'selfdrive.boardd.tests.boardd_old.can_list_to_can_capnp', 'boardd_old.can_list_to_can_capnp', ({(43, 47, 43, 55): 'can_list', (43, 57, 43, 62): '"""can"""'}, {}), "(can_list, 'can')", True, 'import selfdrive.boardd.tests.boardd_old as boardd_old\n'), ((62, 6, 62, 59), 'selfdrive.boardd.tests.boardd_old.can_list_to_can_capnp', 'boardd_old.can_list_to_can_capnp', ({(62, 39, 62, 47): 'can_list', (62, 49, 62, 58): '"""sendcan"""'}, {}), "(can_list, 'sendcan')", True, 'import selfdrive.boardd.tests.boardd_old as 
boardd_old\n')] |
zekna/py-types | py_types/static/parse.py | ec39da1277986f0ea44830dfb0da9d906deb13e1 | import ast
import inspect
import sys
import argparse
from ..runtime.asserts import typecheck
@typecheck
def pretty_print_defs(defs: list) -> None:
for d in defs:
print("Function definition for {}".format(d["name"]))
print("Arguments:")
for arg in d["args"]:
arg_type = "untyped"
if arg[2]:
arg_type = arg[2].id
print("\t{} : type {}".format(arg[1], arg_type))
if len(d["args"]) == 0:
print("\tNo arguments.")
return_type = None
if d["return"]:
return_type = d["return"].id
print("Return type: {}".format(return_type))
print("")
@typecheck
def parse(filename: str) -> list:
"""Parses and does basic analysis of functions declared at the top level of a file."""
with open(filename, "r") as file_to_parse:
a = file_to_parse.read()
file_ast = ast.parse(a)
# initial pass -- get all function definitions, their names, args, and annotations
@typecheck
def get_name_annotations(block) -> dict:
if not isinstance(block, ast.FunctionDef):
return
return_annotation = block.returns
arg_annotations = []
for i, arg in enumerate(block.args.args):
arg_annotations.append((i, arg.arg, arg.annotation))
fn_name = block.name
annotations = {
"name": fn_name,
"return": return_annotation,
"args": arg_annotations
}
return annotations
        definitions = [get_name_annotations(block) for block in file_ast.body
                       if isinstance(block, ast.FunctionDef)]  # skip imports/assignments so no None entries reach pretty_print_defs
pretty_print_defs(definitions)
# second pass -- find all expressions, double check origins of any arguments passed to any function in definitions
def depth_first_traversal(ast_tree, filter_type, results: list) -> ast.Module:
pass
return definitions
if __name__ == "__main__":
parse("static/example_parse_me.py")
| [((35, 19, 35, 31), 'ast.parse', 'ast.parse', ({(35, 29, 35, 30): 'a'}, {}), '(a)', False, 'import ast\n')] |
pmaccamp/django-tastypie-swagger | example/example/urls.py | d51ef3ea8e33791617edba8ed55a1be1f16e4ccc | from django.conf.urls import include, url
from django.contrib import admin
from demo.apis import api
urlpatterns = [
url(r'^api/', include(api.urls)),
url(r'^api/doc/', include(('tastypie_swagger.urls', 'tastypie_swagger'),
namespace='demo_api_swagger'),
kwargs={
"tastypie_api_module":"demo.apis.api",
"namespace":"demo_api_swagger",
"version": "0.1"}
),
url(r'^admin/', admin.site.urls),
]
| [((14, 4, 14, 36), 'django.conf.urls.url', 'url', ({(14, 8, 14, 18): '"""^admin/"""', (14, 20, 14, 35): 'admin.site.urls'}, {}), "('^admin/', admin.site.urls)", False, 'from django.conf.urls import include, url\n'), ((6, 18, 6, 35), 'django.conf.urls.include', 'include', ({(6, 26, 6, 34): 'api.urls'}, {}), '(api.urls)', False, 'from django.conf.urls import include, url\n'), ((7, 22, 8, 59), 'django.conf.urls.include', 'include', (), '', False, 'from django.conf.urls import include, url\n')] |
savor007/scrapy_framework | scrapy_framework/midwares/download_midware.py | 9f1266eb2d4bb7e181d1c5352b05298e77040980 | from scrapy_framework.html.request import Request
from scrapy_framework.html.response import Response
import random
def get_ua():
first_num=random.randint(55,69)
third_num=random.randint(0,3200)
forth_num=random.randint(0, 140)
os_type = [
'(Windows NT 6.1; WOW64)', '(Windows NT 10.0; WOW64)', '(X11; Linux x86_64)',
'(Macintosh; Intel Mac OS X 10_12_6)'
]
chrome_version = 'Chrome/{}.0.{}.{}'.format(first_num, third_num, forth_num)
ua = ' '.join(['Mozilla/5.0', random.choice(os_type), 'AppleWebKit/537.36',
'(KHTML, like Gecko)', chrome_version, 'Safari/537.36']
)
return ua
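# Editor's note: get_ua() returns a randomised desktop Chrome user agent, e.g.
#   "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.2214.93 Safari/537.36"
# with the major version drawn from 55-69 and build/patch numbers from the ranges above.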
class DownloadMidware(object):
def process_request(self, request):
        if request.headers is None:
request.headers=dict()
request.headers["User-Agent"]=get_ua()
return request
def process_response(self, response):
return response | [((8, 14, 8, 35), 'random.randint', 'random.randint', ({(8, 29, 8, 31): '55', (8, 32, 8, 34): '69'}, {}), '(55, 69)', False, 'import random\n'), ((9, 14, 9, 36), 'random.randint', 'random.randint', ({(9, 29, 9, 30): '0', (9, 31, 9, 35): '3200'}, {}), '(0, 3200)', False, 'import random\n'), ((10, 14, 10, 36), 'random.randint', 'random.randint', ({(10, 29, 10, 30): '0', (10, 32, 10, 35): '140'}, {}), '(0, 140)', False, 'import random\n'), ((18, 34, 18, 56), 'random.choice', 'random.choice', ({(18, 48, 18, 55): 'os_type'}, {}), '(os_type)', False, 'import random\n')] |
bytepl/tracardi | tracardi/process_engine/action/v1/pro/scheduler/plugin.py | e8fa4684fa6bd3d05165fe48aa925fc6c1e74923 | from pydantic import BaseModel
from tracardi.domain.entity import Entity
from tracardi.domain.scheduler_config import SchedulerConfig
from tracardi.domain.resource import ResourceCredentials
from tracardi.service.storage.driver import storage
from tracardi.service.plugin.runner import ActionRunner
from tracardi.service.plugin.domain.register import Plugin, Spec, MetaData, Form, FormGroup, FormField, FormComponent
from tracardi.service.plugin.domain.result import Result
class Configuration(BaseModel):
source: Entity
event_type: str
properties: str = "{}"
postpone: str
def validate(config: dict) -> Configuration:
return Configuration(**config)
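# Editor's sketch (placeholder values): the shape of a config dict accepted by validate(),
# mirroring the defaults declared in register() further down.
#
#   validate({
#       "source": {"id": "<resource id>"},
#       "event_type": "checkout-started",
#       "properties": "{}",
#       "postpone": "+1m",
#   })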
class SchedulerPlugin(ActionRunner):
@staticmethod
async def build(**kwargs) -> 'SchedulerPlugin':
config = validate(kwargs)
resource = await storage.driver.resource.load(config.source.id)
plugin = SchedulerPlugin(config, resource.credentials)
return plugin
def __init__(self, config: Configuration, credentials: ResourceCredentials):
self.config = config
self.credentials = credentials.get_credentials(
self,
output=SchedulerConfig) # type: SchedulerConfig
async def run(self, payload):
run_in_background = True
if not run_in_background:
return Result(port="response", value=None)
else:
return Result(port="response", value=None)
def register() -> Plugin:
return Plugin(
start=False,
spec=Spec(
module='tracardi.process_engine.action.v1.pro.scheduler.plugin',
className='SchedulerPlugin',
inputs=["payload"],
outputs=['response', 'error'],
version='0.6.2',
license="MIT",
author="Risto Kowaczewski",
init= {
"source": {
"id": ""
},
"event_type": "",
"properties": "{}",
"postpone": "+1m"
}
),
metadata=MetaData(
name='Schedule event',
desc='This plugin schedules events',
icon='calendar',
group=["Time"],
tags=["Pro", "Scheduler"],
pro=True,
)
)
| [((28, 25, 28, 71), 'tracardi.service.storage.driver.storage.driver.resource.load', 'storage.driver.resource.load', ({(28, 54, 28, 70): 'config.source.id'}, {}), '(config.source.id)', False, 'from tracardi.service.storage.driver import storage\n'), ((43, 19, 43, 54), 'tracardi.service.plugin.domain.result.Result', 'Result', (), '', False, 'from tracardi.service.plugin.domain.result import Result\n'), ((45, 19, 45, 54), 'tracardi.service.plugin.domain.result.Result', 'Result', (), '', False, 'from tracardi.service.plugin.domain.result import Result\n'), ((51, 13, 67, 9), 'tracardi.service.plugin.domain.register.Spec', 'Spec', (), '', False, 'from tracardi.service.plugin.domain.register import Plugin, Spec, MetaData, Form, FormGroup, FormField, FormComponent\n'), ((68, 17, 75, 9), 'tracardi.service.plugin.domain.register.MetaData', 'MetaData', (), '', False, 'from tracardi.service.plugin.domain.register import Plugin, Spec, MetaData, Form, FormGroup, FormField, FormComponent\n')] |
alvarobartt/covid-daily | tests/test_covid_daily.py | cb4506a007ac206e85409a13281028f6f82441a6 | # Copyright 2020 Alvaro Bartolome, alvarobartt @ GitHub
# See LICENSE for details.
import pytest
import covid_daily
def test_overview():
params = [
{
'as_json': True
},
{
'as_json': False
}
]
for param in params:
covid_daily.overview(as_json=param['as_json'])
def test_data():
data = covid_daily.data(
country='france',
chart='graph-deaths-daily',
as_json=False
)
print(data.tail())
if __name__ == "__main__":
test_overview()
test_data()
| [((24, 11, 28, 5), 'covid_daily.data', 'covid_daily.data', (), '', False, 'import covid_daily\n'), ((20, 8, 20, 54), 'covid_daily.overview', 'covid_daily.overview', (), '', False, 'import covid_daily\n')] |
BryanWhitehurst/HPCCEA | 2021/HANFS/fence-agents/fence/agents/zvm/fence_zvmip.py | 54b7e7355b67ba3fdce2e28cc1b0e3b29d2bdefa | #!@PYTHON@ -tt
import sys
import atexit
import socket
import struct
import logging
sys.path.append("@FENCEAGENTSLIBDIR@")
from fencing import *
from fencing import fail, fail_usage, run_delay, EC_LOGIN_DENIED, EC_TIMED_OUT
#BEGIN_VERSION_GENERATION
RELEASE_VERSION=""
REDHAT_COPYRIGHT=""
BUILD_DATE=""
#END_VERSION_GENERATION
INT4 = 4
def open_socket(options):
try:
if "--inet6-only" in options:
protocol = socket.AF_INET6
elif "--inet4-only" in options:
protocol = socket.AF_INET
else:
protocol = 0
(_, _, _, _, addr) = socket.getaddrinfo( \
options["--ip"], options["--ipport"], protocol,
0, socket.IPPROTO_TCP, socket.AI_PASSIVE
)[0]
except socket.gaierror:
fail(EC_LOGIN_DENIED)
conn = socket.socket()
conn.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
conn.settimeout(float(options["--shell-timeout"]))
try:
conn.connect(addr)
except socket.error:
fail(EC_LOGIN_DENIED)
return conn
def smapi_pack_string(string):
return struct.pack("!i%ds" % (len(string)), len(string), string)
def prepare_smapi_command(options, smapi_function, additional_args):
packet_size = 3*INT4 + len(smapi_function) + len(options["--username"]) + len(options["--password"])
for arg in additional_args:
packet_size += INT4 + len(arg)
command = struct.pack("!i", packet_size)
command += smapi_pack_string(smapi_function)
command += smapi_pack_string(options["--username"])
command += smapi_pack_string(options["--password"])
for arg in additional_args:
command += smapi_pack_string(arg)
return command
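# Editor's note (added comment): prepare_smapi_command() serialises a big-endian SMAPI
# request of the form
#   <4-byte total length><len+function name><len+userid><len+password>[<len+arg>...]
# e.g. prepare_smapi_command(options, "Image_Activate", [options["--plug"]]) as used by
# set_power_status() below.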
def get_power_status(conn, options):
del conn
if options.get("--original-action", None) == "monitor":
(return_code, reason_code, images_active) = \
get_list_of_images(options, "Check_Authentication", None)
logging.debug("Check_Authenticate (%d,%d)", return_code, reason_code)
if return_code == 0:
return {}
else:
fail(EC_LOGIN_DENIED)
if options["--action"] == "list":
# '*' = list all active images
options["--plug"] = "*"
(return_code, reason_code, images_active) = \
get_list_of_images(options, "Image_Status_Query", options["--plug"])
logging.debug("Image_Status_Query results are (%d,%d)", return_code, reason_code)
if not options["--action"] == "list":
if (return_code == 0) and (reason_code == 0):
return "on"
elif (return_code == 0) and (reason_code == 12):
# We are running always with --missing-as-off because we can not check if image
# is defined or not (look at rhbz#1188750)
return "off"
else:
return "unknown"
else:
(return_code, reason_code, images_defined) = \
get_list_of_images(options, "Image_Name_Query_DM", options["--username"])
logging.debug("Image_Name_Query_DM results are (%d,%d)", return_code, reason_code)
return dict([(i, ("", "on" if i in images_active else "off")) for i in images_defined])
def set_power_status(conn, options):
conn = open_socket(options)
packet = None
if options["--action"] == "on":
packet = prepare_smapi_command(options, "Image_Activate", [options["--plug"]])
elif options["--action"] == "off":
packet = prepare_smapi_command(options, "Image_Deactivate", [options["--plug"], "IMMED"])
conn.send(packet)
request_id = struct.unpack("!i", conn.recv(INT4))[0]
(output_len, request_id, return_code, reason_code) = struct.unpack("!iiii", conn.recv(INT4 * 4))
logging.debug("Image_(De)Activate results are (%d,%d)", return_code, reason_code)
conn.close()
return
def get_list_of_images(options, command, data_as_plug):
conn = open_socket(options)
if data_as_plug is None:
packet = prepare_smapi_command(options, command, [])
else:
packet = prepare_smapi_command(options, command, [data_as_plug])
conn.send(packet)
request_id = struct.unpack("!i", conn.recv(INT4))[0]
(output_len, request_id, return_code, reason_code) = struct.unpack("!iiii", conn.recv(INT4 * 4))
images = set()
if output_len > 3*INT4:
array_len = struct.unpack("!i", conn.recv(INT4))[0]
data = ""
while True:
read_data = conn.recv(1024, socket.MSG_WAITALL)
data += read_data
if array_len == len(data):
break
elif not read_data:
logging.error("Failed: Not enough data read from socket")
fail(EC_TIMED_OUT)
parsed_len = 0
while parsed_len < array_len:
string_len = struct.unpack("!i", data[parsed_len:parsed_len+INT4])[0]
parsed_len += INT4
image_name = struct.unpack("!%ds" % (string_len), data[parsed_len:parsed_len+string_len])[0]
parsed_len += string_len
images.add(image_name)
conn.close()
return (return_code, reason_code, images)
def main():
device_opt = ["ipaddr", "login", "passwd", "port", "method", "missing_as_off"]
atexit.register(atexit_handler)
all_opt["ipport"]["default"] = "44444"
all_opt["shell_timeout"]["default"] = "5.0"
all_opt["missing_as_off"]["default"] = "1"
options = check_input(device_opt, process_input(device_opt), other_conditions=True)
if len(options.get("--plug", "")) > 8:
fail_usage("Failed: Name of image can not be longer than 8 characters")
if options["--action"] == "validate-all":
sys.exit(0)
docs = {}
docs["shortdesc"] = "Fence agent for use with z/VM Virtual Machines"
docs["longdesc"] = """The fence_zvm agent is intended to be used with with z/VM SMAPI service via TCP/IP
To use this agent the z/VM SMAPI service needs to be configured to allow the virtual machine running this agent to connect to it and issue
the image_recycle operation. This involves updating the VSMWORK1 AUTHLIST VMSYS:VSMWORK1. file. The entry should look something similar to
this:
Column 1 Column 66 Column 131
| | |
V V V
XXXXXXXX ALL IMAGE_OPERATIONS
Where XXXXXXXX is the name of the virtual machine used in the authuser field of the request.
"""
docs["vendorurl"] = "http://www.ibm.com"
show_docs(options, docs)
run_delay(options)
result = fence_action(None, options, set_power_status, get_power_status, get_power_status)
sys.exit(result)
if __name__ == "__main__":
main()
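# Editor's usage sketch (placeholder values): the agent is normally driven by the cluster's
# fencing framework, but the options above map to the usual CLI flags, e.g.
#   fence_zvmip --ip=<smapi host> --ipport=44444 --username=<authuser> \
#               --password=<secret> --plug=<image name> --action=off
# --plug is the z/VM image name and, per the check in main(), may be at most 8 characters.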
| [((8, 0, 8, 38), 'sys.path.append', 'sys.path.append', ({(8, 16, 8, 37): '"""@FENCEAGENTSLIBDIR@"""'}, {}), "('@FENCEAGENTSLIBDIR@')", False, 'import sys\n'), ((35, 8, 35, 23), 'socket.socket', 'socket.socket', ({}, {}), '()', False, 'import socket\n'), ((53, 11, 53, 41), 'struct.pack', 'struct.pack', ({(53, 23, 53, 27): '"""!i"""', (53, 29, 53, 40): 'packet_size'}, {}), "('!i', packet_size)", False, 'import struct\n'), ((81, 1, 81, 82), 'logging.debug', 'logging.debug', ({(81, 15, 81, 55): '"""Image_Status_Query results are (%d,%d)"""', (81, 57, 81, 68): 'return_code', (81, 70, 81, 81): 'reason_code'}, {}), "('Image_Status_Query results are (%d,%d)', return_code,\n reason_code)", False, 'import logging\n'), ((111, 1, 111, 82), 'logging.debug', 'logging.debug', ({(111, 15, 111, 55): '"""Image_(De)Activate results are (%d,%d)"""', (111, 57, 111, 68): 'return_code', (111, 70, 111, 81): 'reason_code'}, {}), "('Image_(De)Activate results are (%d,%d)', return_code,\n reason_code)", False, 'import logging\n'), ((157, 1, 157, 32), 'atexit.register', 'atexit.register', ({(157, 17, 157, 31): 'atexit_handler'}, {}), '(atexit_handler)', False, 'import atexit\n'), ((190, 1, 190, 19), 'fencing.run_delay', 'run_delay', ({(190, 11, 190, 18): 'options'}, {}), '(options)', False, 'from fencing import fail, fail_usage, run_delay, EC_LOGIN_DENIED, EC_TIMED_OUT\n'), ((192, 1, 192, 17), 'sys.exit', 'sys.exit', ({(192, 10, 192, 16): 'result'}, {}), '(result)', False, 'import sys\n'), ((69, 2, 69, 71), 'logging.debug', 'logging.debug', ({(69, 16, 69, 44): '"""Check_Authenticate (%d,%d)"""', (69, 46, 69, 57): 'return_code', (69, 59, 69, 70): 'reason_code'}, {}), "('Check_Authenticate (%d,%d)', return_code, reason_code)", False, 'import logging\n'), ((95, 2, 95, 84), 'logging.debug', 'logging.debug', ({(95, 16, 95, 57): '"""Image_Name_Query_DM results are (%d,%d)"""', (95, 59, 95, 70): 'return_code', (95, 72, 95, 83): 'reason_code'}, {}), "('Image_Name_Query_DM results are (%d,%d)', return_code,\n reason_code)", False, 'import logging\n'), ((165, 2, 165, 73), 'fencing.fail_usage', 'fail_usage', ({(165, 13, 165, 72): '"""Failed: Name of image can not be longer than 8 characters"""'}, {}), "('Failed: Name of image can not be longer than 8 characters')", False, 'from fencing import fail, fail_usage, run_delay, EC_LOGIN_DENIED, EC_TIMED_OUT\n'), ((168, 2, 168, 13), 'sys.exit', 'sys.exit', ({(168, 11, 168, 12): '(0)'}, {}), '(0)', False, 'import sys\n'), ((28, 23, 31, 5), 'socket.getaddrinfo', 'socket.getaddrinfo', ({(29, 4, 29, 19): "options['--ip']", (29, 21, 29, 40): "options['--ipport']", (29, 42, 29, 50): 'protocol', (30, 4, 30, 5): '(0)', (30, 7, 30, 25): 'socket.IPPROTO_TCP', (30, 27, 30, 44): 'socket.AI_PASSIVE'}, {}), "(options['--ip'], options['--ipport'], protocol, 0,\n socket.IPPROTO_TCP, socket.AI_PASSIVE)", False, 'import socket\n'), ((33, 2, 33, 23), 'fencing.fail', 'fail', ({(33, 7, 33, 22): 'EC_LOGIN_DENIED'}, {}), '(EC_LOGIN_DENIED)', False, 'from fencing import fail, fail_usage, run_delay, EC_LOGIN_DENIED, EC_TIMED_OUT\n'), ((41, 2, 41, 23), 'fencing.fail', 'fail', ({(41, 7, 41, 22): 'EC_LOGIN_DENIED'}, {}), '(EC_LOGIN_DENIED)', False, 'from fencing import fail, fail_usage, run_delay, EC_LOGIN_DENIED, EC_TIMED_OUT\n'), ((73, 3, 73, 24), 'fencing.fail', 'fail', ({(73, 8, 73, 23): 'EC_LOGIN_DENIED'}, {}), '(EC_LOGIN_DENIED)', False, 'from fencing import fail, fail_usage, run_delay, EC_LOGIN_DENIED, EC_TIMED_OUT\n'), ((145, 16, 145, 69), 'struct.unpack', 'struct.unpack', ({(145, 30, 145, 34): 
'"""!i"""', (145, 36, 145, 68): 'data[parsed_len:parsed_len + INT4]'}, {}), "('!i', data[parsed_len:parsed_len + INT4])", False, 'import struct\n'), ((147, 16, 147, 92), 'struct.unpack', 'struct.unpack', ({(147, 30, 147, 51): "('!%ds' % string_len)", (147, 53, 147, 91): 'data[parsed_len:parsed_len + string_len]'}, {}), "('!%ds' % string_len, data[parsed_len:parsed_len + string_len])", False, 'import struct\n'), ((140, 4, 140, 61), 'logging.error', 'logging.error', ({(140, 18, 140, 60): '"""Failed: Not enough data read from socket"""'}, {}), "('Failed: Not enough data read from socket')", False, 'import logging\n'), ((141, 4, 141, 22), 'fencing.fail', 'fail', ({(141, 9, 141, 21): 'EC_TIMED_OUT'}, {}), '(EC_TIMED_OUT)', False, 'from fencing import fail, fail_usage, run_delay, EC_LOGIN_DENIED, EC_TIMED_OUT\n')] |
feel-easy/myspider | 2.5.9/test_splash/test_splash/spiders/with_splash.py | dcc65032015d7dbd8bea78f846fd3cac7638c332 | # -*- coding: utf-8 -*-
import scrapy
from scrapy_splash import SplashRequest # request object provided by the scrapy_splash package
class WithSplashSpider(scrapy.Spider):
name = 'with_splash'
allowed_domains = ['baidu.com']
start_urls = ['https://www.baidu.com/s?wd=13161933309']
def start_requests(self):
yield SplashRequest(self.start_urls[0],
callback=self.parse_splash,
                            args={'wait': 10}, # maximum timeout, in seconds
                            endpoint='render.html') # fixed parameter used by the splash service
def parse_splash(self, response):
with open('with_splash.html', 'w') as f:
f.write(response.body.decode())
| [((11, 14, 14, 51), 'scrapy_splash.SplashRequest', 'SplashRequest', (), '', False, 'from scrapy_splash import SplashRequest\n')] |
iudaichi/iu_linebot | run.py | d3f5a7b0227b175963d51d62bcd5894366bde35c | from main import app
import os
import uvicorn
if __name__ == '__main__':
    port = int(os.getenv("PORT"))  # PORT must be set in the environment (e.g. by the hosting platform)
uvicorn.run(app, host="0.0.0.0", port=port, workers=1, reload=True)
| [((7, 4, 7, 71), 'uvicorn.run', 'uvicorn.run', (), '', False, 'import uvicorn\n'), ((6, 15, 6, 32), 'os.getenv', 'os.getenv', ({(6, 25, 6, 31): '"""PORT"""'}, {}), "('PORT')", False, 'import os\n')] |
DwijayDS/fastestimator | fastestimator/architecture/pytorch/unet.py | 9b288cb2bd870f971ec4cee09d0b3205e1316a94 | # Copyright 2019 The FastEstimator Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from typing import Tuple
import torch
import torch.nn as nn
from torch.nn import functional as F
from torch.nn.init import kaiming_normal_ as he_normal
class UNetEncoderBlock(nn.Module):
"""A UNet encoder block.
This class is intentionally not @traceable (models and layers are handled by a different process).
Args:
in_channels: How many channels enter the encoder.
out_channels: How many channels leave the encoder.
"""
def __init__(self, in_channels: int, out_channels: int) -> None:
super().__init__()
self.layers = nn.Sequential(nn.Conv2d(in_channels, out_channels, kernel_size=3, padding=1),
nn.ReLU(inplace=True),
nn.Conv2d(out_channels, out_channels, kernel_size=3, padding=1),
nn.ReLU(inplace=True))
for layer in self.layers:
if isinstance(layer, nn.Conv2d):
he_normal(layer.weight.data)
layer.bias.data.zero_()
def forward(self, x: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]:
out = self.layers(x)
return out, F.max_pool2d(out, 2)
class UNetDecoderBlock(nn.Module):
"""A UNet decoder block.
This class is intentionally not @traceable (models and layers are handled by a different process).
Args:
in_channels: How many channels enter the decoder.
mid_channels: How many channels are used for the decoder's intermediate layer.
out_channels: How many channels leave the decoder.
"""
def __init__(self, in_channels: int, mid_channels: int, out_channels: int) -> None:
super().__init__()
self.layers = nn.Sequential(nn.Conv2d(in_channels, mid_channels, 3, padding=1),
nn.ReLU(inplace=True),
nn.Conv2d(mid_channels, mid_channels, 3, padding=1),
nn.ReLU(inplace=True),
nn.Upsample(scale_factor=2, mode='bilinear', align_corners=True),
nn.Conv2d(mid_channels, out_channels, 3, padding=1),
nn.ReLU(inplace=True))
for layer in self.layers:
if isinstance(layer, nn.Conv2d):
he_normal(layer.weight.data)
layer.bias.data.zero_()
def forward(self, x: torch.Tensor) -> torch.Tensor:
return self.layers(x)
class UNet(nn.Module):
"""A standard UNet implementation in PyTorch.
This class is intentionally not @traceable (models and layers are handled by a different process).
Args:
input_size: The size of the input tensor (channels, height, width).
Raises:
ValueError: Length of `input_size` is not 3.
ValueError: `input_size`[1] or `input_size`[2] is not a multiple of 16.
"""
def __init__(self, input_size: Tuple[int, int, int] = (1, 128, 128)) -> None:
UNet._check_input_size(input_size)
super().__init__()
self.input_size = input_size
self.enc1 = UNetEncoderBlock(in_channels=input_size[0], out_channels=64)
self.enc2 = UNetEncoderBlock(in_channels=64, out_channels=128)
self.enc3 = UNetEncoderBlock(in_channels=128, out_channels=256)
self.enc4 = UNetEncoderBlock(in_channels=256, out_channels=512)
self.bottle_neck = UNetDecoderBlock(in_channels=512, mid_channels=1024, out_channels=512)
self.dec4 = UNetDecoderBlock(in_channels=1024, mid_channels=512, out_channels=256)
self.dec3 = UNetDecoderBlock(in_channels=512, mid_channels=256, out_channels=128)
self.dec2 = UNetDecoderBlock(in_channels=256, mid_channels=128, out_channels=64)
self.dec1 = nn.Sequential(nn.Conv2d(128, 64, 3, padding=1),
nn.ReLU(inplace=True),
nn.Conv2d(64, 64, 3, padding=1),
nn.ReLU(inplace=True),
nn.Conv2d(64, 1, 1),
nn.Sigmoid())
for layer in self.dec1:
if isinstance(layer, nn.Conv2d):
he_normal(layer.weight.data)
layer.bias.data.zero_()
def forward(self, x: torch.Tensor) -> torch.Tensor:
x1, x_e1 = self.enc1(x)
x2, x_e2 = self.enc2(x_e1)
x3, x_e3 = self.enc3(x_e2)
x4, x_e4 = self.enc4(x_e3)
x_bottle_neck = self.bottle_neck(x_e4)
x_d4 = self.dec4(torch.cat((x_bottle_neck, x4), 1))
x_d3 = self.dec3(torch.cat((x_d4, x3), 1))
x_d2 = self.dec2(torch.cat((x_d3, x2), 1))
x_out = self.dec1(torch.cat((x_d2, x1), 1))
return x_out
@staticmethod
def _check_input_size(input_size):
if len(input_size) != 3:
raise ValueError("Length of `input_size` is not 3 (channel, height, width)")
_, height, width = input_size
if height < 16 or not (height / 16.0).is_integer() or width < 16 or not (width / 16.0).is_integer():
raise ValueError("Both height and width of input_size need to be multiples of 16 (16, 32, 48...)")
| [((34, 36, 34, 98), 'torch.nn.Conv2d', 'nn.Conv2d', (), '', True, 'import torch.nn as nn\n'), ((35, 36, 35, 57), 'torch.nn.ReLU', 'nn.ReLU', (), '', True, 'import torch.nn as nn\n'), ((36, 36, 36, 99), 'torch.nn.Conv2d', 'nn.Conv2d', (), '', True, 'import torch.nn as nn\n'), ((37, 36, 37, 57), 'torch.nn.ReLU', 'nn.ReLU', (), '', True, 'import torch.nn as nn\n'), ((46, 20, 46, 40), 'torch.nn.functional.max_pool2d', 'F.max_pool2d', ({(46, 33, 46, 36): 'out', (46, 38, 46, 39): '(2)'}, {}), '(out, 2)', True, 'from torch.nn import functional as F\n'), ((61, 36, 61, 86), 'torch.nn.Conv2d', 'nn.Conv2d', (), '', True, 'import torch.nn as nn\n'), ((62, 36, 62, 57), 'torch.nn.ReLU', 'nn.ReLU', (), '', True, 'import torch.nn as nn\n'), ((63, 36, 63, 87), 'torch.nn.Conv2d', 'nn.Conv2d', (), '', True, 'import torch.nn as nn\n'), ((64, 36, 64, 57), 'torch.nn.ReLU', 'nn.ReLU', (), '', True, 'import torch.nn as nn\n'), ((65, 36, 65, 100), 'torch.nn.Upsample', 'nn.Upsample', (), '', True, 'import torch.nn as nn\n'), ((66, 36, 66, 87), 'torch.nn.Conv2d', 'nn.Conv2d', (), '', True, 'import torch.nn as nn\n'), ((67, 36, 67, 57), 'torch.nn.ReLU', 'nn.ReLU', (), '', True, 'import torch.nn as nn\n'), ((102, 34, 102, 66), 'torch.nn.Conv2d', 'nn.Conv2d', (), '', True, 'import torch.nn as nn\n'), ((103, 34, 103, 55), 'torch.nn.ReLU', 'nn.ReLU', (), '', True, 'import torch.nn as nn\n'), ((104, 34, 104, 65), 'torch.nn.Conv2d', 'nn.Conv2d', (), '', True, 'import torch.nn as nn\n'), ((105, 34, 105, 55), 'torch.nn.ReLU', 'nn.ReLU', (), '', True, 'import torch.nn as nn\n'), ((106, 34, 106, 53), 'torch.nn.Conv2d', 'nn.Conv2d', ({(106, 44, 106, 46): '64', (106, 48, 106, 49): '1', (106, 51, 106, 52): '1'}, {}), '(64, 1, 1)', True, 'import torch.nn as nn\n'), ((107, 34, 107, 46), 'torch.nn.Sigmoid', 'nn.Sigmoid', ({}, {}), '()', True, 'import torch.nn as nn\n'), ((121, 25, 121, 58), 'torch.cat', 'torch.cat', ({(121, 35, 121, 54): '(x_bottle_neck, x4)', (121, 56, 121, 57): '1'}, {}), '((x_bottle_neck, x4), 1)', False, 'import torch\n'), ((122, 25, 122, 49), 'torch.cat', 'torch.cat', ({(122, 35, 122, 45): '(x_d4, x3)', (122, 47, 122, 48): '1'}, {}), '((x_d4, x3), 1)', False, 'import torch\n'), ((123, 25, 123, 49), 'torch.cat', 'torch.cat', ({(123, 35, 123, 45): '(x_d3, x2)', (123, 47, 123, 48): '1'}, {}), '((x_d3, x2), 1)', False, 'import torch\n'), ((124, 26, 124, 50), 'torch.cat', 'torch.cat', ({(124, 36, 124, 46): '(x_d2, x1)', (124, 48, 124, 49): '1'}, {}), '((x_d2, x1), 1)', False, 'import torch\n'), ((41, 16, 41, 44), 'torch.nn.init.kaiming_normal_', 'he_normal', ({(41, 26, 41, 43): 'layer.weight.data'}, {}), '(layer.weight.data)', True, 'from torch.nn.init import kaiming_normal_ as he_normal\n'), ((71, 16, 71, 44), 'torch.nn.init.kaiming_normal_', 'he_normal', ({(71, 26, 71, 43): 'layer.weight.data'}, {}), '(layer.weight.data)', True, 'from torch.nn.init import kaiming_normal_ as he_normal\n'), ((111, 16, 111, 44), 'torch.nn.init.kaiming_normal_', 'he_normal', ({(111, 26, 111, 43): 'layer.weight.data'}, {}), '(layer.weight.data)', True, 'from torch.nn.init import kaiming_normal_ as he_normal\n')] |
Mandera/generalfile | generalfile/path.py | 5e476a1c075fa072c7e52e62455feeb78b9bb298 |
import pathlib
import os
from generallibrary import VerInfo, TreeDiagram, Recycle, classproperty, deco_cache
from generalfile.errors import InvalidCharacterError
from generalfile.path_lock import Path_ContextManager
from generalfile.path_operations import Path_Operations
from generalfile.path_strings import Path_Strings
from generalfile.optional_dependencies.path_spreadsheet import Path_Spreadsheet
from generalfile.optional_dependencies.path_text import Path_Text
from generalfile.optional_dependencies.path_cfg import Path_Cfg
from generalfile.optional_dependencies.path_pickle import Path_Pickle
class Path(TreeDiagram, Recycle, Path_ContextManager, Path_Operations, Path_Strings, Path_Spreadsheet, Path_Text, Path_Cfg, Path_Pickle):
""" Immutable cross-platform Path.
Built on pathlib and TreeDiagram.
Implements rules to ensure cross-platform compatibility.
Adds useful methods.
Todo: Binary extension. """
verInfo = VerInfo()
_path_delimiter = verInfo.pathDelimiter
Path = ...
_recycle_keys = {"path": lambda path: Path.scrub("" if path is None else str(path))}
_alternative_chars = {_path_delimiter: "/", ":": ":", ".": "."}
def __init__(self, path=None): # Don't have parent here because of Recycle
self.path = self.scrub(str_path="" if path is None else str(path))
self._path = pathlib.Path(self.path)
self._latest_listdir = set()
copy_node = NotImplemented # Maybe something like this to disable certain methods
@classproperty
def path_delimiter(cls):
return cls._path_delimiter
def spawn_parents(self):
if not self.get_parent(spawn=False) and self.path and not self.is_root():
try:
index = self.path.rindex(self.path_delimiter) + 1
except ValueError:
index = 0
self.set_parent(Path(path=self.path[:index]))
def spawn_children(self):
if self.is_folder():
old_children = {path.name() for path in self.get_children(spawn=False)}
try:
new_children = set(os.listdir(self.path if self.path else "."))
except PermissionError:
new_children = set()
for name in old_children.symmetric_difference(new_children):
path = Path(path=self / name)
path.set_parent(self if name in new_children else None)
def __str__(self):
return getattr(self, "path", "<Path not loaded yet>")
# return self.path
def __repr__(self):
return self.name()
def __fspath__(self):
return self.path
def __format__(self, format_spec):
return self.path.__format__(format_spec)
def __truediv__(self, other):
""" :rtype: generalfile.Path """
# print("here", self._recycle_instances)
return self.Path(self._path / str(other))
def __eq__(self, other):
if isinstance(other, Path):
other = other.path
else:
other = self._scrub("" if other is None else str(other))
return self.path == other
def __hash__(self):
return hash(self.path)
def __contains__(self, item):
return self.path.__contains__(item)
@classmethod
def _scrub(cls, str_path):
str_path = cls._replace_delimiters(str_path=str_path)
str_path = cls._invalid_characters(str_path=str_path)
str_path = cls._trim(str_path=str_path)
str_path = cls._delimiter_suffix_if_root(str_path=str_path)
return str_path
@classmethod
@deco_cache()
def scrub(cls, str_path):
return cls._scrub(str_path=str_path)
@classmethod
@deco_cache()
def _replace_delimiters(cls, str_path):
str_path = str_path.replace("/", cls.path_delimiter)
str_path = str_path.replace("\\", cls.path_delimiter)
return str_path
@classmethod
@deco_cache()
def _invalid_characters(cls, str_path):
# Simple check for characters that are invalid in Windows paths
for character in '<>"|?*':
if character in str_path:
raise InvalidCharacterError(f"Invalid character '{character}' in '{str_path}'")
if ":" in str_path:
if not cls.verInfo.pathRootHasColon:
raise InvalidCharacterError(f"Path has a colon but '{cls.verInfo.os}' doesn't use colon for path root: '{str_path}'")
if str_path[1] != ":":
raise InvalidCharacterError(f"Path has a colon but there's no colon at index 1: '{str_path}'")
if len(str_path) >= 3 and str_path[2] != cls.path_delimiter:
raise InvalidCharacterError(f"Path has a colon but index 2 is not a delimiter: '{str_path}'")
if ":" in str_path[2:]:
raise InvalidCharacterError(f"Path has a colon that's not at index 1: '{str_path}'")
if str_path.endswith("."):
raise InvalidCharacterError(f"Path cannot end with a dot ('.').")
return str_path
@classmethod
@deco_cache()
def _trim(cls, str_path):
if not cls.verInfo.pathRootIsDelimiter and str_path.startswith(cls.path_delimiter):
str_path = str_path[1:]
if str_path.endswith(cls.path_delimiter) and len(str_path) > 1:
str_path = str_path[0:-1]
return str_path
@classmethod
@deco_cache()
def _delimiter_suffix_if_root(cls, str_path):
if len(str_path) == 2 and str_path[1] == ":":
return f"{str_path}{cls.path_delimiter}"
return str_path
setattr(Path, "Path", Path)
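# Illustrative usage sketch (added for clarity, not part of the original module).
# Results depend on VerInfo, i.e. on the host platform; the values below assume a
# POSIX system where the path delimiter is "/":
#
#     Path("folder\\file.txt").path        # -> "folder/file.txt" (delimiters normalized)
#     Path("folder/sub/").path             # -> "folder/sub" (trailing delimiter trimmed)
#     (Path("folder") / "file.txt").path   # -> "folder/file.txt"
#     Path("file.")                        # raises InvalidCharacterError (trailing dot)
#
# On Windows, a bare drive root such as Path("C:") would instead be scrubbed to
# "C:\\" by _delimiter_suffix_if_root().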
| [((24, 14, 24, 23), 'generallibrary.VerInfo', 'VerInfo', ({}, {}), '()', False, 'from generallibrary import VerInfo, TreeDiagram, Recycle, classproperty, deco_cache\n'), ((104, 5, 104, 17), 'generallibrary.deco_cache', 'deco_cache', ({}, {}), '()', False, 'from generallibrary import VerInfo, TreeDiagram, Recycle, classproperty, deco_cache\n'), ((109, 5, 109, 17), 'generallibrary.deco_cache', 'deco_cache', ({}, {}), '()', False, 'from generallibrary import VerInfo, TreeDiagram, Recycle, classproperty, deco_cache\n'), ((116, 5, 116, 17), 'generallibrary.deco_cache', 'deco_cache', ({}, {}), '()', False, 'from generallibrary import VerInfo, TreeDiagram, Recycle, classproperty, deco_cache\n'), ((138, 5, 138, 17), 'generallibrary.deco_cache', 'deco_cache', ({}, {}), '()', False, 'from generallibrary import VerInfo, TreeDiagram, Recycle, classproperty, deco_cache\n'), ((147, 5, 147, 17), 'generallibrary.deco_cache', 'deco_cache', ({}, {}), '()', False, 'from generallibrary import VerInfo, TreeDiagram, Recycle, classproperty, deco_cache\n'), ((34, 21, 34, 44), 'pathlib.Path', 'pathlib.Path', ({(34, 34, 34, 43): 'self.path'}, {}), '(self.path)', False, 'import pathlib\n'), ((134, 18, 134, 77), 'generalfile.errors.InvalidCharacterError', 'InvalidCharacterError', ({(134, 40, 134, 76): 'f"""Path cannot end with a dot (\'.\')."""'}, {}), '(f"Path cannot end with a dot (\'.\').")', False, 'from generalfile.errors import InvalidCharacterError\n'), ((121, 22, 121, 95), 'generalfile.errors.InvalidCharacterError', 'InvalidCharacterError', ({(121, 44, 121, 94): 'f"""Invalid character \'{character}\' in \'{str_path}\'"""'}, {}), '(f"Invalid character \'{character}\' in \'{str_path}\'")', False, 'from generalfile.errors import InvalidCharacterError\n'), ((125, 22, 125, 133), 'generalfile.errors.InvalidCharacterError', 'InvalidCharacterError', ({(125, 44, 125, 132): 'f"""Path has a colon but \'{cls.verInfo.os}\' doesn\'t use colon for path root: \'{str_path}\'"""'}, {}), '(\n f"Path has a colon but \'{cls.verInfo.os}\' doesn\'t use colon for path root: \'{str_path}\'"\n )', False, 'from generalfile.errors import InvalidCharacterError\n'), ((127, 22, 127, 110), 'generalfile.errors.InvalidCharacterError', 'InvalidCharacterError', ({(127, 44, 127, 109): 'f"""Path has a colon but there\'s no colon at index 1: \'{str_path}\'"""'}, {}), '(\n f"Path has a colon but there\'s no colon at index 1: \'{str_path}\'")', False, 'from generalfile.errors import InvalidCharacterError\n'), ((129, 22, 129, 109), 'generalfile.errors.InvalidCharacterError', 'InvalidCharacterError', ({(129, 44, 129, 108): 'f"""Path has a colon but index 2 is not a delimiter: \'{str_path}\'"""'}, {}), '(\n f"Path has a colon but index 2 is not a delimiter: \'{str_path}\'")', False, 'from generalfile.errors import InvalidCharacterError\n'), ((131, 22, 131, 100), 'generalfile.errors.InvalidCharacterError', 'InvalidCharacterError', ({(131, 44, 131, 99): 'f"""Path has a colon that\'s not at index 1: \'{str_path}\'"""'}, {}), '(f"Path has a colon that\'s not at index 1: \'{str_path}\'")', False, 'from generalfile.errors import InvalidCharacterError\n'), ((56, 35, 56, 78), 'os.listdir', 'os.listdir', ({(56, 46, 56, 77): "self.path if self.path else '.'"}, {}), "(self.path if self.path else '.')", False, 'import os\n')] |
Tontolda/genui | src/genui/models/models.py | c5b7da7c5a99fc16d34878e2170145ac7c8e31c4 | import os
from django.db import models
import uuid
# Create your models here.
from djcelery_model.models import TaskMixin
from polymorphic.models import PolymorphicModel
from genui.utils.models import NON_POLYMORPHIC_CASCADE, OverwriteStorage
from genui.utils.extensions.tasks.models import TaskShortcutsMixIn, PolymorphicTaskManager
from genui.projects.models import DataSet
class AlgorithmMode(models.Model):
name = models.CharField(unique=True, blank=False, max_length=32)
def __str__(self):
return '%s object (%s)' % (self.__class__.__name__, self.name)
class ModelFileFormat(models.Model):
fileExtension = models.CharField(max_length=32, blank=False, unique=True)
description = models.TextField(max_length=10000, blank=True)
class ImportableModelComponent(models.Model):
corePackage = models.CharField(blank=False, null=False, default='genui.models.genuimodels', max_length=1024)
class Meta:
abstract = True
class Algorithm(ImportableModelComponent):
name = models.CharField(blank=False, max_length=128, unique=True)
fileFormats = models.ManyToManyField(ModelFileFormat)
validModes = models.ManyToManyField(AlgorithmMode)
def __str__(self):
return '%s object (%s)' % (self.__class__.__name__, self.name)
class ModelParameter(models.Model):
STRING = 'string'
BOOL = 'bool'
INTEGER = 'integer'
FLOAT = 'float'
CONTENT_TYPES = [
(STRING, 'String'),
(BOOL, 'Logical'),
(INTEGER, 'Integer'),
(FLOAT, 'Float'),
]
name = models.CharField(max_length=128, blank=False)
algorithm = models.ForeignKey(Algorithm, on_delete=models.CASCADE, null=False, related_name='parameters')
contentType = models.CharField(max_length=32, choices=CONTENT_TYPES, default=STRING)
defaultValue = models.ForeignKey("ModelParameterValue", on_delete=models.SET_NULL, null=True)
class Meta:
unique_together = ('name', 'algorithm')
def __str__(self):
return '%s object (%s)' % (self.__class__.__name__, self.name)
class ModelBuilder(ImportableModelComponent):
name = models.CharField(max_length=128, blank=False, unique=True)
def __str__(self):
return '%s object (%s)' % (self.__class__.__name__, self.name)
class ModelFile(models.Model):
MAIN = "main"
AUXILIARY = "aux"
KINDS = [
(MAIN, 'Main'),
(AUXILIARY, 'Auxiliary'),
]
class Rejected(Exception):
def __init__(self, msg):
super().__init__(msg)
class InvalidFileFormatError(Exception):
def __init__(self, msg):
super().__init__(msg)
modelInstance = models.ForeignKey("Model", null=False, related_name="files", on_delete=models.CASCADE)
kind = models.CharField(max_length=32, choices=KINDS, null=False, default=AUXILIARY)
note = models.CharField(max_length=128, blank=True)
format = models.ForeignKey(ModelFileFormat, null=True, on_delete=models.CASCADE)
file = models.FileField(null=True, upload_to='models/', storage=OverwriteStorage()) # TODO: add custom logic to save in a directory specific to the project where the model is
@property
def path(self):
return self.file.path
@staticmethod
def generateMainFileName(model, fileFormat):
return f"{model.trainingStrategy.algorithm.name}{model.id}_project{model.project.id}_{uuid.uuid4().hex}_main{fileFormat.fileExtension}"
@staticmethod
def generateAuxFileName(model, fileFormat):
return f"{model.trainingStrategy.algorithm.name}{model.id}_project{model.project.id}_{uuid.uuid4().hex}_aux{fileFormat.fileExtension}"
@staticmethod
def create(model, name, file_, kind=AUXILIARY, note=None):
if not note:
note = ''
algorithm = model.trainingStrategy.algorithm
if kind == ModelFile.MAIN and model.modelFile:
file_format = None
for format_ in algorithm.fileFormats.all():
if name.endswith(format_.fileExtension):
file_format = format_
break
if not file_format:
raise ModelFile.InvalidFileFormatError(f"The extension of the submitted file '{name}' did not match any of the known file formats for algorithm '{algorithm.name}'.")
if model.modelFile.format.fileExtension == file_format.fileExtension:
model.modelFile.file.save(os.path.basename(model.modelFile.path), file_)
else:
model.modelFile.delete()
ModelFile.objects.create(
modelInstance=model,
kind=ModelFile.MAIN,
format=file_format,
note=note,
file=file_
)
return model.modelFile
else:
file_format = None
for format_ in ModelFileFormat.objects.all():
if name.endswith(format_.fileExtension):
file_format = format_
break
if kind == ModelFile.MAIN:
if not file_format:
raise ModelFile.InvalidFileFormatError(f"The extension of the submitted file '{name}' did not match any of the known file formats for algorithm '{algorithm.name}'.")
ret = ModelFile.objects.create(
modelInstance=model,
kind=ModelFile.MAIN,
format=file_format,
note=note
)
ret.file.save(ret.generateMainFileName(model, file_format), file_)
else:
ret = ModelFile.objects.create(
modelInstance=model,
kind=kind,
format=file_format if file_format else ModelFileFormat.objects.get_or_create(
fileExtension='.' + name.split('.')[-1]
)[0],
note=note
)
ret.file.save(ret.generateAuxFileName(model, ret.format), file_)
return ret
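# Illustrative usage of ModelFile.create() (an editorial sketch, not from the original
# code base); assumes `model` is a saved Model instance, `upload` is a Django File
# object and the model's algorithm registers a ModelFileFormat with extension '.pkl':
#
#     main_file = ModelFile.create(model, 'model.pkl', upload, kind=ModelFile.MAIN)
#     aux_file = ModelFile.create(
#         model, 'training_log.txt', upload, kind=ModelFile.AUXILIARY, note='log'
#     )
#
# For a MAIN file the extension must match one of the algorithm's file formats,
# otherwise ModelFile.InvalidFileFormatError is raised.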
class Model(TaskShortcutsMixIn, TaskMixin, DataSet):
objects = PolymorphicTaskManager()
builder = models.ForeignKey(ModelBuilder, on_delete=models.CASCADE, null=False)
def __str__(self):
return '%s object (%s)' % (self.__class__.__name__, self.name)
@property
def modelFile(self):
# TODO: exception when more than one main file found
main = self.files.filter(kind=ModelFile.MAIN)
if main:
return main.get()
else:
return None
def onFileSave(self, saved : ModelFile):
"""
This will be called when a file is being
saved to this model instance. You can raise
the ModelFile.Rejected exception if the file
is invalid.
:param saved: ModelFile object being saved
:return: None
"""
pass
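# Example override (an illustrative sketch only, not part of the original API):
#
#     def onFileSave(self, saved):
#         if saved.kind == ModelFile.MAIN and saved.file.size == 0:
#             raise ModelFile.Rejected('Empty main model file submitted')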
# @modelFile.setter
# def modelFile(self, val):
# main = self.files.filter(kind=ModelFile.MAIN)
# if main:
# main.delete()
# val.kind = ModelFile.MAIN
# val.save()
# self.files.add(val)
# self.save()
@property
def trainingStrategy(self):
count = self.trainingStrategies.count()
if count == 1:
return self.trainingStrategies.get()
elif count == 0:
return None
else:
raise Exception("Training strategy returned more than one value. This indicates an integrity error in the database!")
@property
def validationStrategy(self):
count = self.validationStrategies.count()
if count == 1:
return self.validationStrategies.get()
elif count == 0:
return None
else:
raise Exception("Validation strategy returned more than one value. This indicates an integrity error in the database!")
class TrainingStrategy(PolymorphicModel):
algorithm = models.ForeignKey(Algorithm, on_delete=models.CASCADE, null=False)
mode = models.ForeignKey(AlgorithmMode, on_delete=models.CASCADE, null=False)
modelInstance = models.ForeignKey(Model, null=False, on_delete=models.CASCADE, related_name="trainingStrategies")
class ModelParameterValue(PolymorphicModel):
parameter = models.ForeignKey(ModelParameter, on_delete=models.CASCADE, null=False)
strategy = models.ForeignKey(TrainingStrategy, on_delete=NON_POLYMORPHIC_CASCADE, null=True, related_name='parameters')
@staticmethod
def parseValue(val):
return str(val)
class ModelParameterStr(ModelParameterValue):
value = models.CharField(max_length=1024)
class ModelParameterBool(ModelParameterValue):
value = models.BooleanField(null=False)
@staticmethod
def parseValue(val):
return bool(val)
class ModelParameterInt(ModelParameterValue):
value = models.IntegerField(null=False)
@staticmethod
def parseValue(val):
return int(val)
class ModelParameterFloat(ModelParameterValue):
value = models.FloatField(null=False)
@staticmethod
def parseValue(val):
return float(val)
PARAM_VALUE_CTYPE_TO_MODEL_MAP = {
ModelParameter.STRING : ModelParameterStr,
ModelParameter.INTEGER : ModelParameterInt,
ModelParameter.FLOAT : ModelParameterFloat,
ModelParameter.BOOL : ModelParameterBool
}
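# Illustrative helper showing how the map above can be combined with parseValue();
# this is an editorial sketch, not part of the original module API.
def create_parameter_value(parameter, strategy, raw_value):
    """Create a typed ModelParameterValue for the given parameter and strategy."""
    # Pick the concrete value model matching the parameter's declared content type
    value_model = PARAM_VALUE_CTYPE_TO_MODEL_MAP[parameter.contentType]
    # parseValue() coerces the raw input into the type stored by the value field
    return value_model.objects.create(
        parameter=parameter,
        strategy=strategy,
        value=value_model.parseValue(raw_value),
    )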
class ModelPerformanceMetric(ImportableModelComponent):
name = models.CharField(unique=True, blank=False, max_length=128)
validModes = models.ManyToManyField(AlgorithmMode, related_name='metrics')
validAlgorithms = models.ManyToManyField(Algorithm, related_name='metrics')
description = models.TextField(max_length=10000, blank=True)
def __str__(self):
return '%s object (%s)' % (self.__class__.__name__, self.name)
class ValidationStrategy(PolymorphicModel):
metrics = models.ManyToManyField(ModelPerformanceMetric)
modelInstance = models.ForeignKey(Model, null=False, on_delete=models.CASCADE, related_name='validationStrategies')
class CV(ValidationStrategy):
cvFolds = models.IntegerField(blank=False)
class Meta:
abstract = True
class ValidationSet(ValidationStrategy):
validSetSize = models.FloatField(blank=False)
class Meta:
abstract = True
class BasicValidationStrategy(ValidationSet, CV):
pass
class ModelPerformance(PolymorphicModel):
metric = models.ForeignKey(ModelPerformanceMetric, null=False, on_delete=models.CASCADE)
value = models.FloatField(blank=False)
model = models.ForeignKey(Model, null=False, on_delete=NON_POLYMORPHIC_CASCADE, related_name="performance")
class ModelPerformanceCV(ModelPerformance):
fold = models.IntegerField(blank=False)
class ModelPerfomanceNN(ModelPerformance):
epoch = models.IntegerField(null=False, blank=False)
step = models.IntegerField(null=False, blank=False)
class ROCCurvePoint(ModelPerformance):
fpr = models.FloatField(blank=False)
auc = models.ForeignKey(ModelPerformance, null=False, on_delete=NON_POLYMORPHIC_CASCADE, related_name="points")
@property
def tpr(self):
return self.value | [((16, 11, 16, 68), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((23, 20, 23, 77), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((24, 18, 24, 64), 'django.db.models.TextField', 'models.TextField', (), '', False, 'from django.db import models\n'), ((28, 18, 28, 112), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((35, 11, 35, 69), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((36, 18, 36, 57), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ({(36, 41, 36, 56): 'ModelFileFormat'}, {}), '(ModelFileFormat)', False, 'from django.db import models\n'), ((37, 17, 37, 54), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ({(37, 40, 37, 53): 'AlgorithmMode'}, {}), '(AlgorithmMode)', False, 'from django.db import models\n'), ((55, 11, 55, 56), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((56, 16, 56, 109), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import models\n'), ((57, 18, 57, 88), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((58, 19, 58, 97), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import models\n'), ((67, 11, 67, 69), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((90, 20, 90, 106), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import models\n'), ((91, 11, 91, 88), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((92, 11, 92, 55), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((93, 13, 93, 84), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import models\n'), ((165, 14, 165, 38), 'genui.utils.extensions.tasks.models.PolymorphicTaskManager', 'PolymorphicTaskManager', ({}, {}), '()', False, 'from genui.utils.extensions.tasks.models import TaskShortcutsMixIn, PolymorphicTaskManager\n'), ((166, 14, 166, 83), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import models\n'), ((225, 16, 225, 82), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import models\n'), ((226, 11, 226, 81), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import models\n'), ((227, 20, 227, 117), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import models\n'), ((231, 16, 231, 87), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import models\n'), ((232, 15, 232, 123), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import models\n'), ((240, 12, 240, 45), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((244, 12, 244, 43), 'django.db.models.BooleanField', 'models.BooleanField', (), '', False, 'from django.db import models\n'), ((252, 12, 252, 43), 'django.db.models.IntegerField', 'models.IntegerField', (), '', False, 'from django.db import models\n'), ((260, 12, 260, 41), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import models\n'), ((275, 11, 275, 
69), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((276, 17, 276, 78), 'django.db.models.ManyToManyField', 'models.ManyToManyField', (), '', False, 'from django.db import models\n'), ((277, 22, 277, 79), 'django.db.models.ManyToManyField', 'models.ManyToManyField', (), '', False, 'from django.db import models\n'), ((278, 18, 278, 64), 'django.db.models.TextField', 'models.TextField', (), '', False, 'from django.db import models\n'), ((285, 14, 285, 60), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ({(285, 37, 285, 59): 'ModelPerformanceMetric'}, {}), '(ModelPerformanceMetric)', False, 'from django.db import models\n'), ((286, 20, 286, 119), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import models\n'), ((290, 14, 290, 46), 'django.db.models.IntegerField', 'models.IntegerField', (), '', False, 'from django.db import models\n'), ((297, 19, 297, 49), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import models\n'), ((308, 13, 308, 92), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import models\n'), ((309, 12, 309, 42), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import models\n'), ((310, 12, 310, 111), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import models\n'), ((314, 11, 314, 43), 'django.db.models.IntegerField', 'models.IntegerField', (), '', False, 'from django.db import models\n'), ((317, 12, 317, 56), 'django.db.models.IntegerField', 'models.IntegerField', (), '', False, 'from django.db import models\n'), ((318, 11, 318, 55), 'django.db.models.IntegerField', 'models.IntegerField', (), '', False, 'from django.db import models\n'), ((322, 10, 322, 40), 'django.db.models.FloatField', 'models.FloatField', (), '', False, 'from django.db import models\n'), ((323, 10, 323, 115), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import models\n'), ((94, 68, 94, 86), 'genui.utils.models.OverwriteStorage', 'OverwriteStorage', ({}, {}), '()', False, 'from genui.utils.models import NON_POLYMORPHIC_CASCADE, OverwriteStorage\n'), ((102, 94, 102, 106), 'uuid.uuid4', 'uuid.uuid4', ({}, {}), '()', False, 'import uuid\n'), ((106, 94, 106, 106), 'uuid.uuid4', 'uuid.uuid4', ({}, {}), '()', False, 'import uuid\n'), ((123, 42, 123, 80), 'os.path.basename', 'os.path.basename', ({(123, 59, 123, 79): 'model.modelFile.path'}, {}), '(model.modelFile.path)', False, 'import os\n')] |
bihealth/sodar_core | projectroles/tests/test_views_api.py | a6c22c4f276b64ffae6de48779a82d59a60a9333 | """REST API view tests for the projectroles app"""
import base64
import json
import pytz
from django.conf import settings
from django.core import mail
from django.forms.models import model_to_dict
from django.test import override_settings
from django.urls import reverse
from django.utils import timezone
from knox.models import AuthToken
from test_plus.test import APITestCase
from projectroles import views_api
from projectroles.models import (
Project,
Role,
RoleAssignment,
ProjectInvite,
SODAR_CONSTANTS,
)
from projectroles.plugins import change_plugin_status, get_backend_api
from projectroles.remote_projects import RemoteProjectAPI
from projectroles.tests.test_models import (
ProjectMixin,
RoleAssignmentMixin,
ProjectInviteMixin,
RemoteSiteMixin,
RemoteProjectMixin,
AppSettingMixin,
)
from projectroles.tests.test_views import (
TestViewsBase,
PROJECT_TYPE_CATEGORY,
PROJECT_TYPE_PROJECT,
PROJECT_ROLE_OWNER,
PROJECT_ROLE_DELEGATE,
PROJECT_ROLE_CONTRIBUTOR,
PROJECT_ROLE_GUEST,
REMOTE_SITE_NAME,
REMOTE_SITE_URL,
SITE_MODE_SOURCE,
SITE_MODE_TARGET,
REMOTE_SITE_DESC,
REMOTE_SITE_SECRET,
)
from projectroles.utils import build_secret
CORE_API_MEDIA_TYPE_INVALID = 'application/vnd.bihealth.invalid'
CORE_API_VERSION_INVALID = '9.9.9'
INVALID_UUID = '11111111-1111-1111-1111-111111111111'
NEW_CATEGORY_TITLE = 'New Category'
NEW_PROJECT_TITLE = 'New Project'
UPDATED_TITLE = 'Updated Title'
UPDATED_DESC = 'Updated description'
UPDATED_README = 'Updated readme'
INVITE_USER_EMAIL = '[email protected]'
INVITE_USER2_EMAIL = '[email protected]'
INVITE_MESSAGE = 'Message'
# Base Classes -----------------------------------------------------------------
class SODARAPIViewTestMixin:
"""
Mixin for SODAR and SODAR Core API views with accept headers, knox token
authorization and general helper methods.
"""
# Default API header parameters are for external SODAR site APIs
# Override these for testing SODAR Core API views
media_type = settings.SODAR_API_MEDIA_TYPE
api_version = settings.SODAR_API_DEFAULT_VERSION
# Copied from Knox tests
@classmethod
def _get_basic_auth_header(cls, username, password):
return (
'Basic %s'
% base64.b64encode(
('%s:%s' % (username, password)).encode('ascii')
).decode()
)
@classmethod
def get_token(cls, user, full_result=False):
"""
Get or create a knox token for a user.
:param user: User object
:param full_result: Return full result of AuthToken creation if True
:return: Token string or AuthToken creation tuple
"""
result = AuthToken.objects.create(user=user)
return result if full_result else result[1]
@classmethod
def get_serialized_user(cls, user):
"""
Return serialization for a user.
:param user: User object
:return: Dict
"""
return {
'email': user.email,
'name': user.name,
'sodar_uuid': str(user.sodar_uuid),
'username': user.username,
}
@classmethod
def get_drf_datetime(cls, obj_dt):
"""
Return datetime in DRF compatible format.
:param obj_dt: Object DateTime field
:return: String
"""
return timezone.localtime(
obj_dt, pytz.timezone(settings.TIME_ZONE)
).isoformat()
@classmethod
def get_accept_header(
cls,
media_type=None,
version=None,
):
"""
Return version accept header based on the media type and version string.
:param media_type: String (default = cls.media_type)
:param version: String (default = cls.api_version)
:return: Dict
"""
if not media_type:
media_type = cls.media_type
if not version:
version = cls.api_version
return {'HTTP_ACCEPT': '{}; version={}'.format(media_type, version)}
@classmethod
def get_token_header(cls, token):
"""
Return auth header based on token.
:param token: Token string
:return: Dict
"""
return {'HTTP_AUTHORIZATION': 'token {}'.format(token)}
def request_knox(
self,
url,
method='GET',
format='json',
data=None,
token=None,
media_type=None,
version=None,
header=None,
):
"""
Perform an HTTP request with Knox token auth.
:param url: URL for the request
:param method: Request method (string, default="GET")
:param format: Request format (string, default="json")
:param data: Optional data for request (dict)
:param token: Knox token string (if None, use self.knox_token)
:param media_type: String (default = cls.media_type)
:param version: String (default = cls.api_version)
:param header: Optional dict of extra headers to add to the request
:return: Response object
"""
if not token:
token = self.knox_token
req_kwargs = {
'format': format,
**self.get_accept_header(media_type, version),
**self.get_token_header(token),
}
if data:
req_kwargs['data'] = data
if header:
req_kwargs.update(header)
req_method = getattr(self.client, method.lower(), None)
if not req_method:
raise ValueError('Unsupported method "{}"'.format(method))
return req_method(url, **req_kwargs)
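# Illustrative call (an editorial sketch; real usage follows in the test cases below):
# the media_type/version arguments allow exercising API version negotiation, e.g.
#
#     response = self.request_knox(
#         url,
#         media_type=CORE_API_MEDIA_TYPE_INVALID,
#         version=CORE_API_VERSION_INVALID,
#     )
#     # DRF accept header versioning is expected to reject this with 406 Not Acceptable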
class TestAPIViewsBase(
ProjectMixin, RoleAssignmentMixin, SODARAPIViewTestMixin, APITestCase
):
"""Base API test view with knox authentication"""
def setUp(self):
# Show complete diff in case of failure
self.maxDiff = None
# Force disabling of taskflow plugin if it's available
if get_backend_api('taskflow'):
change_plugin_status(
name='taskflow', status=1, plugin_type='backend'  # 1 = Disabled
)
# Init roles
self.role_owner = Role.objects.get_or_create(name=PROJECT_ROLE_OWNER)[0]
self.role_delegate = Role.objects.get_or_create(
name=PROJECT_ROLE_DELEGATE
)[0]
self.role_contributor = Role.objects.get_or_create(
name=PROJECT_ROLE_CONTRIBUTOR
)[0]
self.role_guest = Role.objects.get_or_create(name=PROJECT_ROLE_GUEST)[0]
# Init superuser
self.user = self.make_user('superuser')
self.user.is_staff = True
self.user.is_superuser = True
self.user.save()
# Set up category and project with owner role assignments
self.category = self._make_project(
'TestCategory', PROJECT_TYPE_CATEGORY, None
)
self.cat_owner_as = self._make_assignment(
self.category, self.user, self.role_owner
)
self.project = self._make_project(
'TestProject', PROJECT_TYPE_PROJECT, self.category
)
self.owner_as = self._make_assignment(
self.project, self.user, self.role_owner
)
# Get knox token for self.user
self.knox_token = self.get_token(self.user)
class TestCoreAPIViewsBase(TestAPIViewsBase):
"""Override of TestAPIViewsBase to be used with SODAR Core API views"""
media_type = views_api.CORE_API_MEDIA_TYPE
api_version = views_api.CORE_API_DEFAULT_VERSION
# Tests ------------------------------------------------------------------------
class TestProjectListAPIView(TestCoreAPIViewsBase):
"""Tests for ProjectListAPIView"""
def test_get(self):
"""Test ProjectListAPIView get() as project owner"""
url = reverse('projectroles:api_project_list')
response = self.request_knox(url)
# Assert response
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
self.assertEqual(len(response_data), 2)
expected = [
{
'title': self.category.title,
'type': self.category.type,
'parent': None,
'description': self.category.description,
'readme': '',
'public_guest_access': False,
'submit_status': self.category.submit_status,
'roles': {
str(self.cat_owner_as.sodar_uuid): {
'user': {
'username': self.user.username,
'name': self.user.name,
'email': self.user.email,
'sodar_uuid': str(self.user.sodar_uuid),
},
'role': PROJECT_ROLE_OWNER,
'sodar_uuid': str(self.cat_owner_as.sodar_uuid),
}
},
'sodar_uuid': str(self.category.sodar_uuid),
},
{
'title': self.project.title,
'type': self.project.type,
'parent': str(self.category.sodar_uuid),
'description': self.project.description,
'readme': '',
'public_guest_access': False,
'submit_status': self.project.submit_status,
'roles': {
str(self.owner_as.sodar_uuid): {
'user': {
'username': self.user.username,
'name': self.user.name,
'email': self.user.email,
'sodar_uuid': str(self.user.sodar_uuid),
},
'role': PROJECT_ROLE_OWNER,
'sodar_uuid': str(self.owner_as.sodar_uuid),
}
},
'sodar_uuid': str(self.project.sodar_uuid),
},
]
self.assertEqual(response_data, expected)
def test_get_no_roles(self):
"""Test ProjectListAPIView get() without roles"""
user_no_roles = self.make_user('user_no_roles')
url = reverse('projectroles:api_project_list')
response = self.request_knox(url, token=self.get_token(user_no_roles))
# Assert response
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
self.assertEqual(len(response_data), 0)
def test_get_limited_roles(self):
"""Test ProjectListAPIView get() with only one role"""
user_no_roles = self.make_user('user_no_roles')
self._make_assignment(
self.project, user_no_roles, self.role_contributor
)
url = reverse('projectroles:api_project_list')
response = self.request_knox(url, token=self.get_token(user_no_roles))
# Assert response
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
self.assertEqual(len(response_data), 1)
class TestProjectRetrieveAPIView(AppSettingMixin, TestCoreAPIViewsBase):
"""Tests for ProjectRetrieveAPIView"""
def test_get_category(self):
"""Test ProjectRetrieveAPIView get() with a category"""
url = reverse(
'projectroles:api_project_retrieve',
kwargs={'project': self.category.sodar_uuid},
)
response = self.request_knox(url)
# Assert response
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
expected = {
'title': self.category.title,
'type': self.category.type,
'parent': None,
'description': self.category.description,
'readme': '',
'public_guest_access': False,
'submit_status': self.category.submit_status,
'roles': {
str(self.cat_owner_as.sodar_uuid): {
'user': {
'username': self.user.username,
'name': self.user.name,
'email': self.user.email,
'sodar_uuid': str(self.user.sodar_uuid),
},
'role': PROJECT_ROLE_OWNER,
'sodar_uuid': str(self.cat_owner_as.sodar_uuid),
}
},
'sodar_uuid': str(self.category.sodar_uuid),
}
self.assertEqual(response_data, expected)
def test_get_project(self):
"""Test ProjectRetrieveAPIView get() with a project"""
url = reverse(
'projectroles:api_project_retrieve',
kwargs={'project': self.project.sodar_uuid},
)
response = self.request_knox(url)
# Assert response
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
expected = {
'title': self.project.title,
'type': self.project.type,
'parent': str(self.category.sodar_uuid),
'description': self.project.description,
'readme': '',
'public_guest_access': False,
'submit_status': self.project.submit_status,
'roles': {
str(self.owner_as.sodar_uuid): {
'user': {
'username': self.user.username,
'name': self.user.name,
'email': self.user.email,
'sodar_uuid': str(self.user.sodar_uuid),
},
'role': PROJECT_ROLE_OWNER,
'sodar_uuid': str(self.owner_as.sodar_uuid),
}
},
'sodar_uuid': str(self.project.sodar_uuid),
}
self.assertEqual(response_data, expected)
def test_get_not_found(self):
"""Test ProjectRetrieveAPIView get() with an invalid UUID"""
url = reverse(
'projectroles:api_project_retrieve',
kwargs={'project': INVALID_UUID},
)
response = self.request_knox(url)
self.assertEqual(response.status_code, 404)
class TestIPAllowing(AppSettingMixin, TestCoreAPIViewsBase):
"""Tests for IP allowing settings using ProjectRetrieveAPIView"""
def _setup_ip_allowing(self, ip_list, role_suffix):
"""Setup users and roles for IP allowing test"""
# Create new user
user = self.make_user(role_suffix)
# Remove previously assigned owner role for superuser.
superuser_as_owner_role = RoleAssignment.objects.get(
project=self.project, user=self.user, role=self.role_owner
)
superuser_as_owner_role.delete()
# Assign requested role to user
user_as = self._make_assignment(
self.project, user, getattr(self, 'role_' + role_suffix)
)
user_cat_as = RoleAssignment.objects.get(
project=self.category, user=self.user, role=self.role_owner
)
if role_suffix == 'owner':
user_cat_as.delete()
user_cat_as = self._make_assignment(
self.category, user, getattr(self, 'role_' + role_suffix)
)
# Init IP restrict setting
self._make_setting(
app_name='projectroles',
name='ip_restrict',
setting_type='BOOLEAN',
value=True,
project=self.project,
)
# Init IP allowlist setting
self._make_setting(
app_name='projectroles',
name='ip_allowlist',
setting_type='JSON',
value=None,
value_json=ip_list,
project=self.project,
)
return user, user_as, user_cat_as
def _get_project_ip_allowing(
self, username, http_attribute, ip_list, blocked=None
):
"""Helper for IP allowing tests"""
if blocked is None:
raise Exception('Please set "blocked" argument (True/False)')
user, user_as, user_cat_as = self._setup_ip_allowing(ip_list, username)
url = reverse(
'projectroles:api_project_retrieve',
kwargs={'project': self.project.sodar_uuid},
)
header = {http_attribute: '192.168.1.1'}
response = self.request_knox(
url, token=self.get_token(user), header=header
)
if username == 'owner':
role = PROJECT_ROLE_OWNER
elif username == 'delegate':
role = PROJECT_ROLE_DELEGATE
elif username == 'contributor':
role = PROJECT_ROLE_CONTRIBUTOR
else:
role = PROJECT_ROLE_GUEST
if blocked:
self.assertEqual(response.status_code, 403)
else:
expected = {
'title': self.project.title,
'type': self.project.type,
'parent': str(self.category.sodar_uuid),
'description': self.project.description,
'readme': '',
'public_guest_access': False,
'submit_status': self.project.submit_status,
'roles': {
str(user_as.sodar_uuid): {
'user': {
'username': user.username,
'name': user.name,
'email': user.email,
'sodar_uuid': str(user.sodar_uuid),
},
'role': role,
'sodar_uuid': str(user_as.sodar_uuid),
}
},
'sodar_uuid': str(self.project.sodar_uuid),
}
# Assert response
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
self.assertEqual(response_data, expected)
def test_http_x_forwarded_for_block_all_owner(self):
self._get_project_ip_allowing(
'owner', 'HTTP_X_FORWARDED_FOR', [], blocked=False
)
def test_http_x_forwarded_for_allow_ip_owner(self):
self._get_project_ip_allowing(
'owner', 'HTTP_X_FORWARDED_FOR', ['192.168.1.1'], blocked=False
)
def test_http_x_forwarded_for_block_all_delegate(self):
self._get_project_ip_allowing(
'delegate', 'HTTP_X_FORWARDED_FOR', [], blocked=False
)
def test_http_x_forwarded_for_allow_ip_delegate(self):
self._get_project_ip_allowing(
'delegate', 'HTTP_X_FORWARDED_FOR', ['192.168.1.1'], blocked=False
)
def test_http_x_forwarded_for_block_all_contributor(self):
self._get_project_ip_allowing(
'contributor', 'HTTP_X_FORWARDED_FOR', [], blocked=True
)
def test_http_x_forwarded_for_allow_ip_contributor(self):
self._get_project_ip_allowing(
'contributor',
'HTTP_X_FORWARDED_FOR',
['192.168.1.1'],
blocked=False,
)
def test_http_x_forwarded_for_block_all_guest(self):
self._get_project_ip_allowing(
'guest', 'HTTP_X_FORWARDED_FOR', [], blocked=True
)
def test_http_x_forwarded_for_allow_ip_guest(self):
self._get_project_ip_allowing(
'guest', 'HTTP_X_FORWARDED_FOR', ['192.168.1.1'], blocked=False
)
def test_x_forwarded_for_block_all_owner(self):
self._get_project_ip_allowing(
'owner', 'X_FORWARDED_FOR', [], blocked=False
)
def test_x_forwarded_for_allow_ip_owner(self):
self._get_project_ip_allowing(
'owner', 'X_FORWARDED_FOR', ['192.168.1.1'], blocked=False
)
def test_x_forwarded_for_block_all_delegate(self):
self._get_project_ip_allowing(
'delegate', 'X_FORWARDED_FOR', [], blocked=False
)
def test_x_forwarded_for_allow_ip_delegate(self):
self._get_project_ip_allowing(
'delegate', 'X_FORWARDED_FOR', ['192.168.1.1'], blocked=False
)
def test_x_forwarded_for_block_all_contributor(self):
self._get_project_ip_allowing(
'contributor', 'X_FORWARDED_FOR', [], blocked=True
)
def test_forwarded_for_allow_ip_contributor(self):
self._get_project_ip_allowing(
'contributor', 'X_FORWARDED_FOR', ['192.168.1.1'], blocked=False
)
def test_forwarded_for_block_all_guest(self):
self._get_project_ip_allowing(
'guest', 'X_FORWARDED_FOR', [], blocked=True
)
def test_forwarded_for_allow_ip_guest(self):
self._get_project_ip_allowing(
'guest', 'X_FORWARDED_FOR', ['192.168.1.1'], blocked=False
)
def test_forwarded_block_all_owner(self):
self._get_project_ip_allowing('owner', 'FORWARDED', [], blocked=False)
def test_forwarded_allow_ip_owner(self):
self._get_project_ip_allowing(
'owner', 'FORWARDED', ['192.168.1.1'], blocked=False
)
def test_forwarded_block_all_delegate(self):
self._get_project_ip_allowing(
'delegate', 'FORWARDED', [], blocked=False
)
def test_forwarded_allow_ip_delegate(self):
self._get_project_ip_allowing(
'delegate', 'FORWARDED', ['192.168.1.1'], blocked=False
)
def test_forwarded_block_all_contributor(self):
self._get_project_ip_allowing(
'contributor', 'FORWARDED', [], blocked=True
)
def test_forwarded_allow_ip_contributor(self):
self._get_project_ip_allowing(
'contributor', 'FORWARDED', ['192.168.1.1'], blocked=False
)
def test_forwarded_block_all_guest(self):
self._get_project_ip_allowing('guest', 'FORWARDED', [], blocked=True)
def test_forwarded_allow_ip_guest(self):
self._get_project_ip_allowing(
'guest', 'FORWARDED', ['192.168.1.1'], blocked=False
)
def test_remote_addr_block_all_owner(self):
self._get_project_ip_allowing('owner', 'REMOTE_ADDR', [], blocked=False)
def test_remote_addr_allow_ip_owner(self):
self._get_project_ip_allowing(
'owner', 'REMOTE_ADDR', ['192.168.1.1'], blocked=False
)
def test_remote_addr_block_all_delegate(self):
self._get_project_ip_allowing(
'delegate', 'REMOTE_ADDR', [], blocked=False
)
def test_remote_addr_allow_ip_delegate(self):
self._get_project_ip_allowing(
'delegate', 'REMOTE_ADDR', ['192.168.1.1'], blocked=False
)
def test_remote_addr_block_all_contributor(self):
self._get_project_ip_allowing(
'contributor', 'REMOTE_ADDR', [], blocked=True
)
def test_remote_addr_allow_ip_contributor(self):
self._get_project_ip_allowing(
'contributor', 'REMOTE_ADDR', ['192.168.1.1'], blocked=False
)
def test_remote_addr_block_all_guest(self):
self._get_project_ip_allowing('guest', 'REMOTE_ADDR', [], blocked=True)
def test_remote_addr_allow_ip_guest(self):
self._get_project_ip_allowing(
'guest', 'REMOTE_ADDR', ['192.168.1.1'], blocked=False
)
def test_remote_addr_allow_network_guest(self):
self._get_project_ip_allowing(
'guest', 'REMOTE_ADDR', ['192.168.1.0/24'], blocked=False
)
def test_remote_addr_block_not_in_allowlist_ip_guest(self):
self._get_project_ip_allowing(
'guest', 'REMOTE_ADDR', ['192.168.1.2'], blocked=True
)
def test_remote_addr_block_not_in_allowlist_network_guest(
self,
):
self._get_project_ip_allowing(
'guest', 'REMOTE_ADDR', ['192.168.2.0/24'], blocked=True
)
class TestProjectCreateAPIView(
RemoteSiteMixin, RemoteProjectMixin, TestCoreAPIViewsBase
):
"""Tests for ProjectCreateAPIView"""
def test_create_category(self):
"""Test creating a root category"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse('projectroles:api_project_create')
post_data = {
'title': NEW_CATEGORY_TITLE,
'type': PROJECT_TYPE_CATEGORY,
'parent': '',
'description': 'description',
'readme': 'readme',
'public_guest_access': False,
'owner': str(self.user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 201, msg=response.content)
self.assertEqual(Project.objects.count(), 3)
# Assert object content
new_category = Project.objects.get(title=NEW_CATEGORY_TITLE)
model_dict = model_to_dict(new_category)
model_dict['readme'] = model_dict['readme'].raw
expected = {
'id': new_category.pk,
'title': new_category.title,
'type': new_category.type,
'parent': None,
'description': new_category.description,
'readme': new_category.readme.raw,
'public_guest_access': False,
'submit_status': SODAR_CONSTANTS['SUBMIT_STATUS_OK'],
'full_title': new_category.title,
'sodar_uuid': new_category.sodar_uuid,
}
self.assertEqual(model_dict, expected)
# Assert role assignment
self.assertEqual(
RoleAssignment.objects.filter(
project=new_category, user=self.user, role=self.role_owner
).count(),
1,
)
# Assert API response
expected = {
'title': NEW_CATEGORY_TITLE,
'type': PROJECT_TYPE_CATEGORY,
'parent': None,
'description': new_category.description,
'readme': new_category.readme.raw,
'public_guest_access': False,
'sodar_uuid': str(new_category.sodar_uuid),
}
self.assertEqual(json.loads(response.content), expected)
def test_create_category_nested(self):
"""Test creating a category under an existing category"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse('projectroles:api_project_create')
post_data = {
'title': NEW_CATEGORY_TITLE,
'type': PROJECT_TYPE_CATEGORY,
'parent': str(self.category.sodar_uuid),
'description': 'description',
'readme': 'readme',
'public_guest_access': False,
'owner': str(self.user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 201, msg=response.content)
self.assertEqual(Project.objects.count(), 3)
# Assert object content
new_category = Project.objects.get(title=NEW_CATEGORY_TITLE)
model_dict = model_to_dict(new_category)
model_dict['readme'] = model_dict['readme'].raw
expected = {
'id': new_category.pk,
'title': new_category.title,
'type': new_category.type,
'parent': self.category.pk,
'description': new_category.description,
'readme': new_category.readme.raw,
'public_guest_access': False,
'submit_status': SODAR_CONSTANTS['SUBMIT_STATUS_OK'],
'full_title': self.category.title + ' / ' + new_category.title,
'sodar_uuid': new_category.sodar_uuid,
}
self.assertEqual(model_dict, expected)
# Assert role assignment
self.assertEqual(
RoleAssignment.objects.filter(
project=new_category, user=self.user, role=self.role_owner
).count(),
1,
)
# Assert API response
expected = {
'title': NEW_CATEGORY_TITLE,
'type': PROJECT_TYPE_CATEGORY,
'parent': str(self.category.sodar_uuid),
'description': new_category.description,
'readme': new_category.readme.raw,
'public_guest_access': False,
'sodar_uuid': str(new_category.sodar_uuid),
}
self.assertEqual(json.loads(response.content), expected)
def test_create_project(self):
"""Test creating a project under an existing category"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse('projectroles:api_project_create')
post_data = {
'title': NEW_PROJECT_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': str(self.category.sodar_uuid),
'description': 'description',
'readme': 'readme',
'public_guest_access': False,
'owner': str(self.user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 201, msg=response.content)
self.assertEqual(Project.objects.count(), 3)
# Assert object content
new_project = Project.objects.get(title=NEW_PROJECT_TITLE)
model_dict = model_to_dict(new_project)
model_dict['readme'] = model_dict['readme'].raw
expected = {
'id': new_project.pk,
'title': new_project.title,
'type': new_project.type,
'parent': self.category.pk,
'description': new_project.description,
'readme': new_project.readme.raw,
'public_guest_access': False,
'submit_status': SODAR_CONSTANTS['SUBMIT_STATUS_OK'],
'full_title': self.category.title + ' / ' + new_project.title,
'sodar_uuid': new_project.sodar_uuid,
}
self.assertEqual(model_dict, expected)
# Assert role assignment
self.assertEqual(
RoleAssignment.objects.filter(
project=new_project, user=self.user, role=self.role_owner
).count(),
1,
)
# Assert API response
expected = {
'title': NEW_PROJECT_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': str(self.category.sodar_uuid),
'description': new_project.description,
'readme': new_project.readme.raw,
'public_guest_access': False,
'sodar_uuid': str(new_project.sodar_uuid),
}
self.assertEqual(json.loads(response.content), expected)
def test_create_project_root(self):
"""Test creating a project in root (should fail)"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse('projectroles:api_project_create')
post_data = {
'title': NEW_PROJECT_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': None,
'description': 'description',
'readme': 'readme',
'public_guest_access': False,
'owner': str(self.user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 400)
self.assertEqual(Project.objects.count(), 2)
@override_settings(PROJECTROLES_DISABLE_CATEGORIES=True)
def test_create_project_disable_categories(self):
"""Test creating a project in root with disabled categories"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse('projectroles:api_project_create')
post_data = {
'title': NEW_PROJECT_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': '',
'description': 'description',
'readme': 'readme',
'public_guest_access': False,
'owner': str(self.user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 201, msg=response.content)
self.assertEqual(Project.objects.count(), 3)
def test_create_project_duplicate_title(self):
"""Test creating a project with a title already in category (should fail)"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse('projectroles:api_project_create')
post_data = {
'title': self.project.title,
'type': PROJECT_TYPE_PROJECT,
'parent': str(self.category.sodar_uuid),
'description': 'description',
'readme': 'readme',
'public_guest_access': False,
'owner': str(self.user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 400)
self.assertEqual(Project.objects.count(), 2)
def test_create_project_unknown_user(self):
"""Test creating a project with a non-existent user (should fail)"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse('projectroles:api_project_create')
post_data = {
'title': NEW_PROJECT_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': str(self.category.sodar_uuid),
'description': 'description',
'readme': 'readme',
'public_guest_access': False,
'owner': INVALID_UUID,
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 400)
self.assertEqual(Project.objects.count(), 2)
def test_create_project_unknown_parent(self):
"""Test creating a project with a non-existent parent category (should fail)"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse('projectroles:api_project_create')
post_data = {
'title': NEW_PROJECT_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': INVALID_UUID,
'description': 'description',
'readme': 'readme',
'public_guest_access': False,
'owner': str(self.user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 400)
self.assertEqual(Project.objects.count(), 2)
def test_create_project_invalid_parent(self):
"""Test creating a project with a project as parent (should fail)"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse('projectroles:api_project_create')
post_data = {
'title': NEW_PROJECT_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': str(self.project.sodar_uuid),
'description': 'description',
'readme': 'readme',
'public_guest_access': False,
'owner': str(self.user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 400)
self.assertEqual(Project.objects.count(), 2)
@override_settings(PROJECTROLES_SITE_MODE=SITE_MODE_TARGET)
def test_create_project_target_enabled(self):
"""Test creating a project as TARGET with target creation allowed"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse('projectroles:api_project_create')
post_data = {
'title': NEW_PROJECT_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': str(self.category.sodar_uuid),
'description': 'description',
'readme': 'readme',
'public_guest_access': False,
'owner': str(self.user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 201, msg=response.content)
self.assertEqual(Project.objects.count(), 3)
@override_settings(PROJECTROLES_SITE_MODE=SITE_MODE_TARGET)
def test_create_project_target_remote(self):
"""Test creating a project as TARGET under a remote category (should fail)"""
# Create source site
source_site = self._make_site(
name=REMOTE_SITE_NAME,
url=REMOTE_SITE_URL,
mode=SITE_MODE_SOURCE,
description=REMOTE_SITE_DESC,
secret=REMOTE_SITE_SECRET,
)
# Make category remote
self._make_remote_project(
project_uuid=self.category.sodar_uuid,
project=self.category,
site=source_site,
level=SODAR_CONSTANTS['REMOTE_LEVEL_READ_ROLES'],
)
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse('projectroles:api_project_create')
post_data = {
'title': NEW_PROJECT_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': str(self.category.sodar_uuid),
'description': 'description',
'readme': 'readme',
'public_guest_access': False,
'owner': str(self.user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 403, msg=response.content)
self.assertEqual(Project.objects.count(), 2)
@override_settings(
PROJECTROLES_SITE_MODE=SITE_MODE_TARGET,
PROJECTROLES_TARGET_CREATE=False,
)
def test_create_project_target_disabled(self):
"""Test creating a project as TARGET with target creation disallowed (should fail)"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse('projectroles:api_project_create')
post_data = {
'title': NEW_PROJECT_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': str(self.category.sodar_uuid),
'description': 'description',
'readme': 'readme',
'public_guest_access': False,
'owner': str(self.user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 403, msg=response.content)
self.assertEqual(Project.objects.count(), 2)
class TestProjectUpdateAPIView(
RemoteSiteMixin, RemoteProjectMixin, TestCoreAPIViewsBase
):
"""Tests for ProjectUpdateAPIView"""
def test_put_category(self):
"""Test put() for category updating"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse(
'projectroles:api_project_update',
kwargs={'project': self.category.sodar_uuid},
)
put_data = {
'title': UPDATED_TITLE,
'type': PROJECT_TYPE_CATEGORY,
'parent': '',
'description': UPDATED_DESC,
'readme': UPDATED_README,
'public_guest_access': True,
'owner': str(self.user.sodar_uuid),
}
response = self.request_knox(url, method='PUT', data=put_data)
# Assert response and project status
self.assertEqual(response.status_code, 200, msg=response.content)
self.assertEqual(Project.objects.count(), 2)
# Assert object content
self.category.refresh_from_db()
model_dict = model_to_dict(self.category)
model_dict['readme'] = model_dict['readme'].raw
expected = {
'id': self.category.pk,
'title': UPDATED_TITLE,
'type': PROJECT_TYPE_CATEGORY,
'parent': None,
'description': UPDATED_DESC,
'readme': UPDATED_README,
'public_guest_access': True,
'submit_status': SODAR_CONSTANTS['SUBMIT_STATUS_OK'],
'full_title': UPDATED_TITLE,
'sodar_uuid': self.category.sodar_uuid,
}
self.assertEqual(model_dict, expected)
# Assert API response
expected = {
'title': UPDATED_TITLE,
'type': PROJECT_TYPE_CATEGORY,
'parent': None,
'submit_status': SODAR_CONSTANTS['SUBMIT_STATUS_OK'],
'description': UPDATED_DESC,
'readme': UPDATED_README,
'public_guest_access': True,
'roles': {
str(self.category.get_owner().sodar_uuid): {
'role': PROJECT_ROLE_OWNER,
'user': self.get_serialized_user(self.user),
'sodar_uuid': str(self.category.get_owner().sodar_uuid),
}
},
'sodar_uuid': str(self.category.sodar_uuid),
}
self.assertEqual(json.loads(response.content), expected)
def test_put_project(self):
"""Test put() for project updating"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse(
'projectroles:api_project_update',
kwargs={'project': self.project.sodar_uuid},
)
put_data = {
'title': UPDATED_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': str(self.category.sodar_uuid),
'description': UPDATED_DESC,
'readme': UPDATED_README,
'public_guest_access': True,
'owner': str(self.user.sodar_uuid),
}
response = self.request_knox(url, method='PUT', data=put_data)
# Assert response and project status
self.assertEqual(response.status_code, 200, msg=response.content)
self.assertEqual(Project.objects.count(), 2)
# Assert object content
self.project.refresh_from_db()
model_dict = model_to_dict(self.project)
model_dict['readme'] = model_dict['readme'].raw
expected = {
'id': self.project.pk,
'title': UPDATED_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': self.category.pk,
'description': UPDATED_DESC,
'readme': UPDATED_README,
'public_guest_access': True,
'submit_status': SODAR_CONSTANTS['SUBMIT_STATUS_OK'],
'full_title': self.category.title + ' / ' + UPDATED_TITLE,
'sodar_uuid': self.project.sodar_uuid,
}
self.assertEqual(model_dict, expected)
# Assert API response
expected = {
'title': UPDATED_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': str(self.category.sodar_uuid),
'submit_status': SODAR_CONSTANTS['SUBMIT_STATUS_OK'],
'description': UPDATED_DESC,
'readme': UPDATED_README,
'public_guest_access': True,
'roles': {
str(self.project.get_owner().sodar_uuid): {
'role': PROJECT_ROLE_OWNER,
'user': self.get_serialized_user(self.user),
'sodar_uuid': str(self.project.get_owner().sodar_uuid),
}
},
'sodar_uuid': str(self.project.sodar_uuid),
}
self.assertEqual(json.loads(response.content), expected)
def test_patch_category(self):
"""Test patch() for updating category metadata"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse(
'projectroles:api_project_update',
kwargs={'project': self.category.sodar_uuid},
)
patch_data = {
'title': UPDATED_TITLE,
'description': UPDATED_DESC,
'readme': UPDATED_README,
}
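        # NOTE: Fields omitted from the PATCH data should retain their values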
response = self.request_knox(url, method='PATCH', data=patch_data)
# Assert response and project status
self.assertEqual(response.status_code, 200, msg=response.content)
self.assertEqual(Project.objects.count(), 2)
# Assert object content
self.category.refresh_from_db()
model_dict = model_to_dict(self.category)
model_dict['readme'] = model_dict['readme'].raw
expected = {
'id': self.category.pk,
'title': UPDATED_TITLE,
'type': PROJECT_TYPE_CATEGORY,
'parent': None,
'description': UPDATED_DESC,
'readme': UPDATED_README,
'public_guest_access': False,
'submit_status': SODAR_CONSTANTS['SUBMIT_STATUS_OK'],
'full_title': UPDATED_TITLE,
'sodar_uuid': self.category.sodar_uuid,
}
self.assertEqual(model_dict, expected)
# Assert role assignment
self.assertEqual(self.category.get_owner().user, self.user)
# Assert API response
expected = {
'title': UPDATED_TITLE,
'type': PROJECT_TYPE_CATEGORY,
'parent': None,
'submit_status': SODAR_CONSTANTS['SUBMIT_STATUS_OK'],
'description': UPDATED_DESC,
'readme': UPDATED_README,
'public_guest_access': False,
'roles': {
str(self.category.get_owner().sodar_uuid): {
'role': PROJECT_ROLE_OWNER,
'user': self.get_serialized_user(self.user),
'sodar_uuid': str(self.category.get_owner().sodar_uuid),
}
},
'sodar_uuid': str(self.category.sodar_uuid),
}
self.assertEqual(json.loads(response.content), expected)
def test_patch_project(self):
"""Test patch() for updating project metadata"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse(
'projectroles:api_project_update',
kwargs={'project': self.project.sodar_uuid},
)
patch_data = {
'title': UPDATED_TITLE,
'description': UPDATED_DESC,
'readme': UPDATED_README,
'public_guest_access': True,
}
response = self.request_knox(url, method='PATCH', data=patch_data)
# Assert response and project status
self.assertEqual(response.status_code, 200, msg=response.content)
self.assertEqual(Project.objects.count(), 2)
# Assert object content
self.project.refresh_from_db()
model_dict = model_to_dict(self.project)
model_dict['readme'] = model_dict['readme'].raw
expected = {
'id': self.project.pk,
'title': UPDATED_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': self.category.pk,
'description': UPDATED_DESC,
'readme': UPDATED_README,
'public_guest_access': True,
'submit_status': SODAR_CONSTANTS['SUBMIT_STATUS_OK'],
'full_title': self.category.title + ' / ' + UPDATED_TITLE,
'sodar_uuid': self.project.sodar_uuid,
}
self.assertEqual(model_dict, expected)
# Assert role assignment
self.assertEqual(self.project.get_owner().user, self.user)
# Assert API response
expected = {
'title': UPDATED_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': str(self.category.sodar_uuid),
'submit_status': SODAR_CONSTANTS['SUBMIT_STATUS_OK'],
'description': UPDATED_DESC,
'readme': UPDATED_README,
'public_guest_access': True,
'roles': {
str(self.project.get_owner().sodar_uuid): {
'role': PROJECT_ROLE_OWNER,
'user': self.get_serialized_user(self.user),
'sodar_uuid': str(self.project.get_owner().sodar_uuid),
}
},
'sodar_uuid': str(self.project.sodar_uuid),
}
self.assertEqual(json.loads(response.content), expected)
def test_patch_project_owner(self):
"""Test patch() for updating project owner (should fail)"""
new_owner = self.make_user('new_owner')
url = reverse(
'projectroles:api_project_update',
kwargs={'project': self.project.sodar_uuid},
)
patch_data = {'owner': str(new_owner.sodar_uuid)}
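        # NOTE: Owner changes are expected to be rejected here; ownership
        # transfers are handled by the separate owner transfer API view
        # (tested below)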
response = self.request_knox(url, method='PATCH', data=patch_data)
# Assert response
self.assertEqual(response.status_code, 400, msg=response.content)
def test_patch_project_move(self):
"""Test patch() for moving project under a different category"""
# Assert preconditions
self.assertEqual(
self.project.full_title,
self.category.title + ' / ' + self.project.title,
)
new_category = self._make_project(
'NewCategory', PROJECT_TYPE_CATEGORY, None
)
self._make_assignment(new_category, self.user, self.role_owner)
url = reverse(
'projectroles:api_project_update',
kwargs={'project': self.project.sodar_uuid},
)
patch_data = {'parent': str(new_category.sodar_uuid)}
response = self.request_knox(url, method='PATCH', data=patch_data)
# Assert response
self.assertEqual(response.status_code, 200, msg=response.content)
# Assert object content
self.project.refresh_from_db()
model_dict = model_to_dict(self.project)
self.assertEqual(model_dict['parent'], new_category.pk)
# Assert role assignment
self.assertEqual(self.project.get_owner().user, self.user)
# Assert child project full title update
self.assertEqual(
self.project.full_title,
new_category.title + ' / ' + self.project.title,
)
# Assert API response
self.assertEqual(
json.loads(response.content)['parent'], str(new_category.sodar_uuid)
)
def test_patch_project_move_unallowed(self):
"""Test patch() for moving project without permissions (should fail)"""
new_category = self._make_project(
'NewCategory', PROJECT_TYPE_CATEGORY, None
)
new_owner = self.make_user('new_owner')
self._make_assignment(new_category, new_owner, self.role_owner)
url = reverse(
'projectroles:api_project_update',
kwargs={'project': self.project.sodar_uuid},
)
patch_data = {'parent': str(new_category.sodar_uuid)}
        # Disable superuser status for self.user and perform request
self.user.is_superuser = False
self.user.save()
response = self.request_knox(url, method='PATCH', data=patch_data)
# Assert response
self.assertEqual(response.status_code, 403, msg=response.content)
def test_patch_project_move_root(self):
"""Test patch() for moving project without permissions (should fail)"""
new_category = self._make_project(
'NewCategory', PROJECT_TYPE_CATEGORY, None
)
new_owner = self.make_user('new_owner')
self._make_assignment(new_category, new_owner, self.role_owner)
url = reverse(
'projectroles:api_project_update',
kwargs={'project': self.project.sodar_uuid},
)
patch_data = {'parent': ''}
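        # NOTE: self.user retains superuser status here, so moving the project
        # to root should be allowed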
response = self.request_knox(url, method='PATCH', data=patch_data)
# Assert response
self.assertEqual(response.status_code, 200, msg=response.content)
def test_patch_project_move_root_unallowed(self):
"""Test patch() for moving project to root without permissions (should fail)"""
new_category = self._make_project(
'NewCategory', PROJECT_TYPE_CATEGORY, None
)
new_owner = self.make_user('new_owner')
self._make_assignment(new_category, new_owner, self.role_owner)
url = reverse(
'projectroles:api_project_update',
kwargs={'project': self.project.sodar_uuid},
)
patch_data = {'parent': ''}
        # Disable superuser status for self.user and perform request
self.user.is_superuser = False
self.user.save()
response = self.request_knox(url, method='PATCH', data=patch_data)
# Assert response
self.assertEqual(response.status_code, 403, msg=response.content)
def test_patch_project_move_child(self):
"""Test patch() for moving a category inside its child (should fail)"""
new_category = self._make_project(
'NewCategory', PROJECT_TYPE_CATEGORY, self.category
)
self._make_assignment(new_category, self.user, self.role_owner)
url = reverse(
'projectroles:api_project_update',
kwargs={'project': self.category.sodar_uuid},
)
patch_data = {'parent': str(new_category.sodar_uuid)}
response = self.request_knox(url, method='PATCH', data=patch_data)
# Assert response
self.assertEqual(response.status_code, 400, msg=response.content)
def test_patch_project_type_change(self):
"""Test patch() with a changed project type (should fail)"""
url = reverse(
'projectroles:api_project_update',
kwargs={'project': self.project.sodar_uuid},
)
patch_data = {'type': PROJECT_TYPE_CATEGORY}
response = self.request_knox(url, method='PATCH', data=patch_data)
# Assert response
self.assertEqual(response.status_code, 400, msg=response.content)
@override_settings(PROJECTROLES_SITE_MODE=SITE_MODE_TARGET)
def test_patch_project_remote(self):
"""Test patch() for updating remote project metadata (should fail)"""
# Create source site and remote project
source_site = self._make_site(
name=REMOTE_SITE_NAME,
url=REMOTE_SITE_URL,
mode=SITE_MODE_SOURCE,
description=REMOTE_SITE_DESC,
secret=REMOTE_SITE_SECRET,
)
self._make_remote_project(
project_uuid=self.project.sodar_uuid,
project=self.project,
site=source_site,
level=SODAR_CONSTANTS['REMOTE_LEVEL_READ_ROLES'],
)
url = reverse(
'projectroles:api_project_update',
kwargs={'project': self.project.sodar_uuid},
)
patch_data = {
'title': UPDATED_TITLE,
'description': UPDATED_DESC,
'readme': UPDATED_README,
}
response = self.request_knox(url, method='PATCH', data=patch_data)
# Assert response and project status
self.assertEqual(response.status_code, 400, msg=response.content)
class TestRoleAssignmentCreateAPIView(
RemoteSiteMixin, RemoteProjectMixin, TestCoreAPIViewsBase
):
"""Tests for RoleAssignmentCreateAPIView"""
def setUp(self):
super().setUp()
self.assign_user = self.make_user('assign_user')
def test_create_contributor(self):
"""Test creating a contributor role for user"""
# Assert preconditions
self.assertEqual(
RoleAssignment.objects.filter(project=self.project).count(), 1
)
url = reverse(
'projectroles:api_role_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'role': PROJECT_ROLE_CONTRIBUTOR,
'user': str(self.assign_user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and role status
self.assertEqual(response.status_code, 201, msg=response.content)
self.assertEqual(
RoleAssignment.objects.filter(project=self.project).count(), 2
)
# Assert object
role_as = RoleAssignment.objects.filter(
project=self.project,
role=self.role_contributor,
user=self.assign_user,
).first()
self.assertIsNotNone(role_as)
# Assert API response
expected = {
'project': str(self.project.sodar_uuid),
'role': PROJECT_ROLE_CONTRIBUTOR,
'user': str(self.assign_user.sodar_uuid),
'sodar_uuid': str(role_as.sodar_uuid),
}
self.assertEqual(json.loads(response.content), expected)
def test_create_owner(self):
"""Test creating an owner role (should fail)"""
url = reverse(
'projectroles:api_role_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'role': PROJECT_ROLE_OWNER,
'user': str(self.assign_user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response
self.assertEqual(response.status_code, 400, msg=response.content)
def test_create_delegate(self):
"""Test creating a delegate role for user as owner"""
        # Disable superuser status for self.user
self.user.is_superuser = False
self.user.save()
# Assert preconditions
self.assertEqual(
RoleAssignment.objects.filter(project=self.project).count(), 1
)
url = reverse(
'projectroles:api_role_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'role': PROJECT_ROLE_DELEGATE,
'user': str(self.assign_user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 201, msg=response.content)
self.assertEqual(
RoleAssignment.objects.filter(project=self.project).count(), 2
)
# Assert object
role_as = RoleAssignment.objects.filter(
project=self.project, role=self.role_delegate, user=self.assign_user
).first()
self.assertIsNotNone(role_as)
def test_create_delegate_unauthorized(self):
"""Test creating a delegate role without authorization (should fail)"""
        # Create new user and grant contributor role
new_user = self.make_user('new_user')
self._make_assignment(self.project, new_user, self.role_contributor)
new_user_token = self.get_token(new_user)
url = reverse(
'projectroles:api_role_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'role': PROJECT_ROLE_DELEGATE,
'user': str(self.assign_user.sodar_uuid),
}
response = self.request_knox(
url, method='POST', data=post_data, token=new_user_token
)
# Assert response
self.assertEqual(response.status_code, 403, msg=response.content)
def test_create_delegate_limit(self):
"""Test creating a delegate role with limit reached (should fail)"""
# Create new user and grant delegate role
new_user = self.make_user('new_user')
self._make_assignment(self.project, new_user, self.role_delegate)
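        # NOTE: The default PROJECTROLES_DELEGATE_LIMIT (assumed to be 1) is
        # already reached by new_user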
url = reverse(
'projectroles:api_role_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'role': PROJECT_ROLE_DELEGATE,
'user': str(self.assign_user.sodar_uuid),
}
# NOTE: Post as owner
response = self.request_knox(url, method='POST', data=post_data)
# Assert response
self.assertEqual(response.status_code, 400, msg=response.content)
def test_create_delegate_limit_inherit(self):
"""Test creating a delegate role existing role for inherited owner"""
# Set up category owner
new_user = self.make_user('new_user')
        self.cat_owner_as.user = new_user
        self.cat_owner_as.save()
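        # NOTE: The inherited category owner role should not count towards the
        # delegate limit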
url = reverse(
'projectroles:api_role_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'role': PROJECT_ROLE_DELEGATE,
'user': str(self.assign_user.sodar_uuid),
}
# NOTE: Post as owner
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 201, msg=response.content)
self.assertEqual(
RoleAssignment.objects.filter(project=self.project).count(), 2
)
# Assert object
role_as = RoleAssignment.objects.filter(
project=self.project, role=self.role_delegate, user=self.assign_user
).first()
self.assertIsNotNone(role_as)
def test_create_delegate_category(self):
"""Test creating a non-owner role for category"""
url = reverse(
'projectroles:api_role_create',
kwargs={'project': self.category.sodar_uuid},
)
post_data = {
'role': PROJECT_ROLE_DELEGATE,
'user': str(self.assign_user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response
self.assertEqual(response.status_code, 201, msg=response.content)
def test_create_role_existing(self):
"""Test creating a role for user already in the project"""
# Assert preconditions
self.assertEqual(
RoleAssignment.objects.filter(project=self.project).count(), 1
)
url = reverse(
'projectroles:api_role_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'role': PROJECT_ROLE_CONTRIBUTOR,
'user': str(self.assign_user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and role status
self.assertEqual(response.status_code, 201, msg=response.content)
self.assertEqual(
RoleAssignment.objects.filter(project=self.project).count(), 2
)
# Post again
post_data = {
'role': PROJECT_ROLE_GUEST,
'user': str(self.assign_user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 400, msg=response.content)
self.assertEqual(
RoleAssignment.objects.filter(project=self.project).count(), 2
)
@override_settings(PROJECTROLES_SITE_MODE=SITE_MODE_TARGET)
def test_create_remote(self):
"""Test creating a role for a remote project (should fail)"""
# Create source site and remote project
source_site = self._make_site(
name=REMOTE_SITE_NAME,
url=REMOTE_SITE_URL,
mode=SITE_MODE_SOURCE,
description=REMOTE_SITE_DESC,
secret=REMOTE_SITE_SECRET,
)
self._make_remote_project(
project_uuid=self.project.sodar_uuid,
project=self.project,
site=source_site,
level=SODAR_CONSTANTS['REMOTE_LEVEL_READ_ROLES'],
)
# Assert preconditions
self.assertEqual(
RoleAssignment.objects.filter(project=self.project).count(), 1
)
url = reverse(
'projectroles:api_role_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'role': PROJECT_ROLE_CONTRIBUTOR,
'user': str(self.assign_user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and role status
self.assertEqual(response.status_code, 400, msg=response.content)
self.assertEqual(
RoleAssignment.objects.filter(project=self.project).count(), 1
)
class TestRoleAssignmentUpdateAPIView(
RemoteSiteMixin, RemoteProjectMixin, TestCoreAPIViewsBase
):
"""Tests for RoleAssignmentUpdateAPIView"""
def setUp(self):
super().setUp()
self.assign_user = self.make_user('assign_user')
self.update_as = self._make_assignment(
self.project, self.assign_user, self.role_contributor
)
def test_put_role(self):
"""Test put() for role assignment updating"""
# Assert preconditions
self.assertEqual(RoleAssignment.objects.count(), 3)
url = reverse(
'projectroles:api_role_update',
kwargs={'roleassignment': self.update_as.sodar_uuid},
)
put_data = {
'role': PROJECT_ROLE_GUEST,
'user': str(self.assign_user.sodar_uuid),
}
response = self.request_knox(url, method='PUT', data=put_data)
# Assert response and role status
self.assertEqual(response.status_code, 200, msg=response.content)
self.assertEqual(RoleAssignment.objects.count(), 3)
# Assert object content
self.update_as.refresh_from_db()
model_dict = model_to_dict(self.update_as)
expected = {
'id': self.update_as.pk,
'project': self.project.pk,
'role': self.role_guest.pk,
'user': self.assign_user.pk,
'sodar_uuid': self.update_as.sodar_uuid,
}
self.assertEqual(model_dict, expected)
# Assert API response
expected = {
'project': str(self.project.sodar_uuid),
'role': PROJECT_ROLE_GUEST,
'user': str(self.assign_user.sodar_uuid),
'sodar_uuid': str(self.update_as.sodar_uuid),
}
self.assertEqual(json.loads(response.content), expected)
def test_put_delegate(self):
"""Test put() for delegate role assignment"""
url = reverse(
'projectroles:api_role_update',
kwargs={'roleassignment': self.update_as.sodar_uuid},
)
put_data = {
'role': PROJECT_ROLE_DELEGATE,
'user': str(self.assign_user.sodar_uuid),
}
response = self.request_knox(url, method='PUT', data=put_data)
# Assert response
self.assertEqual(response.status_code, 200, msg=response.content)
# Assert object content
self.update_as.refresh_from_db()
model_dict = model_to_dict(self.update_as)
expected = {
'id': self.update_as.pk,
'project': self.project.pk,
'role': self.role_delegate.pk,
'user': self.assign_user.pk,
'sodar_uuid': self.update_as.sodar_uuid,
}
self.assertEqual(model_dict, expected)
# Assert API response
expected = {
'project': str(self.project.sodar_uuid),
'role': PROJECT_ROLE_DELEGATE,
'user': str(self.assign_user.sodar_uuid),
'sodar_uuid': str(self.update_as.sodar_uuid),
}
self.assertEqual(json.loads(response.content), expected)
def test_put_owner(self):
"""Test put() for owner role assignment (should fail)"""
url = reverse(
'projectroles:api_role_update',
kwargs={'roleassignment': self.update_as.sodar_uuid},
)
put_data = {
'role': PROJECT_ROLE_OWNER,
'user': str(self.assign_user.sodar_uuid),
}
response = self.request_knox(url, method='PUT', data=put_data)
# Assert response
self.assertEqual(response.status_code, 400, msg=response.content)
def test_put_change_user(self):
"""Test put() with a different user (should fail)"""
new_user = self.make_user('new_user')
url = reverse(
'projectroles:api_role_update',
kwargs={'roleassignment': self.update_as.sodar_uuid},
)
put_data = {
'role': PROJECT_ROLE_GUEST,
'user': str(new_user.sodar_uuid),
}
response = self.request_knox(url, method='PUT', data=put_data)
# Assert response
self.assertEqual(response.status_code, 400, msg=response.content)
def test_patch_role(self):
"""Test patch() for role assignment updating"""
# Assert preconditions
self.assertEqual(RoleAssignment.objects.count(), 3)
url = reverse(
'projectroles:api_role_update',
kwargs={'roleassignment': self.update_as.sodar_uuid},
)
patch_data = {'role': PROJECT_ROLE_GUEST}
response = self.request_knox(url, method='PATCH', data=patch_data)
# Assert response and role status
self.assertEqual(response.status_code, 200, msg=response.content)
self.assertEqual(RoleAssignment.objects.count(), 3)
# Assert object content
self.update_as.refresh_from_db()
model_dict = model_to_dict(self.update_as)
expected = {
'id': self.update_as.pk,
'project': self.project.pk,
'role': self.role_guest.pk,
'user': self.assign_user.pk,
'sodar_uuid': self.update_as.sodar_uuid,
}
self.assertEqual(model_dict, expected)
# Assert API response
expected = {
'project': str(self.project.sodar_uuid),
'role': PROJECT_ROLE_GUEST,
'user': str(self.assign_user.sodar_uuid),
'sodar_uuid': str(self.update_as.sodar_uuid),
}
self.assertEqual(json.loads(response.content), expected)
def test_patch_change_user(self):
"""Test patch() with a different user (should fail)"""
new_user = self.make_user('new_user')
url = reverse(
'projectroles:api_role_update',
kwargs={'roleassignment': self.update_as.sodar_uuid},
)
patch_data = {'user': str(new_user.sodar_uuid)}
response = self.request_knox(url, method='PATCH', data=patch_data)
# Assert response
self.assertEqual(response.status_code, 400, msg=response.content)
@override_settings(PROJECTROLES_SITE_MODE=SITE_MODE_TARGET)
def test_patch_role_remote(self):
"""Test patch() for updating a role in a remote project (should fail)"""
# Create source site and remote project
source_site = self._make_site(
name=REMOTE_SITE_NAME,
url=REMOTE_SITE_URL,
mode=SITE_MODE_SOURCE,
description=REMOTE_SITE_DESC,
secret=REMOTE_SITE_SECRET,
)
self._make_remote_project(
project_uuid=self.project.sodar_uuid,
project=self.project,
site=source_site,
level=SODAR_CONSTANTS['REMOTE_LEVEL_READ_ROLES'],
)
url = reverse(
'projectroles:api_role_update',
kwargs={'roleassignment': self.update_as.sodar_uuid},
)
patch_data = {'role': PROJECT_ROLE_GUEST}
response = self.request_knox(url, method='PATCH', data=patch_data)
# Assert response and role status
self.assertEqual(response.status_code, 400, msg=response.content)
class TestRoleAssignmentDestroyAPIView(
RemoteSiteMixin, RemoteProjectMixin, TestCoreAPIViewsBase
):
"""Tests for RoleAssignmentDestroyAPIView"""
def setUp(self):
super().setUp()
self.assign_user = self.make_user('assign_user')
self.update_as = self._make_assignment(
self.project, self.assign_user, self.role_contributor
)
def test_delete_role(self):
"""Test delete for role assignment deletion"""
# Assert preconditions
self.assertEqual(RoleAssignment.objects.count(), 3)
url = reverse(
'projectroles:api_role_destroy',
kwargs={'roleassignment': self.update_as.sodar_uuid},
)
response = self.request_knox(url, method='DELETE')
# Assert response and role status
self.assertEqual(response.status_code, 204, msg=response.content)
self.assertEqual(RoleAssignment.objects.count(), 2)
self.assertEqual(
RoleAssignment.objects.filter(
project=self.project, user=self.assign_user
).count(),
0,
)
def test_delete_delegate_unauthorized(self):
"""Test delete for delegate deletion without perms (should fail)"""
new_user = self.make_user('new_user')
delegate_as = self._make_assignment(
self.project, new_user, self.role_delegate
)
# Assert preconditions
self.assertEqual(RoleAssignment.objects.count(), 4)
url = reverse(
'projectroles:api_role_destroy',
kwargs={'roleassignment': delegate_as.sodar_uuid},
)
        # NOTE: Perform request as contributor user
token = self.get_token(self.assign_user)
response = self.request_knox(url, method='DELETE', token=token)
# Assert response and role status
self.assertEqual(response.status_code, 403, msg=response.content)
self.assertEqual(RoleAssignment.objects.count(), 4)
def test_delete_owner(self):
"""Test delete for owner deletion (should fail)"""
# Assert preconditions
self.assertEqual(RoleAssignment.objects.count(), 3)
url = reverse(
'projectroles:api_role_destroy',
kwargs={'roleassignment': self.owner_as.sodar_uuid},
)
response = self.request_knox(url, method='DELETE')
# Assert response and project status
self.assertEqual(response.status_code, 400, msg=response.content)
self.assertEqual(RoleAssignment.objects.count(), 3)
@override_settings(PROJECTROLES_SITE_MODE=SITE_MODE_TARGET)
def test_delete_remote(self):
"""Test delete for a remote project (should fail)"""
# Create source site and remote project
source_site = self._make_site(
name=REMOTE_SITE_NAME,
url=REMOTE_SITE_URL,
mode=SITE_MODE_SOURCE,
description=REMOTE_SITE_DESC,
secret=REMOTE_SITE_SECRET,
)
self._make_remote_project(
project_uuid=self.project.sodar_uuid,
project=self.project,
site=source_site,
level=SODAR_CONSTANTS['REMOTE_LEVEL_READ_ROLES'],
)
# Assert preconditions
self.assertEqual(RoleAssignment.objects.count(), 3)
url = reverse(
'projectroles:api_role_destroy',
kwargs={'roleassignment': self.update_as.sodar_uuid},
)
response = self.request_knox(url, method='DELETE')
# Assert response and role status
self.assertEqual(response.status_code, 400, msg=response.content)
self.assertEqual(RoleAssignment.objects.count(), 3)
class TestRoleAssignmentOwnerTransferAPIView(
RemoteSiteMixin, RemoteProjectMixin, TestCoreAPIViewsBase
):
"""Tests for RoleAssignmentOwnerTransferAPIView"""
def setUp(self):
super().setUp()
self.assign_user = self.make_user('assign_user')
def test_transfer_owner(self):
"""Test transferring ownership for a project"""
# Assign role to new user
self._make_assignment(
self.project, self.assign_user, self.role_contributor
)
# Assert preconditions
self.assertEqual(self.project.get_owner().user, self.user)
url = reverse(
'projectroles:api_role_owner_transfer',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'new_owner': self.assign_user.username,
'old_owner_role': self.role_contributor.name,
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 200, msg=response.content)
self.assertEqual(self.project.get_owner().user, self.assign_user)
def test_transfer_owner_category(self):
"""Test transferring ownership for a category"""
# Assign role to new user
self._make_assignment(
self.category, self.assign_user, self.role_contributor
)
# Assert preconditions
self.assertEqual(self.category.get_owner().user, self.user)
url = reverse(
'projectroles:api_role_owner_transfer',
kwargs={'project': self.category.sodar_uuid},
)
post_data = {
'new_owner': self.assign_user.username,
'old_owner_role': self.role_contributor.name,
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 200, msg=response.content)
self.assertEqual(self.category.get_owner().user, self.assign_user)
def test_transfer_owner_inherit(self):
"""Test transferring ownership to an inherited owner"""
# Assign role to new user
self._make_assignment(
self.project, self.assign_user, self.role_contributor
)
# Set alt owner to current project, make self.user inherited owner
alt_owner = self.make_user('alt_owner')
self.owner_as.user = alt_owner
self.owner_as.save()
# Assert preconditions
self.assertEqual(self.project.get_owner().user, alt_owner)
self.assertEqual(
self.project.get_owners(inherited_only=True)[0].user, self.user
)
url = reverse(
'projectroles:api_role_owner_transfer',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'new_owner': self.user.username,
'old_owner_role': self.role_contributor.name,
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 200, msg=response.content)
self.assertEqual(self.project.get_owner().user, self.user)
old_owner_as = RoleAssignment.objects.get(
project=self.project, user=alt_owner
)
self.assertEqual(old_owner_as.role, self.role_contributor)
def test_transfer_owner_no_roles(self):
"""Test transferring ownership to user with no existing roles (should fail)"""
# NOTE: No role given to user
url = reverse(
'projectroles:api_role_owner_transfer',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'new_owner': self.assign_user.username,
'old_owner_role': self.role_contributor.name,
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 400, msg=response.content)
@override_settings(PROJECTROLES_SITE_MODE=SITE_MODE_TARGET)
def test_transfer_remote(self):
"""Test transferring ownership for a remote project (should fail)"""
# Create source site and remote project
source_site = self._make_site(
name=REMOTE_SITE_NAME,
url=REMOTE_SITE_URL,
mode=SITE_MODE_SOURCE,
description=REMOTE_SITE_DESC,
secret=REMOTE_SITE_SECRET,
)
self._make_remote_project(
project_uuid=self.project.sodar_uuid,
project=self.project,
site=source_site,
level=SODAR_CONSTANTS['REMOTE_LEVEL_READ_ROLES'],
)
# Assign role to new user
self._make_assignment(
self.project, self.assign_user, self.role_contributor
)
# Assert preconditions
self.assertEqual(self.project.get_owner().user, self.user)
url = reverse(
'projectroles:api_role_owner_transfer',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'new_owner': self.assign_user.username,
'old_owner_role': self.role_contributor.name,
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 400, msg=response.content)
self.assertEqual(self.project.get_owner().user, self.user)
class TestProjectInviteListAPIView(ProjectInviteMixin, TestCoreAPIViewsBase):
"""Tests for ProjectInviteListAPIView"""
def setUp(self):
super().setUp()
# Create invites
self.invite = self._make_invite(
email=INVITE_USER_EMAIL,
project=self.project,
role=self.role_guest,
issuer=self.user,
message='',
secret=build_secret(),
)
self.invite2 = self._make_invite(
email=INVITE_USER2_EMAIL,
project=self.project,
role=self.role_contributor,
issuer=self.user,
message=INVITE_MESSAGE,
secret=build_secret(),
)
def test_get(self):
"""Test ProjectInviteListAPIView get()"""
url = reverse(
'projectroles:api_invite_list',
kwargs={'project': self.project.sodar_uuid},
)
response = self.request_knox(url, token=self.get_token(self.user))
# Assert response
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
self.assertEqual(len(response_data), 2)
expected = [
{
'email': INVITE_USER_EMAIL,
'project': str(self.project.sodar_uuid),
'role': self.role_guest.name,
'issuer': self.get_serialized_user(self.user),
'date_created': self.get_drf_datetime(self.invite.date_created),
'date_expire': self.get_drf_datetime(self.invite.date_expire),
'message': '',
'sodar_uuid': str(self.invite.sodar_uuid),
},
{
'email': INVITE_USER2_EMAIL,
'project': str(self.project.sodar_uuid),
'role': self.role_contributor.name,
'issuer': self.get_serialized_user(self.user),
'date_created': self.get_drf_datetime(
self.invite2.date_created
),
'date_expire': self.get_drf_datetime(self.invite2.date_expire),
'message': INVITE_MESSAGE,
'sodar_uuid': str(self.invite2.sodar_uuid),
},
]
self.assertEqual(response_data, expected)
def test_get_inactive(self):
"""Test get() with an inactive invite"""
self.invite.active = False
self.invite.save()
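        # NOTE: Inactive invites should be excluded from the list response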
url = reverse(
'projectroles:api_invite_list',
kwargs={'project': self.project.sodar_uuid},
)
response = self.request_knox(url, token=self.get_token(self.user))
# Assert response
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
self.assertEqual(len(response_data), 1)
expected = [
{
'email': INVITE_USER2_EMAIL,
'project': str(self.project.sodar_uuid),
'role': self.role_contributor.name,
'issuer': self.get_serialized_user(self.user),
'date_created': self.get_drf_datetime(
self.invite2.date_created
),
'date_expire': self.get_drf_datetime(self.invite2.date_expire),
'message': INVITE_MESSAGE,
'sodar_uuid': str(self.invite2.sodar_uuid),
},
]
self.assertEqual(response_data, expected)
class TestProjectInviteCreateAPIView(
RemoteSiteMixin, RemoteProjectMixin, TestCoreAPIViewsBase
):
"""Tests for ProjectInviteCreateAPIView"""
def test_create(self):
"""Test creating a contributor invite for user"""
# Assert preconditions
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 0
)
url = reverse(
'projectroles:api_invite_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'email': INVITE_USER_EMAIL,
'role': PROJECT_ROLE_CONTRIBUTOR,
'message': INVITE_MESSAGE,
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and role status
self.assertEqual(response.status_code, 201, msg=response.content)
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 1
)
# Assert data
invite = ProjectInvite.objects.first()
self.assertEqual(invite.email, INVITE_USER_EMAIL)
self.assertEqual(invite.role, self.role_contributor)
self.assertEqual(invite.issuer, self.user)
self.assertEqual(invite.message, INVITE_MESSAGE)
# Assert response
expected = {
'email': INVITE_USER_EMAIL,
'project': str(self.project.sodar_uuid),
'role': PROJECT_ROLE_CONTRIBUTOR,
'issuer': self.get_serialized_user(self.user),
'date_created': self.get_drf_datetime(invite.date_created),
'date_expire': self.get_drf_datetime(invite.date_expire),
'message': invite.message,
'sodar_uuid': str(invite.sodar_uuid),
}
self.assertEqual(json.loads(response.content), expected)
self.assertEqual(len(mail.outbox), 1)
def test_create_owner(self):
"""Test creating an invite for an owner role (should fail)"""
# Assert preconditions
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 0
)
url = reverse(
'projectroles:api_invite_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'email': INVITE_USER_EMAIL,
'role': PROJECT_ROLE_OWNER,
'message': INVITE_MESSAGE,
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and data
self.assertEqual(response.status_code, 400, msg=response.content)
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 0
)
self.assertEqual(len(mail.outbox), 0)
def test_create_delegate(self):
"""Test creating an invite for an delegate role"""
# Assert preconditions
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 0
)
url = reverse(
'projectroles:api_invite_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'email': INVITE_USER_EMAIL,
'role': PROJECT_ROLE_DELEGATE,
'message': INVITE_MESSAGE,
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and data
self.assertEqual(response.status_code, 201, msg=response.content)
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 1
)
invite = ProjectInvite.objects.first()
self.assertEqual(invite.role, self.role_delegate)
self.assertEqual(len(mail.outbox), 1)
@override_settings(PROJECTROLES_DELEGATE_LIMIT=2)
def test_create_delegate_no_perms(self):
"""Test creating an delegate invite without perms (should fail)"""
del_user = self.make_user('delegate')
self._make_assignment(self.project, del_user, self.role_delegate)
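        # NOTE: PROJECTROLES_DELEGATE_LIMIT is raised to 2 above, so a failure
        # here should result from missing permissions rather than the limit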
# Assert preconditions
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 0
)
url = reverse(
'projectroles:api_invite_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'email': INVITE_USER_EMAIL,
'role': PROJECT_ROLE_DELEGATE,
'message': INVITE_MESSAGE,
}
response = self.request_knox(
url, method='POST', data=post_data, token=self.get_token(del_user)
)
# Assert response and data
self.assertEqual(response.status_code, 403, msg=response.content)
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 0
)
self.assertEqual(len(mail.outbox), 0)
def test_create_delegate_limit(self):
"""Test creating an delegate invite with exceeded limit (should fail)"""
del_user = self.make_user('delegate')
self._make_assignment(self.project, del_user, self.role_delegate)
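        # NOTE: The default PROJECTROLES_DELEGATE_LIMIT (assumed to be 1) is
        # already reached by del_user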
# Assert preconditions
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 0
)
url = reverse(
'projectroles:api_invite_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'email': INVITE_USER_EMAIL,
'role': PROJECT_ROLE_DELEGATE,
'message': INVITE_MESSAGE,
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and data
self.assertEqual(response.status_code, 400, msg=response.content)
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 0
)
self.assertEqual(len(mail.outbox), 0)
def test_create_invalid_email(self):
"""Test creating an invite with invalid email (should fail)"""
# Assert preconditions
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 0
)
url = reverse(
'projectroles:api_invite_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'email': 'NOT_AN_EMAIL!',
'role': PROJECT_ROLE_CONTRIBUTOR,
'message': INVITE_MESSAGE,
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and data
self.assertEqual(response.status_code, 400, msg=response.content)
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 0
)
self.assertEqual(len(mail.outbox), 0)
def test_create_existing_user(self):
"""Test creating an invite for an existing user (should fail)"""
user = self.make_user('new_user')
user.email = INVITE_USER_EMAIL
user.save()
# Assert preconditions
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 0
)
url = reverse(
'projectroles:api_invite_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'email': INVITE_USER_EMAIL,
'role': PROJECT_ROLE_CONTRIBUTOR,
'message': INVITE_MESSAGE,
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and role status
self.assertEqual(response.status_code, 400, msg=response.content)
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 0
)
self.assertEqual(len(mail.outbox), 0)
@override_settings(PROJECTROLES_SITE_MODE=SITE_MODE_TARGET)
def test_create_remote(self):
"""Test creating an invite for a remote project (should fail)"""
# Set up remote site and project
source_site = self._make_site(
name=REMOTE_SITE_NAME,
url=REMOTE_SITE_URL,
mode=SITE_MODE_SOURCE,
description=REMOTE_SITE_DESC,
secret=REMOTE_SITE_SECRET,
)
self._make_remote_project(
project_uuid=self.project.sodar_uuid,
project=self.project,
site=source_site,
level=SODAR_CONSTANTS['REMOTE_LEVEL_READ_ROLES'],
)
# Assert preconditions
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 0
)
url = reverse(
'projectroles:api_invite_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'email': INVITE_USER_EMAIL,
'role': PROJECT_ROLE_CONTRIBUTOR,
'message': INVITE_MESSAGE,
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and role status
self.assertEqual(response.status_code, 400, msg=response.content)
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 0
)
self.assertEqual(len(mail.outbox), 0)
class TestProjectInviteRevokeAPIView(ProjectInviteMixin, TestCoreAPIViewsBase):
"""Tests for ProjectInviteRevokeAPIView"""
def setUp(self):
super().setUp()
# Create invite
self.invite = self._make_invite(
email=INVITE_USER_EMAIL,
project=self.project,
role=self.role_contributor,
issuer=self.user,
)
def test_revoke(self):
"""Test revoking an invite"""
# Assert preconditions
self.assertEqual(self.invite.active, True)
url = reverse(
'projectroles:api_invite_revoke',
kwargs={'projectinvite': self.invite.sodar_uuid},
)
response = self.request_knox(url, method='POST')
# Assert response and invite status
self.assertEqual(response.status_code, 200, msg=response.content)
self.invite.refresh_from_db()
self.assertEqual(self.invite.active, False)
def test_revoke_inactive(self):
"""Test revoking an already inactive invite (should fail)"""
self.invite.active = False
self.invite.save()
url = reverse(
'projectroles:api_invite_revoke',
kwargs={'projectinvite': self.invite.sodar_uuid},
)
response = self.request_knox(url, method='POST')
# Assert response and invite status
self.assertEqual(response.status_code, 400, msg=response.content)
def test_revoke_delegate(self):
"""Test revoking a delegate invite with sufficient perms"""
self.invite.role = self.role_delegate
self.invite.save()
url = reverse(
'projectroles:api_invite_revoke',
kwargs={'projectinvite': self.invite.sodar_uuid},
)
response = self.request_knox(url, method='POST')
# Assert response and invite status
self.assertEqual(response.status_code, 200, msg=response.content)
self.invite.refresh_from_db()
self.assertEqual(self.invite.active, False)
def test_revoke_delegate_no_perms(self):
"""Test revoking a delegate invite without perms (should fail)"""
self.invite.role = self.role_delegate
self.invite.save()
delegate = self.make_user('delegate')
self._make_assignment(self.project, delegate, self.role_delegate)
url = reverse(
'projectroles:api_invite_revoke',
kwargs={'projectinvite': self.invite.sodar_uuid},
)
response = self.request_knox(
url, method='POST', token=self.get_token(delegate)
)
# Assert response and invite status
self.assertEqual(response.status_code, 403, msg=response.content)
self.invite.refresh_from_db()
self.assertEqual(self.invite.active, True)
def test_revoke_not_found(self):
"""Test revoking invite with invalid UUID"""
url = reverse(
'projectroles:api_invite_revoke',
kwargs={'projectinvite': INVALID_UUID},
)
response = self.request_knox(url, method='POST')
self.assertEqual(response.status_code, 404)
class TestProjectInviteResendAPIView(ProjectInviteMixin, TestCoreAPIViewsBase):
"""Tests for ProjectInviteResendAPIView"""
def setUp(self):
super().setUp()
# Create invite
self.invite = self._make_invite(
email=INVITE_USER_EMAIL,
project=self.project,
role=self.role_contributor,
issuer=self.user,
)
def test_resend(self):
"""Test resending an invite"""
# Assert preconditions
self.assertEqual(len(mail.outbox), 0)
url = reverse(
'projectroles:api_invite_resend',
kwargs={'projectinvite': self.invite.sodar_uuid},
)
response = self.request_knox(url, method='POST')
# Assert response and mail status
self.assertEqual(response.status_code, 200, msg=response.content)
self.assertEqual(len(mail.outbox), 1)
def test_resend_inactive(self):
"""Test resending an inactive invite (should fail)"""
self.invite.active = False
self.invite.save()
url = reverse(
'projectroles:api_invite_resend',
kwargs={'projectinvite': self.invite.sodar_uuid},
)
response = self.request_knox(url, method='POST')
# Assert response and mail status
self.assertEqual(response.status_code, 400, msg=response.content)
self.assertEqual(len(mail.outbox), 0)
def test_resend_delegate(self):
"""Test resending a delegate invite with sufficient perms"""
self.invite.role = self.role_delegate
self.invite.save()
url = reverse(
'projectroles:api_invite_resend',
kwargs={'projectinvite': self.invite.sodar_uuid},
)
response = self.request_knox(url, method='POST')
# Assert response and mail status
self.assertEqual(response.status_code, 200, msg=response.content)
self.assertEqual(len(mail.outbox), 1)
def test_resend_delegate_no_perms(self):
"""Test resending a delegate invite without perms (should fail)"""
self.invite.role = self.role_delegate
self.invite.save()
delegate = self.make_user('delegate')
self._make_assignment(self.project, delegate, self.role_delegate)
url = reverse(
'projectroles:api_invite_resend',
kwargs={'projectinvite': self.invite.sodar_uuid},
)
response = self.request_knox(
url, method='POST', token=self.get_token(delegate)
)
# Assert response and mail status
self.assertEqual(response.status_code, 403, msg=response.content)
self.assertEqual(len(mail.outbox), 0)
def test_resend_not_found(self):
"""Test resending invite with invalid UUID"""
url = reverse(
'projectroles:api_invite_resend',
kwargs={'projectinvite': INVALID_UUID},
)
response = self.request_knox(url, method='POST')
self.assertEqual(response.status_code, 404)
class TestUserListAPIView(TestCoreAPIViewsBase):
"""Tests for UserListAPIView"""
def setUp(self):
super().setUp()
# Create additional users
self.domain_user = self.make_user('domain_user@domain')
def test_get(self):
"""Test UserListAPIView get() as a regular user"""
url = reverse('projectroles:api_user_list')
response = self.request_knox(
url, token=self.get_token(self.domain_user)
)
# Assert response
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
self.assertEqual(len(response_data), 1) # System users not returned
expected = [
{
'username': self.domain_user.username,
'name': self.domain_user.name,
'email': self.domain_user.email,
'sodar_uuid': str(self.domain_user.sodar_uuid),
}
]
self.assertEqual(response_data, expected)
def test_get_superuser(self):
"""Test UserListAPIView get() as a superuser"""
url = reverse('projectroles:api_user_list')
response = self.request_knox(url) # Default token is for superuser
# Assert response
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
self.assertEqual(len(response_data), 2)
expected = [
{
'username': self.user.username,
'name': self.user.name,
'email': self.user.email,
'sodar_uuid': str(self.user.sodar_uuid),
},
{
'username': self.domain_user.username,
'name': self.domain_user.name,
'email': self.domain_user.email,
'sodar_uuid': str(self.domain_user.sodar_uuid),
},
]
self.assertEqual(response_data, expected)
class TestCurrentUserRetrieveAPIView(TestCoreAPIViewsBase):
"""Tests for CurrentUserRetrieveAPIView"""
def setUp(self):
super().setUp()
# Create additional users
self.domain_user = self.make_user('domain_user@domain')
def test_get(self):
"""Test CurrentUserRetrieveAPIView get() as a regular user"""
url = reverse('projectroles:api_user_current')
response = self.request_knox(
url, token=self.get_token(self.domain_user)
)
# Assert response
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
expected = {
'username': self.domain_user.username,
'name': self.domain_user.name,
'email': self.domain_user.email,
'sodar_uuid': str(self.domain_user.sodar_uuid),
}
self.assertEqual(response_data, expected)
def test_get_superuser(self):
"""Test CurrentUserRetrieveAPIView get() as superuser"""
url = reverse('projectroles:api_user_current')
response = self.request_knox(url)
# Assert response
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
expected = {
'username': self.user.username,
'name': self.user.name,
'email': self.user.email,
'sodar_uuid': str(self.user.sodar_uuid),
}
self.assertEqual(response_data, expected)
class TestAPIVersioning(TestCoreAPIViewsBase):
"""Tests for REST API view versioning using ProjectRetrieveAPIView"""
def setUp(self):
super().setUp()
self.url = reverse(
'projectroles:api_project_retrieve',
kwargs={'project': self.project.sodar_uuid},
)
def test_api_versioning(self):
"""Test SODAR API Access with correct version headers"""
response = self.request_knox(
self.url,
media_type=views_api.CORE_API_MEDIA_TYPE,
version=views_api.CORE_API_DEFAULT_VERSION,
)
self.assertEqual(response.status_code, 200)
def test_api_versioning_invalid_version(self):
"""Test SODAR API Access with unsupported version (should fail)"""
response = self.request_knox(
self.url,
media_type=views_api.CORE_API_MEDIA_TYPE,
version=CORE_API_VERSION_INVALID,
)
self.assertEqual(response.status_code, 406)
def test_api_versioning_invalid_media_type(self):
"""Test SODAR API Access with unsupported media type (should fail)"""
response = self.request_knox(
self.url,
media_type=CORE_API_MEDIA_TYPE_INVALID,
            version=views_api.CORE_API_DEFAULT_VERSION,
)
self.assertEqual(response.status_code, 406)
# TODO: To be updated once the legacy API view is redone for SODAR Core v1.0
class TestRemoteProjectGetAPIView(
ProjectMixin,
RoleAssignmentMixin,
RemoteSiteMixin,
RemoteProjectMixin,
SODARAPIViewTestMixin,
TestViewsBase,
):
"""Tests for remote project getting API view"""
media_type = views_api.CORE_API_MEDIA_TYPE
api_version = views_api.CORE_API_DEFAULT_VERSION
def setUp(self):
super().setUp()
# Set up projects
self.category = self._make_project(
'TestCategory', PROJECT_TYPE_CATEGORY, None
)
self.cat_owner_as = self._make_assignment(
self.category, self.user, self.role_owner
)
self.project = self._make_project(
'TestProject', PROJECT_TYPE_PROJECT, self.category
)
self.project_owner_as = self._make_assignment(
self.project, self.user, self.role_owner
)
# Create target site
self.target_site = self._make_site(
name=REMOTE_SITE_NAME,
url=REMOTE_SITE_URL,
mode=SITE_MODE_TARGET,
description=REMOTE_SITE_DESC,
secret=REMOTE_SITE_SECRET,
)
# Create remote project
self.remote_project = self._make_remote_project(
site=self.target_site,
project_uuid=self.project.sodar_uuid,
project=self.project,
level=SODAR_CONSTANTS['REMOTE_LEVEL_READ_INFO'],
)
self.remote_api = RemoteProjectAPI()
def test_get(self):
"""Test retrieving project data to the target site"""
response = self.client.get(
reverse(
'projectroles:api_remote_get',
kwargs={'secret': REMOTE_SITE_SECRET},
)
)
self.assertEqual(response.status_code, 200)
expected = self.remote_api.get_source_data(self.target_site)
response_dict = json.loads(response.content.decode('utf-8'))
self.assertEqual(response_dict, expected)
def test_get_invalid_secret(self):
"""Test retrieving project data with an invalid secret (should fail)"""
response = self.client.get(
reverse(
'projectroles:api_remote_get', kwargs={'secret': build_secret()}
)
)
self.assertEqual(response.status_code, 401)
| [((919, 5, 919, 60), 'django.test.override_settings', 'override_settings', (), '', False, 'from django.test import override_settings\n'), ((1030, 5, 1030, 63), 'django.test.override_settings', 'override_settings', (), '', False, 'from django.test import override_settings\n'), ((1053, 5, 1053, 63), 'django.test.override_settings', 'override_settings', (), '', False, 'from django.test import override_settings\n'), ((1093, 5, 1096, 5), 'django.test.override_settings', 'override_settings', (), '', False, 'from django.test import override_settings\n'), ((1521, 5, 1521, 63), 'django.test.override_settings', 'override_settings', (), '', False, 'from django.test import override_settings\n'), ((1782, 5, 1782, 63), 'django.test.override_settings', 'override_settings', (), '', False, 'from django.test import override_settings\n'), ((1996, 5, 1996, 63), 'django.test.override_settings', 'override_settings', (), '', False, 'from django.test import override_settings\n'), ((2099, 5, 2099, 63), 'django.test.override_settings', 'override_settings', (), '', False, 'from django.test import override_settings\n'), ((2246, 5, 2246, 63), 'django.test.override_settings', 'override_settings', (), '', False, 'from django.test import override_settings\n'), ((2486, 5, 2486, 53), 'django.test.override_settings', 'override_settings', (), '', False, 'from django.test import override_settings\n'), ((2600, 5, 2600, 63), 'django.test.override_settings', 'override_settings', (), '', False, 'from django.test import override_settings\n'), ((101, 17, 101, 52), 'knox.models.AuthToken.objects.create', 'AuthToken.objects.create', (), '', False, 'from knox.models import AuthToken\n'), ((217, 11, 217, 38), 'projectroles.plugins.get_backend_api', 'get_backend_api', ({(217, 27, 217, 37): '"""taskflow"""'}, {}), "('taskflow')", False, 'from projectroles.plugins import change_plugin_status, get_backend_api\n'), ((271, 14, 271, 54), 'django.urls.reverse', 'reverse', ({(271, 22, 271, 53): '"""projectroles:api_project_list"""'}, {}), "('projectroles:api_project_list')", False, 'from django.urls import reverse\n'), ((276, 24, 276, 52), 'json.loads', 'json.loads', ({(276, 35, 276, 51): 'response.content'}, {}), '(response.content)', False, 'import json\n'), ((329, 14, 329, 54), 'django.urls.reverse', 'reverse', ({(329, 22, 329, 53): '"""projectroles:api_project_list"""'}, {}), "('projectroles:api_project_list')", False, 'from django.urls import reverse\n'), ((334, 24, 334, 52), 'json.loads', 'json.loads', ({(334, 35, 334, 51): 'response.content'}, {}), '(response.content)', False, 'import json\n'), ((343, 14, 343, 54), 'django.urls.reverse', 'reverse', ({(343, 22, 343, 53): '"""projectroles:api_project_list"""'}, {}), "('projectroles:api_project_list')", False, 'from django.urls import reverse\n'), ((348, 24, 348, 52), 'json.loads', 'json.loads', ({(348, 35, 348, 51): 'response.content'}, {}), '(response.content)', False, 'import json\n'), ((357, 14, 360, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((365, 24, 365, 52), 'json.loads', 'json.loads', ({(365, 35, 365, 51): 'response.content'}, {}), '(response.content)', False, 'import json\n'), ((392, 14, 395, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((400, 24, 400, 52), 'json.loads', 'json.loads', ({(400, 35, 400, 51): 'response.content'}, {}), '(response.content)', False, 'import json\n'), ((427, 14, 430, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), 
((443, 34, 445, 9), 'projectroles.models.RoleAssignment.objects.get', 'RoleAssignment.objects.get', (), '', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((452, 22, 454, 9), 'projectroles.models.RoleAssignment.objects.get', 'RoleAssignment.objects.get', (), '', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((490, 14, 493, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((723, 14, 723, 56), 'django.urls.reverse', 'reverse', ({(723, 22, 723, 55): '"""projectroles:api_project_create"""'}, {}), "('projectroles:api_project_create')", False, 'from django.urls import reverse\n'), ((740, 23, 740, 68), 'projectroles.models.Project.objects.get', 'Project.objects.get', (), '', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((741, 21, 741, 48), 'django.forms.models.model_to_dict', 'model_to_dict', ({(741, 35, 741, 47): 'new_category'}, {}), '(new_category)', False, 'from django.forms.models import model_to_dict\n'), ((783, 14, 783, 56), 'django.urls.reverse', 'reverse', ({(783, 22, 783, 55): '"""projectroles:api_project_create"""'}, {}), "('projectroles:api_project_create')", False, 'from django.urls import reverse\n'), ((800, 23, 800, 68), 'projectroles.models.Project.objects.get', 'Project.objects.get', (), '', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((801, 21, 801, 48), 'django.forms.models.model_to_dict', 'model_to_dict', ({(801, 35, 801, 47): 'new_category'}, {}), '(new_category)', False, 'from django.forms.models import model_to_dict\n'), ((843, 14, 843, 56), 'django.urls.reverse', 'reverse', ({(843, 22, 843, 55): '"""projectroles:api_project_create"""'}, {}), "('projectroles:api_project_create')", False, 'from django.urls import reverse\n'), ((860, 22, 860, 66), 'projectroles.models.Project.objects.get', 'Project.objects.get', (), '', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((861, 21, 861, 47), 'django.forms.models.model_to_dict', 'model_to_dict', ({(861, 35, 861, 46): 'new_project'}, {}), '(new_project)', False, 'from django.forms.models import model_to_dict\n'), ((903, 14, 903, 56), 'django.urls.reverse', 'reverse', ({(903, 22, 903, 55): '"""projectroles:api_project_create"""'}, {}), "('projectroles:api_project_create')", False, 'from django.urls import reverse\n'), ((926, 14, 926, 56), 'django.urls.reverse', 'reverse', ({(926, 22, 926, 55): '"""projectroles:api_project_create"""'}, {}), "('projectroles:api_project_create')", False, 'from django.urls import reverse\n'), ((948, 14, 948, 56), 'django.urls.reverse', 'reverse', ({(948, 22, 948, 55): '"""projectroles:api_project_create"""'}, {}), "('projectroles:api_project_create')", False, 'from django.urls import reverse\n'), ((970, 14, 970, 56), 'django.urls.reverse', 'reverse', ({(970, 22, 970, 55): '"""projectroles:api_project_create"""'}, {}), "('projectroles:api_project_create')", False, 'from django.urls import reverse\n'), ((992, 14, 992, 56), 'django.urls.reverse', 'reverse', ({(992, 22, 992, 55): '"""projectroles:api_project_create"""'}, {}), "('projectroles:api_project_create')", False, 'from django.urls import reverse\n'), ((1014, 14, 1014, 56), 'django.urls.reverse', 'reverse', ({(1014, 22, 1014, 55): '"""projectroles:api_project_create"""'}, {}), 
"('projectroles:api_project_create')", False, 'from django.urls import reverse\n'), ((1037, 14, 1037, 56), 'django.urls.reverse', 'reverse', ({(1037, 22, 1037, 55): '"""projectroles:api_project_create"""'}, {}), "('projectroles:api_project_create')", False, 'from django.urls import reverse\n'), ((1077, 14, 1077, 56), 'django.urls.reverse', 'reverse', ({(1077, 22, 1077, 55): '"""projectroles:api_project_create"""'}, {}), "('projectroles:api_project_create')", False, 'from django.urls import reverse\n'), ((1103, 14, 1103, 56), 'django.urls.reverse', 'reverse', ({(1103, 22, 1103, 55): '"""projectroles:api_project_create"""'}, {}), "('projectroles:api_project_create')", False, 'from django.urls import reverse\n'), ((1131, 14, 1134, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((1152, 21, 1152, 49), 'django.forms.models.model_to_dict', 'model_to_dict', ({(1152, 35, 1152, 48): 'self.category'}, {}), '(self.category)', False, 'from django.forms.models import model_to_dict\n'), ((1194, 14, 1197, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((1215, 21, 1215, 48), 'django.forms.models.model_to_dict', 'model_to_dict', ({(1215, 35, 1215, 47): 'self.project'}, {}), '(self.project)', False, 'from django.forms.models import model_to_dict\n'), ((1257, 14, 1260, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((1274, 21, 1274, 49), 'django.forms.models.model_to_dict', 'model_to_dict', ({(1274, 35, 1274, 48): 'self.category'}, {}), '(self.category)', False, 'from django.forms.models import model_to_dict\n'), ((1319, 14, 1322, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((1337, 21, 1337, 48), 'django.forms.models.model_to_dict', 'model_to_dict', ({(1337, 35, 1337, 47): 'self.project'}, {}), '(self.project)', False, 'from django.forms.models import model_to_dict\n'), ((1380, 14, 1383, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((1403, 14, 1406, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((1415, 21, 1415, 48), 'django.forms.models.model_to_dict', 'model_to_dict', ({(1415, 35, 1415, 47): 'self.project'}, {}), '(self.project)', False, 'from django.forms.models import model_to_dict\n'), ((1440, 14, 1443, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((1461, 14, 1464, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((1479, 14, 1482, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((1499, 14, 1502, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((1511, 14, 1514, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((1540, 14, 1543, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((1572, 14, 1575, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((1608, 14, 1611, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((1633, 14, 1636, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((1663, 14, 1666, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((1685, 14, 1688, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from 
django.urls import reverse\n'), ((1707, 14, 1710, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((1732, 14, 1735, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((1753, 14, 1756, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((1806, 14, 1809, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((1841, 14, 1844, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((1857, 21, 1857, 50), 'django.forms.models.model_to_dict', 'model_to_dict', ({(1857, 35, 1857, 49): 'self.update_as'}, {}), '(self.update_as)', False, 'from django.forms.models import model_to_dict\n'), ((1878, 14, 1881, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((1893, 21, 1893, 50), 'django.forms.models.model_to_dict', 'model_to_dict', ({(1893, 35, 1893, 49): 'self.update_as'}, {}), '(self.update_as)', False, 'from django.forms.models import model_to_dict\n'), ((1914, 14, 1917, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((1931, 14, 1934, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((1950, 14, 1953, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((1963, 21, 1963, 50), 'django.forms.models.model_to_dict', 'model_to_dict', ({(1963, 35, 1963, 49): 'self.update_as'}, {}), '(self.update_as)', False, 'from django.forms.models import model_to_dict\n'), ((1986, 14, 1989, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((2015, 14, 2018, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((2045, 14, 2048, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((2071, 14, 2074, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((2089, 14, 2092, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((2121, 14, 2124, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((2152, 14, 2155, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((2177, 14, 2180, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((2210, 14, 2213, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((2223, 23, 2225, 9), 'projectroles.models.RoleAssignment.objects.get', 'RoleAssignment.objects.get', (), '', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((2233, 14, 2236, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((2273, 14, 2276, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((2313, 14, 2316, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((2321, 24, 2321, 52), 'json.loads', 'json.loads', ({(2321, 35, 2321, 51): 'response.content'}, {}), '(response.content)', False, 'import json\n'), ((2354, 14, 2357, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((2362, 24, 2362, 52), 'json.loads', 'json.loads', ({(2362, 35, 2362, 51): 'response.content'}, {}), '(response.content)', False, 'import json\n'), 
((2394, 14, 2397, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((2412, 17, 2412, 46), 'projectroles.models.ProjectInvite.objects.first', 'ProjectInvite.objects.first', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((2440, 14, 2443, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((2466, 14, 2469, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((2482, 17, 2482, 46), 'projectroles.models.ProjectInvite.objects.first', 'ProjectInvite.objects.first', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((2497, 14, 2500, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((2527, 14, 2530, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((2553, 14, 2556, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((2582, 14, 2585, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((2623, 14, 2626, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((2662, 14, 2665, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((2678, 14, 2681, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((2692, 14, 2695, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((2710, 14, 2713, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((2725, 14, 2728, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((2753, 14, 2756, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((2768, 14, 2771, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((2783, 14, 2786, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((2800, 14, 2803, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((2814, 14, 2817, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((2832, 14, 2832, 51), 'django.urls.reverse', 'reverse', ({(2832, 22, 2832, 50): '"""projectroles:api_user_list"""'}, {}), "('projectroles:api_user_list')", False, 'from django.urls import reverse\n'), ((2839, 24, 2839, 52), 'json.loads', 'json.loads', ({(2839, 35, 2839, 51): 'response.content'}, {}), '(response.content)', False, 'import json\n'), ((2853, 14, 2853, 51), 'django.urls.reverse', 'reverse', ({(2853, 22, 2853, 50): '"""projectroles:api_user_list"""'}, {}), "('projectroles:api_user_list')", False, 'from django.urls import reverse\n'), ((2858, 24, 2858, 52), 'json.loads', 'json.loads', ({(2858, 35, 2858, 51): 'response.content'}, {}), '(response.content)', False, 'import json\n'), ((2887, 14, 2887, 54), 'django.urls.reverse', 'reverse', ({(2887, 22, 2887, 53): '"""projectroles:api_user_current"""'}, {}), "('projectroles:api_user_current')", False, 'from django.urls import reverse\n'), ((2894, 24, 2894, 52), 'json.loads', 'json.loads', ({(2894, 35, 2894, 51): 'response.content'}, {}), '(response.content)', False, 'import json\n'), ((2905, 14, 2905, 54), 'django.urls.reverse', 'reverse', ({(2905, 22, 2905, 53): 
'"""projectroles:api_user_current"""'}, {}), "('projectroles:api_user_current')", False, 'from django.urls import reverse\n'), ((2910, 24, 2910, 52), 'json.loads', 'json.loads', ({(2910, 35, 2910, 51): 'response.content'}, {}), '(response.content)', False, 'import json\n'), ((2926, 19, 2929, 9), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((3007, 26, 3007, 44), 'projectroles.remote_projects.RemoteProjectAPI', 'RemoteProjectAPI', ({}, {}), '()', False, 'from projectroles.remote_projects import RemoteProjectAPI\n'), ((218, 12, 220, 13), 'projectroles.plugins.change_plugin_status', 'change_plugin_status', (), '', False, 'from projectroles.plugins import change_plugin_status, get_backend_api\n'), ((223, 26, 223, 77), 'projectroles.models.Role.objects.get_or_create', 'Role.objects.get_or_create', (), '', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((224, 29, 226, 9), 'projectroles.models.Role.objects.get_or_create', 'Role.objects.get_or_create', (), '', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((227, 32, 229, 9), 'projectroles.models.Role.objects.get_or_create', 'Role.objects.get_or_create', (), '', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((230, 26, 230, 77), 'projectroles.models.Role.objects.get_or_create', 'Role.objects.get_or_create', (), '', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((536, 28, 536, 56), 'json.loads', 'json.loads', ({(536, 39, 536, 55): 'response.content'}, {}), '(response.content)', False, 'import json\n'), ((721, 25, 721, 48), 'projectroles.models.Project.objects.count', 'Project.objects.count', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((737, 25, 737, 48), 'projectroles.models.Project.objects.count', 'Project.objects.count', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((775, 25, 775, 53), 'json.loads', 'json.loads', ({(775, 36, 775, 52): 'response.content'}, {}), '(response.content)', False, 'import json\n'), ((781, 25, 781, 48), 'projectroles.models.Project.objects.count', 'Project.objects.count', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((797, 25, 797, 48), 'projectroles.models.Project.objects.count', 'Project.objects.count', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((835, 25, 835, 53), 'json.loads', 'json.loads', ({(835, 36, 835, 52): 'response.content'}, {}), '(response.content)', False, 'import json\n'), ((841, 25, 841, 48), 'projectroles.models.Project.objects.count', 'Project.objects.count', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((857, 25, 857, 48), 'projectroles.models.Project.objects.count', 'Project.objects.count', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((895, 25, 895, 53), 'json.loads', 'json.loads', ({(895, 36, 895, 52): 'response.content'}, {}), '(response.content)', False, 'import json\n'), ((901, 25, 901, 48), 'projectroles.models.Project.objects.count', 
'Project.objects.count', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((917, 25, 917, 48), 'projectroles.models.Project.objects.count', 'Project.objects.count', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((924, 25, 924, 48), 'projectroles.models.Project.objects.count', 'Project.objects.count', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((940, 25, 940, 48), 'projectroles.models.Project.objects.count', 'Project.objects.count', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((946, 25, 946, 48), 'projectroles.models.Project.objects.count', 'Project.objects.count', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((962, 25, 962, 48), 'projectroles.models.Project.objects.count', 'Project.objects.count', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((968, 25, 968, 48), 'projectroles.models.Project.objects.count', 'Project.objects.count', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((984, 25, 984, 48), 'projectroles.models.Project.objects.count', 'Project.objects.count', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((990, 25, 990, 48), 'projectroles.models.Project.objects.count', 'Project.objects.count', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((1006, 25, 1006, 48), 'projectroles.models.Project.objects.count', 'Project.objects.count', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((1012, 25, 1012, 48), 'projectroles.models.Project.objects.count', 'Project.objects.count', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((1028, 25, 1028, 48), 'projectroles.models.Project.objects.count', 'Project.objects.count', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((1035, 25, 1035, 48), 'projectroles.models.Project.objects.count', 'Project.objects.count', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((1051, 25, 1051, 48), 'projectroles.models.Project.objects.count', 'Project.objects.count', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((1075, 25, 1075, 48), 'projectroles.models.Project.objects.count', 'Project.objects.count', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((1091, 25, 1091, 48), 'projectroles.models.Project.objects.count', 'Project.objects.count', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((1101, 25, 1101, 48), 'projectroles.models.Project.objects.count', 'Project.objects.count', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, 
ProjectInvite, SODAR_CONSTANTS\n'), ((1117, 25, 1117, 48), 'projectroles.models.Project.objects.count', 'Project.objects.count', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((1129, 25, 1129, 48), 'projectroles.models.Project.objects.count', 'Project.objects.count', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((1148, 25, 1148, 48), 'projectroles.models.Project.objects.count', 'Project.objects.count', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((1186, 25, 1186, 53), 'json.loads', 'json.loads', ({(1186, 36, 1186, 52): 'response.content'}, {}), '(response.content)', False, 'import json\n'), ((1192, 25, 1192, 48), 'projectroles.models.Project.objects.count', 'Project.objects.count', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((1211, 25, 1211, 48), 'projectroles.models.Project.objects.count', 'Project.objects.count', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((1249, 25, 1249, 53), 'json.loads', 'json.loads', ({(1249, 36, 1249, 52): 'response.content'}, {}), '(response.content)', False, 'import json\n'), ((1255, 25, 1255, 48), 'projectroles.models.Project.objects.count', 'Project.objects.count', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((1270, 25, 1270, 48), 'projectroles.models.Project.objects.count', 'Project.objects.count', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((1311, 25, 1311, 53), 'json.loads', 'json.loads', ({(1311, 36, 1311, 52): 'response.content'}, {}), '(response.content)', False, 'import json\n'), ((1317, 25, 1317, 48), 'projectroles.models.Project.objects.count', 'Project.objects.count', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((1333, 25, 1333, 48), 'projectroles.models.Project.objects.count', 'Project.objects.count', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((1374, 25, 1374, 53), 'json.loads', 'json.loads', ({(1374, 36, 1374, 52): 'response.content'}, {}), '(response.content)', False, 'import json\n'), ((1603, 25, 1603, 53), 'json.loads', 'json.loads', ({(1603, 36, 1603, 52): 'response.content'}, {}), '(response.content)', False, 'import json\n'), ((1839, 25, 1839, 55), 'projectroles.models.RoleAssignment.objects.count', 'RoleAssignment.objects.count', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((1853, 25, 1853, 55), 'projectroles.models.RoleAssignment.objects.count', 'RoleAssignment.objects.count', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((1874, 25, 1874, 53), 'json.loads', 'json.loads', ({(1874, 36, 1874, 52): 'response.content'}, {}), '(response.content)', False, 'import json\n'), ((1910, 25, 1910, 53), 'json.loads', 'json.loads', ({(1910, 36, 1910, 52): 'response.content'}, {}), '(response.content)', False, 'import json\n'), ((1948, 25, 1948, 55), 'projectroles.models.RoleAssignment.objects.count', 
'RoleAssignment.objects.count', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((1959, 25, 1959, 55), 'projectroles.models.RoleAssignment.objects.count', 'RoleAssignment.objects.count', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((1980, 25, 1980, 53), 'json.loads', 'json.loads', ({(1980, 36, 1980, 52): 'response.content'}, {}), '(response.content)', False, 'import json\n'), ((2043, 25, 2043, 55), 'projectroles.models.RoleAssignment.objects.count', 'RoleAssignment.objects.count', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((2053, 25, 2053, 55), 'projectroles.models.RoleAssignment.objects.count', 'RoleAssignment.objects.count', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((2069, 25, 2069, 55), 'projectroles.models.RoleAssignment.objects.count', 'RoleAssignment.objects.count', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((2081, 25, 2081, 55), 'projectroles.models.RoleAssignment.objects.count', 'RoleAssignment.objects.count', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((2087, 25, 2087, 55), 'projectroles.models.RoleAssignment.objects.count', 'RoleAssignment.objects.count', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((2097, 25, 2097, 55), 'projectroles.models.RoleAssignment.objects.count', 'RoleAssignment.objects.count', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((2119, 25, 2119, 55), 'projectroles.models.RoleAssignment.objects.count', 'RoleAssignment.objects.count', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((2129, 25, 2129, 55), 'projectroles.models.RoleAssignment.objects.count', 'RoleAssignment.objects.count', ({}, {}), '()', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((2429, 25, 2429, 53), 'json.loads', 'json.loads', ({(2429, 36, 2429, 52): 'response.content'}, {}), '(response.content)', False, 'import json\n'), ((3013, 12, 3016, 13), 'django.urls.reverse', 'reverse', (), '', False, 'from django.urls import reverse\n'), ((1429, 12, 1429, 40), 'json.loads', 'json.loads', ({(1429, 23, 1429, 39): 'response.content'}, {}), '(response.content)', False, 'import json\n'), ((1589, 18, 1593, 9), 'projectroles.models.RoleAssignment.objects.filter', 'RoleAssignment.objects.filter', (), '', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((1650, 18, 1652, 9), 'projectroles.models.RoleAssignment.objects.filter', 'RoleAssignment.objects.filter', (), '', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((1724, 18, 1726, 9), 'projectroles.models.RoleAssignment.objects.filter', 'RoleAssignment.objects.filter', (), '', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((2300, 19, 2300, 33), 'projectroles.utils.build_secret', 'build_secret', ({}, {}), '()', False, 'from 
projectroles.utils import build_secret\n'), ((2308, 19, 2308, 33), 'projectroles.utils.build_secret', 'build_secret', ({}, {}), '()', False, 'from projectroles.utils import build_secret\n'), ((128, 20, 128, 53), 'pytz.timezone', 'pytz.timezone', ({(128, 34, 128, 52): 'settings.TIME_ZONE'}, {}), '(settings.TIME_ZONE)', False, 'import pytz\n'), ((759, 12, 761, 13), 'projectroles.models.RoleAssignment.objects.filter', 'RoleAssignment.objects.filter', (), '', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((819, 12, 821, 13), 'projectroles.models.RoleAssignment.objects.filter', 'RoleAssignment.objects.filter', (), '', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((879, 12, 881, 13), 'projectroles.models.RoleAssignment.objects.filter', 'RoleAssignment.objects.filter', (), '', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((1569, 12, 1569, 63), 'projectroles.models.RoleAssignment.objects.filter', 'RoleAssignment.objects.filter', (), '', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((1585, 12, 1585, 63), 'projectroles.models.RoleAssignment.objects.filter', 'RoleAssignment.objects.filter', (), '', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((1630, 12, 1630, 63), 'projectroles.models.RoleAssignment.objects.filter', 'RoleAssignment.objects.filter', (), '', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((1646, 12, 1646, 63), 'projectroles.models.RoleAssignment.objects.filter', 'RoleAssignment.objects.filter', (), '', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((1721, 12, 1721, 63), 'projectroles.models.RoleAssignment.objects.filter', 'RoleAssignment.objects.filter', (), '', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((1750, 12, 1750, 63), 'projectroles.models.RoleAssignment.objects.filter', 'RoleAssignment.objects.filter', (), '', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((1766, 12, 1766, 63), 'projectroles.models.RoleAssignment.objects.filter', 'RoleAssignment.objects.filter', (), '', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((1779, 12, 1779, 63), 'projectroles.models.RoleAssignment.objects.filter', 'RoleAssignment.objects.filter', (), '', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((1803, 12, 1803, 63), 'projectroles.models.RoleAssignment.objects.filter', 'RoleAssignment.objects.filter', (), '', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((1819, 12, 1819, 63), 'projectroles.models.RoleAssignment.objects.filter', 'RoleAssignment.objects.filter', (), '', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((2055, 12, 2057, 13), 'projectroles.models.RoleAssignment.objects.filter', 'RoleAssignment.objects.filter', (), '', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((2391, 12, 2391, 62), 'projectroles.models.ProjectInvite.objects.filter', 
'ProjectInvite.objects.filter', (), '', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((2408, 12, 2408, 62), 'projectroles.models.ProjectInvite.objects.filter', 'ProjectInvite.objects.filter', (), '', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((2437, 12, 2437, 62), 'projectroles.models.ProjectInvite.objects.filter', 'ProjectInvite.objects.filter', (), '', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((2454, 12, 2454, 62), 'projectroles.models.ProjectInvite.objects.filter', 'ProjectInvite.objects.filter', (), '', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((2463, 12, 2463, 62), 'projectroles.models.ProjectInvite.objects.filter', 'ProjectInvite.objects.filter', (), '', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((2480, 12, 2480, 62), 'projectroles.models.ProjectInvite.objects.filter', 'ProjectInvite.objects.filter', (), '', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((2494, 12, 2494, 62), 'projectroles.models.ProjectInvite.objects.filter', 'ProjectInvite.objects.filter', (), '', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((2513, 12, 2513, 62), 'projectroles.models.ProjectInvite.objects.filter', 'ProjectInvite.objects.filter', (), '', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((2524, 12, 2524, 62), 'projectroles.models.ProjectInvite.objects.filter', 'ProjectInvite.objects.filter', (), '', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((2541, 12, 2541, 62), 'projectroles.models.ProjectInvite.objects.filter', 'ProjectInvite.objects.filter', (), '', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((2550, 12, 2550, 62), 'projectroles.models.ProjectInvite.objects.filter', 'ProjectInvite.objects.filter', (), '', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((2567, 12, 2567, 62), 'projectroles.models.ProjectInvite.objects.filter', 'ProjectInvite.objects.filter', (), '', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((2579, 12, 2579, 62), 'projectroles.models.ProjectInvite.objects.filter', 'ProjectInvite.objects.filter', (), '', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((2596, 12, 2596, 62), 'projectroles.models.ProjectInvite.objects.filter', 'ProjectInvite.objects.filter', (), '', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((2620, 12, 2620, 62), 'projectroles.models.ProjectInvite.objects.filter', 'ProjectInvite.objects.filter', (), '', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((2637, 12, 2637, 62), 'projectroles.models.ProjectInvite.objects.filter', 'ProjectInvite.objects.filter', (), '', False, 'from projectroles.models import Project, Role, RoleAssignment, ProjectInvite, SODAR_CONSTANTS\n'), ((3031, 65, 3031, 79), 'projectroles.utils.build_secret', 'build_secret', 
({}, {}), '()', False, 'from projectroles.utils import build_secret\n')] |
kwasnydam/animal_disambiguation | src/model/model.py | 1dba0a2f40ca952a3adab925ff9ef54238cf7c1c | """Contains the classification model I am going to use in my problem and some utility functions.
Functions
    build_mmdisambiguator - build the core application object with its collaborators' info
Classes
MMDisambiguator - core class of the application
"""
import pickle
import os
import numpy as np
from sklearn.linear_model import LogisticRegression
import sklearn.metrics as metrics
from src.data import dataset
DEFAULT_CLASSIFIER_SETTINGS = {
'solver': 'liblinear',
'class_weight': 'balanced',
'C': 1.
}
up = os.path.dirname
DEFAULT_ROOT_DIRECTORY = up(up(up(__file__))) # Get directory two levels above
DEFAULT_MODEL_DIRECTORY = os.path.join(DEFAULT_ROOT_DIRECTORY, 'models')
def try_opening_file_pickle(path):
try:
with open(path, 'rb') as f:
file_content = pickle.load(f)
except FileNotFoundError as e:
print('FileNotFound exception occured when trying to open: {}. Disambiguator build failed.'.format(
path
))
raise e
except Exception as e:
print('Exception occured when trying to open {}: {}'.format(path, e))
raise e
return file_content
def build_mmdisambiguator(data_model_params, data_model_path, classificator_parameters, classificator_path=None):
"""Given collaborator parameters and /or load paths, build the MMDisambiguator"""
if classificator_path is None:
data_model = dataset.TextLabelsVectorizer(data_model_params)
data_model_saved = try_opening_file_pickle(data_model_path)
data_model.deserialize(data_model_saved)
classificator = LogisticRegression(**classificator_parameters)
disambiguator = MMDisambiguator(data_model, classificator)
else:
disambiguator_pieces = try_opening_file_pickle(classificator_path)
data_model = dataset.TextLabelsVectorizer(data_model_params)
data_model.deserialize(disambiguator_pieces['data_model'])
classificator = disambiguator_pieces['classificator']
disambiguator = MMDisambiguator(data_model, classificator)
return disambiguator
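# Illustrative sketch of the two construction paths above (the file paths and the
# `params` variable are hypothetical, not shipped with the project):
#
#   # Untrained classifier; only the vectorizer is restored from its pickle:
#   d = build_mmdisambiguator(params, 'models/data_model.pkl',
#                             DEFAULT_CLASSIFIER_SETTINGS)
#
#   # Vectorizer and trained classifier both restored from a serialize() pickle:
#   d = build_mmdisambiguator(params, 'models/data_model.pkl',
#                             DEFAULT_CLASSIFIER_SETTINGS,
#                             classificator_path='models/disambiguator.pkl')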
class NotTrainedException(Exception):
pass
class MMDisambiguator:
"""The class representing the core logic of the disambiguation app.
    It uses data_model for feature and text manipulation and Logistic Regression for performing prediction.
    With the 'source' flag the user controls whether training/prediction is performed from precomputed
    numerical features or from text. If it is done from text, the input is put through feature_extraction first.
Methods:
train - fit the classifier or both data model and classifier from training data
        predict - get prediction on data; the data can be a single sample or multiple samples
transform_labels - get numerical representation of labels
performance_report - generate summary of performance
serialize - get representation for saving
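    Example (an illustrative sketch only; the sample data below is made up):
        disambiguator.train(train_sentences, train_labels, source='text')
        results = disambiguator.predict(['an unseen sentence'], source='text')
        # -> array of [predicted_class_name, probability] rows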
"""
def __init__(self, data_model:dataset.TextLabelsVectorizer, classificator: LogisticRegression):
self.data_model = data_model
self.classificator = classificator
def is_trained(self):
"""Returns True if the underlying classification model is trained"""
return hasattr(self.classificator, "coef_")
def train(self, data, classes, report=False, source='features'):
"""Train the model with training data DATA and training labels CLASSES
Args:
data - training data (text or features)
            classes - training classes (text or numerical)
            report - flag, if True generate training report
            source - 'features': numerical, train directly. 'text': train vectorizer, transform, then train classifier
"""
if source == 'text':
features, classes = self.data_model.fit_transform(data, classes)
else:
features = data
self.classificator.fit(features, classes)
if report:
return self.performance_report(self._classify(self.classificator.predict_proba(features)), classes)
else:
return None
def transform_labels(self, labels):
"""Returns numerical encoding of text labels"""
return self.data_model.transform_labels(labels)
def predict(self, unseen_features, mode='classification', threshold=0.5, format='text', source='features'):
"""Predict classes on unseen data.
Args:
unseen_features - 'string' or list/pandas Series of 'string' if source = 'text'.
numpy array if source = 'features'
mode -
                'classification' - predict probabilities and then make classification decision based on 'threshold'
                'prediction' - return predicted probabilities
            threshold - if mode = 'classification', threshold for the decision
            format - if mode = 'classification', 'text' returns class names, anything else returns numerical labels
            source - 'text' if sentences, 'features' if input already transformed
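        Example (illustrative; the sentence and threshold values below are made up):
            disambiguator.predict(['an ambiguous sentence'], mode='classification',
                                  threshold=0.7, source='text')
            # -> [[predicted_class_name, probability_of_that_class]]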
"""
if not self.is_trained():
raise NotTrainedException('Attempted to perform prediction on a model that has not been trained')
if source == 'text':
unseen_features = self.data_model.transform(unseen_features)
predicted_probability = self.classificator.predict_proba(unseen_features)
if mode == 'classification':
            classification_binary = self._classify(predicted_probability, threshold).astype(int)
classification = classification_binary
if format == 'text':
classification = self.data_model.get_classes_name(classification_binary)
result = []
for idx in range(classification.shape[0]):
result.append([classification[idx], predicted_probability[idx,classification_binary[idx]]])
result = np.asarray(result)
elif mode == 'prediction':
result = predicted_probability
return result
def _classify(self, predicted_probabilities, threshold=0.5):
"""Decision: class based on predicted probability and threshold"""
classes = predicted_probabilities.copy()[:,1]
classes[classes >= threshold] = 1
classes[classes < threshold] = 0
return classes
def performance_report(self, predicted_classes, real_classes):
"""Generates performance of the given classifier given predicted and real classes
Args:
            predicted_classes - iterable containing the prediction results, len(num_of_samples)
real_classes - iterable containing ground truth classes, len(num_of_samples)
Output:
report - dictionary containing the following fields:
'accuracy',
'precision',
'recall',
                'f1',
'confussion_matrix'
"""
report = {
'accuracy': metrics.accuracy_score(real_classes, predicted_classes),
'precision': metrics.precision_score(real_classes, predicted_classes),
'recall': metrics.recall_score(real_classes, predicted_classes),
'f1': metrics.f1_score(real_classes, predicted_classes),
'confussion_matrix': metrics.confusion_matrix(real_classes, predicted_classes, labels = [1, 0]).tolist()
}
return report
def serialize(self):
"""Returns objects and parameters necessary to perform prediciton"""
to_serialize = {
'data_model': self.data_model.serialize(),
'classificator': self.classificator
}
return to_serialize
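# Note: pickling the dict returned by serialize() yields exactly the structure that
# build_mmdisambiguator() expects through its `classificator_path` argument
# (keys 'data_model' and 'classificator').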
| [((26, 26, 26, 72), 'os.path.join', 'os.path.join', ({(26, 39, 26, 61): 'DEFAULT_ROOT_DIRECTORY', (26, 63, 26, 71): '"""models"""'}, {}), "(DEFAULT_ROOT_DIRECTORY, 'models')", False, 'import os\n'), ((48, 21, 48, 68), 'src.data.dataset.TextLabelsVectorizer', 'dataset.TextLabelsVectorizer', ({(48, 50, 48, 67): 'data_model_params'}, {}), '(data_model_params)', False, 'from src.data import dataset\n'), ((51, 24, 51, 70), 'sklearn.linear_model.LogisticRegression', 'LogisticRegression', ({}, {}), '(**classificator_parameters)', False, 'from sklearn.linear_model import LogisticRegression\n'), ((55, 21, 55, 68), 'src.data.dataset.TextLabelsVectorizer', 'dataset.TextLabelsVectorizer', ({(55, 50, 55, 67): 'data_model_params'}, {}), '(data_model_params)', False, 'from src.data import dataset\n'), ((32, 27, 32, 41), 'pickle.load', 'pickle.load', ({(32, 39, 32, 40): 'f'}, {}), '(f)', False, 'import pickle\n'), ((145, 21, 145, 39), 'numpy.asarray', 'np.asarray', ({(145, 32, 145, 38): 'result'}, {}), '(result)', True, 'import numpy as np\n'), ((175, 24, 175, 79), 'sklearn.metrics.accuracy_score', 'metrics.accuracy_score', ({(175, 47, 175, 59): 'real_classes', (175, 61, 175, 78): 'predicted_classes'}, {}), '(real_classes, predicted_classes)', True, 'import sklearn.metrics as metrics\n'), ((176, 25, 176, 81), 'sklearn.metrics.precision_score', 'metrics.precision_score', ({(176, 49, 176, 61): 'real_classes', (176, 63, 176, 80): 'predicted_classes'}, {}), '(real_classes, predicted_classes)', True, 'import sklearn.metrics as metrics\n'), ((177, 22, 177, 75), 'sklearn.metrics.recall_score', 'metrics.recall_score', ({(177, 43, 177, 55): 'real_classes', (177, 57, 177, 74): 'predicted_classes'}, {}), '(real_classes, predicted_classes)', True, 'import sklearn.metrics as metrics\n'), ((178, 18, 178, 67), 'sklearn.metrics.f1_score', 'metrics.f1_score', ({(178, 35, 178, 47): 'real_classes', (178, 49, 178, 66): 'predicted_classes'}, {}), '(real_classes, predicted_classes)', True, 'import sklearn.metrics as metrics\n'), ((179, 33, 179, 107), 'sklearn.metrics.confusion_matrix', 'metrics.confusion_matrix', (), '', True, 'import sklearn.metrics as metrics\n')] |
myelintek/results | v0.5.0/nvidia/submission/code/recommendation/pytorch/load.py | 11c38436a158c453e3011f8684570f7a55c03330 | # Copyright (c) 2018, deepakn94. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from collections import namedtuple
import pandas as pd
RatingData = namedtuple('RatingData',
['items', 'users', 'ratings', 'min_date', 'max_date'])
def describe_ratings(ratings):
info = RatingData(items=len(ratings['item_id'].unique()),
users=len(ratings['user_id'].unique()),
ratings=len(ratings),
min_date=ratings['timestamp'].min(),
max_date=ratings['timestamp'].max())
print("{ratings} ratings on {items} items from {users} users"
" from {min_date} to {max_date}"
.format(**(info._asdict())))
return info
def process_movielens(ratings, sort=True):
ratings['timestamp'] = pd.to_datetime(ratings['timestamp'], unit='s')
if sort:
ratings.sort_values(by='timestamp', inplace=True)
describe_ratings(ratings)
return ratings
def load_ml_100k(filename, sort=True):
names = ['user_id', 'item_id', 'rating', 'timestamp']
ratings = pd.read_csv(filename, sep='\t', names=names)
return process_movielens(ratings, sort=sort)
def load_ml_1m(filename, sort=True):
names = ['user_id', 'item_id', 'rating', 'timestamp']
ratings = pd.read_csv(filename, sep='::', names=names, engine='python')
return process_movielens(ratings, sort=sort)
def load_ml_10m(filename, sort=True):
names = ['user_id', 'item_id', 'rating', 'timestamp']
ratings = pd.read_csv(filename, sep='::', names=names, engine='python')
return process_movielens(ratings, sort=sort)
def load_ml_20m(filename, sort=True):
ratings = pd.read_csv(filename)
ratings['timestamp'] = pd.to_datetime(ratings['timestamp'], unit='s')
names = {'userId': 'user_id', 'movieId': 'item_id'}
ratings.rename(columns=names, inplace=True)
return process_movielens(ratings, sort=sort)
DATASETS = [k.replace('load_', '') for k in locals().keys() if "load_" in k]
def get_dataset_name(filename):
for dataset in DATASETS:
if dataset in filename.replace('-', '_').lower():
return dataset
raise NotImplementedError
def implicit_load(filename, sort=True):
func = globals()["load_" + get_dataset_name(filename)]
return func(filename, sort=sort)
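# Illustrative usage (the path below is hypothetical; get_dataset_name only needs
# the filename to contain one of the known dataset tags, e.g. "ml-20m"):
#   ratings = implicit_load('/data/ml-20m/ratings.csv', sort=True)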
| [((20, 13, 21, 78), 'collections.namedtuple', 'namedtuple', ({(20, 24, 20, 36): '"""RatingData"""', (21, 24, 21, 77): "['items', 'users', 'ratings', 'min_date', 'max_date']"}, {}), "('RatingData', ['items', 'users', 'ratings', 'min_date', 'max_date'])", False, 'from collections import namedtuple\n'), ((37, 27, 37, 73), 'pandas.to_datetime', 'pd.to_datetime', (), '', True, 'import pandas as pd\n'), ((46, 14, 46, 58), 'pandas.read_csv', 'pd.read_csv', (), '', True, 'import pandas as pd\n'), ((52, 14, 52, 75), 'pandas.read_csv', 'pd.read_csv', (), '', True, 'import pandas as pd\n'), ((58, 14, 58, 75), 'pandas.read_csv', 'pd.read_csv', (), '', True, 'import pandas as pd\n'), ((63, 14, 63, 35), 'pandas.read_csv', 'pd.read_csv', ({(63, 26, 63, 34): 'filename'}, {}), '(filename)', True, 'import pandas as pd\n'), ((64, 27, 64, 73), 'pandas.to_datetime', 'pd.to_datetime', (), '', True, 'import pandas as pd\n')] |
jptomo/pypy-lang-scheme | rpython/jit/backend/llsupport/test/test_rewrite.py | 55edb2cec69d78f86793282a4566fcbc1ef9fcac | from rpython.jit.backend.llsupport.descr import get_size_descr,\
get_field_descr, get_array_descr, ArrayDescr, FieldDescr,\
SizeDescr, get_interiorfield_descr
from rpython.jit.backend.llsupport.gc import GcLLDescr_boehm,\
GcLLDescr_framework
from rpython.jit.backend.llsupport import jitframe
from rpython.jit.metainterp.gc import get_description
from rpython.jit.tool.oparser import parse
from rpython.jit.metainterp.optimizeopt.util import equaloplists
from rpython.jit.metainterp.history import JitCellToken, FLOAT
from rpython.jit.metainterp.history import AbstractFailDescr
from rpython.rtyper.lltypesystem import lltype, rffi
from rpython.rtyper import rclass
from rpython.jit.backend.x86.arch import WORD
class Evaluator(object):
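    """Mapping object whose item lookup evaluates the key as a Python expression
    in `scope`; lets '%(expr)d'-style interpolation in the expected operations
    compute sizes and offsets on the fly."""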
def __init__(self, scope):
self.scope = scope
def __getitem__(self, key):
return eval(key, self.scope)
class FakeLoopToken(object):
pass
o_vtable = lltype.malloc(rclass.OBJECT_VTABLE, immortal=True)
class RewriteTests(object):
def check_rewrite(self, frm_operations, to_operations, **namespace):
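        """Parse `frm_operations`, run the GC rewrite pass over it, and assert that
        the result equals `to_operations` (which is %-interpolated through Evaluator,
        so it may embed Python expressions over the descriptors set up below and over
        anything passed in **namespace)."""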
S = lltype.GcStruct('S', ('x', lltype.Signed),
('y', lltype.Signed))
sdescr = get_size_descr(self.gc_ll_descr, S)
sdescr.tid = 1234
#
T = lltype.GcStruct('T', ('y', lltype.Signed),
('z', lltype.Ptr(S)),
('t', lltype.Signed))
tdescr = get_size_descr(self.gc_ll_descr, T)
tdescr.tid = 5678
tzdescr = get_field_descr(self.gc_ll_descr, T, 'z')
#
A = lltype.GcArray(lltype.Signed)
adescr = get_array_descr(self.gc_ll_descr, A)
adescr.tid = 4321
alendescr = adescr.lendescr
#
B = lltype.GcArray(lltype.Char)
bdescr = get_array_descr(self.gc_ll_descr, B)
bdescr.tid = 8765
blendescr = bdescr.lendescr
#
C = lltype.GcArray(lltype.Ptr(S))
cdescr = get_array_descr(self.gc_ll_descr, C)
cdescr.tid = 8111
clendescr = cdescr.lendescr
#
E = lltype.GcStruct('Empty')
edescr = get_size_descr(self.gc_ll_descr, E)
edescr.tid = 9000
#
vtable_descr = self.gc_ll_descr.fielddescr_vtable
O = lltype.GcStruct('O', ('parent', rclass.OBJECT),
('x', lltype.Signed))
o_descr = self.cpu.sizeof(O, True)
o_vtable = globals()['o_vtable']
#
tiddescr = self.gc_ll_descr.fielddescr_tid
wbdescr = self.gc_ll_descr.write_barrier_descr
WORD = globals()['WORD']
#
strdescr = self.gc_ll_descr.str_descr
unicodedescr = self.gc_ll_descr.unicode_descr
strlendescr = strdescr.lendescr
unicodelendescr = unicodedescr.lendescr
strhashdescr = self.gc_ll_descr.str_hash_descr
unicodehashdescr = self.gc_ll_descr.unicode_hash_descr
casmdescr = JitCellToken()
clt = FakeLoopToken()
clt._ll_initial_locs = [0, 8]
frame_info = lltype.malloc(jitframe.JITFRAMEINFO, flavor='raw')
clt.frame_info = frame_info
frame_info.jfi_frame_depth = 13
frame_info.jfi_frame_size = 255
framedescrs = self.gc_ll_descr.getframedescrs(self.cpu)
framelendescr = framedescrs.arraydescr.lendescr
jfi_frame_depth = framedescrs.jfi_frame_depth
jfi_frame_size = framedescrs.jfi_frame_size
jf_frame_info = framedescrs.jf_frame_info
jf_savedata = framedescrs.jf_savedata
jf_force_descr = framedescrs.jf_force_descr
jf_descr = framedescrs.jf_descr
jf_guard_exc = framedescrs.jf_guard_exc
jf_forward = framedescrs.jf_forward
jf_extra_stack_depth = framedescrs.jf_extra_stack_depth
signedframedescr = self.cpu.signedframedescr
floatframedescr = self.cpu.floatframedescr
casmdescr.compiled_loop_token = clt
#
guarddescr = AbstractFailDescr()
#
namespace.update(locals())
#
for funcname in self.gc_ll_descr._generated_functions:
namespace[funcname] = self.gc_ll_descr.get_malloc_fn(funcname)
namespace[funcname + '_descr'] = getattr(self.gc_ll_descr,
'%s_descr' % funcname)
#
ops = parse(frm_operations, namespace=namespace)
expected = parse(to_operations % Evaluator(namespace),
namespace=namespace)
operations = self.gc_ll_descr.rewrite_assembler(self.cpu,
ops.operations,
[])
remap = {}
for a, b in zip(ops.inputargs, expected.inputargs):
remap[b] = a
equaloplists(operations, expected.operations, remap=remap)
lltype.free(frame_info, flavor='raw')
class FakeTracker(object):
pass
class BaseFakeCPU(object):
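    """Minimal stand-in for a CPU backend: it only provides the descriptor lookups
    (field/array descrs and frame descrs) that the rewrite pass queries in these tests."""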
JITFRAME_FIXED_SIZE = 0
def __init__(self):
self.tracker = FakeTracker()
self._cache = {}
self.signedframedescr = ArrayDescr(3, 8, FieldDescr('len', 0, 0, 0), 0)
self.floatframedescr = ArrayDescr(5, 8, FieldDescr('len', 0, 0, 0), 0)
def getarraydescr_for_frame(self, tp):
if tp == FLOAT:
return self.floatframedescr
return self.signedframedescr
def unpack_arraydescr_size(self, d):
return 0, d.itemsize, 0
def unpack_fielddescr(self, d):
return d.offset
def arraydescrof(self, ARRAY):
try:
return self._cache[ARRAY]
except KeyError:
r = ArrayDescr(1, 2, FieldDescr('len', 0, 0, 0), 0)
self._cache[ARRAY] = r
return r
def fielddescrof(self, STRUCT, fname):
key = (STRUCT, fname)
try:
return self._cache[key]
except KeyError:
r = FieldDescr(fname, 1, 1, 1)
self._cache[key] = r
return r
class TestBoehm(RewriteTests):
def setup_method(self, meth):
class FakeCPU(BaseFakeCPU):
def sizeof(self, STRUCT, is_object):
assert is_object
return SizeDescr(102, gc_fielddescrs=[],
vtable=o_vtable)
self.cpu = FakeCPU()
self.gc_ll_descr = GcLLDescr_boehm(None, None, None)
def test_new(self):
self.check_rewrite("""
[]
p0 = new(descr=sdescr)
jump()
""", """
[p1]
p0 = call_malloc_gc(ConstClass(malloc_fixedsize), %(sdescr.size)d,\
descr=malloc_fixedsize_descr)
jump()
""")
def test_no_collapsing(self):
self.check_rewrite("""
[]
p0 = new(descr=sdescr)
p1 = new(descr=sdescr)
jump()
""", """
[]
p0 = call_malloc_gc(ConstClass(malloc_fixedsize), %(sdescr.size)d,\
descr=malloc_fixedsize_descr)
p1 = call_malloc_gc(ConstClass(malloc_fixedsize), %(sdescr.size)d,\
descr=malloc_fixedsize_descr)
jump()
""")
def test_new_array_fixed(self):
self.check_rewrite("""
[]
p0 = new_array(10, descr=adescr)
jump()
""", """
[]
p0 = call_malloc_gc(ConstClass(malloc_array), \
%(adescr.basesize)d, \
10, \
%(adescr.itemsize)d, \
%(adescr.lendescr.offset)d, \
descr=malloc_array_descr)
jump()
""")
## should ideally be:
## p0 = call_malloc_gc(ConstClass(malloc_fixedsize), \
## %(adescr.basesize + 10 * adescr.itemsize)d, \
## descr=malloc_fixedsize_descr)
## setfield_gc(p0, 10, descr=alendescr)
def test_new_array_variable(self):
self.check_rewrite("""
[i1]
p0 = new_array(i1, descr=adescr)
jump()
""", """
[i1]
p0 = call_malloc_gc(ConstClass(malloc_array), \
%(adescr.basesize)d, \
i1, \
%(adescr.itemsize)d, \
%(adescr.lendescr.offset)d, \
descr=malloc_array_descr)
jump()
""")
def test_new_with_vtable(self):
self.check_rewrite("""
[]
p0 = new_with_vtable(descr=o_descr)
jump()
""", """
[p1]
p0 = call_malloc_gc(ConstClass(malloc_fixedsize), 102, \
descr=malloc_fixedsize_descr)
setfield_gc(p0, ConstClass(o_vtable), descr=vtable_descr)
jump()
""")
def test_newstr(self):
self.check_rewrite("""
[i1]
p0 = newstr(i1)
jump()
""", """
[i1]
p0 = call_malloc_gc(ConstClass(malloc_array), \
%(strdescr.basesize)d, \
i1, \
%(strdescr.itemsize)d, \
%(strlendescr.offset)d, \
descr=malloc_array_descr)
jump()
""")
def test_newunicode(self):
self.check_rewrite("""
[i1]
p0 = newunicode(10)
jump()
""", """
[i1]
p0 = call_malloc_gc(ConstClass(malloc_array), \
%(unicodedescr.basesize)d, \
10, \
%(unicodedescr.itemsize)d, \
%(unicodelendescr.offset)d, \
descr=malloc_array_descr)
jump()
""")
## should ideally be:
## p0 = call_malloc_gc(ConstClass(malloc_fixedsize), \
## %(unicodedescr.basesize + \
## 10 * unicodedescr.itemsize)d, \
## descr=malloc_fixedsize_descr)
## setfield_gc(p0, 10, descr=unicodelendescr)
class TestFramework(RewriteTests):
def setup_method(self, meth):
class config_(object):
class translation(object):
gc = 'minimark'
gcrootfinder = 'asmgcc'
gctransformer = 'framework'
gcremovetypeptr = False
gcdescr = get_description(config_)
self.gc_ll_descr = GcLLDescr_framework(gcdescr, None, None, None,
really_not_translated=True)
self.gc_ll_descr.write_barrier_descr.has_write_barrier_from_array = (
lambda cpu: True)
self.gc_ll_descr.malloc_zero_filled = False
#
class FakeCPU(BaseFakeCPU):
def sizeof(self, STRUCT, is_object):
descr = SizeDescr(104, gc_fielddescrs=[])
descr.tid = 9315
return descr
self.cpu = FakeCPU()
def test_rewrite_assembler_new_to_malloc(self):
self.check_rewrite("""
[p1]
p0 = new(descr=sdescr)
jump()
""", """
[p1]
p0 = call_malloc_nursery(%(sdescr.size)d)
setfield_gc(p0, 1234, descr=tiddescr)
jump()
""")
def test_rewrite_assembler_new3_to_malloc(self):
self.check_rewrite("""
[]
p0 = new(descr=sdescr)
p1 = new(descr=tdescr)
p2 = new(descr=sdescr)
jump()
""", """
[]
p0 = call_malloc_nursery( \
%(sdescr.size + tdescr.size + sdescr.size)d)
setfield_gc(p0, 1234, descr=tiddescr)
p1 = nursery_ptr_increment(p0, %(sdescr.size)d)
setfield_gc(p1, 5678, descr=tiddescr)
p2 = nursery_ptr_increment(p1, %(tdescr.size)d)
setfield_gc(p2, 1234, descr=tiddescr)
zero_ptr_field(p1, %(tdescr.gc_fielddescrs[0].offset)s)
jump()
""")
def test_rewrite_assembler_new_array_fixed_to_malloc(self):
self.check_rewrite("""
[]
p0 = new_array(10, descr=adescr)
jump()
""", """
[]
p0 = call_malloc_nursery( \
%(adescr.basesize + 10 * adescr.itemsize)d)
setfield_gc(p0, 4321, descr=tiddescr)
setfield_gc(p0, 10, descr=alendescr)
jump()
""")
def test_rewrite_assembler_new_and_new_array_fixed_to_malloc(self):
self.check_rewrite("""
[]
p0 = new(descr=sdescr)
p1 = new_array(10, descr=adescr)
jump()
""", """
[]
p0 = call_malloc_nursery( \
%(sdescr.size + \
adescr.basesize + 10 * adescr.itemsize)d)
setfield_gc(p0, 1234, descr=tiddescr)
p1 = nursery_ptr_increment(p0, %(sdescr.size)d)
setfield_gc(p1, 4321, descr=tiddescr)
setfield_gc(p1, 10, descr=alendescr)
jump()
""")
def test_rewrite_assembler_round_up(self):
self.check_rewrite("""
[]
p0 = new_array(6, descr=bdescr)
jump()
""", """
[]
p0 = call_malloc_nursery(%(bdescr.basesize + 8)d)
setfield_gc(p0, 8765, descr=tiddescr)
setfield_gc(p0, 6, descr=blendescr)
jump()
""")
def test_rewrite_assembler_round_up_always(self):
self.check_rewrite("""
[]
p0 = new_array(5, descr=bdescr)
p1 = new_array(5, descr=bdescr)
p2 = new_array(5, descr=bdescr)
p3 = new_array(5, descr=bdescr)
jump()
""", """
[]
p0 = call_malloc_nursery(%(4 * (bdescr.basesize + 8))d)
setfield_gc(p0, 8765, descr=tiddescr)
setfield_gc(p0, 5, descr=blendescr)
p1 = nursery_ptr_increment(p0, %(bdescr.basesize + 8)d)
setfield_gc(p1, 8765, descr=tiddescr)
setfield_gc(p1, 5, descr=blendescr)
p2 = nursery_ptr_increment(p1, %(bdescr.basesize + 8)d)
setfield_gc(p2, 8765, descr=tiddescr)
setfield_gc(p2, 5, descr=blendescr)
p3 = nursery_ptr_increment(p2, %(bdescr.basesize + 8)d)
setfield_gc(p3, 8765, descr=tiddescr)
setfield_gc(p3, 5, descr=blendescr)
jump()
""")
def test_rewrite_assembler_minimal_size(self):
self.check_rewrite("""
[]
p0 = new(descr=edescr)
p1 = new(descr=edescr)
jump()
""", """
[]
p0 = call_malloc_nursery(%(4*WORD)d)
setfield_gc(p0, 9000, descr=tiddescr)
p1 = nursery_ptr_increment(p0, %(2*WORD)d)
setfield_gc(p1, 9000, descr=tiddescr)
jump()
""")
def test_rewrite_assembler_variable_size(self):
self.check_rewrite("""
[i0]
p0 = new_array(i0, descr=bdescr)
jump(i0)
""", """
[i0]
p0 = call_malloc_nursery_varsize(0, 1, i0, descr=bdescr)
setfield_gc(p0, i0, descr=blendescr)
jump(i0)
""")
def test_rewrite_new_string(self):
self.check_rewrite("""
[i0]
p0 = newstr(i0)
jump(i0)
""", """
[i0]
p0 = call_malloc_nursery_varsize(1, 1, i0, descr=strdescr)
setfield_gc(p0, i0, descr=strlendescr)
setfield_gc(p0, 0, descr=strhashdescr)
jump(i0)
""")
def test_rewrite_assembler_nonstandard_array(self):
# a non-standard array is a bit hard to get; e.g. GcArray(Float)
# is like that on Win32, but not on Linux. Build one manually...
NONSTD = lltype.GcArray(lltype.Float)
nonstd_descr = get_array_descr(self.gc_ll_descr, NONSTD)
nonstd_descr.tid = 6464
nonstd_descr.basesize = 64 # <= hacked
nonstd_descr.itemsize = 8
nonstd_descr_gcref = 123
self.check_rewrite("""
[i0, p1]
p0 = new_array(i0, descr=nonstd_descr)
setarrayitem_gc(p0, i0, p1)
jump(i0)
""", """
[i0, p1]
p0 = call_malloc_gc(ConstClass(malloc_array_nonstandard), \
64, 8, \
%(nonstd_descr.lendescr.offset)d, \
6464, i0, \
descr=malloc_array_nonstandard_descr)
cond_call_gc_wb_array(p0, i0, descr=wbdescr)
setarrayitem_gc(p0, i0, p1)
jump(i0)
""", nonstd_descr=nonstd_descr)
def test_rewrite_assembler_maximal_size_1(self):
self.gc_ll_descr.max_size_of_young_obj = 100
self.check_rewrite("""
[]
p0 = new_array(103, descr=bdescr)
jump()
""", """
[]
p0 = call_malloc_gc(ConstClass(malloc_array), 1, \
%(bdescr.tid)d, 103, \
descr=malloc_array_descr)
jump()
""")
def test_rewrite_assembler_maximal_size_2(self):
self.gc_ll_descr.max_size_of_young_obj = 300
self.check_rewrite("""
[]
p0 = new_array(101, descr=bdescr)
p1 = new_array(102, descr=bdescr) # two new_arrays can be combined
p2 = new_array(103, descr=bdescr) # but not all three
jump()
""", """
[]
p0 = call_malloc_nursery( \
%(2 * (bdescr.basesize + 104))d)
setfield_gc(p0, 8765, descr=tiddescr)
setfield_gc(p0, 101, descr=blendescr)
p1 = nursery_ptr_increment(p0, %(bdescr.basesize + 104)d)
setfield_gc(p1, 8765, descr=tiddescr)
setfield_gc(p1, 102, descr=blendescr)
p2 = call_malloc_nursery( \
%(bdescr.basesize + 104)d)
setfield_gc(p2, 8765, descr=tiddescr)
setfield_gc(p2, 103, descr=blendescr)
jump()
""")
def test_rewrite_assembler_huge_size(self):
# "huge" is defined as "larger than 0xffffff bytes, or 16MB"
self.check_rewrite("""
[]
p0 = new_array(20000000, descr=bdescr)
jump()
""", """
[]
p0 = call_malloc_gc(ConstClass(malloc_array), 1, \
%(bdescr.tid)d, 20000000, \
descr=malloc_array_descr)
jump()
""")
def test_new_with_vtable(self):
self.check_rewrite("""
[]
p0 = new_with_vtable(descr=o_descr)
jump()
""", """
[p1]
p0 = call_malloc_nursery(104) # rounded up
setfield_gc(p0, 9315, descr=tiddescr)
setfield_gc(p0, 0, descr=vtable_descr)
jump()
""")
def test_new_with_vtable_too_big(self):
self.gc_ll_descr.max_size_of_young_obj = 100
self.check_rewrite("""
[]
p0 = new_with_vtable(descr=o_descr)
jump()
""", """
[p1]
p0 = call_malloc_gc(ConstClass(malloc_big_fixedsize), 104, 9315, \
descr=malloc_big_fixedsize_descr)
setfield_gc(p0, 0, descr=vtable_descr)
jump()
""")
def test_rewrite_assembler_newstr_newunicode(self):
self.check_rewrite("""
[i2]
p0 = newstr(14)
p1 = newunicode(10)
p2 = newunicode(i2)
p3 = newstr(i2)
jump()
""", """
[i2]
p0 = call_malloc_nursery( \
%(strdescr.basesize + 16 * strdescr.itemsize + \
unicodedescr.basesize + 10 * unicodedescr.itemsize)d)
setfield_gc(p0, %(strdescr.tid)d, descr=tiddescr)
setfield_gc(p0, 14, descr=strlendescr)
setfield_gc(p0, 0, descr=strhashdescr)
p1 = nursery_ptr_increment(p0, %(strdescr.basesize + 16 * strdescr.itemsize)d)
setfield_gc(p1, %(unicodedescr.tid)d, descr=tiddescr)
setfield_gc(p1, 10, descr=unicodelendescr)
setfield_gc(p1, 0, descr=unicodehashdescr)
p2 = call_malloc_nursery_varsize(2, %(unicodedescr.itemsize)d, i2,\
descr=unicodedescr)
setfield_gc(p2, i2, descr=unicodelendescr)
setfield_gc(p2, 0, descr=unicodehashdescr)
p3 = call_malloc_nursery_varsize(1, 1, i2, \
descr=strdescr)
setfield_gc(p3, i2, descr=strlendescr)
setfield_gc(p3, 0, descr=strhashdescr)
jump()
""")
def test_write_barrier_before_setfield_gc(self):
self.check_rewrite("""
[p1, p2]
setfield_gc(p1, p2, descr=tzdescr)
jump()
""", """
[p1, p2]
cond_call_gc_wb(p1, descr=wbdescr)
setfield_gc(p1, p2, descr=tzdescr)
jump()
""")
def test_write_barrier_before_array_without_from_array(self):
self.gc_ll_descr.write_barrier_descr.has_write_barrier_from_array = (
lambda cpu: False)
self.check_rewrite("""
[p1, i2, p3]
setarrayitem_gc(p1, i2, p3, descr=cdescr)
jump()
""", """
[p1, i2, p3]
cond_call_gc_wb(p1, descr=wbdescr)
setarrayitem_gc(p1, i2, p3, descr=cdescr)
jump()
""")
def test_write_barrier_before_short_array(self):
self.gc_ll_descr.max_size_of_young_obj = 2000
self.check_rewrite("""
[i2, p3]
p1 = new_array_clear(129, descr=cdescr)
call_n(123456)
setarrayitem_gc(p1, i2, p3, descr=cdescr)
jump()
""", """
[i2, p3]
p1 = call_malloc_nursery( \
%(cdescr.basesize + 129 * cdescr.itemsize)d)
setfield_gc(p1, 8111, descr=tiddescr)
setfield_gc(p1, 129, descr=clendescr)
zero_array(p1, 0, 129, descr=cdescr)
call_n(123456)
cond_call_gc_wb(p1, descr=wbdescr)
setarrayitem_gc(p1, i2, p3, descr=cdescr)
jump()
""")
def test_write_barrier_before_long_array(self):
# the limit of "being too long" is fixed, arbitrarily, at 130
self.gc_ll_descr.max_size_of_young_obj = 2000
self.check_rewrite("""
[i2, p3]
p1 = new_array_clear(130, descr=cdescr)
call_n(123456)
setarrayitem_gc(p1, i2, p3, descr=cdescr)
jump()
""", """
[i2, p3]
p1 = call_malloc_nursery( \
%(cdescr.basesize + 130 * cdescr.itemsize)d)
setfield_gc(p1, 8111, descr=tiddescr)
setfield_gc(p1, 130, descr=clendescr)
zero_array(p1, 0, 130, descr=cdescr)
call_n(123456)
cond_call_gc_wb_array(p1, i2, descr=wbdescr)
setarrayitem_gc(p1, i2, p3, descr=cdescr)
jump()
""")
def test_write_barrier_before_unknown_array(self):
self.check_rewrite("""
[p1, i2, p3]
setarrayitem_gc(p1, i2, p3, descr=cdescr)
jump()
""", """
[p1, i2, p3]
cond_call_gc_wb_array(p1, i2, descr=wbdescr)
setarrayitem_gc(p1, i2, p3, descr=cdescr)
jump()
""")
def test_label_makes_size_unknown(self):
self.check_rewrite("""
[i2, p3]
p1 = new_array_clear(5, descr=cdescr)
label(p1, i2, p3)
setarrayitem_gc(p1, i2, p3, descr=cdescr)
jump()
""", """
[i2, p3]
p1 = call_malloc_nursery( \
%(cdescr.basesize + 5 * cdescr.itemsize)d)
setfield_gc(p1, 8111, descr=tiddescr)
setfield_gc(p1, 5, descr=clendescr)
zero_array(p1, 0, 5, descr=cdescr)
label(p1, i2, p3)
cond_call_gc_wb_array(p1, i2, descr=wbdescr)
setarrayitem_gc(p1, i2, p3, descr=cdescr)
jump()
""")
def test_write_barrier_before_setinteriorfield_gc(self):
S1 = lltype.GcStruct('S1')
INTERIOR = lltype.GcArray(('z', lltype.Ptr(S1)))
interiordescr = get_array_descr(self.gc_ll_descr, INTERIOR)
interiordescr.tid = 1291
interiorlendescr = interiordescr.lendescr
interiorzdescr = get_interiorfield_descr(self.gc_ll_descr,
INTERIOR, 'z')
self.check_rewrite("""
[p1, p2]
setinteriorfield_gc(p1, 0, p2, descr=interiorzdescr)
jump(p1, p2)
""", """
[p1, p2]
cond_call_gc_wb_array(p1, 0, descr=wbdescr)
setinteriorfield_gc(p1, 0, p2, descr=interiorzdescr)
jump(p1, p2)
""", interiorzdescr=interiorzdescr)
def test_initialization_store(self):
self.check_rewrite("""
[p1]
p0 = new(descr=tdescr)
setfield_gc(p0, p1, descr=tzdescr)
jump()
""", """
[p1]
p0 = call_malloc_nursery(%(tdescr.size)d)
setfield_gc(p0, 5678, descr=tiddescr)
setfield_gc(p0, p1, descr=tzdescr)
jump()
""")
def test_initialization_store_2(self):
self.check_rewrite("""
[]
p0 = new(descr=tdescr)
p1 = new(descr=sdescr)
setfield_gc(p0, p1, descr=tzdescr)
jump()
""", """
[]
p0 = call_malloc_nursery(%(tdescr.size + sdescr.size)d)
setfield_gc(p0, 5678, descr=tiddescr)
p1 = nursery_ptr_increment(p0, %(tdescr.size)d)
setfield_gc(p1, 1234, descr=tiddescr)
# <<<no cond_call_gc_wb here>>>
setfield_gc(p0, p1, descr=tzdescr)
jump()
""")
def test_initialization_store_array(self):
self.check_rewrite("""
[p1, i2]
p0 = new_array_clear(5, descr=cdescr)
setarrayitem_gc(p0, i2, p1, descr=cdescr)
jump()
""", """
[p1, i2]
p0 = call_malloc_nursery( \
%(cdescr.basesize + 5 * cdescr.itemsize)d)
setfield_gc(p0, 8111, descr=tiddescr)
setfield_gc(p0, 5, descr=clendescr)
zero_array(p0, 0, 5, descr=cdescr)
setarrayitem_gc(p0, i2, p1, descr=cdescr)
jump()
""")
def test_zero_array_reduced_left(self):
self.check_rewrite("""
[p1, p2]
p0 = new_array_clear(5, descr=cdescr)
setarrayitem_gc(p0, 1, p1, descr=cdescr)
setarrayitem_gc(p0, 0, p2, descr=cdescr)
jump()
""", """
[p1, p2]
p0 = call_malloc_nursery( \
%(cdescr.basesize + 5 * cdescr.itemsize)d)
setfield_gc(p0, 8111, descr=tiddescr)
setfield_gc(p0, 5, descr=clendescr)
zero_array(p0, 2, 3, descr=cdescr)
setarrayitem_gc(p0, 1, p1, descr=cdescr)
setarrayitem_gc(p0, 0, p2, descr=cdescr)
jump()
""")
def test_zero_array_reduced_right(self):
self.check_rewrite("""
[p1, p2]
p0 = new_array_clear(5, descr=cdescr)
setarrayitem_gc(p0, 3, p1, descr=cdescr)
setarrayitem_gc(p0, 4, p2, descr=cdescr)
jump()
""", """
[p1, p2]
p0 = call_malloc_nursery( \
%(cdescr.basesize + 5 * cdescr.itemsize)d)
setfield_gc(p0, 8111, descr=tiddescr)
setfield_gc(p0, 5, descr=clendescr)
zero_array(p0, 0, 3, descr=cdescr)
setarrayitem_gc(p0, 3, p1, descr=cdescr)
setarrayitem_gc(p0, 4, p2, descr=cdescr)
jump()
""")
def test_zero_array_not_reduced_at_all(self):
self.check_rewrite("""
[p1, p2]
p0 = new_array_clear(5, descr=cdescr)
setarrayitem_gc(p0, 3, p1, descr=cdescr)
setarrayitem_gc(p0, 2, p2, descr=cdescr)
setarrayitem_gc(p0, 1, p2, descr=cdescr)
jump()
""", """
[p1, p2]
p0 = call_malloc_nursery( \
%(cdescr.basesize + 5 * cdescr.itemsize)d)
setfield_gc(p0, 8111, descr=tiddescr)
setfield_gc(p0, 5, descr=clendescr)
zero_array(p0, 0, 5, descr=cdescr)
setarrayitem_gc(p0, 3, p1, descr=cdescr)
setarrayitem_gc(p0, 2, p2, descr=cdescr)
setarrayitem_gc(p0, 1, p2, descr=cdescr)
jump()
""")
def test_zero_array_reduced_completely(self):
self.check_rewrite("""
[p1, p2]
p0 = new_array_clear(5, descr=cdescr)
setarrayitem_gc(p0, 3, p1, descr=cdescr)
setarrayitem_gc(p0, 4, p2, descr=cdescr)
setarrayitem_gc(p0, 0, p1, descr=cdescr)
setarrayitem_gc(p0, 2, p2, descr=cdescr)
setarrayitem_gc(p0, 1, p2, descr=cdescr)
jump()
""", """
[p1, p2]
p0 = call_malloc_nursery( \
%(cdescr.basesize + 5 * cdescr.itemsize)d)
setfield_gc(p0, 8111, descr=tiddescr)
setfield_gc(p0, 5, descr=clendescr)
zero_array(p0, 5, 0, descr=cdescr)
setarrayitem_gc(p0, 3, p1, descr=cdescr)
setarrayitem_gc(p0, 4, p2, descr=cdescr)
setarrayitem_gc(p0, 0, p1, descr=cdescr)
setarrayitem_gc(p0, 2, p2, descr=cdescr)
setarrayitem_gc(p0, 1, p2, descr=cdescr)
jump()
""")
def test_zero_array_reduced_left_with_call(self):
self.check_rewrite("""
[p1, p2]
p0 = new_array_clear(5, descr=cdescr)
setarrayitem_gc(p0, 0, p1, descr=cdescr)
call_n(321321)
setarrayitem_gc(p0, 1, p2, descr=cdescr)
jump()
""", """
[p1, p2]
p0 = call_malloc_nursery( \
%(cdescr.basesize + 5 * cdescr.itemsize)d)
setfield_gc(p0, 8111, descr=tiddescr)
setfield_gc(p0, 5, descr=clendescr)
zero_array(p0, 1, 4, descr=cdescr)
setarrayitem_gc(p0, 0, p1, descr=cdescr)
call_n(321321)
cond_call_gc_wb(p0, descr=wbdescr)
setarrayitem_gc(p0, 1, p2, descr=cdescr)
jump()
""")
def test_zero_array_reduced_left_with_label(self):
self.check_rewrite("""
[p1, p2]
p0 = new_array_clear(5, descr=cdescr)
setarrayitem_gc(p0, 0, p1, descr=cdescr)
label(p0, p2)
setarrayitem_gc(p0, 1, p2, descr=cdescr)
jump()
""", """
[p1, p2]
p0 = call_malloc_nursery( \
%(cdescr.basesize + 5 * cdescr.itemsize)d)
setfield_gc(p0, 8111, descr=tiddescr)
setfield_gc(p0, 5, descr=clendescr)
zero_array(p0, 1, 4, descr=cdescr)
setarrayitem_gc(p0, 0, p1, descr=cdescr)
label(p0, p2)
cond_call_gc_wb_array(p0, 1, descr=wbdescr)
setarrayitem_gc(p0, 1, p2, descr=cdescr)
jump()
""")
def test_zero_array_varsize(self):
self.check_rewrite("""
[p1, p2, i3]
p0 = new_array_clear(i3, descr=bdescr)
jump()
""", """
[p1, p2, i3]
p0 = call_malloc_nursery_varsize(0, 1, i3, descr=bdescr)
setfield_gc(p0, i3, descr=blendescr)
zero_array(p0, 0, i3, descr=bdescr)
jump()
""")
def test_zero_array_varsize_cannot_reduce(self):
self.check_rewrite("""
[p1, p2, i3]
p0 = new_array_clear(i3, descr=bdescr)
setarrayitem_gc(p0, 0, p1, descr=bdescr)
jump()
""", """
[p1, p2, i3]
p0 = call_malloc_nursery_varsize(0, 1, i3, descr=bdescr)
setfield_gc(p0, i3, descr=blendescr)
zero_array(p0, 0, i3, descr=bdescr)
cond_call_gc_wb_array(p0, 0, descr=wbdescr)
setarrayitem_gc(p0, 0, p1, descr=bdescr)
jump()
""")
def test_initialization_store_potentially_large_array(self):
# the write barrier cannot be omitted, because we might get
# an array with cards and the GC assumes that the write
# barrier is always called, even on young (but large) arrays
self.check_rewrite("""
[i0, p1, i2]
p0 = new_array(i0, descr=bdescr)
setarrayitem_gc(p0, i2, p1, descr=bdescr)
jump()
""", """
[i0, p1, i2]
p0 = call_malloc_nursery_varsize(0, 1, i0, descr=bdescr)
setfield_gc(p0, i0, descr=blendescr)
cond_call_gc_wb_array(p0, i2, descr=wbdescr)
setarrayitem_gc(p0, i2, p1, descr=bdescr)
jump()
""")
def test_non_initialization_store(self):
self.check_rewrite("""
[i0]
p0 = new(descr=tdescr)
p1 = newstr(i0)
setfield_gc(p0, p1, descr=tzdescr)
jump()
""", """
[i0]
p0 = call_malloc_nursery(%(tdescr.size)d)
setfield_gc(p0, 5678, descr=tiddescr)
zero_ptr_field(p0, %(tdescr.gc_fielddescrs[0].offset)s)
p1 = call_malloc_nursery_varsize(1, 1, i0, \
descr=strdescr)
setfield_gc(p1, i0, descr=strlendescr)
setfield_gc(p1, 0, descr=strhashdescr)
cond_call_gc_wb(p0, descr=wbdescr)
setfield_gc(p0, p1, descr=tzdescr)
jump()
""")
def test_non_initialization_store_label(self):
self.check_rewrite("""
[p1]
p0 = new(descr=tdescr)
label(p0, p1)
setfield_gc(p0, p1, descr=tzdescr)
jump()
""", """
[p1]
p0 = call_malloc_nursery(%(tdescr.size)d)
setfield_gc(p0, 5678, descr=tiddescr)
zero_ptr_field(p0, %(tdescr.gc_fielddescrs[0].offset)s)
label(p0, p1)
cond_call_gc_wb(p0, descr=wbdescr)
setfield_gc(p0, p1, descr=tzdescr)
jump()
""")
def test_multiple_writes(self):
self.check_rewrite("""
[p0, p1, p2]
setfield_gc(p0, p1, descr=tzdescr)
setfield_gc(p0, p2, descr=tzdescr)
jump(p1, p2, p0)
""", """
[p0, p1, p2]
cond_call_gc_wb(p0, descr=wbdescr)
setfield_gc(p0, p1, descr=tzdescr)
setfield_gc(p0, p2, descr=tzdescr)
jump(p1, p2, p0)
""")
def test_rewrite_call_assembler(self):
self.check_rewrite("""
[i0, f0]
i2 = call_assembler_i(i0, f0, descr=casmdescr)
""", """
[i0, f0]
i1 = getfield_raw_i(ConstClass(frame_info), descr=jfi_frame_size)
p1 = call_malloc_nursery_varsize_frame(i1)
setfield_gc(p1, 0, descr=tiddescr)
i2 = getfield_raw_i(ConstClass(frame_info), descr=jfi_frame_depth)
setfield_gc(p1, 0, descr=jf_extra_stack_depth)
setfield_gc(p1, NULL, descr=jf_savedata)
setfield_gc(p1, NULL, descr=jf_force_descr)
setfield_gc(p1, NULL, descr=jf_descr)
setfield_gc(p1, NULL, descr=jf_guard_exc)
setfield_gc(p1, NULL, descr=jf_forward)
setfield_gc(p1, i2, descr=framelendescr)
setfield_gc(p1, ConstClass(frame_info), descr=jf_frame_info)
setarrayitem_gc(p1, 0, i0, descr=signedframedescr)
setarrayitem_gc(p1, 1, f0, descr=floatframedescr)
i3 = call_assembler_i(p1, descr=casmdescr)
""")
def test_int_add_ovf(self):
self.check_rewrite("""
[i0]
p0 = new(descr=tdescr)
i1 = int_add_ovf(i0, 123)
guard_overflow(descr=guarddescr) []
jump()
""", """
[i0]
p0 = call_malloc_nursery(%(tdescr.size)d)
setfield_gc(p0, 5678, descr=tiddescr)
zero_ptr_field(p0, %(tdescr.gc_fielddescrs[0].offset)s)
i1 = int_add_ovf(i0, 123)
guard_overflow(descr=guarddescr) []
jump()
""")
def test_int_gt(self):
self.check_rewrite("""
[i0]
p0 = new(descr=tdescr)
i1 = int_gt(i0, 123)
guard_false(i1, descr=guarddescr) []
jump()
""", """
[i0]
p0 = call_malloc_nursery(%(tdescr.size)d)
setfield_gc(p0, 5678, descr=tiddescr)
zero_ptr_field(p0, %(tdescr.gc_fielddescrs[0].offset)s)
i1 = int_gt(i0, 123)
guard_false(i1, descr=guarddescr) []
jump()
""")
def test_zero_ptr_field_before_getfield(self):
# This case may need to be fixed in the metainterp/optimizeopt
# already so that it no longer occurs for rewrite.py. But anyway
# it's a good idea to make sure rewrite.py is correct on its own.
self.check_rewrite("""
[]
p0 = new(descr=tdescr)
p1 = getfield_gc_r(p0, descr=tdescr)
jump(p1)
""", """
[]
p0 = call_malloc_nursery(%(tdescr.size)d)
setfield_gc(p0, 5678, descr=tiddescr)
zero_ptr_field(p0, %(tdescr.gc_fielddescrs[0].offset)s)
p1 = getfield_gc_r(p0, descr=tdescr)
jump(p1)
""")
| [((26, 11, 26, 61), 'rpython.rtyper.lltypesystem.lltype.malloc', 'lltype.malloc', (), '', False, 'from rpython.rtyper.lltypesystem import lltype, rffi\n'), ((30, 12, 31, 54), 'rpython.rtyper.lltypesystem.lltype.GcStruct', 'lltype.GcStruct', ({(30, 28, 30, 31): '"""S"""', (30, 33, 30, 53): "('x', lltype.Signed)", (31, 33, 31, 53): "('y', lltype.Signed)"}, {}), "('S', ('x', lltype.Signed), ('y', lltype.Signed))", False, 'from rpython.rtyper.lltypesystem import lltype, rffi\n'), ((32, 17, 32, 52), 'rpython.jit.backend.llsupport.descr.get_size_descr', 'get_size_descr', ({(32, 32, 32, 48): 'self.gc_ll_descr', (32, 50, 32, 51): 'S'}, {}), '(self.gc_ll_descr, S)', False, 'from rpython.jit.backend.llsupport.descr import get_size_descr, get_field_descr, get_array_descr, ArrayDescr, FieldDescr, SizeDescr, get_interiorfield_descr\n'), ((38, 17, 38, 52), 'rpython.jit.backend.llsupport.descr.get_size_descr', 'get_size_descr', ({(38, 32, 38, 48): 'self.gc_ll_descr', (38, 50, 38, 51): 'T'}, {}), '(self.gc_ll_descr, T)', False, 'from rpython.jit.backend.llsupport.descr import get_size_descr, get_field_descr, get_array_descr, ArrayDescr, FieldDescr, SizeDescr, get_interiorfield_descr\n'), ((40, 18, 40, 59), 'rpython.jit.backend.llsupport.descr.get_field_descr', 'get_field_descr', ({(40, 34, 40, 50): 'self.gc_ll_descr', (40, 52, 40, 53): 'T', (40, 55, 40, 58): '"""z"""'}, {}), "(self.gc_ll_descr, T, 'z')", False, 'from rpython.jit.backend.llsupport.descr import get_size_descr, get_field_descr, get_array_descr, ArrayDescr, FieldDescr, SizeDescr, get_interiorfield_descr\n'), ((42, 12, 42, 41), 'rpython.rtyper.lltypesystem.lltype.GcArray', 'lltype.GcArray', ({(42, 27, 42, 40): 'lltype.Signed'}, {}), '(lltype.Signed)', False, 'from rpython.rtyper.lltypesystem import lltype, rffi\n'), ((43, 17, 43, 53), 'rpython.jit.backend.llsupport.descr.get_array_descr', 'get_array_descr', ({(43, 33, 43, 49): 'self.gc_ll_descr', (43, 51, 43, 52): 'A'}, {}), '(self.gc_ll_descr, A)', False, 'from rpython.jit.backend.llsupport.descr import get_size_descr, get_field_descr, get_array_descr, ArrayDescr, FieldDescr, SizeDescr, get_interiorfield_descr\n'), ((47, 12, 47, 39), 'rpython.rtyper.lltypesystem.lltype.GcArray', 'lltype.GcArray', ({(47, 27, 47, 38): 'lltype.Char'}, {}), '(lltype.Char)', False, 'from rpython.rtyper.lltypesystem import lltype, rffi\n'), ((48, 17, 48, 53), 'rpython.jit.backend.llsupport.descr.get_array_descr', 'get_array_descr', ({(48, 33, 48, 49): 'self.gc_ll_descr', (48, 51, 48, 52): 'B'}, {}), '(self.gc_ll_descr, B)', False, 'from rpython.jit.backend.llsupport.descr import get_size_descr, get_field_descr, get_array_descr, ArrayDescr, FieldDescr, SizeDescr, get_interiorfield_descr\n'), ((53, 17, 53, 53), 'rpython.jit.backend.llsupport.descr.get_array_descr', 'get_array_descr', ({(53, 33, 53, 49): 'self.gc_ll_descr', (53, 51, 53, 52): 'C'}, {}), '(self.gc_ll_descr, C)', False, 'from rpython.jit.backend.llsupport.descr import get_size_descr, get_field_descr, get_array_descr, ArrayDescr, FieldDescr, SizeDescr, get_interiorfield_descr\n'), ((57, 12, 57, 36), 'rpython.rtyper.lltypesystem.lltype.GcStruct', 'lltype.GcStruct', ({(57, 28, 57, 35): '"""Empty"""'}, {}), "('Empty')", False, 'from rpython.rtyper.lltypesystem import lltype, rffi\n'), ((58, 17, 58, 52), 'rpython.jit.backend.llsupport.descr.get_size_descr', 'get_size_descr', ({(58, 32, 58, 48): 'self.gc_ll_descr', (58, 50, 58, 51): 'E'}, {}), '(self.gc_ll_descr, E)', False, 'from rpython.jit.backend.llsupport.descr import get_size_descr, get_field_descr, 
get_array_descr, ArrayDescr, FieldDescr, SizeDescr, get_interiorfield_descr\n'), ((62, 12, 63, 54), 'rpython.rtyper.lltypesystem.lltype.GcStruct', 'lltype.GcStruct', ({(62, 28, 62, 31): '"""O"""', (62, 33, 62, 58): "('parent', rclass.OBJECT)", (63, 33, 63, 53): "('x', lltype.Signed)"}, {}), "('O', ('parent', rclass.OBJECT), ('x', lltype.Signed))", False, 'from rpython.rtyper.lltypesystem import lltype, rffi\n'), ((78, 20, 78, 34), 'rpython.jit.metainterp.history.JitCellToken', 'JitCellToken', ({}, {}), '()', False, 'from rpython.jit.metainterp.history import JitCellToken, FLOAT\n'), ((81, 21, 81, 71), 'rpython.rtyper.lltypesystem.lltype.malloc', 'lltype.malloc', (), '', False, 'from rpython.rtyper.lltypesystem import lltype, rffi\n'), ((100, 21, 100, 40), 'rpython.jit.metainterp.history.AbstractFailDescr', 'AbstractFailDescr', ({}, {}), '()', False, 'from rpython.jit.metainterp.history import AbstractFailDescr\n'), ((109, 14, 109, 56), 'rpython.jit.tool.oparser.parse', 'parse', (), '', False, 'from rpython.jit.tool.oparser import parse\n'), ((118, 8, 118, 66), 'rpython.jit.metainterp.optimizeopt.util.equaloplists', 'equaloplists', (), '', False, 'from rpython.jit.metainterp.optimizeopt.util import equaloplists\n'), ((119, 8, 119, 45), 'rpython.rtyper.lltypesystem.lltype.free', 'lltype.free', (), '', False, 'from rpython.rtyper.lltypesystem import lltype, rffi\n'), ((169, 27, 169, 60), 'rpython.jit.backend.llsupport.gc.GcLLDescr_boehm', 'GcLLDescr_boehm', ({(169, 43, 169, 47): 'None', (169, 49, 169, 53): 'None', (169, 55, 169, 59): 'None'}, {}), '(None, None, None)', False, 'from rpython.jit.backend.llsupport.gc import GcLLDescr_boehm, GcLLDescr_framework\n'), ((295, 18, 295, 42), 'rpython.jit.metainterp.gc.get_description', 'get_description', ({(295, 34, 295, 41): 'config_'}, {}), '(config_)', False, 'from rpython.jit.metainterp.gc import get_description\n'), ((296, 27, 297, 74), 'rpython.jit.backend.llsupport.gc.GcLLDescr_framework', 'GcLLDescr_framework', (), '', False, 'from rpython.jit.backend.llsupport.gc import GcLLDescr_boehm, GcLLDescr_framework\n'), ((454, 17, 454, 45), 'rpython.rtyper.lltypesystem.lltype.GcArray', 'lltype.GcArray', ({(454, 32, 454, 44): 'lltype.Float'}, {}), '(lltype.Float)', False, 'from rpython.rtyper.lltypesystem import lltype, rffi\n'), ((455, 23, 455, 64), 'rpython.jit.backend.llsupport.descr.get_array_descr', 'get_array_descr', ({(455, 39, 455, 55): 'self.gc_ll_descr', (455, 57, 455, 63): 'NONSTD'}, {}), '(self.gc_ll_descr, NONSTD)', False, 'from rpython.jit.backend.llsupport.descr import get_size_descr, get_field_descr, get_array_descr, ArrayDescr, FieldDescr, SizeDescr, get_interiorfield_descr\n'), ((689, 13, 689, 34), 'rpython.rtyper.lltypesystem.lltype.GcStruct', 'lltype.GcStruct', ({(689, 29, 689, 33): '"""S1"""'}, {}), "('S1')", False, 'from rpython.rtyper.lltypesystem import lltype, rffi\n'), ((691, 24, 691, 67), 'rpython.jit.backend.llsupport.descr.get_array_descr', 'get_array_descr', ({(691, 40, 691, 56): 'self.gc_ll_descr', (691, 58, 691, 66): 'INTERIOR'}, {}), '(self.gc_ll_descr, INTERIOR)', False, 'from rpython.jit.backend.llsupport.descr import get_size_descr, get_field_descr, get_array_descr, ArrayDescr, FieldDescr, SizeDescr, get_interiorfield_descr\n'), ((694, 25, 695, 63), 'rpython.jit.backend.llsupport.descr.get_interiorfield_descr', 'get_interiorfield_descr', ({(694, 49, 694, 65): 'self.gc_ll_descr', (695, 49, 695, 57): 'INTERIOR', (695, 59, 695, 62): '"""z"""'}, {}), "(self.gc_ll_descr, INTERIOR, 'z')", False, 'from 
rpython.jit.backend.llsupport.descr import get_size_descr, get_field_descr, get_array_descr, ArrayDescr, FieldDescr, SizeDescr, get_interiorfield_descr\n'), ((52, 27, 52, 40), 'rpython.rtyper.lltypesystem.lltype.Ptr', 'lltype.Ptr', ({(52, 38, 52, 39): 'S'}, {}), '(S)', False, 'from rpython.rtyper.lltypesystem import lltype, rffi\n'), ((130, 49, 130, 75), 'rpython.jit.backend.llsupport.descr.FieldDescr', 'FieldDescr', ({(130, 60, 130, 65): '"""len"""', (130, 67, 130, 68): '0', (130, 70, 130, 71): '0', (130, 73, 130, 74): '0'}, {}), "('len', 0, 0, 0)", False, 'from rpython.jit.backend.llsupport.descr import get_size_descr, get_field_descr, get_array_descr, ArrayDescr, FieldDescr, SizeDescr, get_interiorfield_descr\n'), ((131, 48, 131, 74), 'rpython.jit.backend.llsupport.descr.FieldDescr', 'FieldDescr', ({(131, 59, 131, 64): '"""len"""', (131, 66, 131, 67): '0', (131, 69, 131, 70): '0', (131, 72, 131, 73): '0'}, {}), "('len', 0, 0, 0)", False, 'from rpython.jit.backend.llsupport.descr import get_size_descr, get_field_descr, get_array_descr, ArrayDescr, FieldDescr, SizeDescr, get_interiorfield_descr\n'), ((36, 39, 36, 52), 'rpython.rtyper.lltypesystem.lltype.Ptr', 'lltype.Ptr', ({(36, 50, 36, 51): 'S'}, {}), '(S)', False, 'from rpython.rtyper.lltypesystem import lltype, rffi\n'), ((157, 16, 157, 42), 'rpython.jit.backend.llsupport.descr.FieldDescr', 'FieldDescr', ({(157, 27, 157, 32): 'fname', (157, 34, 157, 35): '1', (157, 37, 157, 38): '1', (157, 40, 157, 41): '1'}, {}), '(fname, 1, 1, 1)', False, 'from rpython.jit.backend.llsupport.descr import get_size_descr, get_field_descr, get_array_descr, ArrayDescr, FieldDescr, SizeDescr, get_interiorfield_descr\n'), ((166, 23, 167, 49), 'rpython.jit.backend.llsupport.descr.SizeDescr', 'SizeDescr', (), '', False, 'from rpython.jit.backend.llsupport.descr import get_size_descr, get_field_descr, get_array_descr, ArrayDescr, FieldDescr, SizeDescr, get_interiorfield_descr\n'), ((304, 24, 304, 57), 'rpython.jit.backend.llsupport.descr.SizeDescr', 'SizeDescr', (), '', False, 'from rpython.jit.backend.llsupport.descr import get_size_descr, get_field_descr, get_array_descr, ArrayDescr, FieldDescr, SizeDescr, get_interiorfield_descr\n'), ((690, 40, 690, 54), 'rpython.rtyper.lltypesystem.lltype.Ptr', 'lltype.Ptr', ({(690, 51, 690, 53): 'S1'}, {}), '(S1)', False, 'from rpython.rtyper.lltypesystem import lltype, rffi\n'), ((148, 33, 148, 59), 'rpython.jit.backend.llsupport.descr.FieldDescr', 'FieldDescr', ({(148, 44, 148, 49): '"""len"""', (148, 51, 148, 52): '0', (148, 54, 148, 55): '0', (148, 57, 148, 58): '0'}, {}), "('len', 0, 0, 0)", False, 'from rpython.jit.backend.llsupport.descr import get_size_descr, get_field_descr, get_array_descr, ArrayDescr, FieldDescr, SizeDescr, get_interiorfield_descr\n')] |
sanchaymittal/hummingbot | hummingbot/client/command/history_command.py | f8d1c19dfd0875bd12717f9c46ddbe20cc7b9a0d | from decimal import Decimal
import pandas as pd
from typing import (
Any,
Dict,
Set,
Tuple,
TYPE_CHECKING)
from hummingbot.client.performance_analysis import PerformanceAnalysis
from hummingbot.core.utils.exchange_rate_conversion import ExchangeRateConversion
from hummingbot.market.market_base import MarketBase
from hummingbot.strategy.market_trading_pair_tuple import MarketTradingPairTuple
ERC = ExchangeRateConversion.get_instance()
s_float_0 = float(0)
if TYPE_CHECKING:
from hummingbot.client.hummingbot_application import HummingbotApplication
class HistoryCommand:
def history(self, # type: HummingbotApplication
):
if not all(market.ready for market in self.markets.values()):
self._notify(" History stats are not available before Markets are ready.")
return
self.list_trades()
self.trade_performance_report()
def balance_snapshot(self, # type: HummingbotApplication
) -> Dict[str, Dict[str, float]]:
snapshot: Dict[str, Any] = {}
for market_name in self.markets:
balance_dict = self.markets[market_name].get_all_balances()
balance_dict = {k.upper(): v for k, v in balance_dict.items()}
for asset in self.assets:
asset = asset.upper()
if asset not in snapshot:
snapshot[asset] = {}
if asset in balance_dict:
snapshot[asset][market_name] = balance_dict[asset]
else:
snapshot[asset][market_name] = 0.0
return snapshot
def balance_comparison_data_frame(self, # type: HummingbotApplication
                                      market_trading_pair_stats: Dict[MarketTradingPairTuple, Any],
) -> pd.DataFrame:
if len(self.starting_balances) == 0:
self._notify(" Balance snapshots are not available before bot starts")
return
rows = []
for market_trading_pair_tuple in self.market_trading_pair_tuples:
market: MarketBase = market_trading_pair_tuple.market
for asset in set(a.upper() for a in self.assets):
asset_delta: Dict[str, float] = market_trading_pair_stats[market_trading_pair_tuple]["asset"].get(
asset, {"delta": s_float_0})
starting_balance = self.starting_balances.get(asset).get(market.name)
current_balance = self.balance_snapshot().get(asset).get(market.name)
rows.append([market.display_name,
asset,
float(starting_balance),
float(current_balance),
float(current_balance - starting_balance),
float(asset_delta["delta"]),
ERC.adjust_token_rate(asset, Decimal(1))])
df = pd.DataFrame(rows, index=None, columns=["Market", "Asset", "Starting", "Current", "Net_Delta",
"Trade_Delta", "Conversion_Rate"])
return df
def get_performance_analysis_with_updated_balance(self, # type: HummingbotApplication
) -> PerformanceAnalysis:
performance_analysis = PerformanceAnalysis()
dedup_set: Set[Tuple[str, str, bool]] = set()
for market_trading_pair_tuple in self.market_trading_pair_tuples:
for is_base in [True, False]:
for is_starting in [True, False]:
market_name = market_trading_pair_tuple.market.name
asset_name = market_trading_pair_tuple.base_asset if is_base else market_trading_pair_tuple.quote_asset
asset_name = asset_name.upper()
if len(self.assets) == 0 or len(self.markets) == 0:
# Prevent KeyError '***SYMBOL***'
amount = self.starting_balances[asset_name][market_name]
else:
amount = self.starting_balances[asset_name][market_name] if is_starting \
else self.balance_snapshot()[asset_name][market_name]
amount = float(amount)
                    # Adding this check to prevent assets in the same market from being added multiple times
if (market_name, asset_name, is_starting) not in dedup_set:
dedup_set.add((market_name, asset_name, is_starting))
performance_analysis.add_balances(asset_name, amount, is_base, is_starting)
return performance_analysis
def get_market_mid_price(self, # type: HummingbotApplication
) -> float:
# Compute the current exchange rate. We use the first market_symbol_pair because
# if the trading pairs are different, such as WETH-DAI and ETH-USD, the currency
# pairs above will contain the information in terms of the first trading pair.
market_pair_info = self.market_trading_pair_tuples[0]
market = market_pair_info.market
buy_price = market.get_price(market_pair_info.trading_pair, True)
sell_price = market.get_price(market_pair_info.trading_pair, False)
price = float((buy_price + sell_price) / 2)
return price
def analyze_performance(self, # type: HummingbotApplication
):
""" Calculate bot profitability and print to output pane """
if len(self.starting_balances) == 0:
self._notify(" Performance analysis is not available before bot starts")
return
performance_analysis: PerformanceAnalysis = self.get_performance_analysis_with_updated_balance()
price: float = self.get_market_mid_price()
starting_token, starting_amount = performance_analysis.compute_starting(price)
current_token, current_amount = performance_analysis.compute_current(price)
delta_token, delta_amount = performance_analysis.compute_delta(price)
return_performance = performance_analysis.compute_return(price)
starting_amount = round(starting_amount, 3)
current_amount = round(current_amount, 3)
delta_amount = round(delta_amount, 3)
return_performance = round(return_performance, 3)
print_performance = "\n"
print_performance += " Performance:\n"
print_performance += " - Starting Inventory Value: " + str(starting_amount) + " " + starting_token + "\n"
print_performance += " - Current Inventory Value: " + str(current_amount) + " " + current_token + "\n"
print_performance += " - Delta: " + str(delta_amount) + " " + delta_token + "\n"
print_performance += " - Return: " + str(return_performance) + "%"
self._notify(print_performance)
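        # Illustrative shape of the notification built above (numbers and the
        # quote token are made up; real values come from PerformanceAnalysis):
        #   Performance:
        #   - Starting Inventory Value: 1000.0 USDT
        #   - Current Inventory Value: 1012.5 USDT
        #   - Delta: 12.5 USDT
        #   - Return: 1.25%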
def calculate_profitability(self) -> float:
""" Determine the profitability of the trading bot. """
performance_analysis: PerformanceAnalysis = self.get_performance_analysis_with_updated_balance()
price: float = self.get_market_mid_price()
return_performance = performance_analysis.compute_return(price)
return return_performance
def trade_performance_report(self, # type: HummingbotApplication
) -> pd.DataFrame:
if len(self.market_trading_pair_tuples) == 0:
self._notify(" Performance analysis is not available before bot starts")
return
try:
current_strategy_name: str = self.markets_recorder.strategy_name
analysis_start_time: int = self.init_time
primary_quote_asset: str = self.market_trading_pair_tuples[0].quote_asset.upper()
performance_analysis: PerformanceAnalysis = PerformanceAnalysis()
trade_performance_stats, market_trading_pair_stats = performance_analysis.calculate_trade_performance(
analysis_start_time,
current_strategy_name,
self.market_trading_pair_tuples
)
trade_performance_status_line = []
market_df_data: Set[Tuple[str, str, float, float, str, str]] = set()
market_df_columns = ["Market", "Trading_Pair", "Start_Price", "End_Price",
"Total_Value_Delta", "Profit"]
for market_trading_pair_tuple, trading_pair_stats in market_trading_pair_stats.items():
market_df_data.add((
market_trading_pair_tuple.market.display_name,
market_trading_pair_tuple.trading_pair.upper(),
float(trading_pair_stats["starting_quote_rate"]),
float(trading_pair_stats["end_quote_rate"]),
f"{trading_pair_stats['trading_pair_delta']:.8f} {primary_quote_asset}",
f"{trading_pair_stats['trading_pair_delta_percentage']:.3f} %"
))
inventory_df: pd.DataFrame = self.balance_comparison_data_frame(market_trading_pair_stats)
market_df: pd.DataFrame = pd.DataFrame(data=list(market_df_data), columns=market_df_columns)
portfolio_delta: Decimal = trade_performance_stats["portfolio_delta"]
portfolio_delta_percentage: Decimal = trade_performance_stats["portfolio_delta_percentage"]
trade_performance_status_line.extend(["", " Inventory:"] +
[" " + line for line in inventory_df.to_string().split("\n")])
trade_performance_status_line.extend(["", " Market Trading Pair Performance:"] +
[" " + line for line in market_df.to_string().split("\n")])
trade_performance_status_line.extend(
["", " Portfolio Performance:"] +
[f" Quote Value Delta: {portfolio_delta:.7g} {primary_quote_asset}"] +
[f" Delta Percentage: {portfolio_delta_percentage:.3f} %"])
self._notify("\n".join(trade_performance_status_line))
except Exception:
self.logger().error("Unexpected error running performance analysis.", exc_info=True)
self._notify("Error running performance analysis")
| [((15, 6, 15, 43), 'hummingbot.core.utils.exchange_rate_conversion.ExchangeRateConversion.get_instance', 'ExchangeRateConversion.get_instance', ({}, {}), '()', False, 'from hummingbot.core.utils.exchange_rate_conversion import ExchangeRateConversion\n'), ((70, 13, 71, 87), 'pandas.DataFrame', 'pd.DataFrame', (), '', True, 'import pandas as pd\n'), ((76, 31, 76, 52), 'hummingbot.client.performance_analysis.PerformanceAnalysis', 'PerformanceAnalysis', ({}, {}), '()', False, 'from hummingbot.client.performance_analysis import PerformanceAnalysis\n'), ((157, 56, 157, 77), 'hummingbot.client.performance_analysis.PerformanceAnalysis', 'PerformanceAnalysis', ({}, {}), '()', False, 'from hummingbot.client.performance_analysis import PerformanceAnalysis\n'), ((69, 58, 69, 68), 'decimal.Decimal', 'Decimal', ({(69, 66, 69, 67): '(1)'}, {}), '(1)', False, 'from decimal import Decimal\n')] |
sami2316/asm2vec-pytorch | scripts/bin2asm.py | 5de1351aeda61d7467b3231e48437fd8d34a970c | import re
import os
import click
import r2pipe
import hashlib
from pathlib import Path
import _pickle as cPickle
def sha3(data):
return hashlib.sha3_256(data.encode()).hexdigest()
def validEXE(filename):
magics = [bytes.fromhex('7f454c46')]
with open(filename, 'rb') as f:
header = f.read(4)
return header in magics
def normalize(opcode):
opcode = opcode.replace(' - ', ' + ')
opcode = re.sub(r'0x[0-9a-f]+', 'CONST', opcode)
opcode = re.sub(r'\*[0-9]', '*CONST', opcode)
opcode = re.sub(r' [0-9]', ' CONST', opcode)
return opcode
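# Illustrative effect of normalize() on a few radare2 opcodes (examples for
# clarity only, not exhaustive):
#   "mov eax, 0x1f"      -> "mov eax, CONST"
#   "lea rcx, [rdx*4]"   -> "lea rcx, [rdx*CONST]"
#   "lea rax, [rbp - 8]" -> "lea rax, [rbp + CONST]"   (minus folded into plus first)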
def fn2asm(pdf, minlen):
# check
if pdf is None:
return
if len(pdf['ops']) < minlen:
return
if 'invalid' in [op['type'] for op in pdf['ops']]:
return
ops = pdf['ops']
# set label
labels, scope = {}, [op['offset'] for op in ops]
assert(None not in scope)
for i, op in enumerate(ops):
if op.get('jump') in scope:
labels.setdefault(op.get('jump'), i)
# dump output
output = ''
for op in ops:
# add label
if labels.get(op.get('offset')) is not None:
output += f'LABEL{labels[op["offset"]]}:\n'
# add instruction
if labels.get(op.get('jump')) is not None:
output += f' {op["type"]} LABEL{labels[op["jump"]]}\n'
else:
output += f' {normalize(op["opcode"])}\n'
return output
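# Sketch of the text fn2asm() emits for a hypothetical function: jump targets
# inside the function become LABELn lines (n is the index of the jumping op),
# jump instructions are emitted as "<type> LABELn", everything else is
# normalized:
#   LABEL1:
#    mov eax, CONST
#    cjmp LABEL1
#    ret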
def bin2asm(filename, opath, minlen):
#
# Create directory where results will be written to.
#
results_dir = os.path.join(opath, os.path.basename(filename))
if not os.access(results_dir, os.F_OK):
os.makedirs(results_dir)
r = r2pipe.open(str(filename))
r.cmd('aaaa')
count = 0
fp = open("%s/fv.pcl" % (results_dir), 'wb')
for fn in r.cmdj('aflj'):
r.cmd(f's {fn["offset"]}')
asm = fn2asm(r.cmdj('pdfj'), minlen)
if asm:
fv = [
fn["name"],
asm
]
cPickle.dump(fv, fp)
count += 1
fp.close()
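    # Note: fv.pcl now holds one pickled record per extracted function, each of
    # the form [function_name, assembly_text]; it can be read back with repeated
    # cPickle.load() calls on the open file until EOFError is raised.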
print(f'[+] {filename}')
return count
@click.command()
@click.option('-i', '--input', 'ipath', help='input directory / file', required=True)
@click.option('-o', '--output', 'opath', default='asm', help='output directory')
@click.option('-l', '--len', 'minlen', default=1, help='ignore assembly code with instructions amount smaller than minlen')
def cli(ipath, opath, minlen):
'''
Extract assembly functions from binary executable
'''
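    # Illustrative invocation (paths are placeholders):
    #   python bin2asm.py -i /usr/bin/ls -o asm -l 5
    # scans /usr/bin/ls (or every file in a directory passed via -i) and writes
    # one result directory of pickled assembly functions per binary under asm/,
    # skipping functions with fewer than 5 instructions.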
ipath = Path(ipath)
opath = Path(opath)
# create output directory
if not os.path.exists(opath):
os.mkdir(opath)
fcount, bcount = 0, 0
# directory
if os.path.isdir(ipath):
for f in os.listdir(ipath):
if not os.path.islink(ipath / f) and not os.path.isdir(ipath / f):
fcount += bin2asm(ipath / f, opath, minlen)
bcount += 1
# file
elif os.path.exists(ipath):
fcount += bin2asm(ipath, opath, minlen)
bcount += 1
else:
print(f'[Error] No such file or directory: {ipath}')
print(f'[+] Total scan binary: {bcount} => Total generated assembly functions: {fcount}')
if __name__ == '__main__':
cli()
| [((88, 1, 88, 16), 'click.command', 'click.command', ({}, {}), '()', False, 'import click\n'), ((89, 1, 89, 85), 'click.option', 'click.option', (), '', False, 'import click\n'), ((90, 1, 90, 80), 'click.option', 'click.option', (), '', False, 'import click\n'), ((91, 1, 91, 123), 'click.option', 'click.option', (), '', False, 'import click\n'), ((20, 13, 20, 52), 're.sub', 're.sub', ({(20, 20, 20, 34): '"""0x[0-9a-f]+"""', (20, 36, 20, 43): '"""CONST"""', (20, 45, 20, 51): 'opcode'}, {}), "('0x[0-9a-f]+', 'CONST', opcode)", False, 'import re\n'), ((21, 13, 21, 49), 're.sub', 're.sub', ({(21, 20, 21, 30): '"""\\\\*[0-9]"""', (21, 32, 21, 40): '"""*CONST"""', (21, 42, 21, 48): 'opcode'}, {}), "('\\\\*[0-9]', '*CONST', opcode)", False, 'import re\n'), ((22, 13, 22, 48), 're.sub', 're.sub', ({(22, 20, 22, 29): '""" [0-9]"""', (22, 31, 22, 39): '""" CONST"""', (22, 41, 22, 47): 'opcode'}, {}), "(' [0-9]', ' CONST', opcode)", False, 'import re\n'), ((96, 12, 96, 23), 'pathlib.Path', 'Path', ({(96, 17, 96, 22): 'ipath'}, {}), '(ipath)', False, 'from pathlib import Path\n'), ((97, 12, 97, 23), 'pathlib.Path', 'Path', ({(97, 17, 97, 22): 'opath'}, {}), '(opath)', False, 'from pathlib import Path\n'), ((106, 7, 106, 27), 'os.path.isdir', 'os.path.isdir', ({(106, 21, 106, 26): 'ipath'}, {}), '(ipath)', False, 'import os\n'), ((61, 38, 61, 64), 'os.path.basename', 'os.path.basename', ({(61, 55, 61, 63): 'filename'}, {}), '(filename)', False, 'import os\n'), ((62, 11, 62, 42), 'os.access', 'os.access', ({(62, 21, 62, 32): 'results_dir', (62, 34, 62, 41): 'os.F_OK'}, {}), '(results_dir, os.F_OK)', False, 'import os\n'), ((63, 8, 63, 32), 'os.makedirs', 'os.makedirs', ({(63, 20, 63, 31): 'results_dir'}, {}), '(results_dir)', False, 'import os\n'), ((100, 11, 100, 32), 'os.path.exists', 'os.path.exists', ({(100, 26, 100, 31): 'opath'}, {}), '(opath)', False, 'import os\n'), ((101, 8, 101, 23), 'os.mkdir', 'os.mkdir', ({(101, 17, 101, 22): 'opath'}, {}), '(opath)', False, 'import os\n'), ((107, 17, 107, 34), 'os.listdir', 'os.listdir', ({(107, 28, 107, 33): 'ipath'}, {}), '(ipath)', False, 'import os\n'), ((112, 9, 112, 30), 'os.path.exists', 'os.path.exists', ({(112, 24, 112, 29): 'ipath'}, {}), '(ipath)', False, 'import os\n'), ((80, 12, 80, 32), '_pickle.dump', 'cPickle.dump', ({(80, 25, 80, 27): 'fv', (80, 29, 80, 31): 'fp'}, {}), '(fv, fp)', True, 'import _pickle as cPickle\n'), ((108, 19, 108, 44), 'os.path.islink', 'os.path.islink', ({(108, 34, 108, 43): '(ipath / f)'}, {}), '(ipath / f)', False, 'import os\n'), ((108, 53, 108, 77), 'os.path.isdir', 'os.path.isdir', ({(108, 67, 108, 76): '(ipath / f)'}, {}), '(ipath / f)', False, 'import os\n')] |
Chyroc/homework | 6/4.py | b1ee8e9629b4dbb6c46a550d710157702d57b00b | import re
def remove_not_alpha_num(string):
return re.sub('[^0-9a-zA-Z]+', '', string)
if __name__ == '__main__':
print(remove_not_alpha_num('a000 aa-b') == 'a000aab')
| [((5, 11, 5, 46), 're.sub', 're.sub', ({(5, 18, 5, 33): '"""[^0-9a-zA-Z]+"""', (5, 35, 5, 37): '""""""', (5, 39, 5, 45): 'string'}, {}), "('[^0-9a-zA-Z]+', '', string)", False, 'import re\n')] |
DougLazyAngus/lazyAngus | LazyAngus/Assets/Extensions/IOSDeploy/Scripts/Editor/post_process.py | 485a8d5061ab740ab055abfc7fc5b86b864a5c7e | import os
from sys import argv
from mod_pbxproj import XcodeProject
#import appcontroller
path = argv[1]
frameworks = argv[2].split(' ')
libraries = argv[3].split(' ')
cflags = argv[4].split(' ')
ldflags = argv[5].split(' ')
folders = argv[6].split(' ')
print('Step 1: add system frameworks ')
#if framework is optional, add `weak=True`
project = XcodeProject.Load(path +'/Unity-iPhone.xcodeproj/project.pbxproj')
for frwrk in frameworks:
files = project.get_files_by_name(frwrk)
for f in files:
project.remove_file(f)
if len(frwrk) > 0:
fo = frwrk.split('|')
if int(fo[1]):
project.add_file('System/Library/Frameworks/' + fo[0], tree='SDKROOT', weak=True)
else:
project.add_file('System/Library/Frameworks/' + fo[0], tree='SDKROOT')
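# Each frameworks/libraries entry is expected in "name|weak_flag" form, e.g.
# "CoreBluetooth.framework|1" would be added as optional (weak=True) while
# "Security.framework|0" would be added as required. (The framework names here
# are only examples; the real values arrive via the command-line arguments above.)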
print('Step 2: add system libraries ')
for lib in libraries:
files = project.get_files_by_name(lib)
for f in files:
project.remove_file(f)
if len(lib) > 0:
lo = lib.split('|')
if int(lo[1]):
project.add_file('usr/lib/' + lo[0], tree='SDKROOT', weak=True)
else:
project.add_file('usr/lib/' + lo[0], tree='SDKROOT')
print('Step 3: add CFLAGS ')
for cf in cflags:
if len(cf) > 0:
project.add_other_cflags(cf)
print('Step 4: add LDFLAGS ')
for ldf in ldflags:
if len(ldf) > 0:
project.add_other_ldflags(ldf)
print('Step 5: add language folders')
for langFolder in folders:
if len(langFolder) > 0:
project.add_folder(path + '/' + langFolder + '.lproj')
print('Step 6: save our change to xcode project file')
if project.modified:
project.backup()
project.saveFormat3_2()
| [((15, 10, 15, 76), 'mod_pbxproj.XcodeProject.Load', 'XcodeProject.Load', ({(15, 28, 15, 75): "path + '/Unity-iPhone.xcodeproj/project.pbxproj'"}, {}), "(path + '/Unity-iPhone.xcodeproj/project.pbxproj')", False, 'from mod_pbxproj import XcodeProject\n')] |
TheAvidDev/pnoj-site | judge/migrations/0024_auto_20200705_0246.py | 63299e873b1fb654667545222ce2b3157e78acd9 | # Generated by Django 3.0.8 on 2020-07-05 02:46
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('judge', '0023_auto_20200704_2318'),
]
operations = [
migrations.AlterField(
model_name='submission',
name='language',
field=models.CharField(choices=[('python3', 'Python 3'), ('java8', 'Java 8'), ('cpp17', 'C++17'), ('haskell', 'Haskell'), ('brainfuck', 'Brainfuck'), ('c18', 'C18'), ('java11', 'Java 11'), ('scratch', 'Scratch'), ('text', 'Text')], max_length=10, null=True),
),
migrations.AlterField(
model_name='user',
name='main_language',
field=models.CharField(choices=[('python3', 'Python 3'), ('java8', 'Java 8'), ('cpp17', 'C++17'), ('haskell', 'Haskell'), ('brainfuck', 'Brainfuck'), ('c18', 'C18'), ('java11', 'Java 11'), ('scratch', 'Scratch'), ('text', 'Text')], default='python3', max_length=10),
),
]
| [((16, 18, 16, 269), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import migrations, models\n'), ((21, 18, 21, 277), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import migrations, models\n')] |
stottlerhenke-seattle/openbadge-hub-py | src/badge_hub.py | d0eb1772eb1250862041cc50071252f46d4c4771 | #!/usr/bin/env python
from __future__ import absolute_import, division, print_function
import os
import re
import shlex
import subprocess
import signal
import csv
import logging
import json
import time
from datetime import datetime as dt
from requests.exceptions import RequestException
import glob
import traceback
import random
from badge import *
from badge_discoverer import BadgeDiscoverer, BeaconDiscoverer
from badge_manager_server import BadgeManagerServer
from beacon_manager_server import BeaconManagerServer
from badge_manager_standalone import BadgeManagerStandalone
from beacon_manager_standalone import BeaconManagerStandalone
import hub_manager
from settings import DATA_DIR, LOG_DIR
log_file_name = LOG_DIR + 'hub.log'
scans_file_name = DATA_DIR + 'scan.txt'
pending_file_prefix = DATA_DIR + 'pending_'
audio_archive_file_name = DATA_DIR + 'audio_archive.txt'
proximity_archive_file_name = DATA_DIR + 'proximity_archive.txt'
standalone_audio_file = DATA_DIR + 'audio_data.txt'
standalone_proximity_file = DATA_DIR + 'proximity_data.txt'
AUDIO = "audio"
PROXIMITY = "proximity"
SCAN_DURATION = 3 # seconds
#NOTE try to keep under 100MB or so due to memory constraints
MAX_PENDING_FILE_SIZE = 15000000 # in bytes, so 15MB
# create logger with 'badge_server'
logger = logging.getLogger('badge_server')
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
fh = logging.FileHandler(log_file_name)
fh.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter and add it to the handlers
# formatter = logging.Formatter('%(asctime)s - %(levelname)s - [%(mac)s] %(message)s')
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
fh.setFormatter(formatter)
ch.setFormatter(formatter)
# add the handlers to the logger
logger.addHandler(fh)
logger.addHandler(ch)
def round_float_for_log(x):
return float("{0:.3f}".format(x))
def has_chunks(filename):
"""
Returns true if there is data in the file, and false otherwise
"""
return os.path.exists(filename) and os.path.getsize(filename) > 0
def offload_data():
"""
Send pending files to server and move pending to archive
Return True on success, False on failure
"""
#TODO test with standalone
#NOTE not currently doing anything with the True/False
# return values, might decide to do something later
pending_files = sorted(glob.glob(pending_file_prefix + "*"))
for pending_file_name in pending_files:
logger.debug("Sending {} to server".format(pending_file_name))
if not has_chunks(pending_file_name):
continue
chunks = []
with open(pending_file_name, "r") as pending_file:
for line in pending_file:
chunks.append(json.loads(line))
# real quick grab the data type from the first data entry
data_type = "audio" if "audio" in chunks[0]["type"] else "proximity"
# fire away!
try:
chunks_written = hub_manager.send_data_to_server(logger, data_type, chunks)
if chunks_written == len(chunks):
logger.debug("Successfully wrote {} data entries to server"
.format(len(chunks)))
else:
# this seems unlikely to happen but is good to keep track of i guess
logger.error("Data mismatch: {} data entries were not written to server"
.format(len(chunks) - chunks_written))
logger.error("Error sending data from file {} to server!"
.format(pending_file_name))
return False
# write to archive and erase pending file
with open(get_archive_name(data_type), "a") as archive_file:
for chunk in chunks:
archive_file.write(json.dumps(chunk) + "\n")
os.remove(pending_file_name)
except RequestException as e:
s = traceback.format_exc()
logger.error("Error sending data from file {} to server!"
.format(pending_file_name))
logger.error("{},{}".format(e,s))
return False
return True
def get_archive_name(data_type):
"""
Return the name of the archive file for the passed data type
"""
if data_type == AUDIO:
return audio_archive_file_name
else:
return proximity_archive_file_name
def get_proximity_name(mode="server"):
"""
    Return the name of the existing pending proximity file, or a new one
    if no pending file exists or the existing one has grown beyond
    MAX_PENDING_FILE_SIZE.
"""
if mode == "server":
return _get_pending_file_name(PROXIMITY)
else:
return standalone_proximity_file
def get_audio_name(mode="server"):
if mode == "server":
return _get_pending_file_name(AUDIO)
else:
return standalone_audio_file
def _get_pending_file_name(data_type):
"""
If there are no current pending files < MAX_PENDING_FILE_SIZE in size,
return a new pending filename
Else, return an existing one.
"""
filenames = filter(
lambda x: os.path.getsize(x) < MAX_PENDING_FILE_SIZE,
glob.glob("{}*{}*".format(pending_file_prefix, data_type)))
if len(filenames) == 0:
return _create_pending_file_name(data_type)
else:
return filenames[0]
def _create_pending_file_name(data_type):
"""
Create a pending file name for the given data_type
Uses the current date/time to create a unique filename
"""
now = dt.now().strftime("%Y%m%d%H%M%S")
filename = "{}{}_{}.txt".format(pending_file_prefix, now, data_type)
if os.path.exists(filename):
# this seems unlikely to happen, but just in case :)
# get the number of pending files that match this time and add one
files = glob.glob("{}{}*{}*".format(pending_file_prefix, now, data_type))
now = '_'.join((now, str(len(files) + 1)))
filename = "{}{}_{}.txt".format(pending_file_prefix, now, data_type)
return filename
def dialogue(bdg, activate_audio, activate_proximity, mode="server"):
"""
Attempts to read data from the device specified by the address. Reading is handled by gatttool.
:param bdg:
:return:
"""
ret = bdg.pull_data(activate_audio, activate_proximity)
addr = bdg.addr
if ret == 0:
logger.info("Successfully pulled data")
# if we were able to pull data, we saw the badge again
bdg.last_seen_ts = time.time()
else:
logger.info("Errors pulling data.")
if bdg.dlg.chunks:
logger.info("Chunks received: {}".format(len(bdg.dlg.chunks)))
logger.info("saving chunks to file")
# store in JSON file
with open(get_audio_name(mode), "a") as fout:
for chunk in bdg.dlg.chunks:
ts_with_ms = round_float_for_log(ts_and_fract_to_float(chunk.ts, chunk.fract))
log_line = {
'type': "audio received",
'log_timestamp': round_float_for_log(time.time()),
'log_index': -1, # need to find a good accumulator.
'data': {
'voltage': round_float_for_log(chunk.voltage),
'timestamp': ts_with_ms,
'sample_period': chunk.sampleDelay,
'num_samples': len(chunk.samples),
'samples': chunk.samples,
'badge_address': addr,
'member': bdg.key,
'member_id':bdg.badge_id
}
}
logger.debug("Chunk timestamp: {0:.3f}, Voltage: {1:.3f}, Delay: {2}, Samples in chunk: {3}".format(
ts_with_ms, chunk.voltage, chunk.sampleDelay, len(chunk.samples)))
#logger.debug(json.dumps(log_line))
json.dump(log_line, fout)
fout.write('\n')
logger.info("done writing")
# update badge object to hold latest timestamps
last_chunk = bdg.dlg.chunks[-1]
last_chunk_ts_pretty = dt.fromtimestamp(last_chunk.ts).strftime("%Y-%m-%d@%H:%M:%S UTC")
if bdg.is_newer_audio_ts(last_chunk.ts, last_chunk.fract):
logger.debug("Setting last badge audio timestamp to {} {} ({})".format(
last_chunk.ts, last_chunk.fract, last_chunk_ts_pretty))
bdg.set_audio_ts(last_chunk.ts, last_chunk.fract)
else:
logger.debug("Keeping existing timestamp ({}.{}) for {}. Last chunk timestamp was: {}.{} ({})"
.format(bdg.last_audio_ts_int,bdg.last_audio_ts_fract,bdg.addr,
                                         last_chunk.ts, last_chunk.fract, last_chunk_ts_pretty))
else:
logger.info("No mic data ready")
if bdg.dlg.scans:
logger.info("Proximity scans received: {}".format(len(bdg.dlg.scans)))
logger.info("saving proximity scans to file")
with open(get_proximity_name(mode), "a") as fout:
for scan in bdg.dlg.scans:
ts_with_ms = round_float_for_log(scan.ts)
log_line = {
'type': "proximity received",
'log_timestamp': round_float_for_log(time.time()),
'log_index': -1, # need to find a good accumulator.
'data': {
'voltage': round_float_for_log(scan.voltage),
'timestamp': ts_with_ms,
'badge_address': addr,
'rssi_distances':
{
device.ID: {'rssi': device.rssi, 'count': device.count} for device in scan.devices
},
'member': bdg.key,
'member_id': bdg.badge_id
}
}
logger.debug("SCAN: scan timestamp: {0:.3f}, voltage: {1:.3f}, Devices in scan: {2}".format(
ts_with_ms, scan.voltage, scan.numDevices))
#logger.info(json.dumps(log_line))
json.dump(log_line, fout)
fout.write('\n')
# update badge object to hold latest timestamps
last_scan = bdg.dlg.scans[-1]
last_scan_ts_pretty = dt.fromtimestamp(last_scan.ts).strftime("%Y-%m-%d@%H:%M:%S UTC")
logger.debug("Setting last badge proximity timestamp to {} ([])".format(
last_scan.ts, last_scan_ts_pretty))
bdg.last_proximity_ts = last_scan.ts
else:
logger.info("No proximity scans ready")
def scan_for_devices(devices_whitelist, show_all=False):
bd = BadgeDiscoverer(logger)
try:
all_devices = bd.discover(scan_duration=SCAN_DURATION)
except Exception as e: # catch *all* exceptions
logger.error("[Badges] Scan failed,{}".format(e))
all_devices = {}
scanned_devices = []
for addr,device_info in all_devices.iteritems():
if addr in devices_whitelist:
logger.debug("\033[1;7m\033[1;32mFound {}, added. Device info: {}\033[0m".format(addr, device_info))
scanned_devices.append({'mac':addr,'device_info':device_info})
else:
if show_all:
logger.debug("Found {}, but not on whitelist. Device info: {}".format(addr, device_info))
pass
    time.sleep(2)  # sometimes required to prevent the connection from failing
return scanned_devices
def scan_for_bc_devices(devices_whitelist, show_all=False):
bc = BeaconDiscoverer(logger)
try:
all_bc_devices = bc.discover(scan_duration=SCAN_DURATION)
except Exception as e: # catch *all* exceptions
logger.error("[Beacons] Scan failed,{}".format(e))
all_bc_devices = {}
scanned_bc_devices = []
for addr,device_info in all_bc_devices.iteritems():
if addr in devices_whitelist:
logger.debug("\033[1;7m\033[1;32mFound {}, added. Device info: {}\033[0m".format(addr, device_info))
scanned_bc_devices.append({'mac':addr,'device_info':device_info})
else:
if show_all:
logger.debug("Found {}, but not on whitelist. Device info: {}".format(addr, device_info))
pass
    time.sleep(2)  # sometimes required to prevent the connection from failing
return scanned_bc_devices
def create_badge_manager_instance(mode,timestamp):
if mode == "server":
mgr = BadgeManagerServer(logger=logger)
else:
mgr = BadgeManagerStandalone(logger=logger,timestamp=timestamp)
return mgr
def create_beacon_manager_instance(mode,timestamp):
if mode == "server":
mgrb = BeaconManagerServer(logger=logger)
else:
mgrb = BeaconManagerStandalone(logger=logger,timestamp=timestamp)
return mgrb
def reset():
'''
Resets and reconfigures Bluetooth parameters. The specific parameters affect connection speed negotiation. It's
not pretty, but safer to change the conn params this way
:return:
'''
# Resets BLE hci
logger.info("Resetting bluetooth")
reset_command = "hciconfig hci0 reset"
args = shlex.split(reset_command)
p = subprocess.Popen(args)
    # Is this a Raspberry Pi?
logger.info("Setting bluetooth connection parameters")
if os.uname()[4][:3] == 'arm':
logger.info("Raspberry Pi detected, changing bluetooth connection parameters")
with open("/sys/kernel/debug/bluetooth/hci0/conn_min_interval", "w") as connparam:
connparam.write("16")
with open("/sys/kernel/debug/bluetooth/hci0/conn_max_interval", "w") as connparam:
connparam.write("17")
else:
logger.warn("Not a Raspberry Pi, Bluetooth connection parameters remain untouched (communication may be slower)")
time.sleep(2) # requires sleep after reset
logger.info("Done resetting bluetooth")
def kill_bluepy():
"""
Kill orphaned/leftover/defunct bluepy-helper processes
I'd like to move this to a separate utility file or something when
we refactor
"""
# get all the bluepy-helper processes
CMD="/bin/ps ax | grep bluepy-helper | grep -v grep | awk '{ print $1 }'"
p = subprocess.Popen(CMD, shell=True, stdout=subprocess.PIPE)
pidstr = p.communicate()[0]
pids = pidstr.split("\n")
pids = [int(pid) for pid in pids if pid.isdigit()]
mypid = os.getpid()
# dont wanna kill our process by accident :)
if mypid in pids:
pids.remove(mypid)
for pid in pids:
# KILL KILL KILL
try:
os.kill(int(pid), signal.SIGKILL)
# we waitpid to clean up defunct processes
os.waitpid(int(pid), 0)
logger.info("Process with PID {} killed".format(pid))
except OSError as err:
logger.error("Unable to kill process with pid {}".format(pid))
logger.error(err)
def pull_devices(mgr, mgrb, start_recording):
logger.info('Started pulling')
activate_audio = False
activate_proximity = False
if start_recording is None or start_recording == "both":
activate_audio = True
activate_proximity = True
elif start_recording == "audio":
activate_audio = True
elif start_recording == "proximity":
activate_proximity = True
elif start_recording == "none":
activate_audio = False
activate_proximity = False
logger.info("Start recording: Audio = {}, Proximity = {}".format(activate_audio,activate_proximity))
mode = "server" if isinstance(mgr, BadgeManagerServer) else "standalone"
while True:
mgr.pull_badges_list()
mgrb.pull_beacons_list()
# When we refactor we can change this, but for now:
if mode == "server":
logger.info("Attempting to offload data to server")
offload_data()
logger.info("Scanning for members...")
scanned_devices = scan_for_devices(mgr.badges.keys())
# Randomly shuffle devices
random.shuffle(scanned_devices)
# iterate before the actual data collection loop just to offload
# voltages to the server (and update heartbeat on server)
for device in scanned_devices:
b = mgr.badges.get(device['mac'])
# i don't think adv_payload is ever supposed to be empty,
# but sometimes it is. and when it is, it breaks
if device['device_info']['adv_payload'] is not None:
b.last_voltage = device['device_info']['adv_payload']['voltage']
b.observed_id = device['device_info']['adv_payload']['badge_id']
observed_project_id = device['device_info']['adv_payload']['project_id']
if b.observed_id != b.badge_id or b.project_id != observed_project_id:
logger.debug("Warning! Observed IDs do not match server settings. "
"Observed: member_id:{}, project_id:{}. Expected: member_id:{}. project_id: {}"
.format(b.observed_id,observed_project_id,b.badge_id,b.project_id))
b.last_seen_ts = time.time()
mgr.send_badge(device['mac'])
# now the actual data collection
for device in scanned_devices:
# try to update latest badge timestamps from the server
mac = device['mac']
pull_success = mgr.pull_badge(mac)
if not pull_success:
logger.warn("""Problem pulling badge from server\n
Skipping badge with mac {} until next full badge list refresh"""
.format(mac))
continue
b = mgr.badges.get(mac)
# pull data
dialogue(b, activate_audio, activate_proximity, mode)
# update timestamps on server
mgr.send_badge(mac)
time.sleep(2) # requires sleep between devices
logger.info("Scanning for beacons...")
scanned_beacons = scan_for_bc_devices(mgrb.beacons.keys())
# Randomly shuffle devices
random.shuffle(scanned_beacons)
# iterate before the actual data collection loop just to offload
# voltages to the server (and update heartbeat on server)
for device in scanned_beacons:
bcn = mgrb.beacons.get(device['mac'])
if device['device_info']['adv_payload'] is not None:
bcn.last_voltage = device['device_info']['adv_payload']['voltage']
bcn.observed_id = device['device_info']['adv_payload']['badge_id']
observed_project_id = device['device_info']['adv_payload']['project_id']
if bcn.observed_id != bcn.badge_id or bcn.project_id != observed_project_id:
logger.debug("Warning! Observed IDs do not match server settings. "
"Observed: beacon_id:{}, project_id:{}. Expected: beacon_id:{}. project_id: {}"
.format(bcn.observed_id,observed_project_id,bcn.badge_id,bcn.project_id))
bcn.last_seen_ts = time.time()
mgrb.send_beacon(device['mac'])
# Update beacons with wrong id or project id
for device in scanned_beacons:
bcn = mgrb.beacons.get(device['mac'])
if device['device_info']['adv_payload'] is not None:
observed_id = device['device_info']['adv_payload']['badge_id']
observed_project_id = device['device_info']['adv_payload']['project_id']
if bcn.badge_id != observed_id or bcn.project_id != observed_project_id:
bcn.sync_timestamp()
mgrb.send_beacon(device['mac'])
time.sleep(2)
time.sleep(2) # allow BLE time to disconnect
# clean up any leftover bluepy processes
kill_bluepy()
def sync_all_devices(mgr):
logger.info('Syncing all badges recording.')
mgr.pull_badges_list()
for mac in mgr.badges:
bdg = mgr.badges.get(mac)
bdg.sync_timestamp()
time.sleep(2) # requires sleep between devices
time.sleep(2) # allow BLE time to disconnect
def devices_scanner(mgr, mgrb, show_all=False):
logger.info('Scanning for badges')
mgr.pull_badges_list()
logger.info('Scanning for beacons')
mgrb.pull_beacons_list()
while True:
logger.info("Scanning for devices...")
scanned_devices = scan_for_devices(mgr.badges.keys(), show_all) + scan_for_bc_devices(mgrb.beacons.keys())
with open(scans_file_name, "a") as fout:
for device in scanned_devices:
mac = device['mac']
scan_date = device['device_info']['scan_date']
rssi = device['device_info']['rssi']
if device['device_info']['adv_payload']:
voltage = device['device_info']['adv_payload']['voltage']
observed_id = device['device_info']['adv_payload']['badge_id']
project_id = device['device_info']['adv_payload']['project_id']
else:
voltage = 0.0
observed_id = -1
project_id = -1
logger.debug("{},{},{:.2f},{:.2f},{},{}".
format(scan_date, mac, rssi, voltage, observed_id, project_id))
fout.write("{},{},{:.2f},{:.2f},{},{}\n".
format(scan_date, mac, rssi, voltage, observed_id, project_id))
time.sleep(5) # give time to Ctrl-C
def start_all_devices(mgr):
logger.info('Starting all badges recording.')
while True:
mgr.pull_badges_list()
logger.info("Scanning for devices...")
scanned_devices = scan_for_devices(mgr.badges.keys())
for device in scanned_devices:
dev_info = device['device_info']
            if dev_info['adv_payload']:
                sync = dev_info['adv_payload']['sync_status']
                audio = dev_info['adv_payload']['audio_status']
                proximity = dev_info['adv_payload']['proximity_status']
                badge_id = dev_info['adv_payload']['badge_id']
                project_id = dev_info['adv_payload']['project_id']
                if sync == 0 or audio == 0 or proximity == 0:
                    if project_id == 0:
                        logger.info("changing project ids {}".format(device['mac']))
                    logger.info("Starting {}".format(device['mac']))
                    bdg = mgr.badges.get(device['mac'])
                    bdg.start_recording()
                    time.sleep(2)  # requires sleep between devices
                else:
                    logger.info("No need to start {}".format(device['mac']))
time.sleep(2) # allow BLE time to disconnect
def print_badges(mgr, mgrb):
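    """Print the badge and beacon lists pulled from the server in CSV form."""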
logger.info("Printing badges:")
mgr.pull_badges_list()
mgrb.pull_beacons_list()
badge_list = mgr.badges
beacon_list = mgrb.beacons
print("Members:")
for key, value in badge_list.iteritems():
print("{},{},{},{}".format(value.key,value.addr,value.badge_id,value.project_id))
print("\nBadges:")
for key, value in beacon_list.iteritems():
print("{},{},{},{}".format(value.key,value.addr,value.badge_id,value.project_id))
def add_pull_command_options(subparsers):
pull_parser = subparsers.add_parser('pull', help='Continuously pull data from badges')
pull_parser.add_argument('-r','--start_recording'
, choices=('audio', 'proximity', 'both','none'), required=False
, default='both'
, dest='start_recording',help='data recording option')
def add_scan_command_options(subparsers):
scan_parser = subparsers.add_parser('scan', help='Continuously scan for badges')
scan_parser.add_argument('-a','--show_all', action='store_true', default=False, help="Show all devices")
def add_sync_all_command_options(subparsers):
sa_parser = subparsers.add_parser('sync_all', help='Send date to all devices in whitelist')
def add_start_all_command_options(subparsers):
st_parser = subparsers.add_parser('start_all', help='Start recording on all devices in whitelist')
def add_print_badges_command_options(subparsers):
lb_parser = subparsers.add_parser('print_badges', help='print badges in a CSV format')
if __name__ == "__main__":
import time
import argparse
parser = argparse.ArgumentParser(description="Run scans, send dates, or continuously pull data")
parser.add_argument('-dr','--disable_reset_ble', action='store_true', default=False, help="Do not reset BLE")
parser.add_argument('-m','--hub_mode', choices=('server', 'standalone')
, default='standalone', dest='hub_mode'
, help="Operation mode - standalone (using a configuration file) or a server")
parser.add_argument('-t', '--timestamp'
, type=int, required=False
, dest='timestamp', help='UTC timestamp to start pulling data from (int)')
    subparsers = parser.add_subparsers(help='Program mode (e.g. pull, scan, sync_all, start_all, print_badges)', dest='mode')
add_pull_command_options(subparsers)
add_scan_command_options(subparsers)
add_sync_all_command_options(subparsers)
add_start_all_command_options(subparsers)
add_print_badges_command_options(subparsers)
args = parser.parse_args()
mgr = create_badge_manager_instance(args.hub_mode, args.timestamp)
mgrb = create_beacon_manager_instance(args.hub_mode, args.timestamp)
if not args.disable_reset_ble:
reset()
if args.mode == "sync_all":
sync_all_devices(mgr)
# scan for devices
if args.mode == "scan":
devices_scanner(mgr,mgrb, args.show_all)
# pull data from all devices
if args.mode == "pull":
pull_devices(mgr, mgrb, args.start_recording)
if args.mode == "start_all":
start_all_devices(mgr)
if args.mode == "print_badges":
print_badges(mgr, mgrb)
exit(0)
| [((49, 9, 49, 42), 'logging.getLogger', 'logging.getLogger', ({(49, 27, 49, 41): '"""badge_server"""'}, {}), "('badge_server')", False, 'import logging\n'), ((53, 5, 53, 39), 'logging.FileHandler', 'logging.FileHandler', ({(53, 25, 53, 38): 'log_file_name'}, {}), '(log_file_name)', False, 'import logging\n'), ((57, 5, 57, 28), 'logging.StreamHandler', 'logging.StreamHandler', ({}, {}), '()', False, 'import logging\n'), ((62, 12, 62, 74), 'logging.Formatter', 'logging.Formatter', ({(62, 30, 62, 73): '"""%(asctime)s - %(levelname)s - %(message)s"""'}, {}), "('%(asctime)s - %(levelname)s - %(message)s')", False, 'import logging\n'), ((184, 7, 184, 31), 'os.path.exists', 'os.path.exists', ({(184, 22, 184, 30): 'filename'}, {}), '(filename)', False, 'import os\n'), ((298, 9, 298, 32), 'badge_discoverer.BadgeDiscoverer', 'BadgeDiscoverer', ({(298, 25, 298, 31): 'logger'}, {}), '(logger)', False, 'from badge_discoverer import BadgeDiscoverer, BeaconDiscoverer\n'), ((315, 4, 315, 17), 'time.sleep', 'time.sleep', ({(315, 15, 315, 16): '(2)'}, {}), '(2)', False, 'import time\n'), ((320, 9, 320, 33), 'badge_discoverer.BeaconDiscoverer', 'BeaconDiscoverer', ({(320, 26, 320, 32): 'logger'}, {}), '(logger)', False, 'from badge_discoverer import BadgeDiscoverer, BeaconDiscoverer\n'), ((337, 4, 337, 17), 'time.sleep', 'time.sleep', ({(337, 15, 337, 16): '(2)'}, {}), '(2)', False, 'import time\n'), ((367, 11, 367, 37), 'shlex.split', 'shlex.split', ({(367, 23, 367, 36): 'reset_command'}, {}), '(reset_command)', False, 'import shlex\n'), ((368, 8, 368, 30), 'subprocess.Popen', 'subprocess.Popen', ({(368, 25, 368, 29): 'args'}, {}), '(args)', False, 'import subprocess\n'), ((382, 4, 382, 17), 'time.sleep', 'time.sleep', ({(382, 15, 382, 16): '(2)'}, {}), '(2)', False, 'import time\n'), ((395, 8, 395, 65), 'subprocess.Popen', 'subprocess.Popen', (), '', False, 'import subprocess\n'), ((399, 12, 399, 23), 'os.getpid', 'os.getpid', ({}, {}), '()', False, 'import os\n'), ((540, 4, 540, 17), 'time.sleep', 'time.sleep', ({(540, 15, 540, 16): '(2)'}, {}), '(2)', False, 'import time\n'), ((657, 13, 657, 100), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (), '', False, 'import argparse\n'), ((79, 11, 79, 35), 'os.path.exists', 'os.path.exists', ({(79, 26, 79, 34): 'filename'}, {}), '(filename)', False, 'import os\n'), ((91, 27, 91, 63), 'glob.glob', 'glob.glob', ({(91, 37, 91, 62): "pending_file_prefix + '*'"}, {}), "(pending_file_prefix + '*')", False, 'import glob\n'), ((205, 27, 205, 38), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((343, 14, 343, 47), 'badge_manager_server.BadgeManagerServer', 'BadgeManagerServer', (), '', False, 'from badge_manager_server import BadgeManagerServer\n'), ((345, 14, 345, 71), 'badge_manager_standalone.BadgeManagerStandalone', 'BadgeManagerStandalone', (), '', False, 'from badge_manager_standalone import BadgeManagerStandalone\n'), ((351, 15, 351, 49), 'beacon_manager_server.BeaconManagerServer', 'BeaconManagerServer', (), '', False, 'from beacon_manager_server import BeaconManagerServer\n'), ((353, 15, 353, 73), 'beacon_manager_standalone.BeaconManagerStandalone', 'BeaconManagerStandalone', (), '', False, 'from beacon_manager_standalone import BeaconManagerStandalone\n'), ((449, 8, 449, 39), 'random.shuffle', 'random.shuffle', ({(449, 23, 449, 38): 'scanned_devices'}, {}), '(scanned_devices)', False, 'import random\n'), ((493, 8, 493, 39), 'random.shuffle', 'random.shuffle', ({(493, 23, 493, 38): 'scanned_beacons'}, {}), '(scanned_beacons)', False, 
'import random\n'), ((524, 8, 524, 21), 'time.sleep', 'time.sleep', ({(524, 19, 524, 20): '(2)'}, {}), '(2)', False, 'import time\n'), ((538, 8, 538, 21), 'time.sleep', 'time.sleep', ({(538, 19, 538, 20): '(2)'}, {}), '(2)', False, 'import time\n'), ((569, 8, 569, 21), 'time.sleep', 'time.sleep', ({(569, 19, 569, 20): '(5)'}, {}), '(5)', False, 'import time\n'), ((609, 8, 609, 21), 'time.sleep', 'time.sleep', ({(609, 19, 609, 20): '(2)'}, {}), '(2)', False, 'import time\n'), ((79, 40, 79, 65), 'os.path.getsize', 'os.path.getsize', ({(79, 56, 79, 64): 'filename'}, {}), '(filename)', False, 'import os\n'), ((107, 29, 107, 87), 'hub_manager.send_data_to_server', 'hub_manager.send_data_to_server', ({(107, 61, 107, 67): 'logger', (107, 69, 107, 78): 'data_type', (107, 80, 107, 86): 'chunks'}, {}), '(logger, data_type, chunks)', False, 'import hub_manager\n'), ((123, 12, 123, 40), 'os.remove', 'os.remove', ({(123, 22, 123, 39): 'pending_file_name'}, {}), '(pending_file_name)', False, 'import os\n'), ((182, 10, 182, 18), 'datetime.datetime.now', 'dt.now', ({}, {}), '()', True, 'from datetime import datetime as dt\n'), ((466, 29, 466, 40), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((487, 12, 487, 25), 'time.sleep', 'time.sleep', ({(487, 23, 487, 24): '(2)'}, {}), '(2)', False, 'import time\n'), ((508, 31, 508, 42), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((522, 12, 522, 25), 'time.sleep', 'time.sleep', ({(522, 23, 522, 24): '(2)'}, {}), '(2)', False, 'import time\n'), ((125, 16, 125, 38), 'traceback.format_exc', 'traceback.format_exc', ({}, {}), '()', False, 'import traceback\n'), ((168, 22, 168, 40), 'os.path.getsize', 'os.path.getsize', ({(168, 38, 168, 39): 'x'}, {}), '(x)', False, 'import os\n'), ((236, 16, 236, 41), 'json.dump', 'json.dump', ({(236, 26, 236, 34): 'log_line', (236, 36, 236, 40): 'fout'}, {}), '(log_line, fout)', False, 'import json\n'), ((243, 31, 243, 62), 'datetime.datetime.fromtimestamp', 'dt.fromtimestamp', ({(243, 48, 243, 61): 'last_chunk.ts'}, {}), '(last_chunk.ts)', True, 'from datetime import datetime as dt\n'), ((284, 16, 284, 41), 'json.dump', 'json.dump', ({(284, 26, 284, 34): 'log_line', (284, 36, 284, 40): 'fout'}, {}), '(log_line, fout)', False, 'import json\n'), ((289, 30, 289, 60), 'datetime.datetime.fromtimestamp', 'dt.fromtimestamp', ({(289, 47, 289, 59): 'last_scan.ts'}, {}), '(last_scan.ts)', True, 'from datetime import datetime as dt\n'), ((372, 7, 372, 17), 'os.uname', 'os.uname', ({}, {}), '()', False, 'import os\n'), ((101, 30, 101, 46), 'json.loads', 'json.loads', ({(101, 41, 101, 45): 'line'}, {}), '(line)', False, 'import json\n'), ((219, 57, 219, 68), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((265, 57, 265, 68), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((598, 24, 598, 37), 'time.sleep', 'time.sleep', ({(598, 35, 598, 36): '(2)'}, {}), '(2)', False, 'import time\n'), ((604, 24, 604, 37), 'time.sleep', 'time.sleep', ({(604, 35, 604, 36): '(2)'}, {}), '(2)', False, 'import time\n'), ((122, 39, 122, 56), 'json.dumps', 'json.dumps', ({(122, 50, 122, 55): 'chunk'}, {}), '(chunk)', False, 'import json\n')] |
liamgam/gdkit | python/compile.py | e9d419ff916f15dbd8ec6d7cc59b0a3d8f636a95 | import compileall
compileall.compile_dir(".",force=1) | [((2, 0, 2, 35), 'compileall.compile_dir', 'compileall.compile_dir', (), '', False, 'import compileall\n')] |
fairhopeweb/saleor | saleor/product/migrations/0141_update_descritpion_fields.py | 9ac6c22652d46ba65a5b894da5f1ba5bec48c019 | # Generated by Django 3.1.5 on 2021-02-17 11:04
from django.db import migrations
import saleor.core.db.fields
import saleor.core.utils.editorjs
def update_empty_description_field(apps, schema_editor):
Category = apps.get_model("product", "Category")
CategoryTranslation = apps.get_model("product", "CategoryTranslation")
Collection = apps.get_model("product", "Collection")
CollectionTranslation = apps.get_model("product", "CollectionTranslation")
Product = apps.get_model("product", "Product")
ProductTranslation = apps.get_model("product", "ProductTranslation")
models = [
Category,
CategoryTranslation,
Collection,
CollectionTranslation,
Product,
ProductTranslation,
]
for model in models:
model.objects.filter(description={}).update(description=None)
class Migration(migrations.Migration):
dependencies = [
("product", "0140_auto_20210125_0905"),
]
operations = [
migrations.AlterField(
model_name="category",
name="description",
field=saleor.core.db.fields.SanitizedJSONField(
blank=True,
null=True,
sanitizer=saleor.core.utils.editorjs.clean_editor_js,
),
),
migrations.AlterField(
model_name="categorytranslation",
name="description",
field=saleor.core.db.fields.SanitizedJSONField(
blank=True,
null=True,
sanitizer=saleor.core.utils.editorjs.clean_editor_js,
),
),
migrations.AlterField(
model_name="collection",
name="description",
field=saleor.core.db.fields.SanitizedJSONField(
blank=True,
null=True,
sanitizer=saleor.core.utils.editorjs.clean_editor_js,
),
),
migrations.AlterField(
model_name="collectiontranslation",
name="description",
field=saleor.core.db.fields.SanitizedJSONField(
blank=True,
null=True,
sanitizer=saleor.core.utils.editorjs.clean_editor_js,
),
),
migrations.AlterField(
model_name="product",
name="description",
field=saleor.core.db.fields.SanitizedJSONField(
blank=True,
null=True,
sanitizer=saleor.core.utils.editorjs.clean_editor_js,
),
),
migrations.AlterField(
model_name="producttranslation",
name="description",
field=saleor.core.db.fields.SanitizedJSONField(
blank=True,
null=True,
sanitizer=saleor.core.utils.editorjs.clean_editor_js,
),
),
migrations.RunPython(
update_empty_description_field,
migrations.RunPython.noop,
),
]
| [((91, 8, 94, 9), 'django.db.migrations.RunPython', 'migrations.RunPython', ({(92, 12, 92, 42): 'update_empty_description_field', (93, 12, 93, 37): 'migrations.RunPython.noop'}, {}), '(update_empty_description_field, migrations.RunPython.noop)', False, 'from django.db import migrations\n')] |
arnaubena97/SatSolver-sat_isfayer | local_search/sat_isfayer.py | db7edc83547786deb7bf6b1c5d75b406f877ca15 | #!/usr/bin/env python3
import sys
import random
def read_file(file_name):
"""File reader and parser the num of variables, num of clauses and put the clauses in a list"""
clauses =[]
with open(file_name) as all_file:
for line in all_file:
if line.startswith('c'): continue #ignore comments
if line.startswith('p'):
num_variables = int(line.split()[2]) # set num_variables
continue
if line.strip() == "": continue
clause = list(map(int, line.split()))
clause.pop()
clauses.append(clause)
return num_variables, clauses
def print_sol(solution):
"""Method to print the solution that satisfies all the clauses """
print("s SATISFIABLE")
print("v %s 0" %" ".join(map(str, solution)))
exit(0)
class walksat_solver():
def __init__(self, clauses, num_variables):
"""Constructor of the solver"""
self.clauses = clauses
self.num_variables = num_variables
self.formula=[]
self.list_positions = self.create_positions()
self.index_clauses_satisfied = []
def randomSolution(self):
"""Create a random solution of cnf formula. Ex: [-1, 2, 3, -4, ...]"""
random_formula = [x if random.random() < 0.5 else -x for x in range(self.num_variables + 1)]
return random_formula[1:]
def create_positions(self):
"""Return a list with the clause index that apear in the clauses.
First position is empty, and the index of list is the variable.
Ex: [ [], [2], [2, 3], ....] """
vars_positions = [[] for _ in range(self.num_variables * 2 + 1)]
for index, clause in enumerate(self.clauses):
for var in clause:
vars_positions[var].append(index)
return vars_positions
def calculate_all_clauses_satisfy(self):
"""Returns a list with the number of variables that
satisfy the clause with the same index.
Method for all clauses.
Ex: [1, 0, 2, 2] in test_0.cnf """
list_variables_satisfies = []
for clause in range(len(self.clauses)):
number_sat = self.clause_satisfy(clause)
list_variables_satisfies.append(number_sat)
return list_variables_satisfies
def clause_satisfy(self, index):
"""Returns an integer, which is the number of
variables in the formula that satisfy the
clause indicated by the index.
Ex: index = 1 --> cluse[1] = [1, -2, 3, ..] """
satisfy = 0
for variable in self.clauses[index]:
if variable in self.formula:
satisfy += 1
return satisfy
def select_all_unsatisfied(self):
"""Returns a list of indexes whose clause
is not satisfied."""
clauses_not_satisfied = []
for index, value in enumerate(self.index_clauses_satisfied):
if value == 0:
clauses_not_satisfied.append(index)
return clauses_not_satisfied
def get_clause_unsatisfied(self, list_all_unsatisfied):
"""Returns a randomly selected unsatisfied clause"""
return self.clauses[random.choice(list_all_unsatisfied)]
def update(self, variable, x):
"""It is responsible for updating the list of
the number of variables that satisfy the clause"""
for index in self.list_positions[x * variable]:
self.index_clauses_satisfied[index] += x
def change_variable(self, clause_to_review):
"""Is responsible for assessing which is
the best variable in the clause to change"""
worst_wrong = sys.maxsize
bests_variables = []
for variable in clause_to_review:
wrong = 0
for index in self.list_positions[-variable]:
if not self.index_clauses_satisfied[index] > 1:
wrong += 1
if wrong <= worst_wrong:
worst_wrong = wrong
bests_variables.append(variable)
return random.choice(bests_variables)
def solve(self, max_tries=50000000, max_flips=3000):
"""Implementation of the solver"""
#for _ in range(max_tries):
while(True):
self.formula = self.randomSolution()
self.index_clauses_satisfied = self.calculate_all_clauses_satisfy()
for _ in range(max_flips):
index_all_unsatisfied = self.select_all_unsatisfied()
if len(index_all_unsatisfied)==0:
print_sol(self.formula)
clause_to_review = self.get_clause_unsatisfied(index_all_unsatisfied)
variable = self.change_variable(clause_to_review)
self.update(variable, 1)
self.update(variable, -1)
self.formula[abs(variable)-1] *= -1
#Main
if __name__ == "__main__":
if len(sys.argv) == 2:
file_name = sys.argv[1]
else:
print("\n Command: python %s <file_name.cnf> \n" %sys.argv[0])
exit(0)
num_variables, clauses = read_file(file_name)
sat = walksat_solver(clauses, num_variables)
sat.solve()
exit(0)
| [((109, 15, 109, 45), 'random.choice', 'random.choice', ({(109, 29, 109, 44): 'bests_variables'}, {}), '(bests_variables)', False, 'import random\n'), ((86, 28, 86, 63), 'random.choice', 'random.choice', ({(86, 42, 86, 62): 'list_all_unsatisfied'}, {}), '(list_all_unsatisfied)', False, 'import random\n'), ((40, 31, 40, 46), 'random.random', 'random.random', ({}, {}), '()', False, 'import random\n')] |
Hacky-DH/pytorch | torch/_VF.py | 80dc4be615854570aa39a7e36495897d8a040ecc | """
This makes the functions in torch._C._VariableFunctions available as
torch._VF.<funcname>
without mypy being able to find them.
A subset of those functions are mapped to ATen functions in
torch/jit/_builtins.py
See https://github.com/pytorch/pytorch/issues/21478 for the reason for
introducing torch._VF
"""
import torch
import sys
import types
class VFModule(types.ModuleType):
vf: types.ModuleType
def __init__(self, name):
super(VFModule, self).__init__(name)
self.vf = torch._C._VariableFunctions
def __getattr__(self, attr):
return getattr(self.vf, attr)
sys.modules[__name__] = VFModule(__name__)
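# Illustrative use (assuming torch is built/importable): attribute access on this
# module is forwarded to torch._C._VariableFunctions by VFModule.__getattr__, e.g.
#   from torch import _VF
#   _VF.stack([torch.zeros(2), torch.ones(2)], 0)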
| [] |
sergeivolodin/causality-disentanglement-rl | sparse_causal_model_learner_rl/annealer/threshold_projection.py | 5a41b4a2e3d85fa7e9c8450215fdc6cf954df867 | import gin
import torch
import logging
from sparse_causal_model_learner_rl.metrics import find_value, find_key
@gin.configurable
def ProjectionThreshold(config, config_object, epoch_info, temp,
adjust_every=100, metric_threshold=0.5, delta=0.5, source_metric_key=None,
min_hyper=0, max_hyper=1000,
gin_variable=None, **kwargs):
try:
metric_val = find_value(epoch_info, source_metric_key)
except AssertionError as e:
return config
good = metric_val < metric_threshold
hyper = gin.query_parameter(gin_variable)
logging.info(f"Projection: metric={metric_val} threshold={metric_threshold} good={good} hyper={hyper}")
if 'last_hyper_adjustment' not in temp:
temp['last_hyper_adjustment'] = 0
i = epoch_info['epochs']
if good:
temp['suggested_hyper'] = hyper - delta
else:
temp['suggested_hyper'] = hyper + delta
if temp['suggested_hyper'] > max_hyper:
temp['suggested_hyper'] = max_hyper
if temp['suggested_hyper'] < min_hyper:
temp['suggested_hyper'] = min_hyper
if 'suggested_hyper' in temp and (i - temp['last_hyper_adjustment'] >= adjust_every):
temp['last_hyper_adjustment'] = i
with gin.unlock_config():
gin.bind_parameter(gin_variable, temp['suggested_hyper'])
del temp['suggested_hyper']
return config
| [((17, 12, 17, 45), 'gin.query_parameter', 'gin.query_parameter', ({(17, 32, 17, 44): 'gin_variable'}, {}), '(gin_variable)', False, 'import gin\n'), ((18, 4, 18, 107), 'logging.info', 'logging.info', ({(18, 17, 18, 106): 'f"""Projection: metric={metric_val} threshold={metric_threshold} good={good} hyper={hyper}"""'}, {}), "(\n f'Projection: metric={metric_val} threshold={metric_threshold} good={good} hyper={hyper}'\n )", False, 'import logging\n'), ((12, 21, 12, 62), 'sparse_causal_model_learner_rl.metrics.find_value', 'find_value', ({(12, 32, 12, 42): 'epoch_info', (12, 44, 12, 61): 'source_metric_key'}, {}), '(epoch_info, source_metric_key)', False, 'from sparse_causal_model_learner_rl.metrics import find_value, find_key\n'), ((39, 13, 39, 32), 'gin.unlock_config', 'gin.unlock_config', ({}, {}), '()', False, 'import gin\n'), ((40, 12, 40, 69), 'gin.bind_parameter', 'gin.bind_parameter', ({(40, 31, 40, 43): 'gin_variable', (40, 45, 40, 68): "temp['suggested_hyper']"}, {}), "(gin_variable, temp['suggested_hyper'])", False, 'import gin\n')] |
ucals/numpyro | numpyro/contrib/control_flow/scan.py | 566a5311d660d28a630188063c03a018165a38a9 | # Copyright Contributors to the Pyro project.
# SPDX-License-Identifier: Apache-2.0
from collections import OrderedDict
from functools import partial
from jax import lax, random, tree_flatten, tree_map, tree_multimap, tree_unflatten
import jax.numpy as jnp
from jax.tree_util import register_pytree_node_class
from numpyro import handlers
from numpyro.primitives import _PYRO_STACK, Messenger, apply_stack
from numpyro.util import not_jax_tracer
@register_pytree_node_class
class PytreeTrace:
def __init__(self, trace):
self.trace = trace
def tree_flatten(self):
trace, aux_trace = {}, {}
for name, site in self.trace.items():
if site['type'] in ['sample', 'deterministic']:
trace[name], aux_trace[name] = {}, {'_control_flow_done': True}
for key in site:
if key in ['fn', 'args', 'value', 'intermediates']:
trace[name][key] = site[key]
# scanned sites have stop field because we trace them inside a block handler
elif key != 'stop':
aux_trace[name][key] = site[key]
return (trace,), aux_trace
@classmethod
def tree_unflatten(cls, aux_data, children):
trace, = children
for name, site in trace.items():
site.update(aux_data[name])
return cls(trace)
def _subs_wrapper(subs_map, i, length, site):
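    """Resolve the substituted value for `site` at scan step `i`, slicing a
    substituted series of length `length` when a whole series was provided."""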
value = None
if isinstance(subs_map, dict) and site['name'] in subs_map:
value = subs_map[site['name']]
elif callable(subs_map):
rng_key = site['kwargs'].get('rng_key')
subs_map = handlers.seed(subs_map, rng_seed=rng_key) if rng_key is not None else subs_map
value = subs_map(site)
if value is not None:
value_ndim = jnp.ndim(value)
sample_shape = site['kwargs']['sample_shape']
fn_ndim = len(sample_shape + site['fn'].shape())
if value_ndim == fn_ndim:
# this branch happens when substitute_fn is init_strategy,
# where we apply init_strategy to each element in the scanned series
return value
elif value_ndim == fn_ndim + 1:
# this branch happens when we substitute a series of values
shape = jnp.shape(value)
if shape[0] == length:
return value[i]
elif shape[0] < length:
rng_key = site['kwargs']['rng_key']
assert rng_key is not None
# we use the substituted values if i < shape[0]
# and generate a new sample otherwise
return lax.cond(i < shape[0],
(value, i),
lambda val: val[0][val[1]],
rng_key,
lambda val: site['fn'](rng_key=val, sample_shape=sample_shape))
else:
raise RuntimeError(f"Substituted value for site {site['name']} "
"requires length less than or equal to scan length."
f" Expected length <= {length}, but got {shape[0]}.")
else:
raise RuntimeError(f"Something goes wrong. Expected ndim = {fn_ndim} or {fn_ndim+1},"
f" but got {value_ndim}. This might happen when you use nested scan,"
" which is currently not supported. Please report the issue to us!")
class promote_shapes(Messenger):
# a helper messenger to promote shapes of `fn` and `value`
# + msg: fn.batch_shape = (2, 3), value.shape = (3,) + fn.event_shape
# process_message(msg): promote value so that value.shape = (1, 3) + fn.event_shape
# + msg: fn.batch_shape = (3,), value.shape = (2, 3) + fn.event_shape
# process_message(msg): promote fn so that fn.batch_shape = (1, 3).
def process_message(self, msg):
if msg["type"] == "sample" and msg["value"] is not None:
fn, value = msg["fn"], msg["value"]
value_batch_ndims = jnp.ndim(value) - fn.event_dim
fn_batch_ndim = len(fn.batch_shape)
prepend_shapes = (1,) * abs(fn_batch_ndim - value_batch_ndims)
if fn_batch_ndim > value_batch_ndims:
msg["value"] = jnp.reshape(value, prepend_shapes + jnp.shape(value))
elif fn_batch_ndim < value_batch_ndims:
msg["fn"] = tree_map(lambda x: jnp.reshape(x, prepend_shapes + jnp.shape(x)), fn)
def scan_enum(f, init, xs, length, reverse, rng_key=None, substitute_stack=None):
from numpyro.contrib.funsor import enum, config_enumerate, markov, trace as packed_trace
# XXX: This implementation only works for history size=1 but can be
# extended to history size > 1 by running `f` `history_size` times
# for initialization. However, `sequential_sum_product` does not
# support history size > 1, so we skip supporting it here.
# Note that `funsor.sum_product.sarkka_bilmes_product` does support history > 1.
if reverse:
x0 = tree_map(lambda x: x[-1], xs)
xs_ = tree_map(lambda x: x[:-1], xs)
else:
x0 = tree_map(lambda x: x[0], xs)
xs_ = tree_map(lambda x: x[1:], xs)
carry_shape_at_t1 = None
def body_fn(wrapped_carry, x, prefix=None):
i, rng_key, carry = wrapped_carry
init = True if (not_jax_tracer(i) and i == 0) else False
rng_key, subkey = random.split(rng_key) if rng_key is not None else (None, None)
seeded_fn = handlers.seed(f, subkey) if subkey is not None else f
for subs_type, subs_map in substitute_stack:
subs_fn = partial(_subs_wrapper, subs_map, i, length)
if subs_type == 'condition':
seeded_fn = handlers.condition(seeded_fn, condition_fn=subs_fn)
elif subs_type == 'substitute':
seeded_fn = handlers.substitute(seeded_fn, substitute_fn=subs_fn)
if init:
with handlers.scope(prefix="_init"):
new_carry, y = seeded_fn(carry, x)
trace = {}
else:
with handlers.block(), packed_trace() as trace, promote_shapes(), enum(), markov():
# Like scan_wrapper, we collect the trace of scan's transition function
# `seeded_fn` here. To put time dimension to the correct position, we need to
# promote shapes to make `fn` and `value`
# at each site have the same batch dims (e.g. if `fn.batch_shape = (2, 3)`,
# and value's batch_shape is (3,), then we promote shape of
# value so that its batch shape is (1, 3)).
new_carry, y = config_enumerate(seeded_fn)(carry, x)
# store shape of new_carry at a global variable
nonlocal carry_shape_at_t1
carry_shape_at_t1 = [jnp.shape(x) for x in tree_flatten(new_carry)[0]]
# make new_carry have the same shape as carry
# FIXME: is this rigorous?
new_carry = tree_multimap(lambda a, b: jnp.reshape(a, jnp.shape(b)),
new_carry, carry)
return (i + jnp.array(1), rng_key, new_carry), (PytreeTrace(trace), y)
with markov():
wrapped_carry = (0, rng_key, init)
wrapped_carry, (_, y0) = body_fn(wrapped_carry, x0)
if length == 1:
ys = tree_map(lambda x: jnp.expand_dims(x, 0), y0)
return wrapped_carry, (PytreeTrace({}), ys)
wrapped_carry, (pytree_trace, ys) = lax.scan(body_fn, wrapped_carry, xs_, length - 1, reverse)
first_var = None
for name, site in pytree_trace.trace.items():
# add `time` dimension, the name will be '_time_{first variable in the trace}'
if first_var is None:
first_var = name
leftmost_dim = min(site['infer']['dim_to_name'])
site['infer']['dim_to_name'][leftmost_dim - 1] = '_time_{}'.format(first_var)
# similar to carry, we need to reshape due to shape alternating in markov
ys = tree_multimap(lambda z0, z: jnp.reshape(z, z.shape[:1] + jnp.shape(z0)), y0, ys)
# we also need to reshape `carry` to match sequential behavior
if length % 2 == 0:
t, rng_key, carry = wrapped_carry
flatten_carry, treedef = tree_flatten(carry)
flatten_carry = [jnp.reshape(x, t1_shape)
for x, t1_shape in zip(flatten_carry, carry_shape_at_t1)]
carry = tree_unflatten(treedef, flatten_carry)
wrapped_carry = (t, rng_key, carry)
return wrapped_carry, (pytree_trace, ys)
def scan_wrapper(f, init, xs, length, reverse, rng_key=None, substitute_stack=[], enum=False):
if length is None:
length = tree_flatten(xs)[0][0].shape[0]
if enum:
return scan_enum(f, init, xs, length, reverse, rng_key, substitute_stack)
def body_fn(wrapped_carry, x):
i, rng_key, carry = wrapped_carry
rng_key, subkey = random.split(rng_key) if rng_key is not None else (None, None)
with handlers.block():
seeded_fn = handlers.seed(f, subkey) if subkey is not None else f
for subs_type, subs_map in substitute_stack:
subs_fn = partial(_subs_wrapper, subs_map, i, length)
if subs_type == 'condition':
seeded_fn = handlers.condition(seeded_fn, condition_fn=subs_fn)
elif subs_type == 'substitute':
seeded_fn = handlers.substitute(seeded_fn, substitute_fn=subs_fn)
with handlers.trace() as trace:
carry, y = seeded_fn(carry, x)
return (i + 1, rng_key, carry), (PytreeTrace(trace), y)
return lax.scan(body_fn, (jnp.array(0), rng_key, init), xs, length=length, reverse=reverse)
def scan(f, init, xs, length=None, reverse=False):
"""
This primitive scans a function over the leading array axes of
`xs` while carrying along state. See :func:`jax.lax.scan` for more
information.
**Usage**:
.. doctest::
>>> import numpy as np
>>> import numpyro
>>> import numpyro.distributions as dist
>>> from numpyro.contrib.control_flow import scan
>>>
>>> def gaussian_hmm(y=None, T=10):
... def transition(x_prev, y_curr):
... x_curr = numpyro.sample('x', dist.Normal(x_prev, 1))
... y_curr = numpyro.sample('y', dist.Normal(x_curr, 1), obs=y_curr)
... return x_curr, (x_curr, y_curr)
...
... x0 = numpyro.sample('x_0', dist.Normal(0, 1))
... _, (x, y) = scan(transition, x0, y, length=T)
... return (x, y)
>>>
>>> # here we do some quick tests
>>> with numpyro.handlers.seed(rng_seed=0):
... x, y = gaussian_hmm(np.arange(10.))
>>> assert x.shape == (10,) and y.shape == (10,)
>>> assert np.all(y == np.arange(10))
>>>
>>> with numpyro.handlers.seed(rng_seed=0): # generative
... x, y = gaussian_hmm()
>>> assert x.shape == (10,) and y.shape == (10,)
.. warning:: This is an experimental utility function that allows users to use
JAX control flow with NumPyro's effect handlers. Currently, `sample` and
`deterministic` sites within the scan body `f` are supported. If you notice
that any effect handlers or distributions are unsupported, please file an issue.
.. note:: It is ambiguous to align `scan` dimension inside a `plate` context.
So the following pattern won't be supported
.. code-block:: python
with numpyro.plate('N', 10):
last, ys = scan(f, init, xs)
All `plate` statements should be put inside `f`. For example, the corresponding
working code is
.. code-block:: python
def g(*args, **kwargs):
with numpyro.plate('N', 10):
return f(*arg, **kwargs)
last, ys = scan(g, init, xs)
.. note:: Nested scan is currently not supported.
.. note:: We can scan over discrete latent variables in `f`. The joint density is
evaluated using parallel-scan (reference [1]) over time dimension, which
reduces parallel complexity to `O(log(length))`.
Currently, only the equivalence to
:class:`~numpyro.contrib.funsor.enum_messenger.markov(history_size=1)`
is supported. A :class:`~numpyro.handlers.trace` of `scan` with discrete latent
variables will contain the following sites:
+ init sites: those sites belong to the first trace of `f`. Each of
them will have name prefixed with `_init/`.
+ scanned sites: those sites collect the values of the remaining scan
loop over `f`. An addition time dimension `_time_foo` will be
added to those sites, where `foo` is the name of the first site
appeared in `f`.
Not all transition functions `f` are supported. All of the restrictions from
Pyro's enumeration tutorial [2] still apply here. In addition, there should
not have any site outside of `scan` depend on the first output of `scan`
(the last carry value).
** References **
1. *Temporal Parallelization of Bayesian Smoothers*,
Simo Sarkka, Angel F. Garcia-Fernandez
(https://arxiv.org/abs/1905.13002)
2. *Inference with Discrete Latent Variables*
(http://pyro.ai/examples/enumeration.html#Dependencies-among-plates)
:param callable f: a function to be scanned.
:param init: the initial carrying state
:param xs: the values over which we scan along the leading axis. This can
be any JAX pytree (e.g. list/dict of arrays).
:param length: optional value specifying the length of `xs`
but can be used when `xs` is an empty pytree (e.g. None)
:param bool reverse: optional boolean specifying whether to run the scan iteration
forward (the default) or in reverse
:return: output of scan, quoted from :func:`jax.lax.scan` docs:
"pair of type (c, [b]) where the first element represents the final loop
carry value and the second element represents the stacked outputs of the
second output of f when scanned over the leading axis of the inputs".
"""
# if there are no active Messengers, we just run and return it as expected:
if not _PYRO_STACK:
(length, rng_key, carry), (pytree_trace, ys) = scan_wrapper(
f, init, xs, length=length, reverse=reverse)
else:
# Otherwise, we initialize a message...
initial_msg = {
'type': 'control_flow',
'fn': scan_wrapper,
'args': (f, init, xs, length, reverse),
'kwargs': {'rng_key': None,
'substitute_stack': []},
'value': None,
}
# ...and use apply_stack to send it to the Messengers
msg = apply_stack(initial_msg)
(length, rng_key, carry), (pytree_trace, ys) = msg['value']
if not msg["kwargs"].get("enum", False):
for msg in pytree_trace.trace.values():
apply_stack(msg)
else:
from numpyro.contrib.funsor import to_funsor
from numpyro.contrib.funsor.enum_messenger import LocalNamedMessenger
for msg in pytree_trace.trace.values():
with LocalNamedMessenger():
dim_to_name = msg["infer"].get("dim_to_name")
to_funsor(msg["value"], dim_to_name=OrderedDict([(k, dim_to_name[k]) for k in sorted(dim_to_name)]))
apply_stack(msg)
return carry, ys
| [((52, 21, 52, 36), 'jax.numpy.ndim', 'jnp.ndim', ({(52, 30, 52, 35): 'value'}, {}), '(value)', True, 'import jax.numpy as jnp\n'), ((111, 13, 111, 42), 'jax.tree_map', 'tree_map', ({(111, 22, 111, 37): 'lambda x: x[-1]', (111, 39, 111, 41): 'xs'}, {}), '(lambda x: x[-1], xs)', False, 'from jax import lax, random, tree_flatten, tree_map, tree_multimap, tree_unflatten\n'), ((112, 14, 112, 44), 'jax.tree_map', 'tree_map', ({(112, 23, 112, 39): 'lambda x: x[:-1]', (112, 41, 112, 43): 'xs'}, {}), '(lambda x: x[:-1], xs)', False, 'from jax import lax, random, tree_flatten, tree_map, tree_multimap, tree_unflatten\n'), ((114, 13, 114, 41), 'jax.tree_map', 'tree_map', ({(114, 22, 114, 36): 'lambda x: x[0]', (114, 38, 114, 40): 'xs'}, {}), '(lambda x: x[0], xs)', False, 'from jax import lax, random, tree_flatten, tree_map, tree_multimap, tree_unflatten\n'), ((115, 14, 115, 43), 'jax.tree_map', 'tree_map', ({(115, 23, 115, 38): 'lambda x: x[1:]', (115, 40, 115, 42): 'xs'}, {}), '(lambda x: x[1:], xs)', False, 'from jax import lax, random, tree_flatten, tree_map, tree_multimap, tree_unflatten\n'), ((155, 9, 155, 17), 'numpyro.contrib.funsor.markov', 'markov', ({}, {}), '()', False, 'from numpyro.contrib.funsor import enum, config_enumerate, markov, trace as packed_trace\n'), ((161, 44, 161, 102), 'jax.lax.scan', 'lax.scan', ({(161, 53, 161, 60): 'body_fn', (161, 62, 161, 75): 'wrapped_carry', (161, 77, 161, 80): 'xs_', (161, 82, 161, 92): 'length - 1', (161, 94, 161, 101): 'reverse'}, {}), '(body_fn, wrapped_carry, xs_, length - 1, reverse)', False, 'from jax import lax, random, tree_flatten, tree_map, tree_multimap, tree_unflatten\n'), ((176, 33, 176, 52), 'jax.tree_flatten', 'tree_flatten', ({(176, 46, 176, 51): 'carry'}, {}), '(carry)', False, 'from jax import lax, random, tree_flatten, tree_map, tree_multimap, tree_unflatten\n'), ((179, 16, 179, 54), 'jax.tree_unflatten', 'tree_unflatten', ({(179, 31, 179, 38): 'treedef', (179, 40, 179, 53): 'flatten_carry'}, {}), '(treedef, flatten_carry)', False, 'from jax import lax, random, tree_flatten, tree_map, tree_multimap, tree_unflatten\n'), ((332, 14, 332, 38), 'numpyro.primitives.apply_stack', 'apply_stack', ({(332, 26, 332, 37): 'initial_msg'}, {}), '(initial_msg)', False, 'from numpyro.primitives import _PYRO_STACK, Messenger, apply_stack\n'), ((122, 26, 122, 47), 'jax.random.split', 'random.split', ({(122, 39, 122, 46): 'rng_key'}, {}), '(rng_key)', False, 'from jax import lax, random, tree_flatten, tree_map, tree_multimap, tree_unflatten\n'), ((124, 20, 124, 44), 'numpyro.handlers.seed', 'handlers.seed', ({(124, 34, 124, 35): 'f', (124, 37, 124, 43): 'subkey'}, {}), '(f, subkey)', False, 'from numpyro import handlers\n'), ((126, 22, 126, 65), 'functools.partial', 'partial', ({(126, 30, 126, 43): '_subs_wrapper', (126, 45, 126, 53): 'subs_map', (126, 55, 126, 56): 'i', (126, 58, 126, 64): 'length'}, {}), '(_subs_wrapper, subs_map, i, length)', False, 'from functools import partial\n'), ((177, 25, 177, 49), 'jax.numpy.reshape', 'jnp.reshape', ({(177, 37, 177, 38): 'x', (177, 40, 177, 48): 't1_shape'}, {}), '(x, t1_shape)', True, 'import jax.numpy as jnp\n'), ((193, 26, 193, 47), 'jax.random.split', 'random.split', ({(193, 39, 193, 46): 'rng_key'}, {}), '(rng_key)', False, 'from jax import lax, random, tree_flatten, tree_map, tree_multimap, tree_unflatten\n'), ((195, 13, 195, 29), 'numpyro.handlers.block', 'handlers.block', ({}, {}), '()', False, 'from numpyro import handlers\n'), ((209, 30, 209, 42), 'jax.numpy.array', 'jnp.array', ({(209, 40, 
209, 41): '(0)'}, {}), '(0)', True, 'import jax.numpy as jnp\n'), ((337, 12, 337, 28), 'numpyro.primitives.apply_stack', 'apply_stack', ({(337, 24, 337, 27): 'msg'}, {}), '(msg)', False, 'from numpyro.primitives import _PYRO_STACK, Messenger, apply_stack\n'), ((48, 19, 48, 60), 'numpyro.handlers.seed', 'handlers.seed', (), '', False, 'from numpyro import handlers\n'), ((61, 20, 61, 36), 'jax.numpy.shape', 'jnp.shape', ({(61, 30, 61, 35): 'value'}, {}), '(value)', True, 'import jax.numpy as jnp\n'), ((93, 32, 93, 47), 'jax.numpy.ndim', 'jnp.ndim', ({(93, 41, 93, 46): 'value'}, {}), '(value)', True, 'import jax.numpy as jnp\n'), ((121, 24, 121, 41), 'numpyro.util.not_jax_tracer', 'not_jax_tracer', ({(121, 39, 121, 40): 'i'}, {}), '(i)', False, 'from numpyro.util import not_jax_tracer\n'), ((128, 28, 128, 79), 'numpyro.handlers.condition', 'handlers.condition', (), '', False, 'from numpyro import handlers\n'), ((133, 17, 133, 47), 'numpyro.handlers.scope', 'handlers.scope', (), '', False, 'from numpyro import handlers\n'), ((137, 17, 137, 33), 'numpyro.handlers.block', 'handlers.block', ({}, {}), '()', False, 'from numpyro import handlers\n'), ((137, 35, 137, 49), 'numpyro.contrib.funsor.trace', 'packed_trace', ({}, {}), '()', True, 'from numpyro.contrib.funsor import enum, config_enumerate, markov, trace as packed_trace\n'), ((137, 78, 137, 84), 'numpyro.contrib.funsor.enum', 'enum', ({}, {}), '()', False, 'from numpyro.contrib.funsor import enum, config_enumerate, markov, trace as packed_trace\n'), ((137, 86, 137, 94), 'numpyro.contrib.funsor.markov', 'markov', ({}, {}), '()', False, 'from numpyro.contrib.funsor import enum, config_enumerate, markov, trace as packed_trace\n'), ((148, 33, 148, 45), 'jax.numpy.shape', 'jnp.shape', ({(148, 43, 148, 44): 'x'}, {}), '(x)', True, 'import jax.numpy as jnp\n'), ((196, 24, 196, 48), 'numpyro.handlers.seed', 'handlers.seed', ({(196, 38, 196, 39): 'f', (196, 41, 196, 47): 'subkey'}, {}), '(f, subkey)', False, 'from numpyro import handlers\n'), ((198, 26, 198, 69), 'functools.partial', 'partial', ({(198, 34, 198, 47): '_subs_wrapper', (198, 49, 198, 57): 'subs_map', (198, 59, 198, 60): 'i', (198, 62, 198, 68): 'length'}, {}), '(_subs_wrapper, subs_map, i, length)', False, 'from functools import partial\n'), ((204, 17, 204, 33), 'numpyro.handlers.trace', 'handlers.trace', ({}, {}), '()', False, 'from numpyro import handlers\n'), ((343, 17, 343, 38), 'numpyro.contrib.funsor.enum_messenger.LocalNamedMessenger', 'LocalNamedMessenger', ({}, {}), '()', False, 'from numpyro.contrib.funsor.enum_messenger import LocalNamedMessenger\n'), ((346, 16, 346, 32), 'numpyro.primitives.apply_stack', 'apply_stack', ({(346, 28, 346, 31): 'msg'}, {}), '(msg)', False, 'from numpyro.primitives import _PYRO_STACK, Messenger, apply_stack\n'), ((130, 28, 130, 81), 'numpyro.handlers.substitute', 'handlers.substitute', (), '', False, 'from numpyro import handlers\n'), ((144, 31, 144, 58), 'numpyro.contrib.funsor.config_enumerate', 'config_enumerate', ({(144, 48, 144, 57): 'seeded_fn'}, {}), '(seeded_fn)', False, 'from numpyro.contrib.funsor import enum, config_enumerate, markov, trace as packed_trace\n'), ((153, 20, 153, 32), 'jax.numpy.array', 'jnp.array', ({(153, 30, 153, 31): '(1)'}, {}), '(1)', True, 'import jax.numpy as jnp\n'), ((159, 36, 159, 57), 'jax.numpy.expand_dims', 'jnp.expand_dims', ({(159, 52, 159, 53): 'x', (159, 55, 159, 56): '0'}, {}), '(x, 0)', True, 'import jax.numpy as jnp\n'), ((172, 66, 172, 79), 'jax.numpy.shape', 'jnp.shape', ({(172, 76, 172, 78): 'z0'}, 
{}), '(z0)', True, 'import jax.numpy as jnp\n'), ((200, 32, 200, 83), 'numpyro.handlers.condition', 'handlers.condition', (), '', False, 'from numpyro import handlers\n'), ((97, 67, 97, 83), 'jax.numpy.shape', 'jnp.shape', ({(97, 77, 97, 82): 'value'}, {}), '(value)', True, 'import jax.numpy as jnp\n'), ((148, 55, 148, 78), 'jax.tree_flatten', 'tree_flatten', ({(148, 68, 148, 77): 'new_carry'}, {}), '(new_carry)', False, 'from jax import lax, random, tree_flatten, tree_map, tree_multimap, tree_unflatten\n'), ((151, 66, 151, 78), 'jax.numpy.shape', 'jnp.shape', ({(151, 76, 151, 77): 'b'}, {}), '(b)', True, 'import jax.numpy as jnp\n'), ((186, 17, 186, 33), 'jax.tree_flatten', 'tree_flatten', ({(186, 30, 186, 32): 'xs'}, {}), '(xs)', False, 'from jax import lax, random, tree_flatten, tree_map, tree_multimap, tree_unflatten\n'), ((202, 32, 202, 85), 'numpyro.handlers.substitute', 'handlers.substitute', (), '', False, 'from numpyro import handlers\n'), ((99, 79, 99, 91), 'jax.numpy.shape', 'jnp.shape', ({(99, 89, 99, 90): 'x'}, {}), '(x)', True, 'import jax.numpy as jnp\n')] |
earth-emoji/dennea | src/catalog/migrations/0003_remove_productattributevalue_name.py | fbabd7d9ecc95898411aba238bbcca8b5e942c31 | # Generated by Django 2.2.12 on 2020-06-10 01:11
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('catalog', '0002_auto_20200610_0019'),
]
operations = [
migrations.RemoveField(
model_name='productattributevalue',
name='name',
),
]
| [((13, 8, 16, 9), 'django.db.migrations.RemoveField', 'migrations.RemoveField', (), '', False, 'from django.db import migrations\n')] |
divindevaiah/e2xgrader | e2xgrader/preprocessors/overwritecells.py | 19eb4662e4eee5ddef673097517e4bd4fb469e62 | import json
from nbformat.notebooknode import NotebookNode
from nbconvert.exporters.exporter import ResourcesDict
from typing import Tuple
from nbgrader.api import MissingEntry
from nbgrader.preprocessors import OverwriteCells as NbgraderOverwriteCells
from ..utils.extra_cells import is_singlechoice, is_multiplechoice
class OverwriteCells(NbgraderOverwriteCells):
def preprocess_cell(
self, cell: NotebookNode, resources: ResourcesDict, cell_index: int
) -> Tuple[NotebookNode, ResourcesDict]:
if not (is_singlechoice(cell) or is_multiplechoice(cell)):
return super().preprocess_cell(cell, resources, cell_index)
grade_id = cell.metadata.get("nbgrader", {}).get("grade_id", None)
if grade_id is None:
return cell, resources
try:
source_cell = self.gradebook.find_source_cell(
grade_id, self.notebook_id, self.assignment_id
)
except MissingEntry:
self.log.warning(f"Cell {grade_id} does not exist in database")
del cell.metadata.nbgrader["grade_id"]
return cell, resources
cell.metadata.extended_cell.source = json.loads(source_cell.source)
return cell, resources
| [((32, 45, 32, 75), 'json.loads', 'json.loads', ({(32, 56, 32, 74): 'source_cell.source'}, {}), '(source_cell.source)', False, 'import json\n')] |
ehtec/pdfminer.six | tools/pdf2txt.py | 5b1823f25ab998e904fc5d81687732580f23e3b9 | #!/usr/bin/env python3
"""A command line tool for extracting text and images from PDF and
output it to plain text, html, xml or tags."""
import argparse
import logging
import sys
from typing import Any, Container, Iterable, List, Optional
import pdfminer.high_level
from pdfminer.layout import LAParams
from pdfminer.utils import AnyIO
logging.basicConfig()
OUTPUT_TYPES = ((".htm", "html"),
(".html", "html"),
(".xml", "xml"),
(".tag", "tag"))
def float_or_disabled(x: str) -> Optional[float]:
if x.lower().strip() == "disabled":
return None
try:
return float(x)
except ValueError:
raise argparse.ArgumentTypeError("invalid float value: {}".format(x))
def extract_text(
files: Iterable[str] = [],
outfile: str = '-',
laparams: Optional[LAParams] = None,
output_type: str = 'text',
codec: str = 'utf-8',
strip_control: bool = False,
maxpages: int = 0,
page_numbers: Optional[Container[int]] = None,
password: str = "",
scale: float = 1.0,
rotation: int = 0,
layoutmode: str = 'normal',
output_dir: Optional[str] = None,
debug: bool = False,
disable_caching: bool = False,
**kwargs: Any
) -> AnyIO:
if not files:
raise ValueError("Must provide files to work upon!")
if output_type == "text" and outfile != "-":
for override, alttype in OUTPUT_TYPES:
if outfile.endswith(override):
output_type = alttype
if outfile == "-":
outfp: AnyIO = sys.stdout
if sys.stdout.encoding is not None:
codec = 'utf-8'
else:
outfp = open(outfile, "wb")
for fname in files:
with open(fname, "rb") as fp:
pdfminer.high_level.extract_text_to_fp(fp, **locals())
return outfp
def parse_args(args: Optional[List[str]]) -> argparse.Namespace:
parser = argparse.ArgumentParser(description=__doc__, add_help=True)
parser.add_argument(
"files", type=str, default=None, nargs="+",
help="One or more paths to PDF files.")
parser.add_argument(
"--version", "-v", action="version",
version="pdfminer.six v{}".format(pdfminer.__version__))
parser.add_argument(
"--debug", "-d", default=False, action="store_true",
help="Use debug logging level.")
parser.add_argument(
"--disable-caching", "-C", default=False, action="store_true",
help="If caching or resources, such as fonts, should be disabled.")
parse_params = parser.add_argument_group(
'Parser', description='Used during PDF parsing')
parse_params.add_argument(
"--page-numbers", type=int, default=None, nargs="+",
help="A space-seperated list of page numbers to parse.")
parse_params.add_argument(
"--pagenos", "-p", type=str,
help="A comma-separated list of page numbers to parse. "
"Included for legacy applications, use --page-numbers "
"for more idiomatic argument entry.")
parse_params.add_argument(
"--maxpages", "-m", type=int, default=0,
help="The maximum number of pages to parse.")
parse_params.add_argument(
"--password", "-P", type=str, default="",
help="The password to use for decrypting PDF file.")
parse_params.add_argument(
"--rotation", "-R", default=0, type=int,
help="The number of degrees to rotate the PDF "
"before other types of processing.")
la_params = LAParams() # will be used for defaults
la_param_group = parser.add_argument_group(
'Layout analysis', description='Used during layout analysis.')
la_param_group.add_argument(
"--no-laparams", "-n", default=False, action="store_true",
help="If layout analysis parameters should be ignored.")
la_param_group.add_argument(
"--detect-vertical", "-V", default=la_params.detect_vertical,
action="store_true",
help="If vertical text should be considered during layout analysis")
la_param_group.add_argument(
"--line-overlap", type=float, default=la_params.line_overlap,
help='If two characters have more overlap than this they '
'are considered to be on the same line. The overlap is specified '
'relative to the minimum height of both characters.')
la_param_group.add_argument(
"--char-margin", "-M", type=float, default=la_params.char_margin,
help="If two characters are closer together than this margin they "
"are considered to be part of the same line. The margin is "
"specified relative to the width of the character.")
la_param_group.add_argument(
"--word-margin", "-W", type=float, default=la_params.word_margin,
help="If two characters on the same line are further apart than this "
"margin then they are considered to be two separate words, and "
"an intermediate space will be added for readability. The margin "
"is specified relative to the width of the character.")
la_param_group.add_argument(
"--line-margin", "-L", type=float, default=la_params.line_margin,
help="If two lines are close together they are considered to "
"be part of the same paragraph. The margin is specified "
"relative to the height of a line.")
la_param_group.add_argument(
"--boxes-flow", "-F", type=float_or_disabled,
default=la_params.boxes_flow,
help="Specifies how much a horizontal and vertical position of a "
"text matters when determining the order of lines. The value "
"should be within the range of -1.0 (only horizontal position "
"matters) to +1.0 (only vertical position matters). You can also "
"pass `disabled` to disable advanced layout analysis, and "
"instead return text based on the position of the bottom left "
"corner of the text box.")
la_param_group.add_argument(
"--all-texts", "-A", default=la_params.all_texts, action="store_true",
help="If layout analysis should be performed on text in figures.")
output_params = parser.add_argument_group(
'Output', description='Used during output generation.')
output_params.add_argument(
"--outfile", "-o", type=str, default="-",
help="Path to file where output is written. "
"Or \"-\" (default) to write to stdout.")
output_params.add_argument(
"--output_type", "-t", type=str, default="text",
help="Type of output to generate {text,html,xml,tag}.")
output_params.add_argument(
"--codec", "-c", type=str, default="utf-8",
help="Text encoding to use in output file.")
output_params.add_argument(
"--output-dir", "-O", default=None,
help="The output directory to put extracted images in. If not given, "
"images are not extracted.")
output_params.add_argument(
"--layoutmode", "-Y", default="normal",
type=str, help="Type of layout to use when generating html "
"{normal,exact,loose}. If normal,each line is"
" positioned separately in the html. If exact"
", each character is positioned separately in"
" the html. If loose, same result as normal "
"but with an additional newline after each "
"text line. Only used when output_type is html.")
output_params.add_argument(
"--scale", "-s", type=float, default=1.0,
help="The amount of zoom to use when generating html file. "
"Only used when output_type is html.")
output_params.add_argument(
"--strip-control", "-S", default=False, action="store_true",
help="Remove control statement from text. "
"Only used when output_type is xml.")
parsed_args = parser.parse_args(args=args)
# Propagate parsed layout parameters to LAParams object
if parsed_args.no_laparams:
parsed_args.laparams = None
else:
parsed_args.laparams = LAParams(
line_overlap=parsed_args.line_overlap,
char_margin=parsed_args.char_margin,
line_margin=parsed_args.line_margin,
word_margin=parsed_args.word_margin,
boxes_flow=parsed_args.boxes_flow,
detect_vertical=parsed_args.detect_vertical,
all_texts=parsed_args.all_texts,
)
if parsed_args.page_numbers:
parsed_args.page_numbers = {x-1 for x in parsed_args.page_numbers}
if parsed_args.pagenos:
parsed_args.page_numbers = {int(x)-1 for x in parsed_args.pagenos.split(",")}
if parsed_args.output_type == "text" and parsed_args.outfile != "-":
for override, alttype in OUTPUT_TYPES:
if parsed_args.outfile.endswith(override):
parsed_args.output_type = alttype
return parsed_args
def main(args: Optional[List[str]] = None) -> int:
parsed_args = parse_args(args)
outfp = extract_text(**vars(parsed_args))
outfp.close()
return 0
if __name__ == '__main__':
sys.exit(main())
| [((13, 0, 13, 21), 'logging.basicConfig', 'logging.basicConfig', ({}, {}), '()', False, 'import logging\n'), ((70, 13, 70, 72), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (), '', False, 'import argparse\n'), ((106, 16, 106, 26), 'pdfminer.layout.LAParams', 'LAParams', ({}, {}), '()', False, 'from pdfminer.layout import LAParams\n'), ((191, 31, 199, 9), 'pdfminer.layout.LAParams', 'LAParams', (), '', False, 'from pdfminer.layout import LAParams\n')] |
agungnasik57/nython | nython/nythonize.py | cf499fe20f86e2685671495bd941b411fa066813 | """Compile Nim libraries as Python Extension Modules.
If you want your namespace to coexist with your pthon code, name this ponim.nim
and then your import will look like `from ponim.nim import adder` and
`from ponim import subtractor`. There must be a way to smooth that out in the
__init__.py file somehow.
Note that the file must be in the included source code dir. Currently it is
easiest to just put this in with your python code.
"""
from os import listdir, mkdir
from os.path import join, expanduser
from setuptools import Extension
from shutil import copyfile, rmtree
from typing import Sequence, Dict, List
import subprocess
import sys
import pathlib
# class NimLib(TypedDict):
# """Wrapper around a lib name and path for nim cdoe"""
# name: str
# path: str
def nythonize(nimbase: str, modules: Sequence[Dict[str, str]]) -> List[Extension]:
"""Compile a Nim library as a Python Extension Module.
`nimbase` is the path to `nimbase.h` on your system, which is needed for
Python to compile gene Nim generated C code.
This builds a set of Extenstions, which are then passed back to setuptools.
"""
extensions = []
# Create a top level working dir
rmtree(join("build", "nim_build"), ignore_errors=True)
pathlib.Path(join("build", "nim_build")).mkdir(parents=True)
for module in modules:
module_dir = join("build", "nim_build", f"{module['name']}_build")
rmtree(module_dir, ignore_errors=True)
mkdir(module_dir)
subprocess.run(
[
"nim",
"compileToC",
"--compileOnly",
"-d:release",
"-d:ssl",
"--app:lib",
"--opt:speed",
"--gc:markAndSweep",
f"--nimcache:{module_dir}",
module["path"],
],
check=True,
stderr=sys.stdout.buffer,
)
copyfile(
nimbase, join(module_dir, "nimbase.h"),
)
sources = []
for c_source_file in listdir(module_dir):
if c_source_file.endswith(".c"):
sources.append(join(module_dir, c_source_file))
extensions.append(
Extension(
name=module["name"],
sources=sources,
extra_compile_args=[
"-flto",
"-ffast-math",
"-march=native",
"-mtune=native",
"-O3",
"-fno-ident",
"-fsingle-precision-constant",
],
extra_link_args=["-s"],
include_dirs=[module_dir],
)
)
return extensions
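# Illustrative setup.py wiring (paths and module names are hypothetical):
#   setup(
#       ...,
#       ext_modules=nythonize(
#           "/path/to/nim/lib/nimbase.h",
#           [{"name": "ponim.nim", "path": "ponim/nim.nim"}],
#       ),
#   )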
| [((40, 11, 40, 37), 'os.path.join', 'join', ({(40, 16, 40, 23): '"""build"""', (40, 25, 40, 36): '"""nim_build"""'}, {}), "('build', 'nim_build')", False, 'from os.path import join, expanduser\n'), ((43, 21, 43, 74), 'os.path.join', 'join', ({(43, 26, 43, 33): '"""build"""', (43, 35, 43, 46): '"""nim_build"""', (43, 48, 43, 73): 'f"""{module[\'name\']}_build"""'}, {}), '(\'build\', \'nim_build\', f"{module[\'name\']}_build")', False, 'from os.path import join, expanduser\n'), ((44, 8, 44, 46), 'shutil.rmtree', 'rmtree', (), '', False, 'from shutil import copyfile, rmtree\n'), ((45, 8, 45, 25), 'os.mkdir', 'mkdir', ({(45, 14, 45, 24): 'module_dir'}, {}), '(module_dir)', False, 'from os import listdir, mkdir\n'), ((46, 8, 61, 9), 'subprocess.run', 'subprocess.run', (), '', False, 'import subprocess\n'), ((66, 29, 66, 48), 'os.listdir', 'listdir', ({(66, 37, 66, 47): 'module_dir'}, {}), '(module_dir)', False, 'from os import listdir, mkdir\n'), ((63, 21, 63, 50), 'os.path.join', 'join', ({(63, 26, 63, 36): 'module_dir', (63, 38, 63, 49): '"""nimbase.h"""'}, {}), "(module_dir, 'nimbase.h')", False, 'from os.path import join, expanduser\n'), ((70, 12, 84, 13), 'setuptools.Extension', 'Extension', (), '', False, 'from setuptools import Extension\n'), ((41, 17, 41, 43), 'os.path.join', 'join', ({(41, 22, 41, 29): '"""build"""', (41, 31, 41, 42): '"""nim_build"""'}, {}), "('build', 'nim_build')", False, 'from os.path import join, expanduser\n'), ((68, 31, 68, 62), 'os.path.join', 'join', ({(68, 36, 68, 46): 'module_dir', (68, 48, 68, 61): 'c_source_file'}, {}), '(module_dir, c_source_file)', False, 'from os.path import join, expanduser\n')] |
lixinso/pyro | tests/contrib/test_util.py | ca0d6417bed3882a47cb8cbb01b36f403ee903d5 | from collections import OrderedDict
import pytest
import torch
import pyro.distributions as dist
from pyro.contrib.util import (
get_indices, tensor_to_dict, rmv, rvv, lexpand, rexpand, rdiag, rtril, hessian
)
from tests.common import assert_equal
def test_get_indices_sizes():
sizes = OrderedDict([("a", 2), ("b", 2), ("c", 2)])
assert_equal(get_indices(["b"], sizes=sizes), torch.tensor([2, 3]))
assert_equal(get_indices(["b", "c"], sizes=sizes), torch.tensor([2, 3, 4, 5]))
tensors = OrderedDict([("a", torch.ones(2)), ("b", torch.ones(2)), ("c", torch.ones(2))])
assert_equal(get_indices(["b"], tensors=tensors), torch.tensor([2, 3]))
assert_equal(get_indices(["b", "c"], tensors=tensors), torch.tensor([2, 3, 4, 5]))
def test_tensor_to_dict():
sizes = OrderedDict([("a", 2), ("b", 2), ("c", 2)])
vector = torch.tensor([1., 2, 3, 4, 5, 6])
assert_equal(tensor_to_dict(sizes, vector), {"a": torch.tensor([1., 2.]),
"b": torch.tensor([3., 4.]),
"c": torch.tensor([5., 6.])})
assert_equal(tensor_to_dict(sizes, vector, subset=["b"]),
{"b": torch.tensor([3., 4.])})
@pytest.mark.parametrize("A,b", [
(torch.tensor([[1., 2.], [2., -3.]]), torch.tensor([-1., 2.]))
])
def test_rmv(A, b):
assert_equal(rmv(A, b), A.mv(b), prec=1e-8)
batched_A = lexpand(A, 5, 4)
batched_b = lexpand(b, 5, 4)
expected_Ab = lexpand(A.mv(b), 5, 4)
assert_equal(rmv(batched_A, batched_b), expected_Ab, prec=1e-8)
@pytest.mark.parametrize("a,b", [
(torch.tensor([1., 2.]), torch.tensor([-1., 2.]))
])
def test_rvv(a, b):
assert_equal(rvv(a, b), torch.dot(a, b), prec=1e-8)
batched_a = lexpand(a, 5, 4)
batched_b = lexpand(b, 5, 4)
expected_ab = lexpand(torch.dot(a, b), 5, 4)
assert_equal(rvv(batched_a, batched_b), expected_ab, prec=1e-8)
def test_lexpand():
A = torch.tensor([[1., 2.], [-2., 0]])
assert_equal(lexpand(A), A, prec=1e-8)
assert_equal(lexpand(A, 4), A.expand(4, 2, 2), prec=1e-8)
assert_equal(lexpand(A, 4, 2), A.expand(4, 2, 2, 2), prec=1e-8)
def test_rexpand():
A = torch.tensor([[1., 2.], [-2., 0]])
assert_equal(rexpand(A), A, prec=1e-8)
assert_equal(rexpand(A, 4), A.unsqueeze(-1).expand(2, 2, 4), prec=1e-8)
assert_equal(rexpand(A, 4, 2), A.unsqueeze(-1).unsqueeze(-1).expand(2, 2, 4, 2), prec=1e-8)
def test_rtril():
A = torch.tensor([[1., 2.], [-2., 0]])
assert_equal(rtril(A), torch.tril(A), prec=1e-8)
expanded = lexpand(A, 5, 4)
expected = lexpand(torch.tril(A), 5, 4)
assert_equal(rtril(expanded), expected, prec=1e-8)
def test_rdiag():
v = torch.tensor([1., 2., -1.])
assert_equal(rdiag(v), torch.diag(v), prec=1e-8)
expanded = lexpand(v, 5, 4)
    expected = lexpand(torch.diag(v), 5, 4)
    assert_equal(rdiag(expanded), expected, prec=1e-8)
def test_hessian_mvn():
tmp = torch.randn(3, 10)
cov = torch.matmul(tmp, tmp.t())
mvn = dist.MultivariateNormal(cov.new_zeros(3), cov)
x = torch.randn(3, requires_grad=True)
y = mvn.log_prob(x)
assert_equal(hessian(y, x), -mvn.precision_matrix)
def test_hessian_multi_variables():
x = torch.randn(3, requires_grad=True)
z = torch.randn(3, requires_grad=True)
y = (x ** 2 * z + z ** 3).sum()
H = hessian(y, (x, z))
Hxx = (2 * z).diag()
Hxz = (2 * x).diag()
Hzz = (6 * z).diag()
target_H = torch.cat([torch.cat([Hxx, Hxz]), torch.cat([Hxz, Hzz])], dim=1)
assert_equal(H, target_H)
| [((13, 12, 13, 55), 'collections.OrderedDict', 'OrderedDict', ({(13, 24, 13, 54): "[('a', 2), ('b', 2), ('c', 2)]"}, {}), "([('a', 2), ('b', 2), ('c', 2)])", False, 'from collections import OrderedDict\n'), ((22, 12, 22, 55), 'collections.OrderedDict', 'OrderedDict', ({(22, 24, 22, 54): "[('a', 2), ('b', 2), ('c', 2)]"}, {}), "([('a', 2), ('b', 2), ('c', 2)])", False, 'from collections import OrderedDict\n'), ((23, 13, 23, 46), 'torch.tensor', 'torch.tensor', ({(23, 26, 23, 45): '[1.0, 2, 3, 4, 5, 6]'}, {}), '([1.0, 2, 3, 4, 5, 6])', False, 'import torch\n'), ((36, 16, 36, 32), 'pyro.contrib.util.lexpand', 'lexpand', ({(36, 24, 36, 25): 'A', (36, 27, 36, 28): '5', (36, 30, 36, 31): '4'}, {}), '(A, 5, 4)', False, 'from pyro.contrib.util import get_indices, tensor_to_dict, rmv, rvv, lexpand, rexpand, rdiag, rtril, hessian\n'), ((37, 16, 37, 32), 'pyro.contrib.util.lexpand', 'lexpand', ({(37, 24, 37, 25): 'b', (37, 27, 37, 28): '5', (37, 30, 37, 31): '4'}, {}), '(b, 5, 4)', False, 'from pyro.contrib.util import get_indices, tensor_to_dict, rmv, rvv, lexpand, rexpand, rdiag, rtril, hessian\n'), ((47, 16, 47, 32), 'pyro.contrib.util.lexpand', 'lexpand', ({(47, 24, 47, 25): 'a', (47, 27, 47, 28): '5', (47, 30, 47, 31): '4'}, {}), '(a, 5, 4)', False, 'from pyro.contrib.util import get_indices, tensor_to_dict, rmv, rvv, lexpand, rexpand, rdiag, rtril, hessian\n'), ((48, 16, 48, 32), 'pyro.contrib.util.lexpand', 'lexpand', ({(48, 24, 48, 25): 'b', (48, 27, 48, 28): '5', (48, 30, 48, 31): '4'}, {}), '(b, 5, 4)', False, 'from pyro.contrib.util import get_indices, tensor_to_dict, rmv, rvv, lexpand, rexpand, rdiag, rtril, hessian\n'), ((54, 8, 54, 42), 'torch.tensor', 'torch.tensor', ({(54, 21, 54, 41): '[[1.0, 2.0], [-2.0, 0]]'}, {}), '([[1.0, 2.0], [-2.0, 0]])', False, 'import torch\n'), ((61, 8, 61, 42), 'torch.tensor', 'torch.tensor', ({(61, 21, 61, 41): '[[1.0, 2.0], [-2.0, 0]]'}, {}), '([[1.0, 2.0], [-2.0, 0]])', False, 'import torch\n'), ((68, 8, 68, 42), 'torch.tensor', 'torch.tensor', ({(68, 21, 68, 41): '[[1.0, 2.0], [-2.0, 0]]'}, {}), '([[1.0, 2.0], [-2.0, 0]])', False, 'import torch\n'), ((70, 15, 70, 31), 'pyro.contrib.util.lexpand', 'lexpand', ({(70, 23, 70, 24): 'A', (70, 26, 70, 27): '5', (70, 29, 70, 30): '4'}, {}), '(A, 5, 4)', False, 'from pyro.contrib.util import get_indices, tensor_to_dict, rmv, rvv, lexpand, rexpand, rdiag, rtril, hessian\n'), ((76, 8, 76, 35), 'torch.tensor', 'torch.tensor', ({(76, 21, 76, 34): '[1.0, 2.0, -1.0]'}, {}), '([1.0, 2.0, -1.0])', False, 'import torch\n'), ((78, 15, 78, 31), 'pyro.contrib.util.lexpand', 'lexpand', ({(78, 23, 78, 24): 'v', (78, 26, 78, 27): '5', (78, 29, 78, 30): '4'}, {}), '(v, 5, 4)', False, 'from pyro.contrib.util import get_indices, tensor_to_dict, rmv, rvv, lexpand, rexpand, rdiag, rtril, hessian\n'), ((84, 10, 84, 28), 'torch.randn', 'torch.randn', ({(84, 22, 84, 23): '3', (84, 25, 84, 27): '10'}, {}), '(3, 10)', False, 'import torch\n'), ((88, 8, 88, 42), 'torch.randn', 'torch.randn', (), '', False, 'import torch\n'), ((94, 8, 94, 42), 'torch.randn', 'torch.randn', (), '', False, 'import torch\n'), ((95, 8, 95, 42), 'torch.randn', 'torch.randn', (), '', False, 'import torch\n'), ((98, 8, 98, 26), 'pyro.contrib.util.hessian', 'hessian', ({(98, 16, 98, 17): 'y', (98, 19, 98, 25): '(x, z)'}, {}), '(y, (x, z))', False, 'from pyro.contrib.util import get_indices, tensor_to_dict, rmv, rvv, lexpand, rexpand, rdiag, rtril, hessian\n'), ((103, 4, 103, 29), 'tests.common.assert_equal', 'assert_equal', ({(103, 17, 103, 18): 'H', (103, 20, 
103, 28): 'target_H'}, {}), '(H, target_H)', False, 'from tests.common import assert_equal\n'), ((14, 17, 14, 48), 'pyro.contrib.util.get_indices', 'get_indices', (), '', False, 'from pyro.contrib.util import get_indices, tensor_to_dict, rmv, rvv, lexpand, rexpand, rdiag, rtril, hessian\n'), ((14, 50, 14, 70), 'torch.tensor', 'torch.tensor', ({(14, 63, 14, 69): '[2, 3]'}, {}), '([2, 3])', False, 'import torch\n'), ((15, 17, 15, 53), 'pyro.contrib.util.get_indices', 'get_indices', (), '', False, 'from pyro.contrib.util import get_indices, tensor_to_dict, rmv, rvv, lexpand, rexpand, rdiag, rtril, hessian\n'), ((15, 55, 15, 81), 'torch.tensor', 'torch.tensor', ({(15, 68, 15, 80): '[2, 3, 4, 5]'}, {}), '([2, 3, 4, 5])', False, 'import torch\n'), ((17, 17, 17, 52), 'pyro.contrib.util.get_indices', 'get_indices', (), '', False, 'from pyro.contrib.util import get_indices, tensor_to_dict, rmv, rvv, lexpand, rexpand, rdiag, rtril, hessian\n'), ((17, 54, 17, 74), 'torch.tensor', 'torch.tensor', ({(17, 67, 17, 73): '[2, 3]'}, {}), '([2, 3])', False, 'import torch\n'), ((18, 17, 18, 57), 'pyro.contrib.util.get_indices', 'get_indices', (), '', False, 'from pyro.contrib.util import get_indices, tensor_to_dict, rmv, rvv, lexpand, rexpand, rdiag, rtril, hessian\n'), ((18, 59, 18, 85), 'torch.tensor', 'torch.tensor', ({(18, 72, 18, 84): '[2, 3, 4, 5]'}, {}), '([2, 3, 4, 5])', False, 'import torch\n'), ((24, 17, 24, 46), 'pyro.contrib.util.tensor_to_dict', 'tensor_to_dict', ({(24, 32, 24, 37): 'sizes', (24, 39, 24, 45): 'vector'}, {}), '(sizes, vector)', False, 'from pyro.contrib.util import get_indices, tensor_to_dict, rmv, rvv, lexpand, rexpand, rdiag, rtril, hessian\n'), ((27, 17, 27, 60), 'pyro.contrib.util.tensor_to_dict', 'tensor_to_dict', (), '', False, 'from pyro.contrib.util import get_indices, tensor_to_dict, rmv, rvv, lexpand, rexpand, rdiag, rtril, hessian\n'), ((35, 17, 35, 26), 'pyro.contrib.util.rmv', 'rmv', ({(35, 21, 35, 22): 'A', (35, 24, 35, 25): 'b'}, {}), '(A, b)', False, 'from pyro.contrib.util import get_indices, tensor_to_dict, rmv, rvv, lexpand, rexpand, rdiag, rtril, hessian\n'), ((39, 17, 39, 42), 'pyro.contrib.util.rmv', 'rmv', ({(39, 21, 39, 30): 'batched_A', (39, 32, 39, 41): 'batched_b'}, {}), '(batched_A, batched_b)', False, 'from pyro.contrib.util import get_indices, tensor_to_dict, rmv, rvv, lexpand, rexpand, rdiag, rtril, hessian\n'), ((46, 17, 46, 26), 'pyro.contrib.util.rvv', 'rvv', ({(46, 21, 46, 22): 'a', (46, 24, 46, 25): 'b'}, {}), '(a, b)', False, 'from pyro.contrib.util import get_indices, tensor_to_dict, rmv, rvv, lexpand, rexpand, rdiag, rtril, hessian\n'), ((46, 28, 46, 43), 'torch.dot', 'torch.dot', ({(46, 38, 46, 39): 'a', (46, 41, 46, 42): 'b'}, {}), '(a, b)', False, 'import torch\n'), ((49, 26, 49, 41), 'torch.dot', 'torch.dot', ({(49, 36, 49, 37): 'a', (49, 39, 49, 40): 'b'}, {}), '(a, b)', False, 'import torch\n'), ((50, 17, 50, 42), 'pyro.contrib.util.rvv', 'rvv', ({(50, 21, 50, 30): 'batched_a', (50, 32, 50, 41): 'batched_b'}, {}), '(batched_a, batched_b)', False, 'from pyro.contrib.util import get_indices, tensor_to_dict, rmv, rvv, lexpand, rexpand, rdiag, rtril, hessian\n'), ((55, 17, 55, 27), 'pyro.contrib.util.lexpand', 'lexpand', ({(55, 25, 55, 26): 'A'}, {}), '(A)', False, 'from pyro.contrib.util import get_indices, tensor_to_dict, rmv, rvv, lexpand, rexpand, rdiag, rtril, hessian\n'), ((56, 17, 56, 30), 'pyro.contrib.util.lexpand', 'lexpand', ({(56, 25, 56, 26): 'A', (56, 28, 56, 29): '(4)'}, {}), '(A, 4)', False, 'from pyro.contrib.util import 
get_indices, tensor_to_dict, rmv, rvv, lexpand, rexpand, rdiag, rtril, hessian\n'), ((57, 17, 57, 33), 'pyro.contrib.util.lexpand', 'lexpand', ({(57, 25, 57, 26): 'A', (57, 28, 57, 29): '(4)', (57, 31, 57, 32): '(2)'}, {}), '(A, 4, 2)', False, 'from pyro.contrib.util import get_indices, tensor_to_dict, rmv, rvv, lexpand, rexpand, rdiag, rtril, hessian\n'), ((62, 17, 62, 27), 'pyro.contrib.util.rexpand', 'rexpand', ({(62, 25, 62, 26): 'A'}, {}), '(A)', False, 'from pyro.contrib.util import get_indices, tensor_to_dict, rmv, rvv, lexpand, rexpand, rdiag, rtril, hessian\n'), ((63, 17, 63, 30), 'pyro.contrib.util.rexpand', 'rexpand', ({(63, 25, 63, 26): 'A', (63, 28, 63, 29): '(4)'}, {}), '(A, 4)', False, 'from pyro.contrib.util import get_indices, tensor_to_dict, rmv, rvv, lexpand, rexpand, rdiag, rtril, hessian\n'), ((64, 17, 64, 33), 'pyro.contrib.util.rexpand', 'rexpand', ({(64, 25, 64, 26): 'A', (64, 28, 64, 29): '(4)', (64, 31, 64, 32): '(2)'}, {}), '(A, 4, 2)', False, 'from pyro.contrib.util import get_indices, tensor_to_dict, rmv, rvv, lexpand, rexpand, rdiag, rtril, hessian\n'), ((69, 17, 69, 25), 'pyro.contrib.util.rtril', 'rtril', ({(69, 23, 69, 24): 'A'}, {}), '(A)', False, 'from pyro.contrib.util import get_indices, tensor_to_dict, rmv, rvv, lexpand, rexpand, rdiag, rtril, hessian\n'), ((69, 27, 69, 40), 'torch.tril', 'torch.tril', ({(69, 38, 69, 39): 'A'}, {}), '(A)', False, 'import torch\n'), ((71, 23, 71, 36), 'torch.tril', 'torch.tril', ({(71, 34, 71, 35): 'A'}, {}), '(A)', False, 'import torch\n'), ((72, 17, 72, 32), 'pyro.contrib.util.rtril', 'rtril', ({(72, 23, 72, 31): 'expanded'}, {}), '(expanded)', False, 'from pyro.contrib.util import get_indices, tensor_to_dict, rmv, rvv, lexpand, rexpand, rdiag, rtril, hessian\n'), ((77, 17, 77, 25), 'pyro.contrib.util.rdiag', 'rdiag', ({(77, 23, 77, 24): 'v'}, {}), '(v)', False, 'from pyro.contrib.util import get_indices, tensor_to_dict, rmv, rvv, lexpand, rexpand, rdiag, rtril, hessian\n'), ((77, 27, 77, 40), 'torch.diag', 'torch.diag', ({(77, 38, 77, 39): 'v'}, {}), '(v)', False, 'import torch\n'), ((79, 24, 79, 37), 'torch.diag', 'torch.diag', ({(79, 35, 79, 36): 'v'}, {}), '(v)', False, 'import torch\n'), ((80, 17, 80, 32), 'pyro.contrib.util.rdiag', 'rdiag', ({(80, 23, 80, 31): 'expanded'}, {}), '(expanded)', False, 'from pyro.contrib.util import get_indices, tensor_to_dict, rmv, rvv, lexpand, rexpand, rdiag, rtril, hessian\n'), ((90, 17, 90, 30), 'pyro.contrib.util.hessian', 'hessian', ({(90, 25, 90, 26): 'y', (90, 28, 90, 29): 'x'}, {}), '(y, x)', False, 'from pyro.contrib.util import get_indices, tensor_to_dict, rmv, rvv, lexpand, rexpand, rdiag, rtril, hessian\n'), ((24, 54, 24, 76), 'torch.tensor', 'torch.tensor', ({(24, 67, 24, 75): '[1.0, 2.0]'}, {}), '([1.0, 2.0])', False, 'import torch\n'), ((25, 54, 25, 76), 'torch.tensor', 'torch.tensor', ({(25, 67, 25, 75): '[3.0, 4.0]'}, {}), '([3.0, 4.0])', False, 'import torch\n'), ((26, 54, 26, 76), 'torch.tensor', 'torch.tensor', ({(26, 67, 26, 75): '[5.0, 6.0]'}, {}), '([5.0, 6.0])', False, 'import torch\n'), ((28, 23, 28, 45), 'torch.tensor', 'torch.tensor', ({(28, 36, 28, 44): '[3.0, 4.0]'}, {}), '([3.0, 4.0])', False, 'import torch\n'), ((32, 5, 32, 40), 'torch.tensor', 'torch.tensor', ({(32, 18, 32, 39): '[[1.0, 2.0], [2.0, -3.0]]'}, {}), '([[1.0, 2.0], [2.0, -3.0]])', False, 'import torch\n'), ((32, 42, 32, 65), 'torch.tensor', 'torch.tensor', ({(32, 55, 32, 64): '[-1.0, 2.0]'}, {}), '([-1.0, 2.0])', False, 'import torch\n'), ((43, 5, 43, 27), 'torch.tensor', 
'torch.tensor', ({(43, 18, 43, 26): '[1.0, 2.0]'}, {}), '([1.0, 2.0])', False, 'import torch\n'), ((43, 29, 43, 52), 'torch.tensor', 'torch.tensor', ({(43, 42, 43, 51): '[-1.0, 2.0]'}, {}), '([-1.0, 2.0])', False, 'import torch\n'), ((102, 26, 102, 47), 'torch.cat', 'torch.cat', ({(102, 36, 102, 46): '[Hxx, Hxz]'}, {}), '([Hxx, Hxz])', False, 'import torch\n'), ((102, 49, 102, 70), 'torch.cat', 'torch.cat', ({(102, 59, 102, 69): '[Hxz, Hzz]'}, {}), '([Hxz, Hzz])', False, 'import torch\n'), ((16, 33, 16, 46), 'torch.ones', 'torch.ones', ({(16, 44, 16, 45): '2'}, {}), '(2)', False, 'import torch\n'), ((16, 55, 16, 68), 'torch.ones', 'torch.ones', ({(16, 66, 16, 67): '2'}, {}), '(2)', False, 'import torch\n'), ((16, 77, 16, 90), 'torch.ones', 'torch.ones', ({(16, 88, 16, 89): '2'}, {}), '(2)', False, 'import torch\n')] |
HarisHijazi/mojarnik-server | emodul/apps.py | bee7266609cc0bca7cc6a4059086fc0ba7219a33 | from django.apps import AppConfig
class EmodulConfig(AppConfig):
name = 'emodul'
| [] |
mIXs222/diffnet | Diffnet++/class/DataModule.py | 1f580332254a5113ed7b88b9b2e0aa467344e94d | from __future__ import division
from collections import defaultdict
import numpy as np
from time import time
import random
import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()
# import tensorflow as tf
class DataModule():
def __init__(self, conf, filename):
self.conf = conf
self.data_dict = {}
self.terminal_flag = 1
self.filename = filename
self.index = 0
    ####### Initialize Procedures #######
def prepareModelSupplement(self, model):
data_dict = {}
if 'CONSUMED_ITEMS_SPARSE_MATRIX' in model.supply_set:
self.generateConsumedItemsSparseMatrix()
#self.arrangePositiveData()
data_dict['CONSUMED_ITEMS_INDICES_INPUT'] = self.consumed_items_indices_list
data_dict['CONSUMED_ITEMS_VALUES_INPUT'] = self.consumed_items_values_list
data_dict['CONSUMED_ITEMS_VALUES_WEIGHT_AVG_INPUT'] = self.consumed_items_values_weight_avg_list
data_dict['CONSUMED_ITEMS_NUM_INPUT'] = self.consumed_item_num_list
data_dict['CONSUMED_ITEMS_NUM_DICT_INPUT'] = self.user_item_num_dict
data_dict['USER_ITEM_SPARSITY_DICT'] = self.user_item_sparsity_dict
if 'SOCIAL_NEIGHBORS_SPARSE_MATRIX' in model.supply_set:
self.readSocialNeighbors()
self.generateSocialNeighborsSparseMatrix()
data_dict['SOCIAL_NEIGHBORS_INDICES_INPUT'] = self.social_neighbors_indices_list
data_dict['SOCIAL_NEIGHBORS_VALUES_INPUT'] = self.social_neighbors_values_list
data_dict['SOCIAL_NEIGHBORS_VALUES_WEIGHT_AVG_INPUT'] = self.social_neighbors_values_weight_avg_list
data_dict['SOCIAL_NEIGHBORS_NUM_INPUT'] = self.social_neighbor_num_list
data_dict['SOCIAL_NEIGHBORS_NUM_DICT_INPUT'] = self.social_neighbors_num_dict
data_dict['USER_USER_SPARSITY_DICT']= self.user_user_sparsity_dict
if 'ITEM_CUSTOMER_SPARSE_MATRIX' in model.supply_set:
self.generateConsumedItemsSparseMatrixForItemUser()
data_dict['ITEM_CUSTOMER_INDICES_INPUT'] = self.item_customer_indices_list
data_dict['ITEM_CUSTOMER_VALUES_INPUT'] = self.item_customer_values_list
data_dict['ITEM_CUSTOMER_VALUES_WEIGHT_AVG_INPUT'] = self.item_customer_values_weight_avg_list
data_dict['ITEM_CUSTOMER_NUM_INPUT'] = self.item_customer_num_list
data_dict['ITEM_USER_NUM_DICT_INPUT'] = self.item_user_num_dict
return data_dict
def initializeRankingTrain(self):
self.readData()
self.arrangePositiveData()
self.arrangePositiveDataForItemUser()
self.generateTrainNegative()
def initializeRankingVT(self):
self.readData()
self.arrangePositiveData()
self.arrangePositiveDataForItemUser()
self.generateTrainNegative()
def initalizeRankingEva(self):
self.readData()
self.getEvaPositiveBatch()
self.generateEvaNegative()
def linkedMap(self):
self.data_dict['USER_LIST'] = self.user_list
self.data_dict['ITEM_LIST'] = self.item_list
self.data_dict['LABEL_LIST'] = self.labels_list
def linkedRankingEvaMap(self):
self.data_dict['EVA_USER_LIST'] = self.eva_user_list
self.data_dict['EVA_ITEM_LIST'] = self.eva_item_list
####### Data Loading #######
def readData(self):
f = open(self.filename)
total_user_list = set()
hash_data = defaultdict(int)
for _, line in enumerate(f):
arr = line.split("\t")
hash_data[(int(arr[0]), int(arr[1]))] = 1
total_user_list.add(int(arr[0]))
self.total_user_list = list(total_user_list)
self.hash_data = hash_data
def arrangePositiveData(self):
positive_data = defaultdict(set)
user_item_num_dict = defaultdict(set)
total_data = set()
hash_data = self.hash_data
for (u, i) in hash_data:
total_data.add((u, i))
positive_data[u].add(i)
user_list = sorted(list(positive_data.keys()))
for u in range(self.conf.num_users):
user_item_num_dict[u] = len(positive_data[u])+1
self.positive_data = positive_data
self.user_item_num_dict = user_item_num_dict
        self.user_item_num_for_sparsity_dict = {u: len(positive_data[u]) for u in range(self.conf.num_users)}
self.total_data = len(total_data)
def Sparsity_analysis_for_user_item_network(self):
hash_data_for_user_item = self.hash_data
        sparsity_user_item_dict = {}
def arrangePositiveDataForItemUser(self):
positive_data_for_item_user = defaultdict(set)
item_user_num_dict = defaultdict(set)
total_data_for_item_user = set()
hash_data_for_item_user = self.hash_data
for (u, i) in hash_data_for_item_user:
total_data_for_item_user.add((i, u))
positive_data_for_item_user[i].add(u)
item_list = sorted(list(positive_data_for_item_user.keys()))
for i in range(self.conf.num_items):
item_user_num_dict[i] = len(positive_data_for_item_user[i])+1
self.item_user_num_dict = item_user_num_dict
self.positive_data_for_item_user = positive_data_for_item_user
self.total_data_for_item_user = len(total_data_for_item_user)
# ----------------------
    # This function is designed for generating train/val/test negative samples
def generateTrainNegative(self):
num_items = self.conf.num_items
num_negatives = self.conf.num_negatives
negative_data = defaultdict(set)
total_data = set()
hash_data = self.hash_data
for (u, i) in hash_data:
total_data.add((u, i))
for _ in range(num_negatives):
j = np.random.randint(num_items)
while (u, j) in hash_data:
j = np.random.randint(num_items)
negative_data[u].add(j)
total_data.add((u, j))
self.negative_data = negative_data
self.terminal_flag = 1
# ----------------------
    # This function is designed for the val/test sets, to compute the loss
def getVTRankingOneBatch(self):
positive_data = self.positive_data
negative_data = self.negative_data
total_user_list = self.total_user_list
user_list = []
item_list = []
labels_list = []
for u in total_user_list:
user_list.extend([u] * len(positive_data[u]))
item_list.extend(positive_data[u])
labels_list.extend([1] * len(positive_data[u]))
user_list.extend([u] * len(negative_data[u]))
item_list.extend(negative_data[u])
labels_list.extend([0] * len(negative_data[u]))
self.user_list = np.reshape(user_list, [-1, 1])
self.item_list = np.reshape(item_list, [-1, 1])
self.labels_list = np.reshape(labels_list, [-1, 1])
# ----------------------
    # This function is designed for the training process
def getTrainRankingBatch(self):
positive_data = self.positive_data
negative_data = self.negative_data
total_user_list = self.total_user_list
index = self.index
batch_size = self.conf.training_batch_size
user_list, item_list, labels_list = [], [], []
if index + batch_size < len(total_user_list):
target_user_list = total_user_list[index:index+batch_size]
self.index = index + batch_size
else:
target_user_list = total_user_list[index:len(total_user_list)]
self.index = 0
self.terminal_flag = 0
for u in target_user_list:
user_list.extend([u] * len(positive_data[u]))
item_list.extend(list(positive_data[u]))
labels_list.extend([1] * len(positive_data[u]))
user_list.extend([u] * len(negative_data[u]))
item_list.extend(list(negative_data[u]))
labels_list.extend([0] * len(negative_data[u]))
self.user_list = np.reshape(user_list, [-1, 1])
self.item_list = np.reshape(item_list, [-1, 1])
self.labels_list = np.reshape(labels_list, [-1, 1])
# ----------------------
# This function is designed for the positive data
def getEvaPositiveBatch(self):
hash_data = self.hash_data
user_list = []
item_list = []
index_dict = defaultdict(list)
index = 0
for (u, i) in hash_data:
user_list.append(u)
item_list.append(i)
index_dict[u].append(index)
index = index + 1
self.eva_user_list = np.reshape(user_list, [-1, 1])
self.eva_item_list = np.reshape(item_list, [-1, 1])
self.eva_index_dict = index_dict
# ----------------------
#This function is designed for generating negative data
def generateEvaNegative(self):
hash_data = self.hash_data
total_user_list = self.total_user_list
num_evaluate = self.conf.num_evaluate
num_items = self.conf.num_items
eva_negative_data = defaultdict(list)
for u in total_user_list:
for _ in range(num_evaluate):
j = np.random.randint(num_items)
while (u, j) in hash_data:
j = np.random.randint(num_items)
eva_negative_data[u].append(j)
self.eva_negative_data = eva_negative_data
# ----------------------
    # This function is designed for generating negative batches in ranking evaluation
def getEvaRankingBatch(self):
batch_size = self.conf.evaluate_batch_size
num_evaluate = self.conf.num_evaluate
eva_negative_data = self.eva_negative_data
total_user_list = self.total_user_list
index = self.index
terminal_flag = 1
total_users = len(total_user_list)
user_list = []
item_list = []
if index + batch_size < total_users:
batch_user_list = total_user_list[index:index+batch_size]
self.index = index + batch_size
else:
terminal_flag = 0
batch_user_list = total_user_list[index:total_users]
self.index = 0
for u in batch_user_list:
user_list.extend([u]*num_evaluate)
item_list.extend(eva_negative_data[u])
self.eva_user_list = np.reshape(user_list, [-1, 1])
self.eva_item_list = np.reshape(item_list, [-1, 1])
return batch_user_list, terminal_flag
# ----------------------
# Read social network information
def readSocialNeighbors(self, friends_flag=1):
social_neighbors = defaultdict(set)
social_neighbors_num_dict = defaultdict(set)
links_file = open(self.conf.links_filename)
for _, line in enumerate(links_file):
tmp = line.split('\t')
u1, u2 = int(tmp[0]), int(tmp[1])
social_neighbors[u1].add(u2)
if friends_flag == 1:
social_neighbors[u2].add(u1)
user_list = sorted(list(social_neighbors.keys()))
for u in range(self.conf.num_users):
social_neighbors_num_dict[u] = len(social_neighbors[u])+1
self.social_neighbors_num_dict = social_neighbors_num_dict
self.social_neighbors = social_neighbors
def arrangePositiveData(self):
positive_data = defaultdict(set)
user_item_num_dict = defaultdict(set)
total_data = set()
hash_data = self.hash_data
for (u, i) in hash_data:
total_data.add((u, i))
positive_data[u].add(i)
user_list = sorted(list(positive_data.keys()))
for u in range(self.conf.num_users):
user_item_num_dict[u] = len(positive_data[u])+1
self.positive_data = positive_data
self.user_item_num_dict = user_item_num_dict
self.total_data = len(total_data)
# ----------------------
#Generate Social Neighbors Sparse Matrix Indices and Values
def generateSocialNeighborsSparseMatrix(self):
social_neighbors = self.social_neighbors
social_neighbors_num_dict = self.social_neighbors_num_dict #weight avg
social_neighbors_indices_list = []
social_neighbors_values_list = []
social_neighbors_values_weight_avg_list = []
social_neighbor_num_list = []
social_neighbors_dict = defaultdict(list)
user_user_num_for_sparsity_dict = defaultdict(set)
user_user_sparsity_dict = {}
user_user_sparsity_dict['0-4'] = []
user_user_sparsity_dict['4-8'] = []
user_user_sparsity_dict['8-16'] = []
user_user_sparsity_dict['16-32'] = []
user_user_sparsity_dict['32-64'] = []
user_user_sparsity_dict['64-'] = []
for u in range(self.conf.num_users):
user_user_num_for_sparsity_dict[u] = len(social_neighbors[u])
for u in social_neighbors:
social_neighbors_dict[u] = sorted(social_neighbors[u])
user_list = sorted(list(social_neighbors.keys()))
#node att
for user in range(self.conf.num_users):
if user in social_neighbors_dict:
social_neighbor_num_list.append(len(social_neighbors_dict[user]))
else:
social_neighbor_num_list.append(1)
for user in user_list:
for friend in social_neighbors_dict[user]:
social_neighbors_indices_list.append([user, friend])
social_neighbors_values_list.append(1.0/len(social_neighbors_dict[user]))
social_neighbors_values_weight_avg_list.append(1.0/(np.sqrt(social_neighbors_num_dict[user])*np.sqrt(social_neighbors_num_dict[friend]))) #weight avg
for u in range(self.conf.num_users):
cur_user_neighbors_num = user_user_num_for_sparsity_dict[u]
if( (cur_user_neighbors_num >=0) & (cur_user_neighbors_num<4) ):
user_user_sparsity_dict['0-4'].append(u)
elif( (cur_user_neighbors_num >=4) & (cur_user_neighbors_num<8) ):
user_user_sparsity_dict['4-8'].append(u)
elif( (cur_user_neighbors_num >=8) & (cur_user_neighbors_num<16) ):
user_user_sparsity_dict['8-16'].append(u)
elif( (cur_user_neighbors_num >=16) & (cur_user_neighbors_num<32) ):
user_user_sparsity_dict['16-32'].append(u)
elif( (cur_user_neighbors_num >=32) & (cur_user_neighbors_num<64) ):
user_user_sparsity_dict['32-64'].append(u)
elif( cur_user_neighbors_num >=64):
user_user_sparsity_dict['64-'].append(u)
self.user_user_sparsity_dict = user_user_sparsity_dict
self.social_neighbors_indices_list = np.array(social_neighbors_indices_list).astype(np.int64)
self.social_neighbors_values_list = np.array(social_neighbors_values_list).astype(np.float32)
self.social_neighbors_values_weight_avg_list = np.array(social_neighbors_values_weight_avg_list).astype(np.float32) # weight avg
self.social_neighbor_num_list = np.array(social_neighbor_num_list).astype(np.int64)
#self.social_neighbors_values_list = tf.Variable(tf.random_normal([len(self.social_neighbors_indices_list)], stddev=0.01))
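        # Illustrative note (an assumption about downstream use, not code from the original file):
        # the index/value arrays built above are laid out for a [num_users, num_users] sparse
        # adjacency, e.g. tf.sparse.SparseTensor(indices, values, dense_shape), and the
        # "weight avg" values apply the symmetric normalization 1 / (sqrt(d_u) * sqrt(d_v)).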
# ----------------------
#Generate Consumed Items Sparse Matrix Indices and Values
def generateConsumedItemsSparseMatrix(self):
positive_data = self.positive_data
consumed_items_indices_list = []
consumed_items_values_list = []
consumed_items_values_weight_avg_list = []
consumed_item_num_list = []
consumed_items_dict = defaultdict(list)
user_item_num_for_sparsity_dict = defaultdict(set)
user_item_sparsity_dict = {}
user_item_sparsity_dict['0-4'] = []
user_item_sparsity_dict['4-8'] = []
user_item_sparsity_dict['8-16'] = []
user_item_sparsity_dict['16-32'] = []
user_item_sparsity_dict['32-64'] = []
user_item_sparsity_dict['64-'] = []
consumed_items_num_dict = self.user_item_num_dict #weight avg
#social_neighbors_num_dict = self.social_neighbors_num_dict #weight avg
item_user_num_dict = self.item_user_num_dict #weight avg
for u in positive_data:
consumed_items_dict[u] = sorted(positive_data[u])
user_list = sorted(list(positive_data.keys()))
for u in range(self.conf.num_users):
user_item_num_for_sparsity_dict[u] = len(positive_data[u])
for user in range(self.conf.num_users):
if user in consumed_items_dict:
consumed_item_num_list.append(len(consumed_items_dict[user]))
else:
consumed_item_num_list.append(1)
for u in user_list:
for i in consumed_items_dict[u]:
consumed_items_indices_list.append([u, i])
consumed_items_values_list.append(1.0/len(consumed_items_dict[u]))
consumed_items_values_weight_avg_list.append(1.0/( np.sqrt(consumed_items_num_dict[u]) * np.sqrt(item_user_num_dict[i]) )) #weight avg
for u in range(self.conf.num_users):
cur_user_consumed_item_num = user_item_num_for_sparsity_dict[u]
if( (cur_user_consumed_item_num >=0) & (cur_user_consumed_item_num<4) ):
user_item_sparsity_dict['0-4'].append(u)
elif( (cur_user_consumed_item_num >=4) & (cur_user_consumed_item_num<8) ):
user_item_sparsity_dict['4-8'].append(u)
elif( (cur_user_consumed_item_num >=8) & (cur_user_consumed_item_num<16) ):
user_item_sparsity_dict['8-16'].append(u)
elif( (cur_user_consumed_item_num >=16) & (cur_user_consumed_item_num<32) ):
user_item_sparsity_dict['16-32'].append(u)
elif( (cur_user_consumed_item_num >=32) & (cur_user_consumed_item_num<64) ):
user_item_sparsity_dict['32-64'].append(u)
elif( cur_user_consumed_item_num >=64):
user_item_sparsity_dict['64-'].append(u)
self.user_item_sparsity_dict = user_item_sparsity_dict
self.consumed_items_indices_list = np.array(consumed_items_indices_list).astype(np.int64)
self.consumed_items_values_list = np.array(consumed_items_values_list).astype(np.float32)
self.consumed_items_values_weight_avg_list = np.array(consumed_items_values_weight_avg_list).astype(np.float32) #weight avg
self.consumed_item_num_list = np.array(consumed_item_num_list).astype(np.int64)
def generateConsumedItemsSparseMatrixForItemUser(self):
positive_data_for_item_user = self.positive_data_for_item_user
item_customer_indices_list = []
item_customer_values_list = []
item_customer_values_weight_avg_list = []
item_customer_num_list = []
item_customer_dict = defaultdict(list)
consumed_items_num_dict = self.user_item_num_dict #weight avg
#social_neighbors_num_dict = self.social_neighbors_num_dict #weight avg
item_user_num_dict = self.item_user_num_dict #weight avg
for i in positive_data_for_item_user:
item_customer_dict[i] = sorted(positive_data_for_item_user[i])
item_list = sorted(list(positive_data_for_item_user.keys()))
for item in range(self.conf.num_items):
if item in item_customer_dict:
item_customer_num_list.append(len(item_customer_dict[item]))
else:
item_customer_num_list.append(1)
for i in item_list:
for u in item_customer_dict[i]:
item_customer_indices_list.append([i, u])
item_customer_values_list.append(1.0/len(item_customer_dict[i]))
item_customer_values_weight_avg_list.append(1.0/( np.sqrt(consumed_items_num_dict[u]) * np.sqrt(item_user_num_dict[i]) ))
self.item_customer_indices_list = np.array(item_customer_indices_list).astype(np.int64)
self.item_customer_values_list = np.array(item_customer_values_list).astype(np.float32)
self.item_customer_num_list = np.array(item_customer_num_list).astype(np.int64)
self.item_customer_values_weight_avg_list = np.array(item_customer_values_weight_avg_list).astype(np.float32)
| [((7, 0, 7, 24), 'tensorflow.compat.v1.disable_v2_behavior', 'tf.disable_v2_behavior', ({}, {}), '()', True, 'import tensorflow.compat.v1 as tf\n'), ((80, 20, 80, 36), 'collections.defaultdict', 'defaultdict', ({(80, 32, 80, 35): 'int'}, {}), '(int)', False, 'from collections import defaultdict\n'), ((90, 24, 90, 40), 'collections.defaultdict', 'defaultdict', ({(90, 36, 90, 39): 'set'}, {}), '(set)', False, 'from collections import defaultdict\n'), ((91, 29, 91, 45), 'collections.defaultdict', 'defaultdict', ({(91, 41, 91, 44): 'set'}, {}), '(set)', False, 'from collections import defaultdict\n'), ((112, 38, 112, 54), 'collections.defaultdict', 'defaultdict', ({(112, 50, 112, 53): 'set'}, {}), '(set)', False, 'from collections import defaultdict\n'), ((113, 29, 113, 45), 'collections.defaultdict', 'defaultdict', ({(113, 41, 113, 44): 'set'}, {}), '(set)', False, 'from collections import defaultdict\n'), ((136, 24, 136, 40), 'collections.defaultdict', 'defaultdict', ({(136, 36, 136, 39): 'set'}, {}), '(set)', False, 'from collections import defaultdict\n'), ((168, 25, 168, 55), 'numpy.reshape', 'np.reshape', ({(168, 36, 168, 45): 'user_list', (168, 47, 168, 54): '[-1, 1]'}, {}), '(user_list, [-1, 1])', True, 'import numpy as np\n'), ((169, 25, 169, 55), 'numpy.reshape', 'np.reshape', ({(169, 36, 169, 45): 'item_list', (169, 47, 169, 54): '[-1, 1]'}, {}), '(item_list, [-1, 1])', True, 'import numpy as np\n'), ((170, 27, 170, 59), 'numpy.reshape', 'np.reshape', ({(170, 38, 170, 49): 'labels_list', (170, 51, 170, 58): '[-1, 1]'}, {}), '(labels_list, [-1, 1])', True, 'import numpy as np\n'), ((199, 25, 199, 55), 'numpy.reshape', 'np.reshape', ({(199, 36, 199, 45): 'user_list', (199, 47, 199, 54): '[-1, 1]'}, {}), '(user_list, [-1, 1])', True, 'import numpy as np\n'), ((200, 25, 200, 55), 'numpy.reshape', 'np.reshape', ({(200, 36, 200, 45): 'item_list', (200, 47, 200, 54): '[-1, 1]'}, {}), '(item_list, [-1, 1])', True, 'import numpy as np\n'), ((201, 27, 201, 59), 'numpy.reshape', 'np.reshape', ({(201, 38, 201, 49): 'labels_list', (201, 51, 201, 58): '[-1, 1]'}, {}), '(labels_list, [-1, 1])', True, 'import numpy as np\n'), ((209, 21, 209, 38), 'collections.defaultdict', 'defaultdict', ({(209, 33, 209, 37): 'list'}, {}), '(list)', False, 'from collections import defaultdict\n'), ((216, 29, 216, 59), 'numpy.reshape', 'np.reshape', ({(216, 40, 216, 49): 'user_list', (216, 51, 216, 58): '[-1, 1]'}, {}), '(user_list, [-1, 1])', True, 'import numpy as np\n'), ((217, 29, 217, 59), 'numpy.reshape', 'np.reshape', ({(217, 40, 217, 49): 'item_list', (217, 51, 217, 58): '[-1, 1]'}, {}), '(item_list, [-1, 1])', True, 'import numpy as np\n'), ((228, 28, 228, 45), 'collections.defaultdict', 'defaultdict', ({(228, 40, 228, 44): 'list'}, {}), '(list)', False, 'from collections import defaultdict\n'), ((259, 29, 259, 59), 'numpy.reshape', 'np.reshape', ({(259, 40, 259, 49): 'user_list', (259, 51, 259, 58): '[-1, 1]'}, {}), '(user_list, [-1, 1])', True, 'import numpy as np\n'), ((260, 29, 260, 59), 'numpy.reshape', 'np.reshape', ({(260, 40, 260, 49): 'item_list', (260, 51, 260, 58): '[-1, 1]'}, {}), '(item_list, [-1, 1])', True, 'import numpy as np\n'), ((267, 27, 267, 43), 'collections.defaultdict', 'defaultdict', ({(267, 39, 267, 42): 'set'}, {}), '(set)', False, 'from collections import defaultdict\n'), ((268, 36, 268, 52), 'collections.defaultdict', 'defaultdict', ({(268, 48, 268, 51): 'set'}, {}), '(set)', False, 'from collections import defaultdict\n'), ((285, 24, 285, 40), 'collections.defaultdict', 
'defaultdict', ({(285, 36, 285, 39): 'set'}, {}), '(set)', False, 'from collections import defaultdict\n'), ((286, 29, 286, 45), 'collections.defaultdict', 'defaultdict', ({(286, 41, 286, 44): 'set'}, {}), '(set)', False, 'from collections import defaultdict\n'), ((311, 32, 311, 49), 'collections.defaultdict', 'defaultdict', ({(311, 44, 311, 48): 'list'}, {}), '(list)', False, 'from collections import defaultdict\n'), ((313, 42, 313, 58), 'collections.defaultdict', 'defaultdict', ({(313, 54, 313, 57): 'set'}, {}), '(set)', False, 'from collections import defaultdict\n'), ((376, 30, 376, 47), 'collections.defaultdict', 'defaultdict', ({(376, 42, 376, 46): 'list'}, {}), '(list)', False, 'from collections import defaultdict\n'), ((377, 42, 377, 58), 'collections.defaultdict', 'defaultdict', ({(377, 54, 377, 57): 'set'}, {}), '(set)', False, 'from collections import defaultdict\n'), ((440, 29, 440, 46), 'collections.defaultdict', 'defaultdict', ({(440, 41, 440, 45): 'list'}, {}), '(list)', False, 'from collections import defaultdict\n'), ((142, 20, 142, 48), 'numpy.random.randint', 'np.random.randint', ({(142, 38, 142, 47): 'num_items'}, {}), '(num_items)', True, 'import numpy as np\n'), ((231, 20, 231, 48), 'numpy.random.randint', 'np.random.randint', ({(231, 38, 231, 47): 'num_items'}, {}), '(num_items)', True, 'import numpy as np\n'), ((361, 45, 361, 84), 'numpy.array', 'np.array', ({(361, 54, 361, 83): 'social_neighbors_indices_list'}, {}), '(social_neighbors_indices_list)', True, 'import numpy as np\n'), ((362, 44, 362, 82), 'numpy.array', 'np.array', ({(362, 53, 362, 81): 'social_neighbors_values_list'}, {}), '(social_neighbors_values_list)', True, 'import numpy as np\n'), ((363, 55, 363, 104), 'numpy.array', 'np.array', ({(363, 64, 363, 103): 'social_neighbors_values_weight_avg_list'}, {}), '(social_neighbors_values_weight_avg_list)', True, 'import numpy as np\n'), ((364, 40, 364, 74), 'numpy.array', 'np.array', ({(364, 49, 364, 73): 'social_neighbor_num_list'}, {}), '(social_neighbor_num_list)', True, 'import numpy as np\n'), ((427, 43, 427, 80), 'numpy.array', 'np.array', ({(427, 52, 427, 79): 'consumed_items_indices_list'}, {}), '(consumed_items_indices_list)', True, 'import numpy as np\n'), ((428, 42, 428, 78), 'numpy.array', 'np.array', ({(428, 51, 428, 77): 'consumed_items_values_list'}, {}), '(consumed_items_values_list)', True, 'import numpy as np\n'), ((429, 53, 429, 100), 'numpy.array', 'np.array', ({(429, 62, 429, 99): 'consumed_items_values_weight_avg_list'}, {}), '(consumed_items_values_weight_avg_list)', True, 'import numpy as np\n'), ((430, 38, 430, 70), 'numpy.array', 'np.array', ({(430, 47, 430, 69): 'consumed_item_num_list'}, {}), '(consumed_item_num_list)', True, 'import numpy as np\n'), ((462, 42, 462, 78), 'numpy.array', 'np.array', ({(462, 51, 462, 77): 'item_customer_indices_list'}, {}), '(item_customer_indices_list)', True, 'import numpy as np\n'), ((463, 41, 463, 76), 'numpy.array', 'np.array', ({(463, 50, 463, 75): 'item_customer_values_list'}, {}), '(item_customer_values_list)', True, 'import numpy as np\n'), ((464, 38, 464, 70), 'numpy.array', 'np.array', ({(464, 47, 464, 69): 'item_customer_num_list'}, {}), '(item_customer_num_list)', True, 'import numpy as np\n'), ((465, 52, 465, 98), 'numpy.array', 'np.array', ({(465, 61, 465, 97): 'item_customer_values_weight_avg_list'}, {}), '(item_customer_values_weight_avg_list)', True, 'import numpy as np\n'), ((144, 24, 144, 52), 'numpy.random.randint', 'np.random.randint', ({(144, 42, 144, 51): 'num_items'}, {}), 
'(num_items)', True, 'import numpy as np\n'), ((233, 24, 233, 52), 'numpy.random.randint', 'np.random.randint', ({(233, 42, 233, 51): 'num_items'}, {}), '(num_items)', True, 'import numpy as np\n'), ((342, 68, 342, 108), 'numpy.sqrt', 'np.sqrt', ({(342, 76, 342, 107): 'social_neighbors_num_dict[user]'}, {}), '(social_neighbors_num_dict[user])', True, 'import numpy as np\n'), ((342, 109, 342, 151), 'numpy.sqrt', 'np.sqrt', ({(342, 117, 342, 150): 'social_neighbors_num_dict[friend]'}, {}), '(social_neighbors_num_dict[friend])', True, 'import numpy as np\n'), ((409, 68, 409, 103), 'numpy.sqrt', 'np.sqrt', ({(409, 76, 409, 102): 'consumed_items_num_dict[u]'}, {}), '(consumed_items_num_dict[u])', True, 'import numpy as np\n'), ((409, 107, 409, 137), 'numpy.sqrt', 'np.sqrt', ({(409, 115, 409, 136): 'item_user_num_dict[i]'}, {}), '(item_user_num_dict[i])', True, 'import numpy as np\n'), ((460, 66, 460, 101), 'numpy.sqrt', 'np.sqrt', ({(460, 74, 460, 100): 'consumed_items_num_dict[u]'}, {}), '(consumed_items_num_dict[u])', True, 'import numpy as np\n'), ((460, 105, 460, 135), 'numpy.sqrt', 'np.sqrt', ({(460, 113, 460, 134): 'item_user_num_dict[i]'}, {}), '(item_user_num_dict[i])', True, 'import numpy as np\n')] |
iosurodri/annotated-transformer | src/models/VanillaTransformer.py | e5a7e27067d08c09f51b57bbf2824fbcd80ae4d9 | import torch.nn as nn
import torch.nn.functional as F
import copy
from src.layers.layers import Encoder, EncoderLayer, Decoder, DecoderLayer, PositionwiseFeedForward
from src.layers.preprocessing import Embeddings, PositionalEncoding
from src.layers.attention import MultiHeadedAttention
### Generic EncoderDecoder structure:
class EncoderDecoder(nn.Module):
"""
A standard Encoder-Decoder architecture. Base for this and many
other models.
"""
def __init__(self, encoder, decoder, src_embed, tgt_embed, generator):
super(EncoderDecoder, self).__init__()
self.encoder = encoder
self.decoder = decoder
self.src_embed = src_embed
self.tgt_embed = tgt_embed
self.generator = generator
def forward(self, src, tgt, src_mask, tgt_mask):
"Take in and process masked src and target sequences."
encoded_src = self.encode(src, src_mask)
return self.decode(encoded_src, src_mask, tgt, tgt_mask)
def encode(self, src, src_mask):
embedded_src = self.src_embed(src)
return self.encoder(embedded_src, src_mask)
def decode(self, memory, src_mask, tgt, tgt_mask):
embedded_tgt = self.tgt_embed(tgt)
return self.decoder(embedded_tgt, memory, src_mask, tgt_mask)
class Generator(nn.Module):
"Define standard linear + softmax generation step."
def __init__(self, d_model, vocab):
super(Generator, self).__init__()
self.proj = nn.Linear(d_model, vocab)
def forward(self, x):
return F.log_softmax(self.proj(x), dim=-1)
def make_model(src_vocab, tgt_vocab, N=6, d_model=512, d_ff=2048, h=8, dropout=0.1, alpha=0.5):
"Helper: Construct a model from hyperparameters."
c = copy.deepcopy
attn = MultiHeadedAttention(h, d_model, alpha=alpha)
ff = PositionwiseFeedForward(d_model, d_ff, dropout)
position = PositionalEncoding(d_model, dropout)
model = EncoderDecoder(
Encoder(EncoderLayer(d_model, c(attn), c(ff), dropout), N),
Decoder(DecoderLayer(d_model, c(attn), c(attn), c(ff), dropout), N),
nn.Sequential(Embeddings(d_model, src_vocab), c(position)),
nn.Sequential(Embeddings(d_model, tgt_vocab), c(position)),
Generator(d_model, tgt_vocab)
)
# This was important from their code.
# Initialize parameters with Glorot / fan_avg.
for p in model.parameters():
if p.dim() > 1:
            nn.init.xavier_uniform_(p)
return model
if __name__ == '__main__':
# Small example model
tmp_model = make_model(10, 10, 2)
print(tmp_model)
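
    # A minimal forward-pass sketch (a rough illustration, not part of the original file;
    # the dummy ids, lengths and mask shapes below are assumptions):
    #
    #     import torch
    #     src = torch.randint(0, 10, (1, 7))             # (batch, src_len) token ids
    #     tgt = torch.randint(0, 10, (1, 5))             # (batch, tgt_len) token ids
    #     src_mask = torch.ones(1, 1, 7)                 # attend to all source positions
    #     tgt_mask = torch.tril(torch.ones(1, 5, 5))     # causal mask over target positions
    #     out = tmp_model(src, tgt, src_mask, tgt_mask)  # decoder hidden states
    #     log_probs = tmp_model.generator(out)           # per-position log-probs over the vocab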
| [((52, 11, 52, 56), 'src.layers.attention.MultiHeadedAttention', 'MultiHeadedAttention', (), '', False, 'from src.layers.attention import MultiHeadedAttention\n'), ((53, 9, 53, 56), 'src.layers.layers.PositionwiseFeedForward', 'PositionwiseFeedForward', ({(53, 33, 53, 40): 'd_model', (53, 42, 53, 46): 'd_ff', (53, 48, 53, 55): 'dropout'}, {}), '(d_model, d_ff, dropout)', False, 'from src.layers.layers import Encoder, EncoderLayer, Decoder, DecoderLayer, PositionwiseFeedForward\n'), ((54, 15, 54, 51), 'src.layers.preprocessing.PositionalEncoding', 'PositionalEncoding', ({(54, 34, 54, 41): 'd_model', (54, 43, 54, 50): 'dropout'}, {}), '(d_model, dropout)', False, 'from src.layers.preprocessing import Embeddings, PositionalEncoding\n'), ((43, 20, 43, 45), 'torch.nn.Linear', 'nn.Linear', ({(43, 30, 43, 37): 'd_model', (43, 39, 43, 44): 'vocab'}, {}), '(d_model, vocab)', True, 'import torch.nn as nn\n'), ((58, 22, 58, 52), 'src.layers.preprocessing.Embeddings', 'Embeddings', ({(58, 33, 58, 40): 'd_model', (58, 42, 58, 51): 'src_vocab'}, {}), '(d_model, src_vocab)', False, 'from src.layers.preprocessing import Embeddings, PositionalEncoding\n'), ((59, 22, 59, 52), 'src.layers.preprocessing.Embeddings', 'Embeddings', ({(59, 33, 59, 40): 'd_model', (59, 42, 59, 51): 'tgt_vocab'}, {}), '(d_model, tgt_vocab)', False, 'from src.layers.preprocessing import Embeddings, PositionalEncoding\n'), ((67, 12, 67, 37), 'torch.nn.init.xavier_uniform', 'nn.init.xavier_uniform', ({(67, 35, 67, 36): 'p'}, {}), '(p)', True, 'import torch.nn as nn\n')] |
YileC928/finm-portfolio-2021 | venv/lib/python3.8/site-packages/arch/tests/univariate/test_recursions.py | 3fa1e97423fa731bce0cad3457807e1873120891 | import os
import timeit
from typing import List
import numpy as np
from numpy.random import RandomState
from numpy.testing import assert_allclose, assert_almost_equal
import pytest
from scipy.special import gamma
import arch.univariate.recursions_python as recpy
CYTHON_COVERAGE = os.environ.get("ARCH_CYTHON_COVERAGE", "0") in ("true", "1", "True")
try:
import arch.univariate.recursions as rec_cython
missing_extension = False
except ImportError:
missing_extension = True
if missing_extension:
rec = recpy
else:
rec = rec_cython
try:
import numba # noqa
missing_numba = False
except ImportError:
missing_numba = True
pytestmark = pytest.mark.filterwarnings("ignore::arch.compat.numba.PerformanceWarning")
class Timer(object):
def __init__(
self,
first,
first_name,
second,
second_name,
model_name,
setup,
repeat=5,
number=10,
) -> None:
self.first_code = first
self.second_code = second
self.setup = setup
self.first_name = first_name
self.second_name = second_name
self.model_name = model_name
self.repeat = repeat
self.number = number
self._run = False
self.times: List[float] = []
self._codes = [first, second]
self.ratio = np.inf
def display(self):
if not self._run:
self.time()
self.ratio = self.times[0] / self.times[1]
title = self.model_name + " timing"
print("\n" + title)
print("-" * len(title))
print(self.first_name + ": " + "{:0.3f} ms".format(1000 * self.times[0]))
print(self.second_name + ": " + "{:0.3f} ms".format(1000 * self.times[1]))
if self.ratio < 1:
print(
"{0} is {1:0.1f}% faster".format(
self.first_name, 100 * (1 / self.ratio - 1)
)
)
else:
print(
"{0} is {1:0.1f}% faster".format(
self.second_name, 100 * (self.ratio - 1)
)
)
print(
self.first_name
+ "/"
+ self.second_name
+ " Ratio: {:0.3f}\n".format(self.ratio)
)
def time(self):
self.times = []
for code in self._codes:
timer = timeit.Timer(code, setup=self.setup)
self.times.append(min(timer.repeat(self.repeat, self.number)))
class TestRecursions(object):
@classmethod
def setup_class(cls):
cls.nobs = 1000
cls.rng = RandomState(12345)
cls.resids = cls.rng.standard_normal(cls.nobs)
cls.sigma2 = np.zeros_like(cls.resids)
var = cls.resids.var()
var_bounds = np.array([var / 1000000.0, var * 1000000.0])
cls.var_bounds = np.ones((cls.nobs, 2)) * var_bounds
cls.backcast = 1.0
cls.timer_setup = """
import numpy as np
import arch.univariate.recursions as rec
import arch.univariate.recursions_python as recpy
nobs = 10000
resids = np.random.standard_normal(nobs)
sigma2 = np.zeros_like(resids)
var = resids.var()
backcast = 1.0
var_bounds = np.array([var / 1000000.0, var * 1000000.0])
var_bounds = np.ones((nobs, 2)) * var_bounds
"""
def test_garch(self):
nobs, resids = self.nobs, self.resids
sigma2, backcast = self.sigma2, self.backcast
parameters = np.array([0.1, 0.4, 0.3, 0.2])
fresids = resids ** 2.0
sresids = np.sign(resids)
recpy.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
1,
1,
1,
nobs,
backcast,
self.var_bounds,
)
sigma2_numba = sigma2.copy()
recpy.garch_recursion_python(
parameters,
fresids,
sresids,
sigma2,
1,
1,
1,
nobs,
backcast,
self.var_bounds,
)
sigma2_python = sigma2.copy()
rec.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
1,
1,
1,
nobs,
backcast,
self.var_bounds,
)
assert_almost_equal(sigma2_numba, sigma2)
assert_almost_equal(sigma2_python, sigma2)
parameters = np.array([0.1, -0.4, 0.3, 0.2])
recpy.garch_recursion_python(
parameters,
fresids,
sresids,
sigma2,
1,
1,
1,
nobs,
backcast,
self.var_bounds,
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
parameters = np.array([0.1, 0.4, 3, 2])
recpy.garch_recursion_python(
parameters,
fresids,
sresids,
sigma2,
1,
1,
1,
nobs,
backcast,
self.var_bounds,
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
parameters = np.array([0.1, 0.4, 0.3, 0.2])
mod_fresids = fresids.copy()
mod_fresids[:1] = np.inf
recpy.garch_recursion_python(
parameters,
mod_fresids,
sresids,
sigma2,
1,
1,
1,
nobs,
backcast,
self.var_bounds,
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
rec.garch_recursion(
parameters,
mod_fresids,
sresids,
sigma2,
1,
1,
1,
nobs,
backcast,
self.var_bounds,
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
def test_harch(self):
nobs, resids = self.nobs, self.resids
sigma2, backcast = self.sigma2, self.backcast
parameters = np.array([0.1, 0.4, 0.3, 0.2])
lags = np.array([1, 5, 22], dtype=np.int32)
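        # HARCH aggregates past squared residuals over horizons of 1, 5 and 22 observations
        # (roughly daily/weekly/monthly components), with one component weight per horizon.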
recpy.harch_recursion_python(
parameters, resids, sigma2, lags, nobs, backcast, self.var_bounds
)
sigma2_python = sigma2.copy()
recpy.harch_recursion(
parameters, resids, sigma2, lags, nobs, backcast, self.var_bounds
)
sigma2_numba = sigma2.copy()
rec.harch_recursion(
parameters, resids, sigma2, lags, nobs, backcast, self.var_bounds
)
assert_almost_equal(sigma2_numba, sigma2)
assert_almost_equal(sigma2_python, sigma2)
parameters = np.array([-0.1, -0.4, 0.3, 0.2])
recpy.harch_recursion_python(
parameters, resids, sigma2, lags, nobs, backcast, self.var_bounds
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
parameters = np.array([0.1, 4e8, 3, 2])
recpy.harch_recursion_python(
parameters, resids, sigma2, lags, nobs, backcast, self.var_bounds
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
parameters = np.array([0.1, 4e8, 3, 2])
mod_resids = resids.copy()
mod_resids[:10] = np.inf
recpy.harch_recursion_python(
parameters, mod_resids, sigma2, lags, nobs, backcast, self.var_bounds
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
rec.harch_recursion(
parameters, mod_resids, sigma2, lags, nobs, backcast, self.var_bounds
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
def test_arch(self):
nobs, resids = self.nobs, self.resids
sigma2, backcast = self.sigma2, self.backcast
parameters = np.array([0.1, 0.4, 0.3, 0.2])
p = 3
recpy.arch_recursion_python(
parameters, resids, sigma2, p, nobs, backcast, self.var_bounds
)
sigma2_python = sigma2.copy()
recpy.arch_recursion(
parameters, resids, sigma2, p, nobs, backcast, self.var_bounds
)
sigma2_numba = sigma2.copy()
rec.arch_recursion(
parameters, resids, sigma2, p, nobs, backcast, self.var_bounds
)
assert_almost_equal(sigma2_numba, sigma2)
assert_almost_equal(sigma2_python, sigma2)
parameters = np.array([-0.1, -0.4, 0.3, 0.2])
recpy.arch_recursion_python(
parameters, resids, sigma2, p, nobs, backcast, self.var_bounds
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
parameters = np.array([0.1, 4e8, 3, 2])
recpy.arch_recursion_python(
parameters, resids, sigma2, p, nobs, backcast, self.var_bounds
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
mod_resids = resids.copy()
mod_resids[:10] = np.inf
recpy.arch_recursion_python(
parameters, mod_resids, sigma2, p, nobs, backcast, self.var_bounds
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
rec.arch_recursion(
parameters, mod_resids, sigma2, p, nobs, backcast, self.var_bounds
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
def test_garch_power_1(self):
nobs, resids = self.nobs, self.resids
sigma2, backcast = self.sigma2, self.backcast
parameters = np.array([0.1, 0.4, 0.3, 0.2])
fresids = np.abs(resids) ** 1.0
sresids = np.sign(resids)
recpy.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
1,
1,
1,
nobs,
backcast,
self.var_bounds,
)
sigma2_python = sigma2.copy()
rec.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
1,
1,
1,
nobs,
backcast,
self.var_bounds,
)
assert_almost_equal(sigma2_python, sigma2)
def test_garch_direct(self):
nobs, resids = self.nobs, self.resids
sigma2, backcast = self.sigma2, self.backcast
parameters = np.array([0.1, 0.4, 0.3, 0.2])
fresids = np.abs(resids) ** 2.0
sresids = np.sign(resids)
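        # Reproduce the GJR-GARCH(1,1,1) recursion by hand for comparison:
        #   sigma2[t] = omega + alpha * resids[t-1]**2
        #               + gamma * resids[t-1]**2 * (resids[t-1] < 0) + beta * sigma2[t-1]
        # with the backcast standing in for the unobserved pre-sample terms at t == 0.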
for t in range(nobs):
if t == 0:
sigma2[t] = parameters.dot(
np.array([1.0, backcast, 0.5 * backcast, backcast])
)
else:
var = np.array(
[
1.0,
resids[t - 1] ** 2.0,
resids[t - 1] ** 2.0 * (resids[t - 1] < 0),
sigma2[t - 1],
]
)
sigma2[t] = parameters.dot(var)
sigma2_python = sigma2.copy()
rec.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
1,
1,
1,
nobs,
backcast,
self.var_bounds,
)
assert_almost_equal(sigma2_python, sigma2)
def test_garch_no_q(self):
nobs, resids = self.nobs, self.resids
sigma2, backcast = self.sigma2, self.backcast
parameters = np.array([0.1, 0.4, 0.3])
fresids = resids ** 2.0
sresids = np.sign(resids)
recpy.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
1,
1,
0,
nobs,
backcast,
self.var_bounds,
)
sigma2_python = sigma2.copy()
rec.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
1,
1,
0,
nobs,
backcast,
self.var_bounds,
)
assert_almost_equal(sigma2_python, sigma2)
def test_garch_no_p(self):
nobs, resids = self.nobs, self.resids
sigma2, backcast = self.sigma2, self.backcast
parameters = np.array([0.1, 0.4, 0.3])
fresids = resids ** 2.0
sresids = np.sign(resids)
recpy.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
0,
1,
1,
nobs,
backcast,
self.var_bounds,
)
sigma2_python = sigma2.copy()
rec.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
0,
1,
1,
nobs,
backcast,
self.var_bounds,
)
assert_almost_equal(sigma2_python, sigma2)
def test_garch_no_o(self):
nobs, resids = self.nobs, self.resids
sigma2, backcast = self.sigma2, self.backcast
parameters = np.array([0.1, 0.4, 0.3, 0.2])
fresids = resids ** 2.0
sresids = np.sign(resids)
recpy.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
1,
0,
1,
nobs,
backcast,
self.var_bounds,
)
sigma2_python = sigma2.copy()
rec.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
1,
0,
1,
nobs,
backcast,
self.var_bounds,
)
assert_almost_equal(sigma2_python, sigma2)
def test_garch_arch(self):
backcast = self.backcast
nobs, resids, sigma2 = self.nobs, self.resids, self.sigma2
parameters = np.array([0.1, 0.4, 0.3, 0.2])
fresids = resids ** 2.0
sresids = np.sign(resids)
rec.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
3,
0,
0,
nobs,
backcast,
self.var_bounds,
)
sigma2_garch = sigma2.copy()
rec.arch_recursion(
parameters, resids, sigma2, 3, nobs, backcast, self.var_bounds
)
assert_almost_equal(sigma2_garch, sigma2)
def test_bounds(self):
nobs, resids = self.nobs, self.resids
sigma2, backcast = self.sigma2, self.backcast
parameters = np.array([1e100, 0.4, 0.3, 0.2])
lags = np.array([1, 5, 22], dtype=np.int32)
recpy.harch_recursion(
parameters, resids, sigma2, lags, nobs, backcast, self.var_bounds
)
sigma2_python = sigma2.copy()
rec.harch_recursion(
parameters, resids, sigma2, lags, nobs, backcast, self.var_bounds
)
assert_almost_equal(sigma2_python, sigma2)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
parameters = np.array([-1e100, 0.4, 0.3, 0.2])
recpy.harch_recursion(
parameters, resids, sigma2, lags, nobs, backcast, self.var_bounds
)
sigma2_python = sigma2.copy()
rec.harch_recursion(
parameters, resids, sigma2, lags, nobs, backcast, self.var_bounds
)
assert_almost_equal(sigma2_python, sigma2)
assert_almost_equal(sigma2, self.var_bounds[:, 0])
parameters = np.array([1e100, 0.4, 0.3, 0.2])
fresids = resids ** 2.0
sresids = np.sign(resids)
recpy.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
1,
1,
1,
nobs,
backcast,
self.var_bounds,
)
sigma2_python = sigma2.copy()
rec.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
1,
1,
1,
nobs,
backcast,
self.var_bounds,
)
assert_almost_equal(sigma2_python, sigma2)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
parameters = np.array([-1e100, 0.4, 0.3, 0.2])
recpy.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
1,
1,
1,
nobs,
backcast,
self.var_bounds,
)
sigma2_python = sigma2.copy()
rec.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
1,
1,
1,
nobs,
backcast,
self.var_bounds,
)
assert_almost_equal(sigma2_python, sigma2)
assert_almost_equal(sigma2, self.var_bounds[:, 0])
parameters = np.array([1e100, 0.4, 0.3, 0.2])
recpy.arch_recursion(
parameters, resids, sigma2, 3, nobs, backcast, self.var_bounds
)
sigma2_python = sigma2.copy()
rec.arch_recursion(
parameters, resids, sigma2, 3, nobs, backcast, self.var_bounds
)
assert_almost_equal(sigma2_python, sigma2)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
parameters = np.array([-1e100, 0.4, 0.3, 0.2])
recpy.arch_recursion(
parameters, resids, sigma2, 3, nobs, backcast, self.var_bounds
)
sigma2_python = sigma2.copy()
rec.arch_recursion(
parameters, resids, sigma2, 3, nobs, backcast, self.var_bounds
)
assert_almost_equal(sigma2_python, sigma2)
assert_almost_equal(sigma2, self.var_bounds[:, 0])
def test_egarch(self):
nobs = self.nobs
parameters = np.array([0.0, 0.1, -0.1, 0.95])
resids, sigma2 = self.resids, self.sigma2
p = o = q = 1
backcast = 0.0
var_bounds = self.var_bounds
lnsigma2 = np.empty_like(sigma2)
std_resids = np.empty_like(sigma2)
abs_std_resids = np.empty_like(sigma2)
recpy.egarch_recursion(
parameters,
resids,
sigma2,
p,
o,
q,
nobs,
backcast,
var_bounds,
lnsigma2,
std_resids,
abs_std_resids,
)
sigma2_numba = sigma2.copy()
recpy.egarch_recursion_python(
parameters,
resids,
sigma2,
p,
o,
q,
nobs,
backcast,
var_bounds,
lnsigma2,
std_resids,
abs_std_resids,
)
sigma2_python = sigma2.copy()
rec.egarch_recursion(
parameters,
resids,
sigma2,
p,
o,
q,
nobs,
backcast,
var_bounds,
lnsigma2,
std_resids,
abs_std_resids,
)
assert_almost_equal(sigma2_numba, sigma2)
assert_almost_equal(sigma2_python, sigma2)
norm_const = np.sqrt(2 / np.pi)
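        # Recompute the EGARCH recursion by hand and compare with the library output.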
for t in range(nobs):
lnsigma2[t] = parameters[0]
if t == 0:
lnsigma2[t] += parameters[3] * backcast
else:
stdresid = resids[t - 1] / np.sqrt(sigma2[t - 1])
lnsigma2[t] += parameters[1] * (np.abs(stdresid) - norm_const)
lnsigma2[t] += parameters[2] * stdresid
lnsigma2[t] += parameters[3] * lnsigma2[t - 1]
sigma2[t] = np.exp(lnsigma2[t])
assert_almost_equal(sigma2_python, sigma2)
parameters = np.array([-100.0, 0.1, -0.1, 0.95])
recpy.egarch_recursion_python(
parameters,
resids,
sigma2,
p,
o,
q,
nobs,
backcast,
var_bounds,
lnsigma2,
std_resids,
abs_std_resids,
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
parameters = np.array([0.0, 0.1, -0.1, 9.5])
recpy.egarch_recursion_python(
parameters,
resids,
sigma2,
p,
o,
q,
nobs,
backcast,
var_bounds,
lnsigma2,
std_resids,
abs_std_resids,
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
parameters = np.array([0.0, 0.1, -0.1, 0.95])
mod_resids = resids.copy()
mod_resids[:1] = np.inf
recpy.egarch_recursion_python(
parameters,
            mod_resids,  # residuals with an inf in the first observation
sigma2,
p,
o,
q,
nobs,
backcast,
var_bounds,
lnsigma2,
std_resids,
abs_std_resids,
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
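
    # MIDAS recursion with hyperbolic (gamma-ratio) lag weights: implementations
    # must agree, and invalid parameters or inputs must respect the bounds.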
def test_midas_hyperbolic(self):
nobs, resids = self.nobs, self.resids
sigma2, backcast = self.sigma2, self.backcast
parameters = np.array([0.1, 0.8, 0])
j = np.arange(1, 22 + 1)
weights = gamma(j + 0.6) / (gamma(j + 1) * gamma(0.6))
weights = weights / weights.sum()
recpy.midas_recursion(
parameters, weights, resids, sigma2, nobs, backcast, self.var_bounds
)
sigma2_numba = sigma2.copy()
recpy.midas_recursion_python(
parameters, weights, resids, sigma2, nobs, backcast, self.var_bounds
)
sigma2_python = sigma2.copy()
rec.midas_recursion(
parameters, weights, resids, sigma2, nobs, backcast, self.var_bounds
)
assert_almost_equal(sigma2_numba, sigma2)
assert_almost_equal(sigma2_python, sigma2)
mod_resids = resids.copy()
mod_resids[:10] = np.inf
recpy.midas_recursion_python(
parameters, weights, mod_resids, sigma2, nobs, backcast, self.var_bounds
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
parameters = np.array([0.1, 10e10, 0])
j = np.arange(1, 22 + 1)
weights = gamma(j + 0.6) / (gamma(j + 1) * gamma(0.6))
weights = weights / weights.sum()
recpy.midas_recursion_python(
parameters, weights, resids, sigma2, nobs, backcast, self.var_bounds
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
rec.midas_recursion(
parameters, weights, resids, sigma2, nobs, backcast, self.var_bounds
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
parameters = np.array([0.1, -0.4, 0])
recpy.midas_recursion_python(
parameters, weights, resids, sigma2, nobs, backcast, self.var_bounds
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
rec.midas_recursion(
parameters, weights, resids, sigma2, nobs, backcast, self.var_bounds
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
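
    # FIGARCH recursion: the output must match a direct computation from the
    # truncated ARCH(inf) weights, and all implementations must agree.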
def test_figarch_recursion(self):
nobs, resids = self.nobs, self.resids
sigma2, backcast = self.sigma2, self.backcast
parameters = np.array([1.0, 0.2, 0.4, 0.3])
fresids = resids ** 2
p = q = 1
trunc_lag = 1000
rec.figarch_recursion(
parameters,
fresids,
sigma2,
p,
q,
nobs,
trunc_lag,
backcast,
self.var_bounds,
)
lam = rec.figarch_weights(parameters[1:], p, q, trunc_lag=trunc_lag)
lam_rev = lam[::-1]
omega_tilde = parameters[0] / (1 - parameters[-1])
sigma2_direct = np.empty_like(sigma2)
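        # Direct computation from the truncated ARCH(inf) representation, using the
        # backcast value in place of pre-sample squared residuals.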
for t in range(nobs):
backcasts = trunc_lag - t
sigma2_direct[t] = omega_tilde
if backcasts:
sigma2_direct[t] += backcast * lam_rev[:backcasts].sum()
if t:
sigma2_direct[t] += np.sum(lam_rev[-t:] * fresids[max(0, t - 1000) : t])
assert_almost_equal(sigma2_direct, sigma2)
recpy.figarch_recursion(
parameters,
fresids,
sigma2,
p,
q,
nobs,
trunc_lag,
backcast,
self.var_bounds,
)
sigma2_numba = sigma2.copy()
recpy.figarch_recursion_python(
parameters,
fresids,
sigma2,
p,
q,
nobs,
trunc_lag,
backcast,
self.var_bounds,
)
sigma2_python = sigma2.copy()
rec.figarch_recursion(
parameters,
fresids,
sigma2,
p,
q,
nobs,
trunc_lag,
backcast,
self.var_bounds,
)
assert_almost_equal(sigma2_numba, sigma2)
assert_almost_equal(sigma2_python, sigma2)
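
    # figarch_weights must reproduce the weight recursion computed directly below.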
def test_figarch_weights(self):
parameters = np.array([1.0, 0.4])
lam = rec.figarch_weights(parameters[1:], 0, 0, trunc_lag=1000)
lam_direct = np.empty_like(lam)
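        # lam[0] = d and lam[i] = ((i - d) / (i + 1)) * lam[i - 1]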
lam_direct[0] = parameters[-1]
for i in range(1, 1000):
lam_direct[i] = (i - parameters[-1]) / (i + 1) * lam_direct[i - 1]
assert_almost_equal(lam, lam_direct)
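
    # Performance guard: the Numba and Cython GARCH recursions should run within a
    # factor of ten of one another.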
@pytest.mark.skipif(
missing_numba or missing_extension, reason="numba not installed"
)
def test_garch_performance(self):
garch_setup = """
parameters = np.array([.1, .4, .3, .2])
fresids = resids ** 2.0
sresids = np.sign(resids)
"""
garch_first = """
recpy.garch_recursion(parameters, fresids, sresids, sigma2, 1, 1, 1, nobs,
backcast, var_bounds)
"""
garch_second = """
rec.garch_recursion(parameters, fresids, sresids, sigma2, 1, 1, 1, nobs, backcast,
var_bounds)
"""
timer = Timer(
garch_first,
"Numba",
garch_second,
"Cython",
"GARCH",
self.timer_setup + garch_setup,
)
timer.display()
assert timer.ratio < 10.0
if not (missing_numba or CYTHON_COVERAGE):
assert 0.1 < timer.ratio
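
    # Same performance guard for the HARCH recursion.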
@pytest.mark.skipif(
missing_numba or missing_extension, reason="numba not installed"
)
def test_harch_performance(self):
harch_setup = """
parameters = np.array([.1, .4, .3, .2])
lags = np.array([1, 5, 22], dtype=np.int32)
"""
harch_first = """
recpy.harch_recursion(parameters, resids, sigma2, lags, nobs, backcast,
var_bounds)
"""
harch_second = """
rec.harch_recursion(parameters, resids, sigma2, lags, nobs, backcast, var_bounds)
"""
timer = Timer(
harch_first,
"Numba",
harch_second,
"Cython",
"HARCH",
self.timer_setup + harch_setup,
)
timer.display()
assert timer.ratio < 10.0
if not (missing_numba or CYTHON_COVERAGE):
assert 0.1 < timer.ratio
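
    # Same performance guard for the EGARCH recursion.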
@pytest.mark.skipif(
missing_numba or missing_extension, reason="numba not installed"
)
def test_egarch_performance(self):
egarch_setup = """
parameters = np.array([0.0, 0.1, -0.1, 0.95])
p = o = q = 1
backcast = 0.0
lnsigma2 = np.empty_like(sigma2)
std_resids = np.empty_like(sigma2)
abs_std_resids = np.empty_like(sigma2)
"""
egarch_first = """
recpy.egarch_recursion(parameters, resids, sigma2, p, o, q, nobs, backcast,
var_bounds, lnsigma2, std_resids, abs_std_resids)
"""
egarch_second = """
rec.egarch_recursion(parameters, resids, sigma2, p, o, q, nobs, backcast,
var_bounds, lnsigma2, std_resids, abs_std_resids)
"""
timer = Timer(
egarch_first,
"Numba",
egarch_second,
"Cython",
"EGARCH",
self.timer_setup + egarch_setup,
)
timer.display()
assert timer.ratio < 10.0
if not (missing_numba or CYTHON_COVERAGE):
assert 0.1 < timer.ratio
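
    # Same performance guard for the MIDAS recursion.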
@pytest.mark.skipif(
missing_numba or missing_extension, reason="numba not installed"
)
def test_midas_performance(self):
midas_setup = """
from scipy.special import gamma
parameters = np.array([.1, 0.8, 0])
j = np.arange(1,22+1)
weights = gamma(j+0.6) / (gamma(j+1) * gamma(0.6))
weights = weights / weights.sum()
"""
midas_first = """
recpy.midas_recursion(parameters, weights, resids, sigma2, nobs, backcast, var_bounds)
"""
midas_second = """
rec.midas_recursion(parameters, weights, resids, sigma2, nobs, backcast, var_bounds)
"""
timer = Timer(
midas_first,
"Numba",
midas_second,
"Cython",
"MIDAS",
self.timer_setup + midas_setup,
)
timer.display()
assert timer.ratio < 10.0
if not (missing_numba or CYTHON_COVERAGE):
assert 0.1 < timer.ratio
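
    # Same performance guard for the FIGARCH recursion.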
@pytest.mark.skipif(
missing_numba or missing_extension, reason="numba not installed"
)
def test_figarch_performance(self):
        figarch_setup = """
p = q = 1
trunc_lag = 1000
parameters = np.array([1.0, 0.2, 0.2, 0.04])
fresids = resids ** 2.0
"""
        figarch_first = """
recpy.figarch_recursion(parameters, fresids, sigma2, p, q, nobs, trunc_lag, backcast, var_bounds)
"""
        figarch_second = """
rec.figarch_recursion(parameters, fresids, sigma2, p, q, nobs, trunc_lag, backcast, var_bounds)
"""
timer = Timer(
            figarch_first,
"Numba",
            figarch_second,
"Cython",
"FIGARCH",
            self.timer_setup + figarch_setup,
)
timer.display()
assert timer.ratio < 10.0
if not (missing_numba or CYTHON_COVERAGE):
assert 0.1 < timer.ratio
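
    # With delta fixed at 2 and no asymmetry term, the APARCH recursion must
    # reproduce the GARCH(1,1) variances computed above.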
def test_garch_aparch_equiv(self):
parameters = np.array([0.1, 0.1, 0.8])
fresids = self.resids ** 2
sresids = np.sign(self.resids)
sigma2 = np.empty(1000)
p = q = 1
o = 0
recpy.garch_recursion_python(
parameters,
fresids,
sresids,
sigma2,
p,
o,
q,
self.nobs,
self.backcast,
self.var_bounds,
)
sigma2_garch = sigma2.copy()
parameters = np.array([0.1, 0.1, 0.8, 2])
sigma2[:] = np.nan
sigma2_delta = np.empty_like(sigma2)
recpy.aparch_recursion_python(
parameters,
self.resids,
np.abs(self.resids),
sigma2,
sigma2_delta,
p,
o,
q,
self.nobs,
self.backcast,
self.var_bounds,
)
assert_allclose(sigma2_garch, sigma2, atol=1e-6)
sigma2[:] = np.nan
recpy.aparch_recursion(
parameters,
self.resids,
np.abs(self.resids),
sigma2,
sigma2_delta,
p,
o,
q,
self.nobs,
self.backcast,
self.var_bounds,
)
assert_allclose(sigma2_garch, sigma2, atol=1e-6)
sigma2[:] = np.nan
rec.aparch_recursion(
parameters,
self.resids,
np.abs(self.resids),
sigma2,
sigma2_delta,
p,
o,
q,
self.nobs,
self.backcast,
self.var_bounds,
)
assert_allclose(sigma2_garch, sigma2, atol=1e-6)
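
    # Smoke test: the asymmetric APARCH recursion must produce finite variances and
    # identical results from the recpy and Cython implementations.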
def test_asym_aparch_smoke(self):
sigma2 = np.empty(1000)
p = o = q = 1
parameters = np.array([0.1, 0.1, 0.1, 0.8, 1.3])
sigma2[:] = np.nan
sigma2_delta = np.empty_like(sigma2)
recpy.aparch_recursion_python(
parameters,
self.resids,
np.abs(self.resids),
sigma2,
sigma2_delta,
p,
o,
q,
self.nobs,
self.backcast,
self.var_bounds,
)
assert np.all(np.isfinite(sigma2))
sigma2_py = sigma2.copy()
sigma2[:] = np.nan
recpy.aparch_recursion(
parameters,
self.resids,
np.abs(self.resids),
sigma2,
sigma2_delta,
p,
o,
q,
self.nobs,
self.backcast,
self.var_bounds,
)
assert np.all(np.isfinite(sigma2))
assert_allclose(sigma2_py, sigma2)
sigma2[:] = np.nan
rec.aparch_recursion(
parameters,
self.resids,
np.abs(self.resids),
sigma2,
sigma2_delta,
p,
o,
q,
self.nobs,
self.backcast,
self.var_bounds,
)
assert np.all(np.isfinite(sigma2))
assert_allclose(sigma2_py, sigma2)
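

# bounds_check_python clamps values below the lower variance bound, applies a
# logarithmic penalty above the upper bound, and maps inf to a large finite value.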
def test_bounds_check():
var_bounds = np.array([0.1, 10])
assert_almost_equal(recpy.bounds_check_python(-1.0, var_bounds), 0.1)
assert_almost_equal(
recpy.bounds_check_python(20.0, var_bounds), 10 + np.log(20.0 / 10.0)
)
assert_almost_equal(recpy.bounds_check_python(np.inf, var_bounds), 1010.0)
| [((34, 13, 34, 87), 'pytest.mark.filterwarnings', 'pytest.mark.filterwarnings', ({(34, 40, 34, 86): '"""ignore::arch.compat.numba.PerformanceWarning"""'}, {}), "('ignore::arch.compat.numba.PerformanceWarning')", False, 'import pytest\n'), ((13, 18, 13, 61), 'os.environ.get', 'os.environ.get', ({(13, 33, 13, 55): '"""ARCH_CYTHON_COVERAGE"""', (13, 57, 13, 60): '"""0"""'}, {}), "('ARCH_CYTHON_COVERAGE', '0')", False, 'import os\n'), ((911, 5, 913, 5), 'pytest.mark.skipif', 'pytest.mark.skipif', (), '', False, 'import pytest\n'), ((942, 5, 944, 5), 'pytest.mark.skipif', 'pytest.mark.skipif', (), '', False, 'import pytest\n'), ((973, 5, 975, 5), 'pytest.mark.skipif', 'pytest.mark.skipif', (), '', False, 'import pytest\n'), ((1008, 5, 1010, 5), 'pytest.mark.skipif', 'pytest.mark.skipif', (), '', False, 'import pytest\n'), ((1039, 5, 1041, 5), 'pytest.mark.skipif', 'pytest.mark.skipif', (), '', False, 'import pytest\n'), ((1197, 17, 1197, 36), 'numpy.array', 'np.array', ({(1197, 26, 1197, 35): '[0.1, 10]'}, {}), '([0.1, 10])', True, 'import numpy as np\n'), ((101, 18, 101, 36), 'numpy.random.RandomState', 'RandomState', ({(101, 30, 101, 35): '12345'}, {}), '(12345)', False, 'from numpy.random import RandomState\n'), ((103, 21, 103, 46), 'numpy.zeros_like', 'np.zeros_like', ({(103, 35, 103, 45): 'cls.resids'}, {}), '(cls.resids)', True, 'import numpy as np\n'), ((105, 21, 105, 65), 'numpy.array', 'np.array', ({(105, 30, 105, 64): '[var / 1000000.0, var * 1000000.0]'}, {}), '([var / 1000000.0, var * 1000000.0])', True, 'import numpy as np\n'), ((126, 21, 126, 51), 'numpy.array', 'np.array', ({(126, 30, 126, 50): '[0.1, 0.4, 0.3, 0.2]'}, {}), '([0.1, 0.4, 0.3, 0.2])', True, 'import numpy as np\n'), ((128, 18, 128, 33), 'numpy.sign', 'np.sign', ({(128, 26, 128, 32): 'resids'}, {}), '(resids)', True, 'import numpy as np\n'), ((130, 8, 141, 9), 'arch.univariate.recursions_python.garch_recursion', 'recpy.garch_recursion', ({(131, 12, 131, 22): 'parameters', (132, 12, 132, 19): 'fresids', (133, 12, 133, 19): 'sresids', (134, 12, 134, 18): 'sigma2', (135, 12, 135, 13): '(1)', (136, 12, 136, 13): '(1)', (137, 12, 137, 13): '(1)', (138, 12, 138, 16): 'nobs', (139, 12, 139, 20): 'backcast', (140, 12, 140, 27): 'self.var_bounds'}, {}), '(parameters, fresids, sresids, sigma2, 1, 1, 1, nobs,\n backcast, self.var_bounds)', True, 'import arch.univariate.recursions_python as recpy\n'), ((143, 8, 154, 9), 'arch.univariate.recursions_python.garch_recursion_python', 'recpy.garch_recursion_python', ({(144, 12, 144, 22): 'parameters', (145, 12, 145, 19): 'fresids', (146, 12, 146, 19): 'sresids', (147, 12, 147, 18): 'sigma2', (148, 12, 148, 13): '(1)', (149, 12, 149, 13): '(1)', (150, 12, 150, 13): '(1)', (151, 12, 151, 16): 'nobs', (152, 12, 152, 20): 'backcast', (153, 12, 153, 27): 'self.var_bounds'}, {}), '(parameters, fresids, sresids, sigma2, 1, 1, 1,\n nobs, backcast, self.var_bounds)', True, 'import arch.univariate.recursions_python as recpy\n'), ((168, 8, 168, 49), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', ({(168, 28, 168, 40): 'sigma2_numba', (168, 42, 168, 48): 'sigma2'}, {}), '(sigma2_numba, sigma2)', False, 'from numpy.testing import assert_allclose, assert_almost_equal\n'), ((169, 8, 169, 50), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', ({(169, 28, 169, 41): 'sigma2_python', (169, 43, 169, 49): 'sigma2'}, {}), '(sigma2_python, sigma2)', False, 'from numpy.testing import assert_allclose, assert_almost_equal\n'), ((171, 21, 171, 52), 'numpy.array', 'np.array', ({(171, 30, 
171, 51): '[0.1, -0.4, 0.3, 0.2]'}, {}), '([0.1, -0.4, 0.3, 0.2])', True, 'import numpy as np\n'), ((172, 8, 183, 9), 'arch.univariate.recursions_python.garch_recursion_python', 'recpy.garch_recursion_python', ({(173, 12, 173, 22): 'parameters', (174, 12, 174, 19): 'fresids', (175, 12, 175, 19): 'sresids', (176, 12, 176, 18): 'sigma2', (177, 12, 177, 13): '(1)', (178, 12, 178, 13): '(1)', (179, 12, 179, 13): '(1)', (180, 12, 180, 16): 'nobs', (181, 12, 181, 20): 'backcast', (182, 12, 182, 27): 'self.var_bounds'}, {}), '(parameters, fresids, sresids, sigma2, 1, 1, 1,\n nobs, backcast, self.var_bounds)', True, 'import arch.univariate.recursions_python as recpy\n'), ((184, 15, 184, 54), 'numpy.all', 'np.all', ({(184, 22, 184, 53): '(sigma2 >= self.var_bounds[:, (0)])'}, {}), '(sigma2 >= self.var_bounds[:, (0)])', True, 'import numpy as np\n'), ((185, 15, 185, 58), 'numpy.all', 'np.all', ({(185, 22, 185, 57): '(sigma2 <= 2 * self.var_bounds[:, (1)])'}, {}), '(sigma2 <= 2 * self.var_bounds[:, (1)])', True, 'import numpy as np\n'), ((187, 21, 187, 47), 'numpy.array', 'np.array', ({(187, 30, 187, 46): '[0.1, 0.4, 3, 2]'}, {}), '([0.1, 0.4, 3, 2])', True, 'import numpy as np\n'), ((188, 8, 199, 9), 'arch.univariate.recursions_python.garch_recursion_python', 'recpy.garch_recursion_python', ({(189, 12, 189, 22): 'parameters', (190, 12, 190, 19): 'fresids', (191, 12, 191, 19): 'sresids', (192, 12, 192, 18): 'sigma2', (193, 12, 193, 13): '(1)', (194, 12, 194, 13): '(1)', (195, 12, 195, 13): '(1)', (196, 12, 196, 16): 'nobs', (197, 12, 197, 20): 'backcast', (198, 12, 198, 27): 'self.var_bounds'}, {}), '(parameters, fresids, sresids, sigma2, 1, 1, 1,\n nobs, backcast, self.var_bounds)', True, 'import arch.univariate.recursions_python as recpy\n'), ((200, 15, 200, 54), 'numpy.all', 'np.all', ({(200, 22, 200, 53): '(sigma2 >= self.var_bounds[:, (0)])'}, {}), '(sigma2 >= self.var_bounds[:, (0)])', True, 'import numpy as np\n'), ((201, 15, 201, 58), 'numpy.all', 'np.all', ({(201, 22, 201, 57): '(sigma2 <= 2 * self.var_bounds[:, (1)])'}, {}), '(sigma2 <= 2 * self.var_bounds[:, (1)])', True, 'import numpy as np\n'), ((203, 21, 203, 51), 'numpy.array', 'np.array', ({(203, 30, 203, 50): '[0.1, 0.4, 0.3, 0.2]'}, {}), '([0.1, 0.4, 0.3, 0.2])', True, 'import numpy as np\n'), ((206, 8, 217, 9), 'arch.univariate.recursions_python.garch_recursion_python', 'recpy.garch_recursion_python', ({(207, 12, 207, 22): 'parameters', (208, 12, 208, 23): 'mod_fresids', (209, 12, 209, 19): 'sresids', (210, 12, 210, 18): 'sigma2', (211, 12, 211, 13): '(1)', (212, 12, 212, 13): '(1)', (213, 12, 213, 13): '(1)', (214, 12, 214, 16): 'nobs', (215, 12, 215, 20): 'backcast', (216, 12, 216, 27): 'self.var_bounds'}, {}), '(parameters, mod_fresids, sresids, sigma2, 1, 1,\n 1, nobs, backcast, self.var_bounds)', True, 'import arch.univariate.recursions_python as recpy\n'), ((218, 15, 218, 54), 'numpy.all', 'np.all', ({(218, 22, 218, 53): '(sigma2 >= self.var_bounds[:, (0)])'}, {}), '(sigma2 >= self.var_bounds[:, (0)])', True, 'import numpy as np\n'), ((219, 15, 219, 58), 'numpy.all', 'np.all', ({(219, 22, 219, 57): '(sigma2 <= 2 * self.var_bounds[:, (1)])'}, {}), '(sigma2 <= 2 * self.var_bounds[:, (1)])', True, 'import numpy as np\n'), ((232, 15, 232, 54), 'numpy.all', 'np.all', ({(232, 22, 232, 53): '(sigma2 >= self.var_bounds[:, (0)])'}, {}), '(sigma2 >= self.var_bounds[:, (0)])', True, 'import numpy as np\n'), ((233, 15, 233, 58), 'numpy.all', 'np.all', ({(233, 22, 233, 57): '(sigma2 <= 2 * self.var_bounds[:, (1)])'}, {}), '(sigma2 <= 2 * 
self.var_bounds[:, (1)])', True, 'import numpy as np\n'), ((239, 21, 239, 51), 'numpy.array', 'np.array', ({(239, 30, 239, 50): '[0.1, 0.4, 0.3, 0.2]'}, {}), '([0.1, 0.4, 0.3, 0.2])', True, 'import numpy as np\n'), ((240, 15, 240, 51), 'numpy.array', 'np.array', (), '', True, 'import numpy as np\n'), ((241, 8, 243, 9), 'arch.univariate.recursions_python.harch_recursion_python', 'recpy.harch_recursion_python', ({(242, 12, 242, 22): 'parameters', (242, 24, 242, 30): 'resids', (242, 32, 242, 38): 'sigma2', (242, 40, 242, 44): 'lags', (242, 46, 242, 50): 'nobs', (242, 52, 242, 60): 'backcast', (242, 62, 242, 77): 'self.var_bounds'}, {}), '(parameters, resids, sigma2, lags, nobs,\n backcast, self.var_bounds)', True, 'import arch.univariate.recursions_python as recpy\n'), ((245, 8, 247, 9), 'arch.univariate.recursions_python.harch_recursion', 'recpy.harch_recursion', ({(246, 12, 246, 22): 'parameters', (246, 24, 246, 30): 'resids', (246, 32, 246, 38): 'sigma2', (246, 40, 246, 44): 'lags', (246, 46, 246, 50): 'nobs', (246, 52, 246, 60): 'backcast', (246, 62, 246, 77): 'self.var_bounds'}, {}), '(parameters, resids, sigma2, lags, nobs, backcast,\n self.var_bounds)', True, 'import arch.univariate.recursions_python as recpy\n'), ((252, 8, 252, 49), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', ({(252, 28, 252, 40): 'sigma2_numba', (252, 42, 252, 48): 'sigma2'}, {}), '(sigma2_numba, sigma2)', False, 'from numpy.testing import assert_allclose, assert_almost_equal\n'), ((253, 8, 253, 50), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', ({(253, 28, 253, 41): 'sigma2_python', (253, 43, 253, 49): 'sigma2'}, {}), '(sigma2_python, sigma2)', False, 'from numpy.testing import assert_allclose, assert_almost_equal\n'), ((255, 21, 255, 53), 'numpy.array', 'np.array', ({(255, 30, 255, 52): '[-0.1, -0.4, 0.3, 0.2]'}, {}), '([-0.1, -0.4, 0.3, 0.2])', True, 'import numpy as np\n'), ((256, 8, 258, 9), 'arch.univariate.recursions_python.harch_recursion_python', 'recpy.harch_recursion_python', ({(257, 12, 257, 22): 'parameters', (257, 24, 257, 30): 'resids', (257, 32, 257, 38): 'sigma2', (257, 40, 257, 44): 'lags', (257, 46, 257, 50): 'nobs', (257, 52, 257, 60): 'backcast', (257, 62, 257, 77): 'self.var_bounds'}, {}), '(parameters, resids, sigma2, lags, nobs,\n backcast, self.var_bounds)', True, 'import arch.univariate.recursions_python as recpy\n'), ((259, 15, 259, 54), 'numpy.all', 'np.all', ({(259, 22, 259, 53): '(sigma2 >= self.var_bounds[:, (0)])'}, {}), '(sigma2 >= self.var_bounds[:, (0)])', True, 'import numpy as np\n'), ((260, 15, 260, 58), 'numpy.all', 'np.all', ({(260, 22, 260, 57): '(sigma2 <= 2 * self.var_bounds[:, (1)])'}, {}), '(sigma2 <= 2 * self.var_bounds[:, (1)])', True, 'import numpy as np\n'), ((262, 21, 262, 47), 'numpy.array', 'np.array', ({(262, 30, 262, 46): '[0.1, 400000000.0, 3, 2]'}, {}), '([0.1, 400000000.0, 3, 2])', True, 'import numpy as np\n'), ((263, 8, 265, 9), 'arch.univariate.recursions_python.harch_recursion_python', 'recpy.harch_recursion_python', ({(264, 12, 264, 22): 'parameters', (264, 24, 264, 30): 'resids', (264, 32, 264, 38): 'sigma2', (264, 40, 264, 44): 'lags', (264, 46, 264, 50): 'nobs', (264, 52, 264, 60): 'backcast', (264, 62, 264, 77): 'self.var_bounds'}, {}), '(parameters, resids, sigma2, lags, nobs,\n backcast, self.var_bounds)', True, 'import arch.univariate.recursions_python as recpy\n'), ((266, 15, 266, 54), 'numpy.all', 'np.all', ({(266, 22, 266, 53): '(sigma2 >= self.var_bounds[:, (0)])'}, {}), '(sigma2 >= self.var_bounds[:, (0)])', True, 
'import numpy as np\n'), ((267, 15, 267, 58), 'numpy.all', 'np.all', ({(267, 22, 267, 57): '(sigma2 <= 2 * self.var_bounds[:, (1)])'}, {}), '(sigma2 <= 2 * self.var_bounds[:, (1)])', True, 'import numpy as np\n'), ((269, 21, 269, 47), 'numpy.array', 'np.array', ({(269, 30, 269, 46): '[0.1, 400000000.0, 3, 2]'}, {}), '([0.1, 400000000.0, 3, 2])', True, 'import numpy as np\n'), ((272, 8, 274, 9), 'arch.univariate.recursions_python.harch_recursion_python', 'recpy.harch_recursion_python', ({(273, 12, 273, 22): 'parameters', (273, 24, 273, 34): 'mod_resids', (273, 36, 273, 42): 'sigma2', (273, 44, 273, 48): 'lags', (273, 50, 273, 54): 'nobs', (273, 56, 273, 64): 'backcast', (273, 66, 273, 81): 'self.var_bounds'}, {}), '(parameters, mod_resids, sigma2, lags, nobs,\n backcast, self.var_bounds)', True, 'import arch.univariate.recursions_python as recpy\n'), ((275, 15, 275, 54), 'numpy.all', 'np.all', ({(275, 22, 275, 53): '(sigma2 >= self.var_bounds[:, (0)])'}, {}), '(sigma2 >= self.var_bounds[:, (0)])', True, 'import numpy as np\n'), ((276, 15, 276, 58), 'numpy.all', 'np.all', ({(276, 22, 276, 57): '(sigma2 <= 2 * self.var_bounds[:, (1)])'}, {}), '(sigma2 <= 2 * self.var_bounds[:, (1)])', True, 'import numpy as np\n'), ((280, 15, 280, 54), 'numpy.all', 'np.all', ({(280, 22, 280, 53): '(sigma2 >= self.var_bounds[:, (0)])'}, {}), '(sigma2 >= self.var_bounds[:, (0)])', True, 'import numpy as np\n'), ((281, 15, 281, 58), 'numpy.all', 'np.all', ({(281, 22, 281, 57): '(sigma2 <= 2 * self.var_bounds[:, (1)])'}, {}), '(sigma2 <= 2 * self.var_bounds[:, (1)])', True, 'import numpy as np\n'), ((287, 21, 287, 51), 'numpy.array', 'np.array', ({(287, 30, 287, 50): '[0.1, 0.4, 0.3, 0.2]'}, {}), '([0.1, 0.4, 0.3, 0.2])', True, 'import numpy as np\n'), ((290, 8, 292, 9), 'arch.univariate.recursions_python.arch_recursion_python', 'recpy.arch_recursion_python', ({(291, 12, 291, 22): 'parameters', (291, 24, 291, 30): 'resids', (291, 32, 291, 38): 'sigma2', (291, 40, 291, 41): 'p', (291, 43, 291, 47): 'nobs', (291, 49, 291, 57): 'backcast', (291, 59, 291, 74): 'self.var_bounds'}, {}), '(parameters, resids, sigma2, p, nobs, backcast,\n self.var_bounds)', True, 'import arch.univariate.recursions_python as recpy\n'), ((294, 8, 296, 9), 'arch.univariate.recursions_python.arch_recursion', 'recpy.arch_recursion', ({(295, 12, 295, 22): 'parameters', (295, 24, 295, 30): 'resids', (295, 32, 295, 38): 'sigma2', (295, 40, 295, 41): 'p', (295, 43, 295, 47): 'nobs', (295, 49, 295, 57): 'backcast', (295, 59, 295, 74): 'self.var_bounds'}, {}), '(parameters, resids, sigma2, p, nobs, backcast, self.\n var_bounds)', True, 'import arch.univariate.recursions_python as recpy\n'), ((301, 8, 301, 49), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', ({(301, 28, 301, 40): 'sigma2_numba', (301, 42, 301, 48): 'sigma2'}, {}), '(sigma2_numba, sigma2)', False, 'from numpy.testing import assert_allclose, assert_almost_equal\n'), ((302, 8, 302, 50), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', ({(302, 28, 302, 41): 'sigma2_python', (302, 43, 302, 49): 'sigma2'}, {}), '(sigma2_python, sigma2)', False, 'from numpy.testing import assert_allclose, assert_almost_equal\n'), ((304, 21, 304, 53), 'numpy.array', 'np.array', ({(304, 30, 304, 52): '[-0.1, -0.4, 0.3, 0.2]'}, {}), '([-0.1, -0.4, 0.3, 0.2])', True, 'import numpy as np\n'), ((305, 8, 307, 9), 'arch.univariate.recursions_python.arch_recursion_python', 'recpy.arch_recursion_python', ({(306, 12, 306, 22): 'parameters', (306, 24, 306, 30): 'resids', (306, 32, 306, 38): 
'sigma2', (306, 40, 306, 41): 'p', (306, 43, 306, 47): 'nobs', (306, 49, 306, 57): 'backcast', (306, 59, 306, 74): 'self.var_bounds'}, {}), '(parameters, resids, sigma2, p, nobs, backcast,\n self.var_bounds)', True, 'import arch.univariate.recursions_python as recpy\n'), ((308, 15, 308, 54), 'numpy.all', 'np.all', ({(308, 22, 308, 53): '(sigma2 >= self.var_bounds[:, (0)])'}, {}), '(sigma2 >= self.var_bounds[:, (0)])', True, 'import numpy as np\n'), ((309, 15, 309, 58), 'numpy.all', 'np.all', ({(309, 22, 309, 57): '(sigma2 <= 2 * self.var_bounds[:, (1)])'}, {}), '(sigma2 <= 2 * self.var_bounds[:, (1)])', True, 'import numpy as np\n'), ((311, 21, 311, 47), 'numpy.array', 'np.array', ({(311, 30, 311, 46): '[0.1, 400000000.0, 3, 2]'}, {}), '([0.1, 400000000.0, 3, 2])', True, 'import numpy as np\n'), ((312, 8, 314, 9), 'arch.univariate.recursions_python.arch_recursion_python', 'recpy.arch_recursion_python', ({(313, 12, 313, 22): 'parameters', (313, 24, 313, 30): 'resids', (313, 32, 313, 38): 'sigma2', (313, 40, 313, 41): 'p', (313, 43, 313, 47): 'nobs', (313, 49, 313, 57): 'backcast', (313, 59, 313, 74): 'self.var_bounds'}, {}), '(parameters, resids, sigma2, p, nobs, backcast,\n self.var_bounds)', True, 'import arch.univariate.recursions_python as recpy\n'), ((315, 15, 315, 54), 'numpy.all', 'np.all', ({(315, 22, 315, 53): '(sigma2 >= self.var_bounds[:, (0)])'}, {}), '(sigma2 >= self.var_bounds[:, (0)])', True, 'import numpy as np\n'), ((316, 15, 316, 58), 'numpy.all', 'np.all', ({(316, 22, 316, 57): '(sigma2 <= 2 * self.var_bounds[:, (1)])'}, {}), '(sigma2 <= 2 * self.var_bounds[:, (1)])', True, 'import numpy as np\n'), ((320, 8, 322, 9), 'arch.univariate.recursions_python.arch_recursion_python', 'recpy.arch_recursion_python', ({(321, 12, 321, 22): 'parameters', (321, 24, 321, 34): 'mod_resids', (321, 36, 321, 42): 'sigma2', (321, 44, 321, 45): 'p', (321, 47, 321, 51): 'nobs', (321, 53, 321, 61): 'backcast', (321, 63, 321, 78): 'self.var_bounds'}, {}), '(parameters, mod_resids, sigma2, p, nobs,\n backcast, self.var_bounds)', True, 'import arch.univariate.recursions_python as recpy\n'), ((323, 15, 323, 54), 'numpy.all', 'np.all', ({(323, 22, 323, 53): '(sigma2 >= self.var_bounds[:, (0)])'}, {}), '(sigma2 >= self.var_bounds[:, (0)])', True, 'import numpy as np\n'), ((324, 15, 324, 58), 'numpy.all', 'np.all', ({(324, 22, 324, 57): '(sigma2 <= 2 * self.var_bounds[:, (1)])'}, {}), '(sigma2 <= 2 * self.var_bounds[:, (1)])', True, 'import numpy as np\n'), ((328, 15, 328, 54), 'numpy.all', 'np.all', ({(328, 22, 328, 53): '(sigma2 >= self.var_bounds[:, (0)])'}, {}), '(sigma2 >= self.var_bounds[:, (0)])', True, 'import numpy as np\n'), ((329, 15, 329, 58), 'numpy.all', 'np.all', ({(329, 22, 329, 57): '(sigma2 <= 2 * self.var_bounds[:, (1)])'}, {}), '(sigma2 <= 2 * self.var_bounds[:, (1)])', True, 'import numpy as np\n'), ((335, 21, 335, 51), 'numpy.array', 'np.array', ({(335, 30, 335, 50): '[0.1, 0.4, 0.3, 0.2]'}, {}), '([0.1, 0.4, 0.3, 0.2])', True, 'import numpy as np\n'), ((337, 18, 337, 33), 'numpy.sign', 'np.sign', ({(337, 26, 337, 32): 'resids'}, {}), '(resids)', True, 'import numpy as np\n'), ((339, 8, 350, 9), 'arch.univariate.recursions_python.garch_recursion', 'recpy.garch_recursion', ({(340, 12, 340, 22): 'parameters', (341, 12, 341, 19): 'fresids', (342, 12, 342, 19): 'sresids', (343, 12, 343, 18): 'sigma2', (344, 12, 344, 13): '(1)', (345, 12, 345, 13): '(1)', (346, 12, 346, 13): '(1)', (347, 12, 347, 16): 'nobs', (348, 12, 348, 20): 'backcast', (349, 12, 349, 27): 'self.var_bounds'}, {}), 
'(parameters, fresids, sresids, sigma2, 1, 1, 1, nobs,\n backcast, self.var_bounds)', True, 'import arch.univariate.recursions_python as recpy\n'), ((364, 8, 364, 50), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', ({(364, 28, 364, 41): 'sigma2_python', (364, 43, 364, 49): 'sigma2'}, {}), '(sigma2_python, sigma2)', False, 'from numpy.testing import assert_allclose, assert_almost_equal\n'), ((370, 21, 370, 51), 'numpy.array', 'np.array', ({(370, 30, 370, 50): '[0.1, 0.4, 0.3, 0.2]'}, {}), '([0.1, 0.4, 0.3, 0.2])', True, 'import numpy as np\n'), ((372, 18, 372, 33), 'numpy.sign', 'np.sign', ({(372, 26, 372, 32): 'resids'}, {}), '(resids)', True, 'import numpy as np\n'), ((403, 8, 403, 50), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', ({(403, 28, 403, 41): 'sigma2_python', (403, 43, 403, 49): 'sigma2'}, {}), '(sigma2_python, sigma2)', False, 'from numpy.testing import assert_allclose, assert_almost_equal\n'), ((409, 21, 409, 46), 'numpy.array', 'np.array', ({(409, 30, 409, 45): '[0.1, 0.4, 0.3]'}, {}), '([0.1, 0.4, 0.3])', True, 'import numpy as np\n'), ((411, 18, 411, 33), 'numpy.sign', 'np.sign', ({(411, 26, 411, 32): 'resids'}, {}), '(resids)', True, 'import numpy as np\n'), ((413, 8, 424, 9), 'arch.univariate.recursions_python.garch_recursion', 'recpy.garch_recursion', ({(414, 12, 414, 22): 'parameters', (415, 12, 415, 19): 'fresids', (416, 12, 416, 19): 'sresids', (417, 12, 417, 18): 'sigma2', (418, 12, 418, 13): '(1)', (419, 12, 419, 13): '(1)', (420, 12, 420, 13): '(0)', (421, 12, 421, 16): 'nobs', (422, 12, 422, 20): 'backcast', (423, 12, 423, 27): 'self.var_bounds'}, {}), '(parameters, fresids, sresids, sigma2, 1, 1, 0, nobs,\n backcast, self.var_bounds)', True, 'import arch.univariate.recursions_python as recpy\n'), ((438, 8, 438, 50), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', ({(438, 28, 438, 41): 'sigma2_python', (438, 43, 438, 49): 'sigma2'}, {}), '(sigma2_python, sigma2)', False, 'from numpy.testing import assert_allclose, assert_almost_equal\n'), ((444, 21, 444, 46), 'numpy.array', 'np.array', ({(444, 30, 444, 45): '[0.1, 0.4, 0.3]'}, {}), '([0.1, 0.4, 0.3])', True, 'import numpy as np\n'), ((446, 18, 446, 33), 'numpy.sign', 'np.sign', ({(446, 26, 446, 32): 'resids'}, {}), '(resids)', True, 'import numpy as np\n'), ((448, 8, 459, 9), 'arch.univariate.recursions_python.garch_recursion', 'recpy.garch_recursion', ({(449, 12, 449, 22): 'parameters', (450, 12, 450, 19): 'fresids', (451, 12, 451, 19): 'sresids', (452, 12, 452, 18): 'sigma2', (453, 12, 453, 13): '(0)', (454, 12, 454, 13): '(1)', (455, 12, 455, 13): '(1)', (456, 12, 456, 16): 'nobs', (457, 12, 457, 20): 'backcast', (458, 12, 458, 27): 'self.var_bounds'}, {}), '(parameters, fresids, sresids, sigma2, 0, 1, 1, nobs,\n backcast, self.var_bounds)', True, 'import arch.univariate.recursions_python as recpy\n'), ((473, 8, 473, 50), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', ({(473, 28, 473, 41): 'sigma2_python', (473, 43, 473, 49): 'sigma2'}, {}), '(sigma2_python, sigma2)', False, 'from numpy.testing import assert_allclose, assert_almost_equal\n'), ((479, 21, 479, 51), 'numpy.array', 'np.array', ({(479, 30, 479, 50): '[0.1, 0.4, 0.3, 0.2]'}, {}), '([0.1, 0.4, 0.3, 0.2])', True, 'import numpy as np\n'), ((481, 18, 481, 33), 'numpy.sign', 'np.sign', ({(481, 26, 481, 32): 'resids'}, {}), '(resids)', True, 'import numpy as np\n'), ((483, 8, 494, 9), 'arch.univariate.recursions_python.garch_recursion', 'recpy.garch_recursion', ({(484, 12, 484, 22): 'parameters', (485, 
12, 485, 19): 'fresids', (486, 12, 486, 19): 'sresids', (487, 12, 487, 18): 'sigma2', (488, 12, 488, 13): '(1)', (489, 12, 489, 13): '(0)', (490, 12, 490, 13): '(1)', (491, 12, 491, 16): 'nobs', (492, 12, 492, 20): 'backcast', (493, 12, 493, 27): 'self.var_bounds'}, {}), '(parameters, fresids, sresids, sigma2, 1, 0, 1, nobs,\n backcast, self.var_bounds)', True, 'import arch.univariate.recursions_python as recpy\n'), ((508, 8, 508, 50), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', ({(508, 28, 508, 41): 'sigma2_python', (508, 43, 508, 49): 'sigma2'}, {}), '(sigma2_python, sigma2)', False, 'from numpy.testing import assert_allclose, assert_almost_equal\n'), ((514, 21, 514, 51), 'numpy.array', 'np.array', ({(514, 30, 514, 50): '[0.1, 0.4, 0.3, 0.2]'}, {}), '([0.1, 0.4, 0.3, 0.2])', True, 'import numpy as np\n'), ((516, 18, 516, 33), 'numpy.sign', 'np.sign', ({(516, 26, 516, 32): 'resids'}, {}), '(resids)', True, 'import numpy as np\n'), ((535, 8, 535, 49), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', ({(535, 28, 535, 40): 'sigma2_garch', (535, 42, 535, 48): 'sigma2'}, {}), '(sigma2_garch, sigma2)', False, 'from numpy.testing import assert_allclose, assert_almost_equal\n'), ((541, 21, 541, 53), 'numpy.array', 'np.array', ({(541, 30, 541, 52): '[1e+100, 0.4, 0.3, 0.2]'}, {}), '([1e+100, 0.4, 0.3, 0.2])', True, 'import numpy as np\n'), ((542, 15, 542, 51), 'numpy.array', 'np.array', (), '', True, 'import numpy as np\n'), ((543, 8, 545, 9), 'arch.univariate.recursions_python.harch_recursion', 'recpy.harch_recursion', ({(544, 12, 544, 22): 'parameters', (544, 24, 544, 30): 'resids', (544, 32, 544, 38): 'sigma2', (544, 40, 544, 44): 'lags', (544, 46, 544, 50): 'nobs', (544, 52, 544, 60): 'backcast', (544, 62, 544, 77): 'self.var_bounds'}, {}), '(parameters, resids, sigma2, lags, nobs, backcast,\n self.var_bounds)', True, 'import arch.univariate.recursions_python as recpy\n'), ((550, 8, 550, 50), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', ({(550, 28, 550, 41): 'sigma2_python', (550, 43, 550, 49): 'sigma2'}, {}), '(sigma2_python, sigma2)', False, 'from numpy.testing import assert_allclose, assert_almost_equal\n'), ((551, 15, 551, 54), 'numpy.all', 'np.all', ({(551, 22, 551, 53): '(sigma2 >= self.var_bounds[:, (0)])'}, {}), '(sigma2 >= self.var_bounds[:, (0)])', True, 'import numpy as np\n'), ((552, 15, 552, 58), 'numpy.all', 'np.all', ({(552, 22, 552, 57): '(sigma2 <= 2 * self.var_bounds[:, (1)])'}, {}), '(sigma2 <= 2 * self.var_bounds[:, (1)])', True, 'import numpy as np\n'), ((554, 21, 554, 54), 'numpy.array', 'np.array', ({(554, 30, 554, 53): '[-1e+100, 0.4, 0.3, 0.2]'}, {}), '([-1e+100, 0.4, 0.3, 0.2])', True, 'import numpy as np\n'), ((555, 8, 557, 9), 'arch.univariate.recursions_python.harch_recursion', 'recpy.harch_recursion', ({(556, 12, 556, 22): 'parameters', (556, 24, 556, 30): 'resids', (556, 32, 556, 38): 'sigma2', (556, 40, 556, 44): 'lags', (556, 46, 556, 50): 'nobs', (556, 52, 556, 60): 'backcast', (556, 62, 556, 77): 'self.var_bounds'}, {}), '(parameters, resids, sigma2, lags, nobs, backcast,\n self.var_bounds)', True, 'import arch.univariate.recursions_python as recpy\n'), ((562, 8, 562, 50), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', ({(562, 28, 562, 41): 'sigma2_python', (562, 43, 562, 49): 'sigma2'}, {}), '(sigma2_python, sigma2)', False, 'from numpy.testing import assert_allclose, assert_almost_equal\n'), ((563, 8, 563, 58), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', ({(563, 28, 563, 34): 'sigma2', 
(563, 36, 563, 57): 'self.var_bounds[:, (0)]'}, {}), '(sigma2, self.var_bounds[:, (0)])', False, 'from numpy.testing import assert_allclose, assert_almost_equal\n'), ((565, 21, 565, 53), 'numpy.array', 'np.array', ({(565, 30, 565, 52): '[1e+100, 0.4, 0.3, 0.2]'}, {}), '([1e+100, 0.4, 0.3, 0.2])', True, 'import numpy as np\n'), ((567, 18, 567, 33), 'numpy.sign', 'np.sign', ({(567, 26, 567, 32): 'resids'}, {}), '(resids)', True, 'import numpy as np\n'), ((569, 8, 580, 9), 'arch.univariate.recursions_python.garch_recursion', 'recpy.garch_recursion', ({(570, 12, 570, 22): 'parameters', (571, 12, 571, 19): 'fresids', (572, 12, 572, 19): 'sresids', (573, 12, 573, 18): 'sigma2', (574, 12, 574, 13): '(1)', (575, 12, 575, 13): '(1)', (576, 12, 576, 13): '(1)', (577, 12, 577, 16): 'nobs', (578, 12, 578, 20): 'backcast', (579, 12, 579, 27): 'self.var_bounds'}, {}), '(parameters, fresids, sresids, sigma2, 1, 1, 1, nobs,\n backcast, self.var_bounds)', True, 'import arch.univariate.recursions_python as recpy\n'), ((594, 8, 594, 50), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', ({(594, 28, 594, 41): 'sigma2_python', (594, 43, 594, 49): 'sigma2'}, {}), '(sigma2_python, sigma2)', False, 'from numpy.testing import assert_allclose, assert_almost_equal\n'), ((595, 15, 595, 54), 'numpy.all', 'np.all', ({(595, 22, 595, 53): '(sigma2 >= self.var_bounds[:, (0)])'}, {}), '(sigma2 >= self.var_bounds[:, (0)])', True, 'import numpy as np\n'), ((596, 15, 596, 58), 'numpy.all', 'np.all', ({(596, 22, 596, 57): '(sigma2 <= 2 * self.var_bounds[:, (1)])'}, {}), '(sigma2 <= 2 * self.var_bounds[:, (1)])', True, 'import numpy as np\n'), ((598, 21, 598, 54), 'numpy.array', 'np.array', ({(598, 30, 598, 53): '[-1e+100, 0.4, 0.3, 0.2]'}, {}), '([-1e+100, 0.4, 0.3, 0.2])', True, 'import numpy as np\n'), ((599, 8, 610, 9), 'arch.univariate.recursions_python.garch_recursion', 'recpy.garch_recursion', ({(600, 12, 600, 22): 'parameters', (601, 12, 601, 19): 'fresids', (602, 12, 602, 19): 'sresids', (603, 12, 603, 18): 'sigma2', (604, 12, 604, 13): '(1)', (605, 12, 605, 13): '(1)', (606, 12, 606, 13): '(1)', (607, 12, 607, 16): 'nobs', (608, 12, 608, 20): 'backcast', (609, 12, 609, 27): 'self.var_bounds'}, {}), '(parameters, fresids, sresids, sigma2, 1, 1, 1, nobs,\n backcast, self.var_bounds)', True, 'import arch.univariate.recursions_python as recpy\n'), ((624, 8, 624, 50), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', ({(624, 28, 624, 41): 'sigma2_python', (624, 43, 624, 49): 'sigma2'}, {}), '(sigma2_python, sigma2)', False, 'from numpy.testing import assert_allclose, assert_almost_equal\n'), ((625, 8, 625, 58), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', ({(625, 28, 625, 34): 'sigma2', (625, 36, 625, 57): 'self.var_bounds[:, (0)]'}, {}), '(sigma2, self.var_bounds[:, (0)])', False, 'from numpy.testing import assert_allclose, assert_almost_equal\n'), ((627, 21, 627, 53), 'numpy.array', 'np.array', ({(627, 30, 627, 52): '[1e+100, 0.4, 0.3, 0.2]'}, {}), '([1e+100, 0.4, 0.3, 0.2])', True, 'import numpy as np\n'), ((628, 8, 630, 9), 'arch.univariate.recursions_python.arch_recursion', 'recpy.arch_recursion', ({(629, 12, 629, 22): 'parameters', (629, 24, 629, 30): 'resids', (629, 32, 629, 38): 'sigma2', (629, 40, 629, 41): '(3)', (629, 43, 629, 47): 'nobs', (629, 49, 629, 57): 'backcast', (629, 59, 629, 74): 'self.var_bounds'}, {}), '(parameters, resids, sigma2, 3, nobs, backcast, self.\n var_bounds)', True, 'import arch.univariate.recursions_python as recpy\n'), ((635, 8, 635, 50), 
'numpy.testing.assert_almost_equal', 'assert_almost_equal', ({(635, 28, 635, 41): 'sigma2_python', (635, 43, 635, 49): 'sigma2'}, {}), '(sigma2_python, sigma2)', False, 'from numpy.testing import assert_allclose, assert_almost_equal\n'), ((636, 15, 636, 54), 'numpy.all', 'np.all', ({(636, 22, 636, 53): '(sigma2 >= self.var_bounds[:, (0)])'}, {}), '(sigma2 >= self.var_bounds[:, (0)])', True, 'import numpy as np\n'), ((637, 15, 637, 58), 'numpy.all', 'np.all', ({(637, 22, 637, 57): '(sigma2 <= 2 * self.var_bounds[:, (1)])'}, {}), '(sigma2 <= 2 * self.var_bounds[:, (1)])', True, 'import numpy as np\n'), ((639, 21, 639, 54), 'numpy.array', 'np.array', ({(639, 30, 639, 53): '[-1e+100, 0.4, 0.3, 0.2]'}, {}), '([-1e+100, 0.4, 0.3, 0.2])', True, 'import numpy as np\n'), ((640, 8, 642, 9), 'arch.univariate.recursions_python.arch_recursion', 'recpy.arch_recursion', ({(641, 12, 641, 22): 'parameters', (641, 24, 641, 30): 'resids', (641, 32, 641, 38): 'sigma2', (641, 40, 641, 41): '(3)', (641, 43, 641, 47): 'nobs', (641, 49, 641, 57): 'backcast', (641, 59, 641, 74): 'self.var_bounds'}, {}), '(parameters, resids, sigma2, 3, nobs, backcast, self.\n var_bounds)', True, 'import arch.univariate.recursions_python as recpy\n'), ((647, 8, 647, 50), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', ({(647, 28, 647, 41): 'sigma2_python', (647, 43, 647, 49): 'sigma2'}, {}), '(sigma2_python, sigma2)', False, 'from numpy.testing import assert_allclose, assert_almost_equal\n'), ((648, 8, 648, 58), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', ({(648, 28, 648, 34): 'sigma2', (648, 36, 648, 57): 'self.var_bounds[:, (0)]'}, {}), '(sigma2, self.var_bounds[:, (0)])', False, 'from numpy.testing import assert_allclose, assert_almost_equal\n'), ((652, 21, 652, 53), 'numpy.array', 'np.array', ({(652, 30, 652, 52): '[0.0, 0.1, -0.1, 0.95]'}, {}), '([0.0, 0.1, -0.1, 0.95])', True, 'import numpy as np\n'), ((657, 19, 657, 40), 'numpy.empty_like', 'np.empty_like', ({(657, 33, 657, 39): 'sigma2'}, {}), '(sigma2)', True, 'import numpy as np\n'), ((658, 21, 658, 42), 'numpy.empty_like', 'np.empty_like', ({(658, 35, 658, 41): 'sigma2'}, {}), '(sigma2)', True, 'import numpy as np\n'), ((659, 25, 659, 46), 'numpy.empty_like', 'np.empty_like', ({(659, 39, 659, 45): 'sigma2'}, {}), '(sigma2)', True, 'import numpy as np\n'), ((660, 8, 673, 9), 'arch.univariate.recursions_python.egarch_recursion', 'recpy.egarch_recursion', ({(661, 12, 661, 22): 'parameters', (662, 12, 662, 18): 'resids', (663, 12, 663, 18): 'sigma2', (664, 12, 664, 13): 'p', (665, 12, 665, 13): 'o', (666, 12, 666, 13): 'q', (667, 12, 667, 16): 'nobs', (668, 12, 668, 20): 'backcast', (669, 12, 669, 22): 'var_bounds', (670, 12, 670, 20): 'lnsigma2', (671, 12, 671, 22): 'std_resids', (672, 12, 672, 26): 'abs_std_resids'}, {}), '(parameters, resids, sigma2, p, o, q, nobs, backcast,\n var_bounds, lnsigma2, std_resids, abs_std_resids)', True, 'import arch.univariate.recursions_python as recpy\n'), ((675, 8, 688, 9), 'arch.univariate.recursions_python.egarch_recursion_python', 'recpy.egarch_recursion_python', ({(676, 12, 676, 22): 'parameters', (677, 12, 677, 18): 'resids', (678, 12, 678, 18): 'sigma2', (679, 12, 679, 13): 'p', (680, 12, 680, 13): 'o', (681, 12, 681, 13): 'q', (682, 12, 682, 16): 'nobs', (683, 12, 683, 20): 'backcast', (684, 12, 684, 22): 'var_bounds', (685, 12, 685, 20): 'lnsigma2', (686, 12, 686, 22): 'std_resids', (687, 12, 687, 26): 'abs_std_resids'}, {}), '(parameters, resids, sigma2, p, o, q, nobs,\n backcast, var_bounds, lnsigma2, 
std_resids, abs_std_resids)', True, 'import arch.univariate.recursions_python as recpy\n'), ((704, 8, 704, 49), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', ({(704, 28, 704, 40): 'sigma2_numba', (704, 42, 704, 48): 'sigma2'}, {}), '(sigma2_numba, sigma2)', False, 'from numpy.testing import assert_allclose, assert_almost_equal\n'), ((705, 8, 705, 50), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', ({(705, 28, 705, 41): 'sigma2_python', (705, 43, 705, 49): 'sigma2'}, {}), '(sigma2_python, sigma2)', False, 'from numpy.testing import assert_allclose, assert_almost_equal\n'), ((707, 21, 707, 39), 'numpy.sqrt', 'np.sqrt', ({(707, 29, 707, 38): '2 / np.pi'}, {}), '(2 / np.pi)', True, 'import numpy as np\n'), ((718, 8, 718, 50), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', ({(718, 28, 718, 41): 'sigma2_python', (718, 43, 718, 49): 'sigma2'}, {}), '(sigma2_python, sigma2)', False, 'from numpy.testing import assert_allclose, assert_almost_equal\n'), ((720, 21, 720, 56), 'numpy.array', 'np.array', ({(720, 30, 720, 55): '[-100.0, 0.1, -0.1, 0.95]'}, {}), '([-100.0, 0.1, -0.1, 0.95])', True, 'import numpy as np\n'), ((721, 8, 734, 9), 'arch.univariate.recursions_python.egarch_recursion_python', 'recpy.egarch_recursion_python', ({(722, 12, 722, 22): 'parameters', (723, 12, 723, 18): 'resids', (724, 12, 724, 18): 'sigma2', (725, 12, 725, 13): 'p', (726, 12, 726, 13): 'o', (727, 12, 727, 13): 'q', (728, 12, 728, 16): 'nobs', (729, 12, 729, 20): 'backcast', (730, 12, 730, 22): 'var_bounds', (731, 12, 731, 20): 'lnsigma2', (732, 12, 732, 22): 'std_resids', (733, 12, 733, 26): 'abs_std_resids'}, {}), '(parameters, resids, sigma2, p, o, q, nobs,\n backcast, var_bounds, lnsigma2, std_resids, abs_std_resids)', True, 'import arch.univariate.recursions_python as recpy\n'), ((735, 15, 735, 54), 'numpy.all', 'np.all', ({(735, 22, 735, 53): '(sigma2 >= self.var_bounds[:, (0)])'}, {}), '(sigma2 >= self.var_bounds[:, (0)])', True, 'import numpy as np\n'), ((736, 15, 736, 58), 'numpy.all', 'np.all', ({(736, 22, 736, 57): '(sigma2 <= 2 * self.var_bounds[:, (1)])'}, {}), '(sigma2 <= 2 * self.var_bounds[:, (1)])', True, 'import numpy as np\n'), ((738, 21, 738, 52), 'numpy.array', 'np.array', ({(738, 30, 738, 51): '[0.0, 0.1, -0.1, 9.5]'}, {}), '([0.0, 0.1, -0.1, 9.5])', True, 'import numpy as np\n'), ((739, 8, 752, 9), 'arch.univariate.recursions_python.egarch_recursion_python', 'recpy.egarch_recursion_python', ({(740, 12, 740, 22): 'parameters', (741, 12, 741, 18): 'resids', (742, 12, 742, 18): 'sigma2', (743, 12, 743, 13): 'p', (744, 12, 744, 13): 'o', (745, 12, 745, 13): 'q', (746, 12, 746, 16): 'nobs', (747, 12, 747, 20): 'backcast', (748, 12, 748, 22): 'var_bounds', (749, 12, 749, 20): 'lnsigma2', (750, 12, 750, 22): 'std_resids', (751, 12, 751, 26): 'abs_std_resids'}, {}), '(parameters, resids, sigma2, p, o, q, nobs,\n backcast, var_bounds, lnsigma2, std_resids, abs_std_resids)', True, 'import arch.univariate.recursions_python as recpy\n'), ((753, 15, 753, 54), 'numpy.all', 'np.all', ({(753, 22, 753, 53): '(sigma2 >= self.var_bounds[:, (0)])'}, {}), '(sigma2 >= self.var_bounds[:, (0)])', True, 'import numpy as np\n'), ((754, 15, 754, 58), 'numpy.all', 'np.all', ({(754, 22, 754, 57): '(sigma2 <= 2 * self.var_bounds[:, (1)])'}, {}), '(sigma2 <= 2 * self.var_bounds[:, (1)])', True, 'import numpy as np\n'), ((756, 21, 756, 53), 'numpy.array', 'np.array', ({(756, 30, 756, 52): '[0.0, 0.1, -0.1, 0.95]'}, {}), '([0.0, 0.1, -0.1, 0.95])', True, 'import numpy as np\n'), ((759, 8, 
772, 9), 'arch.univariate.recursions_python.egarch_recursion_python', 'recpy.egarch_recursion_python', ({(760, 12, 760, 22): 'parameters', (761, 12, 761, 18): 'resids', (762, 12, 762, 18): 'sigma2', (763, 12, 763, 13): 'p', (764, 12, 764, 13): 'o', (765, 12, 765, 13): 'q', (766, 12, 766, 16): 'nobs', (767, 12, 767, 20): 'backcast', (768, 12, 768, 22): 'var_bounds', (769, 12, 769, 20): 'lnsigma2', (770, 12, 770, 22): 'std_resids', (771, 12, 771, 26): 'abs_std_resids'}, {}), '(parameters, resids, sigma2, p, o, q, nobs,\n backcast, var_bounds, lnsigma2, std_resids, abs_std_resids)', True, 'import arch.univariate.recursions_python as recpy\n'), ((773, 15, 773, 54), 'numpy.all', 'np.all', ({(773, 22, 773, 53): '(sigma2 >= self.var_bounds[:, (0)])'}, {}), '(sigma2 >= self.var_bounds[:, (0)])', True, 'import numpy as np\n'), ((774, 15, 774, 58), 'numpy.all', 'np.all', ({(774, 22, 774, 57): '(sigma2 <= 2 * self.var_bounds[:, (1)])'}, {}), '(sigma2 <= 2 * self.var_bounds[:, (1)])', True, 'import numpy as np\n'), ((780, 21, 780, 44), 'numpy.array', 'np.array', ({(780, 30, 780, 43): '[0.1, 0.8, 0]'}, {}), '([0.1, 0.8, 0])', True, 'import numpy as np\n'), ((781, 12, 781, 32), 'numpy.arange', 'np.arange', ({(781, 22, 781, 23): '1', (781, 25, 781, 31): '22 + 1'}, {}), '(1, 22 + 1)', True, 'import numpy as np\n'), ((784, 8, 786, 9), 'arch.univariate.recursions_python.midas_recursion', 'recpy.midas_recursion', ({(785, 12, 785, 22): 'parameters', (785, 24, 785, 31): 'weights', (785, 33, 785, 39): 'resids', (785, 41, 785, 47): 'sigma2', (785, 49, 785, 53): 'nobs', (785, 55, 785, 63): 'backcast', (785, 65, 785, 80): 'self.var_bounds'}, {}), '(parameters, weights, resids, sigma2, nobs, backcast,\n self.var_bounds)', True, 'import arch.univariate.recursions_python as recpy\n'), ((788, 8, 790, 9), 'arch.univariate.recursions_python.midas_recursion_python', 'recpy.midas_recursion_python', ({(789, 12, 789, 22): 'parameters', (789, 24, 789, 31): 'weights', (789, 33, 789, 39): 'resids', (789, 41, 789, 47): 'sigma2', (789, 49, 789, 53): 'nobs', (789, 55, 789, 63): 'backcast', (789, 65, 789, 80): 'self.var_bounds'}, {}), '(parameters, weights, resids, sigma2, nobs,\n backcast, self.var_bounds)', True, 'import arch.univariate.recursions_python as recpy\n'), ((795, 8, 795, 49), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', ({(795, 28, 795, 40): 'sigma2_numba', (795, 42, 795, 48): 'sigma2'}, {}), '(sigma2_numba, sigma2)', False, 'from numpy.testing import assert_allclose, assert_almost_equal\n'), ((796, 8, 796, 50), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', ({(796, 28, 796, 41): 'sigma2_python', (796, 43, 796, 49): 'sigma2'}, {}), '(sigma2_python, sigma2)', False, 'from numpy.testing import assert_allclose, assert_almost_equal\n'), ((800, 8, 802, 9), 'arch.univariate.recursions_python.midas_recursion_python', 'recpy.midas_recursion_python', ({(801, 12, 801, 22): 'parameters', (801, 24, 801, 31): 'weights', (801, 33, 801, 43): 'mod_resids', (801, 45, 801, 51): 'sigma2', (801, 53, 801, 57): 'nobs', (801, 59, 801, 67): 'backcast', (801, 69, 801, 84): 'self.var_bounds'}, {}), '(parameters, weights, mod_resids, sigma2, nobs,\n backcast, self.var_bounds)', True, 'import arch.univariate.recursions_python as recpy\n'), ((803, 15, 803, 54), 'numpy.all', 'np.all', ({(803, 22, 803, 53): '(sigma2 >= self.var_bounds[:, (0)])'}, {}), '(sigma2 >= self.var_bounds[:, (0)])', True, 'import numpy as np\n'), ((804, 15, 804, 58), 'numpy.all', 'np.all', ({(804, 22, 804, 57): '(sigma2 <= 2 * self.var_bounds[:, 
(1)])'}, {}), '(sigma2 <= 2 * self.var_bounds[:, (1)])', True, 'import numpy as np\n'), ((806, 21, 806, 46), 'numpy.array', 'np.array', ({(806, 30, 806, 45): '[0.1, 100000000000.0, 0]'}, {}), '([0.1, 100000000000.0, 0])', True, 'import numpy as np\n'), ((807, 12, 807, 32), 'numpy.arange', 'np.arange', ({(807, 22, 807, 23): '1', (807, 25, 807, 31): '22 + 1'}, {}), '(1, 22 + 1)', True, 'import numpy as np\n'), ((810, 8, 812, 9), 'arch.univariate.recursions_python.midas_recursion_python', 'recpy.midas_recursion_python', ({(811, 12, 811, 22): 'parameters', (811, 24, 811, 31): 'weights', (811, 33, 811, 39): 'resids', (811, 41, 811, 47): 'sigma2', (811, 49, 811, 53): 'nobs', (811, 55, 811, 63): 'backcast', (811, 65, 811, 80): 'self.var_bounds'}, {}), '(parameters, weights, resids, sigma2, nobs,\n backcast, self.var_bounds)', True, 'import arch.univariate.recursions_python as recpy\n'), ((813, 15, 813, 54), 'numpy.all', 'np.all', ({(813, 22, 813, 53): '(sigma2 >= self.var_bounds[:, (0)])'}, {}), '(sigma2 >= self.var_bounds[:, (0)])', True, 'import numpy as np\n'), ((814, 15, 814, 58), 'numpy.all', 'np.all', ({(814, 22, 814, 57): '(sigma2 <= 2 * self.var_bounds[:, (1)])'}, {}), '(sigma2 <= 2 * self.var_bounds[:, (1)])', True, 'import numpy as np\n'), ((818, 15, 818, 54), 'numpy.all', 'np.all', ({(818, 22, 818, 53): '(sigma2 >= self.var_bounds[:, (0)])'}, {}), '(sigma2 >= self.var_bounds[:, (0)])', True, 'import numpy as np\n'), ((819, 15, 819, 58), 'numpy.all', 'np.all', ({(819, 22, 819, 57): '(sigma2 <= 2 * self.var_bounds[:, (1)])'}, {}), '(sigma2 <= 2 * self.var_bounds[:, (1)])', True, 'import numpy as np\n'), ((821, 21, 821, 45), 'numpy.array', 'np.array', ({(821, 30, 821, 44): '[0.1, -0.4, 0]'}, {}), '([0.1, -0.4, 0])', True, 'import numpy as np\n'), ((822, 8, 824, 9), 'arch.univariate.recursions_python.midas_recursion_python', 'recpy.midas_recursion_python', ({(823, 12, 823, 22): 'parameters', (823, 24, 823, 31): 'weights', (823, 33, 823, 39): 'resids', (823, 41, 823, 47): 'sigma2', (823, 49, 823, 53): 'nobs', (823, 55, 823, 63): 'backcast', (823, 65, 823, 80): 'self.var_bounds'}, {}), '(parameters, weights, resids, sigma2, nobs,\n backcast, self.var_bounds)', True, 'import arch.univariate.recursions_python as recpy\n'), ((825, 15, 825, 54), 'numpy.all', 'np.all', ({(825, 22, 825, 53): '(sigma2 >= self.var_bounds[:, (0)])'}, {}), '(sigma2 >= self.var_bounds[:, (0)])', True, 'import numpy as np\n'), ((826, 15, 826, 58), 'numpy.all', 'np.all', ({(826, 22, 826, 57): '(sigma2 <= 2 * self.var_bounds[:, (1)])'}, {}), '(sigma2 <= 2 * self.var_bounds[:, (1)])', True, 'import numpy as np\n'), ((830, 15, 830, 54), 'numpy.all', 'np.all', ({(830, 22, 830, 53): '(sigma2 >= self.var_bounds[:, (0)])'}, {}), '(sigma2 >= self.var_bounds[:, (0)])', True, 'import numpy as np\n'), ((831, 15, 831, 58), 'numpy.all', 'np.all', ({(831, 22, 831, 57): '(sigma2 <= 2 * self.var_bounds[:, (1)])'}, {}), '(sigma2 <= 2 * self.var_bounds[:, (1)])', True, 'import numpy as np\n'), ((836, 21, 836, 51), 'numpy.array', 'np.array', ({(836, 30, 836, 50): '[1.0, 0.2, 0.4, 0.3]'}, {}), '([1.0, 0.2, 0.4, 0.3])', True, 'import numpy as np\n'), ((854, 24, 854, 45), 'numpy.empty_like', 'np.empty_like', ({(854, 38, 854, 44): 'sigma2'}, {}), '(sigma2)', True, 'import numpy as np\n'), ((862, 8, 862, 50), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', ({(862, 28, 862, 41): 'sigma2_direct', (862, 43, 862, 49): 'sigma2'}, {}), '(sigma2_direct, sigma2)', False, 'from numpy.testing import assert_allclose, assert_almost_equal\n'), 
((864, 8, 874, 9), 'arch.univariate.recursions_python.figarch_recursion', 'recpy.figarch_recursion', ({(865, 12, 865, 22): 'parameters', (866, 12, 866, 19): 'fresids', (867, 12, 867, 18): 'sigma2', (868, 12, 868, 13): 'p', (869, 12, 869, 13): 'q', (870, 12, 870, 16): 'nobs', (871, 12, 871, 21): 'trunc_lag', (872, 12, 872, 20): 'backcast', (873, 12, 873, 27): 'self.var_bounds'}, {}), '(parameters, fresids, sigma2, p, q, nobs, trunc_lag,\n backcast, self.var_bounds)', True, 'import arch.univariate.recursions_python as recpy\n'), ((876, 8, 886, 9), 'arch.univariate.recursions_python.figarch_recursion_python', 'recpy.figarch_recursion_python', ({(877, 12, 877, 22): 'parameters', (878, 12, 878, 19): 'fresids', (879, 12, 879, 18): 'sigma2', (880, 12, 880, 13): 'p', (881, 12, 881, 13): 'q', (882, 12, 882, 16): 'nobs', (883, 12, 883, 21): 'trunc_lag', (884, 12, 884, 20): 'backcast', (885, 12, 885, 27): 'self.var_bounds'}, {}), '(parameters, fresids, sigma2, p, q, nobs,\n trunc_lag, backcast, self.var_bounds)', True, 'import arch.univariate.recursions_python as recpy\n'), ((899, 8, 899, 49), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', ({(899, 28, 899, 40): 'sigma2_numba', (899, 42, 899, 48): 'sigma2'}, {}), '(sigma2_numba, sigma2)', False, 'from numpy.testing import assert_allclose, assert_almost_equal\n'), ((900, 8, 900, 50), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', ({(900, 28, 900, 41): 'sigma2_python', (900, 43, 900, 49): 'sigma2'}, {}), '(sigma2_python, sigma2)', False, 'from numpy.testing import assert_allclose, assert_almost_equal\n'), ((903, 21, 903, 41), 'numpy.array', 'np.array', ({(903, 30, 903, 40): '[1.0, 0.4]'}, {}), '([1.0, 0.4])', True, 'import numpy as np\n'), ((905, 21, 905, 39), 'numpy.empty_like', 'np.empty_like', ({(905, 35, 905, 38): 'lam'}, {}), '(lam)', True, 'import numpy as np\n'), ((909, 8, 909, 44), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', ({(909, 28, 909, 31): 'lam', (909, 33, 909, 43): 'lam_direct'}, {}), '(lam, lam_direct)', False, 'from numpy.testing import assert_allclose, assert_almost_equal\n'), ((1070, 21, 1070, 46), 'numpy.array', 'np.array', ({(1070, 30, 1070, 45): '[0.1, 0.1, 0.8]'}, {}), '([0.1, 0.1, 0.8])', True, 'import numpy as np\n'), ((1072, 18, 1072, 38), 'numpy.sign', 'np.sign', ({(1072, 26, 1072, 37): 'self.resids'}, {}), '(self.resids)', True, 'import numpy as np\n'), ((1073, 17, 1073, 31), 'numpy.empty', 'np.empty', ({(1073, 26, 1073, 30): '1000'}, {}), '(1000)', True, 'import numpy as np\n'), ((1076, 8, 1087, 9), 'arch.univariate.recursions_python.garch_recursion_python', 'recpy.garch_recursion_python', ({(1077, 12, 1077, 22): 'parameters', (1078, 12, 1078, 19): 'fresids', (1079, 12, 1079, 19): 'sresids', (1080, 12, 1080, 18): 'sigma2', (1081, 12, 1081, 13): 'p', (1082, 12, 1082, 13): 'o', (1083, 12, 1083, 13): 'q', (1084, 12, 1084, 21): 'self.nobs', (1085, 12, 1085, 25): 'self.backcast', (1086, 12, 1086, 27): 'self.var_bounds'}, {}), '(parameters, fresids, sresids, sigma2, p, o, q,\n self.nobs, self.backcast, self.var_bounds)', True, 'import arch.univariate.recursions_python as recpy\n'), ((1090, 21, 1090, 49), 'numpy.array', 'np.array', ({(1090, 30, 1090, 48): '[0.1, 0.1, 0.8, 2]'}, {}), '([0.1, 0.1, 0.8, 2])', True, 'import numpy as np\n'), ((1092, 23, 1092, 44), 'numpy.empty_like', 'np.empty_like', ({(1092, 37, 1092, 43): 'sigma2'}, {}), '(sigma2)', True, 'import numpy as np\n'), ((1106, 8, 1106, 56), 'numpy.testing.assert_allclose', 'assert_allclose', (), '', False, 'from numpy.testing 
import assert_allclose, assert_almost_equal\n'), ((1122, 8, 1122, 56), 'numpy.testing.assert_allclose', 'assert_allclose', (), '', False, 'from numpy.testing import assert_allclose, assert_almost_equal\n'), ((1138, 8, 1138, 56), 'numpy.testing.assert_allclose', 'assert_allclose', (), '', False, 'from numpy.testing import assert_allclose, assert_almost_equal\n'), ((1141, 17, 1141, 31), 'numpy.empty', 'np.empty', ({(1141, 26, 1141, 30): '1000'}, {}), '(1000)', True, 'import numpy as np\n'), ((1143, 21, 1143, 56), 'numpy.array', 'np.array', ({(1143, 30, 1143, 55): '[0.1, 0.1, 0.1, 0.8, 1.3]'}, {}), '([0.1, 0.1, 0.1, 0.8, 1.3])', True, 'import numpy as np\n'), ((1145, 23, 1145, 44), 'numpy.empty_like', 'np.empty_like', ({(1145, 37, 1145, 43): 'sigma2'}, {}), '(sigma2)', True, 'import numpy as np\n'), ((1176, 8, 1176, 42), 'numpy.testing.assert_allclose', 'assert_allclose', ({(1176, 24, 1176, 33): 'sigma2_py', (1176, 35, 1176, 41): 'sigma2'}, {}), '(sigma2_py, sigma2)', False, 'from numpy.testing import assert_allclose, assert_almost_equal\n'), ((1193, 8, 1193, 42), 'numpy.testing.assert_allclose', 'assert_allclose', ({(1193, 24, 1193, 33): 'sigma2_py', (1193, 35, 1193, 41): 'sigma2'}, {}), '(sigma2_py, sigma2)', False, 'from numpy.testing import assert_allclose, assert_almost_equal\n'), ((1198, 24, 1198, 67), 'arch.univariate.recursions_python.bounds_check_python', 'recpy.bounds_check_python', ({(1198, 50, 1198, 54): '(-1.0)', (1198, 56, 1198, 66): 'var_bounds'}, {}), '(-1.0, var_bounds)', True, 'import arch.univariate.recursions_python as recpy\n'), ((1200, 8, 1200, 51), 'arch.univariate.recursions_python.bounds_check_python', 'recpy.bounds_check_python', ({(1200, 34, 1200, 38): '(20.0)', (1200, 40, 1200, 50): 'var_bounds'}, {}), '(20.0, var_bounds)', True, 'import arch.univariate.recursions_python as recpy\n'), ((1202, 24, 1202, 69), 'arch.univariate.recursions_python.bounds_check_python', 'recpy.bounds_check_python', ({(1202, 50, 1202, 56): 'np.inf', (1202, 58, 1202, 68): 'var_bounds'}, {}), '(np.inf, var_bounds)', True, 'import arch.univariate.recursions_python as recpy\n'), ((93, 20, 93, 56), 'timeit.Timer', 'timeit.Timer', (), '', False, 'import timeit\n'), ((106, 25, 106, 47), 'numpy.ones', 'np.ones', ({(106, 33, 106, 46): '(cls.nobs, 2)'}, {}), '((cls.nobs, 2))', True, 'import numpy as np\n'), ((336, 18, 336, 32), 'numpy.abs', 'np.abs', ({(336, 25, 336, 31): 'resids'}, {}), '(resids)', True, 'import numpy as np\n'), ((371, 18, 371, 32), 'numpy.abs', 'np.abs', ({(371, 25, 371, 31): 'resids'}, {}), '(resids)', True, 'import numpy as np\n'), ((717, 24, 717, 43), 'numpy.exp', 'np.exp', ({(717, 31, 717, 42): 'lnsigma2[t]'}, {}), '(lnsigma2[t])', True, 'import numpy as np\n'), ((782, 18, 782, 32), 'scipy.special.gamma', 'gamma', ({(782, 24, 782, 31): '(j + 0.6)'}, {}), '(j + 0.6)', False, 'from scipy.special import gamma\n'), ((808, 18, 808, 32), 'scipy.special.gamma', 'gamma', ({(808, 24, 808, 31): '(j + 0.6)'}, {}), '(j + 0.6)', False, 'from scipy.special import gamma\n'), ((1096, 12, 1096, 31), 'numpy.abs', 'np.abs', ({(1096, 19, 1096, 30): 'self.resids'}, {}), '(self.resids)', True, 'import numpy as np\n'), ((1112, 12, 1112, 31), 'numpy.abs', 'np.abs', ({(1112, 19, 1112, 30): 'self.resids'}, {}), '(self.resids)', True, 'import numpy as np\n'), ((1128, 12, 1128, 31), 'numpy.abs', 'np.abs', ({(1128, 19, 1128, 30): 'self.resids'}, {}), '(self.resids)', True, 'import numpy as np\n'), ((1149, 12, 1149, 31), 'numpy.abs', 'np.abs', ({(1149, 19, 1149, 30): 'self.resids'}, {}), '(self.resids)', 
True, 'import numpy as np\n'), ((1159, 22, 1159, 41), 'numpy.isfinite', 'np.isfinite', ({(1159, 34, 1159, 40): 'sigma2'}, {}), '(sigma2)', True, 'import numpy as np\n'), ((1165, 12, 1165, 31), 'numpy.abs', 'np.abs', ({(1165, 19, 1165, 30): 'self.resids'}, {}), '(self.resids)', True, 'import numpy as np\n'), ((1175, 22, 1175, 41), 'numpy.isfinite', 'np.isfinite', ({(1175, 34, 1175, 40): 'sigma2'}, {}), '(sigma2)', True, 'import numpy as np\n'), ((1182, 12, 1182, 31), 'numpy.abs', 'np.abs', ({(1182, 19, 1182, 30): 'self.resids'}, {}), '(self.resids)', True, 'import numpy as np\n'), ((1192, 22, 1192, 41), 'numpy.isfinite', 'np.isfinite', ({(1192, 34, 1192, 40): 'sigma2'}, {}), '(sigma2)', True, 'import numpy as np\n'), ((1200, 58, 1200, 77), 'numpy.log', 'np.log', ({(1200, 65, 1200, 76): '(20.0 / 10.0)'}, {}), '(20.0 / 10.0)', True, 'import numpy as np\n'), ((380, 22, 387, 17), 'numpy.array', 'np.array', ({(381, 20, 386, 21): '[1.0, resids[t - 1] ** 2.0, resids[t - 1] ** 2.0 * (resids[t - 1] < 0),\n sigma2[t - 1]]'}, {}), '([1.0, resids[t - 1] ** 2.0, resids[t - 1] ** 2.0 * (resids[t - 1] <\n 0), sigma2[t - 1]])', True, 'import numpy as np\n'), ((782, 36, 782, 48), 'scipy.special.gamma', 'gamma', ({(782, 42, 782, 47): '(j + 1)'}, {}), '(j + 1)', False, 'from scipy.special import gamma\n'), ((782, 51, 782, 61), 'scipy.special.gamma', 'gamma', ({(782, 57, 782, 60): '(0.6)'}, {}), '(0.6)', False, 'from scipy.special import gamma\n'), ((808, 36, 808, 48), 'scipy.special.gamma', 'gamma', ({(808, 42, 808, 47): '(j + 1)'}, {}), '(j + 1)', False, 'from scipy.special import gamma\n'), ((808, 51, 808, 61), 'scipy.special.gamma', 'gamma', ({(808, 57, 808, 60): '(0.6)'}, {}), '(0.6)', False, 'from scipy.special import gamma\n'), ((377, 20, 377, 71), 'numpy.array', 'np.array', ({(377, 29, 377, 70): '[1.0, backcast, 0.5 * backcast, backcast]'}, {}), '([1.0, backcast, 0.5 * backcast, backcast])', True, 'import numpy as np\n'), ((713, 43, 713, 65), 'numpy.sqrt', 'np.sqrt', ({(713, 51, 713, 64): 'sigma2[t - 1]'}, {}), '(sigma2[t - 1])', True, 'import numpy as np\n'), ((714, 48, 714, 64), 'numpy.abs', 'np.abs', ({(714, 55, 714, 63): 'stdresid'}, {}), '(stdresid)', True, 'import numpy as np\n')] |
SoldAI/hermetrics | hermetrics/damerau_levenshtein.py | 5e07a4f40376779015ef2f5b964d7ac060ed6e25 | from .levenshtein import Levenshtein
class DamerauLevenshtein(Levenshtein):
def __init__(self, name='Damerau-Levenshtein'):
super().__init__(name=name)
def distance(self, source, target, cost=(1, 1, 1, 1)):
"""Damerau-Levenshtein distance with costs for deletion, insertion, substitution and transposition"""
s_len = len(source)
t_len = len(target)
if type(cost) == int or type(cost) == float:
del_cost = ins_cost = sub_cost = tra_cost = cost
else:
del_cost, ins_cost, sub_cost, tra_cost = cost
# Be sure to exceed maximum value
#INF = float('inf')
UPPER = max(del_cost, ins_cost, sub_cost, tra_cost) * (s_len + t_len)
# Initialize matrix (s_len + 2) X (t_len + 2)
D = [[UPPER for j in range(t_len + 2)]]
D += [[UPPER] + [j*ins_cost for j in range(t_len + 1)]]
D += [[UPPER, i] + [0]*t_len for i in range(1, s_len + 1)]
# Holds last row each element was encountered
last_row = {}
for i in range(1, s_len + 1):
# Current symbol in source
s_symbol = source[i-1]
            # Column of last match on this row
last_match_col = 0
for j in range(1, t_len + 1):
# Current symbol in target
t_symbol = target[j-1]
# Last row with matching character
last_match_row = last_row.get(t_symbol, 0)
# Cost of substitution
opt_sub_cost = 0 if s_symbol == t_symbol else sub_cost
# Compute different options
deletion = D[i][j+1] + del_cost
insertion = D[i+1][j] + ins_cost
substitution = D[i][j] + opt_sub_cost
# Cost before transposition
# + cost of operations between transposed letters
# + cost of transposition
# transposition = D[last_match_row][last_match_col] + \
# (i-last_match_row-1) * del_cost + \
# (j-last_match_col-1) * ins_cost + \
# tra_cost
transposition = D[last_match_row][last_match_col] + \
max((i-last_match_row-1) * del_cost, \
(j-last_match_col-1) * ins_cost) + tra_cost
D[i+1][j+1] = min(deletion, insertion, substitution, transposition)
if opt_sub_cost == 0:
last_match_col = j
last_row[s_symbol] = i
return D[-1][-1]
def max_distance(self, source, target, cost=(1,1,1,1)):
"""Damerau-Levenshtein maximum distance value (same as Levenshtein to account for difference in operations)"""
if type(cost) == int or type(cost) == float:
lev_cost = cost
else:
lev_cost = cost[:3]
return super().max_distance(source, target, lev_cost)
if(__name__ == '__main__'):
print("Damerau-Levenshtein distance")
| [] |
Carlosbogo/etna | etna/analysis/outliers/hist_outliers.py | b6210f0e79ee92aa9ae8ff4fcfb267be9fb7cc94 | import typing
from copy import deepcopy
from typing import TYPE_CHECKING
from typing import List
import numba
import numpy as np
import pandas as pd
if TYPE_CHECKING:
from etna.datasets import TSDataset
@numba.jit(nopython=True)
def optimal_sse(left: int, right: int, p: np.ndarray, pp: np.ndarray) -> float:
"""
    Count the error of approximating the elements from left to right with a single bin.
Parameters
----------
left:
left border
right:
right border
p:
array of sums of elements, p[i] - sum from first to i elements
pp:
        array of sums of squares of elements, pp[i] - sum of squares from first to i elements
Returns
-------
result: float
approximation error
"""
if left == 0:
avg = p[right]
return pp[right] - avg ** 2 / (right - left + 1)
avg = p[right] - p[left - 1]
return pp[right] - pp[left - 1] - avg ** 2 / (right - left + 1)
@numba.jit(nopython=True)
def adjust_estimation(i: int, k: int, sse: np.ndarray, sse_one_bin: np.ndarray) -> float:
"""
    Count sse[i][k] using binary search.
Parameters
----------
i:
left border of series
k:
number of bins
sse:
array of approximation errors
sse_one_bin:
array of approximation errors with one bin
Returns
-------
result: float
        calculated sse[i][k]
"""
now_evaluated = sse[i - 1][k - 1]
first_evaluated = sse[i - 1][k - 1]
idx_prev = np.inf
idx_now = 0
left = 0
while idx_now != idx_prev:
right = i
idx_prev = idx_now
while right - left > 1:
if sse_one_bin[(left + right) // 2][i] > now_evaluated:
left = (left + right) // 2
else:
right = (left + right) // 2
idx_now = left
now_evaluated = first_evaluated - sse[idx_now][k - 1]
now_min = np.inf
for j in range(idx_now, i):
now = sse[j][k - 1] + sse_one_bin[j + 1][i]
now_min = min(now_min, now)
return now_min
@numba.jit(nopython=True)
def v_optimal_hist(series: np.ndarray, bins_number: int, p: np.ndarray, pp: np.ndarray) -> np.ndarray:
"""
Count an approximation error of a series with [1, bins_number] bins.
http://www.vldb.org/conf/1998/p275.pdf
Parameters
----------
series:
array to count an approximation error with bins_number bins
bins_number:
number of bins
p:
array of sums of elements, p[i] - sum from 0th to i elements
pp:
        array of sums of squares of elements, pp[i] - sum of squares from 0th to i elements
Returns
-------
error: np.ndarray
approximation error of a series with [1, bins_number] bins
"""
sse = np.zeros((len(series), bins_number))
for i in range(len(series)):
sse[i][0] = optimal_sse(0, i, p, pp)
sse_one_bin = np.zeros((len(series), len(series)))
for i in range(len(series)):
for j in range(i, len(series)):
sse_one_bin[i][j] = optimal_sse(i, j, p, pp)
for tmp_bins_number in range(1, bins_number):
for i in range(tmp_bins_number, len(series)):
sse[i][tmp_bins_number] = adjust_estimation(i, tmp_bins_number, sse, sse_one_bin)
return sse
def compute_f(series: np.ndarray, k: int, p: np.ndarray, pp: np.ndarray) -> np.ndarray:
"""
Compute F. F[a][b][k] - minimum approximation error on series[a:b+1] with k outliers.
http://www.vldb.org/conf/1999/P9.pdf
Parameters
----------
series:
array to count F
k:
number of outliers
p:
array of sums of elements, p[i] - sum from 0th to i elements
pp:
        array of sums of squares of elements, pp[i] - sum of squares from 0th to i elements
Returns
-------
result: np.ndarray
array F, outliers_indices
"""
f = np.zeros((len(series), len(series), k + 1))
s: list = [[[[] for i in range(k + 1)] for j in range(len(series))] for s in range(len(series))]
ss: list = [[[[] for i in range(k + 1)] for j in range(len(series))] for s in range(len(series))]
outliers_indices: list = [[[[] for i in range(k + 1)] for j in range(len(series))] for s in range(len(series))]
for right_border in range(0, len(series)):
f[0][right_border][0] = optimal_sse(0, right_border, p, pp)
s[0][right_border][0] = [p[right_border]]
ss[0][right_border][0] = [pp[right_border]]
for left_border in range(1, len(series)):
for right_border in range(left_border, len(series)):
f[left_border][right_border][0] = optimal_sse(left_border, right_border, p, pp)
s[left_border][right_border][0] = [p[right_border] - p[left_border - 1]]
ss[left_border][right_border][0] = [pp[right_border] - pp[left_border - 1]]
for left_border in range(0, len(series)):
for right_border in range(left_border, min(len(series), left_border + k)):
s[left_border][right_border][right_border - left_border + 1] = [0]
ss[left_border][right_border][right_border - left_border + 1] = [0]
outliers_indices[left_border][right_border][right_border - left_border + 1] = [
list(np.arange(left_border, right_border + 1))
]
for left_border in range(len(series)):
for right_border in range(left_border + 1, len(series)):
for outlier_number in range(1, min(right_border - left_border + 1, k + 1)):
f1 = f[left_border][right_border - 1][outlier_number - 1]
tmp_ss = []
tmp_s = []
f2 = []
now_min = np.inf
now_outliers_indices = []
where = 0
for i in range(len(ss[left_border][right_border - 1][outlier_number])):
tmp_ss.append(ss[left_border][right_border - 1][outlier_number][i] + series[right_border] ** 2)
tmp_s.append(s[left_border][right_border - 1][outlier_number][i] + series[right_border])
now_outliers_indices.append(
deepcopy(outliers_indices[left_border][right_border - 1][outlier_number][i])
)
f2.append(tmp_ss[-1] - tmp_s[-1] ** 2 / (right_border - left_border + 1 - outlier_number))
if f2[-1] < now_min:
now_min = f2[-1]
where = i
if f1 < now_min:
f[left_border][right_border][outlier_number] = f1
s[left_border][right_border][outlier_number] = deepcopy(
s[left_border][right_border - 1][outlier_number - 1]
)
ss[left_border][right_border][outlier_number] = deepcopy(
ss[left_border][right_border - 1][outlier_number - 1]
)
outliers_indices[left_border][right_border][outlier_number] = deepcopy(
outliers_indices[left_border][right_border - 1][outlier_number - 1]
)
if len(outliers_indices[left_border][right_border][outlier_number]):
for i in range(len(outliers_indices[left_border][right_border][outlier_number])):
outliers_indices[left_border][right_border][outlier_number][i].append(right_border)
else:
outliers_indices[left_border][right_border][outlier_number].append([right_border])
elif f1 > now_min:
f[left_border][right_border][outlier_number] = f2[where]
s[left_border][right_border][outlier_number] = tmp_s
ss[left_border][right_border][outlier_number] = tmp_ss
outliers_indices[left_border][right_border][outlier_number] = now_outliers_indices
else:
f[left_border][right_border][outlier_number] = f1
tmp_s.extend(s[left_border][right_border - 1][outlier_number - 1])
tmp_ss.extend(ss[left_border][right_border - 1][outlier_number - 1])
s[left_border][right_border][outlier_number] = tmp_s
ss[left_border][right_border][outlier_number] = tmp_ss
tmp = deepcopy(outliers_indices[left_border][right_border - 1][outlier_number - 1])
if len(tmp):
for i in range(len(tmp)):
tmp[i].append(right_border)
else:
tmp = [[right_border]]
outliers_indices[left_border][right_border][outlier_number].extend(now_outliers_indices)
outliers_indices[left_border][right_border][outlier_number].extend(deepcopy(tmp))
return f, outliers_indices
def hist(series: np.ndarray, bins_number: int) -> np.ndarray:
"""
Compute outliers indices according to hist rule.
http://www.vldb.org/conf/1999/P9.pdf
Parameters
----------
series:
        array in which to search for outliers
bins_number:
number of bins
Returns
-------
indices: np.ndarray
outliers indices
"""
approximation_error = np.zeros((len(series), bins_number + 1, bins_number))
anomalies: list = [[[[] for i in range(bins_number)] for j in range(bins_number + 1)] for s in range(len(series))]
p, pp = np.empty_like(series), np.empty_like(series)
p[0] = series[0]
pp[0] = series[0] ** 2
for i in range(1, len(series)):
p[i] = p[i - 1] + series[i]
pp[i] = pp[i - 1] + series[i] ** 2
f, outliers_indices = compute_f(series, bins_number - 1, p, pp)
approximation_error[:, 1:, 0] = v_optimal_hist(series, bins_number, p, pp)
approximation_error[:, 1, :] = f[0]
for right_border in range(len(series)):
for outlier_number in range(1, bins_number):
if len(outliers_indices[0][right_border][outlier_number]):
anomalies[right_border][1][outlier_number] = deepcopy(
outliers_indices[0][right_border][outlier_number][0]
)
for right_border in range(1, len(series)):
for tmp_bins_number in range(2, min(bins_number + 1, right_border + 2)):
            for outlier_number in range(1, min(bins_number, right_border + 2 - tmp_bins_number)): # see the formula above
tmp_approximation_error = approximation_error[:right_border, tmp_bins_number - 1, : outlier_number + 1]
tmp_f = f[1 : right_border + 1, right_border, : outlier_number + 1][:, ::-1]
approximation_error[right_border][tmp_bins_number][outlier_number] = np.min(
tmp_approximation_error + tmp_f
)
where = np.where(
tmp_approximation_error + tmp_f
== approximation_error[right_border][tmp_bins_number][outlier_number]
)
if where[1][0] != outlier_number:
anomalies[right_border][tmp_bins_number][outlier_number].extend(
deepcopy(outliers_indices[1 + where[0][0]][right_border][outlier_number - where[1][0]][0])
)
anomalies[right_border][tmp_bins_number][outlier_number].extend(
deepcopy(anomalies[where[0][0]][tmp_bins_number - 1][where[1][0]])
)
count = 0
now_min = approximation_error[-1][-1][0]
for outlier_number in range(1, min(approximation_error.shape[1], approximation_error.shape[2])):
if approximation_error[-1][approximation_error.shape[1] - 1 - outlier_number][outlier_number] <= now_min:
count = outlier_number
now_min = approximation_error[-1][approximation_error.shape[1] - 1 - outlier_number][outlier_number]
return np.array(sorted(anomalies[-1][approximation_error.shape[1] - 1 - count][count]))
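# Editor's sketch (not part of the original module, output is illustrative only):
# `hist` can be exercised directly on a plain numpy series; it returns the indices of
# the points that a V-optimal histogram would rather drop than spend bins on,
# e.g. the spike at position 3 below:
#   series = np.array([1.0, 1.1, 0.9, 50.0, 1.0, 1.2, 0.8, 1.1])
#   hist(series, bins_number=3)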
def get_anomalies_hist(
ts: "TSDataset", in_column: str = "target", bins_number: int = 10
) -> typing.Dict[str, List[pd.Timestamp]]:
"""
Get point outliers in time series using histogram model.
Outliers are all points that, when removed, result in a histogram with a lower approximation error,
even with the number of bins less than the number of outliers.
Parameters
----------
ts:
TSDataset with timeseries data
in_column:
name of the column in which the anomaly is searching
bins_number:
number of bins
Returns
-------
dict of outliers: typing.Dict[str, typing.List[pd.Timestamp]]
dict of outliers in format {segment: [outliers_timestamps]}
"""
outliers_per_segment = {}
segments = ts.segments
for seg in segments:
segment_df = ts.df[seg].reset_index()
values = segment_df[in_column].values
timestamp = segment_df["timestamp"].values
anomalies = hist(values, bins_number)
outliers_per_segment[seg] = [timestamp[i] for i in anomalies]
return outliers_per_segment
| [((14, 1, 14, 25), 'numba.jit', 'numba.jit', (), '', False, 'import numba\n'), ((42, 1, 42, 25), 'numba.jit', 'numba.jit', (), '', False, 'import numba\n'), ((86, 1, 86, 25), 'numba.jit', 'numba.jit', (), '', False, 'import numba\n'), ((249, 12, 249, 33), 'numpy.empty_like', 'np.empty_like', ({(249, 26, 249, 32): 'series'}, {}), '(series)', True, 'import numpy as np\n'), ((249, 35, 249, 56), 'numpy.empty_like', 'np.empty_like', ({(249, 49, 249, 55): 'series'}, {}), '(series)', True, 'import numpy as np\n'), ((264, 61, 266, 17), 'copy.deepcopy', 'deepcopy', ({(265, 20, 265, 72): 'outliers_indices[0][right_border][outlier_number][0]'}, {}), '(outliers_indices[0][right_border][outlier_number][0])', False, 'from copy import deepcopy\n'), ((273, 85, 275, 17), 'numpy.min', 'np.min', ({(274, 20, 274, 51): 'tmp_approximation_error + tmp_f'}, {}), '(tmp_approximation_error + tmp_f)', True, 'import numpy as np\n'), ((276, 24, 279, 17), 'numpy.where', 'np.where', ({(277, 20, 278, 89): 'tmp_approximation_error + tmp_f == approximation_error[right_border][\n tmp_bins_number][outlier_number]'}, {}), '(tmp_approximation_error + tmp_f == approximation_error[\n right_border][tmp_bins_number][outlier_number])', True, 'import numpy as np\n'), ((165, 21, 165, 61), 'numpy.arange', 'np.arange', ({(165, 31, 165, 42): 'left_border', (165, 44, 165, 60): '(right_border + 1)'}, {}), '(left_border, right_border + 1)', True, 'import numpy as np\n'), ((191, 67, 193, 21), 'copy.deepcopy', 'deepcopy', ({(192, 24, 192, 76): 's[left_border][right_border - 1][outlier_number - 1]'}, {}), '(s[left_border][right_border - 1][outlier_number - 1])', False, 'from copy import deepcopy\n'), ((194, 68, 196, 21), 'copy.deepcopy', 'deepcopy', ({(195, 24, 195, 77): 'ss[left_border][right_border - 1][outlier_number - 1]'}, {}), '(ss[left_border][right_border - 1][outlier_number - 1])', False, 'from copy import deepcopy\n'), ((197, 82, 199, 21), 'copy.deepcopy', 'deepcopy', ({(198, 24, 198, 91): 'outliers_indices[left_border][right_border - 1][outlier_number - 1]'}, {}), '(outliers_indices[left_border][right_border - 1][outlier_number - 1])', False, 'from copy import deepcopy\n'), ((286, 20, 286, 86), 'copy.deepcopy', 'deepcopy', ({(286, 29, 286, 85): 'anomalies[where[0][0]][tmp_bins_number - 1][where[1][0]]'}, {}), '(anomalies[where[0][0]][tmp_bins_number - 1][where[1][0]])', False, 'from copy import deepcopy\n'), ((182, 24, 182, 100), 'copy.deepcopy', 'deepcopy', ({(182, 33, 182, 99): 'outliers_indices[left_border][right_border - 1][outlier_number][i]'}, {}), '(outliers_indices[left_border][right_border - 1][outlier_number][i])', False, 'from copy import deepcopy\n'), ((218, 26, 218, 103), 'copy.deepcopy', 'deepcopy', ({(218, 35, 218, 102): 'outliers_indices[left_border][right_border - 1][outlier_number - 1]'}, {}), '(outliers_indices[left_border][right_border - 1][outlier_number - 1])', False, 'from copy import deepcopy\n'), ((283, 24, 283, 114), 'copy.deepcopy', 'deepcopy', ({(283, 33, 283, 113): 'outliers_indices[1 + where[0][0]][right_border][outlier_number - where[1][0]][0\n ]'}, {}), '(outliers_indices[1 + where[0][0]][right_border][outlier_number -\n where[1][0]][0])', False, 'from copy import deepcopy\n'), ((225, 87, 225, 100), 'copy.deepcopy', 'deepcopy', ({(225, 96, 225, 99): 'tmp'}, {}), '(tmp)', False, 'from copy import deepcopy\n')] |
emanueleleyland/sabd-project2 | aws/securityGroup.py | 387b33443b87e78635d8d6c9a03faadbc90ae9da | def createKafkaSecurityGroup(ec2, vpc):
sec_group_kafka = ec2.create_security_group(
GroupName='kafka', Description='kafka sec group', VpcId=vpc.id)
sec_group_kafka.authorize_ingress(
IpPermissions=[{'IpProtocol': 'icmp', 'FromPort': -1, 'ToPort': -1, 'IpRanges': [{'CidrIp': '0.0.0.0/0'}]},
{'IpProtocol': 'tcp', 'FromPort': 22, 'ToPort': 22, 'IpRanges': [{'CidrIp': '0.0.0.0/0'}]},
{'IpProtocol': 'tcp', 'FromPort': 9092, 'ToPort': 9092, 'IpRanges': [{'CidrIp': '0.0.0.0/0'}]}]
)
print(sec_group_kafka.id)
return sec_group_kafka
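# Editor's note (assumed wiring, not from the original file): these helpers expect a
# boto3 EC2 service resource and an existing Vpc resource, e.g.:
#   import boto3
#   ec2 = boto3.resource('ec2', region_name='us-east-1')
#   vpc = ec2.Vpc('vpc-0123456789abcdef0')
#   kafka_sg = createKafkaSecurityGroup(ec2, vpc)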
def createZookeeperSecurityGroup(ec2, vpc):
sec_group_zookeeper = ec2.create_security_group(
GroupName='zookeeper', Description='zookeeper', VpcId=vpc.id)
sec_group_zookeeper.authorize_ingress(
IpPermissions=[{'IpProtocol': 'icmp', 'FromPort': -1, 'ToPort': -1, 'IpRanges': [{'CidrIp': '0.0.0.0/0'}]},
{'IpProtocol': 'tcp', 'FromPort': 22, 'ToPort': 22, 'IpRanges': [{'CidrIp': '0.0.0.0/0'}]},
{'IpProtocol': 'tcp', 'FromPort': 2181, 'ToPort': 2181, 'IpRanges': [{'CidrIp': '0.0.0.0/0'}]},
{'IpProtocol': 'tcp', 'FromPort': 2888, 'ToPort': 2888, 'IpRanges': [{'CidrIp': '0.0.0.0/0'}]},
{'IpProtocol': 'tcp', 'FromPort': 3888, 'ToPort': 3888, 'IpRanges': [{'CidrIp': '0.0.0.0/0'}]}]
)
print(sec_group_zookeeper.id)
return sec_group_zookeeper
def create_redis_security_group(ec2, vpc):
sec_group_redis = ec2.create_security_group(
GroupName='redis', Description='redis', VpcId=vpc.id)
sec_group_redis.authorize_ingress(
IpPermissions=[{'IpProtocol': 'icmp', 'FromPort': -1, 'ToPort': -1, 'IpRanges': [{'CidrIp': '0.0.0.0/0'}]},
{'IpProtocol': 'tcp', 'FromPort': 22, 'ToPort': 22, 'IpRanges': [{'CidrIp': '0.0.0.0/0'}]},
{'IpProtocol': 'tcp', 'FromPort': 6379, 'ToPort': 6379, 'IpRanges': [{'CidrIp': '0.0.0.0/0'}]}]
)
print(sec_group_redis.id)
return sec_group_redis | [] |
petermirithu/hooby_lab | virtual/lib/python3.6/site-packages/django_pusher/context_processors.py | ffd641948bc2d2539649ec747114c78b5ad105e7 | from django.conf import settings
def pusher(request):
return {
"PUSHER_KEY": getattr(settings, "PUSHER_KEY", ""),
}
| [] |
tiltowait/inconnu | inconnu/character/update/parse.py | 6cca5fed520899d159537701b695c94222d8dc45 | """character/update/parse.py - Defines an interface for updating character traits."""
# pylint: disable=too-many-arguments
import re
import discord
from discord_ui.components import LinkButton
from . import paramupdate
from ..display import display
from ... import common, constants
from ...log import Log
from ...vchar import VChar
__MATCHES = {}
__KEYS = {
"name": "The character's name",
"health": "The character's max Health",
"willpower": "The character's max Willpower",
"humanity": "The character's Humanity",
"splat": "The type of character: `vampire`, `mortal`, or `ghoul`",
"sh": "+/- Superficial Health damage",
"ah": "+/- Aggravated Health damage",
"sw": "+/- Superficial Willpower damage",
"aw": "+/- Aggravated Willpower damage",
"stains": "+/- Stains",
"unspent_xp": "+/- Unspent XP",
"lifetime_xp": "+/- Total Lifetime XP",
"hunger": "+/- The character's Hunger",
"potency": "+/- The character's Blood Potency"
}
__HELP_URL = "https://www.inconnu-bot.com/#/character-tracking?id=tracker-updates"
async def update(
ctx, parameters: str, character=None, color=None, update_message=None, player=None
):
"""
Process the user's arguments.
Allow the user to omit a character if they have only one.
"""
args = re.sub(r":", r"=", parameters) # Some people think colons work ...
args = re.sub(r"(\w)\s*([+-])\s*(\w)", r"\g<1>=\g<2>\g<3>", args) # Stop the sh+3 madness
args = re.sub(r"\s*([+-])\s*=\s*", r"=\g<1>", args) # Let +/-= work, for the CS nerds
args = re.sub(r"\s*=\s*([+-])\s*", r"=\g<1>", args) # Remove gaps between keys and values
args = list(args.split()) # To allow element removal
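    # Editor's illustration (hypothetical input, not from the source): the chain above
    # turns shorthand such as "sh+3 hunger:2" into ["sh=+3", "hunger=2"] before the
    # key=value pairs are parsed below.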
if len(args) == 0:
await update_help(ctx)
return
try:
owner = await common.player_lookup(ctx, player)
tip = f"`/character update` `parameters:{parameters}` `character:CHARACTER`"
character = await common.fetch_character(ctx, character, tip, __HELP_URL, owner=owner)
parameters = __parse_arguments(*args)
updates = []
for parameter, new_value in parameters.items():
update_msg = __update_character(character, parameter, new_value)
updates.append(update_msg)
Log.log("update",
user=ctx.author.id,
guild=ctx.guild.id,
charid=character.id,
syntax=" ".join(args)
)
# Ignore generated output if we got a custom message
if update_message is None:
update_message = "\n".join(updates)
await display(ctx, character, color=color, owner=player, message=update_message)
except (SyntaxError, ValueError) as err:
Log.log("update_error",
user=ctx.author.id,
guild=ctx.guild.id,
charid=character.id,
syntax=" ".join(args)
)
await update_help(ctx, err)
except LookupError as err:
await common.present_error(ctx, err, help_url=__HELP_URL)
except common.FetchError:
pass
def __parse_arguments(*arguments):
"""
Parse the user's arguments.
    Raises SyntaxError or ValueError on malformed input.
"""
if len(arguments) == 0:
raise ValueError("You must supply some parameters!")
parameters = {}
for argument in arguments:
split = argument.split("=")
key = split[0].lower()
if len(split) != 2:
err = "Parameters must be in `key = value` pairs."
if key not in __KEYS:
err += f" Also, `{key}` is not a valid option."
raise SyntaxError(err)
if key in parameters:
raise ValueError(f"You cannot use `{key}` more than once.")
if key not in __MATCHES:
raise ValueError(f"Unknown parameter: `{key}`.")
key = __MATCHES[key] # Get the canonical key
value = split[1]
if len(value) == 0:
raise ValueError(f"No value given for `{key}`.")
parameters[key] = value # Don't do any validation here
return parameters
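# Editor's sketch (not in the original file): given the keys registered by
# __setup_matches() at the bottom of this module,
# __parse_arguments("sh=+3", "hunger=2") returns {"sh": "+3", "hunger": "2"}.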
def __update_character(character: VChar, param: str, value: str) -> str:
"""
Update one of a character's parameters.
Args:
character (VChar): The character being updated
param (str): The parameter to update
value (str): The parameter's new value
Raises ValueError if the parameter's value is invalid.
"""
return getattr(paramupdate, f"update_{param}")(character, value)
async def update_help(ctx, err=None, hidden=True):
"""Display a help message that details the available keys."""
embed = discord.Embed(
title="Character Tracking",
)
embed.set_author(name=ctx.author.display_name, icon_url=ctx.author.display_avatar)
if err is not None:
embed.add_field(name="Error", value=str(err), inline=False)
inst = "To update a character, use `/character update` with one or more `KEY=VALUE` pairs."
embed.add_field(name="Instructions", value=inst, inline=False)
parameters = [f"***{key}:*** {val}" for key, val in __KEYS.items()]
parameters = "\n".join(parameters)
embed.add_field(name="Keys", value=parameters, inline=False)
embed.add_field(
name="Example",
value="Character takes 4 Superficial Health damage:```/character update parameters:sh+4```"
)
embed.set_footer(text="You may modify more than one tracker at a time.")
documentation = LinkButton(
"http://www.inconnu-bot.com/#/character-tracking?id=tracker-updates",
label="Full Documentation"
)
support = LinkButton(constants.SUPPORT_URL, "Support")
await ctx.respond(embed=embed, components=[documentation, support], hidden=hidden)
# We do flexible matching for the keys. Many of these are the same as RoD's
# keys, while others have been observed in syntax error logs. This should be
# a little more user-friendly.
def __setup_matches():
"""Register all the update keys."""
__register_keys("name")
__register_keys("health", "hp")
__register_keys("willpower", "wp", "w")
__register_keys("humanity", "hm")
__register_keys("splat", "type")
__register_keys(
"sh", "sd", "shp", "suphp", "suph", "supd", "superficialhealth",
"superficialdamage"
)
__register_keys("ah", "ad", "ahp", "agghp", "aggd", "aggh", "agghealth", "aggdamage")
__register_keys("sw", "swp", "supwp", "supw", "superficialwillpower")
__register_keys("aw", "awp", "aggwp", "aggw", "aggwillpower")
__register_keys("stains", "stain", "s")
__register_keys(
"current_xp", "xp_current", "current_exp", "exp_current", "currentxp",
"currentexp", "xpcurrent", "expcurrent", "cxp",
"unspent_xp", "xp_unspent", "unspent_exp", "exp_unspent", "unspentxp",
"unspentexp", "xpunspent", "expunspent", "uxp"
)
__register_keys(
"total_xp", "xp_total", "total_exp", "exp_total", "totalxp",
"totalexp", "xptotal", "exptotal", "txp",
"lifetimexp", "xplifetime", "explifetime", "lxp", "lifetime_xp", "life_time_xp"
)
__register_keys("hunger", "h")
__register_keys("potency", "bp", "p")
def __register_keys(canonical, *alternates):
"""Register an update key along with some alternates."""
__MATCHES[canonical] = canonical
for alternate in alternates:
if alternate in __MATCHES:
raise KeyError(f"{alternate} is already an update parameter.")
__MATCHES[alternate] = canonical
__setup_matches()
| [((44, 11, 44, 41), 're.sub', 're.sub', ({(44, 18, 44, 22): '""":"""', (44, 24, 44, 28): '"""="""', (44, 30, 44, 40): 'parameters'}, {}), "(':', '=', parameters)", False, 'import re\n'), ((45, 11, 45, 69), 're.sub', 're.sub', ({(45, 18, 45, 41): '"""(\\\\w)\\\\s*([+-])\\\\s*(\\\\w)"""', (45, 43, 45, 62): '"""\\\\g<1>=\\\\g<2>\\\\g<3>"""', (45, 64, 45, 68): 'args'}, {}), "('(\\\\w)\\\\s*([+-])\\\\s*(\\\\w)', '\\\\g<1>=\\\\g<2>\\\\g<3>', args)", False, 'import re\n'), ((46, 11, 46, 55), 're.sub', 're.sub', ({(46, 18, 46, 37): '"""\\\\s*([+-])\\\\s*=\\\\s*"""', (46, 39, 46, 48): '"""=\\\\g<1>"""', (46, 50, 46, 54): 'args'}, {}), "('\\\\s*([+-])\\\\s*=\\\\s*', '=\\\\g<1>', args)", False, 'import re\n'), ((47, 11, 47, 55), 're.sub', 're.sub', ({(47, 18, 47, 37): '"""\\\\s*=\\\\s*([+-])\\\\s*"""', (47, 39, 47, 48): '"""=\\\\g<1>"""', (47, 50, 47, 54): 'args'}, {}), "('\\\\s*=\\\\s*([+-])\\\\s*', '=\\\\g<1>', args)", False, 'import re\n'), ((145, 12, 147, 5), 'discord.Embed', 'discord.Embed', (), '', False, 'import discord\n'), ((166, 20, 169, 5), 'discord_ui.components.LinkButton', 'LinkButton', (), '', False, 'from discord_ui.components import LinkButton\n'), ((170, 14, 170, 58), 'discord_ui.components.LinkButton', 'LinkButton', ({(170, 25, 170, 46): 'constants.SUPPORT_URL', (170, 48, 170, 57): '"""Support"""'}, {}), "(constants.SUPPORT_URL, 'Support')", False, 'from discord_ui.components import LinkButton\n')] |
smadha/MlTrio | src/models/train_search_multi_deep.py | a7269fc4c6d77b2f71432ab9d2ab8fe4e28234d5 | '''
Uses flattened features in the feature directory and runs a neural-network grid search on them
'''
from keras.layers import Dense
from keras.models import Sequential
import keras.regularizers as Reg
from keras.optimizers import SGD, RMSprop
from keras.callbacks import EarlyStopping
import cPickle as pickle
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.metrics import f1_score
import theano
from models.down_sampling import balanced_subsample
theano.config.openmp = True
OMP_NUM_THREADS=16
users_va_te_dict = dict([ (v,idx) for (idx,v) in enumerate(pickle.load(open("../../bytecup2016data/users_va_te.p"))) ])
print "users_va_te_dict created ", len(users_va_te_dict)
def normalize(X_tr):
''' Normalize training and test data features
Args:
X_tr: Unnormalized training features
Output:
X_tr: Normalized training features
'''
X_mu = np.mean(X_tr, axis=0)
X_tr = X_tr - X_mu
X_sig = np.std(X_tr, axis=0)
X_tr = X_tr/X_sig
return X_tr, X_mu, X_sig
def genmodel(num_units, actfn='relu', reg_coeff=0.0, last_act='softmax'):
''' Generate a neural network model of approporiate architecture
Args:
num_units: architecture of network in the format [n1, n2, ... , nL]
actfn: activation function for hidden layers ('relu'/'sigmoid'/'linear'/'softmax')
reg_coeff: L2-regularization coefficient
last_act: activation function for final layer ('relu'/'sigmoid'/'linear'/'softmax')
Output:
model: Keras sequential model with appropriate fully-connected architecture
'''
model = Sequential()
for i in range(1, len(num_units)):
if i == 1 and i < len(num_units) - 1:
model.add(Dense(input_dim=num_units[0], output_dim=num_units[i], activation=actfn,
W_regularizer=Reg.l2(l=reg_coeff), init='glorot_normal'))
elif i == 1 and i == len(num_units) - 1:
model.add(Dense(input_dim=num_units[0], output_dim=num_units[i], activation=last_act,
W_regularizer=Reg.l2(l=reg_coeff), init='glorot_normal'))
elif i < len(num_units) - 1:
model.add(Dense(output_dim=num_units[i], activation=actfn,
W_regularizer=Reg.l2(l=reg_coeff), init='glorot_normal'))
elif i == len(num_units) - 1:
model.add(Dense(output_dim=num_units[i], activation=last_act,
W_regularizer=Reg.l2(l=reg_coeff), init='glorot_normal'))
return model
def transform_label(labels):
labels_new_arr = []
for idx,label in enumerate(labels):
label_new = [0] * len(users_va_te_dict) * 2
        # two one-hot slots per user: even index for label '0', odd index for label '1'
        if label[1] == '0' :
            label_new[ 2 * users_va_te_dict[label[0]] ] = 1
        else :
            label_new[ 2 * users_va_te_dict[label[0]] + 1 ] = 1
labels_new_arr.append(label_new)
# if (idx+1) % 1000 == 0:
# break
print "labels_new_arr created" , len(labels_new_arr)
return labels_new_arr
def original_label(label):
return [ l.index(1) for l in label]
def get_transform_label():
'''
    Reads the training interactions and returns a tuple:
    - a list of one-hot label vectors of length 2 * len(users_va_te_dict),
      with the slot for the (user, label) pair set to 1
    - the list of row indices whose user is not in users_va_te_dict (to delete from the features)
'''
count = 0
users_order = []
##features to be deletd
del_rows = []
with open("../../bytecup2016data/invited_info_train_PROC.txt","r") as f:
training_data = f.readline().strip().split("\t")
while training_data and len(training_data) >= 2 :
user_id = training_data[1]
label = training_data[2]
if user_id in users_va_te_dict:
users_order.append((user_id,label) )
else:
del_rows.append(count)
count += 1
training_data = f.readline().strip().split("\t")
f.close()
print "users_order created ", len(users_order), len(del_rows)
return transform_label(users_order), del_rows
features = pickle.load( open("../feature_engg/feature/all_features.p", "rb") )
labels, del_rows = get_transform_label()
# features = np.random.normal(size=(26796,3))
# labels, del_rows = get_transform_label()
print len(features),len(features[0])
print len(labels),len(labels[0])
features = np.array(features)
features = np.delete(features, del_rows, axis=0)
col_deleted = np.nonzero((features==0).sum(axis=0) > (len(features)-1000))
# col_deleted = col_deleted[0].tolist() + range(6,22) + range(28,44)
print col_deleted
features = np.delete(features, col_deleted, axis=1)
print len(features),len(features[0])
print len(labels),len(labels[0])
features, X_mu, X_sig = normalize(features)
save_res = {"col_deleted":col_deleted,"X_mu":X_mu,"X_sig":X_sig}
with open("model/train_config", 'wb') as pickle_file:
pickle.dump(save_res, pickle_file, protocol=2)
print "Dumped config"
momentum = 0.99
eStop = True
sgd_Nesterov = True
sgd_lr = 1e-5
batch_size=5000
nb_epoch=100
verbose=True
features_tr, features_te, labels_tr, labels_te = train_test_split(features, labels, train_size = 0.85)
# free the full copies once the split holds the data
features, labels = [], []
print "Using separate test data", len(features_tr), len(features_te)
def run_NN(arch, reg_coeff, sgd_decay, subsample_size=0, save=False):
# features_tr, labels_tr = balanced_subsample(features_tr, original_label(labels_tr), subsample_size = subsample_size)
# labels_tr = transform_label(labels_tr)
# print "Training data balanced-", features_tr.shape, len(labels_tr)
call_ES = EarlyStopping(monitor='val_acc', patience=3, verbose=1, mode='auto')
# Generate Model
model = genmodel(num_units=arch, reg_coeff=reg_coeff )
# Compile Model
sgd = SGD(lr=sgd_lr, decay=sgd_decay, momentum=momentum,
nesterov=sgd_Nesterov)
# sgd = RMSprop(lr=sgd_lr, rho=0.9, epsilon=1e-08, decay=sgd_decay)
model.compile(loss='MSE', optimizer=sgd,
metrics=['accuracy'])
# Train Model
if eStop:
model.fit(features_tr, labels_tr, nb_epoch=nb_epoch, batch_size=batch_size,
verbose=verbose, callbacks=[call_ES], validation_split=0.1,
validation_data=None, shuffle=True)
else:
model.fit(features_tr, labels_tr, nb_epoch=nb_epoch, batch_size=batch_size,
verbose=verbose)
labels_pred = model.predict_classes(features_te)
print len(labels_te[0]), labels_pred[0]
y_true, y_pred = original_label(labels_te), labels_pred
print y_true[0], y_pred[0]
print "arch, reg_coeff, sgd_decay, subsample_size", arch, reg_coeff, sgd_decay, subsample_size
macro_rep = f1_score(y_true, y_pred, average = 'macro')
print "macro", macro_rep
weighted_report = f1_score(y_true, y_pred, average = 'weighted')
print "weighted", weighted_report
with open("results_search_multi_deep.txt", "a") as f:
f.write("macro_rep- "+str(macro_rep))
f.write("\n")
f.write("weighted_report- "+str(weighted_report))
f.write("\n")
f.write(" ".join([str(s) for s in ["arch, reg_coeff, sgd_decay, subsample_size", arch, reg_coeff, sgd_decay, subsample_size]]))
f.write("\n")
if save:
# Save model
model.save("model/model_deep.h5")
print("Saved model to disk")
arch_range = [[len(features_tr[0]),1024,len(labels_tr[0])], [len(features_tr[0]),1024,512,len(labels_tr[0])], [len(features_tr[0]),1024,1024,len(labels_tr[0])],[len(features_tr[0]),1024,512,256,len(labels_tr[0])]]
reg_coeffs_range = [1e-6, 5e-6, 1e-5, 5e-5, 5e-4 ]
sgd_decays_range = [1e-6, 1e-5, 5e-5, 1e-4, 5e-4 ]
class_weight_0_range = [1]
# subsample_size_range = [2,2.5,3]
#GRID SEARCH ON BEST PARAM
for arch in arch_range:
for reg_coeff in reg_coeffs_range:
for sgd_decay in sgd_decays_range:
# for subsample_size in subsample_size_range:
run_NN(arch, reg_coeff, sgd_decay)
# arch = [len(features[0]),1024,512,2]
# reg_coeff = 1e-05
# sgd_decay = 1e-05
# class_weight_0 = 0.5
| [] |
graham-kim/pygremlin-graph-visualiser | formation.py | 65cb4d4fb71c8dde46ff1a36a40adcbdf233448c | import sys
import os
sys.path.append( os.path.dirname(__file__) )
import numpy as np
import typing as tp
import angles
from model import Node, Link, Label
from spec import ArrowDraw, NodeSpec
class FormationManager:
def __init__(self):
self._nodes = {}
self._links = []
self._labels = []
@property
def nodes(self) -> tp.List[Node]:
return [n for n in self._nodes.values()]
@property
def links(self) -> tp.List[Link]:
return self._links
@property
def labels(self) -> tp.List[Link]:
return self._labels
def _id_if_str(self, node: tp.Tuple[str, int]) -> int:
if isinstance(node, int):
return node
else:
return self.id_of(node)
def text_of(self, node_id: int) -> str:
if not isinstance(node_id, int):
raise TypeError("Expected node_id to be int: {}".format(node_id))
return self._nodes[node_id].text
def pos_of(self, node_id: tp.Tuple[str, int]) -> np.array:
node_id = self._id_if_str(node_id)
return np.array(self._nodes[node_id].pos)
def pos_perp_to(self, from_id: int, to_id: int, shift_breadth: int, to_left: bool) -> np.array:
from_vec2 = np.array(self._nodes[from_id].pos)
to_vec2 = np.array(self._nodes[to_id].pos)
rel_vec2 = to_vec2 - from_vec2
flipped_y_unit_rel = angles.flip_y( angles.unit(rel_vec2) )
if to_left:
rotated_dir = angles.flip_y( \
angles.rotate_vector_to_left_by_90_deg( flipped_y_unit_rel ) )
else:
rotated_dir = angles.flip_y( \
angles.rotate_vector_to_right_by_90_deg( flipped_y_unit_rel ) )
return (from_vec2 + rel_vec2 / 2 + rotated_dir * shift_breadth).astype(int)
def id_of(self, text: str) -> int:
if not isinstance(text, str):
raise TypeError("{} should be a string".format(text))
ans = []
for key in self._nodes.keys():
if text == self._nodes[key].text:
ans.append(key)
if len(ans) == 0:
raise ValueError("No node has this text: {}".format(text))
elif len(ans) == 1:
return ans[0]
else:
raise ValueError("More than one node has the text {}: {}".format(text, ans))
def add_node(self, text: str, pos: tp.Tuple[int, int], colour: str="green", multibox: bool = False) -> int:
new_node = Node(text, pos, colour, multibox)
new_id = id(new_node)
self._nodes[new_id] = new_node
return new_id
def add_label(self, text: str, pos: tp.Tuple[int, int], colour: str="red"):
self._labels.append( Label(text, pos, colour) )
def add_link(self, from_id: tp.Tuple[str, int], to_id: tp.Tuple[str, int], colour: str="black", \
arrow_draw: ArrowDraw = ArrowDraw.FWD_ARROW, link_2_col: tp.Optional[str] = None):
self._links.append( Link(self._id_if_str(from_id), self._id_if_str(to_id), colour, arrow_draw, link_2_col) )
def add_dual_link(self, from_id: tp.Tuple[str, int], to_id: tp.Tuple[str, int], colour: str="black", \
second_colour: str="black"):
self.add_link(from_id, to_id, colour, ArrowDraw.DUAL_LINK, second_colour)
def add_linked_node(self, from_id: tp.Tuple[str, int], pos: tp.Tuple[int, int], spec: NodeSpec) -> int:
new_id = self.add_node(spec.text, pos, spec.node_col, spec.multibox)
self.add_link(from_id, new_id, spec.link_col, spec.link_draw, spec.link_2_col)
return new_id
def add_daisy_chain_links(self, nodes: tp.List[tp.Tuple[str, int]], arrow_draw: ArrowDraw = ArrowDraw.FWD_ARROW, \
link_col: str="black", link_2_col: tp.Optional[str] = None):
if not isinstance(nodes, list):
raise TypeError("Expected a list for nodes: {}".format(nodes))
if len(nodes) < 2:
raise ValueError("Expected at least 2 nodes, got {}".format(len(nodes)))
for i, item in enumerate(nodes[1:]):
prev_node = self._id_if_str(nodes[i]) # i is already the previous index
this_node = self._id_if_str(item)
self.add_link(prev_node, this_node, link_col, arrow_draw, link_2_col)
def add_depth_line_of_linked_nodes(self, start_id: tp.Tuple[str, int], dir: tp.Tuple[int, int], \
link_length: int, \
node_specs: tp.List[tp.Optional[NodeSpec]] \
) -> tp.List[int]:
added_ids = []
start_id = self._id_if_str(start_id)
start_pos = angles.vec2(self._nodes[start_id].pos)
unit_dir = angles.unit( dir )
count = 1
from_id = start_id
for spec in node_specs:
if spec is not None:
pos = start_pos + unit_dir * link_length * count
new_id = self.add_node(spec.text, pos, spec.node_col, spec.multibox)
if spec.link_draw == ArrowDraw.BACK_ARROW:
self.add_link(new_id, from_id, spec.link_col, ArrowDraw.FWD_ARROW, None)
elif spec.link_draw != ArrowDraw.NO_LINK:
self.add_link(from_id, new_id, spec.link_col, spec.link_draw, spec.link_2_col)
added_ids.append(new_id)
from_id = new_id
count += 1
return added_ids
def add_rail_of_nodes(self, start_coord: tp.Tuple[int, int], dir: tp.Tuple[int, int], \
link_length: int, \
node_specs: tp.List[tp.Optional[NodeSpec]] \
) -> tp.List[int]:
num_specs = len(node_specs)
if num_specs < 2:
raise ValueError("node_specs must have at least 2 elements")
if node_specs[0] is None or node_specs[-1] is None:
raise ValueError("The first and last item of node_specs must not be None")
first_id = self.add_node(node_specs[0].text, start_coord, \
node_specs[0].node_col, node_specs[0].multibox)
added_ids = [first_id]
new_ids = self.add_depth_line_of_linked_nodes(first_id, dir, link_length, node_specs[1:])
added_ids.extend(new_ids)
return added_ids
def add_breadth_line_of_sibling_nodes(self, parent_id: tp.Tuple[str, int], start_coord: tp.Tuple[int, int], \
end_coord: tp.Tuple[int, int], \
node_specs: tp.List[tp.Optional[NodeSpec]] \
) -> tp.List[int]:
num_specs = len(node_specs)
parent_id = self._id_if_str(parent_id)
if num_specs < 2:
raise ValueError("node_specs must have at least 2 elements")
if node_specs[0] is None or node_specs[-1] is None:
raise ValueError("The first and last item of node_specs must not be None")
added_ids = []
start_vec2 = angles.vec2(start_coord)
end_vec2 = angles.vec2(end_coord)
rel_vec2 = end_vec2 - start_vec2
count = 0
for spec in node_specs:
if spec is not None:
pos = start_vec2 + rel_vec2 * count / (num_specs - 1)
new_id = self.add_node(spec.text, pos, spec.node_col, spec.multibox)
if spec.link_draw == ArrowDraw.BACK_ARROW:
self.add_link(new_id, parent_id, spec.link_col, ArrowDraw.FWD_ARROW, None)
elif spec.link_draw != ArrowDraw.NO_LINK:
self.add_link(parent_id, new_id, spec.link_col, spec.link_draw, spec.link_2_col)
added_ids.append(new_id)
count += 1
return added_ids
def add_breadth_line_centered_on(self, parent_id: tp.Tuple[str, int], center_coord: tp.Tuple[int, int], \
link_length: int, node_specs: tp.List[tp.Optional[NodeSpec]] \
) -> tp.List[int]:
num_specs = len(node_specs)
if num_specs < 2:
raise ValueError("node_specs must have at least 2 elements")
parent_pos = self.pos_of(parent_id)
rel_vec2 = angles.vec2(center_coord) - parent_pos
rotated_vec2 = angles.flip_y( \
angles.rotate_vector_to_left_by_90_deg( \
angles.flip_y( angles.unit(rel_vec2) )))
half_total_length = link_length * float(num_specs-1) / 2.0
start_coord = center_coord + rotated_vec2 * half_total_length
end_coord = center_coord - rotated_vec2 * half_total_length
return self.add_breadth_line_of_sibling_nodes(parent_id, start_coord, end_coord, node_specs)
def add_arc_of_sibling_nodes(self, parent_id: tp.Tuple[str, int], radius: int, start_dir_coord: tp.Tuple[int, int], \
end_dir_coord: tp.Tuple[int, int], clockwise: bool, \
node_specs: tp.List[tp.Optional[NodeSpec]] \
) -> tp.List[int]:
parent_id = self._id_if_str(parent_id)
num_specs = len(node_specs)
if num_specs < 2:
raise ValueError("node_specs must have at least 2 elements")
if node_specs[0] is None or node_specs[-1] is None:
raise ValueError("The first and last item of node_specs must not be None")
added_ids = []
parent_pos = self._nodes[parent_id].pos
parent_vec2 = angles.vec2(parent_pos)
start_vec2 = angles.vec2(start_dir_coord) - parent_vec2
end_vec2 = angles.vec2(end_dir_coord) - parent_vec2
start_bear_rad = angles.get_bearing_rad_of( angles.flip_y(start_vec2) )
end_bear_rad = angles.get_bearing_rad_of( angles.flip_y(end_vec2) )
bear_diff_rad = angles.normalise_angle(end_bear_rad - start_bear_rad)
if clockwise:
bear_diff_rad = angles.flip_angle(bear_diff_rad)
count = 0
for spec in node_specs:
if spec is not None:
rotate_anticlockwise_by = bear_diff_rad * count / (num_specs - 1)
if clockwise:
rotate_anticlockwise_by *= -1
dir_vec = angles.flip_y( \
angles.get_unit_vector_after_rotating( \
angles.flip_y(start_vec2), rotate_anticlockwise_by ))
pos = parent_pos + dir_vec * radius
new_id = self.add_node(spec.text, pos, spec.node_col, spec.multibox)
if spec.link_draw == ArrowDraw.BACK_ARROW:
self.add_link(new_id, parent_id, spec.link_col, ArrowDraw.FWD_ARROW, None)
elif spec.link_draw != ArrowDraw.NO_LINK:
self.add_link(parent_id, new_id, spec.link_col, spec.link_draw, spec.link_2_col)
added_ids.append(new_id)
count += 1
return added_ids
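# Editor's sketch (not part of the original module; coordinates are illustrative):
#   fm = FormationManager()
#   a = fm.add_node("start", (100, 100))
#   b = fm.add_node("end", (300, 100))
#   fm.add_link(a, b)
#   fm.add_label("demo", (200, 60))
# The resulting fm.nodes / fm.links / fm.labels are what a renderer would consume.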
| [((4, 17, 4, 42), 'os.path.dirname', 'os.path.dirname', ({(4, 33, 4, 41): '__file__'}, {}), '(__file__)', False, 'import os\n'), ((45, 15, 45, 49), 'numpy.array', 'np.array', ({(45, 24, 45, 48): 'self._nodes[node_id].pos'}, {}), '(self._nodes[node_id].pos)', True, 'import numpy as np\n'), ((48, 20, 48, 54), 'numpy.array', 'np.array', ({(48, 29, 48, 53): 'self._nodes[from_id].pos'}, {}), '(self._nodes[from_id].pos)', True, 'import numpy as np\n'), ((49, 18, 49, 50), 'numpy.array', 'np.array', ({(49, 27, 49, 49): 'self._nodes[to_id].pos'}, {}), '(self._nodes[to_id].pos)', True, 'import numpy as np\n'), ((78, 19, 78, 52), 'model.Node', 'Node', ({(78, 24, 78, 28): 'text', (78, 30, 78, 33): 'pos', (78, 35, 78, 41): 'colour', (78, 43, 78, 51): 'multibox'}, {}), '(text, pos, colour, multibox)', False, 'from model import Node, Link, Label\n'), ((120, 20, 120, 58), 'angles.vec2', 'angles.vec2', ({(120, 32, 120, 57): 'self._nodes[start_id].pos'}, {}), '(self._nodes[start_id].pos)', False, 'import angles\n'), ((121, 19, 121, 37), 'angles.unit', 'angles.unit', ({(121, 32, 121, 35): 'dir'}, {}), '(dir)', False, 'import angles\n'), ((172, 21, 172, 45), 'angles.vec2', 'angles.vec2', ({(172, 33, 172, 44): 'start_coord'}, {}), '(start_coord)', False, 'import angles\n'), ((173, 19, 173, 41), 'angles.vec2', 'angles.vec2', ({(173, 31, 173, 40): 'end_coord'}, {}), '(end_coord)', False, 'import angles\n'), ((226, 22, 226, 45), 'angles.vec2', 'angles.vec2', ({(226, 34, 226, 44): 'parent_pos'}, {}), '(parent_pos)', False, 'import angles\n'), ((233, 24, 233, 77), 'angles.normalise_angle', 'angles.normalise_angle', ({(233, 47, 233, 76): 'end_bear_rad - start_bear_rad'}, {}), '(end_bear_rad - start_bear_rad)', False, 'import angles\n'), ((51, 44, 51, 65), 'angles.unit', 'angles.unit', ({(51, 56, 51, 64): 'rel_vec2'}, {}), '(rel_vec2)', False, 'import angles\n'), ((84, 29, 84, 53), 'model.Label', 'Label', ({(84, 35, 84, 39): 'text', (84, 41, 84, 44): 'pos', (84, 46, 84, 52): 'colour'}, {}), '(text, pos, colour)', False, 'from model import Node, Link, Label\n'), ((199, 19, 199, 44), 'angles.vec2', 'angles.vec2', ({(199, 31, 199, 43): 'center_coord'}, {}), '(center_coord)', False, 'import angles\n'), ((228, 21, 228, 49), 'angles.vec2', 'angles.vec2', ({(228, 33, 228, 48): 'start_dir_coord'}, {}), '(start_dir_coord)', False, 'import angles\n'), ((229, 19, 229, 45), 'angles.vec2', 'angles.vec2', ({(229, 31, 229, 44): 'end_dir_coord'}, {}), '(end_dir_coord)', False, 'import angles\n'), ((231, 52, 231, 77), 'angles.flip_y', 'angles.flip_y', ({(231, 66, 231, 76): 'start_vec2'}, {}), '(start_vec2)', False, 'import angles\n'), ((232, 50, 232, 73), 'angles.flip_y', 'angles.flip_y', ({(232, 64, 232, 72): 'end_vec2'}, {}), '(end_vec2)', False, 'import angles\n'), ((235, 28, 235, 60), 'angles.flip_angle', 'angles.flip_angle', ({(235, 46, 235, 59): 'bear_diff_rad'}, {}), '(bear_diff_rad)', False, 'import angles\n'), ((54, 28, 54, 88), 'angles.rotate_vector_to_left_by_90_deg', 'angles.rotate_vector_to_left_by_90_deg', ({(54, 68, 54, 86): 'flipped_y_unit_rel'}, {}), '(flipped_y_unit_rel)', False, 'import angles\n'), ((57, 28, 57, 89), 'angles.rotate_vector_to_right_by_90_deg', 'angles.rotate_vector_to_right_by_90_deg', ({(57, 69, 57, 87): 'flipped_y_unit_rel'}, {}), '(flipped_y_unit_rel)', False, 'import angles\n'), ((202, 31, 202, 52), 'angles.unit', 'angles.unit', ({(202, 43, 202, 51): 'rel_vec2'}, {}), '(rel_vec2)', False, 'import angles\n'), ((245, 32, 245, 57), 'angles.flip_y', 'angles.flip_y', ({(245, 46, 245, 56): 
'start_vec2'}, {}), '(start_vec2)', False, 'import angles\n')] |
MomsFriendlyRobotCompany/opencv_camera | opencv_camera/parameters/utils.py | 046d779a853ef0117c0177c03a6fd81f361a9dd3 | ##############################################
# The MIT License (MIT)
# Copyright (c) 2014 Kevin Walchko
# see LICENSE for full details
##############################################
# -*- coding: utf-8 -*
from math import atan, pi
def fov(w,f):
"""
Returns the FOV as in degrees, given:
w: image width (or height) in pixels
f: focalLength (fx or fy) in pixels
"""
return 2*atan(w/2/f) * 180/pi
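# Editor's example (not in the original file): an image 640 px wide with a focal
# length of 320 px gives 2*atan(320/320) = 90 degrees of horizontal FOV:
#   fov(640, 320)  # -> 90.0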
| [((16, 17, 16, 28), 'math.atan', 'atan', ({(16, 22, 16, 27): '(w / 2 / f)'}, {}), '(w / 2 / f)', False, 'from math import atan, pi\n')] |
SamanFekri/BookRecommendation | Code_Hybrid_SLIMBPR_CBF_RP3Beta.py | 07dfa875154af39546cb263d4407339ce26d47e8 | # This Python 3 environment comes with many helpful analytics libraries installed
# It is defined by the kaggle/python Docker image: https://github.com/kaggle/docker-python
# For example, here's several helpful packages to load
import numpy as np # linear algebra
import pandas as pd # data processing, CSV file I/O (e.g. pd.read_csv)
import scipy.sparse as sps
import time
RM_train=pd.read_csv('./input/data_train.csv')
R_test=pd.read_csv('./input/data_target_users_test.csv')
URM=pd.read_csv('./input/data_train.csv')
ICM = pd.read_csv('./input/data_ICM_title_abstract.csv')
##### URM
URM_tuples = [tuple(x) for x in URM.to_numpy()]
userList, itemList, ratingList = zip(*URM_tuples)
userList = list(userList)
userList=np.array(userList,dtype=np.int64)
itemList = list(itemList)
itemList=np.array(itemList,dtype=np.int64)
ratingList = list(ratingList) #not needed
ratingList=np.array(ratingList,dtype=np.int64) #not needed
URM_all = sps.coo_matrix((ratingList, (userList, itemList)))
URM_all = URM_all.tocsr()
#### ICM
ICM_tuples = [tuple(x) for x in ICM.to_numpy()]
itemList_icm, featureList_icm, scoreList_icm = zip(*ICM_tuples)
itemList_icm = list(itemList_icm)
itemList_icm = np.array(itemList_icm,dtype=np.int64)
featureList_icm = list(featureList_icm)
featureList_icm = np.array(featureList_icm,dtype=np.int64)
scoreList_icm = list(scoreList_icm)
scoreList_icm = np.array(scoreList_icm,dtype=np.float64)
ICM_all = sps.coo_matrix((scoreList_icm, (itemList_icm, featureList_icm)))
#### Test
userTestList = [x for x in R_test.to_numpy()]
userTestList = zip(*userTestList)
userTestList = [list(a) for a in userTestList][0]
#### make validation and test
from Base.Evaluation.Evaluator import EvaluatorHoldout
from Data_manager.split_functions.split_train_validation_random_holdout import split_train_in_two_percentage_global_sample
URM_train, URM_test = split_train_in_two_percentage_global_sample(URM_all, train_percentage = 0.80)
URM_train, URM_validation = split_train_in_two_percentage_global_sample(URM_train, train_percentage = 0.80)
evaluator_validation = EvaluatorHoldout(URM_validation, cutoff_list=[10])
evaluator_test = EvaluatorHoldout(URM_test, cutoff_list=[10])
### hybrid recommender
### Usinng TF IDF
ICM_all = ICM_all.tocsr()
num_tot_items = ICM_all.shape[0]
# let's count how many items have a certain feature
items_per_feature = np.ediff1d(ICM_all.indptr) + 1
# print(items_per_feature)
IDF = np.array(np.log(num_tot_items / items_per_feature))
from scipy.sparse import diags
diags(IDF)
ICM_idf = ICM_all.copy()
ICM_idf = diags(IDF)*ICM_idf
############## top pop
item_popularity = np.ediff1d(URM_all.tocsc().indptr)
popular_items = np.argsort(item_popularity)
popular_items = np.flip(popular_items, axis=0)
popular_items = popular_items[0:10]
###########
from HybridRecommender import HybridRecommender
recommender = HybridRecommender(URM_all)
recommender.fit([0.2, 0.3, 0.2], ICM_idf)
recoms = recommender.recommend(userTestList, cutoff=10)
recomList = []
for i in range(len(recoms)):
user_id = userTestList[i]
start_pos = URM_train.indptr[user_id]
end_pos = URM_train.indptr[user_id + 1]
if start_pos == end_pos:
recomList.append(' '.join(str(e) for e in popular_items))
else:
recomList.append(' '.join(str(e) for e in recoms[i]))
# print(recomList)
res = {"user_id": userTestList, "item_list": recomList}
result = pd.DataFrame(res, columns= ['user_id', 'item_list'])
result.to_csv('outputs/hybrid_slim_cbf_rp3v1.csv', index = False, header=True)
| [((9, 9, 9, 46), 'pandas.read_csv', 'pd.read_csv', ({(9, 21, 9, 45): '"""./input/data_train.csv"""'}, {}), "('./input/data_train.csv')", True, 'import pandas as pd\n'), ((10, 7, 10, 56), 'pandas.read_csv', 'pd.read_csv', ({(10, 19, 10, 55): '"""./input/data_target_users_test.csv"""'}, {}), "('./input/data_target_users_test.csv')", True, 'import pandas as pd\n'), ((11, 4, 11, 41), 'pandas.read_csv', 'pd.read_csv', ({(11, 16, 11, 40): '"""./input/data_train.csv"""'}, {}), "('./input/data_train.csv')", True, 'import pandas as pd\n'), ((12, 6, 12, 56), 'pandas.read_csv', 'pd.read_csv', ({(12, 18, 12, 55): '"""./input/data_ICM_title_abstract.csv"""'}, {}), "('./input/data_ICM_title_abstract.csv')", True, 'import pandas as pd\n'), ((20, 9, 20, 42), 'numpy.array', 'np.array', (), '', True, 'import numpy as np\n'), ((22, 9, 22, 42), 'numpy.array', 'np.array', (), '', True, 'import numpy as np\n'), ((25, 11, 25, 46), 'numpy.array', 'np.array', (), '', True, 'import numpy as np\n'), ((27, 10, 27, 60), 'scipy.sparse.coo_matrix', 'sps.coo_matrix', ({(27, 25, 27, 59): '(ratingList, (userList, itemList))'}, {}), '((ratingList, (userList, itemList)))', True, 'import scipy.sparse as sps\n'), ((36, 15, 36, 52), 'numpy.array', 'np.array', (), '', True, 'import numpy as np\n'), ((39, 18, 39, 58), 'numpy.array', 'np.array', (), '', True, 'import numpy as np\n'), ((42, 16, 42, 56), 'numpy.array', 'np.array', (), '', True, 'import numpy as np\n'), ((44, 10, 44, 74), 'scipy.sparse.coo_matrix', 'sps.coo_matrix', ({(44, 25, 44, 73): '(scoreList_icm, (itemList_icm, featureList_icm))'}, {}), '((scoreList_icm, (itemList_icm, featureList_icm)))', True, 'import scipy.sparse as sps\n'), ((56, 22, 56, 99), 'Data_manager.split_functions.split_train_validation_random_holdout.split_train_in_two_percentage_global_sample', 'split_train_in_two_percentage_global_sample', (), '', False, 'from Data_manager.split_functions.split_train_validation_random_holdout import split_train_in_two_percentage_global_sample\n'), ((57, 28, 57, 107), 'Data_manager.split_functions.split_train_validation_random_holdout.split_train_in_two_percentage_global_sample', 'split_train_in_two_percentage_global_sample', (), '', False, 'from Data_manager.split_functions.split_train_validation_random_holdout import split_train_in_two_percentage_global_sample\n'), ((59, 23, 59, 73), 'Base.Evaluation.Evaluator.EvaluatorHoldout', 'EvaluatorHoldout', (), '', False, 'from Base.Evaluation.Evaluator import EvaluatorHoldout\n'), ((60, 17, 60, 61), 'Base.Evaluation.Evaluator.EvaluatorHoldout', 'EvaluatorHoldout', (), '', False, 'from Base.Evaluation.Evaluator import EvaluatorHoldout\n'), ((76, 0, 76, 10), 'scipy.sparse.diags', 'diags', ({(76, 6, 76, 9): 'IDF'}, {}), '(IDF)', False, 'from scipy.sparse import diags\n'), ((85, 16, 85, 43), 'numpy.argsort', 'np.argsort', ({(85, 27, 85, 42): 'item_popularity'}, {}), '(item_popularity)', True, 'import numpy as np\n'), ((86, 16, 86, 46), 'numpy.flip', 'np.flip', (), '', True, 'import numpy as np\n'), ((91, 14, 91, 40), 'HybridRecommender.HybridRecommender', 'HybridRecommender', ({(91, 32, 91, 39): 'URM_all'}, {}), '(URM_all)', False, 'from HybridRecommender import HybridRecommender\n'), ((110, 9, 110, 61), 'pandas.DataFrame', 'pd.DataFrame', (), '', True, 'import pandas as pd\n'), ((70, 20, 70, 46), 'numpy.ediff1d', 'np.ediff1d', ({(70, 31, 70, 45): 'ICM_all.indptr'}, {}), '(ICM_all.indptr)', True, 'import numpy as np\n'), ((73, 15, 73, 56), 'numpy.log', 'np.log', ({(73, 22, 73, 55): 'num_tot_items / items_per_feature'}, 
{}), '(num_tot_items / items_per_feature)', True, 'import numpy as np\n'), ((80, 10, 80, 20), 'scipy.sparse.diags', 'diags', ({(80, 16, 80, 19): 'IDF'}, {}), '(IDF)', False, 'from scipy.sparse import diags\n')] |
MoyTW/7DRL2016_Rewrite | dodge/config.py | 99e092dcb8797a25caa3c8a989a574efae19e4d4 | import json
class Config(object):
def __init__(self, file_location):
with open(file_location, 'r') as f:
config = json.load(f)
self.SCREEN_WIDTH = int(config["SCREEN_WIDTH"])
self.SCREEN_HEIGHT = int(config["SCREEN_HEIGHT"])
self.MAP_WIDTH = int(config["MAP_WIDTH"])
self.MAP_HEIGHT = int(config["MAP_HEIGHT"])
self.PANEL_HEIGHT = int(config["PANEL_HEIGHT"])
self.FULL_SCREEN = bool(config["FULL_SCREEN"])
self.CAMERA_WIDTH = int(config["CAMERA_WIDTH"])
self.CAMERA_HEIGHT = int(config["CAMERA_HEIGHT"])
self.VISION_RADIUS = int(config["VISION_RADIUS"])
self.FOV_ALGO = int(config["FOV_ALGO"])
self.FOV_LIGHT_WALLS = bool(config["FOV_LIGHT_WALLS"])
self.HP_BAR_WIDTH = int(config["HP_BAR_WIDTH"])
# Derived values
self.PANEL_Y = self.SCREEN_HEIGHT - self.PANEL_HEIGHT
# etc etc etc
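# Usage sketch (the settings filename below is hypothetical; the JSON file must
# provide every key read in __init__ above):
#
#     config = Config('settings.json')
#     print(config.SCREEN_WIDTH, config.PANEL_Y)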
| [((7, 21, 7, 33), 'json.load', 'json.load', ({(7, 31, 7, 32): 'f'}, {}), '(f)', False, 'import json\n')] |
barel-mishal/InCal_lib | incal_lib/create_dataframe.py | 3aa63ebccf2ed3277fac55049c88178541cbb94b | import pandas as pd
import numpy as np
def create_calr_example_df(n_rows, start_date):
    '''
    Build a minute-frequency example DataFrame with two features for two
    subjects, indexed by a DatetimeIndex named 'Date_Time_1'.
    '''
np.random.seed(20)
array = np.random.rand(n_rows)
cumulative = np.cumsum(array)
d = {
'feature1_subject_1': array,
'feature1_subject_2': array,
'feature2_subject_1': cumulative,
'feature2_subject_2': cumulative*2
}
idx = pd.date_range(start_date, periods=n_rows,
freq="MIN", name='Date_Time_1')
return pd.DataFrame(data=d, index=idx)
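# Minimal usage sketch (the start date is an arbitrary example value):
if __name__ == '__main__':
    example_df = create_calr_example_df(n_rows=5, start_date='2021-01-01')
    print(example_df.head())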
| [((9, 4, 9, 22), 'numpy.random.seed', 'np.random.seed', ({(9, 19, 9, 21): '(20)'}, {}), '(20)', True, 'import numpy as np\n'), ((10, 12, 10, 34), 'numpy.random.rand', 'np.random.rand', ({(10, 27, 10, 33): 'n_rows'}, {}), '(n_rows)', True, 'import numpy as np\n'), ((11, 17, 11, 33), 'numpy.cumsum', 'np.cumsum', ({(11, 27, 11, 32): 'array'}, {}), '(array)', True, 'import numpy as np\n'), ((18, 10, 19, 55), 'pandas.date_range', 'pd.date_range', (), '', True, 'import pandas as pd\n'), ((20, 11, 20, 42), 'pandas.DataFrame', 'pd.DataFrame', (), '', True, 'import pandas as pd\n')] |
lms-07/HybridSN | HybridSN/DataLoadAndOperate.py | 7580d67a5879d5b53ced75a653d4f198a8aefde2 | import os
import numpy as np
import scipy.io as sio
import tifffile
from sklearn.decomposition import PCA
from sklearn.model_selection import train_test_split
# Load dataset
def loadData(name,data_path):
if name == 'IP':
data = sio.loadmat(os.path.join(data_path, 'Indian_pines_corrected.mat'))['indian_pines_corrected']
labels = sio.loadmat(os.path.join(data_path, 'Indian_pines_gt.mat'))['indian_pines_gt']
elif name == 'SA':
data = sio.loadmat(os.path.join(data_path, 'Salinas_corrected.mat'))['salinas_corrected']
labels = sio.loadmat(os.path.join(data_path, 'Salinas_gt.mat'))['salinas_gt']
elif name == 'PU':
data = sio.loadmat(os.path.join(data_path, 'PaviaU.mat'))['paviaU']
labels = sio.loadmat(os.path.join(data_path, 'PaviaU_gt.mat'))['paviaU_gt']
elif name == 'HU13':
# dict_keys(['__header__', '__version__', '__globals__', 'Houston'])
#dict_values([b'MATLAB 5.0 MAT-file, Platform: PCWIN64, Created on: Wed Jul 17 16:45:01 2019', '1.0', [], array()])
#data = sio.loadmat(os.path.join(data_path, 'Houston.mat'))
#labels = sio.loadmat(os.path.join(data_path,'Houston_gt.mat'))
data = sio.loadmat(os.path.join(data_path, 'Houston.mat'))['Houston']
labels = sio.loadmat(os.path.join(data_path,'Houston_gt.mat'))['Houston_gt']
elif name == 'KSC':
data = sio.loadmat(os.path.join(data_path, 'KSC.mat'))['KSC']
labels = sio.loadmat(os.path.join(data_path,'KSC_gt.mat'))['KSC_gt']
return data, labels
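# Usage sketch (the data directory is hypothetical; the .mat files for the chosen
# dataset must already be present there):
#
#     X, y = loadData('IP', './data/')
#     print(X.shape, y.shape)  # e.g. (145, 145, 200) and (145, 145) for Indian Pines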
# Use the tifffile package to read the hyperspectral image.
# Load the .tiff data set and convert it to .mat data.
def loadTifDataTomat(data_path,save_DataPath,name):
if name=='HU13':
totalTif=tifffile.imread(os.path.join(data_path,'2013_IEEE_GRSS_DF_Contest_CASI.tif'))
trainTif=tifffile.imread(os.path.join(data_path,'train_roi.tif'))
valTif=tifffile.imread(os.path.join(data_path,'val_roi.tif'))
print(totalTif.shape,trainTif.shape,valTif.shape)
#spectral.imshow(totalTif)
#spectral.imshow(trainTif)
sio.savemat(os.path.join(save_DataPath,"totalTifHouston13.mat"),{'totalTifHouston13':totalTif})
sio.savemat(os.path.join(save_DataPath,"trainTifHouston13.mat"),{'trainTifHouston13':trainTif})
sio.savemat(os.path.join(save_DataPath,"valTifHouston13.mat"),{'valTifHouston13':valTif})
def loadTifMat(data_path,name):
if name=='HU13':
data=sio.loadmat(os.path.join(data_path, 'totalTifHouston13.mat'))['totalTifHouston13']
train=sio.loadmat(os.path.join(data_path, 'trainTifHouston13.mat'))['trainTifHouston13']
val=sio.loadmat(os.path.join(data_path, 'valTifHouston13.mat'))['valTifHouston13']
return data,train,val
### Use PCA to remove the spectral redundancy.
### Reduce the spectral dimension, from high-dimensional to low-dimensional.
def applyPCA(X, numComponents=75):
newX = np.reshape(X, (-1, X.shape[2]))
pca = PCA(n_components=numComponents, whiten=True)
newX = pca.fit_transform(newX)
newX = np.reshape(newX, (X.shape[0],X.shape[1], numComponents))
return newX, pca
### Padding zeros
def padWithZeros(X, margin=2):
newX = np.zeros((X.shape[0] + 2 * margin, X.shape[1] + 2* margin, X.shape[2]))
x_offset = margin
y_offset = margin
newX[x_offset:X.shape[0] + x_offset, y_offset:X.shape[1] + y_offset, :] = X
return newX
### Create data cubes (3D patches).
def createImageCubes(X, y, windowSize=5, removeZeroLabels = True):
margin = int((windowSize - 1) / 2)
zeroPaddedX = padWithZeros(X, margin=margin)
# split patches
patchesData = np.zeros((X.shape[0] * X.shape[1], windowSize, windowSize, X.shape[2]))
patchesLabels = np.zeros((X.shape[0] * X.shape[1]))
patchIndex = 0
for r in range(margin, zeroPaddedX.shape[0] - margin):
for c in range(margin, zeroPaddedX.shape[1] - margin):
patch = zeroPaddedX[r - margin:r + margin + 1, c - margin:c + margin + 1]
patchesData[patchIndex, :, :, :] = patch
patchesLabels[patchIndex] = y[r-margin, c-margin]
patchIndex = patchIndex + 1
if removeZeroLabels:
patchesData = patchesData[patchesLabels>0,:,:,:]
patchesLabels = patchesLabels[patchesLabels>0]
patchesLabels -= 1
return patchesData, patchesLabels
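# Sketch of the usual preprocessing pipeline (paths and sizes are illustrative
# only; splitTrainTestSet is defined just below):
#
#     X, y = loadData('IP', './data/')
#     X_pca, pca = applyPCA(X, numComponents=30)
#     X_cubes, y_cubes = createImageCubes(X_pca, y, windowSize=25)
#     X_train, X_test, y_train, y_test = splitTrainTestSet(X_cubes, y_cubes, testRatio=0.7)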
# Dataset split.
def splitTrainTestSet(X, y, testRatio, randomState=345):
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=testRatio, random_state=randomState,
stratify=y)
return X_train, X_test, y_train, y_test | [((65, 11, 65, 42), 'numpy.reshape', 'np.reshape', ({(65, 22, 65, 23): 'X', (65, 25, 65, 41): '(-1, X.shape[2])'}, {}), '(X, (-1, X.shape[2]))', True, 'import numpy as np\n'), ((66, 10, 66, 54), 'sklearn.decomposition.PCA', 'PCA', (), '', False, 'from sklearn.decomposition import PCA\n'), ((68, 11, 68, 67), 'numpy.reshape', 'np.reshape', ({(68, 22, 68, 26): 'newX', (68, 28, 68, 66): '(X.shape[0], X.shape[1], numComponents)'}, {}), '(newX, (X.shape[0], X.shape[1], numComponents))', True, 'import numpy as np\n'), ((75, 11, 75, 82), 'numpy.zeros', 'np.zeros', ({(75, 20, 75, 81): '(X.shape[0] + 2 * margin, X.shape[1] + 2 * margin, X.shape[2])'}, {}), '((X.shape[0] + 2 * margin, X.shape[1] + 2 * margin, X.shape[2]))', True, 'import numpy as np\n'), ((88, 18, 88, 89), 'numpy.zeros', 'np.zeros', ({(88, 27, 88, 88): '(X.shape[0] * X.shape[1], windowSize, windowSize, X.shape[2])'}, {}), '((X.shape[0] * X.shape[1], windowSize, windowSize, X.shape[2]))', True, 'import numpy as np\n'), ((89, 20, 89, 55), 'numpy.zeros', 'np.zeros', ({(89, 30, 89, 53): 'X.shape[0] * X.shape[1]'}, {}), '(X.shape[0] * X.shape[1])', True, 'import numpy as np\n'), ((105, 39, 106, 67), 'sklearn.model_selection.train_test_split', 'train_test_split', (), '', False, 'from sklearn.model_selection import train_test_split\n'), ((40, 33, 40, 93), 'os.path.join', 'os.path.join', ({(40, 46, 40, 55): 'data_path', (40, 56, 40, 92): '"""2013_IEEE_GRSS_DF_Contest_CASI.tif"""'}, {}), "(data_path, '2013_IEEE_GRSS_DF_Contest_CASI.tif')", False, 'import os\n'), ((41, 33, 41, 72), 'os.path.join', 'os.path.join', ({(41, 46, 41, 55): 'data_path', (41, 56, 41, 71): '"""train_roi.tif"""'}, {}), "(data_path, 'train_roi.tif')", False, 'import os\n'), ((42, 31, 42, 68), 'os.path.join', 'os.path.join', ({(42, 44, 42, 53): 'data_path', (42, 54, 42, 67): '"""val_roi.tif"""'}, {}), "(data_path, 'val_roi.tif')", False, 'import os\n'), ((47, 20, 47, 71), 'os.path.join', 'os.path.join', ({(47, 33, 47, 46): 'save_DataPath', (47, 47, 47, 70): '"""totalTifHouston13.mat"""'}, {}), "(save_DataPath, 'totalTifHouston13.mat')", False, 'import os\n'), ((48, 20, 48, 71), 'os.path.join', 'os.path.join', ({(48, 33, 48, 46): 'save_DataPath', (48, 47, 48, 70): '"""trainTifHouston13.mat"""'}, {}), "(save_DataPath, 'trainTifHouston13.mat')", False, 'import os\n'), ((49, 20, 49, 69), 'os.path.join', 'os.path.join', ({(49, 33, 49, 46): 'save_DataPath', (49, 47, 49, 68): '"""valTifHouston13.mat"""'}, {}), "(save_DataPath, 'valTifHouston13.mat')", False, 'import os\n'), ((13, 27, 13, 80), 'os.path.join', 'os.path.join', ({(13, 40, 13, 49): 'data_path', (13, 51, 13, 79): '"""Indian_pines_corrected.mat"""'}, {}), "(data_path, 'Indian_pines_corrected.mat')", False, 'import os\n'), ((14, 29, 14, 75), 'os.path.join', 'os.path.join', ({(14, 42, 14, 51): 'data_path', (14, 53, 14, 74): '"""Indian_pines_gt.mat"""'}, {}), "(data_path, 'Indian_pines_gt.mat')", False, 'import os\n'), ((55, 25, 55, 73), 'os.path.join', 'os.path.join', ({(55, 38, 55, 47): 'data_path', (55, 49, 55, 72): '"""totalTifHouston13.mat"""'}, {}), "(data_path, 'totalTifHouston13.mat')", False, 'import os\n'), ((56, 26, 56, 74), 'os.path.join', 'os.path.join', ({(56, 39, 56, 48): 'data_path', (56, 50, 56, 73): '"""trainTifHouston13.mat"""'}, {}), "(data_path, 'trainTifHouston13.mat')", False, 'import os\n'), ((57, 24, 57, 70), 'os.path.join', 'os.path.join', ({(57, 37, 57, 46): 'data_path', (57, 48, 57, 69): '"""valTifHouston13.mat"""'}, {}), "(data_path, 
'valTifHouston13.mat')", False, 'import os\n'), ((16, 27, 16, 75), 'os.path.join', 'os.path.join', ({(16, 40, 16, 49): 'data_path', (16, 51, 16, 74): '"""Salinas_corrected.mat"""'}, {}), "(data_path, 'Salinas_corrected.mat')", False, 'import os\n'), ((17, 29, 17, 70), 'os.path.join', 'os.path.join', ({(17, 42, 17, 51): 'data_path', (17, 53, 17, 69): '"""Salinas_gt.mat"""'}, {}), "(data_path, 'Salinas_gt.mat')", False, 'import os\n'), ((19, 27, 19, 64), 'os.path.join', 'os.path.join', ({(19, 40, 19, 49): 'data_path', (19, 51, 19, 63): '"""PaviaU.mat"""'}, {}), "(data_path, 'PaviaU.mat')", False, 'import os\n'), ((20, 29, 20, 69), 'os.path.join', 'os.path.join', ({(20, 42, 20, 51): 'data_path', (20, 53, 20, 68): '"""PaviaU_gt.mat"""'}, {}), "(data_path, 'PaviaU_gt.mat')", False, 'import os\n'), ((26, 27, 26, 65), 'os.path.join', 'os.path.join', ({(26, 40, 26, 49): 'data_path', (26, 51, 26, 64): '"""Houston.mat"""'}, {}), "(data_path, 'Houston.mat')", False, 'import os\n'), ((27, 29, 27, 69), 'os.path.join', 'os.path.join', ({(27, 42, 27, 51): 'data_path', (27, 52, 27, 68): '"""Houston_gt.mat"""'}, {}), "(data_path, 'Houston_gt.mat')", False, 'import os\n'), ((29, 27, 29, 61), 'os.path.join', 'os.path.join', ({(29, 40, 29, 49): 'data_path', (29, 51, 29, 60): '"""KSC.mat"""'}, {}), "(data_path, 'KSC.mat')", False, 'import os\n'), ((30, 29, 30, 65), 'os.path.join', 'os.path.join', ({(30, 42, 30, 51): 'data_path', (30, 52, 30, 64): '"""KSC_gt.mat"""'}, {}), "(data_path, 'KSC_gt.mat')", False, 'import os\n')] |
antopen/alipay-sdk-python-all | alipay/aop/api/domain/AlipayOpenIotmbsDooropenresultSyncModel.py | 8e51c54409b9452f8d46c7bb10eea7c8f7e8d30c | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class AlipayOpenIotmbsDooropenresultSyncModel(object):
def __init__(self):
self._dev_id = None
self._door_state = None
self._project_id = None
@property
def dev_id(self):
return self._dev_id
@dev_id.setter
def dev_id(self, value):
self._dev_id = value
@property
def door_state(self):
return self._door_state
@door_state.setter
def door_state(self, value):
self._door_state = value
@property
def project_id(self):
return self._project_id
@project_id.setter
def project_id(self, value):
self._project_id = value
def to_alipay_dict(self):
params = dict()
if self.dev_id:
if hasattr(self.dev_id, 'to_alipay_dict'):
params['dev_id'] = self.dev_id.to_alipay_dict()
else:
params['dev_id'] = self.dev_id
if self.door_state:
if hasattr(self.door_state, 'to_alipay_dict'):
params['door_state'] = self.door_state.to_alipay_dict()
else:
params['door_state'] = self.door_state
if self.project_id:
if hasattr(self.project_id, 'to_alipay_dict'):
params['project_id'] = self.project_id.to_alipay_dict()
else:
params['project_id'] = self.project_id
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = AlipayOpenIotmbsDooropenresultSyncModel()
if 'dev_id' in d:
o.dev_id = d['dev_id']
if 'door_state' in d:
o.door_state = d['door_state']
if 'project_id' in d:
o.project_id = d['project_id']
return o
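# Round-trip sketch (field values are made up for illustration):
#
#     model = AlipayOpenIotmbsDooropenresultSyncModel()
#     model.dev_id = 'device-001'
#     model.door_state = 'open'
#     model.project_id = 'project-001'
#     params = model.to_alipay_dict()
#     restored = AlipayOpenIotmbsDooropenresultSyncModel.from_alipay_dict(params)
#     assert restored.dev_id == model.dev_id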
| [] |
ghost58400/marlin-binary-protocol | setup.py | fb93603866ecfce84e887c159bbbb9f9d2f01f17 | import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="marlin_binary_protocol",
version="0.0.7",
author="Charles Willis",
author_email="[email protected]",
description="Transfer files with Marlin 2.0 firmware using Marlin Binary Protocol Mark II",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/charleswillis3/marlin-binary-protocol",
packages=setuptools.find_packages(),
install_requires=["heatshrink2>=0.9", "pyserial>=3.4", "backports.time_perf_counter; python_version < '3.3'"],
classifiers=[
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
python_requires='>=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4',
)
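# Install sketch (assumes the distribution is published on PyPI under the name above):
#
#     pip install marlin-binary-protocol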
| [((15, 13, 15, 39), 'setuptools.find_packages', 'setuptools.find_packages', ({}, {}), '()', False, 'import setuptools\n')] |
henryseg/Veering | taut_euler_class.py | 50ebdcd5bde582726aefdd564c43e17890651282 | #
# taut_euler_class.py
#
from file_io import parse_data_file, write_data_file
from taut import liberal, isosig_to_tri_angle
from transverse_taut import is_transverse_taut
from sage.matrix.constructor import Matrix
from sage.modules.free_module_element import vector
from sage.arith.misc import gcd
from sage.arith.functions import lcm
#
# Goal - given a transverse taut triangulation, decide if the
# associated "absolute" euler class is torsion or not. If it is
# torsion, determine its order.
#
# Contents and overview:
# 1. References.
#
# 2. Background.
#
# 3. Helper functions.
#
# 4. Truncate. We build the correct "truncated" cell structure \calT'
# from (M, \calT) and give generators for the cochain groups
# C^k(\calT', \ZZ) (k = 1, 2).
#
# 5. Representative. We find a two-cocycle E \in Z^2(\calT', \ZZ)
# that represents E(\calT) \in H^2(M, \ZZ).
#
# 6. Coboundary. We find the matrix for the coboundary operator
# \delta^1.
#
# 7. Linear algebra. We solve the linear problem to decide if E is a
# coboundary - that is, if E lies in B^2(\calT', \ZZ) - that is, if E
# is in the image of \delta^1.
#
# 8. Remarks.
#
# 9. Calling code
#
# 1. References.
#
# Culler, Dunfield - Orderability and Dehn filling
# Ghys - Groups acting on the circle
# Thurston - A norm for the homology of three-manifolds
# Candel, Conlon - Foliations, chapter four
# 2. Background:
# Suppose that (M, \calT) is a transverse taut triangulation. Then
# \calT^{2} is the "horizontal branched surface".  This carries various
# laminations, which extend to foliations on M. All of these have the
# same Euler class, which we will denote E(\calT) \in H^2(M, \ZZ).
# Suppose that \calF is a carried foliation and let UT\calF be the
# unit tangent bundle over \calF. The Euler class E vanishes exactly
# when UT\calF has a section; that is, when the unit tangent bundle is
# trivialisable.
# Recall:
# Suppose that X is an F-bundle over B. We have
#
# i
# F -------> X <--.
# | |
# | |
# p| |s
# | |
# v |
# B ---'
#
# So s \from B \to X is a \emph{section} if p \circ s = Id_B
# 3. Helper functions
def diagonal(D):
return [D[i][i] for i in range(min(D.dimensions()))]
# 4. Truncate.
# Suppose that M is a connected, cusped, oriented three-manifold. Let
# C = C(M) \geq 1 be the number of cusps of M. Suppose that \calT is a
# transverse taut ideal triangulation of M. Let T = T(\calT) \geq 1
# be the number of tetrahedra of \calT.
# We use Regina to number and orient the edges \{e_i\}_{i = 0}^{T-1},
# the faces \{f_i\}_{i = 0}^{2T-1}, and the tetrahedra \{t_i\}_{i =
# 0}^{T-1} of \calT. We call all of these \emph{ideal} cells. Note
# that the transverse structure also gives us co-orientations of the
# e_i and the f_i, called "upwards".
# We remove a small open neighbourhood of all ideal vertices of all
# model tetrahedra. This gives the \emph{truncated} cell structure
# \calT'. The remains of the ideal cells are called \emph{truncated}
# cells; we abuse and reuse the notations e_i and f_i for these. The
# truncated cells inherit orientations and co-orientations. The new
# cells are called \emph{peripheral} cells. We number these as
# follows:
# e_{ij} is the peripheral edge cutting vertex v_j off of ideal face f_i
# f_{ij} is the peripheral face cutting vertex v_j off of ideal tetrahedron t_i
# Note that every truncated face is combinatorially a hexagon. The
# boundary of this hexagon contains three truncated edges alternating
# with three peripheral edges. We orient each peripheral edge e_{ij}
# so that the orientation of e_{ij} agrees with the orientation
# induced by \bdy f_i. We orient each peripheral face f_{ij}
# anti-clockwise, as viewed from infinity (that is, from outside of
# M). Also, we equip e_{ij} and f_{ij} with co-orientations pointing
# out of M, called "outward".
# e_{i0}
# ---
# / \
# e_2 / \ e_1
# / \
# / f_i \
# \ /
# e_{i1} --------- e_{i2}
# e_0
# For an edge e or a face f we use e^* and f^* to denote the dual in
# C^1(\calT', \ZZ) or C^2(\calT', \ZZ). Thus \{e^*_i\} \cup
# \{e^*_{ij}\} generates C^1(\calT', \ZZ) while \{f^*_i\} \cup
# \{f^*_{ij}\} generates C^2(\calT', \ZZ).
# For more pictures, see
# /Veering_code/NotesPictures/euler_notes_from_nathan.jpg
# 5. Representative
# We now construct a two-cocycle E \in Z^2(\calT', \ZZ). For every
# peripheral face f we take
# E(f) = 0.
# \begin{remark}
# To see that this is correct, let \calF be any foliation of M,
# transverse to the boundary. Suppose that f is the given peripheral
# triangle. We have a section of the restriction of UT\calF to \bdy
# f; namely the outward field. This extends over f to give a section
# of UT\calF restricted to f. So there is no obstruction to the
# extension. See below for a more precise discussion in terms of
# "Poincar\'e-Hopf index".
# \end{remark}
# Now suppose that f is a truncated face. Suppose that e_0, e_1, e_2
# are its three truncated edges. Recall that these are all oriented.
# Let AC(f) be the number of the edges e_0, e_1, e_2 that are
# oriented anti-clockwise (that is, agree with their induced
# orientation coming from f). We take
# E(f) = AC(f) - 2
# If we flip the transverse direction: AC(f') = 3 - AC(f),
# so E(f') = AC(f') - 2 = 1 - AC(f) = -(AC(f) - 2) - 1 = -E(f) - 1.
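# Worked check of the four possible cases (illustrative, not from the original
# notes): AC(f) = 0, 1, 2, 3 gives E(f) = -2, -1, 0, 1, and flipping the
# co-orientation sends these to 1, 0, -1, -2, matching E(f') = -E(f) - 1.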
# \begin{remark}
# Here is one way to remember (and explain!) this rule. Suppose that
# f is the given truncated face. Suppose that s is a section of UTf |
# \bdy f. Then index(s) is the total rotation of s with respect to
# the tangent field, _plus_ one. This can be rephrased in terms of
# the index of tangent vector fields extending s over all of f.
# Our choices of orientations of edges determine a section of UTf |
# \bdy f. Since all of the boundary edges e_{ij} of f are oriented
# the same way, we choose a standard framing there; Nathan tells us to
# just use the outward pointing section on all of the e_{ij}. Our
# choice of section on e_0 (say) has to (a) depend only on the
# orientation of e_0 and (b) has to be outward at the endpoints of
# e_0. The simplest choice is the section that rotates by +\pi with
# respect to the tangent along \bdy f_i, as we move forward along e_0.
# So s points _back_ at the beginning of e_0, points _right_ in the
# middle of e_0, and points _forwards_ at the end of e_0. The total
# rotation of the resulting field (with respect to the tangent field)
# is AC(f) - 3. Thus E(f) = AC(f) - 2 is the index. You can check
# this works by drawing the four possible pictures and computing the index
# of any extension of s over f.
# \end{remark}
# Claim: \delta^2 E = 0.
# That is, E is a cocycle.
# Proof of claim: Fix a truncated tetrahedron t and fix some orientation
# of its truncated edges. A direct calculation shows that
# \delta E (t) = E \bdy t = 0.
# Likewise, a direct computation shows that switching the orientation
# of a single edge leaves E \bdy t unchanged. QED.
### It would be nice to have a less computational proof!
def euler_cocycle(tri, angle):
"""
Given a regina triangulation "tri", with oriented edges, and a
transverse taut angle structure "angle", returns the associated
two-cocycle E representing the Euler class E(tri).
"""
assert is_transverse_taut(tri, angle)
face_coorientations = is_transverse_taut(tri, angle, return_type = "face_coorientations")
# E will be a _row_ vector, because it eats column vectors.
E = []
# First deal with the truncated faces
for face in tri.faces(2): # 2 = dimension
# First we compute the number of Regina oriented edges that agree with the Regina orientation on face
AC = 0
for i in range(3):
perm = face.faceMapping(1, i)
# print perm[0], perm[1]
if perm[1] == ((perm[0] + 1) % 3): # the edge and face orientations agree so,
AC = AC + 1
# print "AC", AC
# Now we condition on whether or not Regina and angle agree on the (co-)orientation of the face.
if face_coorientations[face.index()] == 1:
E.append(AC - 2)
else:
E.append(1 - AC)
# Now deal with the peripheral faces
for tet in tri.tetrahedra():
for j in range(4):
E.append(0)
return E
# 6. Coboundary
# Suppose that e is a truncated edge. Let LF be the set of truncated
# faces to the left of e and let RF be the set of faces to the right. Then
# \delta e^* = \sum_{f \in LF} f^* - \sum_{f \in RF} f^*.
# Suppose that e is a peripheral edge. So there is a unique truncated
# face f meeting e. Note that f is to the left of e. There are
# also a pair of boundary faces meeting e: say f' _above_ e and f''
# _below_ e. Then
# \delta e^* = f^* + (f')^* - (f'')^*.
def coboundary(tri, angle):
"""
Given a triangulation "tri" (T), with oriented edges, and a
transverse taut angle structure "angle", returns the co-boundary
operator delta^1 \from C^1(T', ZZ) \to C^2(T', ZZ), as a matrix,
for the truncated triangulation T'. Note that, strictly speaking,
we don't need to use "angle" for this, but we use it to determine
orientation on faces for the Euler class, so we might as well use
it again here.
"""
# \delta^1 takes row vectors (functions on edges) and spits out
# row vectors (functions on faces). So, if c is a one-cochain
# then c \cdot \delta is a two-cochain.
delta = []
assert is_transverse_taut(tri, angle)
tet_vert_coorientations = is_transverse_taut(tri, angle, return_type = "tet_vert_coorientations")
face_coorientations = is_transverse_taut(tri, angle, return_type = "face_coorientations")
for edge in tri.edges():
# A row for every truncated edge
row = []
for face in tri.triangles():
# A row entry for every truncated face
count = 0
for i in range(3):
if face.edge(i) == edge:
perm = face.faceMapping(1, i)
if perm[1] == ((perm[0] + 1) % 3):
# the edge and face orientations agree so,
count += 1
else:
count -= 1
row.append(count * face_coorientations[face.index()])
# +1 if face is to the left of the edge, -1 if face is to
# the right of the edge, using Regina's edge orientation
# when viewed from above (using the transverse taut notion
# of up)
# ,'|
# ,' |
# ,' |
# ,' CCW | gets a +1
# `. ^
# `. |
# `. |
# `.|
for tet in tri.simplices():
for i in range(4):
row.append(0)
delta.append(row)
for face in tri.triangles():
face_embeddings = []
for j in range(2):
face_embeddings.append( face.embedding(j) )
for i in range(3): # vertices of the face
# A row for every peripheral edge
row = []
for face2 in tri.triangles():
# A row entry for every truncated face
if face2 == face:
row.append(1)
else:
row.append(0)
for tet in tri.simplices():
for k in range(4):
# A row entry for every peripheral face
count = 0
for j in range(2):
if (tet == face_embeddings[j].simplex()) and (face_embeddings[j].vertices()[i] == k):
# the tetrahedron is on the jth side of the
# face and the ith vertex of face is the kth
# vertex of tet
face_num_in_tet = face_embeddings[j].vertices()[3]
count -= tet_vert_coorientations[tet.index()][face_num_in_tet]
# tet_vert_coorientations is +1 if
# coorientation on face points out of the
# tetrahedron, and we want count += 1 if
# the peripheral face is above the
# peripheral edge
row.append(count)
delta.append(row)
return delta
# 7. Linear algebra
# We ask: is there a one-cocycle C \in C^1(\calT', \ZZ) so that
# \delta C = E? If so, then [E] = E(\calT) is zero in H^2, as
# desired.
# This is a linear algebra problem, so can be solved by, say, sage.
def order_of_euler_class(delta, E):
"""
Given the coboundary operator delta and an Euler two-cocycle E,
returns k if [E] is k--torsion. By convention, returns zero if
[E] is non-torsion. Note that the trivial element is 1--torsion.
"""
delta = Matrix(delta)
E = vector(E)
# Note that E is a coboundary if there is a one-cocycle C solving
#
# E = C*delta
#
# We can find C (if it exists at all) using Smith normal form.
D, U, V = delta.smith_form()
assert D == U*delta*V
# So we are trying to solve
#
# C*delta = C*U.inverse()*D*V.inverse() = E
#
# for a one-cochain C. Multiply by V to get
#
# C*delta*V = C*U.inverse()*D = E*V
#
# Now set
#
# B = C*U.inverse(), and so B*U = C
#
# and rewrite to get
#
# B*U*delta*V = B*D = E*V
#
# So define E' by:
Ep = E*V
# Finally we attempt to solve B * D = Ep. Note that D is
# diagonal: so if we can solve all of the equations
# B[i] * D[i][i] == Ep[i]
# with B[i] integers, then [E] = 0 in cohomology.
diag = diagonal(D)
if any( (diag[i] == 0 and Ep[i] != 0) for i in range(len(Ep)) ):
return 0
# All zeros are at the end in Smith normal form. Since we've
# passed the above we can now remove them.
first_zero = diag.index(0)
diag = diag[:first_zero]
Ep = Ep[:first_zero]
# Since diag[i] is (now) never zero we can divide to get the
# fractions Ep[i]/diag[i] and then find the scaling that makes
# them simultaneously integral.
denoms = [ diag[i] / gcd(Ep[i], diag[i]) for i in range(len(Ep)) ]
return lcm(denoms)
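# Illustrative example (not from the original source): if the surviving diagonal
# is diag = [1, 2] and Ep = [3, 1], the denominators are
# [1/gcd(3, 1), 2/gcd(1, 2)] = [1, 2], so the order is lcm([1, 2]) = 2:
# E itself is not a coboundary, but 2E is.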
# 8. Remarks
# a) Here is a nice trick that proves [E] = 0 in some cases. Suppose
# that \gamma is an oriented path in \bdy M. Suppose that \gamma is
# transverse to the one-skeleton of \calT'. We form a one-cocycle
# D_\gamma by adding up the boundary edges that \gamma crosses, with
# sign. The sign is positive if \gamma crosses from below to above,
# and negative otherwise. Note that \delta D_\gamma vanishes on all
# boundary faces.
# b) Marc Lackenby says that we should take the paths that go up
# through the centres of tetrahedra and take the Poincare dual. BUT I
# think this is not what we want... Marc is thinking of the relative
# Euler class as discussed on page 390 of his paper "Taut ideal
# triangulations of three-manifolds". The relative Euler class lives
# in H^2(M, \bdy M), so is Poincare dual to an element of H_1(M),
# represented by a collection of loops.
# c) [2019-03-31] It seems that, for transverse veering triangulations
# in the 16 census, the Euler class is always zero or two-torsion.
# Note that there are manifolds M in the census where H^2(M, \ZZ) has
# positive rank... What about odd torsion?
# Question: If the veering triangulation is edge-orientable, does the
# Euler class vanish?
# Answer: Yes. Here is a version of a discussion with Nathan
# [2020-04-03] - he says the following:
# Suppose that F is a foliation carried by the horizontal branched
# surface. Let UTF be the unit tangent bundle to F. We think of
# e(UTF) as being the obstruction to UTF having a section. Let G be
# the foliation carried by the upper (aka green) branched surface. If
# G is transversely orientable (aka edge-orientability of the veering
# triangulation) then G \cap F gives the desired section, and e(UTF) =
# 0. Note that G \cap F gives, for every point, a pair of points in
# the unit tangent circle. So let PUTF be the projective unit tangent
# bundle to F. This definitely has a section, so e(PUTF) = 0. Now,
# the bundle UTF is a double cover of the bundle PUTF.
# Claim: The euler class is multiplicative with respect to covers (in
# both senses).
# With the claim in hand, we have
# 2 * e(UTF) = e(PUTF) = 0
# We deduce that e(UTF) is either zero or two-torsion.
# 9. Calling code
@liberal
def order_of_euler_class_wrapper(tri, angle):
"""
Returns the order of the euler class.
"""
return order_of_euler_class(coboundary(tri, angle), euler_cocycle(tri, angle))
def compute_order_of_euler_classes(file_in, number=None, file_out=None):
data_in = parse_data_file(file_in)
data_in = [line.split(" ") for line in data_in]
if number != None:
data_in = data_in[:number]
data_out = []
evil = []
for i, line in enumerate(data_in):
if i % 50 == 0:
print( ((1.0*i)/(1.0*len(data_in)), len(data_out)) )
sig = line[0]
tri, angle = isosig_to_tri_angle(sig)
# angle = [int(letter) for letter in angle_s]
curr_euler = order_of_euler_class(coboundary(tri, angle), euler_cocycle(tri, angle))
if curr_euler == "non-torsion":
evil.append(sig)
print(sig + " has non-torsion Euler class!!!!")
elif curr_euler == 1: # order is one so [E] = 0. Boring.
pass
else:
line_out = [sig, str(curr_euler)]
line_out.extend(line[1:])
data_out.append(line_out)
if file_out != None:
write_data_file(data_out, file_out)
print( ("list of evil:", evil) )
return data_out
| [((218, 11, 218, 41), 'transverse_taut.is_transverse_taut', 'is_transverse_taut', ({(218, 30, 218, 33): 'tri', (218, 35, 218, 40): 'angle'}, {}), '(tri, angle)', False, 'from transverse_taut import is_transverse_taut\n'), ((219, 26, 219, 93), 'transverse_taut.is_transverse_taut', 'is_transverse_taut', (), '', False, 'from transverse_taut import is_transverse_taut\n'), ((273, 11, 273, 41), 'transverse_taut.is_transverse_taut', 'is_transverse_taut', ({(273, 30, 273, 33): 'tri', (273, 35, 273, 40): 'angle'}, {}), '(tri, angle)', False, 'from transverse_taut import is_transverse_taut\n'), ((274, 30, 274, 101), 'transverse_taut.is_transverse_taut', 'is_transverse_taut', (), '', False, 'from transverse_taut import is_transverse_taut\n'), ((275, 26, 275, 93), 'transverse_taut.is_transverse_taut', 'is_transverse_taut', (), '', False, 'from transverse_taut import is_transverse_taut\n'), ((364, 12, 364, 25), 'sage.matrix.constructor.Matrix', 'Matrix', ({(364, 19, 364, 24): 'delta'}, {}), '(delta)', False, 'from sage.matrix.constructor import Matrix\n'), ((365, 8, 365, 17), 'sage.modules.free_module_element.vector', 'vector', ({(365, 15, 365, 16): 'E'}, {}), '(E)', False, 'from sage.modules.free_module_element import vector\n'), ((420, 11, 420, 22), 'sage.arith.functions.lcm', 'lcm', ({(420, 15, 420, 21): 'denoms'}, {}), '(denoms)', False, 'from sage.arith.functions import lcm\n'), ((485, 14, 485, 38), 'file_io.parse_data_file', 'parse_data_file', ({(485, 30, 485, 37): 'file_in'}, {}), '(file_in)', False, 'from file_io import parse_data_file, write_data_file\n'), ((495, 21, 495, 45), 'taut.isosig_to_tri_angle', 'isosig_to_tri_angle', ({(495, 41, 495, 44): 'sig'}, {}), '(sig)', False, 'from taut import liberal, isosig_to_tri_angle\n'), ((508, 8, 508, 43), 'file_io.write_data_file', 'write_data_file', ({(508, 24, 508, 32): 'data_out', (508, 34, 508, 42): 'file_out'}, {}), '(data_out, file_out)', False, 'from file_io import parse_data_file, write_data_file\n'), ((419, 25, 419, 44), 'sage.arith.misc.gcd', 'gcd', ({(419, 29, 419, 34): 'Ep[i]', (419, 36, 419, 43): 'diag[i]'}, {}), '(Ep[i], diag[i])', False, 'from sage.arith.misc import gcd\n')] |
ananyamalik/Railway-Concession-Portal | mailing/urls.py | 295264ccb50bc4750bf0a749c8477384407d51ad | from django.urls import path
from .views import ( student_list, student_add, student_profile,student_delete )
| [] |
deepbluesea/transformers | transformers/tests/tokenization_xlnet_test.py | 11a2317986aad6e9a72f542e31344cfb7c94cbab | # coding=utf-8
# Copyright 2018 The Google AI Language Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import unittest
from transformers.tokenization_xlnet import (XLNetTokenizer, SPIECE_UNDERLINE)
from .tokenization_tests_commons import CommonTestCases
SAMPLE_VOCAB = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'fixtures/test_sentencepiece.model')
class XLNetTokenizationTest(CommonTestCases.CommonTokenizerTester):
tokenizer_class = XLNetTokenizer
def setUp(self):
super(XLNetTokenizationTest, self).setUp()
# We have a SentencePiece fixture for testing
tokenizer = XLNetTokenizer(SAMPLE_VOCAB, keep_accents=True)
tokenizer.save_pretrained(self.tmpdirname)
def get_tokenizer(self, **kwargs):
return XLNetTokenizer.from_pretrained(self.tmpdirname, **kwargs)
def get_input_output_texts(self):
input_text = u"This is a test"
output_text = u"This is a test"
return input_text, output_text
def test_full_tokenizer(self):
tokenizer = XLNetTokenizer(SAMPLE_VOCAB, keep_accents=True)
tokens = tokenizer.tokenize(u'This is a test')
self.assertListEqual(tokens, [u'▁This', u'▁is', u'▁a', u'▁t', u'est'])
self.assertListEqual(
tokenizer.convert_tokens_to_ids(tokens), [285, 46, 10, 170, 382])
tokens = tokenizer.tokenize(u"I was born in 92000, and this is falsé.")
self.assertListEqual(tokens, [SPIECE_UNDERLINE + u'I', SPIECE_UNDERLINE + u'was', SPIECE_UNDERLINE + u'b',
u'or', u'n', SPIECE_UNDERLINE + u'in', SPIECE_UNDERLINE + u'',
u'9', u'2', u'0', u'0', u'0', u',', SPIECE_UNDERLINE + u'and', SPIECE_UNDERLINE + u'this',
SPIECE_UNDERLINE + u'is', SPIECE_UNDERLINE + u'f', u'al', u's', u'é', u'.'])
ids = tokenizer.convert_tokens_to_ids(tokens)
self.assertListEqual(
ids, [8, 21, 84, 55, 24, 19, 7, 0,
602, 347, 347, 347, 3, 12, 66,
46, 72, 80, 6, 0, 4])
back_tokens = tokenizer.convert_ids_to_tokens(ids)
self.assertListEqual(back_tokens, [SPIECE_UNDERLINE + u'I', SPIECE_UNDERLINE + u'was', SPIECE_UNDERLINE + u'b',
u'or', u'n', SPIECE_UNDERLINE + u'in',
SPIECE_UNDERLINE + u'', u'<unk>', u'2', u'0', u'0', u'0', u',',
SPIECE_UNDERLINE + u'and', SPIECE_UNDERLINE + u'this',
SPIECE_UNDERLINE + u'is', SPIECE_UNDERLINE + u'f', u'al', u's',
u'<unk>', u'.'])
def test_tokenizer_lower(self):
tokenizer = XLNetTokenizer(SAMPLE_VOCAB, do_lower_case=True)
tokens = tokenizer.tokenize(u"I was born in 92000, and this is falsé.")
self.assertListEqual(tokens, [SPIECE_UNDERLINE + u'', u'i', SPIECE_UNDERLINE + u'was', SPIECE_UNDERLINE + u'b',
u'or', u'n', SPIECE_UNDERLINE + u'in', SPIECE_UNDERLINE + u'',
u'9', u'2', u'0', u'0', u'0', u',', SPIECE_UNDERLINE + u'and', SPIECE_UNDERLINE + u'this',
SPIECE_UNDERLINE + u'is', SPIECE_UNDERLINE + u'f', u'al', u'se', u'.'])
self.assertListEqual(tokenizer.tokenize(u"H\u00E9llo"), [u"▁he", u"ll", u"o"])
def test_tokenizer_no_lower(self):
tokenizer = XLNetTokenizer(SAMPLE_VOCAB, do_lower_case=False)
tokens = tokenizer.tokenize(u"I was born in 92000, and this is falsé.")
self.assertListEqual(tokens, [SPIECE_UNDERLINE + u'I', SPIECE_UNDERLINE + u'was', SPIECE_UNDERLINE + u'b', u'or',
u'n', SPIECE_UNDERLINE + u'in', SPIECE_UNDERLINE + u'',
u'9', u'2', u'0', u'0', u'0', u',', SPIECE_UNDERLINE + u'and', SPIECE_UNDERLINE + u'this',
SPIECE_UNDERLINE + u'is', SPIECE_UNDERLINE + u'f', u'al', u'se', u'.'])
def test_sequence_builders(self):
tokenizer = XLNetTokenizer.from_pretrained("xlnet-base-cased")
text = tokenizer.encode("sequence builders")
text_2 = tokenizer.encode("multi-sequence build")
encoded_sentence = tokenizer.add_special_tokens_single_sequence(text)
encoded_pair = tokenizer.add_special_tokens_sequence_pair(text, text_2)
assert encoded_sentence == text + [4, 3]
assert encoded_pair == text + [4] + text_2 + [4, 3]
if __name__ == '__main__':
unittest.main()
| [((106, 4, 106, 19), 'unittest.main', 'unittest.main', ({}, {}), '()', False, 'import unittest\n'), ((24, 44, 24, 69), 'os.path.abspath', 'os.path.abspath', ({(24, 60, 24, 68): '__file__'}, {}), '(__file__)', False, 'import os\n'), ((35, 20, 35, 67), 'transformers.tokenization_xlnet.XLNetTokenizer', 'XLNetTokenizer', (), '', False, 'from transformers.tokenization_xlnet import XLNetTokenizer, SPIECE_UNDERLINE\n'), ((39, 15, 39, 72), 'transformers.tokenization_xlnet.XLNetTokenizer.from_pretrained', 'XLNetTokenizer.from_pretrained', ({(39, 46, 39, 61): 'self.tmpdirname'}, {}), '(self.tmpdirname, **kwargs)', False, 'from transformers.tokenization_xlnet import XLNetTokenizer, SPIECE_UNDERLINE\n'), ((48, 20, 48, 67), 'transformers.tokenization_xlnet.XLNetTokenizer', 'XLNetTokenizer', (), '', False, 'from transformers.tokenization_xlnet import XLNetTokenizer, SPIECE_UNDERLINE\n'), ((76, 20, 76, 68), 'transformers.tokenization_xlnet.XLNetTokenizer', 'XLNetTokenizer', (), '', False, 'from transformers.tokenization_xlnet import XLNetTokenizer, SPIECE_UNDERLINE\n'), ((85, 20, 85, 69), 'transformers.tokenization_xlnet.XLNetTokenizer', 'XLNetTokenizer', (), '', False, 'from transformers.tokenization_xlnet import XLNetTokenizer, SPIECE_UNDERLINE\n'), ((93, 20, 93, 70), 'transformers.tokenization_xlnet.XLNetTokenizer.from_pretrained', 'XLNetTokenizer.from_pretrained', ({(93, 51, 93, 69): '"""xlnet-base-cased"""'}, {}), "('xlnet-base-cased')", False, 'from transformers.tokenization_xlnet import XLNetTokenizer, SPIECE_UNDERLINE\n')] |
Rongtingting/xcltk | preprocess/utils/liftOver_vcf.py | 2e86207c45a1caa7f905a89e1c121c3c203eab2d | # forked from https://github.com/single-cell-genetics/cellSNP
## A python wrap of UCSC liftOver function for vcf file
## UCSC liftOver binary and hg19 to hg38 chain file:
## https://genome.ucsc.edu/cgi-bin/hgLiftOver
## http://hgdownload.cse.ucsc.edu/admin/exe/linux.x86_64/liftOver
## http://hgdownload.soe.ucsc.edu/goldenPath/hg19/liftOver/hg19ToHg38.over.chain.gz
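## Example invocation (paths are placeholders; the liftOver binary and chain file
## must be downloaded from the links above first):
##   python liftOver_vcf.py -i input.hg19.vcf.gz -c hg19ToHg38.over.chain.gz \
##       -o output.hg38.vcf.gz -P /path/to/liftOver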
import sys
import gzip
import subprocess
from optparse import OptionParser
LIFTOVER_INFO = '##INFO=<ID=OLD,Number=1,Type=Integer,'
LIFTOVER_INFO += 'Description="position before liftover">\n'
def vcf_to_bed(vcf_file, out_file, chr_in=True):
if vcf_file[-3:] == ".gz":
is_gzip = True
fid_in = gzip.open(vcf_file, "r")
else:
is_gzip = False
fid_in = open(vcf_file, "r")
fid_out = open(out_file, "w")
for line in fid_in:
if is_gzip:
line = line.decode('utf-8')
if line.startswith("#") == False:
line_val = line.rstrip().split("\t")[:8]
if chr_in and line_val[0].startswith("chr") == False:
line_val[0] = "chr" + line_val[0]
line_val[2] = str(int(line_val[1]) + 1)
fid_out.writelines("\t".join(line_val[:3]) + "\n")
fid_in.close()
fid_out.close()
return None
def update_vcf(vcf_file, bed_new, bed_unmap, out_file):
## unmapped lines
unmap_pos = []
_fid = open(bed_unmap, "r")
for line in _fid:
if not line.startswith("#"):
_pos_id = "_".join(line.rstrip().split("\t")[:2])
unmap_pos.append(_pos_id)
_fid.close()
if vcf_file[-3:] == ".gz":
is_gzip = True
fid_in = gzip.open(vcf_file, "r")
else:
is_gzip = False
fid_in = open(vcf_file, "r")
cnt1 = 0
idx_unmap = 0
fid_bed = open(bed_new, "r")
fid_out = open(out_file, "w")
for line in fid_in:
if is_gzip:
line = line.decode('utf-8')
if line.startswith("#"):
if line.startswith("#CHROM"):
fid_out.writelines(LIFTOVER_INFO)
fid_out.writelines(line)
else:
line_val = line.rstrip().split("\t")
if idx_unmap < len(unmap_pos):
_pos_id = "_".join(line_val[:2])
if line_val[0].startswith("chr") == False:
_pos_id = "chr" + _pos_id
if _pos_id == unmap_pos[idx_unmap]:
idx_unmap += 1
continue
cnt1 += 1
bed_line = fid_bed.readline()
line_val[7] = "OLD=" + line_val[1] + ";" + line_val[7]
line_val[1] = bed_line.rstrip().split("\t")[1]
fid_out.writelines("\t".join(line_val) + "\n")
print(cnt1, idx_unmap)
fid_in.close()
fid_bed.close()
fid_out.close()
return None
def main():
import warnings
warnings.filterwarnings('error')
# parse command line options
parser = OptionParser()
parser.add_option("--chainFile", "-c", dest="chain_file", default=None,
help=("Chain file, full path."))
parser.add_option("--inFile", "-i", dest="in_file", default=None,
help=("Input vcf file, full path."))
parser.add_option("--outFile", "-o", dest="out_file", default=None,
help=("Output VCF file, full path."))
parser.add_option("--liftOverPath", "-P", dest="liftOver_path", default=None,
help=("liftOver_path if it is not in PATH variable."))
(options, args) = parser.parse_args()
if len(sys.argv[1:]) == 0:
print("liftOver-vcf: a wrap of UCSC liftOver for VCF file.\n")
print("use -h or --help for help on argument.")
sys.exit(1)
in_file = options.in_file
bed_file = options.in_file.split(".vcf")[0] + ".bed"
new_bed_file = options.out_file.split(".vcf")[0] + ".bed"
unmap_bed_file = options.out_file.split(".vcf")[0] + ".unmap.bed"
## generate bed file
print("converting vcf to bed file ... ")
vcf_to_bed(in_file, bed_file)
## UCSC liftOver on bed file
chain_file = options.chain_file
if options.liftOver_path is None:
liftOver = "liftOver"
else:
# check if path exists
liftOver = options.liftOver_path
print("liftOver bed file ... ")
bashCommand = "%s %s %s %s %s" %(liftOver, bed_file, chain_file,
new_bed_file, unmap_bed_file)
#print(bashCommand)
pro = subprocess.Popen(bashCommand.split(), stdout=subprocess.PIPE)
pro.communicate()[0]
## update vcf file
out_file = options.out_file
if out_file[-3:] == ".gz":
out_file = out_file[:-3]
print("updating vcf file ... ")
update_vcf(in_file, new_bed_file, unmap_bed_file, out_file)
print("gzip vcf file ... ")
import shutil
if shutil.which("bgzip") is not None:
bashCommand = "bgzip -f %s" %(out_file)
else:
bashCommand = "gzip -f %s" %(out_file)
pro = subprocess.Popen(bashCommand.split(), stdout=subprocess.PIPE)
pro.communicate()[0]
return None
if __name__ == "__main__":
main()
| [((89, 4, 89, 36), 'warnings.filterwarnings', 'warnings.filterwarnings', ({(89, 28, 89, 35): '"""error"""'}, {}), "('error')", False, 'import warnings\n'), ((92, 13, 92, 27), 'optparse.OptionParser', 'OptionParser', ({}, {}), '()', False, 'from optparse import OptionParser\n'), ((19, 17, 19, 41), 'gzip.open', 'gzip.open', ({(19, 27, 19, 35): 'vcf_file', (19, 37, 19, 40): '"""r"""'}, {}), "(vcf_file, 'r')", False, 'import gzip\n'), ((50, 17, 50, 41), 'gzip.open', 'gzip.open', ({(50, 27, 50, 35): 'vcf_file', (50, 37, 50, 40): '"""r"""'}, {}), "(vcf_file, 'r')", False, 'import gzip\n'), ((106, 8, 106, 19), 'sys.exit', 'sys.exit', ({(106, 17, 106, 18): '(1)'}, {}), '(1)', False, 'import sys\n'), ((141, 7, 141, 28), 'shutil.which', 'shutil.which', ({(141, 20, 141, 27): '"""bgzip"""'}, {}), "('bgzip')", False, 'import shutil\n')] |
kamil559/pomodorr | pomodorr/frames/tests/test_consumers.py | 232e6e98ff3481561dd1235794b3960066713210 | import json
import pytest
from channels.db import database_sync_to_async
from channels.testing import WebsocketCommunicator
from pytest_lazyfixture import lazy_fixture
from pomodorr.frames import statuses
from pomodorr.frames.models import DateFrame
from pomodorr.frames.routing import frames_application
from pomodorr.frames.selectors.date_frame_selector import get_finished_date_frames_for_task
pytestmark = [pytest.mark.django_db(transaction=True), pytest.mark.asyncio]
async def test_connect_websocket(task_instance, active_user):
communicator = WebsocketCommunicator(frames_application, f'date_frames/{task_instance.id}/')
communicator.scope['user'] = active_user
connected, _ = await communicator.connect()
assert connected
await communicator.disconnect()
@pytest.mark.parametrize(
'tested_frame_type',
[DateFrame.pomodoro_type, DateFrame.break_type, DateFrame.pause_type]
)
async def test_start_and_finish_date_frame(tested_frame_type, task_instance, active_user):
communicator = WebsocketCommunicator(frames_application, f'date_frames/{task_instance.id}/')
communicator.scope['user'] = active_user
await communicator.connect()
assert await database_sync_to_async(task_instance.frames.exists)() is False
await communicator.send_json_to({
'type': 'frame_start',
'frame_type': tested_frame_type
})
response = await communicator.receive_json_from()
assert response['level'] == statuses.MESSAGE_LEVEL_CHOICES[statuses.LEVEL_TYPE_SUCCESS]
assert response['code'] == statuses.LEVEL_TYPE_SUCCESS
assert response['action'] == statuses.MESSAGE_FRAME_ACTION_CHOICES[statuses.FRAME_ACTION_STARTED]
started_date_frame_id = response['data']['date_frame_id']
assert started_date_frame_id is not None
assert await database_sync_to_async(task_instance.frames.exists)()
await communicator.send_json_to({
'type': 'frame_finish',
'date_frame_id': started_date_frame_id
})
response = await communicator.receive_json_from()
assert response['level'] == statuses.MESSAGE_LEVEL_CHOICES[statuses.LEVEL_TYPE_SUCCESS]
assert response['code'] == statuses.LEVEL_TYPE_SUCCESS
assert response['action'] == statuses.MESSAGE_FRAME_ACTION_CHOICES[statuses.FRAME_ACTION_FINISHED]
assert await database_sync_to_async(get_finished_date_frames_for_task(task=task_instance).exists)()
await communicator.disconnect()
async def test_start_and_finish_pomodoro_with_pause_inside(task_instance, active_user):
communicator = WebsocketCommunicator(frames_application, f'date_frames/{task_instance.id}/')
communicator.scope['user'] = active_user
await communicator.connect()
await communicator.send_json_to({
'type': 'frame_start',
'frame_type': DateFrame.pomodoro_type
})
pomodoro_started_response = await communicator.receive_json_from()
assert pomodoro_started_response['action'] == statuses.MESSAGE_FRAME_ACTION_CHOICES[statuses.FRAME_ACTION_STARTED]
started_pomodoro_id = pomodoro_started_response['data']['date_frame_id']
await communicator.send_json_to({
'type': 'frame_start',
'frame_type': DateFrame.pause_type
})
pause_started_response = await communicator.receive_json_from()
assert pause_started_response['action'] == statuses.MESSAGE_FRAME_ACTION_CHOICES[statuses.FRAME_ACTION_STARTED]
pomodoro = await database_sync_to_async(DateFrame.objects.get)(id=started_pomodoro_id)
assert pomodoro.end is None # check if pomodoro hasn't been stopped by starting a pause date frame
started_pause_id = pause_started_response['data']['date_frame_id']
pause = await database_sync_to_async(DateFrame.objects.get)(id=started_pause_id)
assert pause.end is None
await communicator.send_json_to({
'type': 'frame_finish',
'date_frame_id': started_pause_id
})
pause_finished_response = await communicator.receive_json_from()
assert pause_finished_response['action'] == statuses.MESSAGE_FRAME_ACTION_CHOICES[statuses.FRAME_ACTION_FINISHED]
await database_sync_to_async(pause.refresh_from_db)()
assert pause.end is not None # pause should be finished here
await database_sync_to_async(pomodoro.refresh_from_db)()
assert pomodoro.end is None
await communicator.send_json_to({
'type': 'frame_finish',
'date_frame_id': started_pomodoro_id
})
pomodoro_finished_response = await communicator.receive_json_from()
await database_sync_to_async(pomodoro.refresh_from_db)()
assert pomodoro.end is not None # Only now the pomodoro is expected to be finished
assert pomodoro_finished_response['action'] == statuses.MESSAGE_FRAME_ACTION_CHOICES[statuses.FRAME_ACTION_FINISHED]
assert await database_sync_to_async(get_finished_date_frames_for_task(task=task_instance).count)() == 2
await communicator.disconnect()
@pytest.mark.parametrize(
'tested_frame_type',
[DateFrame.pomodoro_type, DateFrame.break_type, DateFrame.pause_type]
)
async def test_channel_group_separation(tested_frame_type, active_user, task_instance,
task_instance_in_second_project):
communicator_1 = WebsocketCommunicator(frames_application, f'date_frames/{task_instance.id}/')
communicator_2 = WebsocketCommunicator(frames_application, f'date_frames/{task_instance_in_second_project.id}/')
communicator_1.scope['user'] = active_user
communicator_2.scope['user'] = active_user
communicator_1_connected, _ = await communicator_1.connect()
communicator_2_connected, _ = await communicator_2.connect()
assert communicator_1_connected
assert communicator_2_connected
assert await communicator_1.receive_nothing()
assert await communicator_2.receive_nothing()
await communicator_1.send_json_to({
'type': 'frame_start',
'frame_type': tested_frame_type
})
assert await communicator_1.receive_nothing() is False
assert await communicator_2.receive_nothing()
await communicator_1.disconnect()
await communicator_2.disconnect()
@pytest.mark.parametrize(
'tested_frame_type',
[DateFrame.pomodoro_type, DateFrame.break_type, DateFrame.pause_type]
)
async def test_connection_discarded_before_second_connection_established(tested_frame_type, active_user, task_instance):
communicator_1 = WebsocketCommunicator(frames_application, f'date_frames/{task_instance.id}/')
communicator_2 = WebsocketCommunicator(frames_application, f'date_frames/{task_instance.id}/')
communicator_1.scope['user'] = active_user
communicator_2.scope['user'] = active_user
communicator_1_connected, _ = await communicator_1.connect()
assert communicator_1_connected
communicator_2_connected, _ = await communicator_2.connect()
assert communicator_2_connected
connection_close_response = await communicator_1.receive_output()
assert connection_close_response['type'] == 'websocket.close'
assert await communicator_1.receive_nothing()
assert await communicator_2.receive_nothing()
await communicator_2.send_json_to({
'type': 'frame_start',
'frame_type': tested_frame_type
})
assert await communicator_1.receive_nothing()
assert await communicator_2.receive_nothing() is False
await communicator_2.disconnect()
@pytest.mark.parametrize(
'tested_frame_type',
[
lazy_fixture('pomodoro_in_progress'),
lazy_fixture('pause_in_progress')
]
)
async def test_date_frame_force_finished_and_client_notified(tested_frame_type, active_user, task_instance):
communicator_1 = WebsocketCommunicator(frames_application, f'date_frames/{task_instance.id}/')
communicator_2 = WebsocketCommunicator(frames_application, f'date_frames/{task_instance.id}/')
communicator_1.scope['user'] = active_user
communicator_2.scope['user'] = active_user
await communicator_1.connect()
await communicator_2.connect()
notification_message = await communicator_1.receive_output()
assert notification_message['type'] == 'websocket.send'
assert json.loads(notification_message['text'])['action'] == statuses.MESSAGE_FRAME_ACTION_CHOICES[
statuses.FRAME_ACTION_FORCE_TERMINATED]
connection_close_response = await communicator_1.receive_output()
assert connection_close_response['type'] == 'websocket.close'
await communicator_1.disconnect()
await communicator_2.disconnect()
async def test_channel_group_permission(task_instance_for_random_project, active_user):
communicator = WebsocketCommunicator(frames_application, f'date_frames/{task_instance_for_random_project.id}/')
communicator.scope['user'] = active_user
connected, _ = await communicator.connect()
assert connected is False
| [((25, 1, 28, 1), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(26, 4, 26, 23): '"""tested_frame_type"""', (27, 4, 27, 73): '[DateFrame.pomodoro_type, DateFrame.break_type, DateFrame.pause_type]'}, {}), "('tested_frame_type', [DateFrame.pomodoro_type,\n DateFrame.break_type, DateFrame.pause_type])", False, 'import pytest\n'), ((128, 1, 131, 1), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(129, 4, 129, 23): '"""tested_frame_type"""', (130, 4, 130, 73): '[DateFrame.pomodoro_type, DateFrame.break_type, DateFrame.pause_type]'}, {}), "('tested_frame_type', [DateFrame.pomodoro_type,\n DateFrame.break_type, DateFrame.pause_type])", False, 'import pytest\n'), ((158, 1, 161, 1), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(159, 4, 159, 23): '"""tested_frame_type"""', (160, 4, 160, 73): '[DateFrame.pomodoro_type, DateFrame.break_type, DateFrame.pause_type]'}, {}), "('tested_frame_type', [DateFrame.pomodoro_type,\n DateFrame.break_type, DateFrame.pause_type])", False, 'import pytest\n'), ((13, 14, 13, 53), 'pytest.mark.django_db', 'pytest.mark.django_db', (), '', False, 'import pytest\n'), ((17, 19, 17, 96), 'channels.testing.WebsocketCommunicator', 'WebsocketCommunicator', ({(17, 41, 17, 59): 'frames_application', (17, 61, 17, 95): 'f"""date_frames/{task_instance.id}/"""'}, {}), "(frames_application, f'date_frames/{task_instance.id}/')", False, 'from channels.testing import WebsocketCommunicator\n'), ((30, 19, 30, 96), 'channels.testing.WebsocketCommunicator', 'WebsocketCommunicator', ({(30, 41, 30, 59): 'frames_application', (30, 61, 30, 95): 'f"""date_frames/{task_instance.id}/"""'}, {}), "(frames_application, f'date_frames/{task_instance.id}/')", False, 'from channels.testing import WebsocketCommunicator\n'), ((69, 19, 69, 96), 'channels.testing.WebsocketCommunicator', 'WebsocketCommunicator', ({(69, 41, 69, 59): 'frames_application', (69, 61, 69, 95): 'f"""date_frames/{task_instance.id}/"""'}, {}), "(frames_application, f'date_frames/{task_instance.id}/')", False, 'from channels.testing import WebsocketCommunicator\n'), ((134, 21, 134, 98), 'channels.testing.WebsocketCommunicator', 'WebsocketCommunicator', ({(134, 43, 134, 61): 'frames_application', (134, 63, 134, 97): 'f"""date_frames/{task_instance.id}/"""'}, {}), "(frames_application, f'date_frames/{task_instance.id}/')", False, 'from channels.testing import WebsocketCommunicator\n'), ((135, 21, 135, 116), 'channels.testing.WebsocketCommunicator', 'WebsocketCommunicator', ({(135, 43, 135, 61): 'frames_application', (135, 63, 135, 115): 'f"""date_frames/{task_instance_in_second_project.id}/"""'}, {}), "(frames_application,\n f'date_frames/{task_instance_in_second_project.id}/')", False, 'from channels.testing import WebsocketCommunicator\n'), ((163, 21, 163, 98), 'channels.testing.WebsocketCommunicator', 'WebsocketCommunicator', ({(163, 43, 163, 61): 'frames_application', (163, 63, 163, 97): 'f"""date_frames/{task_instance.id}/"""'}, {}), "(frames_application, f'date_frames/{task_instance.id}/')", False, 'from channels.testing import WebsocketCommunicator\n'), ((164, 21, 164, 98), 'channels.testing.WebsocketCommunicator', 'WebsocketCommunicator', ({(164, 43, 164, 61): 'frames_application', (164, 63, 164, 97): 'f"""date_frames/{task_instance.id}/"""'}, {}), "(frames_application, f'date_frames/{task_instance.id}/')", False, 'from channels.testing import WebsocketCommunicator\n'), ((199, 21, 199, 98), 'channels.testing.WebsocketCommunicator', 'WebsocketCommunicator', ({(199, 43, 199, 61): 'frames_application', 
(199, 63, 199, 97): 'f"""date_frames/{task_instance.id}/"""'}, {}), "(frames_application, f'date_frames/{task_instance.id}/')", False, 'from channels.testing import WebsocketCommunicator\n'), ((200, 21, 200, 98), 'channels.testing.WebsocketCommunicator', 'WebsocketCommunicator', ({(200, 43, 200, 61): 'frames_application', (200, 63, 200, 97): 'f"""date_frames/{task_instance.id}/"""'}, {}), "(frames_application, f'date_frames/{task_instance.id}/')", False, 'from channels.testing import WebsocketCommunicator\n'), ((221, 19, 221, 115), 'channels.testing.WebsocketCommunicator', 'WebsocketCommunicator', ({(221, 41, 221, 59): 'frames_application', (221, 61, 221, 114): 'f"""date_frames/{task_instance_for_random_project.id}/"""'}, {}), "(frames_application,\n f'date_frames/{task_instance_for_random_project.id}/')", False, 'from channels.testing import WebsocketCommunicator\n'), ((194, 8, 194, 44), 'pytest_lazyfixture.lazy_fixture', 'lazy_fixture', ({(194, 21, 194, 43): '"""pomodoro_in_progress"""'}, {}), "('pomodoro_in_progress')", False, 'from pytest_lazyfixture import lazy_fixture\n'), ((195, 8, 195, 41), 'pytest_lazyfixture.lazy_fixture', 'lazy_fixture', ({(195, 21, 195, 40): '"""pause_in_progress"""'}, {}), "('pause_in_progress')", False, 'from pytest_lazyfixture import lazy_fixture\n'), ((50, 17, 50, 68), 'channels.db.database_sync_to_async', 'database_sync_to_async', ({(50, 40, 50, 67): 'task_instance.frames.exists'}, {}), '(task_instance.frames.exists)', False, 'from channels.db import database_sync_to_async\n'), ((91, 21, 91, 66), 'channels.db.database_sync_to_async', 'database_sync_to_async', ({(91, 44, 91, 65): 'DateFrame.objects.get'}, {}), '(DateFrame.objects.get)', False, 'from channels.db import database_sync_to_async\n'), ((95, 18, 95, 63), 'channels.db.database_sync_to_async', 'database_sync_to_async', ({(95, 41, 95, 62): 'DateFrame.objects.get'}, {}), '(DateFrame.objects.get)', False, 'from channels.db import database_sync_to_async\n'), ((107, 10, 107, 55), 'channels.db.database_sync_to_async', 'database_sync_to_async', ({(107, 33, 107, 54): 'pause.refresh_from_db'}, {}), '(pause.refresh_from_db)', False, 'from channels.db import database_sync_to_async\n'), ((110, 10, 110, 58), 'channels.db.database_sync_to_async', 'database_sync_to_async', ({(110, 33, 110, 57): 'pomodoro.refresh_from_db'}, {}), '(pomodoro.refresh_from_db)', False, 'from channels.db import database_sync_to_async\n'), ((119, 10, 119, 58), 'channels.db.database_sync_to_async', 'database_sync_to_async', ({(119, 33, 119, 57): 'pomodoro.refresh_from_db'}, {}), '(pomodoro.refresh_from_db)', False, 'from channels.db import database_sync_to_async\n'), ((210, 11, 210, 51), 'json.loads', 'json.loads', ({(210, 22, 210, 50): "notification_message['text']"}, {}), "(notification_message['text'])", False, 'import json\n'), ((34, 17, 34, 68), 'channels.db.database_sync_to_async', 'database_sync_to_async', ({(34, 40, 34, 67): 'task_instance.frames.exists'}, {}), '(task_instance.frames.exists)', False, 'from channels.db import database_sync_to_async\n'), ((63, 40, 63, 93), 'pomodorr.frames.selectors.date_frame_selector.get_finished_date_frames_for_task', 'get_finished_date_frames_for_task', (), '', False, 'from pomodorr.frames.selectors.date_frame_selector import get_finished_date_frames_for_task\n'), ((123, 40, 123, 93), 'pomodorr.frames.selectors.date_frame_selector.get_finished_date_frames_for_task', 'get_finished_date_frames_for_task', (), '', False, 'from pomodorr.frames.selectors.date_frame_selector import 
get_finished_date_frames_for_task\n')] |
FaHoLo/Fish_shop | Bot/db_aps.py | b08018223705bca169dab9f39ec5a55f62822f0b | import logging
import os
import redis
import moltin_aps
_database = None
db_logger = logging.getLogger('db_logger')
async def get_database_connection():
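    # lazily create a single Redis connection and cache it in the module-level _database for reuse across calls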
global _database
if _database is None:
database_password = os.getenv('DB_PASSWORD')
database_host = os.getenv('DB_HOST')
database_port = os.getenv('DB_PORT')
_database = redis.Redis(host=database_host, port=database_port, password=database_password)
db_logger.debug('Got new db connection')
return _database
async def get_moltin_customer_id(customer_key):
db = await get_database_connection()
customer_id = db.get(customer_key)
if customer_id:
customer_id = customer_id.decode('utf-8')
db_logger.debug(f'Got moltin customer id «{customer_id}» from db')
return customer_id
async def update_customer_info(customer_key, customer_info):
db = await get_database_connection()
customer_id = db.get(customer_key).decode('utf-8')
moltin_aps.update_customer_info(customer_id, customer_info)
db_logger.debug(f'Customer «{customer_id}» info was updated')
async def create_customer(customer_key, customer_info):
db = await get_database_connection()
customer_id = moltin_aps.create_customer(customer_info)['data']['id']
db.set(customer_key, customer_id)
db_logger.debug(f'New customer «{customer_key}» was created')
| [((11, 12, 11, 42), 'logging.getLogger', 'logging.getLogger', ({(11, 30, 11, 41): '"""db_logger"""'}, {}), "('db_logger')", False, 'import logging\n'), ((37, 4, 37, 63), 'moltin_aps.update_customer_info', 'moltin_aps.update_customer_info', ({(37, 36, 37, 47): 'customer_id', (37, 49, 37, 62): 'customer_info'}, {}), '(customer_id, customer_info)', False, 'import moltin_aps\n'), ((17, 28, 17, 52), 'os.getenv', 'os.getenv', ({(17, 38, 17, 51): '"""DB_PASSWORD"""'}, {}), "('DB_PASSWORD')", False, 'import os\n'), ((18, 24, 18, 44), 'os.getenv', 'os.getenv', ({(18, 34, 18, 43): '"""DB_HOST"""'}, {}), "('DB_HOST')", False, 'import os\n'), ((19, 24, 19, 44), 'os.getenv', 'os.getenv', ({(19, 34, 19, 43): '"""DB_PORT"""'}, {}), "('DB_PORT')", False, 'import os\n'), ((20, 20, 20, 99), 'redis.Redis', 'redis.Redis', (), '', False, 'import redis\n'), ((43, 18, 43, 59), 'moltin_aps.create_customer', 'moltin_aps.create_customer', ({(43, 45, 43, 58): 'customer_info'}, {}), '(customer_info)', False, 'import moltin_aps\n')] |
shiv12095/realtimeviz | backend/server/tables/__init__.py | ee2bf10b5f9467212f9a9ce8957d80456ebd0259 | from .lime_bike_feed import LimeBikeFeed
from .lime_bike_trips import LimeBikeTrips
from .lime_bike_trips_analyze import LimeBikeTripsAnalyze
| [] |
gummadirajesh/AzureMonitorForSAPSolutions | sapmon/payload/provider/sapnetweaver.py | 9f8e9dbd38141b5de4782d40556c4368f6ad8d0b | # Python modules
import json
import logging
from datetime import datetime, timedelta, timezone
from time import time
from typing import Any, Callable
import re
import requests
from requests import Session
from threading import Lock
# SOAP Client modules
from zeep import Client
from zeep import helpers
from zeep.transports import Transport
from zeep.exceptions import Fault
# Payload modules
from const import *
from helper.azure import AzureStorageAccount
from helper.context import *
from helper.tools import *
from provider.base import ProviderInstance, ProviderCheck
from netweaver.metricclientfactory import NetWeaverMetricClient, MetricClientFactory
from netweaver.rfcsdkinstaller import PATH_RFC_SDK_INSTALL, SapRfcSdkInstaller
from typing import Dict
# Suppress SSLError warning due to missing SAP server certificate
import urllib3
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
# wait time in between attempts to re-download and install RFC SDK package if we have a download blob
# URL defined and previous install attempt was not successful
MINIMUM_RFC_INSTALL_RETRY_INTERVAL = timedelta(minutes=30)
# timeout to use for all SOAP WSDL fetch and other API calls
SOAP_API_TIMEOUT_SECS = 5
# soap client cache expiration, after which amount of time both successful + failed soap client instantiation attempts will be refreshed
SOAP_CLIENT_CACHE_EXPIRATION = timedelta(minutes=10)
class sapNetweaverProviderInstance(ProviderInstance):
# static / class variables to enforce singleton behavior around rfc sdk installation attempts across all
# instances of SAP Netweaver provider
_isRfcInstalled = None
_rfcInstallerLock = Lock()
def __init__(self,
tracer: logging.Logger,
ctx: Context,
providerInstance: Dict[str, str],
skipContent: bool = False,
**kwargs) -> None:
self.sapSid = None
self.sapHostName = None
self.sapInstanceNr = None
self.sapSubdomain = None
# RFC SDK call settings
self.sapUsername = None
self.sapPassword = None
self.sapClientId = None
self.sapRfcSdkBlobUrl = None
self.sapLogonGroup = None
# provider instance flag for whether RFC calls should be enabled for this specific Netweaver provider instance
self._areRfcCallsEnabled = None
# cache WSDL SOAP clients so we can re-use them across checks for the same provider and cut down off-box calls
self._soapClientCache = {}
# the RFC SDK does not allow client to specify a timeout and in fact appears to have a connection timeout of 60 secs.
# In cases where RFC calls timeout due to some misconfiguration, multiple retries can lead to metric gaps of several minutes.
# We are limiting retries here because it is extremely rare for SOAP or RFC call to fail on first attempt and succeed on retry,
# as most of these failures are due to persistent issues. Better to not waste limited time budget.
retrySettings = {
"retries": 1,
"delayInSeconds": 1,
"backoffMultiplier": 2
}
super().__init__(tracer,
ctx,
providerInstance,
retrySettings,
skipContent,
**kwargs)
"""
parse provider properties and get sid, host name and instance number
"""
def parseProperties(self) -> bool:
self.sapSid = self.metadata.get("sapSid", "")
if not self.sapSid:
self.tracer.error("%s sapSid cannot be empty", self.fullName)
return False
# provider level common logging prefix
self.logTag = "[%s][%s]" % (self.fullName, self.sapSid)
self.sapHostName = self.providerProperties.get("sapHostName", None)
if not self.sapHostName:
self.tracer.error("%s sapHostName cannot be empty", self.logTag)
return False
instanceNr = self.providerProperties.get("sapInstanceNr", None)
if instanceNr is None: # 0 is an acceptable value for Instance Number
self.tracer.error("%s sapInstanceNr cannot be empty", self.logTag)
return False
if not type(instanceNr) is int or instanceNr < 0 or instanceNr > 98:
self.tracer.error("%s sapInstanceNr can only be between 00 and 98 but %s was passed", self.logTag, str(instanceNr))
return False
self.sapInstanceNr = str(instanceNr).zfill(2)
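        # normalize to the two-digit form SAP uses, e.g. 0 -> '00' and 7 -> '07'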
self.sapSubdomain = self.providerProperties.get("sapSubdomain", "")
self.sapUsername = self.providerProperties.get('sapUsername', None)
self.sapPassword = self.providerProperties.get('sapPassword', None)
self.sapClientId = self.providerProperties.get('sapClientId', None)
self.sapLogonGroup = self.providerProperties.get('sapLogonGroup',None)
self.sapRfcSdkBlobUrl = self.providerProperties.get('sapRfcSdkBlobUrl', None)
# if user did not specify password directly via UI, check to see if they instead
# provided link to Key Vault secret
if not self.sapPassword:
sapPasswordKeyVaultUrl = self.providerProperties.get("sapPasswordKeyVaultUrl", None)
if sapPasswordKeyVaultUrl:
self.tracer.info("%s sapPassword key vault URL specified, attempting to fetch from %s", self.logTag, sapPasswordKeyVaultUrl)
try:
keyVaultUrlPatternMatch = re.match(REGEX_EXTERNAL_KEYVAULT_URL,
sapPasswordKeyVaultUrl,
re.IGNORECASE)
keyVaultName = keyVaultUrlPatternMatch.group(1)
secretName = keyVaultUrlPatternMatch.group(2)
except Exception as e:
self.tracer.error("%s invalid sapPassword Key Vault secret url format: %s", self.logTag, sapPasswordKeyVaultUrl)
return False
try:
kv = AzureKeyVault(self.tracer, keyVaultName, self.ctx.msiClientId)
self.sapPassword = kv.getSecret(secretName, None).value
if not self.sapPassword:
raise Exception("failed to read sapPassword secret")
except Exception as e:
self.tracer.error("%s error fetching sapPassword secret from keyVault url: %s, %s",
self.logTag,
sapPasswordKeyVaultUrl,
e)
return False
return True
def _getHttpPortFromInstanceNr(self, instanceNr: str) -> str:
return '5%s13' % instanceNr # As per SAP documentation, default http port is of the form 5<NR>13
def _getHttpsPortFromInstanceNr(self, instanceNr: str) -> str:
return '5%s14' % instanceNr # As per SAP documentation, default https port is of the form 5<NR>14
def getMessageServerPortFromInstanceNr(self, instanceNr: str) -> str:
return '81%s' % instanceNr # As per SAP documentation, default http port is of the form 81<NR>
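    # e.g. for instance nr '01' these helpers yield http port 50113, https port 50114 and message server port 8101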
def getFullyQualifiedDomainName(self, hostname: str) -> str:
if self.sapSubdomain:
return hostname + "." + self.sapSubdomain
else:
return hostname
"""
will first attempt to create SOAP client for hostname using the HTTPS port derived from the SAP instance number,
and if that does not succeed will then try to create client using the derived HTTP port
(if neither hostname or instance are specified, will default to the primary hostname/instance that the
provider was initialized with from properties)
"""
def getDefaultClient(self,
hostname: str = None,
instance: str = None) -> Client:
if not hostname:
hostname = self.sapHostName
if not instance:
instance = self.sapInstanceNr
httpsPort = self._getHttpsPortFromInstanceNr(instance)
httpPort = self._getHttpPortFromInstanceNr(instance)
portList = [(httpsPort,"https"),(httpPort,"http")]
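        # e.g. for instance '00' this yields [('50014', 'https'), ('50013', 'http')]; HTTPS is attempted first, then HTTP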
exceptionDetails = None
startTime = time()
for port,protocol in portList:
startTime = time()
self.tracer.info("%s attempting to fetch default client for hostname=%s on %s port %s",
self.logTag, hostname, protocol, port)
try:
client = self.getClient(hostname, httpProtocol=protocol, port=port)
return client
except Exception as e:
exceptionDetails = e
self.tracer.info("%s error fetching default client hostname=%s on %s port %s: %s [%d ms]",
self.logTag, self.sapHostName, protocol, port, e, TimeUtils.getElapsedMilliseconds(startTime))
self.tracer.error("[%s] error fetching default client hostname=%s on port %s : %s [%d ms]",
self.logTag, self.sapHostName, portList, exceptionDetails, TimeUtils.getElapsedMilliseconds(startTime), exc_info=True)
raise exceptionDetails
"""
attempt to create a SOAP client for the specified hostname using specific protocol and port
(for when we already have a known hostconfig for this hostname, and already know whether HTTPS or HTTP should be used)
    Store successful clients in cache so we don't make unnecessary WSDL fetches for future API calls to the same instance
"""
def getClient(self,
hostname: str,
httpProtocol: str,
port: str,
useCache: bool = True) -> Client:
if not hostname or not httpProtocol or not port:
raise Exception("%s cannot create client with empty httpProtocol, hostname or port (%s:%s:%s)" % \
(self.logTag, httpProtocol, hostname, port))
if httpProtocol != "http" and httpProtocol != "https":
raise Exception("%s httpProtocol %s is not valid for hostname: %s, port: %s" % \
(self.logTag, httpProtocol, hostname, port))
hostname = self.getFullyQualifiedDomainName(hostname)
url = '%s://%s:%s/?wsdl' % (httpProtocol, hostname, port)
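        # illustrative wsdl url for instance '00' over https: https://<fqdn>:50014/?wsdl
        # cache entries have the shape { 'client': <zeep Client or None>, 'expirationDateTime': <datetime> };
        # a None client marks a recent failed attempt so the WSDL fetch is not retried until the entry expires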
if (useCache and url in self._soapClientCache):
cacheEntry = self._soapClientCache[url]
# respect cache expiration; if cache is expired allow client to be refreshed below
if (cacheEntry['expirationDateTime'] > datetime.utcnow()):
if (cacheEntry['client']):
# self.tracer.info("%s using cached SOAP client for wsdl: %s", self.logTag, url)
return cacheEntry['client']
else:
# previously cached soap client attempt was failure
raise Exception("%s cached SOAP client failure for wsdl: %s" % (self.logTag, url))
self.tracer.info("%s connecting to wsdl url: %s", self.logTag, url)
startTime = time()
client = None
try:
session = Session()
session.verify = False
client = Client(url, transport=Transport(session=session, timeout=SOAP_API_TIMEOUT_SECS, operation_timeout=SOAP_API_TIMEOUT_SECS))
self.tracer.info("%s initialized SOAP client url: %s [%d ms]",
self.logTag, url, TimeUtils.getElapsedMilliseconds(startTime))
return client
except Exception as e:
self.tracer.error("%s error fetching wsdl url: %s: %s [%d ms]",
self.logTag, url, e, TimeUtils.getElapsedMilliseconds(startTime), exc_info=True)
raise e
finally:
            # cache successful and failed soap client attempts to reduce future API calls
            self._soapClientCache[url] = { 'client': client, 'expirationDateTime': datetime.utcnow() + SOAP_CLIENT_CACHE_EXPIRATION }
def callSoapApi(self, client: Client, apiName: str) -> str:
self.tracer.info("%s executing SOAP API: %s for wsdl: %s", self.logTag, apiName, client.wsdl.location)
startTime = time()
try:
method = getattr(client.service, apiName)
result = method()
self.tracer.info("%s successful SOAP API: %s for wsdl: %s [%d ms]",
self.logTag, apiName, client.wsdl.location, TimeUtils.getElapsedMilliseconds(startTime))
return result
except Exception as e:
self.tracer.error("%s error while calling SOAP API: %s for wsdl: %s: %s [%d ms]",
self.logTag, apiName, client.wsdl.location, e, TimeUtils.getElapsedMilliseconds(startTime), exc_info=True)
raise e
"""
return a netweaver RFC client initialized with "MESSAGESERVER" instance we find
for this SID.
"""
def getRfcClient(self, logTag: str) -> NetWeaverMetricClient:
# RFC connections against application server instances can be made through 'MESSAGESERVER' instances
dispatcherInstance = self.getMessageServerInstance()
return MetricClientFactory.getMetricClient(tracer=self.tracer,
logTag=logTag,
sapHostName=dispatcherInstance['hostname'],
sapSysNr=str(dispatcherInstance['instanceNr']),
sapSubdomain=self.sapSubdomain,
sapSid=self.sapSid,
sapClient=str(self.sapClientId),
sapLogonGroup = self.sapLogonGroup,
sapUsername=self.sapUsername,
sapPassword=self.sapPassword)
def validate(self) -> bool:
logTag = "[%s][%s][validation]" % (self.fullName, self.sapSid)
# HACK: Load content json to fetch the list of APIs in the checks
self.initContent()
try:
self._validateSoapClient()
except Exception as e:
self.tracer.error("%s SOAP API validation failure: %s", logTag, e, exc_info=True)
return False
try:
self._validateRfcClient()
except Exception as e:
self.tracer.error("%s RFC client validation failure: %s", logTag, e, exc_info=True)
return False
return True
"""
iterate through all SOAP API calls and attempt to validate that SOAP API client can be instantiated
and expected APIs are callable
"""
def _validateSoapClient(self) -> None:
###
        # TODO: this entire function needs to be rethought to be more precise in terms of which instances
# are called for which APIs, as some APIs will not work for some function types.
###
logTag = "[%s][%s][validation]" % (self.fullName, self.sapSid)
# hard-coded list of checks that correspond to SOAP API calls to validate
soapApiChecks = ['GetSystemInstanceList',
'GetProcessList',
'ABAPGetWPTable',
'GetQueueStatistic',
'EnqGetStatistic']
self.tracer.info("%s connecting to sap to validate SOAP API connectivity", logTag)
try:
client = self.getDefaultClient(hostname=self.sapHostName, instance=self.sapInstanceNr)
except Exception as e:
self.tracer.error("%s error occured while initializing SOAP client to SAP server: %s|%s, %s",
logTag,
self.sapHostName,
self.sapInstanceNr,
e,
exc_info=True)
raise
# Ensure that all APIs in the checks are valid and are marked as unprotected.
# Some APIs are compatible with only specific instance types and throw a Fault if run against
# an incompatible one.
# However, here we suppress all errors except Unauthorized since the Monitor phase takes
# care of calling the API against the right instance type. As long as we don't get an
# Unauthorized error, we know we can safely call them during the Monitor phase.
isValid = True
for check in self.checks:
apiName = check.name
if (apiName not in soapApiChecks):
# this is not a SOAP API check
continue
method = getattr(client.service, apiName, None) # Returning None when API not found
if method is None:
self.tracer.error("%s SOAP client failure: api %s does not exist for %s", logTag, apiName, client.wsdl.location)
isValid = False
else:
try:
self.callSoapApi(client, apiName)
self.tracer.info("%s validated SOAP api %s for %s", logTag, apiName, client.wsdl.location)
except Fault as e:
if (e.code == "SOAP-ENV:Client" and e.message == "HTTP Error: 'Unauthorized'"):
isValid = False
self.tracer.error("%s SOAP api %s is protected for %s, %s ", logTag, apiName, client.wsdl.location, e, exc_info=True)
else:
self.tracer.error("%s suppressing error during validation of SOAP api %s for %s, %s", logTag, apiName, client.wsdl.location, e, exc_info=True)
except Exception as e:
self.tracer.error("%s suppressing error during validation of SOAP api %s for %s, %s ", logTag, apiName, client.wsdl.location, e, exc_info=True)
if (not isValid):
raise Exception("%s one or more SOAP APIs failed validation" % (logTag))
"""
if customer provided RFC SDK configuration, then validate that all required properties are specified
and validate we can establish RFC client connections to APIs we need to call
"""
def _validateRfcClient(self) -> None:
logTag = "[%s][%s][validation]" % (self.fullName, self.sapSid)
        # are ALL RFC SDK config properties empty? then customer has not chosen to enable RFC SDK, nothing to validate
        if (not self.sapUsername and
            not self.sapPassword and
            not self.sapClientId and
            not self.sapRfcSdkBlobUrl):
            return
        # if only SOME of the config properties needed to enable RFC were provided, the configuration is incomplete, so fail validation
        if (not self.sapUsername or
            not self.sapPassword or
            not self.sapClientId or
            not self.sapRfcSdkBlobUrl):
            raise Exception("must specify all properties to enable RFC metric collection: Username, Password, ClientId, and RfcSdkBlobUrl")
if (not self.areRfcMetricsEnabled()):
raise Exception("RFC SDK failed to install and is not usable")
        # initialize an RFC client, which connects through the MESSAGESERVER instance for this SID
client = self.getRfcClient(logTag=logTag)
# update logging prefix with the specific instance details of the client
sapHostnameStr = "%s|%s" % (client.Hostname, client.InstanceNr)
# get metric query window to lookback 10 minutes to see if any results are available. If not that probably
# indicates customer has not enabled SMON on their SAP system
self.tracer.info("%s attempting to fetch server timestamp from %s", logTag, sapHostnameStr)
(startTime, endTime) = client.getQueryWindow(lastRunServerTime=None,
minimumRunIntervalSecs=600,
logTag=logTag)
self.tracer.info("%s attempting to fetch SMON metrics from %s", logTag, sapHostnameStr)
result = client.getSmonMetrics(startDateTime=startTime, endDateTime=endTime, logTag=logTag)
self.tracer.info("%s successfully queried SMON metrics from %s", logTag, sapHostnameStr)
self.tracer.info("%s attempting to fetch SWNC workload metrics from %s", logTag, sapHostnameStr)
result = client.getSwncWorkloadMetrics(startDateTime=startTime, endDateTime=endTime, logTag=logTag)
self.tracer.info("%s successfully queried SWNC workload metrics from %s", logTag, sapHostnameStr)
self.tracer.info("%s attempting to fetch Short Dump metrics from %s", logTag, sapHostnameStr)
result = client.getShortDumpsMetrics(startDateTime=startTime, endDateTime=endTime, logTag=logTag)
self.tracer.info("%s successfully queried Short Dump metrics from %s", logTag, sapHostnameStr)
self.tracer.info("%s attempting to fetch Sys Log metrics from %s", logTag, sapHostnameStr)
result = client.getSysLogMetrics(startDateTime=startTime, endDateTime=endTime, logTag=logTag)
self.tracer.info("%s successfully queried Sys Log metrics from %s", logTag, sapHostnameStr)
self.tracer.info("%s attempting to fetch Failed Updates metrics from %s", logTag, sapHostnameStr)
result = client.getFailedUpdatesMetrics(logTag=logTag)
self.tracer.info("%s successfully queried Failed Updates metrics from %s", logTag, sapHostnameStr)
self.tracer.info("%s attempting to fetch Batch Job metrics from %s", logTag, sapHostnameStr)
result = client.getBatchJobMetrics(startDateTime=startTime, endDateTime=endTime, logTag=logTag)
self.tracer.info("%s successfully queried Batch Job metrics from %s", logTag, sapHostnameStr)
self.tracer.info("%s attempting to fetch inbound queue metrics from %s", logTag, sapHostnameStr)
result = client.getInboundQueuesMetrics(logTag=logTag)
self.tracer.info("%s successfully queried inbound queue metrics from %s", logTag, sapHostnameStr)
self.tracer.info("%s attempting to fetch outbound queue metrics from %s", logTag, sapHostnameStr)
result = client.getOutboundQueuesMetrics(logTag=logTag)
self.tracer.info("%s successfully queried outbound queue metrics from %s", logTag, sapHostnameStr)
self.tracer.info("%s attempting to fetch lock entries metrics from %s", logTag, sapHostnameStr)
result = client.getEnqueueReadMetrics(logTag=logTag)
self.tracer.info("%s successfully queried lock entries metrics from %s", logTag, sapHostnameStr)
self.tracer.info("%s successfully validated all known RFC SDK calls", logTag)
"""
query SAP SOAP API to return list of all instances in the SID, but if caller specifies that cached results are okay
and we have cached instance list with the provider instance, then just return the cached results
"""
def getInstances(self,
filterFeatures: list = None ,
filterType: str = None,
useCache: bool = True) -> list:
# Use cached list of instances if available since they should not change within a single monitor run;
# but if cache is not available or if caller explicitly asks to skip cache then make the SOAP call
if ('hostConfig' in self.state and useCache):
# self.tracer.debug("%s using cached list of system instances", self.logTag)
return self.filterInstancesByFeature(self.state['hostConfig'], filterFeatures=filterFeatures, filterType=filterType)
self.tracer.info("%s getting list of system instances", self.logTag)
startTime = time()
instanceList = []
hosts = self._getHosts()
# Use last known hosts to fetch the updated list of hosts
# Walk through the known hostnames and stop whenever any of them returns the list of all instances
isSuccess = False
for host in hosts:
hostname, instanceNum, httpProtocol, port = host[0], host[1], host[2], host[3]
try:
apiName = 'GetSystemInstanceList'
# if we have a cached host config with already defined protocol and port, then we can initialize
# client directly from that, otherwise we have to instantiate client using ports derived from the instance number
# which will try the derived HTTPS port first and then fallback to derived HTTP port
if (not httpProtocol or not port):
client = self.getDefaultClient(hostname=hostname, instance=instanceNum)
else:
client = self.getClient(hostname, httpProtocol, port)
result = self.callSoapApi(client, apiName)
instanceList = self._parseResults(result)
# cache latest results in provider state
self.state['hostConfig'] = instanceList
isSuccess = True
break
except Exception as e:
self.tracer.error("%s could not connect to SAP with hostname: %s and port: %s", self.logTag, hostname, port, exc_info=True)
if not isSuccess:
raise Exception("%s could not connect to any SAP instances with hosts %s [%d ms]" % \
(self.logTag, hosts, TimeUtils.getElapsedMilliseconds(startTime)))
self.tracer.info("%s finished getting all system instances [%d ms]", self.logTag, TimeUtils.getElapsedMilliseconds(startTime))
return self.filterInstancesByFeature(instanceList, filterFeatures=filterFeatures, filterType=filterType)
"""
    fetch cached instance list for this provider and filter down to instances with the 'ABAP' feature
    that are healthy (ie. have dispstatus attribute of 'SAPControl-GREEN'), and return the first one in the list.
"""
def getActiveDispatcherInstance(self):
# Use cached list of instances if available since they don't change that frequently,
# and filter down to only healthy dispatcher instances since RFC direct application server connection
        # only works against dispatchers
dispatcherInstances = self.getInstances(filterFeatures=['ABAP'], filterType='include', useCache=True)
healthyInstances = [instance for instance in dispatcherInstances if 'GREEN' in instance['dispstatus']]
if (len(healthyInstances) == 0):
raise Exception("No healthy ABAP/dispatcher instance found for %s" % self.sapSid)
# return first healthy instance in list
return healthyInstances[0]
"""
    fetch cached instance list for this provider, filter down to instances with the 'MESSAGESERVER' feature,
    and return the first available message server
"""
def getMessageServerInstance(self):
        # Use cached list of instances if available since they don't change that frequently,
        # and filter down to MESSAGESERVER instances since RFC logon group connections are made through the message server
dispatcherInstances = self.getInstances(filterFeatures=['MESSAGESERVER'], filterType='include', useCache=True)
if (len(dispatcherInstances) == 0):
raise Exception("No MESSAGESERVER instance found for %s" % self.sapSid)
# return first healthy instance in list
return dispatcherInstances[0]
"""
given a list of sap instances and a set of instance features (ie. functions) to include or exclude,
apply filtering logic and return only those instances that match the filter conditions:
'include' filter type will include any instance that matches any of the feature filters
'exclude' filter type will exclude any instance that matches any of the feature filters
"""
def filterInstancesByFeature(self,
sapInstances: list,
filterFeatures: list = None,
filterType: str = None) -> list:
if (not filterFeatures or len(filterFeatures) == 0 or not sapInstances):
return sapInstances
self.tracer.info("%s filtering list of system instances based on features: %s", self.logTag, filterFeatures)
instances = [(instance, instance['features'].split('|')) for instance in sapInstances]
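        # the 'features' attribute is a pipe-delimited string, e.g. 'MESSAGESERVER|ENQUE' or 'ABAP|GATEWAY|ICMAN|IGS'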
if filterType == "include":
# Inclusion filter
# Only include instances that match at least one of the filter features
filtered_instances = [instance for (instance, instance_features) in instances \
if not set(filterFeatures).isdisjoint(set(instance_features))]
elif filterType == "exclude":
# Exclusion filter
# Only include instance that match none of the filter features
filtered_instances = [instance for (instance, instance_features) in instances \
if set(filterFeatures).isdisjoint(set(instance_features))]
else:
raise Exception("%s filterType '%s' is not supported filter type" % (self.logTag, filterType))
return filtered_instances
"""
helper method to deserialize result and return as list of dictionary objects
"""
def _parseResults(self, results: list) -> list:
return helpers.serialize_object(results, dict)
"""
private method to return default provider hostname config (what customer provided at time netweaver provided was added)
or a fully fleshed out list of <hostname / instance # / https:Port> tuples based on a previous cached call to getInstances()
"""
def _getHosts(self) -> list:
# Fetch last known list from storage. If storage does not have list, use provided
# hostname and instanceNr
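        # each host entry is a (hostname, instanceNr, httpProtocol, port) tuple; protocol and port remain None
        # until a successful GetSystemInstanceList call has cached the full host config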
if 'hostConfig' not in self.state:
self.tracer.info("%s no host config persisted yet, using user-provided host name and instance nr", self.logTag)
hosts = [(self.sapHostName,
self.sapInstanceNr,
None,
None)]
else:
self.tracer.info("%s fetching last known host config", self.logTag)
currentHostConfig = self.state['hostConfig']
hosts = [(hostConfig['hostname'],
hostConfig['instanceNr'],
"https" if (hostConfig['httpsPort'] and hostConfig['httpsPort'] != "0") else "http",
hostConfig['httpsPort'] if (hostConfig['httpsPort'] and hostConfig['httpsPort'] != "0") else hostConfig['httpPort']) for hostConfig in currentHostConfig]
return hosts
"""
returns flag to indicate whether provider checks should attempt to use RFC SDK client calls to fetch certain metrics.
    The first call may perform fairly expensive checks to validate if RFC SDK is installed and configured, and may attempt
to download user provided blob to install to local system. We only want to attempt this at most once per process,
so first caller to this function will pay that cost and the resulting success/failure flag will be cached.
"""
def areRfcMetricsEnabled(self) -> bool:
if self._areRfcCallsEnabled != None:
# the flag for whether RFC is usable has already been initialzed, so return
return self._areRfcCallsEnabled
        # there may be 1..N sapNetweaverProviderInstance instances per sapmon process, and each instance
        # may choose to enable/disable RFC calls individually, but we should only attempt to install the
        # RFC SDK at most once per process. Use a static/class variable to track whether installation
        # has already been attempted and whether it succeeded, do all of this inside a lock, and cache
        # the resulting flag for future checks
try:
# class singleton lock
sapNetweaverProviderInstance._rfcInstallerLock.acquire(blocking=True)
# check -> lock -> check
if (self._areRfcCallsEnabled != None):
# flag was initialized prior to obtaining the lock
return self._areRfcCallsEnabled
# ensure this provider instance has necessary config settings to enable RFC SDK calls
if (not self.sapUsername or
not self.sapPassword or
not self.sapClientId or
not self.sapRfcSdkBlobUrl or
not self.sapLogonGroup):
self.tracer.info("%s Netweaver RFC calls disabled for because missing one or more required " +
"config properties: sapUsername, sapPassword, sapClientId, sapLogonGroup and sapRfcSdkBlobUrl",
self.logTag)
self._areRfcCallsEnabled = False
return False
# only attempt to install RFC SDK once per process execution
if (sapNetweaverProviderInstance._isRfcInstalled == None):
sapNetweaverProviderInstance._isRfcInstalled = self._trySetupRfcSdk()
self._areRfcCallsEnabled = sapNetweaverProviderInstance._isRfcInstalled
return self._areRfcCallsEnabled
except Exception as e:
self.tracer.error("%s Exception trying to check if rfc sdk metrics are enabled, %s", self.logTag, e, exc_info=True)
sapNetweaverProviderInstance._isRfcInstalled = False
self._areRfcCallsEnabled = False
finally:
sapNetweaverProviderInstance._rfcInstallerLock.release()
return False
"""
validate that RFC SDK package has been installed and configured correctly and is usable by pyrfc module.
If pyrfc module cannot be imported, then potentially attempt to download RFC SDK blob, install to local system,
and configure necessary environment variables and system settings so that the libraries can be
successfully loaded by the pyrfc module.
    Returns flag indicating whether pyrfc module can be imported (ie. whether RFC calls can be enabled)
Pre-requisites for RFC SDK installation attempt:
1.) Customer provided config property sapRfcSdkBlobUrl must be non-empty.
2.) python module for "pynwrfc" must be installed
3.) was the last failed SDK installation attempt more than N minutes ago (defined by MINIMUM_RFC_INSTALL_RETRY_INTERVAL)
4.) does the sapRfcSdkBlobUrl provided by customer actually exist in the storage account
5.) was the last_modified timestamp on the sapRfcSdkBlobUrl blob modified since the last failed installation attempt
"""
def _trySetupRfcSdk(self) -> bool:
try:
# if no RFC SDK download blob url specified, treat as kill switch to disable any RFC calls
if (not self.sapRfcSdkBlobUrl):
self.tracer.info("%s No user provided RFC SDK blob url, will not leverage RFC SDK. quitting...", self.logTag)
return False
installer = SapRfcSdkInstaller(tracer=self.tracer, installPath=PATH_RFC_SDK_INSTALL)
# environment variables must be initialized before RFC and pyrfc installation can be validated
self.tracer.info("%s initializing RFC SDK environment...", self.logTag)
if (not installer.initRfcSdkEnvironment()):
self.tracer.error("%s failed to initialize rfc sdk environment pre-requisites", self.logTag)
return False
# if we are able to successfully import the pyrfc connector module, that means RFC SDK
# libraries must be installed and were able to be found by pyrfc package initialization,
# so no need to do any further checks.
if (installer.isPyrfcModuleUsable()):
# pyrfc package is usable, which means RFC SDK is already installed and environment configured correctly
self.tracer.info("%s Pyrfc module is usable, RFC calls will be enabled", self.logTag)
return True
# if pyrfc module cannot be imported, check to see if it is even installed. Assumption is that
# pyrfc module is installed as part of container image, so if it is missing something is wrong
# there is no need to even try to install the RFC SDK
if (not installer.isPyrfcModuleInstalled()):
self.tracer.error("%s Pyrfc module is not installed, RFC calls will be disabled", self.logTag)
return False
# check last sdk install attempt time so we can limit how often we retry
# to download and install SDK on persistent failures (eg. no more than once every 30 mins)
lastSdkInstallAttemptTime = installer.getLastSdkInstallAttemptTime()
if (lastSdkInstallAttemptTime > (datetime.now(timezone.utc) - MINIMUM_RFC_INSTALL_RETRY_INTERVAL)):
self.tracer.info("%s last RFC SDK install attempt was %s, minimum attempt retry %s, skipping...",
self.logTag,
lastSdkInstallAttemptTime,
MINIMUM_RFC_INSTALL_RETRY_INTERVAL)
return False
self.tracer.info("%s RFC SDK is not installed, so attempt installation now...", self.logTag)
blobStorageAccount = AzureStorageAccount(tracer=self.tracer,
sapmonId=self.ctx.sapmonId,
msiClientId=self.ctx.msiClientId,
subscriptionId=self.ctx.vmInstance["subscriptionId"],
resourceGroup=self.ctx.vmInstance["resourceGroupName"])
# first check that rfc sdk download blob exists in Azure Storage account, and if it
            # exists, also fetch the last_modified timestamp metadata
doesPackageExist, packageLastModifiedTime = installer.isRfcSdkAvailableForDownload(
blobUrl=self.sapRfcSdkBlobUrl,
storageAccount=blobStorageAccount)
if (not doesPackageExist):
self.tracer.error("%s User provided RFC SDK blob does not exist %s, skipping...", self.logTag, self.sapRfcSdkBlobUrl)
return False
self.tracer.info("%s user provided RFC SDK blob exists for download %s, lastModified=%s",
self.logTag, self.sapRfcSdkBlobUrl, packageLastModifiedTime)
# the user provided sdk blob exists, so before we download compare the last_modified timestamp
# with the last modified time of the last download attempt. If nothing has changed,
# then no need to try and download the package again
# TODO: confirm, should we go ahead and try to re-download previously failed packages
# once every 30 minutes anyway? just in case failure was something external?
lastInstallPackageModifiedTime = installer.getLastSdkInstallPackageModifiedTime()
if (packageLastModifiedTime == lastInstallPackageModifiedTime):
self.tracer.info("%s rfc sdk download package has not been modified since last download " +
"attempt (last_modified=%s), will not download again",
self.logTag,
lastInstallPackageModifiedTime)
return False
self.tracer.info("%s user provided rfc sdk package last_modified (%s) has changed " +
"since last install attempt (%s), attempting to re-download and install",
self.logTag,
packageLastModifiedTime,
lastInstallPackageModifiedTime)
# try to download user provided RFC SDK blob, install to local system and configure necessary
# environment variables and system settings so that it can be usable by pyrfc module
if (not installer.downloadAndInstallRfcSdk(blobUrl=self.sapRfcSdkBlobUrl, storageAccount=blobStorageAccount)):
self.tracer.error("%s failed to download and install rfc sdk package, RFC calls will not be enabled...", self.logTag)
return False
# on Linux pyrfc module may not be usable upon first install attempt, as it appears that unpacking
# libraries to the LD_LIBRARY_PATH env variable after the python process starts may not pick up the change.
# The module should be usable on the next sapmon process run.
if (not installer.isPyrfcModuleUsable()):
self.tracer.error("%s pyrfc module still not usable after RFC SDK install (might require process restart), " +
"RFC calls will not be enabled...",
self.logTag)
return False
self.tracer.info("%s pyrfc module is usable after RFC SDK install, RFC calls will be enabled...", self.logTag)
return True
except Exception as e:
self.tracer.error("%s exception trying to setup and validate RFC SDK, RFC calls will be disabled: %s", self.logTag, e, exc_info=True)
return False
###########################
class sapNetweaverProviderCheck(ProviderCheck):
lastResult = []
# hard-coded set of action names that require RFC SDK to be usable
# and can override runtime isEnabled() check if RFC is not usable
rfcCheckNames = {'SMON_Metrics', 'SWNC_Workload_Metrics', 'SDF_Short_Dumps_Metrics', 'Sys_Log_Metrics',
'Failed_Updates_Metrics', 'Batch_Jobs_Metrics', 'Inbound_Queues_Metrics', 'Outbound_Queues_Metrics',
'Enqueue_Read_Metrics'}
def __init__(self,
provider: ProviderInstance,
**kwargs
):
super().__init__(provider, **kwargs)
self.lastRunLocal = None
self.lastRunServer = None
# provider check common logging prefix
self.logTag = "[%s][%s]" % (self.fullName, self.providerInstance.sapSid)
"""
return flag indicating whether this check instances requires the SAP RFC SDK to be installed and usable
"""
def doesCheckRequireRfcSdk(self) -> bool:
return self.name in sapNetweaverProviderCheck.rfcCheckNames
"""
    override base ProviderCheck implementation so that RFC metric collection checks, while enabled in
    the default Provider JSON configuration, are treated as disabled at runtime if the RFC SDK
    is not configured (to reduce log spam)
"""
def isEnabled(self) -> bool:
if not self.state["isEnabled"]:
return False
# if this check requires RFC and RFC is not installed, then treat as disabled
if (self.doesCheckRequireRfcSdk()):
if (not self.providerInstance.areRfcMetricsEnabled()):
return False
return True
def _getFormattedTimestamp(self) -> str:
return datetime.utcnow().isoformat()
def _parseResult(self, result: object) -> list:
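        # wrap the single serialized result in a list so callers can treat it the same as _parseResults output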
return [helpers.serialize_object(result, dict)]
def _parseResults(self, results: list) -> list:
return helpers.serialize_object(results, dict)
def _getServerTimestamp(self) -> datetime:
self.tracer.info("%s fetching current timestamp from message server", self.logTag)
message_server_instances = self.providerInstance.getInstances(filterFeatures=['MESSAGESERVER'], filterType='include', useCache=True)
date = datetime.fromisoformat(self._getFormattedTimestamp())
# Get timestamp from the first message server that returns a valid date
for instance in message_server_instances:
hostname = instance['hostname']
instanceNr = str(instance['instanceNr']).zfill(2)
port = self.providerInstance.getMessageServerPortFromInstanceNr(instanceNr)
hostname = self.providerInstance.getFullyQualifiedDomainName(hostname)
message_server_endpoint = "http://%s:%s/" % (hostname, port)
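            # e.g. for instance nr '00' this is http://<fqdn>:8100/ (message server http port of the form 81<NR>)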
try:
# We only care about the date in the response header. so we ignore the response body
# 'Thu, 04 Mar 2021 05:02:12 GMT'
# NOTE: we don't need to follow redirects because the redirect response itself 300-3XX
# will have the 'date' header as well. In some cases we were following a chain
# of redirects that would terminate in a 404, which would not have the 'date' header
response = requests.get(message_server_endpoint, allow_redirects=False)
if ('date' not in response.headers):
raise Exception("no 'date' response header found for response status:%s/%s from:%s"
% (response.status_code, response.reason, message_server_endpoint))
date = datetime.strptime(response.headers['date'], '%a, %d %b %Y %H:%M:%S %Z')
self.tracer.info("%s received message server %s header: %s, parsed time: %s",
self.logTag,
message_server_endpoint,
response.headers['date'],
date)
break
except Exception as e:
self.tracer.info("%s suppressing expected error while fetching server time during HTTP GET request to url %s: %s ",
self.logTag, message_server_endpoint, e)
return date
def _actionGetSystemInstanceList(self) -> None:
self.tracer.info("%s refreshing list of system instances", self.logTag)
self.lastRunLocal = datetime.utcnow()
        # when performing the actual provider check action, always fetch a fresh instance list snapshot and refresh the cache
instanceList = self.providerInstance.getInstances(useCache=False)
self.lastRunServer = self._getServerTimestamp()
# Update host config, if new list is fetched
# Parse dictionary and add current timestamp and SID to data and log it
if len(instanceList) != 0:
currentTimestamp = self._getFormattedTimestamp()
for instance in instanceList:
instance['timestamp'] = currentTimestamp
instance['serverTimestamp'] = self.lastRunServer.isoformat()
instance['SID'] = self.providerInstance.sapSid
instance['subdomain'] = self.providerInstance.sapSubdomain
self.lastResult = instanceList
# Update internal state
if not self.updateState():
raise Exception("%s failed to update state" % self.logTag)
self.tracer.info("%s successfully fetched system instance list", self.logTag)
def _executeWebServiceRequest(self, apiName: str, filterFeatures: list, filterType: str, parser: Callable[[Any], list] = None) -> None:
self.tracer.info("[%s] executing web service request: %s" % (self.fullName, apiName))
self.lastRunLocal = datetime.utcnow()
        # track latency of entire method execution with dependencies
startTime = time()
if parser is None:
parser = self._parseResults
# Use cached list of instances if available since they don't change that frequently; else fetch afresh.
# filter down to just the instances we need for this SOAP API type
sapInstances = self.providerInstance.getInstances(useCache=True, filterFeatures=filterFeatures, filterType=filterType)
self.lastRunServer = self._getServerTimestamp()
if len(sapInstances) == 0:
self.tracer.info("%s no instances found that support this API: %s", self.logTag, apiName)
# Call web service
all_results = []
currentTimestamp = self._getFormattedTimestamp()
for instance in sapInstances:
# default to https unless the httpsPort was not defined, in which case fallback to http
httpProtocol = "https"
port = instance['httpsPort']
if ((not port) or port == "0"):
# fallback to http port instead
httpProtocol = "http"
port = instance['httpPort']
results = []
try:
client = self.providerInstance.getClient(instance['hostname'], httpProtocol, port)
results = self.providerInstance.callSoapApi(client, apiName)
if(apiName == "GetProcessList"):
results = self._sanitizeGetProcessList(results)
elif(apiName == "ABAPGetWPTable"):
results = self._sanitizeABAPGetWPTable(results)
except Exception as e:
self.tracer.error("%s unable to call the Soap Api %s - %s://%s:%s, %s", self.logTag, apiName, httpProtocol, instance['hostname'], port, e, exc_info=True)
continue
if len(results) != 0:
parsed_results = parser(results)
for result in parsed_results:
result['hostname'] = instance['hostname']
result['instanceNr'] = instance['instanceNr']
result['subdomain'] = self.providerInstance.sapSubdomain
result['timestamp'] = currentTimestamp
result['serverTimestamp'] = self.lastRunServer.isoformat()
result['SID'] = self.providerInstance.sapSid
all_results.extend(parsed_results)
if len(all_results) == 0:
self.tracer.info("%s no results found for: %s", self.logTag, apiName)
self.lastResult = all_results
# Update internal state
if not self.updateState():
raise Exception("[%s] failed to update state for web service request: %s [%d ms]" % \
(self.logTag, apiName, TimeUtils.getElapsedMilliseconds(startTime)))
self.tracer.info("%s successfully processed web service request: %s [%d ms]",
self.logTag, apiName, TimeUtils.getElapsedMilliseconds(startTime))
def _actionExecuteGenericWebServiceRequest(self, apiName: str, filterFeatures: list, filterType: str) -> None:
self._executeWebServiceRequest(apiName, filterFeatures, filterType, self._parseResults)
def _actionExecuteEnqGetStatistic(self, apiName: str, filterFeatures: list, filterType: str) -> None:
self._executeWebServiceRequest(apiName, filterFeatures, filterType, self._parseResult)
"""
Method to parse the value based on the key provided and set the values with None value to empty string ''
"""
def _getKeyValue(self, dictionary, key, apiName):
if key not in dictionary:
raise ValueError("Result received for api %s does not contain key: %s"% (apiName, key))
if(dictionary[key] == None):
dictionary[key] = ""
return dictionary[key]
"""
Method to parse the results from ABAPGetWPTable and set the strings with None value to empty string ''
"""
def _sanitizeABAPGetWPTable(self, records: list) -> list:
apiName = "ABAPGetWPTable"
processed_results = list()
for record in records:
processed_result = {
"Action": self._getKeyValue(record, 'Action', apiName),
"Client": self._getKeyValue(record, 'Client', apiName),
"Cpu": self._getKeyValue(record, 'Cpu', apiName),
"Err": self._getKeyValue(record, 'Err', apiName),
"No": self._getKeyValue(record, 'No', apiName),
"Pid": self._getKeyValue(record, 'Pid', apiName),
"Program": self._getKeyValue(record, 'Program', apiName),
"Reason": self._getKeyValue(record, 'Reason', apiName),
"Sem": self._getKeyValue(record, 'Sem', apiName),
"Start": self._getKeyValue(record, 'Start', apiName),
"Status": self._getKeyValue(record, 'Status', apiName),
"Table": self._getKeyValue(record, 'Table', apiName),
"Time": self._getKeyValue(record, 'Time', apiName),
"Typ": self._getKeyValue(record, 'Typ', apiName),
"User": self._getKeyValue(record, 'User', apiName)
}
processed_results.append(processed_result)
return processed_results
"""
Method to parse the results from GetProcessList and set the strings with None value to empty string ''
"""
def _sanitizeGetProcessList(self, records: list) -> list:
apiName = "GetProcessList"
processed_results = list()
for record in records:
processed_result = {
"description": self._getKeyValue(record, 'description', apiName),
"dispstatus": self._getKeyValue(record, 'dispstatus', apiName),
"elapsedtime": self._getKeyValue(record, 'elapsedtime', apiName),
"name": self._getKeyValue(record, 'name', apiName),
"pid": self._getKeyValue(record, 'pid', apiName),
"starttime": self._getKeyValue(record, 'starttime', apiName),
"textstatus": self._getKeyValue(record, 'textstatus', apiName)
}
processed_results.append(processed_result)
return processed_results
"""
netweaver provider check action to query for SDF/SMON Analysis Run metrics
"""
def _actionGetSmonAnalysisMetrics(self) -> None:
# base class will always call generateJsonString(), so we must always be sure to set the lastResult
# regardless of success or failure
self.lastResult = []
try:
# initialize hostname log string here to default of SID in case we cannot identify a specific dispatcher host
sapHostnameStr = self.providerInstance.sapSid
if (not self.providerInstance.areRfcMetricsEnabled()):
self.tracer.info("%s Skipping SMON metrics because RFC SDK metrics not enabled...", self.logTag)
return
            # track latency of entire method execution with dependencies
latencyStartTime = time()
# initialize a client for the first healthy MessageServer instance we find
client = self.providerInstance.getRfcClient(logTag=self.logTag)
# update logging prefix with the specific instance details of the client
sapHostnameStr = "%s|%s" % (client.Hostname, client.InstanceNr)
# get metric query window based on our last successful query where results were returned
(startTime, endTime) = client.getQueryWindow(lastRunServerTime=self.lastRunServer,
minimumRunIntervalSecs=self.frequencySecs,
logTag=self.logTag)
self.lastResult = client.getSmonMetrics(startDateTime=startTime, endDateTime=endTime, logTag=self.logTag)
self.tracer.info("%s successfully queried SMON metrics for %s [%d ms]",
self.logTag, sapHostnameStr, TimeUtils.getElapsedMilliseconds(latencyStartTime))
self.lastRunLocal = datetime.now(timezone.utc)
self.lastRunServer = endTime
# only update state on successful query attempt
self.updateState()
except Exception as e:
self.tracer.error("%s exception trying to fetch SMON Analysis Run metrics for %s [%d ms], error: %s",
self.logTag,
sapHostnameStr,
TimeUtils.getElapsedMilliseconds(latencyStartTime),
e,
exc_info=True)
raise
"""
netweaver provider check action to query for SWNC workload statistics and decorate with ST03 metric calculations
"""
def _actionGetSwncWorkloadMetrics(self) -> None:
# base class will always call generateJsonString(), so we must always be sure to set the lastResult
# regardless of success or failure
self.lastResult = []
try:
# initialize hostname log string here to default of SID in case we cannot identify a specific dispatcher host
sapHostnameStr = self.providerInstance.sapSid
if (not self.providerInstance.areRfcMetricsEnabled()):
self.tracer.info("%s Skipping SWNC metrics because RFC SDK metrics not enabled...", self.logTag)
return
            # track latency of entire method execution with dependencies
latencyStartTime = time()
# initialize a client for the first healthy MessageServer instance we find
client = self.providerInstance.getRfcClient(logTag=self.logTag)
# update logging prefix with the specific instance details of the client
sapHostnameStr = "%s|%s" % (client.Hostname, client.InstanceNr)
# get metric query window based on our last successful query where results were returned
(startTime, endTime) = client.getQueryWindow(lastRunServerTime=self.lastRunServer,
minimumRunIntervalSecs=self.frequencySecs,
logTag=self.logTag)
self.lastResult = client.getSwncWorkloadMetrics(startDateTime=startTime, endDateTime=endTime, logTag=self.logTag)
self.tracer.info("%s successfully queried SWNC workload metrics for %s [%d ms]",
self.logTag, sapHostnameStr, TimeUtils.getElapsedMilliseconds(latencyStartTime))
self.lastRunLocal = datetime.now(timezone.utc)
self.lastRunServer = endTime
# only update state on successful query attempt
self.updateState()
except Exception as e:
self.tracer.error("%s exception trying to fetch SWNC workload metrics for %s [%d ms], error: %s",
self.logTag,
sapHostnameStr,
TimeUtils.getElapsedMilliseconds(latencyStartTime),
e,
exc_info=True)
raise
"""
netweaver provider check action to query for short dumps
"""
def _actionGetShortDumpsMetrics(self) -> None:
# base class will always call generateJsonString(), so we must always be sure to set the lastResult
# regardless of success or failure
self.lastResult = []
try:
# initialize hostname log string here to default of SID in case we cannot identify a specific dispatcher host
sapHostnameStr = self.providerInstance.sapSid
if (not self.providerInstance.areRfcMetricsEnabled()):
self.tracer.info("%s Skipping short dumps metrics because RFC SDK metrics not enabled...", self.logTag)
return
            # track latency of entire method execution with dependencies
latencyStartTime = time()
# initialize a client for the first healthy MessageServer instance we find
client = self.providerInstance.getRfcClient(logTag=self.logTag)
# update logging prefix with the specific instance details of the client
sapHostnameStr = "%s|%s" % (client.Hostname, client.InstanceNr)
# get metric query window based on our last successful query where results were returned
(startTime, endTime) = client.getQueryWindow(lastRunServerTime=self.lastRunServer,
minimumRunIntervalSecs=self.frequencySecs,
logTag=self.logTag)
self.lastResult = client.getShortDumpsMetrics(startDateTime=startTime, endDateTime=endTime, logTag=self.logTag)
self.tracer.info("%s successfully queried short dumps metrics for %s [%d ms]",
self.logTag, sapHostnameStr, TimeUtils.getElapsedMilliseconds(latencyStartTime))
self.lastRunLocal = datetime.now(timezone.utc)
self.lastRunServer = endTime
# only update state on successful query attempt
self.updateState()
except Exception as e:
self.tracer.error("%s exception trying to fetch short dumps metrics for %s [%d ms], error: %s",
self.logTag,
sapHostnameStr,
TimeUtils.getElapsedMilliseconds(latencyStartTime),
e,
exc_info=True)
raise
"""
netweaver provider check action to query for sys logs
"""
def _actionGetSysLogMetrics(self) -> None:
# base class will always call generateJsonString(), so we must always be sure to set the lastResult
# regardless of success or failure
self.lastResult = []
try:
# initialize hostname log string here to default of SID in case we cannot identify a specific dispatcher host
sapHostnameStr = self.providerInstance.sapSid
if (not self.providerInstance.areRfcMetricsEnabled()):
self.tracer.info("%s Skipping sys logs metrics because RFC SDK metrics not enabled...", self.logTag)
return
            # track latency of entire method execution with dependencies
latencyStartTime = time()
# initialize a client for the first healthy MessageServer instance we find
client = self.providerInstance.getRfcClient(logTag=self.logTag)
# update logging prefix with the specific instance details of the client
sapHostnameStr = "%s|%s" % (client.Hostname, client.InstanceNr)
# get metric query window based on our last successful query where results were returned
(startTime, endTime) = client.getQueryWindow(lastRunServerTime=self.lastRunServer,
minimumRunIntervalSecs=self.frequencySecs,
logTag=self.logTag)
self.lastResult = client.getSysLogMetrics(startDateTime=startTime, endDateTime=endTime, logTag=self.logTag)
self.tracer.info("%s successfully queried sys log metrics for %s [%d ms]",
self.logTag, sapHostnameStr, TimeUtils.getElapsedMilliseconds(latencyStartTime))
self.lastRunLocal = datetime.now(timezone.utc)
self.lastRunServer = endTime
# only update state on successful query attempt
self.updateState()
except Exception as e:
self.tracer.error("%s exception trying to fetch sys logs metrics for %s [%d ms], error: %s",
self.logTag,
sapHostnameStr,
TimeUtils.getElapsedMilliseconds(latencyStartTime),
e,
exc_info=True)
raise
"""
netweaver provider check action to query for failed updates metrics
"""
def _actionGetFailedUpdatesMetrics(self) -> None:
# base class will always call generateJsonString(), so we must always be sure to set the lastResult
# regardless of success or failure
self.lastResult = []
try:
# initialize hostname log string here to default of SID in case we cannot identify a specific dispatcher host
sapHostnameStr = self.providerInstance.sapSid
if (not self.providerInstance.areRfcMetricsEnabled()):
self.tracer.info("%s Skipping sys logs metrics because RFC SDK metrics not enabled...", self.logTag)
return
            # track latency of entire method execution with dependencies
latencyStartTime = time()
# initialize a client for the first healthy MessageServer instance we find
client = self.providerInstance.getRfcClient(logTag=self.logTag)
# update logging prefix with the specific instance details of the client
sapHostnameStr = "%s|%s" % (client.Hostname, client.InstanceNr)
# get metric query window based on our last successful query where results were returned
(startTime, endTime) = client.getQueryWindow(lastRunServerTime=self.lastRunServer,
minimumRunIntervalSecs=self.frequencySecs,
logTag=self.logTag)
self.lastResult = client.getFailedUpdatesMetrics(logTag=self.logTag)
self.tracer.info("%s successfully queried failed updates metrics for %s [%d ms]",
self.logTag, sapHostnameStr, TimeUtils.getElapsedMilliseconds(latencyStartTime))
self.lastRunLocal = datetime.now(timezone.utc)
self.lastRunServer = endTime
# only update state on successful query attempt
self.updateState()
except Exception as e:
self.tracer.error("%s exception trying to fetch failed updates metrics for %s [%d ms], error: %s",
self.logTag,
sapHostnameStr,
TimeUtils.getElapsedMilliseconds(latencyStartTime),
e,
exc_info=True)
raise
"""
netweaver provider check action to query for batch job metrics
"""
def _actionGetBatchJobMetrics(self) -> None:
# base class will always call generateJsonString(), so we must always be sure to set the lastResult
# regardless of success or failure
self.lastResult = []
try:
# initialize hostname log string here to default of SID in case we cannot identify a specific dispatcher host
sapHostnameStr = self.providerInstance.sapSid
if (not self.providerInstance.areRfcMetricsEnabled()):
self.tracer.info("%s Skipping batch jobs metrics because RFC SDK metrics not enabled...", self.logTag)
return
            # track latency of entire method execution with dependencies
latencyStartTime = time()
# initialize a client for the first healthy MessageServer instance we find
client = self.providerInstance.getRfcClient(logTag=self.logTag)
# update logging prefix with the specific instance details of the client
sapHostnameStr = "%s|%s" % (client.Hostname, client.InstanceNr)
# get metric query window based on our last successful query where results were returned
(startTime, endTime) = client.getQueryWindow(lastRunServerTime=self.lastRunServer,
minimumRunIntervalSecs=self.frequencySecs,
logTag=self.logTag)
self.lastResult = client.getBatchJobMetrics(startDateTime=startTime, endDateTime=endTime, logTag=self.logTag)
self.tracer.info("%s successfully queried batch job metrics for %s [%d ms]",
self.logTag, sapHostnameStr, TimeUtils.getElapsedMilliseconds(latencyStartTime))
self.lastRunLocal = datetime.now(timezone.utc)
self.lastRunServer = endTime
# only update state on successful query attempt
self.updateState()
except Exception as e:
self.tracer.error("%s exception trying to fetch failed updates metrics for %s [%d ms], error: %s",
self.logTag,
sapHostnameStr,
TimeUtils.getElapsedMilliseconds(latencyStartTime),
e,
exc_info=True)
raise
"""
netweaver provider check action to query for inbound queues statistics
"""
def _actionGetInboundQueuesMetrics(self) -> None:
# base class will always call generateJsonString(), so we must always be sure to set the lastResult
# regardless of success or failure
self.lastResult = []
try:
# initialize hostname log string here to default of SID in case we cannot identify a specific dispatcher host
sapHostnameStr = self.providerInstance.sapSid
if (not self.providerInstance.areRfcMetricsEnabled()):
self.tracer.info("%s Skipping Current Inbound Queues metrics because RFC SDK metrics not enabled...", self.logTag)
return
            # track latency of entire method execution with dependencies
latencyStartTime = time()
# initialize a client for the first healthy MessageServer instance we find
client = self.providerInstance.getRfcClient(logTag=self.logTag)
# update logging prefix with the specific instance details of the client
sapHostnameStr = "%s|%s" % (client.Hostname, client.InstanceNr)
self.lastResult = client.getInboundQueuesMetrics(logTag=self.logTag)
self.tracer.info("%s successfully queried Current Inbound Queues metrics for %s [%d ms]",
self.logTag, sapHostnameStr, TimeUtils.getElapsedMilliseconds(latencyStartTime))
self.lastRunLocal = datetime.now(timezone.utc)
# only update state on successful query attempt
self.updateState()
except Exception as e:
self.tracer.error("%s exception trying to fetch Current Inbound Queues metrics for %s [%d ms], error: %s",
self.logTag,
sapHostnameStr,
TimeUtils.getElapsedMilliseconds(latencyStartTime),
e,
exc_info=True)
raise
"""
netweaver provider check action to query for outbound queues statistics
"""
def _actionGetOutboundQueuesMetrics(self) -> None:
# base class will always call generateJsonString(), so we must always be sure to set the lastResult
# regardless of success or failure
self.lastResult = []
try:
# initialize hostname log string here to default of SID in case we cannot identify a specific dispatcher host
sapHostnameStr = self.providerInstance.sapSid
if (not self.providerInstance.areRfcMetricsEnabled()):
self.tracer.info("%s Skipping Current Outbound Queues metrics because RFC SDK metrics not enabled...", self.logTag)
return
            # track latency of entire method execution with dependencies
latencyStartTime = time()
# initialize a client for the first healthy MessageServer instance we find
client = self.providerInstance.getRfcClient(logTag=self.logTag)
# update logging prefix with the specific instance details of the client
sapHostnameStr = "%s|%s" % (client.Hostname, client.InstanceNr)
self.lastResult = client.getOutboundQueuesMetrics(logTag=self.logTag)
self.tracer.info("%s successfully queried Current Outbound Queues metrics for %s [%d ms]",
self.logTag, sapHostnameStr, TimeUtils.getElapsedMilliseconds(latencyStartTime))
self.lastRunLocal = datetime.now(timezone.utc)
# only update state on successful query attempt
self.updateState()
except Exception as e:
self.tracer.error("%s exception trying to fetch Current Outbound Queues metrics for %s [%d ms], error: %s",
self.logTag,
sapHostnameStr,
TimeUtils.getElapsedMilliseconds(latencyStartTime),
e,
exc_info=True)
raise
"""
netweaver provider check action to query for object lock entries by connecting to ENQUEUE_READ RFC
"""
def _actionGetEnqueueReadMetrics(self) -> None:
# base class will always call generateJsonString(), so we must always be sure to set the lastResult
# regardless of success or failure
self.lastResult = []
try:
# initialize hostname log string here to default of SID in case we cannot identify a specific dispatcher host
sapHostnameStr = self.providerInstance.sapSid
if (not self.providerInstance.areRfcMetricsEnabled()):
self.tracer.info("%s Skipping ENQUEUE_READ metrics because RFC SDK metrics not enabled...", self.logTag)
return
            # track latency of entire method execution with dependencies
latencyStartTime = time()
# initialize a client for the first healthy MessageServer instance we find
client = self.providerInstance.getRfcClient(logTag=self.logTag)
# update logging prefix with the specific instance details of the client
sapHostnameStr = "%s|%s" % (client.Hostname, client.InstanceNr)
self.lastResult = client.getEnqueueReadMetrics(logTag=self.logTag)
self.tracer.info("%s successfully queried ENQUEUE_READ metrics for %s [%d ms]",
self.logTag, sapHostnameStr, TimeUtils.getElapsedMilliseconds(latencyStartTime))
self.lastRunLocal = datetime.now(timezone.utc)
# only update state on successful query attempt
self.updateState()
except Exception as e:
self.tracer.error("%s exception trying to fetch ENQUEUE_READ metrics for %s [%d ms], error: %s",
self.logTag,
sapHostnameStr,
TimeUtils.getElapsedMilliseconds(latencyStartTime),
e,
exc_info=True)
raise
def generateJsonString(self) -> str:
self.tracer.info("%s converting result to json string", self.logTag)
if self.lastResult is not None and len(self.lastResult) != 0:
for result in self.lastResult:
result['SAPMON_VERSION'] = PAYLOAD_VERSION
result['PROVIDER_INSTANCE'] = self.providerInstance.name
result['METADATA'] = self.providerInstance.metadata
resultJsonString = json.dumps(self.lastResult, sort_keys=True, indent=4, cls=JsonEncoder)
self.tracer.debug("%s resultJson=%s", self.logTag, str(resultJsonString))
return resultJsonString
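    # Illustrative note (the shape below is inferred from the enrichment loop above, not part of
    # the original source): after this method runs, each entry of lastResult carries the RFC metric
    # fields plus, roughly:
    #   {"SAPMON_VERSION": <payload version>, "PROVIDER_INSTANCE": <provider name>,
    #    "METADATA": <provider metadata>, ...metric-specific fields returned by the RFC client...}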
def updateState(self) -> bool:
self.tracer.info("%s updating internal state", self.logTag)
self.state['lastRunLocal'] = self.lastRunLocal
self.state['lastRunServer'] = self.lastRunServer
self.tracer.info("%s internal state successfully updated", self.logTag)
return True
| [((30, 0, 30, 67), 'urllib3.disable_warnings', 'urllib3.disable_warnings', ({(30, 25, 30, 66): 'urllib3.exceptions.InsecureRequestWarning'}, {}), '(urllib3.exceptions.InsecureRequestWarning)', False, 'import urllib3\n'), ((34, 37, 34, 58), 'datetime.timedelta', 'timedelta', (), '', False, 'from datetime import datetime, timedelta, timezone\n'), ((40, 32, 40, 53), 'datetime.timedelta', 'timedelta', (), '', False, 'from datetime import datetime, timedelta, timezone\n'), ((46, 24, 46, 30), 'threading.Lock', 'Lock', ({}, {}), '()', False, 'from threading import Lock\n'), ((188, 20, 188, 26), 'time.time', 'time', ({}, {}), '()', False, 'from time import time\n'), ((240, 20, 240, 26), 'time.time', 'time', ({}, {}), '()', False, 'from time import time\n'), ((260, 20, 260, 26), 'time.time', 'time', ({}, {}), '()', False, 'from time import time\n'), ((470, 20, 470, 26), 'time.time', 'time', ({}, {}), '()', False, 'from time import time\n'), ((580, 15, 580, 54), 'zeep.helpers.serialize_object', 'helpers.serialize_object', ({(580, 40, 580, 47): 'results', (580, 49, 580, 53): 'dict'}, {}), '(results, dict)', False, 'from zeep import helpers\n'), ((828, 15, 828, 54), 'zeep.helpers.serialize_object', 'helpers.serialize_object', ({(828, 40, 828, 47): 'results', (828, 49, 828, 53): 'dict'}, {}), '(results, dict)', False, 'from zeep import helpers\n'), ((870, 28, 870, 45), 'datetime.datetime.utcnow', 'datetime.utcnow', ({}, {}), '()', False, 'from datetime import datetime, timedelta, timezone\n'), ((895, 28, 895, 45), 'datetime.datetime.utcnow', 'datetime.utcnow', ({}, {}), '()', False, 'from datetime import datetime, timedelta, timezone\n'), ((898, 20, 898, 26), 'time.time', 'time', ({}, {}), '()', False, 'from time import time\n'), ((1454, 27, 1454, 97), 'json.dumps', 'json.dumps', (), '', False, 'import json\n'), ((190, 24, 190, 30), 'time.time', 'time', ({}, {}), '()', False, 'from time import time\n'), ((243, 22, 243, 31), 'requests.Session', 'Session', ({}, {}), '()', False, 'from requests import Session\n'), ((681, 24, 681, 96), 'netweaver.rfcsdkinstaller.SapRfcSdkInstaller', 'SapRfcSdkInstaller', (), '', False, 'from netweaver.rfcsdkinstaller import PATH_RFC_SDK_INSTALL, SapRfcSdkInstaller\n'), ((715, 33, 719, 108), 'helper.azure.AzureStorageAccount', 'AzureStorageAccount', (), '', False, 'from helper.azure import AzureStorageAccount\n'), ((825, 16, 825, 54), 'zeep.helpers.serialize_object', 'helpers.serialize_object', ({(825, 41, 825, 47): 'result', (825, 49, 825, 53): 'dict'}, {}), '(result, dict)', False, 'from zeep import helpers\n'), ((1038, 31, 1038, 37), 'time.time', 'time', ({}, {}), '()', False, 'from time import time\n'), ((1054, 32, 1054, 58), 'datetime.datetime.now', 'datetime.now', ({(1054, 45, 1054, 57): 'timezone.utc'}, {}), '(timezone.utc)', False, 'from datetime import datetime, timedelta, timezone\n'), ((1086, 31, 1086, 37), 'time.time', 'time', ({}, {}), '()', False, 'from time import time\n'), ((1103, 32, 1103, 58), 'datetime.datetime.now', 'datetime.now', ({(1103, 45, 1103, 57): 'timezone.utc'}, {}), '(timezone.utc)', False, 'from datetime import datetime, timedelta, timezone\n'), ((1136, 31, 1136, 37), 'time.time', 'time', ({}, {}), '()', False, 'from time import time\n'), ((1153, 32, 1153, 58), 'datetime.datetime.now', 'datetime.now', ({(1153, 45, 1153, 57): 'timezone.utc'}, {}), '(timezone.utc)', False, 'from datetime import datetime, timedelta, timezone\n'), ((1185, 31, 1185, 37), 'time.time', 'time', ({}, {}), '()', False, 'from time import time\n'), ((1202, 32, 1202, 
58), 'datetime.datetime.now', 'datetime.now', ({(1202, 45, 1202, 57): 'timezone.utc'}, {}), '(timezone.utc)', False, 'from datetime import datetime, timedelta, timezone\n'), ((1234, 31, 1234, 37), 'time.time', 'time', ({}, {}), '()', False, 'from time import time\n'), ((1251, 32, 1251, 58), 'datetime.datetime.now', 'datetime.now', ({(1251, 45, 1251, 57): 'timezone.utc'}, {}), '(timezone.utc)', False, 'from datetime import datetime, timedelta, timezone\n'), ((1283, 31, 1283, 37), 'time.time', 'time', ({}, {}), '()', False, 'from time import time\n'), ((1300, 32, 1300, 58), 'datetime.datetime.now', 'datetime.now', ({(1300, 45, 1300, 57): 'timezone.utc'}, {}), '(timezone.utc)', False, 'from datetime import datetime, timedelta, timezone\n'), ((1332, 31, 1332, 37), 'time.time', 'time', ({}, {}), '()', False, 'from time import time\n'), ((1344, 32, 1344, 58), 'datetime.datetime.now', 'datetime.now', ({(1344, 45, 1344, 57): 'timezone.utc'}, {}), '(timezone.utc)', False, 'from datetime import datetime, timedelta, timezone\n'), ((1376, 31, 1376, 37), 'time.time', 'time', ({}, {}), '()', False, 'from time import time\n'), ((1388, 32, 1388, 58), 'datetime.datetime.now', 'datetime.now', ({(1388, 45, 1388, 57): 'timezone.utc'}, {}), '(timezone.utc)', False, 'from datetime import datetime, timedelta, timezone\n'), ((1420, 31, 1420, 37), 'time.time', 'time', ({}, {}), '()', False, 'from time import time\n'), ((1432, 32, 1432, 58), 'datetime.datetime.now', 'datetime.now', ({(1432, 45, 1432, 57): 'timezone.utc'}, {}), '(timezone.utc)', False, 'from datetime import datetime, timedelta, timezone\n'), ((230, 51, 230, 68), 'datetime.datetime.utcnow', 'datetime.utcnow', ({}, {}), '()', False, 'from datetime import datetime, timedelta, timezone\n'), ((822, 15, 822, 32), 'datetime.datetime.utcnow', 'datetime.utcnow', ({}, {}), '()', False, 'from datetime import datetime, timedelta, timezone\n'), ((850, 27, 850, 87), 'requests.get', 'requests.get', (), '', False, 'import requests\n'), ((856, 23, 856, 94), 'datetime.datetime.strptime', 'datetime.strptime', ({(856, 41, 856, 65): "response.headers['date']", (856, 67, 856, 93): '"""%a, %d %b %Y %H:%M:%S %Z"""'}, {}), "(response.headers['date'], '%a, %d %b %Y %H:%M:%S %Z')", False, 'from datetime import datetime, timedelta, timezone\n'), ((130, 46, 132, 69), 're.match', 're.match', ({(130, 55, 130, 82): 'REGEX_EXTERNAL_KEYVAULT_URL', (131, 55, 131, 77): 'sapPasswordKeyVaultUrl', (132, 55, 132, 68): 're.IGNORECASE'}, {}), '(REGEX_EXTERNAL_KEYVAULT_URL, sapPasswordKeyVaultUrl, re.IGNORECASE)', False, 'import re\n'), ((245, 43, 245, 141), 'zeep.transports.Transport', 'Transport', (), '', False, 'from zeep.transports import Transport\n'), ((255, 83, 255, 100), 'datetime.datetime.utcnow', 'datetime.utcnow', ({}, {}), '()', False, 'from datetime import datetime, timedelta, timezone\n'), ((707, 45, 707, 71), 'datetime.datetime.now', 'datetime.now', ({(707, 58, 707, 70): 'timezone.utc'}, {}), '(timezone.utc)', False, 'from datetime import datetime, timedelta, timezone\n')] |
pombredanne/docker-scripts | docker_squash/version.py | ecee9f921b22cd44943197635875572185dd015d | version = "1.0.10.dev0"
| [] |
oceanprotocol/plecos | example_usage/example_list_errors.py | ae532df8539e5c327cca57fbc1ea1b1193916cd1 | from pathlib import Path
import plecos
import json
print(plecos.__version__)
#%%
path_to_json_local = Path("~/ocn/plecos/plecos/samples/sample_metadata_local.json").expanduser()
path_to_json_remote = Path("~/ocn/plecos/plecos/samples/sample_metadata_remote.json").expanduser()
path_to_broken_json = Path("~/ocn/plecos/plecos/samples/metadata_local_broken.json").expanduser()
path_to_schema_local = Path("~/ocn/plecos/plecos/schemas/metadata_local_v0_3.json").expanduser()
path_to_schema_remote = Path("~/ocn/plecos/plecos/schemas/metadata_remote_v0_3.json").expanduser()
# Select remote or local metadata
LOCAL=True
if LOCAL:
path_json_file = path_to_json_local
path_schema_file = path_to_schema_local
with open(path_to_json_local) as f:
json_dict = json.load(f)
else:
path_json_file = path_to_json_remote
path_schema_file = path_to_schema_remote
with open(path_to_json_remote) as f:
json_dict = json.load(f)
print("Json file:", path_json_file)
print("Schema file:", path_schema_file)
#%%
del json_dict['base']['files'][0]['index']
# del json_dict['base']['files'][0]['url']
# json_dict['base']['extra'] = 1
plecos.is_valid_dict(json_dict)
# json_dict['base']['files'][0]['url']
# json_dict['base']['EXTRA ATTRIB!'] = 0
# json_dict['base']['files'][0]['EXTRA_ATTR'] = "????"
# json_dict['base']['price'] = "A string is not allowed!"
errors = plecos.list_errors(json_dict, path_schema_file)
if errors:
print("ERRORS:")
for e in errors:
print(e)
else:
print("No errors")
raise
#%%
json_dict = {
"base": {
"name": "10 Monkey Species Small",
"author": "Mario",
"license": "CC0: Public Domain",
"contentType": "jpg/txt",
"price": 5,
"categories": [
"image"
],
"tags": [
"image data",
" animals"
],
"type": "dataset",
"description": "Example description",
"copyrightHolder": "",
"encoding": "",
"compression": "",
"workExample": "",
"inLanguage": "en",
"files": [
{
"url": "https://s3.amazonaws.com/datacommons-seeding-us-east/10_Monkey_Species_Small/assets/training.zip"
},
{
"url": "https://s3.amazonaws.com/datacommons-seeding-us-east/10_Monkey_Species_Small/assets/monkey_labels.txt"
},
{
"url": "https://s3.amazonaws.com/datacommons-seeding-us-east/10_Monkey_Species_Small/assets/validation.zip"
}
],
"links": [
{
"url": "https://s3.amazonaws.com/datacommons-seeding-us-east/10_Monkey_Species_Small/links/sample/sample.zip",
"name": "sample.zip",
"type": "sample"
},
{
"url": "https://github.com/slothkong/CNN_classification_10_monkey_species",
"name": "example code",
"type": "example code"
},
{
"url": "https://s3.amazonaws.com/datacommons-seeding-us-east/10_Monkey_Species_Small/links/discovery/n5151.jpg",
"name": "n5151.jpg",
"type": "discovery"
}
],
"checksum": "0",
},
}
#%%
path_to_schema_local = Path("~/ocn/Plecos/plecos/schemas/metadata_local_190305.json").expanduser()
errors = plecos.list_errors(json_dict, path_to_schema_local)
if errors:
print("ERRORS:")
for e in errors:
print(e)
else:
print("No errors") | [((41, 0, 41, 31), 'plecos.is_valid_dict', 'plecos.is_valid_dict', ({(41, 21, 41, 30): 'json_dict'}, {}), '(json_dict)', False, 'import plecos\n'), ((48, 9, 48, 56), 'plecos.list_errors', 'plecos.list_errors', ({(48, 28, 48, 37): 'json_dict', (48, 39, 48, 55): 'path_schema_file'}, {}), '(json_dict, path_schema_file)', False, 'import plecos\n'), ((117, 9, 117, 60), 'plecos.list_errors', 'plecos.list_errors', ({(117, 28, 117, 37): 'json_dict', (117, 39, 117, 59): 'path_to_schema_local'}, {}), '(json_dict, path_to_schema_local)', False, 'import plecos\n'), ((6, 21, 6, 83), 'pathlib.Path', 'Path', ({(6, 26, 6, 82): '"""~/ocn/plecos/plecos/samples/sample_metadata_local.json"""'}, {}), "('~/ocn/plecos/plecos/samples/sample_metadata_local.json')", False, 'from pathlib import Path\n'), ((7, 22, 7, 85), 'pathlib.Path', 'Path', ({(7, 27, 7, 84): '"""~/ocn/plecos/plecos/samples/sample_metadata_remote.json"""'}, {}), "('~/ocn/plecos/plecos/samples/sample_metadata_remote.json')", False, 'from pathlib import Path\n'), ((8, 22, 8, 84), 'pathlib.Path', 'Path', ({(8, 27, 8, 83): '"""~/ocn/plecos/plecos/samples/metadata_local_broken.json"""'}, {}), "('~/ocn/plecos/plecos/samples/metadata_local_broken.json')", False, 'from pathlib import Path\n'), ((9, 23, 9, 83), 'pathlib.Path', 'Path', ({(9, 28, 9, 82): '"""~/ocn/plecos/plecos/schemas/metadata_local_v0_3.json"""'}, {}), "('~/ocn/plecos/plecos/schemas/metadata_local_v0_3.json')", False, 'from pathlib import Path\n'), ((10, 24, 10, 85), 'pathlib.Path', 'Path', ({(10, 29, 10, 84): '"""~/ocn/plecos/plecos/schemas/metadata_remote_v0_3.json"""'}, {}), "('~/ocn/plecos/plecos/schemas/metadata_remote_v0_3.json')", False, 'from pathlib import Path\n'), ((20, 20, 20, 32), 'json.load', 'json.load', ({(20, 30, 20, 31): 'f'}, {}), '(f)', False, 'import json\n'), ((27, 20, 27, 32), 'json.load', 'json.load', ({(27, 30, 27, 31): 'f'}, {}), '(f)', False, 'import json\n'), ((116, 23, 116, 85), 'pathlib.Path', 'Path', ({(116, 28, 116, 84): '"""~/ocn/Plecos/plecos/schemas/metadata_local_190305.json"""'}, {}), "('~/ocn/Plecos/plecos/schemas/metadata_local_190305.json')", False, 'from pathlib import Path\n')] |
CLRafaelR/pangloss | pangloss/backend.py | 920c509381a8d7831471fc3f22a07e58b53b8c0e | import re
import panflute as pf
from functools import partial
from pangloss.util import smallcapify, break_plain
# regular expression for label formats
label_re = re.compile(r'\{#ex:(\w+)\}')
gb4e_fmt_labelled = """
\\ex\\label{{ex:{label}}}
\\gll {} \\\\
{} \\\\
\\trans {}
"""
gb4e_fmt = """
\\ex
\\gll {} \\\\
{} \\\\
\\trans {}
"""
def gb4e(lst):
"""
Convert an example list into a series of gb4e-formatted interlinear
glosses.
Because example list references are replaced at parsing by Pandoc, the
normal syntax of (@foo) cannot be used for labels; instead, a label syntax
similar to that used for headers (and tables and figures with
pandoc-crossref) is used, namely a {#ex:foo} inserted after the
translation, which will be stripped and replaced with a LaTeX label on the
relevant example.
"""
latex = "\\begin{exe}\n"
for li in lst.content:
lines = break_plain(li.content[0])
if len(lines) != 3: continue
orig, gloss, trans = map(partial(pf.stringify, newlines=False), lines)
gloss = smallcapify(gloss)
label_match = label_re.search(trans)
if label_match:
label = label_match.group(1)
trans = trans[:label_match.start() - 1]
latex += gb4e_fmt_labelled.format(orig, gloss, trans, label=label)
else:
latex += gb4e_fmt.format(orig, gloss, trans)
latex += "\\end{exe}"
return pf.RawBlock(latex, format='latex')
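# Illustrative sketch (the sample sentences are hypothetical; the output shape follows
# gb4e_fmt_labelled above): a list item whose three plain lines are
#   Der Hund bellt
#   the dog barks
#   'The dog barks.' {#ex:dog}
# would be rendered roughly as
#   \ex\label{ex:dog}
#   \gll Der Hund bellt \\
#   the dog barks \\
#   \trans 'The dog barks.'
# inside the surrounding \begin{exe}...\end{exe} environment, with the {#ex:dog} marker
# stripped from the translation line.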
leipzigjs_fmt = """
<div data-gloss>
<p>{}</p>
<p>{}</p>
<p>{}</p>
</div>
"""
def leipzigjs(lst):
"""
Convert an example list into a series of div's suitable for use with
Leipzig.js.
"""
html = ''
for li in lst.content:
lines = break_plain(li.content[0])
if len(lines) != 3: continue
orig, gloss, trans = map(partial(pf.stringify, newlines=False), lines)
html += leipzigjs_fmt.format(orig, gloss, trans)
return pf.RawBlock(html, format='html')
# available formats and backends
formats = {
'latex': {
'gb4e': gb4e
},
'html': {
'leipzigjs': leipzigjs
}
}
| [((8, 11, 8, 39), 're.compile', 're.compile', ({(8, 22, 8, 38): '"""\\\\{#ex:(\\\\w+)\\\\}"""'}, {}), "('\\\\{#ex:(\\\\w+)\\\\}')", False, 'import re\n'), ((56, 11, 56, 45), 'panflute.RawBlock', 'pf.RawBlock', (), '', True, 'import panflute as pf\n'), ((81, 11, 81, 43), 'panflute.RawBlock', 'pf.RawBlock', (), '', True, 'import panflute as pf\n'), ((40, 16, 40, 42), 'pangloss.util.break_plain', 'break_plain', ({(40, 28, 40, 41): 'li.content[0]'}, {}), '(li.content[0])', False, 'from pangloss.util import smallcapify, break_plain\n'), ((44, 16, 44, 34), 'pangloss.util.smallcapify', 'smallcapify', ({(44, 28, 44, 33): 'gloss'}, {}), '(gloss)', False, 'from pangloss.util import smallcapify, break_plain\n'), ((75, 16, 75, 42), 'pangloss.util.break_plain', 'break_plain', ({(75, 28, 75, 41): 'li.content[0]'}, {}), '(li.content[0])', False, 'from pangloss.util import smallcapify, break_plain\n'), ((43, 33, 43, 70), 'functools.partial', 'partial', (), '', False, 'from functools import partial\n'), ((78, 33, 78, 70), 'functools.partial', 'partial', (), '', False, 'from functools import partial\n')] |
xavfernandez/virtualenv | tests/unit/discovery/test_py_spec.py | dd37c7d2af8a21026f4d4b7f43142e4e1e0faf86 | from __future__ import absolute_import, unicode_literals
import itertools
import os
import sys
from copy import copy
import pytest
from virtualenv.discovery.py_spec import PythonSpec
def test_bad_py_spec():
text = "python2.3.4.5"
spec = PythonSpec.from_string_spec(text)
assert text in repr(spec)
assert spec.str_spec == text
assert spec.path == os.path.abspath(text)
content = vars(spec)
del content[str("str_spec")]
del content[str("path")]
assert all(v is None for v in content.values())
def test_py_spec_first_digit_only_major():
spec = PythonSpec.from_string_spec("278")
assert spec.major == 2
assert spec.minor == 78
def test_spec_satisfies_path_ok():
spec = PythonSpec.from_string_spec(sys.executable)
assert spec.satisfies(spec) is True
def test_spec_satisfies_path_nok(tmp_path):
spec = PythonSpec.from_string_spec(sys.executable)
of = PythonSpec.from_string_spec(str(tmp_path))
assert spec.satisfies(of) is False
def test_spec_satisfies_arch():
spec_1 = PythonSpec.from_string_spec("python-32")
spec_2 = PythonSpec.from_string_spec("python-64")
assert spec_1.satisfies(spec_1) is True
assert spec_2.satisfies(spec_1) is False
@pytest.mark.parametrize(
"req, spec",
list(itertools.combinations(["py", "CPython", "python"], 2)) + [("jython", "jython")] + [("CPython", "cpython")],
)
def test_spec_satisfies_implementation_ok(req, spec):
spec_1 = PythonSpec.from_string_spec(req)
spec_2 = PythonSpec.from_string_spec(spec)
assert spec_1.satisfies(spec_1) is True
assert spec_2.satisfies(spec_1) is True
def test_spec_satisfies_implementation_nok():
spec_1 = PythonSpec.from_string_spec("python")
spec_2 = PythonSpec.from_string_spec("jython")
assert spec_2.satisfies(spec_1) is False
assert spec_1.satisfies(spec_2) is False
def _version_satisfies_pairs():
target = set()
version = tuple(str(i) for i in sys.version_info[0:3])
for i in range(len(version) + 1):
req = ".".join(version[0:i])
for j in range(i + 1):
sat = ".".join(version[0:j])
# can be satisfied in both directions
target.add((req, sat))
target.add((sat, req))
return sorted(target)
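# For illustration (assuming the suite runs under a 3.8.1 interpreter): the generated pairs
# include ("", "3"), ("3", "3.8") and ("3.8", "3.8.1") as well as their mirror images, i.e.
# every requirement/candidate combination built from prefixes of the running version.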
@pytest.mark.parametrize("req, spec", _version_satisfies_pairs())
def test_version_satisfies_ok(req, spec):
req_spec = PythonSpec.from_string_spec("python{}".format(req))
sat_spec = PythonSpec.from_string_spec("python{}".format(spec))
assert sat_spec.satisfies(req_spec) is True
def _version_not_satisfies_pairs():
target = set()
version = tuple(str(i) for i in sys.version_info[0:3])
for i in range(len(version)):
req = ".".join(version[0 : i + 1])
for j in range(i + 1):
sat_ver = list(sys.version_info[0 : j + 1])
for l in range(j + 1):
for o in [1, -1]:
temp = copy(sat_ver)
temp[l] += o
sat = ".".join(str(i) for i in temp)
target.add((req, sat))
return sorted(target)
@pytest.mark.parametrize("req, spec", _version_not_satisfies_pairs())
def test_version_satisfies_nok(req, spec):
req_spec = PythonSpec.from_string_spec("python{}".format(req))
sat_spec = PythonSpec.from_string_spec("python{}".format(spec))
assert sat_spec.satisfies(req_spec) is False
def test_relative_spec(tmp_path, monkeypatch):
monkeypatch.chdir(tmp_path)
a_relative_path = str((tmp_path / "a" / "b").relative_to(tmp_path))
spec = PythonSpec.from_string_spec(a_relative_path)
assert spec.path == os.path.abspath(str(tmp_path / a_relative_path))
| [((15, 11, 15, 44), 'virtualenv.discovery.py_spec.PythonSpec.from_string_spec', 'PythonSpec.from_string_spec', ({(15, 39, 15, 43): 'text'}, {}), '(text)', False, 'from virtualenv.discovery.py_spec import PythonSpec\n'), ((26, 11, 26, 45), 'virtualenv.discovery.py_spec.PythonSpec.from_string_spec', 'PythonSpec.from_string_spec', ({(26, 39, 26, 44): '"""278"""'}, {}), "('278')", False, 'from virtualenv.discovery.py_spec import PythonSpec\n'), ((32, 11, 32, 54), 'virtualenv.discovery.py_spec.PythonSpec.from_string_spec', 'PythonSpec.from_string_spec', ({(32, 39, 32, 53): 'sys.executable'}, {}), '(sys.executable)', False, 'from virtualenv.discovery.py_spec import PythonSpec\n'), ((37, 11, 37, 54), 'virtualenv.discovery.py_spec.PythonSpec.from_string_spec', 'PythonSpec.from_string_spec', ({(37, 39, 37, 53): 'sys.executable'}, {}), '(sys.executable)', False, 'from virtualenv.discovery.py_spec import PythonSpec\n'), ((43, 13, 43, 53), 'virtualenv.discovery.py_spec.PythonSpec.from_string_spec', 'PythonSpec.from_string_spec', ({(43, 41, 43, 52): '"""python-32"""'}, {}), "('python-32')", False, 'from virtualenv.discovery.py_spec import PythonSpec\n'), ((44, 13, 44, 53), 'virtualenv.discovery.py_spec.PythonSpec.from_string_spec', 'PythonSpec.from_string_spec', ({(44, 41, 44, 52): '"""python-64"""'}, {}), "('python-64')", False, 'from virtualenv.discovery.py_spec import PythonSpec\n'), ((55, 13, 55, 45), 'virtualenv.discovery.py_spec.PythonSpec.from_string_spec', 'PythonSpec.from_string_spec', ({(55, 41, 55, 44): 'req'}, {}), '(req)', False, 'from virtualenv.discovery.py_spec import PythonSpec\n'), ((56, 13, 56, 46), 'virtualenv.discovery.py_spec.PythonSpec.from_string_spec', 'PythonSpec.from_string_spec', ({(56, 41, 56, 45): 'spec'}, {}), '(spec)', False, 'from virtualenv.discovery.py_spec import PythonSpec\n'), ((62, 13, 62, 50), 'virtualenv.discovery.py_spec.PythonSpec.from_string_spec', 'PythonSpec.from_string_spec', ({(62, 41, 62, 49): '"""python"""'}, {}), "('python')", False, 'from virtualenv.discovery.py_spec import PythonSpec\n'), ((63, 13, 63, 50), 'virtualenv.discovery.py_spec.PythonSpec.from_string_spec', 'PythonSpec.from_string_spec', ({(63, 41, 63, 49): '"""jython"""'}, {}), "('jython')", False, 'from virtualenv.discovery.py_spec import PythonSpec\n'), ((114, 11, 114, 55), 'virtualenv.discovery.py_spec.PythonSpec.from_string_spec', 'PythonSpec.from_string_spec', ({(114, 39, 114, 54): 'a_relative_path'}, {}), '(a_relative_path)', False, 'from virtualenv.discovery.py_spec import PythonSpec\n'), ((18, 24, 18, 45), 'os.path.abspath', 'os.path.abspath', ({(18, 40, 18, 44): 'text'}, {}), '(text)', False, 'import os\n'), ((52, 9, 52, 63), 'itertools.combinations', 'itertools.combinations', ({(52, 32, 52, 59): "['py', 'CPython', 'python']", (52, 61, 52, 62): '(2)'}, {}), "(['py', 'CPython', 'python'], 2)", False, 'import itertools\n'), ((97, 27, 97, 40), 'copy.copy', 'copy', ({(97, 32, 97, 39): 'sat_ver'}, {}), '(sat_ver)', False, 'from copy import copy\n')] |
robertcsapo/dnacenter-ansible | plugins/module_utils/definitions/trigger_image_activation.py | 33f776f8c0bc7113da73191c301dd1807e6b4a43 | from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
module_definition = json.loads(
"""{
"family": "software_image_management_swim",
"name": "trigger_image_activation",
"operations": {
"post": [
"trigger_software_image_activation"
]
},
"parameters": {
"trigger_software_image_activation": [
{
"name": "schedule_validate",
"required": false,
"type": "boolean"
},
{
"array_type": "object",
"name": "payload",
"required": true,
"schema": [
{
"name": "activateLowerImageVersion",
"required": false,
"type": "boolean"
},
{
"name": "deviceUpgradeMode",
"required": false,
"type": "string"
},
{
"name": "deviceUuid",
"required": false,
"type": "string"
},
{
"name": "distributeIfNeeded",
"required": false,
"type": "boolean"
},
{
"array_type": "string",
"name": "imageUuidList",
"required": false,
"schema": [],
"type": "array"
},
{
"array_type": "string",
"name": "smuImageUuidList",
"required": false,
"schema": [],
"type": "array"
}
],
"type": "array"
}
]
},
"responses": {
"trigger_software_image_activation": {
"properties": [
"response",
"version"
],
"type": "object"
}
}
}"""
)
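# Illustrative sketch (values are hypothetical placeholders; only the keys come from the schema
# above): a single "payload" item for trigger_software_image_activation could look like
# {
#     "activateLowerImageVersion": False,
#     "deviceUpgradeMode": "<string>",
#     "deviceUuid": "<device-uuid>",
#     "distributeIfNeeded": True,
#     "imageUuidList": ["<image-uuid>"],
#     "smuImageUuidList": []
# }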
| [((5, 20, 75, 1), 'json.loads', 'json.loads', ({(6, 4, 74, 4): '"""{\n "family": "software_image_management_swim",\n "name": "trigger_image_activation",\n "operations": {\n "post": [\n "trigger_software_image_activation"\n ]\n },\n "parameters": {\n "trigger_software_image_activation": [\n {\n "name": "schedule_validate",\n "required": false,\n "type": "boolean"\n },\n {\n "array_type": "object",\n "name": "payload",\n "required": true,\n "schema": [\n {\n "name": "activateLowerImageVersion",\n "required": false,\n "type": "boolean"\n },\n {\n "name": "deviceUpgradeMode",\n "required": false,\n "type": "string"\n },\n {\n "name": "deviceUuid",\n "required": false,\n "type": "string"\n },\n {\n "name": "distributeIfNeeded",\n "required": false,\n "type": "boolean"\n },\n {\n "array_type": "string",\n "name": "imageUuidList",\n "required": false,\n "schema": [],\n "type": "array"\n },\n {\n "array_type": "string",\n "name": "smuImageUuidList",\n "required": false,\n "schema": [],\n "type": "array"\n }\n ],\n "type": "array"\n }\n ]\n },\n "responses": {\n "trigger_software_image_activation": {\n "properties": [\n "response",\n "version"\n ],\n "type": "object"\n }\n }\n}"""'}, {}), '(\n """{\n "family": "software_image_management_swim",\n "name": "trigger_image_activation",\n "operations": {\n "post": [\n "trigger_software_image_activation"\n ]\n },\n "parameters": {\n "trigger_software_image_activation": [\n {\n "name": "schedule_validate",\n "required": false,\n "type": "boolean"\n },\n {\n "array_type": "object",\n "name": "payload",\n "required": true,\n "schema": [\n {\n "name": "activateLowerImageVersion",\n "required": false,\n "type": "boolean"\n },\n {\n "name": "deviceUpgradeMode",\n "required": false,\n "type": "string"\n },\n {\n "name": "deviceUuid",\n "required": false,\n "type": "string"\n },\n {\n "name": "distributeIfNeeded",\n "required": false,\n "type": "boolean"\n },\n {\n "array_type": "string",\n "name": "imageUuidList",\n "required": false,\n "schema": [],\n "type": "array"\n },\n {\n "array_type": "string",\n "name": "smuImageUuidList",\n "required": false,\n "schema": [],\n "type": "array"\n }\n ],\n "type": "array"\n }\n ]\n },\n "responses": {\n "trigger_software_image_activation": {\n "properties": [\n "response",\n "version"\n ],\n "type": "object"\n }\n }\n}"""\n )', False, 'import json\n')] |
bopchik/Simple-minecraft-mod-launcher | minecraft_launcher_lib/fabric.py | 52e4e8ec351b0bac7eb4fe707f21de8da14b9ac9 | from .helper import download_file, get_user_agent
from .install import install_minecraft_version
from typing import List, Dict, Union
from xml.dom import minidom
import subprocess
import requests
import tempfile
import random
import os
def get_all_minecraft_versions() -> List[Dict[str,Union[str,bool]]]:
"""
Returns all available Minecraft Versions for fabric
"""
FABRIC_MINECARFT_VERSIONS_URL = "https://meta.fabricmc.net/v2/versions/game"
return requests.get(FABRIC_MINECARFT_VERSIONS_URL,headers={"user-agent": get_user_agent()}).json()
def get_stable_minecraft_versions() -> List[str]:
"""
    Returns a list which only contains the stable Minecraft versions that support fabric
"""
minecraft_versions = get_all_minecraft_versions()
stable_versions = []
for i in minecraft_versions:
if i["stable"] == True:
stable_versions.append(i["version"])
return stable_versions
def get_latest_minecraft_version() -> str:
"""
    Returns the latest Minecraft version that supports fabric. This could be an unstable snapshot.
"""
minecraft_versions = get_all_minecraft_versions()
return minecraft_versions[0]["version"]
def get_latest_stable_minecraft_version() -> str:
"""
Returns the latest stable Minecraft version that supports fabric
"""
stable_versions = get_stable_minecraft_versions()
return stable_versions[0]
def is_minecraft_version_supported(version: str) -> bool:
"""
    Checks if a Minecraft version is supported by fabric
"""
minecraft_versions = get_all_minecraft_versions()
for i in minecraft_versions:
if i["version"] == version:
return True
return False
def get_all_loader_versions() -> List[Dict[str,Union[str,bool,int]]]:
"""
Returns all loader versions
"""
FABRIC_LOADER_VERSIONS_URL = "https://meta.fabricmc.net/v2/versions/loader"
return requests.get(FABRIC_LOADER_VERSIONS_URL,headers={"user-agent": get_user_agent()}).json()
def get_latest_loader_version() -> str:
"""
Get the latest loader version
"""
loader_versions = get_all_loader_versions()
return loader_versions[0]["version"]
def get_latest_installer_version() -> str:
"""
Returns the latest installer version
"""
FABRIC_INSTALLER_MAVEN_URL = "https://maven.fabricmc.net/net/fabricmc/fabric-installer/maven-metadata.xml"
r = requests.get(FABRIC_INSTALLER_MAVEN_URL,headers={"user-agent": get_user_agent()})
xml_data = minidom.parseString(r.text)
release = xml_data.getElementsByTagName("release")
return release.item(0).lastChild.data
def install_fabric(path: str, minecraft_version: str,loader_version: str=None):
"""
Install a fabric version
"""
#Get latest loader version if not given
if not loader_version:
loader_version = get_latest_loader_version()
#Make sure the Minecraft version is installed
install_minecraft_version(path,minecraft_version)
#Get installer version
installer_version = get_latest_installer_version()
installer_download_url = f"https://maven.fabricmc.net/net/fabricmc/fabric-installer/{installer_version}/fabric-installer-{installer_version}.jar"
#Generate a temporary path for downloading the installer
installer_path = os.path.join(tempfile.gettempdir(),f"fabric-installer-{random.randrange(100,10000)}.tmp")
#Download the installer
download_file(installer_download_url,installer_path)
#Run the installer see https://fabricmc.net/wiki/install#cli_installation
subprocess.run(["java","-jar",installer_path,"client","-dir",path,"-mcversion",minecraft_version,"-loader",loader_version,"-noprofile","-snapshot"])
    #Delete the installer, we don't need it anymore
os.remove(installer_path)
#Install all libs of fabric
fabric_minecraft_version = f"fabric-loader-{loader_version}-{minecraft_version}"
install_minecraft_version(path,fabric_minecraft_version)
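# Minimal usage sketch (the directory below is a hypothetical example path; adapt it to your
# own launcher data directory before running):
if __name__ == "__main__":
    minecraft_dir = "/path/to/.minecraft"  # hypothetical example path
    mc_version = get_latest_stable_minecraft_version()
    # loader_version is omitted, so the latest fabric loader version is resolved automatically
    install_fabric(minecraft_dir, mc_version)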
| [((73, 15, 73, 42), 'xml.dom.minidom.parseString', 'minidom.parseString', ({(73, 35, 73, 41): 'r.text'}, {}), '(r.text)', False, 'from xml.dom import minidom\n'), ((94, 4, 94, 152), 'subprocess.run', 'subprocess.run', ({(94, 19, 94, 151): "['java', '-jar', installer_path, 'client', '-dir', path, '-mcversion',\n minecraft_version, '-loader', loader_version, '-noprofile', '-snapshot']"}, {}), "(['java', '-jar', installer_path, 'client', '-dir', path,\n '-mcversion', minecraft_version, '-loader', loader_version,\n '-noprofile', '-snapshot'])", False, 'import subprocess\n'), ((96, 4, 96, 29), 'os.remove', 'os.remove', ({(96, 14, 96, 28): 'installer_path'}, {}), '(installer_path)', False, 'import os\n'), ((90, 34, 90, 55), 'tempfile.gettempdir', 'tempfile.gettempdir', ({}, {}), '()', False, 'import tempfile\n'), ((90, 76, 90, 103), 'random.randrange', 'random.randrange', ({(90, 93, 90, 96): '100', (90, 97, 90, 102): '10000'}, {}), '(100, 10000)', False, 'import random\n')] |
Nishkarsh-Tripathi/Sorting-algorithms- | Strand Sort.py | cda25f1a8e7fb5e25e59e69e78f000421b0e4eb0 | # STRAND SORT
# It is a recursive comparison based sorting technique which sorts in increasing order.
# It works by repeatedly pulling sorted sub-lists out of the list to be sorted and merging them
# with a result array.
# Algorithm:
# Create an empty strand (list) and append the first element to it, popping it from the input array
# Compare this element with the rest of the elements of the input array
# if a greater element is found then pop and append it to the strand, otherwise skip it
# Now merge this array to the final output array
# Recur for remaining items in strand and input array.
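# Worked example (trace of the steps above on the driver input [1, 6, 3, 8, 2, 0, 9]):
# strand 1 pulls [1, 6, 8, 9], leaving [3, 2, 0] -> output = [1, 6, 8, 9]
# strand 2 pulls [3] -> merge -> [1, 3, 6, 8, 9]
# strand 3 pulls [2] -> merge -> [1, 2, 3, 6, 8, 9]
# strand 4 pulls [0] -> merge -> [0, 1, 2, 3, 6, 8, 9]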
# Utility Function to merge two arrays
def merge(arr1, arr2):
# list to store merged output
merged_list = []
# while there are elements in both arrays
while len(arr1) and len(arr2):
        # the smaller of the two leading elements gets appended, as the resultant array must be sorted
if arr1[0] < arr2[0]:
merged_list.append(arr1.pop(0))
else:
merged_list.append(arr2.pop(0))
    # once either array is exhausted, append the remaining part of the other
    # to the merged list
merged_list += arr1
merged_list += arr2
# return the merged list
return merged_list
# Function to return the strand (sorted sub-list)
def strand(arr):
# append the first element to the strand
s = [arr.pop(0)]
# initialise a pointer
i = 0
    # while the pointer is less than the length of the array
    while i < len(arr):
# compare the input array elements to the last element of the strand
if arr[i] > s[-1]:
# if we found a greater element than s[-1] then pop it and append to the strand
s.append(arr.pop(i))
else:
# else increment
i += 1
# return the strand
return s
# Strand Sort Function
def strand_sort(arr):
# initialise the output array with the strand
output = strand(arr)
# while there are elements in the array
while len(arr):
# merge the strand and previous output list to create a new list
output = merge(output, strand(arr))
# return the sorted output
return output
# Driver Code
arr = [1, 6, 3, 8, 2, 0, 9]
print(strand_sort(arr))
# Time Complexity : O(n^2) [Worst]
# O(n*log(n)) [Average]
# Space Complexity : O(n)
# Stable : Yes
# Inplace : No
| [] |
minhhoang1023/GamestonkTerminal | gamestonk_terminal/cryptocurrency/overview/pycoingecko_model.py | 195dc19b491052df080178c0cc6a9d535a91a704 | """CoinGecko model"""
__docformat__ = "numpy"
# pylint: disable=C0301, E1101
import logging
import re
from typing import Any, List
import numpy as np
import pandas as pd
from pycoingecko import CoinGeckoAPI
from gamestonk_terminal.cryptocurrency.dataframe_helpers import (
create_df_index,
long_number_format_with_type_check,
replace_underscores_in_column_names,
)
from gamestonk_terminal.cryptocurrency.discovery.pycoingecko_model import get_coins
from gamestonk_terminal.decorators import log_start_end
logger = logging.getLogger(__name__)
HOLD_COINS = ["ethereum", "bitcoin"]
NEWS_FILTERS = ["Index", "Title", "Author", "Posted"]
CATEGORIES_FILTERS = [
"Rank",
"Name",
"Change_1h",
"Change_24h",
"Change_7d",
"Market_Cap",
"Volume_24h",
"Coins",
]
STABLES_FILTERS = [
"Rank",
"Name",
"Symbol",
"Price",
"Change_24h",
"Exchanges",
"Market_Cap",
"Change_30d",
]
PRODUCTS_FILTERS = [
"Rank",
"Platform",
"Identifier",
"Supply_Rate",
"Borrow_Rate",
]
PLATFORMS_FILTERS = ["Rank", "Name", "Category", "Centralized"]
EXCHANGES_FILTERS = [
"Rank",
"Trust_Score",
"Id",
"Name",
"Country",
"Year Established",
"Trade_Volume_24h_BTC",
]
EXRATES_FILTERS = ["Index", "Name", "Unit", "Value", "Type"]
INDEXES_FILTERS = ["Rank", "Name", "Id", "Market", "Last", "MultiAsset"]
DERIVATIVES_FILTERS = [
"Rank",
"Market",
"Symbol",
"Price",
"Pct_Change_24h",
"Contract_Type",
"Basis",
"Spread",
"Funding_Rate",
"Volume_24h",
]
COINS_COLUMNS = [
"symbol",
"name",
"current_price",
"market_cap",
"market_cap_rank",
"price_change_percentage_7d_in_currency",
"price_change_percentage_24h_in_currency",
"total_volume",
]
@log_start_end(log=logger)
def get_holdings_overview(endpoint: str = "bitcoin") -> List[Any]:
"""Returns public companies that holds ethereum or bitcoin [Source: CoinGecko]
Parameters
----------
endpoint : str
"bitcoin" or "ethereum"
Returns
-------
List:
- str: Overall statistics
- pandas.DataFrame: Companies holding crypto
"""
cg = CoinGeckoAPI()
data = cg.get_companies_public_treasury_by_coin_id(coin_id=endpoint)
stats_str = f"""{len(data["companies"])} companies hold a total of {long_number_format_with_type_check(data["total_holdings"])} {endpoint} ({data["market_cap_dominance"]}% of market cap dominance) with the current value of {long_number_format_with_type_check(int(data["total_value_usd"]))} USD dollars""" # noqa
df = pd.json_normalize(data, record_path=["companies"])
df.columns = list(
map(
lambda x: replace_underscores_in_column_names(x)
if isinstance(x, str)
else x,
df.columns,
)
)
return [stats_str, df]
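# For illustration: get_holdings_overview("ethereum") would return a two-element list
# [overview_sentence, companies_dataframe], where the sentence summarizes total holdings and
# market-cap dominance and the dataframe lists the individual holding companies.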
SORT_VALUES = [
"market_cap_desc",
"market_cap_asc",
"name_desc",
"name_asc",
"market_cap_change_24h_desc",
"market_cap_change_24h_asc",
]
@log_start_end(log=logger)
def coin_formatter(n):
# TODO: can be improved
coins = []
re_str = "small/(.*)(.jpg|.png|.JPG|.PNG)"
for coin in n:
if re.search(re_str, coin):
coin_stripped = re.search(re_str, coin).group(1)
coins.append(coin_stripped)
return ",".join(coins)
@log_start_end(log=logger)
def get_top_crypto_categories(sort_filter: str = SORT_VALUES[0]) -> pd.DataFrame:
"""Returns top crypto categories [Source: CoinGecko]
Returns
-------
pandas.DataFrame
Rank, Name, Change_1h, Change_7d, Market_Cap, Volume_24h,Coins, Url
"""
if sort_filter in SORT_VALUES:
client = CoinGeckoAPI()
data = client.get_coins_categories()
df = pd.DataFrame(data)
del df["id"]
del df["content"]
del df["updated_at"]
df["top_3_coins"] = df["top_3_coins"].apply(coin_formatter)
df.columns = [
replace_underscores_in_column_names(col) if isinstance(col, str) else col
for col in df.columns
]
return df
return pd.DataFrame()
# TODO: add string with overview
@log_start_end(log=logger)
def get_stable_coins(top: int = 20) -> pd.DataFrame:
"""Returns top stable coins [Source: CoinGecko]
Returns
-------
pandas.DataFrame
Rank, Name, Symbol, Price, Change_24h, Exchanges, Market_Cap, Change_30d, Url
"""
df = get_coins(top=top, category="stablecoins")
return df[COINS_COLUMNS]
@log_start_end(log=logger)
def get_exchanges() -> pd.DataFrame:
"""Get list of top exchanges from CoinGecko API [Source: CoinGecko]
Returns
-------
pandas.DataFrame
Trust_Score, Id, Name, Country, Year_Established, Trade_Volume_24h_BTC, Url
"""
client = CoinGeckoAPI()
df = pd.DataFrame(client.get_exchanges_list(per_page=250))
df.replace({float(np.NaN): None}, inplace=True)
df = df[
[
"trust_score",
"id",
"name",
"country",
"year_established",
"trade_volume_24h_btc",
"url",
]
]
df.columns = [
"Trust_Score",
"Id",
"Name",
"Country",
"Year_Established",
"Trade_Volume_24h_BTC",
"Url",
]
create_df_index(df, "Rank")
return df
@log_start_end(log=logger)
def get_financial_platforms() -> pd.DataFrame:
"""Get list of financial platforms from CoinGecko API [Source: CoinGecko]
Returns
-------
pandas.DataFrame
Rank, Name, Category, Centralized, Url
"""
client = CoinGeckoAPI()
df = pd.DataFrame(client.get_finance_platforms())
df.drop("facts", axis=1, inplace=True)
create_df_index(df, "rank")
df.columns = ["Rank", "Name", "Category", "Centralized", "Url"]
return df
@log_start_end(log=logger)
def get_finance_products() -> pd.DataFrame:
"""Get list of financial products from CoinGecko API
Returns
-------
pandas.DataFrame
Rank, Platform, Identifier, Supply_Rate, Borrow_Rate
"""
client = CoinGeckoAPI()
df = pd.DataFrame(
client.get_finance_products(per_page=250),
columns=[
"platform",
"identifier",
"supply_rate_percentage",
"borrow_rate_percentage",
],
)
df.columns = ["Platform", "Identifier", "Supply_Rate", "Borrow_Rate"]
create_df_index(df, "Rank")
return df
@log_start_end(log=logger)
def get_indexes() -> pd.DataFrame:
"""Get list of crypto indexes from CoinGecko API [Source: CoinGecko]
Returns
-------
pandas.DataFrame
Name, Id, Market, Last, MultiAsset
"""
client = CoinGeckoAPI()
df = pd.DataFrame(client.get_indexes(per_page=250))
df.columns = ["Name", "Id", "Market", "Last", "MultiAsset"]
create_df_index(df, "Rank")
return df
@log_start_end(log=logger)
def get_derivatives() -> pd.DataFrame:
"""Get list of crypto derivatives from CoinGecko API [Source: CoinGecko]
Returns
-------
pandas.DataFrame
Rank, Market, Symbol, Price, Pct_Change_24h, Contract_Type, Basis, Spread, Funding_Rate, Volume_24h,
"""
client = CoinGeckoAPI()
df = pd.DataFrame(client.get_derivatives(include_tickers="unexpired"))
df.drop(
["index", "last_traded_at", "expired_at", "index_id", "open_interest"],
axis=1,
inplace=True,
)
df.rename(columns={"price_percentage_change_24h": "pct_change_24h"}, inplace=True)
create_df_index(df, "rank")
df["price"] = df["price"].apply(
lambda x: "" if not x else float(x.strip("$").replace(",", ""))
)
df.columns = [
"Rank",
"Market",
"Symbol",
"Price",
"Pct_Change_24h",
"Contract_Type",
"Basis",
"Spread",
"Funding_Rate",
"Volume_24h",
]
return df
@log_start_end(log=logger)
def get_exchange_rates() -> pd.DataFrame:
"""Get list of crypto, fiats, commodity exchange rates from CoinGecko API [Source: CoinGecko]
Returns
-------
pandas.DataFrame
Index, Name, Unit, Value, Type
"""
client = CoinGeckoAPI()
df = pd.DataFrame(client.get_exchange_rates()["rates"]).T.reset_index()
df.drop("index", axis=1, inplace=True)
create_df_index(df, "index")
df.columns = ["Index", "Name", "Unit", "Value", "Type"]
return df
@log_start_end(log=logger)
def get_global_info() -> pd.DataFrame:
"""Get global statistics about crypto from CoinGecko API like:
- market cap change
- number of markets
- icos
- number of active crypto
[Source: CoinGecko]
Returns
-------
pandas.DataFrame
Metric, Value
"""
client = CoinGeckoAPI()
results = client.get_global()
total_mcap = results.pop("market_cap_percentage")
btc, eth = total_mcap.get("btc"), total_mcap.get("eth")
for key in ["total_market_cap", "total_volume", "updated_at"]:
del results[key]
results["btc_market_cap_in_pct"] = btc
results["eth_market_cap_in_pct"] = eth
results["altcoin_market_cap_in_pct"] = 100 - (float(eth) + float(btc))
df = pd.Series(results).reset_index()
df.columns = ["Metric", "Value"]
df["Metric"] = df["Metric"].apply(
lambda x: replace_underscores_in_column_names(x) if isinstance(x, str) else x
)
return df
@log_start_end(log=logger)
def get_global_markets_info() -> pd.DataFrame:
"""Get global statistics about crypto markets from CoinGecko API like:
Market_Cap, Volume, Market_Cap_Percentage
[Source: CoinGecko]
Returns
-------
pandas.DataFrame
Market_Cap, Volume, Market_Cap_Percentage
"""
columns = [
"Market_Cap",
"Volume",
"Market_Cap_Percentage",
]
data = []
client = CoinGeckoAPI()
results = client.get_global()
for key in columns:
data.append(results.get(key))
df = pd.DataFrame(data).T
df.columns = columns
df.replace({float("nan"): None}, inplace=True)
return df.reset_index()
@log_start_end(log=logger)
def get_global_defi_info() -> pd.DataFrame:
"""Get global statistics about Decentralized Finances [Source: CoinGecko]
Returns
-------
pandas.DataFrame
Metric, Value
"""
client = CoinGeckoAPI()
results = client.get_global_decentralized_finance_defi()
for key, value in results.items():
try:
results[key] = round(float(value), 4)
except (ValueError, TypeError):
pass
df = pd.Series(results).reset_index()
df.columns = ["Metric", "Value"]
df["Metric"] = df["Metric"].apply(
lambda x: replace_underscores_in_column_names(x) if isinstance(x, str) else x
)
return df
| [((22, 9, 22, 36), 'logging.getLogger', 'logging.getLogger', ({(22, 27, 22, 35): '__name__'}, {}), '(__name__)', False, 'import logging\n'), ((99, 1, 99, 26), 'gamestonk_terminal.decorators.log_start_end', 'log_start_end', (), '', False, 'from gamestonk_terminal.decorators import log_start_end\n'), ((143, 1, 143, 26), 'gamestonk_terminal.decorators.log_start_end', 'log_start_end', (), '', False, 'from gamestonk_terminal.decorators import log_start_end\n'), ((155, 1, 155, 26), 'gamestonk_terminal.decorators.log_start_end', 'log_start_end', (), '', False, 'from gamestonk_terminal.decorators import log_start_end\n'), ((181, 1, 181, 26), 'gamestonk_terminal.decorators.log_start_end', 'log_start_end', (), '', False, 'from gamestonk_terminal.decorators import log_start_end\n'), ((195, 1, 195, 26), 'gamestonk_terminal.decorators.log_start_end', 'log_start_end', (), '', False, 'from gamestonk_terminal.decorators import log_start_end\n'), ((232, 1, 232, 26), 'gamestonk_terminal.decorators.log_start_end', 'log_start_end', (), '', False, 'from gamestonk_terminal.decorators import log_start_end\n'), ((250, 1, 250, 26), 'gamestonk_terminal.decorators.log_start_end', 'log_start_end', (), '', False, 'from gamestonk_terminal.decorators import log_start_end\n'), ((275, 1, 275, 26), 'gamestonk_terminal.decorators.log_start_end', 'log_start_end', (), '', False, 'from gamestonk_terminal.decorators import log_start_end\n'), ((292, 1, 292, 26), 'gamestonk_terminal.decorators.log_start_end', 'log_start_end', (), '', False, 'from gamestonk_terminal.decorators import log_start_end\n'), ((331, 1, 331, 26), 'gamestonk_terminal.decorators.log_start_end', 'log_start_end', (), '', False, 'from gamestonk_terminal.decorators import log_start_end\n'), ((349, 1, 349, 26), 'gamestonk_terminal.decorators.log_start_end', 'log_start_end', (), '', False, 'from gamestonk_terminal.decorators import log_start_end\n'), ((383, 1, 383, 26), 'gamestonk_terminal.decorators.log_start_end', 'log_start_end', (), '', False, 'from gamestonk_terminal.decorators import log_start_end\n'), ((412, 1, 412, 26), 'gamestonk_terminal.decorators.log_start_end', 'log_start_end', (), '', False, 'from gamestonk_terminal.decorators import log_start_end\n'), ((114, 9, 114, 23), 'pycoingecko.CoinGeckoAPI', 'CoinGeckoAPI', ({}, {}), '()', False, 'from pycoingecko import CoinGeckoAPI\n'), ((119, 9, 119, 59), 'pandas.json_normalize', 'pd.json_normalize', (), '', True, 'import pandas as pd\n'), ((177, 11, 177, 25), 'pandas.DataFrame', 'pd.DataFrame', ({}, {}), '()', True, 'import pandas as pd\n'), ((191, 9, 191, 51), 'gamestonk_terminal.cryptocurrency.discovery.pycoingecko_model.get_coins', 'get_coins', (), '', False, 'from gamestonk_terminal.cryptocurrency.discovery.pycoingecko_model import get_coins\n'), ((205, 13, 205, 27), 'pycoingecko.CoinGeckoAPI', 'CoinGeckoAPI', ({}, {}), '()', False, 'from pycoingecko import CoinGeckoAPI\n'), ((228, 4, 228, 31), 'gamestonk_terminal.cryptocurrency.dataframe_helpers.create_df_index', 'create_df_index', ({(228, 20, 228, 22): 'df', (228, 24, 228, 30): '"""Rank"""'}, {}), "(df, 'Rank')", False, 'from gamestonk_terminal.cryptocurrency.dataframe_helpers import create_df_index, long_number_format_with_type_check, replace_underscores_in_column_names\n'), ((242, 13, 242, 27), 'pycoingecko.CoinGeckoAPI', 'CoinGeckoAPI', ({}, {}), '()', False, 'from pycoingecko import CoinGeckoAPI\n'), ((245, 4, 245, 31), 'gamestonk_terminal.cryptocurrency.dataframe_helpers.create_df_index', 'create_df_index', ({(245, 20, 245, 22): 'df', (245, 
24, 245, 30): '"""rank"""'}, {}), "(df, 'rank')", False, 'from gamestonk_terminal.cryptocurrency.dataframe_helpers import create_df_index, long_number_format_with_type_check, replace_underscores_in_column_names\n'), ((260, 13, 260, 27), 'pycoingecko.CoinGeckoAPI', 'CoinGeckoAPI', ({}, {}), '()', False, 'from pycoingecko import CoinGeckoAPI\n'), ((271, 4, 271, 31), 'gamestonk_terminal.cryptocurrency.dataframe_helpers.create_df_index', 'create_df_index', ({(271, 20, 271, 22): 'df', (271, 24, 271, 30): '"""Rank"""'}, {}), "(df, 'Rank')", False, 'from gamestonk_terminal.cryptocurrency.dataframe_helpers import create_df_index, long_number_format_with_type_check, replace_underscores_in_column_names\n'), ((285, 13, 285, 27), 'pycoingecko.CoinGeckoAPI', 'CoinGeckoAPI', ({}, {}), '()', False, 'from pycoingecko import CoinGeckoAPI\n'), ((288, 4, 288, 31), 'gamestonk_terminal.cryptocurrency.dataframe_helpers.create_df_index', 'create_df_index', ({(288, 20, 288, 22): 'df', (288, 24, 288, 30): '"""Rank"""'}, {}), "(df, 'Rank')", False, 'from gamestonk_terminal.cryptocurrency.dataframe_helpers import create_df_index, long_number_format_with_type_check, replace_underscores_in_column_names\n'), ((302, 13, 302, 27), 'pycoingecko.CoinGeckoAPI', 'CoinGeckoAPI', ({}, {}), '()', False, 'from pycoingecko import CoinGeckoAPI\n'), ((311, 4, 311, 31), 'gamestonk_terminal.cryptocurrency.dataframe_helpers.create_df_index', 'create_df_index', ({(311, 20, 311, 22): 'df', (311, 24, 311, 30): '"""rank"""'}, {}), "(df, 'rank')", False, 'from gamestonk_terminal.cryptocurrency.dataframe_helpers import create_df_index, long_number_format_with_type_check, replace_underscores_in_column_names\n'), ((341, 13, 341, 27), 'pycoingecko.CoinGeckoAPI', 'CoinGeckoAPI', ({}, {}), '()', False, 'from pycoingecko import CoinGeckoAPI\n'), ((344, 4, 344, 32), 'gamestonk_terminal.cryptocurrency.dataframe_helpers.create_df_index', 'create_df_index', ({(344, 20, 344, 22): 'df', (344, 24, 344, 31): '"""index"""'}, {}), "(df, 'index')", False, 'from gamestonk_terminal.cryptocurrency.dataframe_helpers import create_df_index, long_number_format_with_type_check, replace_underscores_in_column_names\n'), ((365, 13, 365, 27), 'pycoingecko.CoinGeckoAPI', 'CoinGeckoAPI', ({}, {}), '()', False, 'from pycoingecko import CoinGeckoAPI\n'), ((402, 13, 402, 27), 'pycoingecko.CoinGeckoAPI', 'CoinGeckoAPI', ({}, {}), '()', False, 'from pycoingecko import CoinGeckoAPI\n'), ((422, 13, 422, 27), 'pycoingecko.CoinGeckoAPI', 'CoinGeckoAPI', ({}, {}), '()', False, 'from pycoingecko import CoinGeckoAPI\n'), ((149, 11, 149, 34), 're.search', 're.search', ({(149, 21, 149, 27): 're_str', (149, 29, 149, 33): 'coin'}, {}), '(re_str, coin)', False, 'import re\n'), ((165, 17, 165, 31), 'pycoingecko.CoinGeckoAPI', 'CoinGeckoAPI', ({}, {}), '()', False, 'from pycoingecko import CoinGeckoAPI\n'), ((167, 13, 167, 31), 'pandas.DataFrame', 'pd.DataFrame', ({(167, 26, 167, 30): 'data'}, {}), '(data)', True, 'import pandas as pd\n'), ((406, 9, 406, 27), 'pandas.DataFrame', 'pd.DataFrame', ({(406, 22, 406, 26): 'data'}, {}), '(data)', True, 'import pandas as pd\n'), ((117, 72, 117, 130), 'gamestonk_terminal.cryptocurrency.dataframe_helpers.long_number_format_with_type_check', 'long_number_format_with_type_check', ({(117, 107, 117, 129): "data['total_holdings']"}, {}), "(data['total_holdings'])", False, 'from gamestonk_terminal.cryptocurrency.dataframe_helpers import create_df_index, long_number_format_with_type_check, replace_underscores_in_column_names\n'), ((375, 9, 375, 27), 
'pandas.Series', 'pd.Series', ({(375, 19, 375, 26): 'results'}, {}), '(results)', True, 'import pandas as pd\n'), ((430, 9, 430, 27), 'pandas.Series', 'pd.Series', ({(430, 19, 430, 26): 'results'}, {}), '(results)', True, 'import pandas as pd\n'), ((173, 12, 173, 52), 'gamestonk_terminal.cryptocurrency.dataframe_helpers.replace_underscores_in_column_names', 'replace_underscores_in_column_names', ({(173, 48, 173, 51): 'col'}, {}), '(col)', False, 'from gamestonk_terminal.cryptocurrency.dataframe_helpers import create_df_index, long_number_format_with_type_check, replace_underscores_in_column_names\n'), ((378, 18, 378, 56), 'gamestonk_terminal.cryptocurrency.dataframe_helpers.replace_underscores_in_column_names', 'replace_underscores_in_column_names', ({(378, 54, 378, 55): 'x'}, {}), '(x)', False, 'from gamestonk_terminal.cryptocurrency.dataframe_helpers import create_df_index, long_number_format_with_type_check, replace_underscores_in_column_names\n'), ((433, 18, 433, 56), 'gamestonk_terminal.cryptocurrency.dataframe_helpers.replace_underscores_in_column_names', 'replace_underscores_in_column_names', ({(433, 54, 433, 55): 'x'}, {}), '(x)', False, 'from gamestonk_terminal.cryptocurrency.dataframe_helpers import create_df_index, long_number_format_with_type_check, replace_underscores_in_column_names\n'), ((123, 22, 123, 60), 'gamestonk_terminal.cryptocurrency.dataframe_helpers.replace_underscores_in_column_names', 'replace_underscores_in_column_names', ({(123, 58, 123, 59): 'x'}, {}), '(x)', False, 'from gamestonk_terminal.cryptocurrency.dataframe_helpers import create_df_index, long_number_format_with_type_check, replace_underscores_in_column_names\n'), ((150, 28, 150, 51), 're.search', 're.search', ({(150, 38, 150, 44): 're_str', (150, 46, 150, 50): 'coin'}, {}), '(re_str, coin)', False, 'import re\n')] |
sourceperl/tk-dashboard | docker/messein/board-import-app/app.py | 015ececc670902b02284749ac59f354db4304e48 | #!/usr/bin/env python3
from configparser import ConfigParser
from datetime import datetime
import urllib.parse
import hashlib
import io
import json
import logging
import os
import re
import time
from xml.dom import minidom
import feedparser
import requests
import schedule
import PIL.Image
import PIL.ImageDraw
import PIL.ImageFont
from metar.Metar import Metar
import pytz
import pdf2image
import PIL.Image
import PIL.ImageDraw
from board_lib import CustomRedis, catch_log_except, dt_utc_to_local
from webdav import WebDAV
# some const
USER_AGENT = 'Mozilla/5.0 (X11; Linux x86_64; rv:2.0.1) Gecko/20100101 Firefox/4.0.1'
# some var
owc_doc_dir_last_sync = 0
owc_car_dir_last_sync = 0
# read config
cnf = ConfigParser()
cnf.read('/data/conf/board.conf')
# redis
main_redis_user = cnf.get('redis', 'user')
main_redis_pass = cnf.get('redis', 'pass')
# redis-loos for share
loos_redis_user = cnf.get('redis-loos', 'user')
loos_redis_pass = cnf.get('redis-loos', 'pass')
# gmap img traffic
gmap_img_url = cnf.get('gmap_img', 'img_url')
# gsheet
gsheet_url = cnf.get('gsheet', 'url')
# openweathermap
ow_app_id = cnf.get('openweathermap', 'app_id')
# webdav
webdav_url = cnf.get('owncloud_dashboard', 'webdav_url')
webdav_user = cnf.get('owncloud_dashboard', 'webdav_user')
webdav_pass = cnf.get('owncloud_dashboard', 'webdav_pass')
webdav_reglement_doc_dir = cnf.get('owncloud_dashboard', 'webdav_reglement_doc_dir')
webdav_carousel_img_dir = cnf.get('owncloud_dashboard', 'webdav_carousel_img_dir')
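# NB: sketch of the board.conf layout expected by the cnf.get() calls above
# (section and key names come from this file; values below are placeholders, not real settings):
#   [redis]
#   user = board
#   pass = <password>
#   [redis-loos]
#   user = board
#   pass = <password>
#   [gmap_img]
#   img_url = <google maps traffic image url>
#   [gsheet]
#   url = <google sheet csv export url>
#   [openweathermap]
#   app_id = <api key>
#   [owncloud_dashboard]
#   webdav_url = <owncloud webdav endpoint>
#   webdav_user = <user>
#   webdav_pass = <password>
#   webdav_reglement_doc_dir = <remote document directory>
#   webdav_carousel_img_dir = <remote carousel directory>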
# some class
class DB:
# create connector
main = CustomRedis(host='board-redis-srv', username=main_redis_user, password=main_redis_pass,
socket_timeout=4, socket_keepalive=True)
loos = CustomRedis(host='board-redis-loos-tls-cli', username=loos_redis_user, password=loos_redis_pass,
socket_timeout=4, socket_keepalive=True)
# some function
@catch_log_except()
def air_quality_atmo_ge_job():
url = 'https://services3.arcgis.com/' + \
'Is0UwT37raQYl9Jj/arcgis/rest/services/ind_grandest_5j/FeatureServer/0/query' + \
'?where=%s' % urllib.parse.quote('code_zone IN (54395, 57463, 51454, 67482)') + \
'&outFields=date_ech, code_qual, lib_qual, lib_zone, code_zone' + \
'&returnGeometry=false&resultRecordCount=48' + \
'&orderByFields=%s&f=json' % urllib.parse.quote('date_ech ASC')
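    # the query asks the ArcGIS 'ind_grandest_5j' service for the air quality index of the four
    # zone codes mapped to cities further down (Nancy, Metz, Reims, Strasbourg)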
today_dt_date = datetime.today().date()
# https request
r = requests.get(url, timeout=5.0)
# check error
if r.status_code == 200:
# decode json message
atmo_raw_d = r.json()
# populate zones dict with receive values
zones_d = {}
for record in atmo_raw_d['features']:
# load record data
r_code_zone = record['attributes']['code_zone']
r_ts = int(record['attributes']['date_ech'])
r_dt = datetime.utcfromtimestamp(r_ts / 1000)
r_value = record['attributes']['code_qual']
# retain today value
if r_dt.date() == today_dt_date:
zones_d[r_code_zone] = r_value
# skip key publish if zones_d is empty
if not zones_d:
raise ValueError('dataset is empty')
# create and populate result dict
d_air_quality = {'nancy': zones_d.get(54395, 0),
'metz': zones_d.get(57463, 0),
'reims': zones_d.get(51454, 0),
'strasbourg': zones_d.get(67482, 0)}
# update redis
DB.main.set_as_json('json:atmo', d_air_quality, ex=6 * 3600)
@catch_log_except()
def dir_est_img_job():
# retrieve DIR-est webcams: Houdemont, Velaine-en-Haye, Saint-Nicolas, Côte de Flavigny
for id_redis, lbl_cam, get_code in [('houdemont', 'Houdemont', '18'), ('velaine', 'Velaine', '53'),
('st-nicolas', 'Saint-Nicolas', '49'), ('flavigny', 'Flavigny', '5')]:
r = requests.get('https://webcam.dir-est.fr/app.php/lastimg/%s' % get_code)
if r.status_code == 200:
# load image to PIL and resize it
img = PIL.Image.open(io.BytesIO(r.content))
img.thumbnail([224, 235])
# add text to image
txt_img = '%s - %s' % (lbl_cam, datetime.now().strftime('%H:%M'))
font = PIL.ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeMono.ttf', 16)
draw = PIL.ImageDraw.Draw(img)
draw.text((5, 5), txt_img, (0x10, 0x0e, 0x0e), font=font)
# save image as PNG for redis
redis_io = io.BytesIO()
img.save(redis_io, format='PNG')
# update redis
DB.main.set('img:dir-est:%s:png' % id_redis, redis_io.getvalue(), ex=3600)
@catch_log_except()
def gsheet_job():
# https request
response = requests.get(gsheet_url, timeout=5.0)
# process response
d = dict()
for line in response.iter_lines(decode_unicode=True):
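        # each line of the sheet export is expected to be a "tag,value" CSV pair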
tag, value = line.split(',')
d[tag] = value
redis_d = dict(update=datetime.now().isoformat('T'), tags=d)
DB.main.set_as_json('json:gsheet', redis_d, ex=2 * 3600)
@catch_log_except()
def img_gmap_traffic_job():
# http request
r = requests.get(gmap_img_url, stream=True, timeout=5.0)
if r.status_code == 200:
# convert RAW img format (bytes) to Pillow image
pil_img = PIL.Image.open(io.BytesIO(r.raw.read()))
# crop image
pil_img = pil_img.crop((0, 0, 560, 328))
# pil_img.thumbnail([632, 328])
img_io = io.BytesIO()
pil_img.save(img_io, format='PNG')
# store RAW PNG to redis key
DB.main.set('img:traffic-map:png', img_io.getvalue(), ex=2 * 3600)
@catch_log_except()
def local_info_job():
# do request
l_titles = []
for post in feedparser.parse('https://france3-regions.francetvinfo.fr/societe/rss?r=grand-est').entries:
title = post.title
title = title.strip()
title = title.replace('\n', ' ')
l_titles.append(title)
DB.main.set_as_json('json:news', l_titles, ex=2 * 3600)
@catch_log_except()
def owc_updated_job():
    # check if the owncloud directories have been updated by users (start sync jobs if needed)
global owc_doc_dir_last_sync, owc_car_dir_last_sync
for f in wdv.ls():
item = f['file_path']
item_last_modified = int(f['dt_last_modified'].timestamp())
# document update ?
if item == webdav_reglement_doc_dir:
# update need
if item_last_modified > owc_doc_dir_last_sync:
logging.debug(f'"{webdav_reglement_doc_dir}" seem updated: run "owncloud_sync_doc_job"')
owc_sync_doc_job()
owc_doc_dir_last_sync = item_last_modified
# carousel update ?
elif item == webdav_carousel_img_dir:
# update need
if item_last_modified > owc_car_dir_last_sync:
logging.debug(f'"{webdav_carousel_img_dir}" seem updated: run "owncloud_sync_carousel_job"')
owc_sync_carousel_job()
owc_car_dir_last_sync = item_last_modified
@catch_log_except()
def owc_sync_carousel_job():
# sync owncloud carousel directory with local
# local constants
DIR_CAR_INFOS = 'dir:carousel:infos'
DIR_CAR_RAW = 'dir:carousel:raw:min-png'
# local functions
def update_carousel_raw_data(filename, raw_data):
# build json infos record
md5 = hashlib.md5(raw_data).hexdigest()
js_infos = json.dumps(dict(size=len(raw_data), md5=md5))
# convert raw data to PNG thumbnails
# create default error image
img_to_redis = PIL.Image.new('RGB', (655, 453), (255, 255, 255))
draw = PIL.ImageDraw.Draw(img_to_redis)
draw.text((0, 0), f'loading error (src: "{filename}")', (0, 0, 0))
# replace default image by convert result
try:
# convert png and jpg file
if filename.lower().endswith('.png') or filename.lower().endswith('.jpg'):
# image to PIL
img_to_redis = PIL.Image.open(io.BytesIO(raw_data))
# convert pdf file
elif filename.lower().endswith('.pdf'):
# PDF to PIL: convert first page to PIL image
img_to_redis = pdf2image.convert_from_bytes(raw_data)[0]
except Exception:
pass
# resize and format as raw png
img_to_redis.thumbnail([655, 453])
io_to_redis = io.BytesIO()
img_to_redis.save(io_to_redis, format='PNG')
# redis add (atomic write)
pipe = DB.main.pipeline()
pipe.hset(DIR_CAR_INFOS, filename, js_infos)
pipe.hset(DIR_CAR_RAW, filename, io_to_redis.getvalue())
pipe.execute()
# log sync start
logging.info('start of sync for owncloud carousel')
# list local redis files
local_files_d = {}
for f_name, js_infos in DB.main.hgetall(DIR_CAR_INFOS).items():
try:
filename = f_name.decode()
size = json.loads(js_infos)['size']
local_files_d[filename] = size
except ValueError:
pass
# check "dir:carousel:raw:min-png" consistency
raw_file_l = [f.decode() for f in DB.main.hkeys(DIR_CAR_RAW)]
# remove orphan infos record
for f in list(set(local_files_d) - set(raw_file_l)):
logging.debug(f'remove orphan "{f}" record in hash "{DIR_CAR_INFOS}"')
DB.main.hdel(DIR_CAR_INFOS, f)
del local_files_d[f]
# remove orphan raw-png record
for f in list(set(raw_file_l) - set(local_files_d)):
logging.debug(f'remove orphan "{f}" record in hash "{DIR_CAR_RAW}"')
DB.main.hdel(DIR_CAR_RAW, f)
# list owncloud files (disallow directory)
own_files_d = {}
for f_d in wdv.ls(webdav_carousel_img_dir):
file_path = f_d['file_path']
size = f_d['content_length']
if file_path and not file_path.endswith('/'):
            # search for site tags (_@loos_, _@messein_...) in the filename
# site id is 16 chars max
site_tag_l = re.findall(r'_@([a-zA-Z0-9\-]{1,16})', file_path)
site_tag_l = [s.strip().lower() for s in site_tag_l]
site_tag_ok = 'messein' in site_tag_l or not site_tag_l
# download filter: ignore txt type, heavy file (>10 MB) or name tags mismatch
filter_ok = not file_path.lower().endswith('.txt') \
and (size < 10 * 1024 * 1024) \
and site_tag_ok
# add file to owncloud dict
if filter_ok:
own_files_d[f_d['file_path']] = size
# exist only on local redis
for f in list(set(local_files_d) - set(own_files_d)):
logging.info(f'"{f}" exist only on local -> remove it')
# redis remove (atomic)
pipe = DB.main.pipeline()
pipe.hdel(DIR_CAR_INFOS, f)
pipe.hdel(DIR_CAR_RAW, f)
pipe.execute()
# exist only on remote owncloud
for f in list(set(own_files_d) - set(local_files_d)):
logging.info('"%s" exist only on remote -> download it' % f)
data = wdv.download(os.path.join(webdav_carousel_img_dir, f))
if data:
update_carousel_raw_data(f, data)
    # exists on both sides (update only if the file size changed)
for f in list(set(local_files_d).intersection(own_files_d)):
local_size = local_files_d[f]
remote_size = own_files_d[f]
logging.debug(f'check "{f}" remote size [{remote_size}]/local size [{local_size}]')
if local_size != remote_size:
logging.info(f'"{f}" size mismatch -> download it')
data = wdv.download(os.path.join(webdav_carousel_img_dir, f))
if data:
update_carousel_raw_data(f, data)
# log sync end
logging.info('end of sync for owncloud carousel')
@catch_log_except()
def owc_sync_doc_job():
# sync owncloud document directory with local
# local constants
DIR_DOC_INFOS = 'dir:doc:infos'
DIR_DOC_RAW = 'dir:doc:raw'
# local functions
def update_doc_raw_data(filename, raw_data):
# build json infos record
md5 = hashlib.md5(raw_data).hexdigest()
js_infos = json.dumps(dict(size=len(raw_data), md5=md5))
# redis add (atomic write)
pipe = DB.main.pipeline()
pipe.hset(DIR_DOC_INFOS, filename, js_infos)
pipe.hset(DIR_DOC_RAW, filename, raw_data)
pipe.execute()
# log sync start
logging.info('start of sync for owncloud doc')
# list local redis files
local_files_d = {}
for f_name, js_infos in DB.main.hgetall(DIR_DOC_INFOS).items():
try:
filename = f_name.decode()
size = json.loads(js_infos)['size']
local_files_d[filename] = size
except ValueError:
pass
# check "dir:doc:raw:min-png" consistency
raw_file_l = [f.decode() for f in DB.main.hkeys(DIR_DOC_RAW)]
# remove orphan infos record
for f in list(set(local_files_d) - set(raw_file_l)):
logging.debug(f'remove orphan "{f}" record in hash "{DIR_DOC_INFOS}"')
DB.main.hdel(DIR_DOC_INFOS, f)
del local_files_d[f]
# remove orphan raw-png record
for f in list(set(raw_file_l) - set(local_files_d)):
logging.debug(f'remove orphan "{f}" record in hash "{DIR_DOC_RAW}"')
DB.main.hdel(DIR_DOC_RAW, f)
# list owncloud files (disallow directory)
own_files_d = {}
for f_d in wdv.ls(webdav_reglement_doc_dir):
file_path = f_d['file_path']
size = f_d['content_length']
if file_path and not file_path.endswith('/'):
            # download filter: ignore txt file or heavy file (>10 MB)
ok_load = not file_path.lower().endswith('.txt') \
and (size < 10 * 1024 * 1024)
if ok_load:
own_files_d[f_d['file_path']] = size
# exist only on local redis
for f in list(set(local_files_d) - set(own_files_d)):
logging.info(f'"{f}" exist only on local -> remove it')
# redis remove (atomic)
pipe = DB.main.pipeline()
pipe.hdel(DIR_DOC_INFOS, f)
pipe.hdel(DIR_DOC_RAW, f)
pipe.execute()
# exist only on remote owncloud
for f in list(set(own_files_d) - set(local_files_d)):
logging.info(f'"{f}" exist only on remote -> download it')
data = wdv.download(os.path.join(webdav_reglement_doc_dir, f))
if data:
update_doc_raw_data(f, data)
    # exists on both sides (update only if the file size changed)
for f in list(set(local_files_d).intersection(own_files_d)):
local_size = local_files_d[f]
remote_size = own_files_d[f]
logging.debug(f'check "{f}" remote size [{remote_size}]/local size [{local_size}]')
if local_size != remote_size:
logging.info(f'"{f}" size mismatch -> download it')
data = wdv.download(os.path.join(webdav_reglement_doc_dir, f))
if data:
update_doc_raw_data(f, data)
# log sync end
logging.info('end of sync for owncloud doc')
@catch_log_except()
def loos_redis_import_job():
share_keys_l = [('to:messein:json:tweets:@grtgaz', 'from:loos:json:tweets:@grtgaz'),
('to:messein:img:grt-twitter-cloud:png', 'from:loos:img:grt-twitter-cloud:png'),
('to:messein:json:flyspray-est', 'from:loos:json:flyspray-est')]
for from_remote_key, to_local_key in share_keys_l:
# copy redis data from loos key to local key
data = DB.loos.get(from_remote_key)
if data:
DB.main.set(to_local_key, data, ex=4 * 3600)
@catch_log_except()
def vigilance_job():
# request XML data from server
r = requests.get('http://vigilance.meteofrance.com/data/NXFR34_LFPW_.xml', timeout=10.0)
# check error
if r.status_code == 200:
# dom parsing (convert UTF-8 r.text to XML char)
dom = minidom.parseString(r.text.encode('ascii', 'xmlcharrefreplace'))
# set dict for dep data
vig_data = {'update': '', 'department': {}}
# map build date
tz = pytz.timezone('Europe/Paris')
map_date = str(dom.getElementsByTagName('entetevigilance')[0].getAttribute('dateinsert'))
map_dt = tz.localize(datetime(int(map_date[0:4]), int(map_date[4:6]),
int(map_date[6:8]), int(map_date[8:10]),
int(map_date[10:12])))
vig_data['update'] = map_dt.isoformat()
# parse every departments
for items in dom.getElementsByTagName('datavigilance'):
# current department
dep_code = str(items.attributes['dep'].value)
# get risk ID if exist
risk_id = []
for risk in items.getElementsByTagName('risque'):
risk_id.append(int(risk.attributes['valeur'].value))
# get flood ID if exist
flood_id = None
for flood in items.getElementsByTagName('crue'):
flood_id = int(flood.attributes['valeur'].value)
# get color ID
color_id = int(items.attributes['couleur'].value)
# build vig_data
vig_data['department'][dep_code] = {'vig_level': color_id,
'flood_level': flood_id,
'risk_id': risk_id}
DB.main.set_as_json('json:vigilance', vig_data, ex=2 * 3600)
@catch_log_except()
def weather_today_job():
# request data from NOAA server (METAR of Nancy-Essey Airport)
r = requests.get('http://tgftp.nws.noaa.gov/data/observations/metar/stations/LFSN.TXT',
timeout=10.0, headers={'User-Agent': USER_AGENT})
# check error
if r.status_code == 200:
# extract METAR message
metar_msg = r.content.decode().split('\n')[1]
# METAR parse
obs = Metar(metar_msg)
# init and populate d_today dict
d_today = {}
# message date and time
if obs.time:
d_today['update_iso'] = obs.time.strftime('%Y-%m-%dT%H:%M:%SZ')
d_today['update_fr'] = dt_utc_to_local(obs.time).strftime('%H:%M %d/%m')
# current temperature
if obs.temp:
d_today['temp'] = round(obs.temp.value('C'))
# current dew point
if obs.dewpt:
d_today['dewpt'] = round(obs.dewpt.value('C'))
# current pressure
if obs.press:
d_today['press'] = round(obs.press.value('hpa'))
# current wind speed
if obs.wind_speed:
d_today['w_speed'] = round(obs.wind_speed.value('KMH'))
# current wind gust
if obs.wind_gust:
d_today['w_gust'] = round(obs.wind_gust.value('KMH'))
# current wind direction
if obs.wind_dir:
            # replace 'W' (west) by 'O' (ouest) for the French compass direction
d_today['w_dir'] = obs.wind_dir.compass().replace('W', 'O')
# weather status str
d_today['descr'] = 'n/a'
# store to redis
DB.main.set_as_json('json:weather:today:nancy', d_today, ex=2 * 3600)
# main
if __name__ == '__main__':
# logging setup
logging.basicConfig(format='%(asctime)s %(message)s', level=logging.INFO)
logging.getLogger('PIL').setLevel(logging.INFO)
logging.info('board-import-app started')
# init webdav client
wdv = WebDAV(webdav_url, username=webdav_user, password=webdav_pass)
# init scheduler
schedule.every(5).minutes.do(owc_updated_job)
schedule.every(1).hours.do(owc_sync_carousel_job)
schedule.every(1).hours.do(owc_sync_doc_job)
schedule.every(2).minutes.do(loos_redis_import_job)
schedule.every(60).minutes.do(air_quality_atmo_ge_job)
schedule.every(5).minutes.do(dir_est_img_job)
schedule.every(5).minutes.do(gsheet_job)
schedule.every(2).minutes.do(img_gmap_traffic_job)
schedule.every(5).minutes.do(local_info_job)
schedule.every(5).minutes.do(vigilance_job)
schedule.every(5).minutes.do(weather_today_job)
# first call
air_quality_atmo_ge_job()
dir_est_img_job()
gsheet_job()
img_gmap_traffic_job()
local_info_job()
loos_redis_import_job()
vigilance_job()
weather_today_job()
owc_updated_job()
# main loop
while True:
schedule.run_pending()
time.sleep(1)
| [((37, 6, 37, 20), 'configparser.ConfigParser', 'ConfigParser', ({}, {}), '()', False, 'from configparser import ConfigParser\n'), ((69, 1, 69, 19), 'board_lib.catch_log_except', 'catch_log_except', ({}, {}), '()', False, 'from board_lib import CustomRedis, catch_log_except, dt_utc_to_local\n'), ((107, 1, 107, 19), 'board_lib.catch_log_except', 'catch_log_except', ({}, {}), '()', False, 'from board_lib import CustomRedis, catch_log_except, dt_utc_to_local\n'), ((129, 1, 129, 19), 'board_lib.catch_log_except', 'catch_log_except', ({}, {}), '()', False, 'from board_lib import CustomRedis, catch_log_except, dt_utc_to_local\n'), ((142, 1, 142, 19), 'board_lib.catch_log_except', 'catch_log_except', ({}, {}), '()', False, 'from board_lib import CustomRedis, catch_log_except, dt_utc_to_local\n'), ((158, 1, 158, 19), 'board_lib.catch_log_except', 'catch_log_except', ({}, {}), '()', False, 'from board_lib import CustomRedis, catch_log_except, dt_utc_to_local\n'), ((170, 1, 170, 19), 'board_lib.catch_log_except', 'catch_log_except', ({}, {}), '()', False, 'from board_lib import CustomRedis, catch_log_except, dt_utc_to_local\n'), ((194, 1, 194, 19), 'board_lib.catch_log_except', 'catch_log_except', ({}, {}), '()', False, 'from board_lib import CustomRedis, catch_log_except, dt_utc_to_local\n'), ((301, 1, 301, 19), 'board_lib.catch_log_except', 'catch_log_except', ({}, {}), '()', False, 'from board_lib import CustomRedis, catch_log_except, dt_utc_to_local\n'), ((380, 1, 380, 19), 'board_lib.catch_log_except', 'catch_log_except', ({}, {}), '()', False, 'from board_lib import CustomRedis, catch_log_except, dt_utc_to_local\n'), ((392, 1, 392, 19), 'board_lib.catch_log_except', 'catch_log_except', ({}, {}), '()', False, 'from board_lib import CustomRedis, catch_log_except, dt_utc_to_local\n'), ((430, 1, 430, 19), 'board_lib.catch_log_except', 'catch_log_except', ({}, {}), '()', False, 'from board_lib import CustomRedis, catch_log_except, dt_utc_to_local\n'), ((62, 11, 63, 63), 'board_lib.CustomRedis', 'CustomRedis', (), '', False, 'from board_lib import CustomRedis, catch_log_except, dt_utc_to_local\n'), ((64, 11, 65, 63), 'board_lib.CustomRedis', 'CustomRedis', (), '', False, 'from board_lib import CustomRedis, catch_log_except, dt_utc_to_local\n'), ((79, 8, 79, 38), 'requests.get', 'requests.get', (), '', False, 'import requests\n'), ((132, 15, 132, 52), 'requests.get', 'requests.get', (), '', False, 'import requests\n'), ((145, 8, 145, 60), 'requests.get', 'requests.get', (), '', False, 'import requests\n'), ((234, 4, 234, 55), 'logging.info', 'logging.info', ({(234, 17, 234, 54): '"""start of sync for owncloud carousel"""'}, {}), "('start of sync for owncloud carousel')", False, 'import logging\n'), ((298, 4, 298, 53), 'logging.info', 'logging.info', ({(298, 17, 298, 52): '"""end of sync for owncloud carousel"""'}, {}), "('end of sync for owncloud carousel')", False, 'import logging\n'), ((320, 4, 320, 50), 'logging.info', 'logging.info', ({(320, 17, 320, 49): '"""start of sync for owncloud doc"""'}, {}), "('start of sync for owncloud doc')", False, 'import logging\n'), ((377, 4, 377, 48), 'logging.info', 'logging.info', ({(377, 17, 377, 47): '"""end of sync for owncloud doc"""'}, {}), "('end of sync for owncloud doc')", False, 'import logging\n'), ((395, 8, 395, 92), 'requests.get', 'requests.get', (), '', False, 'import requests\n'), ((433, 8, 434, 70), 'requests.get', 'requests.get', (), '', False, 'import requests\n'), ((475, 4, 475, 77), 'logging.basicConfig', 'logging.basicConfig', (), '', 
False, 'import logging\n'), ((477, 4, 477, 44), 'logging.info', 'logging.info', ({(477, 17, 477, 43): '"""board-import-app started"""'}, {}), "('board-import-app started')", False, 'import logging\n'), ((480, 10, 480, 72), 'webdav.WebDAV', 'WebDAV', (), '', False, 'from webdav import WebDAV\n'), ((112, 12, 112, 83), 'requests.get', 'requests.get', ({(112, 25, 112, 82): "'https://webcam.dir-est.fr/app.php/lastimg/%s' % get_code"}, {}), "('https://webcam.dir-est.fr/app.php/lastimg/%s' % get_code)", False, 'import requests\n'), ((152, 17, 152, 29), 'io.BytesIO', 'io.BytesIO', ({}, {}), '()', False, 'import io\n'), ((162, 16, 162, 99), 'feedparser.parse', 'feedparser.parse', ({(162, 33, 162, 98): '"""https://france3-regions.francetvinfo.fr/societe/rss?r=grand-est"""'}, {}), "(\n 'https://france3-regions.francetvinfo.fr/societe/rss?r=grand-est')", False, 'import feedparser\n'), ((225, 22, 225, 34), 'io.BytesIO', 'io.BytesIO', ({}, {}), '()', False, 'import io\n'), ((248, 8, 248, 78), 'logging.debug', 'logging.debug', ({(248, 22, 248, 77): 'f"""remove orphan "{f}" record in hash "{DIR_CAR_INFOS}\\""""'}, {}), '(f\'remove orphan "{f}" record in hash "{DIR_CAR_INFOS}"\')', False, 'import logging\n'), ((253, 8, 253, 76), 'logging.debug', 'logging.debug', ({(253, 22, 253, 75): 'f"""remove orphan "{f}" record in hash "{DIR_CAR_RAW}\\""""'}, {}), '(f\'remove orphan "{f}" record in hash "{DIR_CAR_RAW}"\')', False, 'import logging\n'), ((275, 8, 275, 63), 'logging.info', 'logging.info', ({(275, 21, 275, 62): 'f""""{f}" exist only on local -> remove it"""'}, {}), '(f\'"{f}" exist only on local -> remove it\')', False, 'import logging\n'), ((283, 8, 283, 68), 'logging.info', 'logging.info', ({(283, 21, 283, 67): '(\'"%s" exist only on remote -> download it\' % f)'}, {}), '(\'"%s" exist only on remote -> download it\' % f)', False, 'import logging\n'), ((291, 8, 291, 91), 'logging.debug', 'logging.debug', ({(291, 22, 291, 90): 'f"""check "{f}" remote size [{remote_size}]/local size [{local_size}]"""'}, {}), '(\n f\'check "{f}" remote size [{remote_size}]/local size [{local_size}]\')', False, 'import logging\n'), ((334, 8, 334, 78), 'logging.debug', 'logging.debug', ({(334, 22, 334, 77): 'f"""remove orphan "{f}" record in hash "{DIR_DOC_INFOS}\\""""'}, {}), '(f\'remove orphan "{f}" record in hash "{DIR_DOC_INFOS}"\')', False, 'import logging\n'), ((339, 8, 339, 76), 'logging.debug', 'logging.debug', ({(339, 22, 339, 75): 'f"""remove orphan "{f}" record in hash "{DIR_DOC_RAW}\\""""'}, {}), '(f\'remove orphan "{f}" record in hash "{DIR_DOC_RAW}"\')', False, 'import logging\n'), ((354, 8, 354, 63), 'logging.info', 'logging.info', ({(354, 21, 354, 62): 'f""""{f}" exist only on local -> remove it"""'}, {}), '(f\'"{f}" exist only on local -> remove it\')', False, 'import logging\n'), ((362, 8, 362, 66), 'logging.info', 'logging.info', ({(362, 21, 362, 65): 'f""""{f}" exist only on remote -> download it"""'}, {}), '(f\'"{f}" exist only on remote -> download it\')', False, 'import logging\n'), ((370, 8, 370, 91), 'logging.debug', 'logging.debug', ({(370, 22, 370, 90): 'f"""check "{f}" remote size [{remote_size}]/local size [{local_size}]"""'}, {}), '(\n f\'check "{f}" remote size [{remote_size}]/local size [{local_size}]\')', False, 'import logging\n'), ((403, 13, 403, 42), 'pytz.timezone', 'pytz.timezone', ({(403, 27, 403, 41): '"""Europe/Paris"""'}, {}), "('Europe/Paris')", False, 'import pytz\n'), ((440, 14, 440, 30), 'metar.Metar.Metar', 'Metar', ({(440, 20, 440, 29): 'metar_msg'}, {}), '(metar_msg)', False, 
'from metar.Metar import Metar\n'), ((507, 8, 507, 30), 'schedule.run_pending', 'schedule.run_pending', ({}, {}), '()', False, 'import schedule\n'), ((508, 8, 508, 21), 'time.sleep', 'time.sleep', ({(508, 19, 508, 20): '(1)'}, {}), '(1)', False, 'import time\n'), ((77, 20, 77, 36), 'datetime.datetime.today', 'datetime.today', ({}, {}), '()', False, 'from datetime import datetime\n'), ((90, 19, 90, 57), 'datetime.datetime.utcfromtimestamp', 'datetime.utcfromtimestamp', ({(90, 45, 90, 56): 'r_ts / 1000'}, {}), '(r_ts / 1000)', False, 'from datetime import datetime\n'), ((123, 23, 123, 35), 'io.BytesIO', 'io.BytesIO', ({}, {}), '()', False, 'import io\n'), ((263, 25, 263, 74), 're.findall', 're.findall', ({(263, 36, 263, 62): '"""_@([a-zA-Z0-9\\\\-]{1,16})"""', (263, 64, 263, 73): 'file_path'}, {}), "('_@([a-zA-Z0-9\\\\-]{1,16})', file_path)", False, 'import re\n'), ((284, 28, 284, 68), 'os.path.join', 'os.path.join', ({(284, 41, 284, 64): 'webdav_carousel_img_dir', (284, 66, 284, 67): 'f'}, {}), '(webdav_carousel_img_dir, f)', False, 'import os\n'), ((293, 12, 293, 63), 'logging.info', 'logging.info', ({(293, 25, 293, 62): 'f""""{f}" size mismatch -> download it"""'}, {}), '(f\'"{f}" size mismatch -> download it\')', False, 'import logging\n'), ((363, 28, 363, 69), 'os.path.join', 'os.path.join', ({(363, 41, 363, 65): 'webdav_reglement_doc_dir', (363, 67, 363, 68): 'f'}, {}), '(webdav_reglement_doc_dir, f)', False, 'import os\n'), ((372, 12, 372, 63), 'logging.info', 'logging.info', ({(372, 25, 372, 62): 'f""""{f}" size mismatch -> download it"""'}, {}), '(f\'"{f}" size mismatch -> download it\')', False, 'import logging\n'), ((476, 4, 476, 28), 'logging.getLogger', 'logging.getLogger', ({(476, 22, 476, 27): '"""PIL"""'}, {}), "('PIL')", False, 'import logging\n'), ((115, 33, 115, 54), 'io.BytesIO', 'io.BytesIO', ({(115, 44, 115, 53): 'r.content'}, {}), '(r.content)', False, 'import io\n'), ((182, 16, 182, 104), 'logging.debug', 'logging.debug', ({(182, 30, 182, 103): 'f""""{webdav_reglement_doc_dir}" seem updated: run "owncloud_sync_doc_job\\""""'}, {}), '(\n f\'"{webdav_reglement_doc_dir}" seem updated: run "owncloud_sync_doc_job"\')', False, 'import logging\n'), ((204, 14, 204, 35), 'hashlib.md5', 'hashlib.md5', ({(204, 26, 204, 34): 'raw_data'}, {}), '(raw_data)', False, 'import hashlib\n'), ((240, 19, 240, 39), 'json.loads', 'json.loads', ({(240, 30, 240, 38): 'js_infos'}, {}), '(js_infos)', False, 'import json\n'), ((294, 32, 294, 72), 'os.path.join', 'os.path.join', ({(294, 45, 294, 68): 'webdav_carousel_img_dir', (294, 70, 294, 71): 'f'}, {}), '(webdav_carousel_img_dir, f)', False, 'import os\n'), ((311, 14, 311, 35), 'hashlib.md5', 'hashlib.md5', ({(311, 26, 311, 34): 'raw_data'}, {}), '(raw_data)', False, 'import hashlib\n'), ((326, 19, 326, 39), 'json.loads', 'json.loads', ({(326, 30, 326, 38): 'js_infos'}, {}), '(js_infos)', False, 'import json\n'), ((373, 32, 373, 73), 'os.path.join', 'os.path.join', ({(373, 45, 373, 69): 'webdav_reglement_doc_dir', (373, 71, 373, 72): 'f'}, {}), '(webdav_reglement_doc_dir, f)', False, 'import os\n'), ((483, 4, 483, 21), 'schedule.every', 'schedule.every', ({(483, 19, 483, 20): '(5)'}, {}), '(5)', False, 'import schedule\n'), ((484, 4, 484, 21), 'schedule.every', 'schedule.every', ({(484, 19, 484, 20): '(1)'}, {}), '(1)', False, 'import schedule\n'), ((485, 4, 485, 21), 'schedule.every', 'schedule.every', ({(485, 19, 485, 20): '(1)'}, {}), '(1)', False, 'import schedule\n'), ((486, 4, 486, 21), 'schedule.every', 'schedule.every', ({(486, 19, 
486, 20): '(2)'}, {}), '(2)', False, 'import schedule\n'), ((487, 4, 487, 22), 'schedule.every', 'schedule.every', ({(487, 19, 487, 21): '(60)'}, {}), '(60)', False, 'import schedule\n'), ((488, 4, 488, 21), 'schedule.every', 'schedule.every', ({(488, 19, 488, 20): '(5)'}, {}), '(5)', False, 'import schedule\n'), ((489, 4, 489, 21), 'schedule.every', 'schedule.every', ({(489, 19, 489, 20): '(5)'}, {}), '(5)', False, 'import schedule\n'), ((490, 4, 490, 21), 'schedule.every', 'schedule.every', ({(490, 19, 490, 20): '(2)'}, {}), '(2)', False, 'import schedule\n'), ((491, 4, 491, 21), 'schedule.every', 'schedule.every', ({(491, 19, 491, 20): '(5)'}, {}), '(5)', False, 'import schedule\n'), ((492, 4, 492, 21), 'schedule.every', 'schedule.every', ({(492, 19, 492, 20): '(5)'}, {}), '(5)', False, 'import schedule\n'), ((493, 4, 493, 21), 'schedule.every', 'schedule.every', ({(493, 19, 493, 20): '(5)'}, {}), '(5)', False, 'import schedule\n'), ((138, 26, 138, 40), 'datetime.datetime.now', 'datetime.now', ({}, {}), '()', False, 'from datetime import datetime\n'), ((189, 16, 189, 108), 'logging.debug', 'logging.debug', ({(189, 30, 189, 107): 'f""""{webdav_carousel_img_dir}" seem updated: run "owncloud_sync_carousel_job\\""""'}, {}), '(\n f\'"{webdav_carousel_img_dir}" seem updated: run "owncloud_sync_carousel_job"\'\n )', False, 'import logging\n'), ((216, 46, 216, 66), 'io.BytesIO', 'io.BytesIO', ({(216, 57, 216, 65): 'raw_data'}, {}), '(raw_data)', False, 'import io\n'), ((446, 35, 446, 60), 'board_lib.dt_utc_to_local', 'dt_utc_to_local', ({(446, 51, 446, 59): 'obs.time'}, {}), '(obs.time)', False, 'from board_lib import CustomRedis, catch_log_except, dt_utc_to_local\n'), ((220, 31, 220, 69), 'pdf2image.convert_from_bytes', 'pdf2image.convert_from_bytes', ({(220, 60, 220, 68): 'raw_data'}, {}), '(raw_data)', False, 'import pdf2image\n'), ((118, 44, 118, 58), 'datetime.datetime.now', 'datetime.now', ({}, {}), '()', False, 'from datetime import datetime\n')] |
pauldmccarthy/fsleyes-widgets | fsleyes_widgets/widgetlist.py | cb27899a0f665efe3f1c6ca1f89349507e004378 | #!/usr/bin/env python
#
# widgetlist.py - A widget which displays a list of groupable widgets.
#
# Author: Paul McCarthy <[email protected]>
#
"""This module provides the :class:`WidgetList` class, which displays a list
of widgets.
"""
import wx
import wx.lib.newevent as wxevent
import wx.lib.scrolledpanel as scrolledpanel
import fsleyes_widgets.togglepanel as togglepanel
class WidgetList(scrolledpanel.ScrolledPanel):
"""A scrollable list of widgets.
The ``WidgetList`` provides a number of features:
- Widgets can be grouped.
- A label can be shown next to each widget.
- Widget groups can be collapsed/expanded.
- Widgets and groups can be dynamically added/removed.
The most important methods are:
.. autosummary::
:nosignatures:
AddWidget
AddGroup
A ``WidgetList`` looks something like this:
.. image:: images/widgetlist.png
:scale: 50%
:align: center
A ``WidgetList`` emits a :data:`WidgetListChangeEvent` whenever its
contents change.
"""
_defaultOddColour = None
"""Background colour for widgets on odd rows.
    Initialised in :meth:`__init__`.
"""
_defaultEvenColour = None
"""Background colour for widgets on even rows.
    Initialised in :meth:`__init__`.
"""
_defaultGroupColour = None
"""Border and title background colour for widget groups.
    Initialised in :meth:`__init__`.
"""
def __init__(self, parent, style=0, minHeight=-1):
"""Create a ``WidgetList``.
:arg parent: The :mod:`wx` parent object.
:arg style: Passed through to ``wx.ScrolledPanel.__init__``
:arg minHeight: Minimum height of each row
"""
odd = wx.SystemSettings.GetColour(wx.SYS_COLOUR_LISTBOX)
even = odd.ChangeLightness(90)
group = odd
if WidgetList._defaultOddColour is None:
WidgetList._defaultOddColour = odd
if WidgetList._defaultEvenColour is None:
WidgetList._defaultEvenColour = even
if WidgetList._defaultGroupColour is None:
WidgetList._defaultGroupColour = group
self.__minHeight = minHeight
self.__widgSizer = wx.BoxSizer(wx.VERTICAL)
self.__sizer = wx.BoxSizer(wx.VERTICAL)
self.__groupSizer = wx.BoxSizer(wx.VERTICAL)
self.__widgets = {}
self.__groups = {}
self.__oddColour = WidgetList._defaultOddColour
self.__evenColour = WidgetList._defaultEvenColour
self.__groupColour = WidgetList._defaultGroupColour
self.__sizer.Add(self.__widgSizer, flag=wx.EXPAND)
self.__sizer.Add(self.__groupSizer, flag=wx.EXPAND)
self.__oneExpanded = style & WL_ONE_EXPANDED
# The SP.__init__ method seemingly
# induces a call to DoGetBestSize,
# which assumes that all of the
# things above exist. So we call
# init after we've created those
# things.
scrolledpanel.ScrolledPanel.__init__(self, parent)
self.SetSizer(self.__sizer)
self.SetupScrolling()
self.SetAutoLayout(1)
def DoGetBestSize(self):
"""Returns the best size for the widget list, with all group
widgets expanded.
"""
width, height = self.__widgSizer.GetSize().Get()
for name, group in self.__groups.items():
w, h = group.parentPanel.GetBestSize().Get()
w += 20
h += 10
if w > width:
width = w
height += h
return wx.Size(width, height)
def __makeWidgetKey(self, widget):
"""Widgets are stored in a dictionary - this method generates a
string to use as a key, based on the widget ``id``.
"""
return str(id(widget))
def __setLabelWidths(self, widgets):
"""Calculates the maximum width of all widget labels, and sets all
labels to that width.
        This ensures that all labels/widgets line up horizontally.
"""
if len(widgets) == 0:
return
dc = wx.ClientDC(widgets[0].label)
lblWidths = [dc.GetTextExtent(w.displayName)[0] for w in widgets]
maxWidth = max(lblWidths)
for w in widgets:
w.label.SetMinSize((maxWidth + 10, -1))
w.label.SetMaxSize((maxWidth + 10, -1))
def __setColours(self):
"""Called whenever the widget list needs to be refreshed.
Makes sure that odd/even widgets and their labels have the correct
background colour.
"""
def setWidgetColours(widgDict):
for i, widg in enumerate(widgDict.values()):
if i % 2: colour = self.__oddColour
else: colour = self.__evenColour
widg.SetBackgroundColour(colour)
setWidgetColours(self.__widgets)
for group in self.__groups.values():
setWidgetColours(group.widgets)
group.parentPanel.SetBackgroundColour(self.__groupColour)
group.colPanel .SetBackgroundColour(self.__groupColour)
def __refresh(self, *args, **kwargs):
"""Updates widget colours (see :meth:`__setColours`), and lays out
the widget list.
:arg postEvent: If ``True`` (the default), a
:data:`WidgetListChangeEvent` is posted.
"""
self.__setColours()
self.FitInside()
self.Layout()
if kwargs.get('postEvent', True):
wx.PostEvent(self, WidgetListChangeEvent())
def SetColours(self, odd=None, even=None, group=None):
"""Sets the colours used on this ``WidgetList``.
Each argument is assumed to be a tuple of ``(r, g, b)`` values,
each in the range ``[0 - 255]``.
:arg odd: Background colour for widgets on odd rows.
:arg even: Background colour for widgets on even rows.
:arg group: Border/title colour for widget groups.
"""
if odd is not None: self.__oddColour = odd
if even is not None: self.__evenColour = even
if group is not None: self.__groupColour = group
self.__setColours()
def GetGroups(self):
"""Returns a list containing the name of every group in this
``WidgetList``.
"""
return list(self.__groups.keys())
def HasGroup(self, groupName):
"""Returns ``True`` if this ``WidgetList`` contains a group
with the specified name.
"""
return groupName in self.__groups
def RenameGroup(self, groupName, newDisplayName):
"""Changes the display name of the specified group.
.. note:: This method only changes the *display name* of a group,
not the group identifier name. See the :meth:`AddGroup`
method.
:arg groupName: Name of the group.
:arg newDisplayName: New display name for the group.
"""
group = self.__groups[groupName]
group.displayName = newDisplayName
group.colPanel.SetLabel(newDisplayName)
def AddGroup(self, groupName, displayName=None):
"""Add a new group to this ``WidgetList``.
A :exc:`ValueError` is raised if a group with the specified name
already exists.
:arg groupName: The name of the group - this is used as an
identifier for the group.
:arg displayName: A string to be shown in the title bar for the
group. This can be changed later via the
:meth:`RenameGroup` method.
"""
if displayName is None:
displayName = groupName
if groupName in self.__groups:
raise ValueError('A group with name {} '
'already exists'.format(groupName))
parentPanel = wx.Panel(self, style=wx.SUNKEN_BORDER)
colPanel = togglepanel.TogglePanel(parentPanel, label=displayName)
widgPanel = colPanel.GetPane()
widgSizer = wx.BoxSizer(wx.VERTICAL)
widgPanel.SetSizer(widgSizer)
gapSizer = wx.BoxSizer(wx.VERTICAL)
# A spacer exists at the top,
# and between, every group.
gapSizer.Add((-1, 5))
gapSizer.Add(parentPanel, border=10, flag=(wx.EXPAND |
wx.LEFT |
wx.RIGHT))
self.__groupSizer.Add(gapSizer, flag=wx.EXPAND)
parentSizer = wx.BoxSizer(wx.VERTICAL)
parentSizer.Add(colPanel,
border=5,
flag=wx.EXPAND | wx.BOTTOM,
proportion=0)
parentPanel.SetSizer(parentSizer)
group = _Group(groupName,
displayName,
gapSizer,
parentPanel,
colPanel,
widgPanel,
widgSizer)
self.__groups[groupName] = group
self.__refresh()
# Mouse wheel listener needed
# on all children under linux/GTK
if wx.Platform == '__WXGTK__':
parentPanel.Bind(wx.EVT_MOUSEWHEEL, self.__onMouseWheel)
colPanel .Bind(wx.EVT_MOUSEWHEEL, self.__onMouseWheel)
colPanel.Bind(togglepanel.EVT_TOGGLEPANEL_EVENT, self.__onGroupExpand)
def GetWidgets(self, groupName=None):
"""Returns a list containing all of the widgets that have been added
to this ``WidgetList``.
:arg groupName: If provided, only widgets in the specified group will
be returned. Otherwise, ungrouped widgets are returned.
"""
if groupName is None: widgDict = self.__widgets
else: widgDict = self.__groups[groupName].widgets
widgets = [w.widget for w in widgDict.values()]
return widgets
def AddWidget(self, widget, displayName, tooltip=None, groupName=None):
"""Add an arbitrary widget to the property list.
If the ``groupName`` is not provided, the widget is added to a list
of *top level* widgets, which appear at the top of the list, above
any groups. Otherwise, the widget is added to the collapsible panel
corresponding to the specified group.
A :exc:`ValueError` is raised if the widget is already contained
in the list.
:arg widget: The widget to add to the list.
:arg displayName: The widget label/display name.
:arg tooltip: A tooltip for the widget.
:arg groupName: Name of the group to which the widget should be
added.
.. note:: The provided ``widget`` may also be a :class:`wx.Sizer`
                  instance, although support for this is basic. Specifically,
only one level of nesting is possible, i.e. the provided
``wx.Sizer`` may not have any other ``wx.Sizer``
instances as its children.
"""
if groupName is None:
widgDict = self.__widgets
parent = self
parentSizer = self.__widgSizer
else:
group = self.__groups[groupName]
widgDict = group.widgets
parent = group.widgPanel
parentSizer = group.sizer
key = self.__makeWidgetKey(widget)
if key in widgDict:
            raise ValueError('Widget {} already exists'.format(key))
widgPanel = wx.Panel(parent)
widgSizer = wx.BoxSizer(wx.HORIZONTAL)
widgPanel.SetSizer(widgSizer)
if isinstance(widget, wx.Sizer):
for child in widget.GetChildren():
child.GetWindow().Reparent(widgPanel)
else:
w, h = widget.GetBestSize().Get()
if self.__minHeight > h:
h = self.__minHeight
widget.SetMinSize( (w, h))
widget.Reparent(widgPanel)
label = wx.StaticText(widgPanel,
label=displayName,
style=wx.ALIGN_RIGHT)
widgSizer.Add(label, flag=wx.EXPAND)
widgSizer.Add(widget, flag=wx.EXPAND, proportion=1)
parentSizer.Add(widgPanel,
flag=wx.EXPAND | wx.LEFT | wx.RIGHT,
border=5)
widg = _Widget(displayName,
tooltip,
label,
widget,
widgPanel,
widgSizer)
if tooltip is not None:
widg.SetTooltip(tooltip)
# Under linux/GTK, mouse events are
# captured by child windows, so if
# we want scrolling to work, we need
# to capture scroll events on every
# child. Under OSX/cocoa, this is
# not necessary.
if wx.Platform == '__WXGTK__':
widg.Bind(wx.EVT_MOUSEWHEEL, self.__onMouseWheel)
widgDict[key] = widg
self.__setLabelWidths(list(widgDict.values()))
self.__refresh()
def __onMouseWheel(self, ev):
"""Only called if running on GTK. Scrolls the widget list according
to the mouse wheel rotation.
"""
posx, posy = self.GetViewStart()
rotation = ev.GetWheelRotation()
if rotation > 0: delta = 5
elif rotation < 0: delta = -5
else: return
if ev.GetWheelAxis() == wx.MOUSE_WHEEL_VERTICAL: posy -= delta
else: posx += delta
self.Scroll(posx, posy)
def __onGroupExpand(self, ev):
"""Called when the user expands or collapses a group. Enforces
the :data:`WL_ONE_EXPANDED` style if it is enabled, and refreshes
the panel.
"""
panel = ev.GetEventObject()
if panel.IsExpanded() and self.__oneExpanded:
for group in self.__groups.values():
if group.colPanel is not panel:
group.colPanel.Collapse()
self.__refresh()
def AddSpace(self, groupName=None):
"""Adds some empty vertical space to the widget list.
        :arg groupName: Name of the group to which the space should be added.
If not specified, the space is added to the *top level*
widget list - see the :meth:`AddWidget` method.
"""
if groupName is None: parentSizer = self.__widgSizer
else: parentSizer = self.__groups[groupName].sizer
parentSizer.Add((-1, 10))
def RemoveWidget(self, widget, groupName=None):
"""Removes and destroys the specified widget from this ``WidgetList``.
:arg widget: The widget to remove.
:arg groupName: Name of the group in which the widget is contained.
"""
key = self.__makeWidgetKey(widget)
if groupName is None:
parentSizer = self.__widgSizer
widgDict = self.__widgets
else:
group = self.__groups[groupName]
parentSizer = group.sizer
widgDict = group.widgets
widg = widgDict.pop(key)
parentSizer.Detach(widg.panel)
widg.Destroy()
self.__refresh()
def RemoveGroup(self, groupName):
"""Removes the specified group, and destroys all of the widgets
contained within it.
"""
group = self.__groups.pop(groupName)
self.__groupSizer.Detach(group.gapSizer)
group.parentPanel.Destroy()
self.__refresh()
def Clear(self):
"""Removes and destroys all widgets and groups. """
for key in list(self.__widgets.keys()):
widg = self.__widgets.pop(key)
            self.__widgSizer.Detach(widg.panel)
widg.Destroy()
for group in self.GetGroups():
self.RemoveGroup(group)
self.__refresh()
def ClearGroup(self, groupName):
"""Removes and destroys all widgets in the specified group, but
does not remove the group.
"""
group = self.__groups[groupName]
group.sizer.Clear(True)
group.widgets.clear()
self.__refresh()
def GroupSize(self, groupName):
"""Returns the number of widgets that have been added to the
specified group.
"""
return len(self.__groups[groupName].widgets)
def IsExpanded(self, groupName):
"""Returns ``True`` if the panel for the specified group is currently
expanded, ``False`` if it is collapsed
"""
return self.__groups[groupName].colPanel.IsExpanded()
def Expand(self, groupName, expand=True):
"""Expands or collapses the panel for the specified group. """
panel = self.__groups[groupName].colPanel
if expand: panel.Expand()
else: panel.Collapse()
self.__refresh()
class _Widget:
"""The ``_Widget`` class is used internally by the :class:`WidgetList`
to organise references to each widget in the list.
"""
def __init__(self,
displayName,
tooltip,
label,
widget,
panel,
sizer):
self.displayName = displayName
self.tooltip = tooltip
self.label = label
self.widget = widget
self.panel = panel
self.sizer = sizer
def SetBackgroundColour(self, colour):
self.panel.SetBackgroundColour(colour)
self.label.SetBackgroundColour(colour)
def SetTooltip(self, tooltip):
self.label.SetToolTip(wx.ToolTip(tooltip))
if isinstance(self.widget, wx.Sizer):
for child in self.widget.GetChildren():
child.GetWindow().SetToolTip(wx.ToolTip(tooltip))
else:
self.widget.SetToolTip(wx.ToolTip(tooltip))
def Bind(self, evType, callback):
self.panel.Bind(evType, callback)
self.label.Bind(evType, callback)
if isinstance(self.widget, wx.Sizer):
for c in self.widget.GetChildren():
c.GetWindow().Bind(evType, callback)
else:
self.widget.Bind(evType, callback)
def Destroy(self):
self.label.Destroy()
if isinstance(self.widget, wx.Sizer):
self.widget.Clear(True)
else:
self.widget.Destroy()
class _Group:
"""The ``_Group`` class is used internally by :class:`WidgetList`
instances to represent groups of widgets that are in the list.
"""
def __init__(self,
groupName,
displayName,
gapSizer,
parentPanel,
colPanel,
widgPanel,
sizer):
self.groupName = groupName
self.displayName = displayName
self.gapSizer = gapSizer
self.parentPanel = parentPanel
self.colPanel = colPanel
self.widgPanel = widgPanel
self.sizer = sizer
self.widgets = {}
_WidgetListChangeEvent, _EVT_WL_CHANGE_EVENT = wxevent.NewEvent()
WidgetListChangeEvent = _WidgetListChangeEvent
"""Event emitted by a :class:`WidgetList` when its contents change. """
EVT_WL_CHANGE_EVENT = _EVT_WL_CHANGE_EVENT
"""Identifier for the :data:`WidgetListChangeEvent`. """
WL_ONE_EXPANDED = 1
""":class:`WidgetList` style flag. When applied, at most one group will
be expanded at any one time.
"""
| [((619, 47, 619, 65), 'wx.lib.newevent.NewEvent', 'wxevent.NewEvent', ({}, {}), '()', True, 'import wx.lib.newevent as wxevent\n'), ((77, 16, 77, 66), 'wx.SystemSettings.GetColour', 'wx.SystemSettings.GetColour', ({(77, 44, 77, 65): 'wx.SYS_COLOUR_LISTBOX'}, {}), '(wx.SYS_COLOUR_LISTBOX)', False, 'import wx\n'), ((89, 29, 89, 53), 'wx.BoxSizer', 'wx.BoxSizer', ({(89, 41, 89, 52): 'wx.VERTICAL'}, {}), '(wx.VERTICAL)', False, 'import wx\n'), ((90, 29, 90, 53), 'wx.BoxSizer', 'wx.BoxSizer', ({(90, 41, 90, 52): 'wx.VERTICAL'}, {}), '(wx.VERTICAL)', False, 'import wx\n'), ((92, 29, 92, 53), 'wx.BoxSizer', 'wx.BoxSizer', ({(92, 41, 92, 52): 'wx.VERTICAL'}, {}), '(wx.VERTICAL)', False, 'import wx\n'), ((111, 8, 111, 58), 'wx.lib.scrolledpanel.ScrolledPanel.__init__', 'scrolledpanel.ScrolledPanel.__init__', ({(111, 45, 111, 49): 'self', (111, 51, 111, 57): 'parent'}, {}), '(self, parent)', True, 'import wx.lib.scrolledpanel as scrolledpanel\n'), ((133, 15, 133, 37), 'wx.Size', 'wx.Size', ({(133, 23, 133, 28): 'width', (133, 30, 133, 36): 'height'}, {}), '(width, height)', False, 'import wx\n'), ((153, 20, 153, 49), 'wx.ClientDC', 'wx.ClientDC', ({(153, 32, 153, 48): 'widgets[0].label'}, {}), '(widgets[0].label)', False, 'import wx\n'), ((266, 22, 266, 60), 'wx.Panel', 'wx.Panel', (), '', False, 'import wx\n'), ((267, 22, 267, 77), 'fsleyes_widgets.togglepanel.TogglePanel', 'togglepanel.TogglePanel', (), '', True, 'import fsleyes_widgets.togglepanel as togglepanel\n'), ((270, 22, 270, 46), 'wx.BoxSizer', 'wx.BoxSizer', ({(270, 34, 270, 45): 'wx.VERTICAL'}, {}), '(wx.VERTICAL)', False, 'import wx\n'), ((274, 19, 274, 43), 'wx.BoxSizer', 'wx.BoxSizer', ({(274, 31, 274, 42): 'wx.VERTICAL'}, {}), '(wx.VERTICAL)', False, 'import wx\n'), ((284, 22, 284, 46), 'wx.BoxSizer', 'wx.BoxSizer', ({(284, 34, 284, 45): 'wx.VERTICAL'}, {}), '(wx.VERTICAL)', False, 'import wx\n'), ((366, 20, 366, 36), 'wx.Panel', 'wx.Panel', ({(366, 29, 366, 35): 'parent'}, {}), '(parent)', False, 'import wx\n'), ((367, 20, 367, 46), 'wx.BoxSizer', 'wx.BoxSizer', ({(367, 32, 367, 45): 'wx.HORIZONTAL'}, {}), '(wx.HORIZONTAL)', False, 'import wx\n'), ((380, 16, 382, 51), 'wx.StaticText', 'wx.StaticText', (), '', False, 'import wx\n'), ((568, 30, 568, 49), 'wx.ToolTip', 'wx.ToolTip', ({(568, 41, 568, 48): 'tooltip'}, {}), '(tooltip)', False, 'import wx\n'), ((574, 35, 574, 54), 'wx.ToolTip', 'wx.ToolTip', ({(574, 46, 574, 53): 'tooltip'}, {}), '(tooltip)', False, 'import wx\n'), ((572, 45, 572, 64), 'wx.ToolTip', 'wx.ToolTip', ({(572, 56, 572, 63): 'tooltip'}, {}), '(tooltip)', False, 'import wx\n')] |
TransactPRO/gw3-python-client | setup.py | 77a9395c13f75467385227461b57ce85f4730ce5 | #!/usr/bin/env python
import setuptools
MAINTAINER_NAME = 'Transact Pro'
MAINTAINER_EMAIL = '[email protected]'
URL_GIT = 'https://github.com/TransactPRO/gw3-python-client'
try:
import pypandoc
LONG_DESCRIPTION = pypandoc.convert('README.md', 'rst')
except (IOError, ImportError, OSError, RuntimeError):
LONG_DESCRIPTION = ''
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Topic :: Software Development :: Libraries :: Python Modules'
]
required = [
'requests',
]
setuptools.setup(
name='transactpro-gw3-client',
version='1.7.6',
description='Transact PRO Gateway3 implementation in Python.',
long_description=LONG_DESCRIPTION,
long_description_content_type="text/markdown",
author='Transact Pro',
author_email='[email protected]',
install_requires=required,
url=URL_GIT,
packages=setuptools.find_packages(),
license='MIT',
classifiers=CLASSIFIERS,
keywords='GW3 gateway3 integration gateway TransactPRO python python3',
python_requires='>=3.6',
)
| [((12, 23, 12, 59), 'pypandoc.convert', 'pypandoc.convert', ({(12, 40, 12, 51): '"""README.md"""', (12, 53, 12, 58): '"""rst"""'}, {}), "('README.md', 'rst')", False, 'import pypandoc\n'), ((44, 13, 44, 39), 'setuptools.find_packages', 'setuptools.find_packages', ({}, {}), '()', False, 'import setuptools\n')] |
mitodl/social-auth-mitxpro | social_auth_mitxpro/backends_test.py | 8cae8bbe900b25f724b24f783d06de7b853a1366 | """Tests for our backend"""
from urllib.parse import urljoin
import pytest
from social_auth_mitxpro.backends import MITxProOAuth2
# pylint: disable=redefined-outer-name
@pytest.fixture
def strategy(mocker):
"""Mock strategy"""
return mocker.Mock()
@pytest.fixture
def backend(strategy):
"""MITxProOAuth2 backend fixture"""
return MITxProOAuth2(strategy)
@pytest.mark.parametrize(
"response, expected",
[
(
{"username": "abc123", "email": "[email protected]", "name": "Jane Doe"},
{"username": "abc123", "email": "[email protected]", "name": "Jane Doe"},
),
({"username": "abc123"}, {"username": "abc123", "email": "", "name": ""}),
],
)
def test_get_user_details(backend, response, expected):
"""Test that get_user_details produces expected results"""
assert backend.get_user_details(response) == expected
def test_user_data(backend, strategy, mocked_responses):
"""Tests that the backend makes a correct appropriate request"""
access_token = "user_token"
api_root = "http://xpro.example.com/"
response = {"username": "abc123", "email": "[email protected]", "name": "Jane Doe"}
mocked_responses.add(
mocked_responses.GET, urljoin(api_root, "/api/users/me"), json=response
)
settings = {"API_ROOT": api_root}
def _setting(name, *, backend, default=None): # pylint: disable=unused-argument
"""Dummy setting func"""
return settings.get(name, default)
strategy.setting.side_effect = _setting
assert backend.user_data(access_token) == response
request, _ = mocked_responses.calls[0]
assert request.headers["Authorization"] == "Bearer user_token"
strategy.setting.assert_any_call("API_ROOT", default=None, backend=backend)
def test_authorization_url(backend, strategy):
"""Test authorization_url()"""
strategy.setting.return_value = "abc"
assert backend.authorization_url() == "abc"
strategy.setting.assert_called_once_with(
"AUTHORIZATION_URL", default=None, backend=backend
)
def test_access_token_url(backend, strategy):
"""Test access_token_url()"""
strategy.setting.return_value = "abc"
assert backend.access_token_url() == "abc"
strategy.setting.assert_called_once_with(
"ACCESS_TOKEN_URL", default=None, backend=backend
)
| [((24, 1, 33, 1), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(25, 4, 25, 24): '"""response, expected"""', (26, 4, 32, 5): "[({'username': 'abc123', 'email': '[email protected]', 'name': 'Jane Doe'},\n {'username': 'abc123', 'email': '[email protected]', 'name': 'Jane Doe'}\n ), ({'username': 'abc123'}, {'username': 'abc123', 'email': '', 'name':\n ''})]"}, {}), "('response, expected', [({'username': 'abc123',\n 'email': '[email protected]', 'name': 'Jane Doe'}, {'username': 'abc123',\n 'email': '[email protected]', 'name': 'Jane Doe'}), ({'username':\n 'abc123'}, {'username': 'abc123', 'email': '', 'name': ''})])", False, 'import pytest\n'), ((21, 11, 21, 34), 'social_auth_mitxpro.backends.MITxProOAuth2', 'MITxProOAuth2', ({(21, 25, 21, 33): 'strategy'}, {}), '(strategy)', False, 'from social_auth_mitxpro.backends import MITxProOAuth2\n'), ((46, 30, 46, 64), 'urllib.parse.urljoin', 'urljoin', ({(46, 38, 46, 46): 'api_root', (46, 48, 46, 63): '"""/api/users/me"""'}, {}), "(api_root, '/api/users/me')", False, 'from urllib.parse import urljoin\n')] |
velocist/TS4CheatsInfo | Scripts/simulation/careers/detective/detective_crime_scene.py | b59ea7e5f4bd01d3b3bd7603843d525a9c179867 | # uncompyle6 version 3.7.4
# Python bytecode 3.7 (3394)
# Decompiled from: Python 3.7.9 (tags/v3.7.9:13c94747c7, Aug 17 2020, 18:58:18) [MSC v.1900 64 bit (AMD64)]
# Embedded file name: T:\InGame\Gameplay\Scripts\Server\careers\detective\detective_crime_scene.py
# Compiled at: 2015-02-08 03:00:54
# Size of source mod 2**32: 1608 bytes
from careers.career_event_zone_director import CareerEventZoneDirector
import sims4.log
logger = sims4.log.Logger('Crime Scene', default_owner='bhill')
class CrimeSceneZoneDirector(CareerEventZoneDirector):
def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
self._should_load_sims = False
def _load_custom_zone_director(self, zone_director_proto, reader):
self._should_load_sims = True
super()._load_custom_zone_director(zone_director_proto, reader)
def _on_maintain_zone_saved_sim(self, sim_info):
if self._should_load_sims:
super()._on_maintain_zone_saved_sim(sim_info)
else:
logger.info('Discarding saved sim: {}', sim_info)
def _process_injected_sim(self, sim_info):
logger.info('Discarding injected sim: {}', sim_info) | [] |
methylgrammarlab/proj_scwgbs | classifier/interpretation_exp.py | 287196898796eb617fef273bfaf9e978a57047dc | """
Code adapted from https://github.com/ohlerlab/DeepRiPe with changes
Extract information and graphs from the Integrated gradients output
"""
import argparse
import os
import sys
import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns
from classifier.plotseqlogo import seqlogo_fig
from commons import files_tools
sns.set()
sns.set_style('whitegrid')
def parse_input():
parser = argparse.ArgumentParser()
parser.add_argument('--interpretation_file', help='path for the input file', required=True)
parser.add_argument('--output_folder', help='Path of the output folder', required=False,
default=os.path.dirname(sys.argv[0]))
args = parser.parse_args()
return args
def plot_one_seq(seq, output, title, yl=None):
fig = seqlogo_fig(seq[:, :], vocab="DNA", yl=yl, figsize=(20, 4), ncol=1, plot_name=title)
fig.savefig(output)
plt.close()
def plot_multi_seq(sequences_dict, number_of_seq, output_folder=None):
"""
    Plot multiple sequences in one figure
    :param sequences_dict: A dictionary with pl or cl as key and the integrated gradients results for each
    sequence in this label
:param number_of_seq: number of sequences in one figure
:param output_folder: Output folder
"""
for k in sequences_dict:
ex_seq = sequences_dict[k][:number_of_seq]
fig = seqlogo_fig(np.transpose(ex_seq[:, :, :], axes=(1, 2, 0)), vocab="DNA",
figsize=(8, 4), ncol=1, yl=0.1,
plot_name="seq for top %s of type %s" % (number_of_seq, k))
if output_folder:
fig.savefig(os.path.join(output_folder, "seq_for_top_%s_of_type_%s" % (number_of_seq, k)))
else:
plt.show()
plt.close()
def plot_avg_sequence(sequences_dict, output_folder=None):
"""
    Plot the average sequence across the central 30 letters and across the whole sequence
    :param sequences_dict: A dictionary with pl or cl as key and the integrated gradients results for each
    sequence in this label
:param output_folder: Output folder
"""
for k in sequences_dict:
ex_seq = sequences_dict[k]
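        # positions 60:90 are assumed to be the central 30-letter window around the CpG site
        # (the full one-hot sequence is 150 x 4, see the second loop below)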
mean_seq = np.transpose(np.mean(ex_seq[:, 60:90, :], axis=0).reshape(1, 30, 4), axes=(1, 2, 0))
name = k
fig = seqlogo_fig(mean_seq, vocab="DNA", figsize=(20, 4), ncol=1,
plot_name="Average attribution score for prediction %s" % name)
ax = fig.axes[0]
ax.set_title("Average sequence for prediction %s" % name, fontsize=16)
if output_folder:
fig.savefig(os.path.join(output_folder, "Avg_seq_for_%s30.png" % k))
else:
plt.show()
plt.close()
for k in sequences_dict:
ex_seq = sequences_dict[k]
mean_seq = np.transpose(np.mean(ex_seq[:, :, :], axis=0).reshape(1, 150, 4), axes=(1, 2, 0))
fig = seqlogo_fig(mean_seq, vocab="DNA", figsize=(20, 4), ncol=1,
plot_name="Avg seq for %s" % k)
if output_folder:
fig.savefig(os.path.join(output_folder, "Avg_seq_for_%s.png" % k))
else:
plt.show()
plt.close()
def plot_avg_sequence_sw(sequences_dict, output_folder=None):
"""
    Plot the average sequence using S/W encoding, flattening A and T to W and C and G to S
    :param sequences_dict: A dictionary with pl or cl as key and the integrated gradients results for each
    sequence in this label
:param output_folder: Output folder
"""
for k in sequences_dict:
ex_seq = sequences_dict[k]
mean_seq = np.transpose(np.mean(ex_seq[:, 60:90, :], axis=0).reshape(1, 30, 4), axes=(1, 2, 0))
new_seq = np.zeros_like(mean_seq)
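        # channel order is assumed to be A, C, G, T, so W = A + T and S = C + G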
for i in range(mean_seq.shape[0]):
new_seq[i][0] = mean_seq[i][0] + mean_seq[i][3]
new_seq[i][1] = mean_seq[i][1] + mean_seq[i][2]
fig = seqlogo_fig(new_seq, vocab="DNAWS", figsize=(20, 4), ncol=1, plot_name="Avg seq for %s" % k)
if output_folder:
fig.savefig(os.path.join(output_folder, "Avg_seq_for_%s_sw30.png" % k))
else:
plt.show()
for k in sequences_dict:
ex_seq = sequences_dict[k]
mean_seq = np.transpose(np.mean(ex_seq[:, :, :], axis=0).reshape(1, 150, 4), axes=(1, 2, 0))
new_seq = np.zeros_like(mean_seq)
for i in range(mean_seq.shape[0]):
new_seq[i][0] = mean_seq[i][0] + mean_seq[i][3]
new_seq[i][1] = mean_seq[i][1] + mean_seq[i][2]
fig = seqlogo_fig(new_seq, vocab="DNAWS", figsize=(20, 4), ncol=1, plot_name="Avg seq for %s" % k)
if output_folder:
fig.savefig(os.path.join(output_folder, "Avg_seq_for_%s_sw.png" % k))
else:
plt.show()
plt.close()
def plot_avg_sequence_sw_flatten_values(sequences_dict, output_folder=None):
"""
    Plot the average sequence using S/W encoding, flattening A and T to W and C and G to S and combining
    both channels to get one number per sequence position
    :param sequences_dict: A dictionary with pl or cl as key and the integrated gradients results for each
    sequence in this label
:param output_folder: Output folder
"""
for k in sequences_dict:
ex_seq = sequences_dict[k]
mean_seq = np.transpose(np.mean(ex_seq[:, 60:90, :], axis=0).reshape(1, 30, 4), axes=(1, 2, 0))
new_seq = np.zeros_like(mean_seq)
for i in range(mean_seq.shape[0]):
w = mean_seq[i][0] + mean_seq[i][3]
s = mean_seq[i][1] + mean_seq[i][2]
delta = s - w
sw_index = 1 if delta > 0 else 0
new_seq[i][sw_index] = abs(delta)
fig = seqlogo_fig(new_seq, vocab="DNAWS", figsize=(8, 4), ncol=1, plot_name="Avg seq for %s" % k)
if output_folder:
fig.savefig(os.path.join(output_folder, "Avg_seq_for_%s_sw30_flatten.png" % k))
else:
            plt.show()
        plt.close()
for k in sequences_dict:
ex_seq = sequences_dict[k]
mean_seq = np.transpose(np.mean(ex_seq[:, :, :], axis=0).reshape(1, 150, 4), axes=(1, 2, 0))
new_seq = np.zeros_like(mean_seq)
for i in range(mean_seq.shape[0]):
w = mean_seq[i][0] + mean_seq[i][3]
s = mean_seq[i][1] + mean_seq[i][2]
delta = s - w
sw_index = 1 if delta > 0 else 0
new_seq[i][sw_index] = abs(delta)
fig = seqlogo_fig(new_seq, vocab="DNAWS", figsize=(20, 4), ncol=1, plot_name="Avg seq for %s" % k)
if output_folder:
fig.savefig(os.path.join(output_folder, "Avg_seq_for_%s_sw_flatten.png" % k))
else:
plt.show()
plt.close()
def plot_distance_weight_two_sides(sequences_dict, output_folder=None):
"""
    Plot the integrated gradient value of each position based on its distance from the center, as a two-sided graph (-74 -> 74)
    We wanted to see whether particular positions stand out and whether there is any periodicity
    :param sequences_dict: A dictionary with "pl" or "cl" as key and the integrated gradients values for every
    sequence with that label
:param output_folder: Output folder
"""
for k in sequences_dict:
class_type = k
ex_seq = np.abs(sequences_dict[k])
mean_seq = np.transpose(np.mean(ex_seq[:, :, :], axis=0).reshape(1, 150, 4), axes=(1, 2, 0))
seq_weight = np.sum(mean_seq, axis=1)
middle = int(seq_weight.shape[0] / 2) - 1
seq_weight[middle] = None
seq_weight[middle + 1] = None
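        # Added comment: for a 150-position input, middle == 74, so the two central
        # positions (the CpG site itself) are blanked out with None and show up as a
        # gap in the curve; the mirrored x vector built below therefore contains 0 twice,
        # once for each of the blanked positions.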
        x = np.arange(-74, 1).astype(int)  # np.int was removed in recent NumPy versions
x = np.append(x, x[::-1] * -1)
x_ticks = [i for i in range(-70, 80, 10)]
plt.xticks(x_ticks)
plt.plot(x, seq_weight, '.-')
plt.legend()
plt.grid(axis="y")
plt.xlabel("Distance from CpG Site", fontsize=12)
plt.ylabel("Attribute score", fontsize=12)
plt.title("Attribute score base on distance from CpG site for %s" % class_type, fontsize=14)
if output_folder:
plt.savefig(
os.path.join(output_folder,
"distance_importance_of_flanking_letters_type_%s_two_way.png" % k))
else:
plt.show()
plt.close()
def plot_distance_weight_one_side(sequences_dict, output_folder=None):
"""
    Plot the integrated gradient value of each position based on its distance from the center, as a one-sided graph (1 -> 74)
    We wanted to see whether particular positions stand out and whether there is any periodicity
    :param sequences_dict: A dictionary with "pl" or "cl" as key and the integrated gradients values for every
    sequence with that label
:param output_folder: Output folder
"""
for k in sequences_dict:
class_type = k
ex_seq = np.abs(sequences_dict[k])
mean_seq = np.transpose(np.mean(ex_seq[:, :, :], axis=0).reshape(1, 150, 4), axes=(1, 2, 0))
seq_weight = np.sum(mean_seq, axis=1)
std_seq = np.std(mean_seq, axis=1)
middle = int(seq_weight.shape[0] / 2) - 1
seq_to_values = np.flip(seq_weight[:middle])
seq_from_values = seq_weight[middle + 2:]
seq_to_std = np.flip(std_seq[:middle])
seq_from_std = std_seq[middle + 2:]
x = np.arange(1, seq_from_values.shape[0] + 1)
plt.errorbar(x, seq_to_values, seq_to_std, marker='^', label="to", alpha=0.5)
plt.errorbar(x, seq_from_values, seq_from_std, marker='^', label="from", alpha=0.5)
plt.legend()
x_ticks = [i for i in range(1, 5)] + [i for i in range(5, 75, 5)]
plt.xticks(x_ticks)
plt.xlabel("Distance from CG")
plt.ylabel("Importance shannon values")
plt.title("Importance of flanking letters - %s" % (class_type))
if output_folder:
plt.savefig(os.path.join(output_folder,
"distance_importance_of_flanking_letters_type_%s_one_way.png" % k))
else:
plt.show()
plt.close()
def print_each_seq(sequences_dict, output_folder):
"""
    Plot all the sequences one after the other
    :param sequences_dict: A dictionary with "pl" or "cl" as key and the integrated gradients values for every
    sequence with that label
:param output_folder: Output folder
"""
cl_list = []
pl_list = []
# Remove duplicates
seq = None
for i in range(sequences_dict["cl"].shape[0]):
new_seq = sequences_dict["cl"][i]
if np.all(new_seq == seq):
continue
else:
cl_list.append(new_seq)
seq = new_seq
seq = None
for i in range(sequences_dict["pl"].shape[0]):
new_seq = sequences_dict["pl"][i]
if np.all(new_seq == seq):
continue
else:
pl_list.append(new_seq)
seq = new_seq
    for i in range(min(1000, len(cl_list))):  # guard against fewer than 1000 unique sequences
plot_one_seq(seq=cl_list[i], output=os.path.join(output_folder, "cl_seq_%s.png" % i),
title="CL seq num %s" % i, yl=0.1)
    for i in range(min(1000, len(pl_list))):
plot_one_seq(seq=pl_list[i], output=os.path.join(output_folder, "pl_seq_%s.png" % i),
title="PL seq num %s" % i, yl=0.1)
def main():
args = parse_input()
ex_seq_d = files_tools.load_pickle(args.interpretation_file)
new_d = {"cl": ex_seq_d["cl"], "pl": ex_seq_d["pl"]}
plot_distance_weight_one_side(new_d, args.output_folder)
plot_distance_weight_two_sides(new_d, args.output_folder)
plot_multi_seq(new_d, 1000, args.output_folder)
plot_avg_sequence(new_d, args.output_folder)
plot_avg_sequence_sw(new_d, args.output_folder)
plot_avg_sequence_sw_flatten_values(new_d, args.output_folder)
print_each_seq(new_d, args.output_folder)
if __name__ == '__main__':
main()
| [((17, 0, 17, 9), 'seaborn.set', 'sns.set', ({}, {}), '()', True, 'import seaborn as sns\n'), ((18, 0, 18, 26), 'seaborn.set_style', 'sns.set_style', ({(18, 14, 18, 25): '"""whitegrid"""'}, {}), "('whitegrid')", True, 'import seaborn as sns\n'), ((22, 13, 22, 38), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ({}, {}), '()', False, 'import argparse\n'), ((32, 10, 32, 94), 'classifier.plotseqlogo.seqlogo_fig', 'seqlogo_fig', (), '', False, 'from classifier.plotseqlogo import seqlogo_fig\n'), ((34, 4, 34, 15), 'matplotlib.pyplot.close', 'plt.close', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((56, 4, 56, 15), 'matplotlib.pyplot.close', 'plt.close', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((134, 4, 134, 15), 'matplotlib.pyplot.close', 'plt.close', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((181, 4, 181, 15), 'matplotlib.pyplot.close', 'plt.close', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((305, 15, 305, 64), 'commons.files_tools.load_pickle', 'files_tools.load_pickle', ({(305, 39, 305, 63): 'args.interpretation_file'}, {}), '(args.interpretation_file)', False, 'from commons import files_tools\n'), ((71, 14, 72, 89), 'classifier.plotseqlogo.seqlogo_fig', 'seqlogo_fig', (), '', False, 'from classifier.plotseqlogo import seqlogo_fig\n'), ((81, 8, 81, 19), 'matplotlib.pyplot.close', 'plt.close', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((87, 14, 88, 57), 'classifier.plotseqlogo.seqlogo_fig', 'seqlogo_fig', (), '', False, 'from classifier.plotseqlogo import seqlogo_fig\n'), ((94, 8, 94, 19), 'matplotlib.pyplot.close', 'plt.close', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((107, 18, 107, 41), 'numpy.zeros_like', 'np.zeros_like', ({(107, 32, 107, 40): 'mean_seq'}, {}), '(mean_seq)', True, 'import numpy as np\n'), ((112, 14, 112, 106), 'classifier.plotseqlogo.seqlogo_fig', 'seqlogo_fig', (), '', False, 'from classifier.plotseqlogo import seqlogo_fig\n'), ((122, 18, 122, 41), 'numpy.zeros_like', 'np.zeros_like', ({(122, 32, 122, 40): 'mean_seq'}, {}), '(mean_seq)', True, 'import numpy as np\n'), ((127, 14, 127, 106), 'classifier.plotseqlogo.seqlogo_fig', 'seqlogo_fig', (), '', False, 'from classifier.plotseqlogo import seqlogo_fig\n'), ((148, 18, 148, 41), 'numpy.zeros_like', 'np.zeros_like', ({(148, 32, 148, 40): 'mean_seq'}, {}), '(mean_seq)', True, 'import numpy as np\n'), ((156, 14, 156, 105), 'classifier.plotseqlogo.seqlogo_fig', 'seqlogo_fig', (), '', False, 'from classifier.plotseqlogo import seqlogo_fig\n'), ((166, 18, 166, 41), 'numpy.zeros_like', 'np.zeros_like', ({(166, 32, 166, 40): 'mean_seq'}, {}), '(mean_seq)', True, 'import numpy as np\n'), ((174, 14, 174, 106), 'classifier.plotseqlogo.seqlogo_fig', 'seqlogo_fig', (), '', False, 'from classifier.plotseqlogo import seqlogo_fig\n'), ((194, 17, 194, 42), 'numpy.abs', 'np.abs', ({(194, 24, 194, 41): 'sequences_dict[k]'}, {}), '(sequences_dict[k])', True, 'import numpy as np\n'), ((197, 21, 197, 45), 'numpy.sum', 'np.sum', (), '', True, 'import numpy as np\n'), ((203, 12, 203, 38), 'numpy.append', 'np.append', ({(203, 22, 203, 23): 'x', (203, 25, 203, 37): 'x[::-1] * -1'}, {}), '(x, x[::-1] * -1)', True, 'import numpy as np\n'), ((206, 8, 206, 27), 'matplotlib.pyplot.xticks', 'plt.xticks', ({(206, 19, 206, 26): 'x_ticks'}, {}), '(x_ticks)', True, 'import matplotlib.pyplot as plt\n'), ((207, 8, 207, 37), 'matplotlib.pyplot.plot', 'plt.plot', ({(207, 17, 207, 18): 'x', (207, 20, 207, 30): 'seq_weight', (207, 32, 207, 36): 
'""".-"""'}, {}), "(x, seq_weight, '.-')", True, 'import matplotlib.pyplot as plt\n'), ((208, 8, 208, 20), 'matplotlib.pyplot.legend', 'plt.legend', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((209, 8, 209, 26), 'matplotlib.pyplot.grid', 'plt.grid', (), '', True, 'import matplotlib.pyplot as plt\n'), ((210, 8, 210, 57), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (), '', True, 'import matplotlib.pyplot as plt\n'), ((211, 8, 211, 50), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (), '', True, 'import matplotlib.pyplot as plt\n'), ((212, 8, 212, 100), 'matplotlib.pyplot.title', 'plt.title', (), '', True, 'import matplotlib.pyplot as plt\n'), ((220, 8, 220, 19), 'matplotlib.pyplot.close', 'plt.close', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((233, 17, 233, 42), 'numpy.abs', 'np.abs', ({(233, 24, 233, 41): 'sequences_dict[k]'}, {}), '(sequences_dict[k])', True, 'import numpy as np\n'), ((236, 21, 236, 45), 'numpy.sum', 'np.sum', (), '', True, 'import numpy as np\n'), ((237, 18, 237, 42), 'numpy.std', 'np.std', (), '', True, 'import numpy as np\n'), ((239, 24, 239, 52), 'numpy.flip', 'np.flip', ({(239, 32, 239, 51): 'seq_weight[:middle]'}, {}), '(seq_weight[:middle])', True, 'import numpy as np\n'), ((241, 21, 241, 46), 'numpy.flip', 'np.flip', ({(241, 29, 241, 45): 'std_seq[:middle]'}, {}), '(std_seq[:middle])', True, 'import numpy as np\n'), ((243, 12, 243, 54), 'numpy.arange', 'np.arange', ({(243, 22, 243, 23): '1', (243, 25, 243, 53): 'seq_from_values.shape[0] + 1'}, {}), '(1, seq_from_values.shape[0] + 1)', True, 'import numpy as np\n'), ((245, 8, 245, 85), 'matplotlib.pyplot.errorbar', 'plt.errorbar', (), '', True, 'import matplotlib.pyplot as plt\n'), ((246, 8, 246, 91), 'matplotlib.pyplot.errorbar', 'plt.errorbar', (), '', True, 'import matplotlib.pyplot as plt\n'), ((248, 8, 248, 20), 'matplotlib.pyplot.legend', 'plt.legend', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((250, 8, 250, 27), 'matplotlib.pyplot.xticks', 'plt.xticks', ({(250, 19, 250, 26): 'x_ticks'}, {}), '(x_ticks)', True, 'import matplotlib.pyplot as plt\n'), ((251, 8, 251, 38), 'matplotlib.pyplot.xlabel', 'plt.xlabel', ({(251, 19, 251, 37): '"""Distance from CG"""'}, {}), "('Distance from CG')", True, 'import matplotlib.pyplot as plt\n'), ((252, 8, 252, 47), 'matplotlib.pyplot.ylabel', 'plt.ylabel', ({(252, 19, 252, 46): '"""Importance shannon values"""'}, {}), "('Importance shannon values')", True, 'import matplotlib.pyplot as plt\n'), ((253, 8, 253, 71), 'matplotlib.pyplot.title', 'plt.title', ({(253, 18, 253, 70): "('Importance of flanking letters - %s' % class_type)"}, {}), "('Importance of flanking letters - %s' % class_type)", True, 'import matplotlib.pyplot as plt\n'), ((260, 8, 260, 19), 'matplotlib.pyplot.close', 'plt.close', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((278, 11, 278, 33), 'numpy.all', 'np.all', ({(278, 18, 278, 32): '(new_seq == seq)'}, {}), '(new_seq == seq)', True, 'import numpy as np\n'), ((287, 11, 287, 33), 'numpy.all', 'np.all', ({(287, 18, 287, 32): '(new_seq == seq)'}, {}), '(new_seq == seq)', True, 'import numpy as np\n'), ((25, 32, 25, 60), 'os.path.dirname', 'os.path.dirname', ({(25, 48, 25, 59): 'sys.argv[0]'}, {}), '(sys.argv[0])', False, 'import os\n'), ((47, 26, 47, 71), 'numpy.transpose', 'np.transpose', (), '', True, 'import numpy as np\n'), ((54, 12, 54, 22), 'matplotlib.pyplot.show', 'plt.show', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((80, 12, 80, 22), 'matplotlib.pyplot.show', 'plt.show', 
({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((93, 12, 93, 22), 'matplotlib.pyplot.show', 'plt.show', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((117, 12, 117, 22), 'matplotlib.pyplot.show', 'plt.show', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((132, 12, 132, 22), 'matplotlib.pyplot.show', 'plt.show', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((179, 12, 179, 22), 'matplotlib.pyplot.show', 'plt.show', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((219, 12, 219, 22), 'matplotlib.pyplot.show', 'plt.show', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((259, 12, 259, 22), 'matplotlib.pyplot.show', 'plt.show', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((52, 24, 52, 101), 'os.path.join', 'os.path.join', ({(52, 37, 52, 50): 'output_folder', (52, 52, 52, 100): "('seq_for_top_%s_of_type_%s' % (number_of_seq, k))"}, {}), "(output_folder, 'seq_for_top_%s_of_type_%s' % (number_of_seq, k))", False, 'import os\n'), ((78, 24, 78, 79), 'os.path.join', 'os.path.join', ({(78, 37, 78, 50): 'output_folder', (78, 52, 78, 78): "('Avg_seq_for_%s30.png' % k)"}, {}), "(output_folder, 'Avg_seq_for_%s30.png' % k)", False, 'import os\n'), ((91, 24, 91, 77), 'os.path.join', 'os.path.join', ({(91, 37, 91, 50): 'output_folder', (91, 52, 91, 76): "('Avg_seq_for_%s.png' % k)"}, {}), "(output_folder, 'Avg_seq_for_%s.png' % k)", False, 'import os\n'), ((115, 24, 115, 82), 'os.path.join', 'os.path.join', ({(115, 37, 115, 50): 'output_folder', (115, 52, 115, 81): "('Avg_seq_for_%s_sw30.png' % k)"}, {}), "(output_folder, 'Avg_seq_for_%s_sw30.png' % k)", False, 'import os\n'), ((130, 24, 130, 80), 'os.path.join', 'os.path.join', ({(130, 37, 130, 50): 'output_folder', (130, 52, 130, 79): "('Avg_seq_for_%s_sw.png' % k)"}, {}), "(output_folder, 'Avg_seq_for_%s_sw.png' % k)", False, 'import os\n'), ((159, 24, 159, 90), 'os.path.join', 'os.path.join', ({(159, 37, 159, 50): 'output_folder', (159, 52, 159, 89): "('Avg_seq_for_%s_sw30_flatten.png' % k)"}, {}), "(output_folder, 'Avg_seq_for_%s_sw30_flatten.png' % k)", False, 'import os\n'), ((177, 24, 177, 88), 'os.path.join', 'os.path.join', ({(177, 37, 177, 50): 'output_folder', (177, 52, 177, 87): "('Avg_seq_for_%s_sw_flatten.png' % k)"}, {}), "(output_folder, 'Avg_seq_for_%s_sw_flatten.png' % k)", False, 'import os\n'), ((202, 12, 202, 29), 'numpy.arange', 'np.arange', ({(202, 22, 202, 25): '-74', (202, 27, 202, 28): '1'}, {}), '(-74, 1)', True, 'import numpy as np\n'), ((216, 16, 217, 95), 'os.path.join', 'os.path.join', ({(216, 29, 216, 42): 'output_folder', (217, 29, 217, 94): "('distance_importance_of_flanking_letters_type_%s_two_way.png' % k)"}, {}), "(output_folder, \n 'distance_importance_of_flanking_letters_type_%s_two_way.png' % k)", False, 'import os\n'), ((256, 24, 257, 103), 'os.path.join', 'os.path.join', ({(256, 37, 256, 50): 'output_folder', (257, 37, 257, 102): "('distance_importance_of_flanking_letters_type_%s_one_way.png' % k)"}, {}), "(output_folder, \n 'distance_importance_of_flanking_letters_type_%s_one_way.png' % k)", False, 'import os\n'), ((294, 44, 294, 92), 'os.path.join', 'os.path.join', ({(294, 57, 294, 70): 'output_folder', (294, 72, 294, 91): "('cl_seq_%s.png' % i)"}, {}), "(output_folder, 'cl_seq_%s.png' % i)", False, 'import os\n'), ((298, 44, 298, 92), 'os.path.join', 'os.path.join', ({(298, 57, 298, 70): 'output_folder', (298, 72, 298, 91): "('pl_seq_%s.png' % i)"}, {}), "(output_folder, 'pl_seq_%s.png' % i)", False, 'import os\n'), 
((68, 32, 68, 68), 'numpy.mean', 'np.mean', (), '', True, 'import numpy as np\n'), ((85, 32, 85, 64), 'numpy.mean', 'np.mean', (), '', True, 'import numpy as np\n'), ((106, 32, 106, 68), 'numpy.mean', 'np.mean', (), '', True, 'import numpy as np\n'), ((121, 32, 121, 64), 'numpy.mean', 'np.mean', (), '', True, 'import numpy as np\n'), ((147, 32, 147, 68), 'numpy.mean', 'np.mean', (), '', True, 'import numpy as np\n'), ((165, 32, 165, 64), 'numpy.mean', 'np.mean', (), '', True, 'import numpy as np\n'), ((196, 32, 196, 64), 'numpy.mean', 'np.mean', (), '', True, 'import numpy as np\n'), ((235, 32, 235, 64), 'numpy.mean', 'np.mean', (), '', True, 'import numpy as np\n')] |
shulinye/dotfiles | scripts/pythonutils/autorepr.py | a342512c33ca102d03921cc653ee4605d0cf9617 | #!/usr/bin/python3
from collections import OrderedDict
from functools import partial
from ordered_set import OrderedSet
import inspect
import itertools
import types
from .utils import walk_getattr
__all__ = ['autoinit', 'autorepr', 'TotalCompareByKey']
def autoinit(obj=None, *args, params=None, **kwargs):
"""Takes __slots__ and _slots and writes an __init__
Can be used as a class decorator, or by setting
__init__ = autoinit"""
if obj is None: return partial(autoinit, params=params)
if params:
pass
elif hasattr(obj, '__slots__'):
params = OrderedSet(itertools.chain.from_iterable(walk_getattr(obj, '__slots__')))
elif hasattr(obj, '_slots'):
params = OrderedSet(itertools.chain.from_iterable(walk_getattr(obj, '_slots')))
else:
raise RuntimeError("Can't autocreate __init__, please supply '__slots__' or '_slots'")
if inspect.isclass(obj): #I'm being used as a decorator
s = ["def __init__(self,{}):".format(", ".join(i for i in params))]
s.extend("self.{0} = {0}".format(i) for i in params)
scope = {}
exec('\n '.join(s), scope)
setattr(obj, '__init__', scope['__init__'])
return obj
else:
signature = inspect.Signature(inspect.Parameter(i, inspect.Parameter.POSITIONAL_OR_KEYWORD) for i in params)
signature.bind(*args, **kwargs)
for p, val in itertools.chain(zip(params, args), kwargs.items()):
setattr(obj, p, val)
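# Illustrative usage sketch (added comment, not part of the original module; the class
# name Point is hypothetical and assumes walk_getattr collects __slots__ as expected):
#
#     @autoinit
#     class Point:
#         __slots__ = ['x', 'y']
#
#     p = Point(1, 2)   # the generated __init__ sets p.x = 1 and p.y = 2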
def autorepr(obj=None, *, params=None):
"""Function that automagically gives you a __repr__.
If no params are given, uses __slots__, _slots, and at last resort,
inspects __init__
Can be used as a class decorator or by setting
__repr__ = autorepr"""
if obj is None: return partial(autorepr, params = params)
discard_first = False
if params:
pass
elif hasattr(obj, '__slots__'):
params = OrderedSet(itertools.chain.from_iterable(walk_getattr(obj, '__slots__')))
elif hasattr(obj, '_slots'):
params = OrderedSet(itertools.chain.from_iterable(walk_getattr(obj, '_slots')))
else:
sig = inspect.signature(obj.__init__)
params = sig.parameters
discard_first = True
if inspect.isclass(obj): #I'm being used as a decorator
if discard_first: params = list(params)[1:] #drop the first argument, that's self
s = ["def __repr__(self):\n return '%s(" + ", ".join(["%s=%r"]*(len(params)))]
s.append(")' % (self.__class__.__name__, ")
s.append(', '.join("'{0}', self.{0}".format(i) for i in params) + ')')
scope = {}
exec("".join(s), scope)
setattr(obj, '__repr__', scope['__repr__'])
return obj
else: #Being a normal function here :P
return "%s(%s)" % (obj.__class__.__name__, ", ".join("%s=%r" % (i, getattr(obj,i)) for i in params))
class TotalCompareByKey(object):
"""Writes all comparison methods using one key"""
__slots__ = ['key', 'check_type']
def __init__(self, key, *, check_type=True):
self.key = key
self.check_type = check_type
def __call__(self, cls):
orderings = {'__lt__': '<',
'__le__': '<=',
'__gt__': '>',
'__ge__': '>=',
'__eq__': '==',
'__ne__': '!='}
for dunder, symbol in orderings.items():
if dunder in cls.__dict__: continue
s = ["def {dunder}(self, other):".format(dunder=dunder)]
if self.check_type:
s.append("if not isinstance(other, self.__class__):")
s.append(" return NotImplemented")
s.append("return self.{k} {symbol} other.{k}".format(k=self.key, symbol=symbol))
scope = {}
exec("\n ".join(s), scope)
setattr(cls, dunder, scope[dunder])
return cls
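# Illustrative usage sketch (added comment, not part of the original module; Money is a
# hypothetical class):
#
#     @TotalCompareByKey('amount')
#     class Money:
#         def __init__(self, amount):
#             self.amount = amount
#
#     Money(3) < Money(5)   # True -- the generated __lt__ compares self.amount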
| [((29, 7, 29, 27), 'inspect.isclass', 'inspect.isclass', ({(29, 23, 29, 26): 'obj'}, {}), '(obj)', False, 'import inspect\n'), ((61, 7, 61, 27), 'inspect.isclass', 'inspect.isclass', ({(61, 23, 61, 26): 'obj'}, {}), '(obj)', False, 'import inspect\n'), ((20, 27, 20, 59), 'functools.partial', 'partial', (), '', False, 'from functools import partial\n'), ((49, 27, 49, 61), 'functools.partial', 'partial', (), '', False, 'from functools import partial\n'), ((37, 38, 37, 99), 'inspect.Parameter', 'inspect.Parameter', ({(37, 56, 37, 57): 'i', (37, 59, 37, 98): 'inspect.Parameter.POSITIONAL_OR_KEYWORD'}, {}), '(i, inspect.Parameter.POSITIONAL_OR_KEYWORD)', False, 'import inspect\n'), ((58, 14, 58, 45), 'inspect.signature', 'inspect.signature', ({(58, 32, 58, 44): 'obj.__init__'}, {}), '(obj.__init__)', False, 'import inspect\n')] |
TTOFFLINE-LEAK/ttoffline | v1.0.0.test/toontown/estate/DistributedGardenPlotAI.py | bb0e91704a755d34983e94288d50288e46b68380 | from direct.directnotify import DirectNotifyGlobal
from toontown.estate import GardenGlobals
from toontown.estate.DistributedLawnDecorAI import DistributedLawnDecorAI
FLOWER_X_OFFSETS = (
None, (0, ), (-1.5, 1.5), (-3.4, 0, 3.5))
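# Added comment: judging from its use in plantFlower below, FLOWER_X_OFFSETS is indexed by a
# garden box's type index and lists the x offset of each flower slot in that box (boxes
# holding one, two or three flowers).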
class DistributedGardenPlotAI(DistributedLawnDecorAI):
notify = DirectNotifyGlobal.directNotify.newCategory('DistributedGardenPlotAI')
def __init__(self, mgr):
DistributedLawnDecorAI.__init__(self, mgr)
self.plotType = 0
self.__plantingAvId = 0
self.treeIndex = 0
self.flowerIndex = 0
def announceGenerate(self):
DistributedLawnDecorAI.announceGenerate(self)
self.plotType = GardenGlobals.whatCanBePlanted(self.ownerIndex, self.plot)
self.__plantingAvId = 0
def setTreeIndex(self, treeIndex):
self.treeIndex = treeIndex
def getTreeIndex(self):
return self.treeIndex
def setFlowerIndex(self, flowerIndex):
self.flowerIndex = flowerIndex
def getFlowerIndex(self):
return self.flowerIndex
def __initialSanityCheck(self, wantedType=None, forceOwner=False):
if self.__plantingAvId:
return
else:
avId = self.air.getAvatarIdFromSender()
av = self.air.doId2do.get(avId)
if not av:
self.air.writeServerEvent('suspicious', avId, 'called DistributedGardenPlotAI method outside shard!')
return
if wantedType is not None and self.plotType != wantedType:
self.air.writeServerEvent('suspicious', avId, 'called incorrect DistributedGardenPlotAI method!', plotType=self.plotType, wantedType=wantedType)
return self.d_interactionDenied()
if avId != self.ownerDoId and not forceOwner:
self.air.writeServerEvent('suspicious', avId, "called someone else's DistributedGardenPlotAI plant method!", ownerDoId=self.ownerDoId)
return self.d_interactionDenied()
return av
def plantFlower(self, species, variety, usingFlowerAll=False):
av = self.__initialSanityCheck(GardenGlobals.FLOWER_TYPE if not usingFlowerAll else None, usingFlowerAll)
if not av:
return
else:
def invalid(problem):
msg = 'tried to plant flower but something went wrong: %s' % problem
self.notify.warning('%d %s' % (av.doId, msg))
self.air.writeServerEvent('suspicious', av.doId, msg)
if not usingFlowerAll:
return self.d_setMovie(GardenGlobals.MOVIE_PLANT_REJECTED)
plantAttributes = GardenGlobals.PlantAttributes.get(species, {})
if plantAttributes.get('plantType') != GardenGlobals.FLOWER_TYPE:
return invalid('invalid species: %d' % species)
if variety >= len(plantAttributes['varieties']):
return invalid('invalid variety: %d' % variety)
if not usingFlowerAll:
cost = len(GardenGlobals.Recipes[plantAttributes['varieties'][variety][0]]['beans'])
av.takeMoney(cost)
self.d_setMovie(GardenGlobals.MOVIE_PLANT)
def handlePlantFlower(task):
flower = self.mgr.plantFlower(self.getFlowerIndex(), species, variety, plot=self, ownerIndex=self.ownerIndex, plotId=self.plot, waterLevel=0, generate=False)
index = (0, 1, 2, 2, 2, 3, 3, 3, 4, 4)[self.getFlowerIndex()]
idx = (0, 0, 0, 1, 2, 0, 1, 2, 0, 1)[self.getFlowerIndex()]
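                # Added comment: the two lookup tuples above appear to map each of the ten
                # flower indices to one of five estate garden boxes (index) and to a slot
                # within that box (idx), which together determine where the flower is placed.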
zOffset = 1.5
gardenBox = self.mgr._estateBoxes[index]
xOffset = FLOWER_X_OFFSETS[gardenBox.getTypeIndex()][idx]
flower.setPos(gardenBox, 0, 0, 0)
flower.setZ(gardenBox, zOffset)
flower.setX(gardenBox, xOffset)
flower.setH(gardenBox, 0)
flower.generateWithRequired(self.mgr.estate.zoneId)
if not usingFlowerAll:
flower.d_setMovie(GardenGlobals.MOVIE_FINISHPLANTING, self.__plantingAvId)
flower.d_setMovie(GardenGlobals.MOVIE_CLEAR, self.__plantingAvId)
self.air.writeServerEvent('plant-flower', self.__plantingAvId, species=species, variety=variety, plot=self.plot, name=plantAttributes.get('name', 'unknown flower'))
if task:
return task.done
if usingFlowerAll:
handlePlantFlower(None)
else:
taskMgr.doMethodLater(7, handlePlantFlower, self.uniqueName('handle-plant-flower'))
self.__plantingAvId = av.doId
return 1
def plantGagTree(self, track, index):
av = self.__initialSanityCheck(GardenGlobals.GAG_TREE_TYPE)
if not av:
return
for i in xrange(index):
if not self.mgr.hasTree(track, i):
msg = 'tried to plant tree but an index is missing: %d' % index
self.notify.warning('%d %s' % (av.doId, msg))
self.air.writeServerEvent('suspicious', av.doId, msg)
return self.d_setMovie(GardenGlobals.MOVIE_PLANT_REJECTED)
if self.mgr.hasTree(track, index):
msg = 'tried to plant tree but gag already planted'
self.notify.warning('%d %s' % (av.doId, msg))
self.air.writeServerEvent('suspicious', av.doId, msg)
return self.d_setMovie(GardenGlobals.MOVIE_PLANT_REJECTED)
if av.inventory.useItem(track, index) == -1:
msg = 'tried to plant tree but not carrying selected gag'
self.notify.warning('%d %s' % (av.doId, msg))
self.air.writeServerEvent('suspicious', av.doId, msg)
return self.d_setMovie(GardenGlobals.MOVIE_PLANT_REJECTED)
av.d_setInventory(av.getInventory())
self.d_setMovie(GardenGlobals.MOVIE_PLANT)
def handlePlantTree(task):
if not self.air:
return
tree = self.mgr.plantTree(self.getTreeIndex(), GardenGlobals.getTreeTypeIndex(track, index), plot=self, ownerIndex=self.ownerIndex, plotId=self.plot, pos=(self.getPos(), self.getH()))
tree.d_setMovie(GardenGlobals.MOVIE_FINISHPLANTING, self.__plantingAvId)
tree.d_setMovie(GardenGlobals.MOVIE_CLEAR, self.__plantingAvId)
self.air.writeServerEvent('plant-tree', self.__plantingAvId, track=track, index=index, plot=self.plot)
return task.done
taskMgr.doMethodLater(7, handlePlantTree, self.uniqueName('handle-plant-tree'))
self.__plantingAvId = av.doId
def plantStatuary(self, species):
av = self.__initialSanityCheck(GardenGlobals.STATUARY_TYPE)
if not av:
return
def invalid(problem):
msg = 'tried to plant statuary but something went wrong: %s' % problem
self.notify.warning('%d %s' % (av.doId, msg))
self.air.writeServerEvent('suspicious', av.doId, msg)
return self.d_setMovie(GardenGlobals.MOVIE_PLANT_REJECTED)
plantAttributes = GardenGlobals.PlantAttributes.get(species, {})
if plantAttributes.get('plantType') != GardenGlobals.STATUARY_TYPE:
return invalid('invalid species: %d' % species)
gardenItem = species - 100
if gardenItem == 134:
gardenItem = 135
if not av.removeGardenItem(gardenItem, 1):
return invalid("av doesn't own item: %d" % species)
self.d_setMovie(GardenGlobals.MOVIE_PLANT)
def handlePlaceStatuary(task):
if not self.air:
return
statuary = self.mgr.placeStatuary(self.mgr.S_pack(0, 0, species, 0), plot=self, ownerIndex=self.ownerIndex, plotId=self.plot, pos=(
self.getPos(), self.getH()), generate=False)
statuary.generateWithRequired(self.zoneId)
statuary.d_setMovie(GardenGlobals.MOVIE_FINISHPLANTING, self.__plantingAvId)
statuary.d_setMovie(GardenGlobals.MOVIE_CLEAR, self.__plantingAvId)
self.air.writeServerEvent('plant-statuary', self.__plantingAvId, species=species, plot=self.plot)
return task.done
taskMgr.doMethodLater(7, handlePlaceStatuary, self.uniqueName('handle-place-statuary'))
self.__plantingAvId = av.doId
def plantToonStatuary(self, species, dnaCode):
av = self.__initialSanityCheck(GardenGlobals.STATUARY_TYPE)
if not av:
return
def invalid(problem):
msg = 'tried to plant statuary but something went wrong: %s' % problem
self.notify.warning('%d %s' % (av.doId, msg))
self.air.writeServerEvent('suspicious', av.doId, msg)
return self.d_setMovie(GardenGlobals.MOVIE_PLANT_REJECTED)
plantAttributes = GardenGlobals.PlantAttributes.get(species, {})
if plantAttributes.get('plantType') != GardenGlobals.STATUARY_TYPE:
return invalid('invalid species: %d' % species)
if not av.removeGardenItem(species - 100, 1):
return invalid("av doesn't own item: %d" % species)
self.d_setMovie(GardenGlobals.MOVIE_PLANT)
def handlePlaceStatuary(task):
if not self.air:
return
statuary = self.mgr.placeStatuary(self.mgr.S_pack(dnaCode, 0, species, 0), plot=self, ownerIndex=self.ownerIndex, plotId=self.plot, pos=(
self.getPos(), self.getH()), generate=False)
statuary.generateWithRequired(self.zoneId)
statuary.d_setMovie(GardenGlobals.MOVIE_FINISHPLANTING, self.__plantingAvId)
self.air.writeServerEvent('plant-statuary', self.__plantingAvId, species=species, plot=self.plot)
return task.done
taskMgr.doMethodLater(7, handlePlaceStatuary, self.uniqueName('handle-place-statuary'))
self.__plantingAvId = av.doId
def plantNothing(self, burntBeans):
av = self.__initialSanityCheck()
if av:
av.takeMoney(burntBeans) | [((8, 13, 8, 83), 'direct.directnotify.DirectNotifyGlobal.directNotify.newCategory', 'DirectNotifyGlobal.directNotify.newCategory', ({(8, 57, 8, 82): '"""DistributedGardenPlotAI"""'}, {}), "('DistributedGardenPlotAI')", False, 'from direct.directnotify import DirectNotifyGlobal\n'), ((11, 8, 11, 50), 'toontown.estate.DistributedLawnDecorAI.DistributedLawnDecorAI.__init__', 'DistributedLawnDecorAI.__init__', ({(11, 40, 11, 44): 'self', (11, 46, 11, 49): 'mgr'}, {}), '(self, mgr)', False, 'from toontown.estate.DistributedLawnDecorAI import DistributedLawnDecorAI\n'), ((18, 8, 18, 53), 'toontown.estate.DistributedLawnDecorAI.DistributedLawnDecorAI.announceGenerate', 'DistributedLawnDecorAI.announceGenerate', ({(18, 48, 18, 52): 'self'}, {}), '(self)', False, 'from toontown.estate.DistributedLawnDecorAI import DistributedLawnDecorAI\n'), ((19, 24, 19, 82), 'toontown.estate.GardenGlobals.whatCanBePlanted', 'GardenGlobals.whatCanBePlanted', ({(19, 55, 19, 70): 'self.ownerIndex', (19, 72, 19, 81): 'self.plot'}, {}), '(self.ownerIndex, self.plot)', False, 'from toontown.estate import GardenGlobals\n'), ((147, 26, 147, 72), 'toontown.estate.GardenGlobals.PlantAttributes.get', 'GardenGlobals.PlantAttributes.get', ({(147, 60, 147, 67): 'species', (147, 69, 147, 71): '{}'}, {}), '(species, {})', False, 'from toontown.estate import GardenGlobals\n'), ((182, 26, 182, 72), 'toontown.estate.GardenGlobals.PlantAttributes.get', 'GardenGlobals.PlantAttributes.get', ({(182, 60, 182, 67): 'species', (182, 69, 182, 71): '{}'}, {}), '(species, {})', False, 'from toontown.estate import GardenGlobals\n'), ((64, 30, 64, 76), 'toontown.estate.GardenGlobals.PlantAttributes.get', 'GardenGlobals.PlantAttributes.get', ({(64, 64, 64, 71): 'species', (64, 73, 64, 75): '{}'}, {}), '(species, {})', False, 'from toontown.estate import GardenGlobals\n'), ((127, 59, 127, 103), 'toontown.estate.GardenGlobals.getTreeTypeIndex', 'GardenGlobals.getTreeTypeIndex', ({(127, 90, 127, 95): 'track', (127, 97, 127, 102): 'index'}, {}), '(track, index)', False, 'from toontown.estate import GardenGlobals\n')] |