| repo_name | repo_path | repo_head_hexsha | content | apis |
|---|---|---|---|---|
| stringlengths 7-94 | stringlengths 4-237 | stringlengths 40-40 | stringlengths 10-680k | stringlengths 2-840k |
jun-yoon/onnxruntime | docs/python/conf.py | 806e24d5c69693533ed4b6fa56b84095efa5df70

# -*- coding: utf-8 -*-
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
#
# Configuration file for the Sphinx documentation builder.
import os
import sys
import shutil
# Check that these extensions are installed.
import sphinx_gallery.gen_gallery
# The package should be installed in a virtual environment.
import onnxruntime
# The documentation requires two extensions available at:
# https://github.com/xadupre/sphinx-docfx-yaml
# https://github.com/xadupre/sphinx-docfx-markdown
import sphinx_modern_theme
# -- Project information -----------------------------------------------------
project = 'ONNX Runtime'
copyright = '2018, Microsoft'
author = 'Microsoft'
version = onnxruntime.__version__
release = version
# -- General configuration ---------------------------------------------------
extensions = [
'sphinx.ext.intersphinx',
'sphinx.ext.imgmath',
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
"sphinx.ext.autodoc",
'sphinx.ext.githubpages',
"sphinx_gallery.gen_gallery",
"docfx_yaml.extension",
"docfx_markdown",
"pyquickhelper.sphinxext.sphinx_runpython_extension",
]
templates_path = ['_templates']
source_parsers = {
'.md': 'recommonmark.parser.CommonMarkParser',
}
source_suffix = ['.rst', '.md']
master_doc = 'intro'
language = "en"
exclude_patterns = []
pygments_style = 'sphinx'
# -- Options for HTML output -------------------------------------------------
html_theme = "sphinx_modern_theme"
html_theme_path = [sphinx_modern_theme.get_html_theme_path()]
html_logo = "../MSFT-Onnx-Runtime-11282019-Logo.png"
html_static_path = ['_static']
# -- Options for intersphinx extension ---------------------------------------
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/': None}
# -- Options for Sphinx Gallery ----------------------------------------------
sphinx_gallery_conf = {
'examples_dirs': 'examples',
'gallery_dirs': 'auto_examples',
}
# -- markdown options -----------------------------------------------------------
md_image_dest = "media"
md_link_replace = {
'#onnxruntimesessionoptionsenable-profiling)': '#class-onnxruntimesessionoptions)',
}
# -- Setup actions -----------------------------------------------------------
def setup(app):
# Placeholder to initialize the folder before
# generating the documentation.
app.add_stylesheet('_static/gallery.css')
# download examples for the documentation
this = os.path.abspath(os.path.dirname(__file__))
dest = os.path.join(this, "model.onnx")
if not os.path.exists(dest):
import urllib.request
url = 'https://raw.githubusercontent.com/onnx/onnx/master/onnx/backend/test/data/node/test_sigmoid/model.onnx'
urllib.request.urlretrieve(url, dest)
loc = os.path.split(dest)[-1]
    if not os.path.exists(loc):
        shutil.copy(dest, loc)
return app
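
For reference, a build of these docs can be launched programmatically through Sphinx's own entry point (a sketch; the source and output paths here are assumptions, not taken from this repo):

from sphinx.cmd.build import build_main

# equivalent to running: sphinx-build -b html docs/python build/html
build_main(['-b', 'html', 'docs/python', 'build/html'])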
ngngardner/toc_project | traffic_sim/__main__.py | 15a111a2731b583f82e65c622d16d32af4fe3ae0

"""Traffic simulator code."""
import sys
from os import path
from traffic_sim.analysis import TrafficExperiment
from traffic_sim.console import console
if not __package__:
_path = path.realpath(path.abspath(__file__))
sys.path.insert(0, path.dirname(path.dirname(_path)))
def main():
"""Run code from CLI."""
console.log('traffic sim')
num_trials = 30
ex = TrafficExperiment(
experiments=100,
trials=num_trials,
rows=10,
cols=10,
epochs=10,
)
ex.run()
ex.analyze()
if __name__ == '__main__':
main()
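
# Note: the __package__ guard above lets this file run both as
#   python -m traffic_sim          (package mode; the guard is skipped)
#   python traffic_sim/__main__.py (script mode; the project root is added to sys.path)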
ooblog/TSF1KEV | TSFpy/debug/sample_fibonacci.py | f7d4b4ff88f52ba00b46eb53ed98f8ea62ec2f6d

#! /usr/bin/env python
# -*- coding: UTF-8 -*-
from __future__ import division,print_function,absolute_import,unicode_literals
import sys
import os
os.chdir(sys.path[0])
sys.path.append('/mnt/sda2/github/TSF1KEV/TSFpy')
from TSF_io import *
#from TSF_Forth import *
from TSF_shuffle import *
from TSF_match import *
from TSF_calc import *
from TSF_time import *
TSF_Forth_init(TSF_io_argvs(),[TSF_shuffle_Initwords,TSF_match_Initwords,TSF_calc_Initwords,TSF_time_Initwords])
TSF_Forth_setTSF("TSF_Tab-Separated-Forth:",
"\t".join(["UTF-8","#TSF_encoding","200","#TSF_calcPR","N-Fibonacci:","#TSF_this","0","#TSF_fin."]),
TSF_style="T")
TSF_Forth_setTSF("N-Fibonacci:",
"\t".join(["TSF_argvs:","#TSF_cloneargvs","TSF_argvs:","#TSF_lenthe","[0]Z[Fibcount:0]~[TSF_argvs:0]","#TSF_calcDC","Fibcount:","0","#TSF_pokethe","Fibonacci:","#TSF_this"]),
TSF_style="T")
TSF_Forth_setTSF("Fibonacci:",
"\t".join(["[Fibcount:1]Z1~[Fibcount:1]","#TSF_calcDC","((2&(([0]+3)*[0]+2)^)/((2&(2*[0]+2)^)-(2&([0]+1)^)-1)\\1)#(2&([0]+1)^)","#TSF_calcDC","1","#TSF_echoN","[Fibcount:1]+1","#TSF_calcDC","Fibcount:","1","#TSF_pokethe","Fibjump:","[Fibcount:0]-([Fibcount:1]+1)o0~1","#TSF_calcDC","#TSF_peekthe","#TSF_this"]),
TSF_style="T")
TSF_Forth_setTSF("Fibcount:",
"\t".join(["20","-1"]),
TSF_style="T")
TSF_Forth_setTSF("Fibjump:",
"\t".join(["Fibonacci:","#exit"]),
TSF_style="T")
TSF_Forth_addfin(TSF_io_argvs())
TSF_Forth_argvsleftcut(TSF_io_argvs(),1)
TSF_Forth_run()
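
The calc strings above appear to encode the classic closed-form Fibonacci trick: a single big-integer division against x^2 - x - 1 in the base x = 2^(n+1). A plain-Python restatement, up to an index shift (an illustrative sketch, not part of the TSF runtime):

def fib(n):
    """F(n) from one division; valid for n >= 1, with F(1) = F(2) = 1."""
    x = 1 << (n + 1)                          # base large enough that 'digits' don't carry
    return (x ** (n + 1) // (x * x - x - 1)) % x

print([fib(n) for n in range(1, 11)])         # [1, 1, 2, 3, 5, 8, 13, 21, 34, 55]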
rguptan/Tomboy2Evernote | Tomboy2Evernote.py | 2bee66537d080c13856811b806613ca6aaef8833

#!/usr/bin/python
# -*- coding: UTF-8 -*-
import re
import sys, getopt
import glob
import os
def process_files(inputdir, outputdir):
os.chdir(inputdir)
enex_notes = []
output_filename = 'Tomboy2Evernote.enex'
i = 0
for file in glob.glob("*.note"):
note_file_path = inputdir + '/' + file
note_body = open(note_file_path, 'r').read()
title = get_title(note_body)
html_note_body = get_html_body(note_body)
created_date = tomboy_to_enex_date(get_created_date(note_body))
updated_date = tomboy_to_enex_date(get_updated_date(note_body))
enex_notes.append(make_enex(title, html_note_body, created_date, updated_date))
i += 1
multi_enex_body = make_multi_enex(enex_notes)
save_to_file(outputdir, output_filename, multi_enex_body)
print "Exported notes count: " + `i`
print "Evernote file location: " + outputdir + "/" + output_filename
def get_title(note_body):
title_regex = re.compile("<title>(.+?)</title>")
matches = title_regex.search(note_body);
if matches:
return matches.group(1)
else:
return "No Title"
def get_created_date(note_body):
created_date_regex = re.compile("<create-date>(.+?)</create-date>")
matches = created_date_regex.search(note_body);
if matches:
return matches.group(1)
else:
return "No Created Date"
def get_updated_date(note_body):
updated_date_regex = re.compile("<last-change-date>(.+?)</last-change-date>")
matches = updated_date_regex.search(note_body);
if matches:
return matches.group(1)
else:
return "No Updated Date"
def tomboy_to_enex_date(tomboy_date):
return re.sub(r"^([0-9]{4})-([0-9]{2})-([0-9]{2})T([0-9]{2}):([0-9]{2}):([0-9]{2}).*", r"\1\2\3T\4\5\6Z",
tomboy_date)
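
# For illustration (hypothetical timestamp): the substitution above compacts a
# Tomboy date into the enex form, e.g.
#   tomboy_to_enex_date('2015-04-12T15:34:31.000000-04:00')  # -> '20150412T153431Z'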
def get_html_body(note_body):
new_line = '¬BR¬'
xml_tag = r"<(\/?)[a-zA-Z0-9_\-:]+>"
start_xml_tag = r"<[a-zA-Z0-9_\-:]+>"
# make note body a one liner
note_body = note_body.replace('\n', new_line)
# get content
note_body = re.sub(r".*<note-content.+?>(.+?)</note-content>.*", r"\1", note_body)
# strip title until new_line or start_xml_tag
note_body = re.sub(r"^(.+?)(" + start_xml_tag + "|" + new_line + ")", r"\2", note_body)
# strip first two new lines, even if prefixed with an xml tag
tag = re.match("^" + start_xml_tag, note_body)
if tag != None:
note_body = re.sub(r"^" + start_xml_tag, r"", note_body)
note_body = re.sub(r"^(" + new_line + "){1,2}", r"", note_body)
if tag != None:
note_body = tag.group(0) + note_body
# links
note_body = re.sub(r"<link:internal>(.+?)</link:internal>", r"\1", note_body)
note_body = re.sub(r"<link:broken>(.+?)</link:broken>", r"\1", note_body)
p = re.compile(r"(<link:url>(.+?)</link:url>)")
for m in p.finditer(note_body):
if re.search(r"^([a-zA-Z0-9\._%+\-]+@(?:[a-zA-Z0-9\-]+\.)+[a-zA-Z]{2,10}|https?://.+)$", m.group(2)):
note_body = note_body.replace(m.group(1), '<a href="' + m.group(2) + '">' + m.group(2) + "</a>")
else:
note_body = note_body.replace(m.group(1), m.group(2))
# lists
note_body = re.sub(r"<(\/?)list>", r"<\1ul>", note_body)
note_body = re.sub(r'<list-item dir="ltr">', r"<li>", note_body)
note_body = re.sub(r"<(\/?)list-item>", r"<\1li>", note_body)
# higlight
note_body = re.sub(r"<highlight>(.+?)</highlight>", r'<span style="background:yellow">\1</span>', note_body)
# font size
note_body = re.sub(r"<size:small>(.+?)</size:small>", r'<span style="font-size:small">\1</span>', note_body)
note_body = re.sub(r"<size:large>(.+?)</size:large>", r'<span style="font-size:large">\1</span>', note_body)
note_body = re.sub(r"<size:huge>(.+?)</size:huge>", r'<span style="font-size:xx-large">\1</span>', note_body)
# text style
note_body = re.sub(r"<(\/?)monospace>", r"<\1code>", note_body)
note_body = re.sub(r"<(\/?)bold>", r"<\1b>", note_body)
note_body = re.sub(r"<(\/?)italic>", r"<\1i>", note_body)
note_body = re.sub(r"<(\/?)strikethrough>", r"<\1strike>", note_body)
    # indentation
note_body = re.sub(r"\t", r" ", note_body)
while re.search(new_line + " ", note_body) != None:
note_body = re.sub("(" + new_line + " *) ", r"\1 ", note_body)
# set new lines
note_body = note_body.replace(new_line, '<br/>\n')
return note_body
def make_enex(title, body, created_date, updated_date):
return '''<note><title>''' + title + '''</title><content><![CDATA[<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE en-note SYSTEM "http://xml.evernote.com/pub/enml2.dtd">
<en-note style="word-wrap: break-word; -webkit-nbsp-mode: space; -webkit-line-break: after-white-space;">
''' + body + '''
</en-note>]]></content><created>''' + created_date + '''</created><updated>''' + updated_date + '''</updated></note>'''
def make_multi_enex(multi_enex_body):
return '''<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE en-export SYSTEM "http://xml.evernote.com/pub/evernote-export2.dtd">
<en-export export-date="20150412T153431Z" application="Evernote/Windows" version="5.x">
''' + ''.join(multi_enex_body) + '''</en-export>'''
def save_to_file(outputdir, filename, body):
if not os.path.exists(outputdir):
os.makedirs(outputdir)
text_file = open(outputdir + '/' + filename, "w")
text_file.write(body)
text_file.close()
def get_help_line():
print 'Usage: ', sys.argv[0], ' -i <inputdir> -o <outputdir>'
def get_input_params(argv):
inputdir = ''
outputdir = ''
printhelpline = 0
try:
opts, args = getopt.getopt(argv, "hi:o:", ["idir=", "odir="])
except getopt.GetoptError:
exit_with_error()
for opt, arg in opts:
if opt == '-h':
get_help_line()
sys.exit()
elif opt in ("-i", "--idir"):
inputdir = arg
elif opt in ("-o", "--odir"):
outputdir = arg
if (inputdir == ""):
print "Error: Missing input folder"
printhelpline = 1
if (outputdir == ""):
print "Error: Missing output folder"
printhelpline = 1
if printhelpline == 1:
exit_with_error()
return (inputdir, outputdir)
def exit_with_error():
get_help_line()
sys.exit(2)
def main(argv):
inputdir, outputdir = get_input_params(argv)
process_files(inputdir, outputdir)
if __name__ == "__main__":
main(sys.argv[1:])
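
A typical invocation (paths are hypothetical; Tomboy conventionally keeps its notes under ~/.local/share/tomboy):

    python Tomboy2Evernote.py -i ~/.local/share/tomboy -o ~/Desktop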
williamfzc/pyat | demo.py | 4e9792d4bfdc119d910eb88cf8a13a0ab7848518

from pyatool import PYAToolkit

# Custom functions must take a `toolkit` parameter, even if it goes unused
def test_b(toolkit):
    return 'i am test_b, running on {}'.format(toolkit.device_id)

# Wrap an adb command as a method
PYAToolkit.bind_cmd(func_name='test_a', command='shell pm list package | grep google')
# ... or bind a custom function
PYAToolkit.bind_func(real_func=test_b)
# Enable logging if you want it
PYAToolkit.switch_logger(True)
# Initialization
d = PYAToolkit('123456F')
assert d.is_connected()
# Remote control is also supported (still unstable, not recommended for now):
# d = PYAToolkit('123456F', mode='remote')
# Bound methods can be called directly
result = d.test_a()
# Possible output:
# package:com.google.android.webview
# Custom functions work the same way
result = d.test_b()
# i am test_b, running on 123456F
# You can also call through `std` or `standard_func` (gives code completion, which is handy)
# Standard library only; your own extensions can only be called directly
d.std.get_current_activity(toolkit=d)
# Get all registered functions
all_functions = d.current_function()
print(all_functions)
# Below is a walkthrough of every standard function; feedback and patches are welcome
# Print the device id (for testing only)
d.hello_world()
# List all installed packages
installed_package = d.show_package()
# Name of the activity at the top of the stack
current_activity_name = d.get_current_activity()
# Install an apk (url or path supported); this example may take a while since it downloads from GitHub
d.install_from(url=r'https://github.com/williamfzc/simhand2/releases/download/v0.1.2/app-debug.apk')
# d.install_from(path=r'/Users/admin/some_path/some_apk.apk')
# Check whether a package is installed
target_package_name = 'com.github.williamfzc.simhand2'
is_installed = d.is_installed(package_name=target_package_name)
# Clear the cache
d.clean_cache(target_package_name)
if is_installed:
    d.uninstall(target_package_name)
# Get the phone's IP address
local_address = d.get_ip_address()
print(local_address)
# Toggle wifi
d.switch_wifi(False)
# Toggle airplane mode
d.switch_airplane(True)
d.switch_airplane(False)
d.switch_wifi(True)
# Switch the input method (IME)
d.set_ime('com.sohu.inputmethod.sogouoem/.SogouIME')
# push and pull
d.push('./README.md', '/sdcard/')
d.pull('/sdcard/README.md', './haha.md')
# send keyevent
d.input_key_event(26)
d.input_key_event(26)
# swipe
d.swipe(500, 1200, 500, 200)
# click
d.click(200, 200)
nlab-mpg/nnlab | nnlab/nn/graph.py | 56aabb53fa7b86601b35c7b8c9e890d50e19d9af

#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, division
from six.moves import xrange, zip
import tensorflow as tf
from .tensor import Tensor
class Graph(object):
"""The class for defining computational graph."""
def __init__(self, loss=None, modules=None, inputs=None, outputs=None, monitors=None):
self._loss = loss
self._modules = modules if modules is not None else []
self._inputs = inputs
self._outputs = outputs
self._monitors = monitors
self._check_arguments(loss, modules, inputs, outputs, monitors)
def _check_arguments(self, loss, modules, inputs, outputs, monitors):
"""Verify the arguments."""
if loss is not None and not isinstance(loss, Tensor):
raise Exception("loss must be a tensor")
if modules is not None and not isinstance(modules, list):
raise Exception("modules must be a list")
if inputs is not None and not self._check_type(inputs):
raise Exception("input must be a tensor/list/dict")
if outputs is not None and not self._check_type(outputs):
raise Exception("output must be a tensor/list/dict")
if monitors is not None and not isinstance(monitors, dict):
raise Exception("monitors must be a dict")
    def _check_type(self, obj):
        """Check whether the type is either a tensor or list or dict"""
        return isinstance(obj, (Tensor, list, dict))
@property
def loss(self):
return self._loss
@property
def modules(self):
return self._modules
@property
def inputs(self):
return self._inputs
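
A minimal usage sketch (not part of the original file): the constructor validates argument types up front, so a wrongly-typed field fails fast.

if __name__ == '__main__':
    try:
        Graph(monitors=['accuracy'])   # should be a dict, e.g. {'acc': some_tensor}
    except Exception as err:
        print(err)                     # -> monitors must be a dict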
katiekruzan/masters-thesis | local-rotations.py | c9b89a0995957b5b50442b86ae8a38388f1fb720

"""
Here we're going to code for the local rotations. We're doing an object oriented approach
Left and right are in reference to the origin
"""
__version__ = 1.0
__author__ = 'Katie Kruzan'
import string # just to get the alphabet easily iterable
import sys # This just helps us in our printing
from typing import Dict # This helps us in our documentation
# Getting the structure for the classes we're putting together
class Segment:
"""
These are going to represent the outer segments and the mysteries they hold.
The segments will be adjacent to 2 outer nodes
"""
def __init__(self, name: str):
"""
Initialize the segment, keeping a place for the right left outer vertices to which it is adjacent
:param name: How we will reference this segment. In this implementation, it is expected to be a negative integer
"""
self.leftOuter = None
self.rightOuter = None
self.name = name
def getName(self) -> str:
"""
Return the name we gave to this segment.
:return: name
"""
return self.name
def getLeftOuter(self):
"""
Return the outer node to the left of this segment with respect to the origin
:return: leftOuter
"""
return self.leftOuter
def getRightOuter(self):
"""
Return the outer node to the right of this segment with respect to the origin
:return: rightOuter
"""
return self.rightOuter
def setLeftOuter(self, left):
"""
Set the outer node to the left of this segment with respect to the origin
Also, set left's right segment to this segment.
:param left: A outer node object to be referenced as this segment's left outer node
:return: None
"""
self.leftOuter = left
if left.getRightSegment() is None:
left.setRightSegment(self)
def setRightOuter(self, right):
"""
Set the outer node to the right of this segment with respect to the origin
Also, set right's left segment to this segment.
:param right: A outer node object to be referenced as this segment's right outer node
:return: None
"""
self.rightOuter = right
if right.getLeftSegment() is None:
right.setLeftSegment(self)
def isValidObject(self) -> bool:
"""
Checks to see if this segment has been full initialized.
:return: valid returns true if it has both the left and right outer nodes set
"""
if (self.leftOuter is None) or (self.rightOuter is None):
return False
return True
def toString(self) -> str:
"""
Returns a formatted string of the left and right outer nodes this is associated with
:return: Description string
"""
return 'left Outer: ' + self.leftOuter.getName() + '\nright Outer: ' + self.rightOuter.getName()
class Outer:
"""
Class to represent the outer vertices that are adjacent to an inner vertex and 2 outer segments
"""
def __init__(self, name: str):
"""
Initialize the outer node
Keeping a place for the inner vertex and right and left outer segments to which it is adjacent.
:param name: How we will reference this outer node. In this implementation, it is expected to be a positive integer
"""
self.adjInner = None
self.leftSegment = None
self.rightSegment = None
self.name = name
def getName(self) -> str:
"""
Return the name we gave to this outer node.
:return: name
"""
return self.name
def getLeftSegment(self) -> Segment:
"""
Return the segment object to the left of this outer node with respect to the origin
:return: leftSegment
"""
return self.leftSegment
def getRightSegment(self) -> Segment:
"""
Return the segment object to the right of this outer node with respect to the origin
:return: rightSegment
"""
return self.rightSegment
def getAdjInner(self):
"""
Return the inner node object adjacent to this outer note object
:return: adjInner
"""
return self.adjInner
def setLeftSegment(self, left: Segment):
"""
Set the segment to the left of this outer node with respect to the origin
Also, set left's right outer node to self.
:param left: A segment object to be referenced as this node's left outer segment
:return: None
"""
self.leftSegment = left
if left.getRightOuter() is None:
left.setRightOuter(self)
def setRightSegment(self, right: Segment):
"""
Set the segment to the right of this outer node with respect to the origin
Also, set right's left outer node to self.
:param right: A segment object to be referenced as this node's right outer segment
:return: None
"""
self.rightSegment = right
if right.getLeftOuter() is None:
right.setLeftOuter(self)
def setAdjInner(self, inner):
"""
Set the inner node adjacent to this outer node
Also, set inner's adjacent outer node to self.
:param inner: A inner node object to be referenced as this node's adjacent inner node
:return: None
"""
self.adjInner = inner
if inner.getAdjOuter() is None:
inner.setAdjOuter(self)
def isValidObject(self) -> bool:
"""
Checks to see if this outer node has been full initialized.
:return: valid returns true if it has the left segment, right segment, and inner node set
"""
if (self.leftSegment is None) or (self.rightSegment is None) or (self.adjInner is None):
return False
return True
def toString(self) -> str:
"""
Returns a formatted string of the left segment, right segment, and inner node this outer node is associated with
:return: Description string
"""
return 'left Segment: ' + self.leftSegment.getName() + '\nright Segment: ' + self.rightSegment.getName() \
+ '\nadj Inner: ' + self.adjInner.getName()
class Inner:
"""
Class to represent the inner vertices that are adjacent to an outer vertex and 2 neighboring inner vertices
"""
def __init__(self, name: str):
"""
Initialize the inner node object
Keeping a place for the outer vertex and right and left adjacent inner nodes.
:param name: How we will reference this inner node. In this implementation, it is expected to be a lowercase letter
"""
self.adjOuter = None
self.leftInner = None
self.rightInner = None
self.name = name
def getName(self) -> str:
"""
Return the name we gave to this inner node.
:return: name
"""
return self.name
def getLeftInner(self):
"""
Return the inner node object to the left of this inner node with respect to the origin
:return: leftInner
"""
return self.leftInner
def getRightInner(self):
"""
Return the inner node object to the right of this inner node with respect to the origin
:return: rightInner
"""
return self.rightInner
def getAdjOuter(self) -> Outer:
"""
Return the outer node object adjacent to this inner node
:return: adjOuter
"""
return self.adjOuter
def setLeftInner(self, left):
"""
Set the inner node to the left of this inner node with respect to the origin
Also, set left's right inner node to self.
:param left: An inner node object to be referenced as this node's left inner node
:return: None
"""
self.leftInner = left
if left.getRightInner() is None:
left.setRightInner(self)
def setRightInner(self, right):
"""
Set the inner node to the right of this inner node with respect to the origin
Also, set right's left inner node to self.
:param right: An inner node object to be referenced as this node's right inner node
:return: None
"""
self.rightInner = right
if right.getLeftInner() is None:
right.setLeftInner(self)
def setAdjOuter(self, outer: Outer):
"""
Set the outer node adjacent to this inner node
Also, set outer's adjacent inner node to self.
:param outer: An outer node object to be referenced as this node's adjacent outer node
:return: None
"""
self.adjOuter = outer
if outer.getAdjInner() is None:
outer.setAdjInner(self)
def isValidObject(self) -> bool:
"""
Checks to see if this inner node has been full initialized.
:return: valid returns true if it has the left inner node, right inner node, and adjacent outer node set
"""
if (self.leftInner is None) or (self.rightInner is None) or (self.adjOuter is None):
return False
return True
def toString(self) -> str:
"""
Returns a formatted string of the left inner node, right inner node, and adjacent outer node this inner node
is associated with
:return: Description string
"""
return 'left Inner: ' + self.leftInner.getName() + '\nright Inner: ' + self.rightInner.getName() \
+ '\nadj Outer: ' + self.adjOuter.getName()
def standardCircle(num_verts: int) -> (Dict[str, Segment], Dict[str, Outer], Dict[str, Inner]):
"""
This will go through and initialize our standard starting circle
:param num_verts: the number of outer nodes we will have
:returns: tuple(segs, outs, inns)
-segs - dictionary of str: Segment objects in the circle \\
-outs - dictionary of str: Outer objects in the circle \\
-inns - dictionary of str: Inner objects in the circle
"""
# Initializing our dictionaries
segs = dict()
outs = dict()
inns = dict()
# Running through the number of vertices we will be edning up with
for i in range(num_verts):
# start with an inner node - labeling with lowercase letters
inn = Inner(string.ascii_letters[i])
# If we aren't on the first one, connect it to the previous one.
if i != 0:
inn.setLeftInner(inns[string.ascii_letters[i - 1]])
# If we've hit the end of the line, go ahead and close up the circle.
if i == num_verts - 1:
inn.setRightInner(inns[string.ascii_letters[0]])
# then make the outer
out = Outer(str(i + 1))
# Go ahead and connect the inner we just made with this outer node
out.setAdjInner(inn)
# If we aren't on the first one, go ahead and connect it to the previous segment
if i != 0:
out.setLeftSegment(segs[str(-i)])
# Now time to make the segment
seg = Segment(str(-i - 1))
# Go ahead and connect the outer node we just made with this segment
seg.setLeftOuter(out)
# If we're at the end of the circle, then we close it up. Otherwise, move on
if i == num_verts - 1:
seg.setRightOuter(outs[str(1)])
# add them to our dictionaries
segs[seg.getName()] = seg
outs[out.getName()] = out
inns[inn.getName()] = inn
# If we've made it here, then we've made the full circle and are ready to return it
return segs, outs, inns
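
# Quick sanity check (illustrative):
#   segs, outs, inns = standardCircle(4)
#   print(inns['a'].toString())
# prints "left Inner: d", "right Inner: b", "adj Outer: 1".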
def findTheFace(source_in: Inner) -> list:
"""
This will take an inner node and use the algorithm to walk the face that it is on.
The order of the face will be i, o, s, o, i repeat
:param source_in: Inner node object we are starting from.
:return: face: a list representing the face. This list is of inner, outer, and segment objects in the
order i, o, s, o, i, repeat.
"""
# initialize the list
face = list()
# starting the face with the source inner node.
face.append(source_in)
# initialize the ending inner node we will be using for comparison
end_in = None
# As long as we haven't looped back around, go through the following process.
while source_in != end_in:
# inner: find adjacent outer
face.append(face[-1].getAdjOuter())
# outer: go to right seg
face.append(face[-1].getRightSegment())
# segment: go to right outer
face.append(face[-1].getRightOuter())
# outer: then adj inner
face.append(face[-1].getAdjInner())
# then left inner and repeat.
# set this inner node as our node to compare to our starting node.
end_in = face[-1].getLeftInner()
face.append(end_in)
return face
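
# Illustrative trace on the standard 4-vertex circle: starting from inner 'a',
# the i-o-s-o-i walk visits a -> 1 -> -1 -> 2 -> b and closes back at 'a', so
# findTheFace(inns['a']) returns objects named [a, 1, -1, 2, b, a].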
def faceCannonOrder(face: list) -> list:
"""
    Put the face elements into a canonical order.
    We anchor on the lowest-numbered element and start the list just before it, so the same face always produces the same ordering.
:param face: a list representing the face. This list is of inner, outer, and segment objects in the
order i, o, s, o, i, repeat.
:return: ordered face in canonical order
"""
# find the first numerical face then go right before it
# initialize face num as a relatively high number we won't encounter
facenum = 333
# initialize the int for where we will split the list
start_ind = 0
# loop through and find the face we want to find
for i in range(len(face)):
try:
if int(face[i].getName()) < facenum:
# To get here, we must have found a lower face
# keep track of where this is located in the list
start_ind = i - 1
# make our current lowest face the new lowest face to keep comparing to.
facenum = int(face[i].getName())
# if we try casting a letter to a number, python will get upset, but that also means we're looking at
# an inner node, which we don't want for this anyways.
except ValueError:
continue
# make our ordered face getting from the starting index to the end, then wrapping around and getting the rest of
# the face
ord_face = face[start_ind:] + face[:start_ind]
# go through and make sure we don't have any duplicate elements right by each other. If we do, then drop them.
for i in range(len(ord_face) - 1):
if ord_face[i].toString() == ord_face[i + 1].toString():
ord_face.pop(i)
break
# return the ordered face
return ord_face
def grabAllTheFaces(inns: Dict[str, Inner]) -> list:
"""
Function to get the list of unique faces for our circle.
:param inns: dictionary of Inner objects. We will loop through these to get the faces
:return: faces: List of distinct faces in canonical order.
"""
# initialize the list of faces
faces = list()
# a set of all the elements we have covered by the faces. Will use this for a completeness check
covered = set()
# run through every inner node we've been given
for inn in inns:
# Generate the face that inner node lies on
face = findTheFace(inns[inn])
# put the face we've gotten in canonical order
face = faceCannonOrder(face)
# Check if we've already captured it.
if face not in faces:
# If not, then add it to our list of faces
faces.append(face)
# Go ahead and add the elements in this face to our covered set
covered.update(face)
# check we've gotten all the elements
if len(covered) == (3 * len(inns)):
print('We got em!!!')
# Now return a list of all the faces we have.
return faces
def printCircleStatus(segs: Dict[str, Segment], outs: Dict[str, Outer], inns: Dict[str, Inner]):
"""
Helper function that prints the status of the circle to the console
:param segs: dictionary of str: Segment objects in the circle
:param outs: dictionary of str: Outer objects in the circle
:param inns: dictionary of str: Inner objects in the circle
:return: None
"""
# Run through the segments
print('\nSegments:')
for k in segs:
print()
print(k)
print(segs[k].toString())
# Run through the Outer nodes
print('\nOuters:')
for k in outs:
print()
print(k)
print(outs[k].toString())
# Run through the Inner nodes
print('\nInners:')
for k in inns:
print()
print(k)
print(inns[k].toString())
if __name__ == '__main__':
# This is where you change the variables.
# must be a positive integer > 2
verts = 12
# Must be a string with spaces between each element. If you want to denote multiple cycles, you must add a |
switch_txt = '2 3 4 5 | 12 7'
# we're going to make a list of all the switches and all the cycles
switches = list()
# first, we get the cycles, split by '|'
cycles = switch_txt.split('|')
for c in cycles:
# We're going to split the switch into a list split by the whitespace
s = c.strip().split()
# Then we're going to append the switches in the cycle to the new list
switches.append(s)
# Go ahead and make the standard circle given the number of vertices we want to use.
segments, outers, inners = standardCircle(verts)
# Go through and grab the faces for our standard circle
facs = grabAllTheFaces(inners)
print('\nPrinting the faces')
for f in facs:
print()
for p in f:
sys.stdout.write(p.getName() + ' ')
# Go through and do the switches for each cycle
for switch in switches:
for num in range(len(switch)):
# store the current part of the switch we're working on
cs = switch[num]
# store the next part of the switch we're working on, looping to the beginning if we're at the end
ns = switch[(num + 1) % len(switch)]
# Do the actual switch
# Getting the new inner and outer validly switched up
inners[string.ascii_letters[int(cs) - 1]].setAdjOuter(outers[ns])
outers[ns].setAdjInner(inners[string.ascii_letters[int(cs) - 1]])
# print how the final rotation sits
printCircleStatus(segments, outers, inners)
# Go through and generate and print the new faces
new_facs = grabAllTheFaces(inners)
print('\nPrinting the new faces')
for f in new_facs:
print()
for p in f:
sys.stdout.write(p.getName() + ' ')
EtienneDavid/FROST | PT-FROST/frost.py | 1cea124d69f07e3ac7e3ad074059d29c0849254c

import random
import argparse
import numpy as np
import pandas as pd
import os
import time
import string
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.utils.data import DataLoader
from tqdm import tqdm
from model import WideResnet
from cifar import get_train_loader, get_val_loader
from label_guessor import LabelGuessor
from lr_scheduler import WarmupCosineLrScheduler
from ema import EMA
import utils
## args
parser = argparse.ArgumentParser(description=' FixMatch Training')
parser.add_argument('--wresnet-k', default=2, type=int, help='width factor of wide resnet')
parser.add_argument('--wresnet-n', default=28, type=int, help='depth of wide resnet')
parser.add_argument('--n-classes', type=int, default=10, help='number of classes in dataset')
parser.add_argument('--n-labeled', type=int, default=10, help='number of labeled samples for training')
parser.add_argument('--n-epochs', type=int, default=256, help='number of training epochs')
parser.add_argument('--batchsize', type=int, default=64, help='train batch size of labeled samples')
parser.add_argument('--mu', type=int, default=7, help='factor of train batch size of unlabeled samples')
parser.add_argument('--mu-c', type=int, default=1, help='factor of train batch size of contrastive learing samples')
parser.add_argument('--thr', type=float, default=0.95, help='pseudo label threshold')
parser.add_argument('--n-imgs-per-epoch', type=int, default=50000, help='number of training images for each epoch')
parser.add_argument('--lam-x', type=float, default=1., help='coefficient of labeled loss')
parser.add_argument('--lam-u', type=float, default=1., help='coefficient of unlabeled loss')
parser.add_argument('--lam-clr', type=float, default=1., help='coefficient of contrastive loss')
parser.add_argument('--ema-alpha', type=float, default=0.999, help='decay rate for ema module')
parser.add_argument('--lr', type=float, default=0.03, help='learning rate for training')
parser.add_argument('--weight-decay', type=float, default=5e-4, help='weight decay')
parser.add_argument('--momentum', type=float, default=0.9, help='momentum for optimizer')
parser.add_argument('--seed', type=int, default=-1, help='seed for random behaviors, no seed if negative')
parser.add_argument('--feature_dim', default=128, type=int, help='Feature dim for latent vector')
parser.add_argument('--temperature', default=0.5, type=float, help='Temperature used in softmax')
parser.add_argument('--k', default=200, type=int, help='Top k most similar images used to predict the label')
parser.add_argument('--test', default=0, type=int, help='0 is softmax test function, 1 is similarity test function')
parser.add_argument('--bootstrap', type=int, default=16, help='Bootstrapping factor (default=16)')
parser.add_argument('--boot-schedule', type=int, default=1, help='Bootstrapping schedule (default=1)')
parser.add_argument('--balance', type=int, default=0, help='Balance class methods to use (default=0 None)')
parser.add_argument('--delT', type=float, default=0.2, help='Class balance threshold delta (default=0.2)')
args = parser.parse_args()
print(args)
# save results
save_name_pre = '{}_E{}_B{}_LX{}_LU{}_LCLR{}_THR{}_LR{}_WD{}'.format(args.n_labeled, args.n_epochs, args.batchsize,
args.lam_x, args.lam_u, args.lam_clr, args.thr, args.lr, args.weight_decay)
ticks = time.time()
result_dir = 'results/' + save_name_pre + '.' + str(ticks)
if not os.path.exists(result_dir):
os.mkdir(result_dir)
def set_model():
model = WideResnet(args.n_classes, k=args.wresnet_k, n=args.wresnet_n, feature_dim=args.feature_dim) # wresnet-28-2
model.train()
model.cuda()
criteria_x = nn.CrossEntropyLoss().cuda()
criteria_u = nn.CrossEntropyLoss().cuda()
return model, criteria_x, criteria_u
def train_one_epoch(
model,
criteria_x,
criteria_u,
optim,
lr_schdlr,
ema,
dltrain_x,
dltrain_u,
dltrain_all,
lb_guessor,
):
loss_avg, loss_x_avg, loss_u_avg, loss_clr_avg = [], [], [], []
epsilon = 0.000001
dl_u, dl_all = iter(dltrain_u), iter(dltrain_all)
for _, _, ims_all_1, ims_all_2, _ in tqdm(dl_all, desc='Training ...'):
ims_u_weak, ims_u_strong, _, _, lbs_u = next(dl_u)
loss_x, loss_u, loss_clr = torch.tensor(0).cuda(), torch.tensor(0).cuda(), torch.tensor(0).cuda()
fv_1, fv_2 = torch.tensor(0).cuda(), torch.tensor(0).cuda()
ims_u_weak = ims_u_weak.cuda()
ims_u_strong = ims_u_strong.cuda()
ims_all_1 = ims_all_1.cuda(non_blocking=True)
ims_all_2 = ims_all_2.cuda(non_blocking=True)
dl_x = iter(dltrain_x)
ims_x_weak, _, _, _, lbs_x = next(dl_x)
ims_x_weak = ims_x_weak.cuda()
lbs_x = lbs_x.cuda()
n_x, n_u, n_all = 0, 0, 0
        if args.lam_u >= epsilon and args.lam_clr >= epsilon:  # pseudo-labeling and contrastive learning
lbs_u, valid_u, mask_u = lb_guessor(model, ims_u_weak, args.balance, args.delT)
ims_u_strong = ims_u_strong[valid_u]
n_x, n_u, n_all = ims_x_weak.size(0), ims_u_strong.size(0), ims_all_1.size(0)
if n_u != 0:
ims_x_u_all_1 = torch.cat([ims_x_weak, ims_u_strong, ims_all_1], dim=0).detach()
ims_x_u_all_2 = torch.cat([ims_x_weak, ims_u_strong, ims_all_2], dim=0).detach()
logits_x_u_all_1, fv_1, z_1 = model(ims_x_u_all_1)
logits_x_u_all_2, fv_2, z_2 = model(ims_x_u_all_2)
logits_x_u_all = (logits_x_u_all_1 + logits_x_u_all_2) / 2
logits_x, logits_u = logits_x_u_all[:n_x], logits_x_u_all[n_x:(n_x + n_u)]
loss_x = criteria_x(logits_x, lbs_x)
if args.balance == 2 or args.balance == 3:
loss_u = (F.cross_entropy(logits_u, lbs_u, reduction='none') * mask_u).mean()
else:
loss_u = criteria_u(logits_u, lbs_u)
else: # n_u == 0
ims_x_all_1 = torch.cat([ims_x_weak, ims_all_1], dim=0).detach()
ims_x_all_2 = torch.cat([ims_x_weak, ims_all_2], dim=0).detach()
logits_x_all_1, fv_1, z_1 = model(ims_x_all_1)
logits_x_all_2, fv_2, z_2 = model(ims_x_all_2)
logits_x_all = (logits_x_all_1 + logits_x_all_2) / 2
logits_x = logits_x_all[:n_x]
loss_x = criteria_x(logits_x, lbs_x)
loss_u = torch.tensor(0)
elif args.lam_u >= epsilon: #lam_clr == 0: pseudo-labeling only
lbs_u, valid_u, mask_u = lb_guessor(model, ims_u_weak, args.balance, args.delT)
ims_u_strong = ims_u_strong[valid_u]
n_x, n_u = ims_x_weak.size(0), ims_u_strong.size(0)
if n_u != 0:
ims_x_u = torch.cat([ims_x_weak, ims_u_strong], dim=0).detach()
logits_x_u, _, _ = model(ims_x_u)
logits_x, logits_u = logits_x_u[:n_x], logits_x_u[n_x:]
loss_x = criteria_x(logits_x, lbs_x)
if args.balance == 2 or args.balance == 3:
loss_u = (F.cross_entropy(logits_u, lbs_u, reduction='none') * mask_u).mean()
else:
loss_u = criteria_u(logits_u, lbs_u)
else: # n_u == 0
logits_x, _, _ = model(ims_x_weak)
loss_x = criteria_x(logits_x, lbs_x)
loss_u = torch.tensor(0)
else: #lam_u == 0: contrastive learning only
n_x, n_all = ims_x_weak.size(0), ims_all_1.size(0)
ims_x_all_1 = torch.cat([ims_x_weak, ims_all_1], dim=0).detach()
ims_x_all_2 = torch.cat([ims_x_weak, ims_all_2], dim=0).detach()
logits_x_all_1, fv_1, z_1 = model(ims_x_all_1)
logits_x_all_2, fv_2, z_2 = model(ims_x_all_2)
logits_x_all = (logits_x_all_1 + logits_x_all_2) / 2
logits_x = logits_x_all[:n_x]
loss_x = criteria_x(logits_x, lbs_x)
loss_u = torch.tensor(0)
if args.lam_clr >= epsilon:
#compute l_clr
fv_1 = fv_1[(n_x + n_u):]
fv_2 = fv_2[(n_x + n_u):]
z_1 = z_1[(n_x + n_u):]
z_2 = z_2[(n_x + n_u):]
#[2*muc*B, D]
z = torch.cat([z_1, z_2], dim=0)
#[2*muc*B, 2*muc*B]
sim_matrix = torch.exp(torch.mm(z, z.t().contiguous()) / args.temperature) #denominator
#[2*muc*B, 2*muc*B]
# mask = (torch.ones_like(sim_matrix) - torch.eye(2 * args.mu_c * args.batchsize, device=sim_matrix.device)).bool()
mask = (torch.ones_like(sim_matrix) - torch.eye(2 * args.mu_c * args.batchsize, device=sim_matrix.device))
mask = mask > 0
#[2*muc*B, 2*muc*B - 1]
sim_matrix = sim_matrix.masked_select(mask).view(2 * args.mu_c * args.batchsize, -1)
#[muc*B]
pos_sim = torch.exp(torch.sum(z_1 * z_2, dim=-1) / args.temperature) #numerator
#[2*muc*B]
pos_sim = torch.cat([pos_sim, pos_sim], dim=0)
loss_clr = (- torch.log(pos_sim / sim_matrix.sum(dim=-1))).mean()
#compute loss
loss = args.lam_x * loss_x + args.lam_u * loss_u + args.lam_clr * loss_clr
optim.zero_grad()
loss.backward()
optim.step()
ema.update_params()
lr_schdlr.step()
loss_x_avg.append(loss_x.item())
loss_u_avg.append(loss_u.item())
loss_clr_avg.append(loss_clr.item())
loss_avg.append(loss.item())
ema.update_buffer()
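
# For reference, the contrastive term above is the standard NT-Xent loss. A
# standalone restatement (illustrative sketch; assumes z1 and z2 are the two
# equally-sized, L2-normalized projection batches):
def nt_xent(z1, z2, t=0.5):
    z = torch.cat([z1, z2], dim=0)                             # [2B, D]
    sim = torch.exp(torch.mm(z, z.t()) / t)                    # all pairwise similarities
    mask = ~torch.eye(z.size(0), dtype=torch.bool, device=z.device)
    denom = sim.masked_select(mask).view(z.size(0), -1).sum(dim=-1)
    pos = torch.exp((z1 * z2).sum(dim=-1) / t)                 # positive-pair similarity
    pos = torch.cat([pos, pos], dim=0)                         # [2B]
    return (-torch.log(pos / denom)).mean()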
def evaluate(ema):
ema.apply_shadow()
ema.model.eval()
ema.model.cuda()
dlval = get_val_loader(batch_size=128, num_workers=0)
matches = []
for ims, lbs in dlval:
ims = ims.cuda()
lbs = lbs.cuda()
with torch.no_grad():
logits, _, _ = ema.model(ims)
scores = torch.softmax(logits, dim=1)
_, preds = torch.max(scores, dim=1)
match = lbs == preds
matches.append(match)
matches = torch.cat(matches, dim=0).float()
acc = torch.mean(matches)
ema.restore()
return acc
def test(model, memory_data_loader, test_data_loader, c, epoch):
model.eval()
total_top1, total_top5, total_num, feature_bank, feature_labels = 0.0, 0.0, 0, [], []
with torch.no_grad():
# generate feature bank
for data, _, _ in tqdm(memory_data_loader, desc='Feature extracting'):
logits, feature, _ = model(data.cuda(non_blocking=True))
feature_bank.append(feature)
feature_labels.append(torch.tensor(torch.argmax(logits,dim=1),dtype=torch.int64))
# [D, N]
feature_bank = torch.cat(feature_bank, dim=0).t().contiguous()
# [N]
feature_labels = torch.cat(feature_labels, dim=0).contiguous().cpu()
# loop test data to predict the label by weighted knn search
test_bar = tqdm(test_data_loader)
for data, _, target in test_bar:
# data, target = data.cuda(non_blocking=True), target.cuda(non_blocking=True)
data = data.cuda(non_blocking=True)
_, feature, _ = model(data)
total_num += data.size(0)
# compute cos similarity between each feature vector and feature bank ---> [B, N]
sim_matrix = torch.mm(feature, feature_bank)
# [B, K]
sim_weight, sim_indices = sim_matrix.topk(k=args.k, dim=-1)
# [B, K]
# sim_labels = torch.gather(feature_labels.expand(data.size(0), -1), dim=-1, index=sim_indices)
sim_labels = torch.gather(feature_labels.expand(data.size(0), -1), dim=-1, index=sim_indices.cpu())
sim_weight = (sim_weight / args.temperature).exp()
# counts for each class
one_hot_label = torch.zeros(data.size(0) * args.k, c, device=sim_labels.device)
# [B*K, C]
one_hot_label = one_hot_label.scatter(-1, sim_labels.view(-1, 1), 1.0)
# weighted score ---> [B, C]
pred_scores = torch.sum(one_hot_label.view(data.size(0), -1, c) * sim_weight.cpu().unsqueeze(dim=-1), dim=1)
pred_labels = pred_scores.argsort(dim=-1, descending=True)
total_top1 += torch.sum((pred_labels[:, :1] == target.unsqueeze(dim=-1)).any(dim=-1).float()).item()
test_bar.set_description('Test Epoch: [{}/{}] Acc@1:{:.2f}%'
.format(epoch, args.n_epochs, total_top1 / total_num * 100))
return total_top1 / total_num * 100
def get_random_string(length):
letters = string.ascii_lowercase
result_str = ''.join(random.choice(letters) for i in range(length))
return result_str
def sort_unlabeled(ema,numPerClass):
ema.apply_shadow()
ema.model.eval()
ema.model.cuda()
n_iters_per_epoch = args.n_imgs_per_epoch // args.batchsize
_, _, dltrain_all = get_train_loader(args.batchsize, 1, 1, n_iters_per_epoch, L=args.n_classes*numPerClass, seed=args.seed)
predicted = []
labels = []
for ims_w, _, _, _, lbs in dltrain_all:
ims = ims_w.cuda()
labels.append(lbs)
with torch.no_grad():
logits, _, _ = ema.model(ims)
scores = torch.softmax(logits, dim=1)
predicted.append(scores.cpu())
print( "labels ",len(labels))
labels = np.concatenate(labels, axis=0)
print( "labels ",len(labels))
predicted = np.concatenate( predicted, axis=0)
preds = predicted.argmax(1)
probs = predicted.max(1)
top = np.argsort(-probs,axis=0)
del dltrain_all, logits
labeledSize =args.n_classes * numPerClass
unique_train_pseudo_labels, unique_train_counts = np.unique(preds, return_counts=True)
print("Number of training pseudo-labels in each class: ", unique_train_counts," for classes: ", unique_train_pseudo_labels)
sortByClass = np.random.randint(0,high=len(top), size=(args.n_classes, numPerClass), dtype=int)
indx = np.zeros([args.n_classes], dtype=int)
matches = np.zeros([args.n_classes, numPerClass], dtype=int)
labls = preds[top]
samples = top
for i in range(len(top)):
if indx[labls[i]] < numPerClass:
sortByClass[labls[i], indx[labls[i]]] = samples[i]
if labls[i] == labels[top[i]]:
matches[labls[i], indx[labls[i]]] = 1
indx[labls[i]] += 1
if min(indx) < numPerClass:
print("Counts of at least one class ", indx, " is lower than ", numPerClass)
name = "dataset/seeds/size"+str(labeledSize)+"." + get_random_string(8) + ".npy"
np.save(name, sortByClass[0:args.n_classes, :numPerClass])
classAcc = 100*np.sum(matches, axis=1)/numPerClass
print("Accuracy of the predicted pseudo-labels: top ", labeledSize, ", ", np.mean(classAcc), classAcc )
ema.restore()
return name
def train():
n_iters_per_epoch = args.n_imgs_per_epoch // args.batchsize
n_iters_all = n_iters_per_epoch * args.n_epochs #/ args.mu_c
epsilon = 0.000001
model, criteria_x, criteria_u = set_model()
lb_guessor = LabelGuessor(thresh=args.thr)
ema = EMA(model, args.ema_alpha)
wd_params, non_wd_params = [], []
for param in model.parameters():
if len(param.size()) == 1:
non_wd_params.append(param)
else:
wd_params.append(param)
param_list = [{'params': wd_params}, {'params': non_wd_params, 'weight_decay': 0}]
optim = torch.optim.SGD(param_list, lr=args.lr, weight_decay=args.weight_decay, momentum=args.momentum, nesterov=True)
lr_schdlr = WarmupCosineLrScheduler(optim, max_iter=n_iters_all, warmup_iter=0)
dltrain_x, dltrain_u, dltrain_all = get_train_loader(args.batchsize, args.mu, args.mu_c, n_iters_per_epoch,
L=args.n_labeled, seed=args.seed)
train_args = dict(
model=model,
criteria_x=criteria_x,
criteria_u=criteria_u,
optim=optim,
lr_schdlr=lr_schdlr,
ema=ema,
dltrain_x=dltrain_x,
dltrain_u=dltrain_u,
dltrain_all=dltrain_all,
lb_guessor=lb_guessor,
)
n_labeled = int(args.n_labeled / args.n_classes)
best_acc, top1 = -1, -1
results = {'top 1 acc': [], 'best_acc': []}
b_schedule = [args.n_epochs/2, 3*args.n_epochs/4]
if args.boot_schedule == 1:
step = int(args.n_epochs/3)
b_schedule = [step, 2*step]
elif args.boot_schedule == 2:
step = int(args.n_epochs/4)
b_schedule = [step, 2*step, 3*step]
for e in range(args.n_epochs):
if args.bootstrap > 1 and (e in b_schedule):
seed = 99
n_labeled *= args.bootstrap
name = sort_unlabeled(ema, n_labeled)
print("Bootstrap at epoch ", e," Name = ",name)
dltrain_x, dltrain_u, dltrain_all = get_train_loader(args.batchsize, args.mu, args.mu_c, n_iters_per_epoch,
L=10*n_labeled, seed=seed, name=name)
train_args = dict(
model=model,
criteria_x=criteria_x,
criteria_u=criteria_u,
optim=optim,
lr_schdlr=lr_schdlr,
ema=ema,
dltrain_x=dltrain_x,
dltrain_u=dltrain_u,
dltrain_all=dltrain_all,
lb_guessor=lb_guessor,
)
model.train()
train_one_epoch(**train_args)
torch.cuda.empty_cache()
if args.test == 0 or args.lam_clr < epsilon:
top1 = evaluate(ema) * 100
elif args.test == 1:
memory_data = utils.CIFAR10Pair(root='dataset', train=True, transform=utils.test_transform, download=False)
memory_data_loader = DataLoader(memory_data, batch_size=args.batchsize, shuffle=False, num_workers=16, pin_memory=True)
test_data = utils.CIFAR10Pair(root='dataset', train=False, transform=utils.test_transform, download=False)
test_data_loader = DataLoader(test_data, batch_size=args.batchsize, shuffle=False, num_workers=16, pin_memory=True)
c = len(memory_data.classes) #10
top1 = test(model, memory_data_loader, test_data_loader, c, e)
best_acc = top1 if best_acc < top1 else best_acc
results['top 1 acc'].append('{:.4f}'.format(top1))
results['best_acc'].append('{:.4f}'.format(best_acc))
data_frame = pd.DataFrame(data=results)
data_frame.to_csv(result_dir + '/' + save_name_pre + '.accuracy.csv', index_label='epoch')
log_msg = [
'epoch: {}'.format(e + 1),
'top 1 acc: {:.4f}'.format(top1),
'best_acc: {:.4f}'.format(best_acc)]
print(', '.join(log_msg))
if __name__ == '__main__':
train()
Frightera/LR-and-NN-for-Cancer-Data | Logistic Regression/main.py | 54f8c9455af529c512efe012d8b3ed3f6b594a57

import pandas as pd
import numpy as np
import seaborn as sns
import matplotlib.pyplot as plt
data = pd.read_csv("data.csv")
data.info()
"""
Data columns (total 33 columns):
# Column Non-Null Count Dtype
--- ------ -------------- -----
0 id 569 non-null int64
.
.
.
32 Unnamed: 32 0 non-null float64
"""
data.drop(["Unnamed: 32", "id"], axis = 1, inplace = True)
# data.head(10)
data.diagnosis = [1 if each == "M" else 0 for each in data.diagnosis]
y = data.diagnosis.values
x_data = data.drop(["diagnosis"], axis = 1)
# %% Normalization
x_normalized = (x_data - x_data.min()) / (x_data.max() - x_data.min())
x_data.head()
"""
x_data.head()
Out[9]:
radius_mean texture_mean ... symmetry_worst fractal_dimension_worst
0 17.99 10.38 ... 0.4601 0.11890
1 20.57 17.77 ... 0.2750 0.08902
2 19.69 21.25 ... 0.3613 0.08758
3 11.42 20.38 ... 0.6638 0.17300
4 20.29 14.34 ... 0.2364 0.07678
"""
x_normalized.head()
"""
x_normalized.head()
Out[10]:
radius_mean texture_mean ... symmetry_worst fractal_dimension_worst
0 0.521037 0.022658 ... 0.598462 0.418864
1 0.643144 0.272574 ... 0.233590 0.222878
2 0.601496 0.390260 ... 0.403706 0.213433
3 0.210090 0.360839 ... 1.000000 0.773711
4 0.629893 0.156578 ... 0.157500 0.142595
"""
# %% train test split
from sklearn.model_selection import train_test_split
x_train, x_test, y_train, y_test = train_test_split(x_normalized,y,test_size = 0.25, random_state = 42)
# test size & random state can be changed, test size can be choosen as 0.2 or 0.18
# sklearn randomly splits, with given state data will be splitted with same random pattern.
# transpose so that features are rows and samples are columns
x_train = x_train.T
x_test = x_test.T
y_train = y_train.T
y_test = y_test.T
# %% Parameter Initialize
"""
If all the weights were initialized to zero,
backpropagation would not work as expected, because the gradients for the intermediate
and starting neurons would die out (become zero) and the weights would never update.
"""
def initialize_weights_and_bias(dimension):
w = np.full((dimension,1), 0.01) # init 0.01
b = np.zeros(1)
return w,b
def sigmoid(n):
y_hat = 1 / (1 + np.exp(-n))
return y_hat
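# Optional sanity check — an added sketch with illustrative values: sigmoid(0)
# is exactly 0.5, and the weight vector is a (dimension, 1) column.
w_demo, b_demo = initialize_weights_and_bias(30)  # this dataset has 30 feature columns
assert w_demo.shape == (30, 1) and b_demo[0] == 0
assert sigmoid(0.0) == 0.5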
# %%
def forward_backward_propagation(w,b,x_train,y_train):
# forward propagation
z = np.dot(w.T,x_train) + b
#y_train = y_train.T.reshape(-1,1)
y_hat = sigmoid(z)
loss = -(y_train*np.log(y_hat)+(1-y_train)*np.log(1-y_hat))
    cost = np.sum(loss)/x_train.shape[1] # average over the number of samples
# Once cost is calculated, forward prop. is completed.
# backward propagation
    derivative_weight = np.dot(x_train,((y_hat-y_train).T))/x_train.shape[1] # average over the number of samples
    derivative_bias = np.sum(y_hat-y_train)/x_train.shape[1] # average over the number of samples
    # x_train.shape[1] = 426 training samples here
gradients = {"derivative_weight": derivative_weight,"derivative_bias": derivative_bias}
return cost,gradients
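# Optional numerical gradient check — an added minimal sketch on tiny synthetic
# data (the shapes, seed and epsilon below are illustrative assumptions, not part
# of the original pipeline). It compares the analytic gradient returned above
# with a centered finite difference for a single weight.
def gradient_check(epsilon=1e-7):
    rng = np.random.RandomState(0)
    x_demo = rng.rand(3, 5)               # 3 features, 5 samples
    y_demo = np.array([0, 1, 0, 1, 1])
    w_demo, b_demo = initialize_weights_and_bias(3)
    _, gradients = forward_backward_propagation(w_demo, b_demo, x_demo, y_demo)
    # perturb the first weight in both directions and difference the costs
    w_plus, w_minus = w_demo.copy(), w_demo.copy()
    w_plus[0, 0] += epsilon
    w_minus[0, 0] -= epsilon
    cost_plus, _ = forward_backward_propagation(w_plus, b_demo, x_demo, y_demo)
    cost_minus, _ = forward_backward_propagation(w_minus, b_demo, x_demo, y_demo)
    numeric = (cost_plus - cost_minus) / (2 * epsilon)
    print("analytic: %f numeric: %f" % (gradients["derivative_weight"][0, 0], numeric))
gradient_check()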
# Updating(learning) parameters
def update(w, b, x_train, y_train, learning_rate,number_of_iteration):
cost_list = []
cost_list2 = []
index = []
# updating(learning) parameters is number_of_iterarion times
for i in range(number_of_iteration):
# make forward and backward propagation and find cost and gradients
cost,gradients = forward_backward_propagation(w,b,x_train,y_train)
cost_list.append(cost)
# lets update
w = w - learning_rate * gradients["derivative_weight"]
b = b - learning_rate * gradients["derivative_bias"]
        if i % 100 == 0: # report every 100 iterations (arbitrary; adjust as needed)
            cost_list2.append(cost)
            index.append(i)
            print("Cost after iteration %i: %f" % (i, cost))
# we update(learn) parameters weights and bias
parameters = {"weight": w,"bias": b}
    plt.plot(index,cost_list2, label="cost") # label the curve so plt.legend() has a handle
plt.xticks(index,rotation='vertical')
plt.xlabel("Number of Iteration")
plt.ylabel("Cost")
plt.legend()
plt.show()
return parameters, gradients, cost_list
# prediction
def predict(w,b,x_test):
    # x_test is an input for forward propagation
    z = sigmoid(np.dot(w.T,x_test)+b) # z holds the predicted probabilities
    Y_prediction = np.zeros((1,x_test.shape[1]))
    # if the probability is greater than 0.5, predict class one - true (y_hat=1),
    # if it is 0.5 or smaller, predict class zero - false (y_hat=0),
for i in range(z.shape[1]):
if z[0,i]<= 0.5:
Y_prediction[0,i] = 0
else:
Y_prediction[0,i] = 1
return Y_prediction
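# Example (illustrative, values assumed from the 0.25 split above): with learned
# parameters, predict(parameters["weight"], parameters["bias"], x_test) returns a
# (1, 143) array of 0/1 labels for the 143-sample test split.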
#implementing logistic regression
def logistic_regression(x_train, y_train, x_test, y_test, learning_rate , num_iterations):
# initialize
dimension = x_train.shape[0]
w,b = initialize_weights_and_bias(dimension)
    # learn the parameters via gradient descent with the given learning rate
parameters, gradients, cost_list = update(w, b, x_train, y_train, learning_rate,num_iterations)
y_prediction_test = predict(parameters["weight"],parameters["bias"],x_test)
y_pred_train = predict(parameters["weight"],parameters["bias"],x_train)
# Print accuracy
print("test accuracy: {} %".format(100 - np.mean(np.abs(y_prediction_test - y_test)) * 100))
print("train accuracy: {} %".format(100 - np.mean(np.abs(y_pred_train - y_train)) * 100))
# %% Hyperparameter tuning
logistic_regression(x_train, y_train, x_test, y_test,learning_rate = 3, num_iterations = 1500)
"""
Cost after iteration 0: 0.693035
Cost after iteration 100: 0.153169
Cost after iteration 200: 0.121662
Cost after iteration 300: 0.107146
Cost after iteration 400: 0.098404
Cost after iteration 500: 0.092401
Cost after iteration 600: 0.087937
Cost after iteration 700: 0.084435
Cost after iteration 800: 0.081582
Cost after iteration 900: 0.079191
Cost after iteration 1000: 0.077143
Cost after iteration 1100: 0.075359
Cost after iteration 1200: 0.073784
Cost after iteration 1300: 0.072378
Cost after iteration 1400: 0.071111
No handles with labels found to put in legend.
test accuracy: 98.6013986013986 %
train accuracy: 98.35680751173709 %
"""
logistic_regression(x_train, y_train, x_test, y_test,learning_rate = 1, num_iterations = 1500)
"""
Cost after iteration 0: 0.693035
Cost after iteration 100: 0.226383
Cost after iteration 200: 0.176670
Cost after iteration 300: 0.153585
Cost after iteration 400: 0.139306
Cost after iteration 500: 0.129319
Cost after iteration 600: 0.121835
Cost after iteration 700: 0.115963
Cost after iteration 800: 0.111204
Cost after iteration 900: 0.107248
No handles with labels found to put in legend.
Cost after iteration 1000: 0.103893
Cost after iteration 1100: 0.101001
Cost after iteration 1200: 0.098474
Cost after iteration 1300: 0.096240
Cost after iteration 1400: 0.094247
test accuracy: 97.9020979020979 %
train accuracy: 98.12206572769954 %
"""
logistic_regression(x_train, y_train, x_test, y_test,learning_rate = 0.3, num_iterations = 1500)
"""
Cost after iteration 0: 0.693035
Cost after iteration 100: 0.357455
Cost after iteration 200: 0.274917
Cost after iteration 300: 0.235865
Cost after iteration 400: 0.212165
Cost after iteration 500: 0.195780
Cost after iteration 600: 0.183524
Cost after iteration 700: 0.173868
Cost after iteration 800: 0.165980
Cost after iteration 900: 0.159363
Cost after iteration 1000: 0.153700
Cost after iteration 1100: 0.148775
Cost after iteration 1200: 0.144439
Cost after iteration 1300: 0.140581
Cost after iteration 1400: 0.137119
No handles with labels found to put in legend.
test accuracy: 97.9020979020979 %
train accuracy: 96.94835680751174 %
"""
# %% Sklearn
from sklearn.linear_model import LogisticRegression
x_train = x_train.T
x_test = x_test.T
y_train = y_train.T
y_test = y_test.T
logreg = LogisticRegression(random_state = 42,max_iter= 1500)
print("test accuracy: {} ".format(logreg.fit(x_train, y_train).score(x_test, y_test)))
print("train accuracy: {} ".format(logreg.fit(x_train, y_train).score(x_train, y_train)))
"""
test accuracy: 0.986013986013986
train accuracy: 0.9671361502347418
"""
# %%
| [((6, 7, 6, 30), 'pandas.read_csv', 'pd.read_csv', ({(6, 19, 6, 29): '"""data.csv"""'}, {}), "('data.csv')", True, 'import pandas as pd\n'), ((54, 35, 54, 103), 'sklearn.model_selection.train_test_split', 'train_test_split', (), '', False, 'from sklearn.model_selection import train_test_split\n'), ((228, 9, 228, 61), 'sklearn.linear_model.LogisticRegression', 'LogisticRegression', (), '', False, 'from sklearn.linear_model import LogisticRegression\n'), ((70, 8, 70, 36), 'numpy.full', 'np.full', ({(70, 16, 70, 29): '(dimension, 1)', (70, 31, 70, 35): '0.01'}, {}), '((dimension, 1), 0.01)', True, 'import numpy as np\n'), ((71, 8, 71, 19), 'numpy.zeros', 'np.zeros', ({(71, 17, 71, 18): '1'}, {}), '(1)', True, 'import numpy as np\n'), ((115, 4, 115, 30), 'matplotlib.pyplot.plot', 'plt.plot', ({(115, 13, 115, 18): 'index', (115, 19, 115, 29): 'cost_list2'}, {}), '(index, cost_list2)', True, 'import matplotlib.pyplot as plt\n'), ((116, 4, 116, 41), 'matplotlib.pyplot.xticks', 'plt.xticks', (), '', True, 'import matplotlib.pyplot as plt\n'), ((117, 4, 117, 37), 'matplotlib.pyplot.xlabel', 'plt.xlabel', ({(117, 15, 117, 36): '"""Number of Iteration"""'}, {}), "('Number of Iteration')", True, 'import matplotlib.pyplot as plt\n'), ((118, 4, 118, 22), 'matplotlib.pyplot.ylabel', 'plt.ylabel', ({(118, 15, 118, 21): '"""Cost"""'}, {}), "('Cost')", True, 'import matplotlib.pyplot as plt\n'), ((119, 4, 119, 16), 'matplotlib.pyplot.legend', 'plt.legend', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((120, 4, 120, 14), 'matplotlib.pyplot.show', 'plt.show', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((127, 19, 127, 48), 'numpy.zeros', 'np.zeros', ({(127, 28, 127, 47): '(1, x_test.shape[1])'}, {}), '((1, x_test.shape[1]))', True, 'import numpy as np\n'), ((27, 25, 27, 39), 'numpy.min', 'np.min', ({(27, 32, 27, 38): 'x_data'}, {}), '(x_data)', True, 'import numpy as np\n'), ((81, 8, 81, 27), 'numpy.dot', 'np.dot', ({(81, 15, 81, 18): 'w.T', (81, 19, 81, 26): 'x_train'}, {}), '(w.T, x_train)', True, 'import numpy as np\n'), ((85, 12, 85, 24), 'numpy.sum', 'np.sum', ({(85, 19, 85, 23): 'loss'}, {}), '(loss)', True, 'import numpy as np\n'), ((89, 25, 89, 60), 'numpy.dot', 'np.dot', ({(89, 32, 89, 39): 'x_train', (89, 41, 89, 58): '(y_hat - y_train).T'}, {}), '(x_train, (y_hat - y_train).T)', True, 'import numpy as np\n'), ((90, 22, 90, 43), 'numpy.sum', 'np.sum', ({(90, 29, 90, 42): '(y_hat - y_train)'}, {}), '(y_hat - y_train)', True, 'import numpy as np\n'), ((27, 44, 27, 58), 'numpy.max', 'np.max', ({(27, 51, 27, 57): 'x_data'}, {}), '(x_data)', True, 'import numpy as np\n'), ((27, 61, 27, 75), 'numpy.min', 'np.min', ({(27, 68, 27, 74): 'x_data'}, {}), '(x_data)', True, 'import numpy as np\n'), ((75, 21, 75, 31), 'numpy.exp', 'np.exp', ({(75, 28, 75, 30): '(-n)'}, {}), '(-n)', True, 'import numpy as np\n'), ((126, 16, 126, 34), 'numpy.dot', 'np.dot', ({(126, 23, 126, 26): 'w.T', (126, 27, 126, 33): 'x_test'}, {}), '(w.T, x_test)', True, 'import numpy as np\n'), ((84, 21, 84, 34), 'numpy.log', 'np.log', ({(84, 28, 84, 33): 'y_hat'}, {}), '(y_hat)', True, 'import numpy as np\n'), ((84, 47, 84, 62), 'numpy.log', 'np.log', ({(84, 54, 84, 61): '(1 - y_hat)'}, {}), '(1 - y_hat)', True, 'import numpy as np\n'), ((149, 53, 149, 87), 'numpy.abs', 'np.abs', ({(149, 60, 149, 86): '(y_prediction_test - y_test)'}, {}), '(y_prediction_test - y_test)', True, 'import numpy as np\n'), ((150, 54, 150, 84), 'numpy.abs', 'np.abs', ({(150, 61, 150, 83): '(y_pred_train - y_train)'}, {}), '(y_pred_train - y_train)', True, 'import numpy as np\n')] |
LinHuiqing/nonparaSeq2seqVC_code | fine-tune/inference_embedding.py | d40a0cb9dc11c77b8af56b8510e4ab041f2f2b25 | import os
import numpy as np
import torch
import argparse
from hparams import create_hparams
from model import lcm
from train import load_model
from torch.utils.data import DataLoader
from reader import TextMelIDLoader, TextMelIDCollate, id2sp
from inference_utils import plot_data
parser = argparse.ArgumentParser()
parser.add_argument('-c', '--checkpoint_path', type=str,
                    help='path of the checkpoint to load')
parser.add_argument('--hparams', type=str,
required=False, help='comma separated name=value pairs')
args = parser.parse_args()
checkpoint_path = args.checkpoint_path
hparams = create_hparams(args.hparams)
model = load_model(hparams)
model.load_state_dict(torch.load(checkpoint_path)['state_dict'], strict=False)
_ = model.eval()
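# Note (added comment): strict=False tolerates checkpoint keys that do not match
# the model, and eval() switches off dropout/batch-norm updates for inference.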
def gen_embedding(speaker):
training_list = hparams.training_list
train_set_A = TextMelIDLoader(training_list, hparams.mel_mean_std, hparams.speaker_A,
hparams.speaker_B,
shuffle=False,pids=[speaker])
collate_fn = TextMelIDCollate(lcm(hparams.n_frames_per_step_encoder,
hparams.n_frames_per_step_decoder))
train_loader_A = DataLoader(train_set_A, num_workers=1, shuffle=False,
sampler=None,
batch_size=1, pin_memory=False,
drop_last=True, collate_fn=collate_fn)
with torch.no_grad():
speaker_embeddings = []
for i,batch in enumerate(train_loader_A):
x, y = model.parse_batch(batch)
text_input_padded, mel_padded, text_lengths, mel_lengths, speaker_id = x
speaker_id, speaker_embedding = model.speaker_encoder.inference(mel_padded)
speaker_embedding = speaker_embedding.data.cpu().numpy()
speaker_embeddings.append(speaker_embedding)
speaker_embeddings = np.vstack(speaker_embeddings)
print(speaker_embeddings.shape)
if not os.path.exists('outdir/embeddings'):
os.makedirs('outdir/embeddings')
np.save('outdir/embeddings/%s.npy'%speaker, speaker_embeddings)
plot_data([speaker_embeddings],
'outdir/embeddings/%s.pdf'%speaker)
print('Generating embedding of %s ...'%hparams.speaker_A)
gen_embedding(hparams.speaker_A)
print('Generating embedding of %s ...'%hparams.speaker_B)
gen_embedding(hparams.speaker_B)
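# Optional follow-up — an added illustrative sketch (file names follow the save
# above): average the per-utterance embeddings into one vector per speaker,
# e.g. for downstream similarity checks.
emb_A = np.load('outdir/embeddings/%s.npy' % hparams.speaker_A)
print('Mean embedding of %s has shape %s' % (hparams.speaker_A, emb_A.mean(axis=0).shape))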
| [((13, 9, 13, 34), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ({}, {}), '()', False, 'import argparse\n'), ((22, 10, 22, 38), 'hparams.create_hparams', 'create_hparams', ({(22, 25, 22, 37): 'args.hparams'}, {}), '(args.hparams)', False, 'from hparams import create_hparams\n'), ((24, 8, 24, 27), 'train.load_model', 'load_model', ({(24, 19, 24, 26): 'hparams'}, {}), '(hparams)', False, 'from train import load_model\n'), ((33, 18, 35, 41), 'reader.TextMelIDLoader', 'TextMelIDLoader', (), '', False, 'from reader import TextMelIDLoader, TextMelIDCollate, id2sp\n'), ((40, 21, 43, 70), 'torch.utils.data.DataLoader', 'DataLoader', (), '', False, 'from torch.utils.data import DataLoader\n'), ((64, 4, 64, 67), 'numpy.save', 'np.save', ({(64, 12, 64, 46): "('outdir/embeddings/%s.npy' % speaker)", (64, 48, 64, 66): 'speaker_embeddings'}, {}), "('outdir/embeddings/%s.npy' % speaker, speaker_embeddings)", True, 'import numpy as np\n'), ((65, 4, 66, 43), 'inference_utils.plot_data', 'plot_data', ({(65, 14, 65, 34): '[speaker_embeddings]', (66, 8, 66, 42): "('outdir/embeddings/%s.pdf' % speaker)"}, {}), "([speaker_embeddings], 'outdir/embeddings/%s.pdf' % speaker)", False, 'from inference_utils import plot_data\n'), ((25, 22, 25, 49), 'torch.load', 'torch.load', ({(25, 33, 25, 48): 'checkpoint_path'}, {}), '(checkpoint_path)', False, 'import torch\n'), ((37, 34, 38, 62), 'model.lcm', 'lcm', ({(37, 38, 37, 71): 'hparams.n_frames_per_step_encoder', (38, 28, 38, 61): 'hparams.n_frames_per_step_decoder'}, {}), '(hparams.n_frames_per_step_encoder, hparams.n_frames_per_step_decoder)', False, 'from model import lcm\n'), ((45, 9, 45, 24), 'torch.no_grad', 'torch.no_grad', ({}, {}), '()', False, 'import torch\n'), ((58, 29, 58, 58), 'numpy.vstack', 'np.vstack', ({(58, 39, 58, 57): 'speaker_embeddings'}, {}), '(speaker_embeddings)', True, 'import numpy as np\n'), ((61, 11, 61, 46), 'os.path.exists', 'os.path.exists', ({(61, 26, 61, 45): '"""outdir/embeddings"""'}, {}), "('outdir/embeddings')", False, 'import os\n'), ((62, 8, 62, 40), 'os.makedirs', 'os.makedirs', ({(62, 20, 62, 39): '"""outdir/embeddings"""'}, {}), "('outdir/embeddings')", False, 'import os\n')] |
DALME/dalme | dalme_app/migrations/0001_initial.py | 46f9a0011fdb75c5098b552104fc73b1062e16e9 | # Generated by Django 3.1.2 on 2020-11-29 13:25
import dalme_app.models._templates
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django_currentuser.middleware
import uuid
import wagtail.search.index
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('auth', '0012_alter_user_first_name_max_length'),
('contenttypes', '0002_remove_content_type_name'),
]
operations = [
migrations.CreateModel(
name='rs_collection',
fields=[
('ref', models.IntegerField(primary_key=True, serialize=False)),
('name', models.CharField(max_length=100, null=True)),
('user', models.IntegerField(null=True)),
('created', models.DateTimeField(blank=True, null=True)),
('public', models.IntegerField(default='0')),
('theme', models.CharField(max_length=100, null=True)),
('theme2', models.CharField(max_length=100, null=True)),
('theme3', models.CharField(max_length=100, null=True)),
('allow_changes', models.IntegerField(default='0')),
('cant_delete', models.IntegerField(default='0')),
('keywords', models.TextField()),
('savedsearch', models.IntegerField(null=True)),
('home_page_publish', models.IntegerField(null=True)),
('home_page_text', models.TextField()),
('home_page_image', models.IntegerField(null=True)),
('session_id', models.IntegerField(null=True)),
('theme4', models.CharField(max_length=100, null=True)),
('theme5', models.CharField(max_length=100, null=True)),
('theme6', models.CharField(max_length=100, null=True)),
('theme7', models.CharField(max_length=100, null=True)),
('theme8', models.CharField(max_length=100, null=True)),
('theme9', models.CharField(max_length=100, null=True)),
('theme10', models.CharField(max_length=100, null=True)),
('theme11', models.CharField(max_length=100, null=True)),
('theme12', models.CharField(max_length=100, null=True)),
('theme13', models.CharField(max_length=100, null=True)),
('theme14', models.CharField(max_length=100, null=True)),
('theme15', models.CharField(max_length=100, null=True)),
('theme16', models.CharField(max_length=100, null=True)),
('theme17', models.CharField(max_length=100, null=True)),
('theme18', models.CharField(max_length=100, null=True)),
('theme19', models.CharField(max_length=100, null=True)),
('theme20', models.CharField(max_length=100, null=True)),
],
options={
'db_table': 'collection',
'managed': False,
},
),
migrations.CreateModel(
name='rs_collection_resource',
fields=[
('date_added', models.DateTimeField(auto_now_add=True, primary_key=True, serialize=False)),
('comment', models.TextField()),
('rating', models.IntegerField(null=True)),
('use_as_theme_thumbnail', models.IntegerField(null=True)),
('purchase_size', models.CharField(max_length=10, null=True)),
('purchase_complete', models.IntegerField(default='0')),
('purchase_price', models.FloatField(default='0.00', max_length=10)),
('sortorder', models.IntegerField(null=True)),
],
options={
'db_table': 'collection_resource',
'managed': False,
},
),
migrations.CreateModel(
name='rs_resource',
fields=[
('ref', models.IntegerField(primary_key=True, serialize=False)),
('title', models.CharField(max_length=200, null=True)),
('resource_type', models.IntegerField(null=True)),
('has_image', models.IntegerField(default='0')),
('is_transcoding', models.IntegerField(default='0')),
('hit_count', models.IntegerField(default='0')),
('new_hit_count', models.IntegerField(default='0')),
('creation_date', models.DateTimeField(blank=True, null=True)),
('rating', models.IntegerField(null=True)),
('user_rating', models.IntegerField(null=True)),
('user_rating_count', models.IntegerField(null=True)),
('user_rating_total', models.IntegerField(null=True)),
('country', models.CharField(default=None, max_length=200, null=True)),
('file_extension', models.CharField(max_length=10, null=True)),
('preview_extension', models.CharField(max_length=10, null=True)),
('image_red', models.IntegerField(null=True)),
('image_green', models.IntegerField(null=True)),
('image_blue', models.IntegerField(null=True)),
('thumb_width', models.IntegerField(null=True)),
('thumb_height', models.IntegerField(null=True)),
('archive', models.IntegerField(default='0')),
('access', models.IntegerField(default='0')),
('colour_key', models.CharField(max_length=5, null=True)),
('created_by', models.IntegerField(null=True)),
('file_path', models.CharField(max_length=500, null=True)),
('file_modified', models.DateTimeField(blank=True, null=True)),
('file_checksum', models.CharField(max_length=32, null=True)),
('request_count', models.IntegerField(default='0')),
('expiry_notification_sent', models.IntegerField(default='0')),
('preview_tweaks', models.CharField(max_length=50, null=True)),
('geo_lat', models.FloatField(default=None, null=True)),
('geo_long', models.FloatField(default=None, null=True)),
('mapzoom', models.IntegerField(null=True)),
('disk_usage', models.IntegerField(null=True)),
('disk_usage_last_updated', models.DateTimeField(blank=True, null=True)),
('file_size', models.IntegerField(default=None, null=True)),
('preview_attempts', models.IntegerField(default=None, null=True)),
('field12', models.CharField(default=None, max_length=200, null=True)),
('field8', models.CharField(default=None, max_length=200, null=True)),
('field3', models.CharField(default=None, max_length=200, null=True)),
('annotation_count', models.IntegerField(null=True)),
('field51', models.CharField(default=None, max_length=200, null=True)),
('field79', models.CharField(blank=True, default=None, max_length=200, null=True)),
('modified', models.DateTimeField(auto_now_add=True, null=True)),
],
options={
'db_table': 'resource',
'managed': False,
},
),
migrations.CreateModel(
name='rs_resource_data',
fields=[
('django_id', models.IntegerField(db_column='django_id', primary_key=True, serialize=False)),
('value', models.TextField()),
],
options={
'db_table': 'resource_data',
'managed': False,
},
),
migrations.CreateModel(
name='rs_resource_type_field',
fields=[
('ref', models.IntegerField(primary_key=True, serialize=False)),
('name', models.CharField(max_length=50, null=True)),
('title', models.CharField(max_length=400, null=True)),
('type', models.IntegerField(null=True)),
('order_by', models.IntegerField(default='0')),
('keywords_index', models.IntegerField(default='0')),
('partial_index', models.IntegerField(default='0')),
('resource_type', models.IntegerField(default='0')),
('resource_column', models.CharField(max_length=50, null=True)),
('display_field', models.IntegerField(default='1')),
('use_for_similar', models.IntegerField(default='1')),
('iptc_equiv', models.CharField(max_length=20, null=True)),
('display_template', models.TextField()),
('tab_name', models.CharField(max_length=50, null=True)),
('required', models.IntegerField(default='0')),
('smart_theme_name', models.CharField(max_length=200, null=True)),
('exiftool_field', models.CharField(max_length=200, null=True)),
('advanced_search', models.IntegerField(default='1')),
('simple_search', models.IntegerField(default='0')),
('help_text', models.TextField()),
('display_as_dropdown', models.IntegerField(default='0')),
('external_user_access', models.IntegerField(default='1')),
('autocomplete_macro', models.TextField()),
('hide_when_uploading', models.IntegerField(default='0')),
('hide_when_restricted', models.IntegerField(default='0')),
('value_filter', models.TextField()),
('exiftool_filter', models.TextField()),
('omit_when_copying', models.IntegerField(default='0')),
('tooltip_text', models.TextField()),
('regexp_filter', models.CharField(max_length=400, null=True)),
('sync_field', models.IntegerField(null=True)),
('display_condition', models.CharField(max_length=400, null=True)),
('onchange_macro', models.TextField()),
('field_constraint', models.IntegerField(null=True)),
('linked_data_field', models.TextField()),
('automatic_nodes_ordering', models.IntegerField(default='0')),
('fits_field', models.CharField(max_length=255, null=True)),
('personal_data', models.IntegerField(default='0')),
],
options={
'db_table': 'resource_type_field',
'managed': False,
},
),
migrations.CreateModel(
name='rs_user',
fields=[
('ref', models.IntegerField(primary_key=True, serialize=False)),
('username', models.CharField(max_length=50, unique=True)),
('password', models.CharField(max_length=64, null=True)),
('fullname', models.CharField(max_length=100, null=True)),
('email', models.CharField(max_length=100, null=True)),
('usergroup', models.IntegerField(choices=[(2, 'General User'), (4, 'Archivist'), (1, 'Administrator'), (3, 'Super Admin')], null=True)),
('last_active', models.DateTimeField(blank=True, null=True)),
('logged_in', models.IntegerField(null=True)),
('last_browser', models.TextField()),
('last_ip', models.CharField(max_length=100, null=True)),
('current_collection', models.IntegerField(null=True)),
('accepted_terms', models.IntegerField(default='0')),
('account_expires', models.DateTimeField(blank=True, null=True)),
('comments', models.TextField()),
('session', models.CharField(max_length=50, null=True)),
('ip_restrict', models.TextField()),
('search_filter_override', models.TextField()),
('password_last_change', models.DateTimeField(null=True)),
('login_tries', models.IntegerField(default='0')),
('login_last_try', models.DateTimeField(blank=True, null=True)),
('approved', models.IntegerField(default='1')),
('lang', models.CharField(max_length=11, null=True)),
('created', models.DateTimeField(auto_now_add=True, null=True)),
('hidden_collections', models.TextField()),
('password_reset_hash', models.CharField(max_length=100, null=True)),
('origin', models.CharField(max_length=50, null=True)),
('unique_hash', models.CharField(max_length=50, null=True)),
('wp_authrequest', models.CharField(max_length=50, null=True)),
('csrf_token', models.CharField(max_length=255, null=True)),
],
options={
'db_table': 'user',
'managed': False,
},
),
migrations.CreateModel(
name='Agent',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('standard_name', models.CharField(max_length=255)),
('type', models.IntegerField(choices=[(1, 'Person'), (2, 'Organization')])),
('notes', models.TextField()),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_agent_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_agent_modification', to=settings.AUTH_USER_MODEL)),
('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='agent', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Attachment',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('file', models.FileField(upload_to='attachments/%Y/%m/')),
('type', models.CharField(max_length=255, null=True)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_attachment_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_attachment_modification', to=settings.AUTH_USER_MODEL)),
('owner', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_attachment_related', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Attribute_type',
fields=[
('id', models.AutoField(db_index=True, primary_key=True, serialize=False, unique=True)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('name', models.CharField(max_length=255)),
('short_name', models.CharField(max_length=55, unique=True)),
('description', models.TextField()),
('data_type', models.CharField(choices=[('DATE', 'DATE (date)'), ('INT', 'INT (integer)'), ('STR', 'STR (string)'), ('TXT', 'TXT (text)'), ('FK-UUID', 'FK-UUID (DALME record)'), ('FK-INT', 'FK-INT (DALME record)')], max_length=15)),
('source', models.CharField(blank=True, default=None, max_length=255, null=True)),
('options_list', models.CharField(blank=True, default=None, max_length=255, null=True)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_attribute_type_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_attribute_type_modification', to=settings.AUTH_USER_MODEL)),
('same_as', models.ForeignKey(db_column='same_as', null=True, on_delete=django.db.models.deletion.SET_NULL, to='dalme_app.attribute_type')),
],
options={
'ordering': ['id'],
},
),
migrations.CreateModel(
name='Concept',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('getty_id', models.IntegerField(db_index=True)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_concept_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_concept_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Content_attributes',
fields=[
('id', models.AutoField(db_index=True, primary_key=True, serialize=False, unique=True)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('order', models.IntegerField(db_index=True, null=True)),
('required', models.BooleanField(default=False)),
('unique', models.BooleanField(default=True)),
('attribute_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='content_types', to='dalme_app.attribute_type')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Content_class',
fields=[
('id', models.AutoField(db_index=True, primary_key=True, serialize=False, unique=True)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('name', models.CharField(max_length=255)),
('short_name', models.CharField(max_length=55, unique=True)),
('description', models.TextField()),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_content_class_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_content_class_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ['id'],
},
),
migrations.CreateModel(
name='Content_type',
fields=[
('id', models.AutoField(db_index=True, primary_key=True, serialize=False, unique=True)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('name', models.CharField(max_length=255, unique=True)),
('short_name', models.CharField(max_length=55)),
('description', models.TextField()),
('has_pages', models.BooleanField(db_index=True, default=False)),
('has_inventory', models.BooleanField(default=False)),
('parents', models.CharField(blank=True, default=None, max_length=255, null=True)),
('r1_inheritance', models.CharField(blank=True, default=None, max_length=255, null=True)),
('r2_inheritance', models.CharField(blank=True, default=None, max_length=255, null=True)),
('attribute_types', models.ManyToManyField(through='dalme_app.Content_attributes', to='dalme_app.Attribute_type')),
('content_class', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='dalme_app.content_class')),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_content_type_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_content_type_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ['id'],
},
),
migrations.CreateModel(
name='CountryReference',
fields=[
('id', models.AutoField(db_index=True, primary_key=True, serialize=False, unique=True)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('name', models.CharField(max_length=255, unique=True)),
('alpha_3_code', models.CharField(max_length=3)),
('alpha_2_code', models.CharField(max_length=2)),
('num_code', models.IntegerField()),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_countryreference_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_countryreference_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ['name'],
},
),
migrations.CreateModel(
name='Entity_phrase',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('phrase', models.TextField(blank=True)),
('type', models.IntegerField(choices=[(1, 'Agent'), (2, 'Object'), (3, 'Place')])),
('object_id', models.UUIDField(db_index=True, null=True)),
('content_type', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='contenttypes.contenttype')),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_entity_phrase_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_entity_phrase_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Headword',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('word', models.CharField(max_length=55)),
('full_lemma', models.CharField(max_length=255)),
('concept_id', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='dalme_app.concept')),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_headword_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_headword_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Object',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('concept', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dalme_app.concept')),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_object_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_object_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Page',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('name', models.CharField(max_length=55)),
('dam_id', models.IntegerField(db_index=True, null=True)),
('order', models.IntegerField(db_index=True)),
('canvas', models.TextField(null=True)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_page_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_page_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ['order'],
},
),
migrations.CreateModel(
name='Set',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('name', models.CharField(max_length=255)),
('set_type', models.IntegerField(choices=[(1, 'Corpus'), (2, 'Collection'), (3, 'Dataset'), (4, 'Workset')])),
('is_public', models.BooleanField(default=False)),
('has_landing', models.BooleanField(default=False)),
('endpoint', models.CharField(max_length=55)),
('permissions', models.IntegerField(choices=[(1, 'Private'), (2, 'Others: view'), (3, 'Others: view|add'), (4, 'Others: view|add|delete')], default=2)),
('description', models.TextField()),
('stat_title', models.CharField(blank=True, max_length=25, null=True)),
('stat_text', models.CharField(blank=True, max_length=255, null=True)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_set_creation', to=settings.AUTH_USER_MODEL)),
('dataset_usergroup', models.ForeignKey(limit_choices_to={'properties__type': 3}, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='dataset', to='auth.group')),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_set_modification', to=settings.AUTH_USER_MODEL)),
('owner', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_set_related', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Source',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('name', models.CharField(max_length=255)),
('short_name', models.CharField(max_length=55)),
('has_inventory', models.BooleanField(db_index=True, default=False)),
('is_private', models.BooleanField(db_index=True, default=False)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_source_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_source_modification', to=settings.AUTH_USER_MODEL)),
('owner', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_source_related', to=settings.AUTH_USER_MODEL)),
],
bases=(wagtail.search.index.Indexed, models.Model),
),
migrations.CreateModel(
name='Wordform',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('normalized_form', models.CharField(max_length=55)),
('pos', models.CharField(max_length=255)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_wordform_creation', to=settings.AUTH_USER_MODEL)),
('headword_id', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='dalme_app.headword')),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_wordform_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Transcription',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('transcription', models.TextField(blank=True, default=None)),
('author', models.CharField(default=dalme_app.models._templates.get_current_username, max_length=255)),
('version', models.IntegerField(default=1)),
('count_ignore', models.BooleanField(default=False)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_transcription_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_transcription_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Token',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('raw_token', models.CharField(max_length=255)),
('clean_token', models.CharField(max_length=55)),
('order', models.IntegerField(db_index=True)),
('flags', models.CharField(max_length=10)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_token_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_token_modification', to=settings.AUTH_USER_MODEL)),
('object_phrase_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dalme_app.entity_phrase')),
('wordform_id', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='dalme_app.wordform')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Ticket',
fields=[
('id', models.AutoField(db_index=True, primary_key=True, serialize=False, unique=True)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('subject', models.CharField(max_length=140)),
('description', models.TextField(blank=True, null=True)),
('status', models.IntegerField(choices=[(0, 'Open'), (1, 'Closed')], default=0)),
('url', models.CharField(default=None, max_length=255, null=True)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_ticket_creation', to=settings.AUTH_USER_MODEL)),
('file', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='dalme_app.attachment')),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_ticket_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ['status', 'creation_timestamp'],
},
),
migrations.CreateModel(
name='TaskList',
fields=[
('id', models.AutoField(db_index=True, primary_key=True, serialize=False, unique=True)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('name', models.CharField(max_length=60)),
('slug', models.SlugField(default='')),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_tasklist_creation', to=settings.AUTH_USER_MODEL)),
('group', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='task_list_group', to='auth.group')),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_tasklist_modification', to=settings.AUTH_USER_MODEL)),
('owner', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_tasklist_related', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name_plural': 'Task Lists',
'ordering': ['name'],
'unique_together': {('group', 'slug')},
},
),
migrations.CreateModel(
name='Task',
fields=[
('id', models.AutoField(db_index=True, primary_key=True, serialize=False, unique=True)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('title', models.CharField(max_length=140)),
('due_date', models.DateField(blank=True, null=True)),
('completed', models.BooleanField(default=False)),
('completed_date', models.DateField(blank=True, null=True)),
('description', models.TextField(blank=True, null=True)),
('priority', models.PositiveIntegerField(blank=True, null=True)),
('position', models.CharField(blank=True, default=None, max_length=255)),
('url', models.CharField(default=None, max_length=255, null=True)),
('assigned_to', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='task_assigned_to', to=settings.AUTH_USER_MODEL)),
('created_by', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='task_created_by', to=settings.AUTH_USER_MODEL)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_task_creation', to=settings.AUTH_USER_MODEL)),
('file', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='dalme_app.attachment')),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_task_modification', to=settings.AUTH_USER_MODEL)),
('task_list', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dalme_app.tasklist')),
('workset', models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, to='dalme_app.set')),
],
options={
'ordering': ['priority', 'creation_timestamp'],
},
),
migrations.CreateModel(
name='Tag',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('tag_type', models.CharField(choices=[('WF', 'Workflow'), ('C', 'Control'), ('T', 'Ticket')], max_length=2)),
('tag', models.CharField(default=None, max_length=55, null=True)),
('tag_group', models.CharField(default=None, max_length=255, null=True)),
('object_id', models.CharField(db_index=True, max_length=55, null=True)),
('content_type', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='contenttypes.contenttype')),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_tag_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_tag_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Source_pages',
fields=[
('id', models.AutoField(db_index=True, primary_key=True, serialize=False, unique=True)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_source_pages_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_source_pages_modification', to=settings.AUTH_USER_MODEL)),
('page', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='sources', to='dalme_app.page')),
('source', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='source_pages', to='dalme_app.source')),
('transcription', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='source_pages', to='dalme_app.transcription')),
],
options={
'abstract': False,
},
),
migrations.AddField(
model_name='source',
name='pages',
field=models.ManyToManyField(db_index=True, through='dalme_app.Source_pages', to='dalme_app.Page'),
),
migrations.AddField(
model_name='source',
name='parent',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='children', to='dalme_app.source'),
),
migrations.AddField(
model_name='source',
name='primary_dataset',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_query_name='set_members', to='dalme_app.set'),
),
migrations.AddField(
model_name='source',
name='type',
field=models.ForeignKey(db_column='type', on_delete=django.db.models.deletion.PROTECT, to='dalme_app.content_type'),
),
migrations.CreateModel(
name='Scope',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('type', models.IntegerField(choices=[(1, 'Temporal'), (2, 'Spatial'), (3, 'Linguistic'), (4, 'Context')])),
('range', models.TextField()),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_scope_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_scope_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='RightsPolicy',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('name', models.CharField(max_length=100)),
('rights_status', models.IntegerField(choices=[(1, 'Copyrighted'), (2, 'Orphaned'), (3, 'Owned'), (4, 'Public Domain'), (5, 'Unknown')], default=5)),
('rights', models.TextField(blank=True, default=None)),
('rights_notice', models.JSONField(null=True)),
('licence', models.TextField(blank=True, default=None, null=True)),
('rights_holder', models.CharField(default=None, max_length=255, null=True)),
('notice_display', models.BooleanField(default=False)),
('public_display', models.BooleanField(default=True)),
('attachments', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='dalme_app.attachment')),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_rightspolicy_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_rightspolicy_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Relationship',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('source_object_id', models.UUIDField(db_index=True, null=True)),
('target_object_id', models.UUIDField(db_index=True, null=True)),
('notes', models.TextField(blank=True)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_relationship_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_relationship_modification', to=settings.AUTH_USER_MODEL)),
('scope', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='dalme_app.scope')),
('source_content_type', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='relationship_sources', to='contenttypes.contenttype')),
('target_content_type', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='relationship_targets', to='contenttypes.contenttype')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='PublicRegister',
fields=[
('object_id', models.UUIDField(db_index=True, primary_key=True, serialize=False)),
('created', models.DateTimeField(auto_now_add=True, null=True)),
('content_type', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='contenttypes.contenttype')),
('creator', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_publicregister_creation', to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Profile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('full_name', models.CharField(blank=True, max_length=50)),
('primary_group', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='auth.group')),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='profile', to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Place',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('std_name', models.CharField(max_length=255)),
('type', models.IntegerField(db_index=True)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_place_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_place_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
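        # Object_attribute: joins an Object to the Concept that describes one of
        # its attributes.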
migrations.CreateModel(
name='Object_attribute',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('attribute_concept', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dalme_app.concept')),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_object_attribute_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_object_attribute_modification', to=settings.AUTH_USER_MODEL)),
('object', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dalme_app.object')),
],
options={
'abstract': False,
},
),
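        # LanguageReference: language/dialect authority keyed by Glottolog code,
        # with an optional ISO 639-3 code and a self-referential parent
        # (presumably linking dialects to their parent language).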
migrations.CreateModel(
name='LanguageReference',
fields=[
('id', models.AutoField(db_index=True, primary_key=True, serialize=False, unique=True)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('glottocode', models.CharField(max_length=25, unique=True)),
('iso6393', models.CharField(blank=True, default=None, max_length=25, null=True, unique=True)),
('name', models.CharField(max_length=255)),
('type', models.IntegerField(choices=[(1, 'Language'), (2, 'Dialect')])),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_languagereference_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_languagereference_modification', to=settings.AUTH_USER_MODEL)),
('parent', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='dalme_app.languagereference')),
],
options={
'ordering': ['name'],
},
),
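        # GroupProperties: one-to-one extension of auth.Group adding a type code
        # (Admin/DAM/Dataset/Knowledge Base/Website) and a description.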
migrations.CreateModel(
name='GroupProperties',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('type', models.IntegerField(choices=[(1, 'Admin'), (2, 'DAM'), (3, 'Dataset'), (4, 'Knowledge Base'), (5, 'Website')])),
('description', models.CharField(max_length=255)),
('group', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='properties', to='auth.group')),
],
),
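        # The AddField operations below attach foreign keys to models created
        # earlier in this migration; Django emits them separately, typically to
        # break circular dependencies between model definitions.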
migrations.AddField(
model_name='entity_phrase',
name='transcription_id',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='entity_phrases', to='dalme_app.transcription'),
),
migrations.AddField(
model_name='content_attributes',
name='content_type',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='attribute_type_list', to='dalme_app.content_type'),
),
migrations.AddField(
model_name='content_attributes',
name='creation_user',
field=models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_content_attributes_creation', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='content_attributes',
name='modification_user',
field=models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_content_attributes_modification', to=settings.AUTH_USER_MODEL),
),
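        # Comment: generic comments attachable to any record via contenttypes;
        # object_id is a CharField (max_length=55), presumably so it can hold
        # both UUID and integer primary keys.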
migrations.CreateModel(
name='Comment',
fields=[
('id', models.AutoField(db_index=True, primary_key=True, serialize=False, unique=True)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('object_id', models.CharField(db_index=True, max_length=55, null=True)),
('body', models.TextField(blank=True, default=None, null=True)),
('content_type', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='contenttypes.contenttype')),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_comment_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_comment_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ['creation_timestamp'],
},
),
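        # AttributeReference: catalogue of attribute definitions (name,
        # short_name, data_type, source) that other models can draw on.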
migrations.CreateModel(
name='AttributeReference',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('name', models.CharField(max_length=255)),
('short_name', models.CharField(max_length=55)),
('description', models.TextField()),
('data_type', models.CharField(max_length=15)),
('source', models.CharField(max_length=255)),
('term_type', models.CharField(blank=True, default=None, max_length=55)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_attributereference_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_attributereference_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
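        # Workflow: per-Source editorial state (one-to-one on Source), tracking
        # the stage pipeline ingestion -> transcription -> markup -> review ->
        # parsing with per-stage *_done flags and a public-release switch.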
migrations.CreateModel(
name='Workflow',
fields=[
('source', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, related_name='workflow', serialize=False, to='dalme_app.source')),
('wf_status', models.IntegerField(choices=[(1, 'assessing'), (2, 'processing'), (3, 'processed')], default=2)),
('stage', models.IntegerField(choices=[(1, 'ingestion'), (2, 'transcription'), (3, 'markup'), (4, 'review'), (5, 'parsing')], default=1)),
('last_modified', models.DateTimeField(blank=True, null=True)),
('help_flag', models.BooleanField(default=False)),
('ingestion_done', models.BooleanField(default=False)),
('transcription_done', models.BooleanField(default=False)),
('markup_done', models.BooleanField(default=False)),
('parsing_done', models.BooleanField(default=False)),
('review_done', models.BooleanField(default=False)),
('is_public', models.BooleanField(default=False)),
('last_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
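        # Work_log: audit trail of workflow events (event label, timestamp, user).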
migrations.CreateModel(
name='Work_log',
fields=[
('id', models.AutoField(db_index=True, primary_key=True, serialize=False, unique=True)),
('event', models.CharField(max_length=255)),
('timestamp', models.DateTimeField(auto_now_add=True)),
('user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
('source', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='work_log', to='dalme_app.workflow')),
],
),
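        # Source_credit: credits an Agent on a Source with a role (editor,
        # corrections, contributor); unique per (source, agent, type).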
migrations.CreateModel(
name='Source_credit',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('type', models.IntegerField(choices=[(1, 'Editor'), (2, 'Corrections'), (3, 'Contributor')])),
('note', models.CharField(blank=True, max_length=255, null=True)),
('agent', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='credits', to='dalme_app.agent')),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_source_credit_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_source_credit_modification', to=settings.AUTH_USER_MODEL)),
('source', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='credits', to='dalme_app.source')),
],
options={
'unique_together': {('source', 'agent', 'type')},
},
),
migrations.AlterUniqueTogether(
name='source',
unique_together={('type', 'name')},
),
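        # Set_x_content: generic membership table linking a Set to arbitrary
        # records via contenttypes; workset_done tracks per-member progress.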
migrations.CreateModel(
name='Set_x_content',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('object_id', models.UUIDField(db_index=True, default=uuid.uuid4)),
('workset_done', models.BooleanField(default=False)),
('content_type', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='contenttypes.contenttype')),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_set_x_content_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_set_x_content_modification', to=settings.AUTH_USER_MODEL)),
('set_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='members', to='dalme_app.set')),
],
options={
'ordering': ['set_id', 'id'],
'unique_together': {('content_type', 'object_id', 'set_id')},
},
),
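        # LocaleReference: place-name authority with coordinates and a country
        # reference; unique on (name, administrative_region).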
migrations.CreateModel(
name='LocaleReference',
fields=[
('id', models.AutoField(db_index=True, primary_key=True, serialize=False, unique=True)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('name', models.CharField(max_length=255)),
('administrative_region', models.CharField(max_length=255)),
('latitude', models.DecimalField(decimal_places=6, max_digits=9, null=True)),
('longitude', models.DecimalField(decimal_places=6, max_digits=9, null=True)),
('country', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='dalme_app.countryreference')),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_localereference_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_localereference_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ['country', 'name'],
'unique_together': {('name', 'administrative_region')},
},
),
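        # Attribute: entity-attribute-value (EAV) storage. Each row attaches one
        # typed value to any record (via contenttypes), stored in the column that
        # matches its data type (value_STR, value_DATE*, value_INT, value_TXT or
        # value_JSON). A hypothetical query sketch (field names on
        # attribute_type are assumed, not defined in this excerpt):
        #   Attribute.objects.filter(attribute_type__short_name='date',
        #                            value_DATE__year=1425)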
migrations.CreateModel(
name='Attribute',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('object_id', models.UUIDField(db_index=True, null=True)),
('value_STR', models.CharField(blank=True, default=None, max_length=255, null=True)),
('value_DATE_d', models.IntegerField(blank=True, null=True)),
('value_DATE_m', models.IntegerField(blank=True, null=True)),
('value_DATE_y', models.IntegerField(blank=True, null=True)),
('value_DATE', models.DateField(blank=True, null=True)),
('value_INT', models.IntegerField(blank=True, null=True)),
('value_TXT', models.TextField(blank=True, default=None, null=True)),
('value_JSON', models.JSONField(null=True)),
('attribute_type', models.ForeignKey(db_column='attribute_type', on_delete=django.db.models.deletion.CASCADE, to='dalme_app.attribute_type')),
('content_type', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='contenttypes.contenttype')),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_attribute_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_attribute_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'unique_together': {('object_id', 'attribute_type', 'value_STR')},
},
),
]
django.db import migrations, models\n'), ((663, 25, 663, 57), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import migrations, models\n'), ((664, 34, 664, 163), 'django.db.models.IntegerField', 'models.IntegerField', (), '', False, 'from django.db import migrations, models\n'), ((665, 27, 665, 69), 'django.db.models.TextField', 'models.TextField', (), '', False, 'from django.db import migrations, models\n'), ((666, 34, 666, 61), 'django.db.models.JSONField', 'models.JSONField', (), '', False, 'from django.db import migrations, models\n'), ((667, 28, 667, 81), 'django.db.models.TextField', 'models.TextField', (), '', False, 'from django.db import migrations, models\n'), ((668, 34, 668, 91), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import migrations, models\n'), ((669, 35, 669, 69), 'django.db.models.BooleanField', 'models.BooleanField', (), '', False, 'from django.db import migrations, models\n'), ((670, 35, 670, 68), 'django.db.models.BooleanField', 'models.BooleanField', (), '', False, 'from django.db import migrations, models\n'), ((671, 32, 671, 145), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n'), ((672, 34, 672, 241), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n'), ((673, 38, 673, 249), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n'), ((682, 23, 682, 125), 'django.db.models.UUIDField', 'models.UUIDField', (), '', False, 'from django.db import migrations, models\n'), ((683, 39, 683, 89), 'django.db.models.DateTimeField', 'models.DateTimeField', (), '', False, 'from django.db import migrations, models\n'), ((684, 43, 684, 89), 'django.db.models.DateTimeField', 'models.DateTimeField', (), '', False, 'from django.db import migrations, models\n'), ((685, 37, 685, 79), 'django.db.models.UUIDField', 'models.UUIDField', (), '', False, 'from django.db import migrations, models\n'), ((686, 37, 686, 79), 'django.db.models.UUIDField', 'models.UUIDField', (), '', False, 'from django.db import migrations, models\n'), ((687, 26, 687, 54), 'django.db.models.TextField', 'models.TextField', (), '', False, 'from django.db import migrations, models\n'), ((688, 34, 688, 241), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n'), ((689, 38, 689, 249), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n'), ((690, 26, 690, 121), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n'), ((691, 40, 691, 181), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n'), ((692, 40, 692, 181), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n'), ((701, 30, 701, 96), 'django.db.models.UUIDField', 'models.UUIDField', (), '', False, 'from django.db import migrations, models\n'), ((702, 28, 702, 78), 'django.db.models.DateTimeField', 'models.DateTimeField', (), '', False, 'from django.db import migrations, models\n'), ((703, 33, 703, 137), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n'), ((704, 28, 704, 237), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import 
migrations, models\n'), ((710, 23, 710, 112), 'django.db.models.AutoField', 'models.AutoField', (), '', False, 'from django.db import migrations, models\n'), ((711, 30, 711, 73), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import migrations, models\n'), ((712, 34, 712, 125), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n'), ((713, 25, 713, 143), 'django.db.models.OneToOneField', 'models.OneToOneField', (), '', False, 'from django.db import migrations, models\n'), ((719, 23, 719, 125), 'django.db.models.UUIDField', 'models.UUIDField', (), '', False, 'from django.db import migrations, models\n'), ((720, 39, 720, 89), 'django.db.models.DateTimeField', 'models.DateTimeField', (), '', False, 'from django.db import migrations, models\n'), ((721, 43, 721, 89), 'django.db.models.DateTimeField', 'models.DateTimeField', (), '', False, 'from django.db import migrations, models\n'), ((722, 29, 722, 61), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import migrations, models\n'), ((723, 25, 723, 59), 'django.db.models.IntegerField', 'models.IntegerField', (), '', False, 'from django.db import migrations, models\n'), ((724, 34, 724, 234), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n'), ((725, 38, 725, 242), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n'), ((734, 23, 734, 125), 'django.db.models.UUIDField', 'models.UUIDField', (), '', False, 'from django.db import migrations, models\n'), ((735, 39, 735, 89), 'django.db.models.DateTimeField', 'models.DateTimeField', (), '', False, 'from django.db import migrations, models\n'), ((736, 43, 736, 89), 'django.db.models.DateTimeField', 'models.DateTimeField', (), '', False, 'from django.db import migrations, models\n'), ((737, 38, 737, 124), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n'), ((738, 34, 738, 245), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n'), ((739, 38, 739, 253), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n'), ((740, 27, 740, 112), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n'), ((749, 23, 749, 102), 'django.db.models.AutoField', 'models.AutoField', (), '', False, 'from django.db import migrations, models\n'), ((750, 39, 750, 89), 'django.db.models.DateTimeField', 'models.DateTimeField', (), '', False, 'from django.db import migrations, models\n'), ((751, 43, 751, 89), 'django.db.models.DateTimeField', 'models.DateTimeField', (), '', False, 'from django.db import migrations, models\n'), ((752, 31, 752, 75), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import migrations, models\n'), ((753, 28, 753, 109), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import migrations, models\n'), ((754, 25, 754, 57), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import migrations, models\n'), ((755, 25, 755, 87), 'django.db.models.IntegerField', 'models.IntegerField', (), '', False, 'from django.db import migrations, models\n'), ((756, 34, 756, 246), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db 
import migrations, models\n'), ((757, 38, 757, 254), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n'), ((758, 27, 758, 135), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n'), ((767, 23, 767, 112), 'django.db.models.AutoField', 'models.AutoField', (), '', False, 'from django.db import migrations, models\n'), ((768, 25, 768, 135), 'django.db.models.IntegerField', 'models.IntegerField', (), '', False, 'from django.db import migrations, models\n'), ((769, 32, 769, 64), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import migrations, models\n'), ((770, 26, 770, 135), 'django.db.models.OneToOneField', 'models.OneToOneField', (), '', False, 'from django.db import migrations, models\n'), ((796, 23, 796, 102), 'django.db.models.AutoField', 'models.AutoField', (), '', False, 'from django.db import migrations, models\n'), ((797, 39, 797, 89), 'django.db.models.DateTimeField', 'models.DateTimeField', (), '', False, 'from django.db import migrations, models\n'), ((798, 43, 798, 89), 'django.db.models.DateTimeField', 'models.DateTimeField', (), '', False, 'from django.db import migrations, models\n'), ((799, 30, 799, 87), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import migrations, models\n'), ((800, 25, 800, 78), 'django.db.models.TextField', 'models.TextField', (), '', False, 'from django.db import migrations, models\n'), ((801, 33, 801, 137), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n'), ((802, 34, 802, 236), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n'), ((803, 38, 803, 244), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n'), ((812, 23, 812, 125), 'django.db.models.UUIDField', 'models.UUIDField', (), '', False, 'from django.db import migrations, models\n'), ((813, 39, 813, 89), 'django.db.models.DateTimeField', 'models.DateTimeField', (), '', False, 'from django.db import migrations, models\n'), ((814, 43, 814, 89), 'django.db.models.DateTimeField', 'models.DateTimeField', (), '', False, 'from django.db import migrations, models\n'), ((815, 25, 815, 57), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import migrations, models\n'), ((816, 31, 816, 62), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import migrations, models\n'), ((817, 32, 817, 50), 'django.db.models.TextField', 'models.TextField', ({}, {}), '()', False, 'from django.db import migrations, models\n'), ((818, 30, 818, 61), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import migrations, models\n'), ((819, 27, 819, 59), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import migrations, models\n'), ((820, 30, 820, 87), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import migrations, models\n'), ((821, 34, 821, 247), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n'), ((822, 38, 822, 255), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n'), ((831, 27, 831, 175), 'django.db.models.OneToOneField', 'models.OneToOneField', (), '', False, 'from django.db import 
migrations, models\n'), ((832, 30, 832, 125), 'django.db.models.IntegerField', 'models.IntegerField', (), '', False, 'from django.db import migrations, models\n'), ((833, 26, 833, 152), 'django.db.models.IntegerField', 'models.IntegerField', (), '', False, 'from django.db import migrations, models\n'), ((834, 34, 834, 77), 'django.db.models.DateTimeField', 'models.DateTimeField', (), '', False, 'from django.db import migrations, models\n'), ((835, 30, 835, 64), 'django.db.models.BooleanField', 'models.BooleanField', (), '', False, 'from django.db import migrations, models\n'), ((836, 35, 836, 69), 'django.db.models.BooleanField', 'models.BooleanField', (), '', False, 'from django.db import migrations, models\n'), ((837, 39, 837, 73), 'django.db.models.BooleanField', 'models.BooleanField', (), '', False, 'from django.db import migrations, models\n'), ((838, 32, 838, 66), 'django.db.models.BooleanField', 'models.BooleanField', (), '', False, 'from django.db import migrations, models\n'), ((839, 33, 839, 67), 'django.db.models.BooleanField', 'models.BooleanField', (), '', False, 'from django.db import migrations, models\n'), ((840, 32, 840, 66), 'django.db.models.BooleanField', 'models.BooleanField', (), '', False, 'from django.db import migrations, models\n'), ((841, 30, 841, 64), 'django.db.models.BooleanField', 'models.BooleanField', (), '', False, 'from django.db import migrations, models\n'), ((842, 30, 842, 188), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n'), ((848, 23, 848, 102), 'django.db.models.AutoField', 'models.AutoField', (), '', False, 'from django.db import migrations, models\n'), ((849, 26, 849, 58), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import migrations, models\n'), ((850, 30, 850, 69), 'django.db.models.DateTimeField', 'models.DateTimeField', (), '', False, 'from django.db import migrations, models\n'), ((851, 25, 851, 183), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n'), ((852, 27, 852, 139), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n'), ((858, 23, 858, 125), 'django.db.models.UUIDField', 'models.UUIDField', (), '', False, 'from django.db import migrations, models\n'), ((859, 39, 859, 89), 'django.db.models.DateTimeField', 'models.DateTimeField', (), '', False, 'from django.db import migrations, models\n'), ((860, 43, 860, 89), 'django.db.models.DateTimeField', 'models.DateTimeField', (), '', False, 'from django.db import migrations, models\n'), ((861, 25, 861, 109), 'django.db.models.IntegerField', 'models.IntegerField', (), '', False, 'from django.db import migrations, models\n'), ((862, 25, 862, 80), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import migrations, models\n'), ((863, 26, 863, 134), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n'), ((864, 34, 864, 242), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n'), ((865, 38, 865, 250), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n'), ((866, 27, 866, 136), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n'), ((879, 23, 879, 112), 'django.db.models.AutoField', 'models.AutoField', (), '', False, 
'from django.db import migrations, models\n'), ((880, 39, 880, 89), 'django.db.models.DateTimeField', 'models.DateTimeField', (), '', False, 'from django.db import migrations, models\n'), ((881, 43, 881, 89), 'django.db.models.DateTimeField', 'models.DateTimeField', (), '', False, 'from django.db import migrations, models\n'), ((882, 30, 882, 81), 'django.db.models.UUIDField', 'models.UUIDField', (), '', False, 'from django.db import migrations, models\n'), ((883, 33, 883, 67), 'django.db.models.BooleanField', 'models.BooleanField', (), '', False, 'from django.db import migrations, models\n'), ((884, 33, 884, 137), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n'), ((885, 34, 885, 242), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n'), ((886, 38, 886, 250), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n'), ((887, 27, 887, 133), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n'), ((897, 23, 897, 102), 'django.db.models.AutoField', 'models.AutoField', (), '', False, 'from django.db import migrations, models\n'), ((898, 39, 898, 89), 'django.db.models.DateTimeField', 'models.DateTimeField', (), '', False, 'from django.db import migrations, models\n'), ((899, 43, 899, 89), 'django.db.models.DateTimeField', 'models.DateTimeField', (), '', False, 'from django.db import migrations, models\n'), ((900, 25, 900, 57), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import migrations, models\n'), ((901, 42, 901, 74), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import migrations, models\n'), ((902, 29, 902, 91), 'django.db.models.DecimalField', 'models.DecimalField', (), '', False, 'from django.db import migrations, models\n'), ((903, 30, 903, 92), 'django.db.models.DecimalField', 'models.DecimalField', (), '', False, 'from django.db import migrations, models\n'), ((904, 28, 904, 135), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n'), ((905, 34, 905, 244), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n'), ((906, 38, 906, 252), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n'), ((916, 23, 916, 125), 'django.db.models.UUIDField', 'models.UUIDField', (), '', False, 'from django.db import migrations, models\n'), ((917, 39, 917, 89), 'django.db.models.DateTimeField', 'models.DateTimeField', (), '', False, 'from django.db import migrations, models\n'), ((918, 43, 918, 89), 'django.db.models.DateTimeField', 'models.DateTimeField', (), '', False, 'from django.db import migrations, models\n'), ((919, 30, 919, 72), 'django.db.models.UUIDField', 'models.UUIDField', (), '', False, 'from django.db import migrations, models\n'), ((920, 30, 920, 99), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import migrations, models\n'), ((921, 33, 921, 75), 'django.db.models.IntegerField', 'models.IntegerField', (), '', False, 'from django.db import migrations, models\n'), ((922, 33, 922, 75), 'django.db.models.IntegerField', 'models.IntegerField', (), '', False, 'from django.db import migrations, models\n'), ((923, 33, 923, 75), 'django.db.models.IntegerField', 'models.IntegerField', (), 
'', False, 'from django.db import migrations, models\n'), ((924, 31, 924, 70), 'django.db.models.DateField', 'models.DateField', (), '', False, 'from django.db import migrations, models\n'), ((925, 30, 925, 72), 'django.db.models.IntegerField', 'models.IntegerField', (), '', False, 'from django.db import migrations, models\n'), ((926, 30, 926, 83), 'django.db.models.TextField', 'models.TextField', (), '', False, 'from django.db import migrations, models\n'), ((927, 31, 927, 58), 'django.db.models.JSONField', 'models.JSONField', (), '', False, 'from django.db import migrations, models\n'), ((928, 35, 928, 156), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n'), ((929, 33, 929, 137), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n'), ((930, 34, 930, 238), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n'), ((931, 38, 931, 246), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n')] |
Hezepeng/Financial-Acquisition-And-Editing-System | django_app/DataEntrySystem/apps.py | 0781101e596a31d90bcfa3d67622472c04c6149f | from django.apps import AppConfig
class DataentrysystemConfig(AppConfig):
name = 'DataEntrySystem'
| [] |
JeFaProductions/bombgame2 | bombgame/recursive_bt_maze.py | fc2ca7c6606aecd2bec013ed307aa344a0adffc7 | # recursive_bt_maze.py
#
# Author: Jens Gansloser
# Created On: 16 Feb 2019
import os
import random
import numpy as np
class RecursiveBTMaze:
def __init__(self, width, height):
if width % 2 == 0 or height % 2 == 0:
raise ValueError("Width and height need to be odd.")
self.width = width
self.height = height
self.go = {'N': np.array([0, 2]),
'E': np.array([2, 0]),
'S': np.array([0, -2]),
'W': np.array([-2, 0])}
        self.go_half = {key: (0.5 * value).astype(int) for key, value in self.go.items()}  # np.int was removed in NumPy 1.24
self.opposite = {'N': 'S', 'E': 'W', 'S': 'N', 'W': 'E'}
# 0: path, 1: wall.
        self.data = np.ones((height, width), dtype=int)
self.stack = []
        # Pick an odd starting cell directly; the original even-cell bump
        # (index += 1) could step past the last row/column, since height - 1
        # and width - 1 are even for odd grid sizes.
        index = np.array([random.randrange(1, self.height, 2),
                          random.randrange(1, self.width, 2)])
self.stack.append([index, self.shuffle_directions()])
def generate(self):
while self.next():
pass
def next(self, borders=False):
if self.stack:
index, directions = self.stack.pop()
stack_size = len(self.stack)
directions_size = len(directions)
while directions:
direction = directions.pop()
new_index = index + self.go[direction]
# Special case at the borders.
if borders:
if self.cell_valid(index + self.go_half[direction]) and not self.cell_valid(new_index):
if random.choice([0, 1]):
y, x = index + self.go_half[direction]
self.data[y, x] = 0
if self.cell_valid(new_index) and not self.cell_visited(new_index):
self.stack.append([index, directions])
self.cell_move(index, new_index)
self.stack.append([new_index, self.shuffle_directions()])
break
if directions_size == 4 and not directions and len(self.stack) == stack_size:
self.random_break(index)
return True
else:
return False
def random_break(self, index):
for direction in self.shuffle_directions():
new_index = index + self.go[direction]
if self.cell_valid(new_index) and self.cell_value(index + self.go_half[direction]) == 1:
self.cell_move(index, new_index)
break
def cell_value(self, index):
y, x = index
return self.data[y, x]
def cell_visited(self, index):
return self.cell_value(index) != 1
def cell_valid(self, index):
y, x = index
if y < 0 or y >= self.height or x < 0 or x >= self.width:
return False
return True
def cell_move(self, index, new_index):
y, x = new_index
self.data[y, x] = 0
        y, x = (index + 0.5 * (new_index - index)).astype(int)
self.data[y, x] = 0
def shuffle_directions(self):
        # random.sample needs a sequence; dict views are rejected in Python 3.
        return random.sample(list(self.go), len(self.go))
def itermaze(self):
return self.__iter2d__(self.data)
@staticmethod
def __iter2d__(data):
for i in range(data.shape[0]):
for j in range(data.shape[1]):
yield np.array([i, j]), data[i, j]
def __str__(self):
data = -1 * np.ones((self.height + 2, self.width + 2))
out = ''
wall = '#'
path = '0'
border = '+'
data[1:-1, 1:-1] = self.data
for index, value in self.__iter2d__(data):
if index[1] == 0:
out += os.linesep
if value == -1:
out += border
elif value == 0:
out += path
elif value == 1:
out += wall
return out
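# A minimal usage sketch (illustrative, not part of the original module):
# generate a small maze and print its ASCII rendering. Width and height
# must be odd, per the constructor check.
if __name__ == '__main__':
    maze = RecursiveBTMaze(21, 11)
    maze.generate()
    print(maze)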
| [((27, 20, 27, 58), 'numpy.ones', 'np.ones', (), '', True, 'import numpy as np\n'), ((19, 24, 19, 40), 'numpy.array', 'np.array', ({(19, 33, 19, 39): '[0, 2]'}, {}), '([0, 2])', True, 'import numpy as np\n'), ((20, 24, 20, 40), 'numpy.array', 'np.array', ({(20, 33, 20, 39): '[2, 0]'}, {}), '([2, 0])', True, 'import numpy as np\n'), ((21, 24, 21, 41), 'numpy.array', 'np.array', ({(21, 33, 21, 40): '[0, -2]'}, {}), '([0, -2])', True, 'import numpy as np\n'), ((22, 24, 22, 41), 'numpy.array', 'np.array', ({(22, 33, 22, 40): '[-2, 0]'}, {}), '([-2, 0])', True, 'import numpy as np\n'), ((115, 20, 115, 62), 'numpy.ones', 'np.ones', ({(115, 28, 115, 61): '(self.height + 2, self.width + 2)'}, {}), '((self.height + 2, self.width + 2))', True, 'import numpy as np\n'), ((31, 26, 31, 60), 'random.randint', 'random.randint', ({(31, 41, 31, 42): '0', (31, 44, 31, 59): 'self.height - 1'}, {}), '(0, self.height - 1)', False, 'import random\n'), ((32, 26, 32, 59), 'random.randint', 'random.randint', ({(32, 41, 32, 42): '0', (32, 44, 32, 58): 'self.width - 1'}, {}), '(0, self.width - 1)', False, 'import random\n'), ((55, 27, 55, 48), 'random.choice', 'random.choice', ({(55, 41, 55, 47): '[0, 1]'}, {}), '([0, 1])', False, 'import random\n'), ((112, 22, 112, 38), 'numpy.array', 'np.array', ({(112, 31, 112, 37): '[i, j]'}, {}), '([i, j])', True, 'import numpy as np\n')] |
Nibuja05/KVConverter | KV_Reader.py | 74f810df4ac82358f405eac9c2f56dce13b69302 |
import re
import math
class KVPart():
"""docstring for KVPart"""
def __init__(self, name, tab_count = 0):
#super(KVPart, self).__init__()
self.name = name
self.values = []
self.tab_count = tab_count
self.parent = None
self.master = False
def add_simple_value(self, value):
self.values.append(value)
def add_KVPart(self, name):
if self.master == False:
new_KVPart = KVPart(name, self.tab_count + 1)
else:
new_KVPart = KVPart(name, self.tab_count)
new_KVPart.set_parent(self)
self.values.append(new_KVPart)
return new_KVPart
def add_KVPart_finished(self, part):
if not part is None:
part.set_tab_count(self.tab_count + 1)
self.values.append(part)
def add_KVComment(self, text):
new_KVComment = KVComment(text)
self.values.append(new_KVComment)
def is_empty(self):
if len(self.values) == 0:
return True
return False
def set_parent(self, parent):
self.parent = parent
def get_parent(self):
return self.parent
def has_parent(self):
if self.parent is not None:
return True
return False
def get_name(self):
return self.name
def set_master(self, boolean):
self.master = boolean
def get_values(self):
return self.values
def has_KV_child(self):
return any(isinstance(x, KVPart) for x in self.values)
def set_tab_count(self, count):
self.tab_count = count
def items(self):
return self.name, self.values[0]
def __str__(self):
if self.master == False:
string = self.fTab(self.tab_count) + "\"" + self.name + "\""
if any(isinstance(x, KVPart) for x in self.values):
string += "\n" + self.fTab(self.tab_count) + "{\n"
else:
				string += self.get_normal_space(string)
for x in self.values:
if type(x) is KVPart:
string += str(x)
elif type(x) is KVComment:
string += self.fTab(self.tab_count + 1) + str(x) + "\n"
else:
string += "\"" + str(x) + "\"\n"
if any(isinstance(x, KVPart) for x in self.values):
string += self.fTab(self.tab_count) + "}\n"
return string
else:
if len(self.values) > 1:
string = ""
for x in self.values:
string += str(x) + "\n"
return string
else:
return ""
def __repr__(self):
return "<|" + self.name + "|>"
def fTab(self, count):
string = ""
for x in range(count):
string += "\t"
return string
def get_normal_space(self, text):
lines = text.splitlines()
last_line = lines[len(lines) - 1]
new_position = last_line.rfind("\"")
tab_count = math.floor((40 - new_position) / 5)
space_count = ((40 - new_position) % 5) + 1
string = ""
for x in range(space_count):
string += " "
string += self.fTab(tab_count)
return string
class KVComment():
"""docstring for KVComment"""
def __init__(self, text):
#super(KVComment, self).__init__()
self.text = text
def __str__(self):
return self.text
def read_file(path):
#path = input("Please enter the path of the KV File:")
#path = "C:\\Steam\\steamapps\\common\\dota 2 beta\\game\\dota_addons\\heataria\\scripts\\npc\\abilities\\heataria_blaze_path.txt"
	try:
		file = open(path, "r")
		text = file.read()
		file.close()
	except FileNotFoundError:
		# Re-prompt instead of recursing without the required path argument,
		# which would raise a TypeError; also avoids the old return-in-finally
		# that swallowed exceptions.
		return read_file(input("Please enter the path of the KV File:"))
	master = KVPart("master")
	master.set_master(True)
	progress_text(text, master)
	return master
#processes a KV textfile into a KV_Part structure
def progress_text(text, last_KVPart = None):
if last_KVPart is not None:
#search patterns to check structure
quote_pattern = r'\"(.*?)\"'
open_pattern = r'.*{'
close_pattern = r'.*}'
comment_pattern = r'//.*'
quote_match = re.search(quote_pattern, text)
open_match = re.search(open_pattern, text)
close_match = re.search(close_pattern, text)
comment_match = re.search(comment_pattern, text)
#cancel if there are no more quotes left
if quote_match is not None:
quote_start = quote_match.start()
else:
return
#if there are no brackets left, give them a placeholder value
if open_match is not None:
open_start = open_match.start()
else:
open_start = len(text)
if close_match is not None:
close_start = close_match.start()
else:
close_start = len(text)
if comment_match is not None:
comment_start = comment_match.start()
else:
comment_start = len(text)
string = quote_match.group(1)
#print("SEACH: q." + str(quote_start) + " o." + str(open_start) + " cl." + str(close_start) + " co." + str(comment_start))
if comment_start < quote_start and comment_start < open_start and comment_start < close_start:
string = comment_match.group()
text = text[comment_match.end() + 1:]
last_KVPart.add_KVComment(string)
progress_text(text, last_KVPart)
#no bracktes before next quote -> simply add to current KV_Part
elif quote_start < open_start and quote_start < close_start:
#check if its a value or key
if last_KVPart.is_empty() and not last_KVPart.get_name() == "master":
last_KVPart.add_simple_value(string)
new_KVPart = last_KVPart.get_parent()
else:
new_KVPart = last_KVPart.add_KVPart(string)
text = text[quote_match.end() + 1:]
progress_text(text, new_KVPart)
#closing bracket -> remove bracket and move to parent KV_Part
elif close_start < quote_start:
text = text[close_match.end() + 1:]
if last_KVPart.has_parent():
temp_KVPart = last_KVPart.get_parent()
else:
temp_KVPart = last_KVPart
progress_text(text, temp_KVPart)
#opening bracket -> creates a new child KV_Part
elif open_start < quote_start:
new_KVPart = last_KVPart.add_KVPart(string)
text = text[quote_match.end() + 1:]
progress_text(text, new_KVPart)
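# A minimal usage sketch (illustrative; the file name is a placeholder):
# parse a KV text file into a KVPart tree and print it back out.
if __name__ == '__main__':
	kv_master = read_file("ability.txt")
	print(kv_master)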
| [((110, 14, 110, 49), 'math.floor', 'math.floor', ({(110, 25, 110, 48): '(40 - new_position) / 5'}, {}), '((40 - new_position) / 5)', False, 'import math\n'), ((150, 16, 150, 46), 're.search', 're.search', ({(150, 26, 150, 39): 'quote_pattern', (150, 41, 150, 45): 'text'}, {}), '(quote_pattern, text)', False, 'import re\n'), ((151, 15, 151, 44), 're.search', 're.search', ({(151, 25, 151, 37): 'open_pattern', (151, 39, 151, 43): 'text'}, {}), '(open_pattern, text)', False, 'import re\n'), ((152, 16, 152, 46), 're.search', 're.search', ({(152, 26, 152, 39): 'close_pattern', (152, 41, 152, 45): 'text'}, {}), '(close_pattern, text)', False, 'import re\n'), ((153, 18, 153, 50), 're.search', 're.search', ({(153, 28, 153, 43): 'comment_pattern', (153, 45, 153, 49): 'text'}, {}), '(comment_pattern, text)', False, 'import re\n')] |
PaulDoessel/appleseed | scripts/updatetestsuiterefimages.py | 142908e05609cd802b3ab937ff27ef2b73dd3088 | #!/usr/bin/python
#
# This source file is part of appleseed.
# Visit http://appleseedhq.net/ for additional information and resources.
#
# This software is released under the MIT license.
#
# Copyright (c) 2014-2016 Francois Beaune, The appleseedhq Organization
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
from __future__ import print_function
import argparse
import os
import shutil
#--------------------------------------------------------------------------------------------------
# Utility functions.
#--------------------------------------------------------------------------------------------------
def safe_mkdir(dir):
if not os.path.exists(dir):
os.mkdir(dir)
def walk(directory, recursive):
if recursive:
for dirpath, dirnames, filenames in os.walk(directory):
yield dirpath, dirnames, filenames
else:
        yield next(os.walk(directory))  # generator .next() was removed in Python 3
#--------------------------------------------------------------------------------------------------
# Update reference images in a given test suite directory.
#--------------------------------------------------------------------------------------------------
def update_ref_images(parent_dir):
renders_dir = os.path.join(parent_dir, "renders")
ref_dir = os.path.join(parent_dir, "ref")
safe_mkdir(ref_dir)
for filename in os.listdir(renders_dir):
if os.path.splitext(filename)[1] == ".png":
src_path = os.path.join(renders_dir, filename)
dst_path = os.path.join(ref_dir, filename)
print(" copying {0} to {1}...".format(src_path, dst_path))
shutil.copyfile(src_path, dst_path)
#--------------------------------------------------------------------------------------------------
# Entry point.
#--------------------------------------------------------------------------------------------------
def main():
parser = argparse.ArgumentParser(description="update functional test suite reference images.")
parser.add_argument("-r", "--recursive", action='store_true', dest="recursive",
help="scan the specified directory and all its subdirectories")
parser.add_argument("directory", nargs='?', default=".", help="directory to scan")
args = parser.parse_args()
for dirpath, dirnames, filenames in walk(args.directory, args.recursive):
if "renders" in dirnames:
update_ref_images(dirpath)
if __name__ == '__main__':
main()
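# Example invocation (hypothetical directory layout):
#   python updatetestsuiterefimages.py --recursive tests/functional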
| [((57, 18, 57, 53), 'os.path.join', 'os.path.join', ({(57, 31, 57, 41): 'parent_dir', (57, 43, 57, 52): '"""renders"""'}, {}), "(parent_dir, 'renders')", False, 'import os\n'), ((58, 14, 58, 45), 'os.path.join', 'os.path.join', ({(58, 27, 58, 37): 'parent_dir', (58, 39, 58, 44): '"""ref"""'}, {}), "(parent_dir, 'ref')", False, 'import os\n'), ((62, 20, 62, 43), 'os.listdir', 'os.listdir', ({(62, 31, 62, 42): 'renders_dir'}, {}), '(renders_dir)', False, 'import os\n'), ((75, 13, 75, 98), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (), '', False, 'import argparse\n'), ((41, 11, 41, 30), 'os.path.exists', 'os.path.exists', ({(41, 26, 41, 29): 'dir'}, {}), '(dir)', False, 'import os\n'), ((42, 8, 42, 21), 'os.mkdir', 'os.mkdir', ({(42, 17, 42, 20): 'dir'}, {}), '(dir)', False, 'import os\n'), ((46, 44, 46, 62), 'os.walk', 'os.walk', ({(46, 52, 46, 61): 'directory'}, {}), '(directory)', False, 'import os\n'), ((64, 23, 64, 58), 'os.path.join', 'os.path.join', ({(64, 36, 64, 47): 'renders_dir', (64, 49, 64, 57): 'filename'}, {}), '(renders_dir, filename)', False, 'import os\n'), ((65, 23, 65, 54), 'os.path.join', 'os.path.join', ({(65, 36, 65, 43): 'ref_dir', (65, 45, 65, 53): 'filename'}, {}), '(ref_dir, filename)', False, 'import os\n'), ((67, 12, 67, 47), 'shutil.copyfile', 'shutil.copyfile', ({(67, 28, 67, 36): 'src_path', (67, 38, 67, 46): 'dst_path'}, {}), '(src_path, dst_path)', False, 'import shutil\n'), ((63, 11, 63, 37), 'os.path.splitext', 'os.path.splitext', ({(63, 28, 63, 36): 'filename'}, {}), '(filename)', False, 'import os\n'), ((49, 14, 49, 32), 'os.walk', 'os.walk', ({(49, 22, 49, 31): 'directory'}, {}), '(directory)', False, 'import os\n')] |
Breee/raidquaza | raidquaza/poll/polls.py | 308d643e71eddf6f6dc432c01322a02d604ac70e | from typing import List, Any
import time
from discord import Embed, Reaction
from utils import uniquify
# EMOJIS regional_indicator_A to regional_indicator_T
reaction_emojies = ['\U0001F1E6',
'\U0001F1E7',
'\U0001F1E8',
'\U0001F1E9',
'\U0001F1EA',
'\U0001F1EB',
'\U0001F1EC',
'\U0001F1ED',
'\U0001F1EE',
'\U0001F1EF',
'\U0001F1F0',
'\U0001F1F1',
'\U0001F1F2',
'\U0001F1F3',
'\U0001F1F4',
'\U0001F1F5',
'\U0001F1F6',
'\U0001F1F7',
'\U0001F1F8',
'\U0001F1F9']
number_emojies = {'rq_plus_one': 1, 'rq_plus_two': 2, 'rq_plus_three': 3, 'rq_plus_four': 4}
class PollCreationException(Exception):
pass
class Poll(object):
"""
A Poll object.
"""
def __init__(self, poll_id: str, poll_title: str, options: List[Any], is_immortal=False, updated_since_start=True):
if options is None:
options = []
self.poll_id = poll_id
self.creation_time = time.time()
self.last_update = time.time()
self.poll_title = poll_title
self.options = uniquify(options)
self.reaction_to_option = {reaction_emojies[k]: options[k] for k in range(len(options))}
self.option_to_reaction = {options[k]: reaction_emojies[k] for k in range(len(options))}
self.participants = dict()
self.option_to_participants = {key: [] for key in options}
self.sent_message = None
self.received_message = None
self.is_immortal = is_immortal
self.is_enabled = True
self.updated_since_start = updated_since_start
async def full_update(self, reactions: List[Reaction], bot_user_id: int):
if self.updated_since_start:
return
self.reaction_to_option = {reaction_emojies[k]: self.options[k] for k in range(len(self.options))}
self.option_to_reaction = {self.options[k]: reaction_emojies[k] for k in range(len(self.options))}
self.participants = dict()
self.option_to_participants = {key: [] for key in self.options}
for reaction in reactions:
async for user in reaction.users():
if bot_user_id != user.id:
self.process_reaction(reaction=reaction, user=user, add=True)
self.updated_since_start = True
def process_reaction(self, reaction, user, add):
# get users + reaction emoji
if hasattr(user, 'nick') and user.nick is not None:
nick = user.nick
else:
nick = user.display_name
if reaction.emoji in self.reaction_to_option:
# set list of users for the option the reaction belongs to.
option = self.reaction_to_option[reaction.emoji]
if add and nick not in self.option_to_participants[option]:
self.option_to_participants[option].append(nick)
            elif not add and nick in self.option_to_participants[option]:
                # Guard: a reaction-removal event may arrive for a vote the
                # poll never tracked (e.g. a reaction added before startup).
                self.option_to_participants[option].remove(nick)
if nick not in self.participants:
self.participants[nick] = 1
if hasattr(reaction.emoji, 'name') and reaction.emoji.name in number_emojies:
amount = number_emojies[reaction.emoji.name]
self.participants[nick] += (amount if add else -1 * amount)
def to_discord(self):
msg = f'Poll for **{self.poll_title}**'
embed = Embed(color=0xbb1c1c)
for option, participants in self.option_to_participants.items():
reaction = self.option_to_reaction[option]
name = f'{reaction} {option}'
value = ', '.join(
sorted([f'{x} [{self.participants[x]}]' for x in participants])) if participants else '-'
field_counters = [self.participants[x] for x in participants]
total = sum(field_counters)
embed.add_field(name=f'{name} [{total}]', value=value, inline=False)
embed.set_footer(text=f'ID: {self.poll_id}')
return msg, embed
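# A minimal usage sketch (illustrative; the poll id and options are made up):
# build a poll, register one vote by hand, and render the Discord embed
# without a live client (requires discord.py for the Embed class).
if __name__ == '__main__':
    demo = Poll('poll-1', 'Raid at Central Park', ['18:00', '18:30', '19:00'])
    demo.option_to_participants['18:00'].append('Alice')
    demo.participants['Alice'] = 1
    text, embed = demo.to_discord()
    print(text, [field.name for field in embed.fields])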
| [((44, 29, 44, 40), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((45, 27, 45, 38), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((47, 23, 47, 40), 'utils.uniquify', 'uniquify', ({(47, 32, 47, 39): 'options'}, {}), '(options)', False, 'from utils import uniquify\n'), ((92, 16, 92, 37), 'discord.Embed', 'Embed', (), '', False, 'from discord import Embed, Reaction\n')] |
pavstar619/HackerRank | Python/Regex and Parsing/Validating and Parsing Email Addresses.py | 697ee46b6e621ad884a064047461d7707b1413cd | import email.utils as em
import re
class Main():
def __init__(self):
self.n = int(input())
for i in range(self.n):
self.s = em.parseaddr(input())
            # The extension must be 1-3 letters; {0,3} would also accept a bare trailing dot.
            if re.match(r'^[a-zA-Z][\w.-]+@[a-zA-Z]+\.[a-zA-Z]{1,3}$', self.s[1]):
print(em.formataddr(self.s))
if __name__ == '__main__':
obj = Main()
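# Sample I/O, in the style of the HackerRank problem statement (assumed example):
# Input:
#   2
#   DEXTER <[email protected]>
#   VIRUS <virus!@variable.:p>
# Output:
#   DEXTER <[email protected]>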
| [((11, 15, 11, 86), 're.match', 're.match', ({(11, 24, 11, 74): '"""^[a-zA-Z](\\\\w|-|\\\\.|_)+@[a-zA-Z]+\\\\.[a-zA-Z]{0,3}$"""', (11, 76, 11, 85): 'self.s[1]'}, {}), "('^[a-zA-Z](\\\\w|-|\\\\.|_)+@[a-zA-Z]+\\\\.[a-zA-Z]{0,3}$', self.s[1])", False, 'import re\n'), ((12, 22, 12, 43), 'email.utils.formataddr', 'em.formataddr', ({(12, 36, 12, 42): 'self.s'}, {}), '(self.s)', True, 'import email.utils as em\n')] |
codingsoo/virtaul_girlfriend | chatbot/train.py | 7343cb95cc8ab345b735fdb07cfac8176cc41f76 | # -*- coding: utf-8 -*-
from __future__ import print_function
import tensorflow as tf
import random
import math
import os
from config import FLAGS
from model import Seq2Seq
from dialog import Dialog
def train(dialog, batch_size=100, epoch=100):
model = Seq2Seq(dialog.vocab_size)
with tf.Session() as sess:
        # TODO: move the session-loading and summary-logging logic into the Seq2Seq model
ckpt = tf.train.get_checkpoint_state(FLAGS.train_dir)
if ckpt and tf.train.checkpoint_exists(ckpt.model_checkpoint_path):
print "Reading model from following file: " + ckpt.model_checkpoint_path
model.saver.restore(sess, ckpt.model_checkpoint_path)
else:
print "Creating new model"
sess.run(tf.global_variables_initializer())
writer = tf.summary.FileWriter(FLAGS.log_dir, sess.graph)
total_batch = int(math.ceil(len(dialog.examples)/float(batch_size)))
for step in range(total_batch * epoch):
enc_input, dec_input, targets = dialog.next_batch(batch_size)
_, loss = model.train(sess, enc_input, dec_input, targets)
if (step + 1) % 100 == 0:
model.write_logs(sess, writer, enc_input, dec_input, targets)
                print('Step:', '%06d' % model.global_step.eval(),
                      'cost =', '{:.6f}'.format(loss))
checkpoint_path = os.path.join(FLAGS.train_dir, FLAGS.ckpt_name)
model.saver.save(sess, checkpoint_path, global_step=model.global_step)
        print('Optimization complete!')
def test(dialog, batch_size=100):
print ("\n=== 예측 테스트 ===")
model = Seq2Seq(dialog.vocab_size)
with tf.Session() as sess:
ckpt = tf.train.get_checkpoint_state(FLAGS.train_dir)
print ("다음 파일에서 모델을 읽는 중 입니다..", ckpt.model_checkpoint_path)
model.saver.restore(sess, ckpt.model_checkpoint_path)
enc_input, dec_input, targets = dialog.next_batch(batch_size)
expect, outputs, accuracy = model.test(sess, enc_input, dec_input, targets)
expect = dialog.decode(expect)
outputs = dialog.decode(outputs)
        pick = random.randrange(0, len(expect) // 2)  # floor division keeps the bound an int
input = dialog.decode([dialog.examples[pick * 2]], True)
expect = dialog.decode([dialog.examples[pick * 2 + 1]], True)
outputs = dialog.cut_eos(outputs[pick])
print ("\n정확도:", accuracy)
print ("랜덤 결과\n",)
print (" 입력값:", input)
print (" 실제값:", expect)
print (" 예측값:", ' '.join(outputs))
def main(_):
dialog = Dialog()
dialog.load_vocab(FLAGS.voc_path)
dialog.load_examples(FLAGS.data_path)
if FLAGS.train:
train(dialog, batch_size=FLAGS.batch_size, epoch=FLAGS.epoch)
elif FLAGS.test:
test(dialog, batch_size=FLAGS.batch_size)
if __name__ == "__main__":
tf.app.run()
| [] |
rock-learning/approxik | evaluation/dmp_behavior.py | 877d50d4d045457593a2fafefd267339a11de20f | # Author: Alexander Fabisch <[email protected]>
import numpy as np
from bolero.representation import BlackBoxBehavior
from bolero.representation import DMPBehavior as DMPBehaviorImpl
class DMPBehavior(BlackBoxBehavior):
"""Dynamical Movement Primitive.
Parameters
----------
execution_time : float, optional (default: 1)
Execution time of the DMP in seconds.
dt : float, optional (default: 0.01)
Time between successive steps in seconds.
n_features : int, optional (default: 50)
Number of RBF features for each dimension of the DMP.
configuration_file : string, optional (default: None)
Name of a configuration file that should be used to initialize the DMP.
If it is set all other arguments will be ignored.
"""
def __init__(self, execution_time=1.0, dt=0.01, n_features=50,
configuration_file=None):
self.dmp = DMPBehaviorImpl(execution_time, dt, n_features,
configuration_file)
def init(self, n_inputs, n_outputs):
"""Initialize the behavior.
Parameters
----------
n_inputs : int
number of inputs
n_outputs : int
number of outputs
"""
self.dmp.init(3 * n_inputs, 3 * n_outputs)
self.n_joints = n_inputs
self.x = np.empty(3 * self.n_joints)
self.x[:] = np.nan
def reset(self):
self.dmp.reset()
self.x[:] = 0.0
def set_inputs(self, inputs):
self.x[:self.n_joints] = inputs[:]
def can_step(self):
return self.dmp.can_step()
def step(self):
self.dmp.set_inputs(self.x)
self.dmp.step()
self.dmp.get_outputs(self.x)
def get_outputs(self, outputs):
outputs[:] = self.x[:self.n_joints]
def get_n_params(self):
return self.dmp.get_n_params()
def get_params(self):
return self.dmp.get_params()
def set_params(self, params):
self.dmp.set_params(params)
def set_meta_parameters(self, keys, values):
self.dmp.set_meta_parameters(keys, values)
def trajectory(self):
return self.dmp.trajectory()
class DMPBehaviorWithGoalParams(DMPBehavior):
def __init__(self, goal, execution_time=1.0, dt=0.01, n_features=50,
configuration_file=None):
super(DMPBehaviorWithGoalParams, self).__init__(
execution_time, dt, n_features, configuration_file)
self.params = np.copy(goal)
def set_meta_parameters(self, keys, values):
self.dmp.set_meta_parameters(keys, values)
self.set_params(self.params)
def get_n_params(self):
return len(self.params)
def get_params(self):
return self.params
def set_params(self, params):
self.params[:] = params
self.dmp.set_meta_parameters(["g"], [self.params])
| [((28, 19, 29, 54), 'bolero.representation.DMPBehavior', 'DMPBehaviorImpl', ({(28, 35, 28, 49): 'execution_time', (28, 51, 28, 53): 'dt', (28, 55, 28, 65): 'n_features', (29, 35, 29, 53): 'configuration_file'}, {}), '(execution_time, dt, n_features, configuration_file)', True, 'from bolero.representation import DMPBehavior as DMPBehaviorImpl\n'), ((44, 17, 44, 44), 'numpy.empty', 'np.empty', ({(44, 26, 44, 43): '3 * self.n_joints'}, {}), '(3 * self.n_joints)', True, 'import numpy as np\n'), ((86, 22, 86, 35), 'numpy.copy', 'np.copy', ({(86, 30, 86, 34): 'goal'}, {}), '(goal)', True, 'import numpy as np\n')] |
oxsoftdev/bitstampws-logger | logger.py | 5597010cad53cd55e949235fbc191f8b1aad344d | import logging.config
import tornado
from bitstampws import Client as Websocket
import lib.configs.logging
from lib.subscribers import SimpleLoggerSubscriber
logging.config.dictConfig(lib.configs.logging.d)
if __name__ == '__main__':
with Websocket() as client:
with SimpleLoggerSubscriber(client):
client.connect()
try:
tornado.ioloop.IOLoop.instance().start()
except KeyboardInterrupt:
client.close()
| [((13, 9, 13, 20), 'bitstampws.Client', 'Websocket', ({}, {}), '()', True, 'from bitstampws import Client as Websocket\n'), ((14, 13, 14, 43), 'lib.subscribers.SimpleLoggerSubscriber', 'SimpleLoggerSubscriber', ({(14, 36, 14, 42): 'client'}, {}), '(client)', False, 'from lib.subscribers import SimpleLoggerSubscriber\n'), ((17, 16, 17, 48), 'tornado.ioloop.IOLoop.instance', 'tornado.ioloop.IOLoop.instance', ({}, {}), '()', False, 'import tornado\n')] |
dougsc/gp | engine/tree.py | d144dd1f483150b26483077e6e5032f4f21a6d4e | import random
from pprint import pformat
from copy import deepcopy
from utils.logger import GP_Logger
from terminal_set import TerminalSet
class Tree:
@classmethod
def log(cls):
return GP_Logger.logger(cls.__name__)
def __init__(self):
self.terminal_set=None
self.function_set=None
self.function_bias=None
self.max_depth=None
self.tree = None
def clone(self, clone_tree):
        assert clone_tree.tree is not None, 'trying to clone from an uninitialized tree'
        self.terminal_set = clone_tree.terminal_set
        self.function_set = clone_tree.function_set
        self.function_bias = clone_tree.function_bias
        self.max_depth = clone_tree.max_depth
        self.tree = deepcopy(clone_tree.tree)
    def mutate(self, clone_tree):
        self.clone(clone_tree)
        mutation_node = random.choice(self.get_node_list())
        self.log().debug('mutating at node %s - current depth: %d' % (mutation_node['node']['name'], mutation_node['depth']))
        self._create_new_node(mutation_node['depth'], mutation_node)
        self.log().debug('node mutated to %s' % (mutation_node['node']['name']))
        self._add_layer(mutation_node)
    def subtree_crossover(self, clone_tree, other_tree):
        self.clone(clone_tree)
        this_crossover_node = random.choice(self.get_node_list())
        other_crossover_node = random.choice(other_tree.get_node_list())
        self.log().debug('x-over node 1: %s (depth: %d), node 2: %s (depth: %d)' % (this_crossover_node['node']['name'],
                                                                                    this_crossover_node['depth'],
                                                                                    other_crossover_node['node']['name'],
                                                                                    other_crossover_node['depth']))
        this_crossover_node['node'] = deepcopy(other_crossover_node['node'])
        this_crossover_node['lower_nodes'] = deepcopy(other_crossover_node['lower_nodes'])
        self.recalculate_depth(this_crossover_node['lower_nodes'], this_crossover_node['depth'] + 1)
    def create(self, terminal_set=[], function_set=[], function_bias=1, max_depth=3):
        self.terminal_set = terminal_set
        self.function_set = function_set
        self.function_bias = function_bias
        self.max_depth = max_depth
        self.tree = {}
        self._create_new_node(1, self.tree)
        self._add_layer(current_node=self.tree)
    def _create_new_node(self, depth, node):
        node_set = []
        if depth == 1:
            node_set = self.function_set
        elif depth >= self.max_depth:
            node_set = self.terminal_set
        else:
            node_set = self.function_set * self.function_bias + self.terminal_set
        chosen_node = random.choice(node_set)
        if 'name' not in chosen_node:
            # this needs converting to a named node
            value = chosen_node['function'](*chosen_node['args'])
            chosen_node = TerminalSet.terminal_value(value)
        node['node'] = chosen_node
        node['lower_nodes'] = []
        node['depth'] = depth
    def _add_layer(self, current_node):
        new_node_count = current_node['node'].get('arity', 0)
        self.log().debug('adding %d nodes below %s - current depth = %d' % (new_node_count, current_node['node']['name'], current_node['depth']))
        for i in range(new_node_count):
            new_node = {}
            self._create_new_node(current_node['depth'] + 1, new_node)
            current_node['lower_nodes'].append(new_node)
        map(lambda x: self._add_layer(x), current_node['lower_nodes'])
    def dump(self):
        print 'Tree: \n%s' % pformat(self.tree)
    def _dump_structure(self, from_nodes, to_nodes):
        for from_node in from_nodes:
            new_node = {'name': from_node['node']['name'], 'lower_nodes': []}
            to_nodes.append(new_node)
            self._dump_structure(from_node['lower_nodes'], new_node['lower_nodes'])
    def dump_structure(self):
        structure = {'name': self.tree['node']['name'], 'lower_nodes': []}
        self._dump_structure(self.tree['lower_nodes'], structure['lower_nodes'])
        return structure
    def execute_node(self, node, function_lookup, args=None):
        assert 'value' in node or 'function' in node, 'node does not have a function or value'
        value = None
        if 'value' in node:
            value = node['value']
        else:
            if args is None:
                args = node['args']
            if isinstance(node['function'], str):
                value = function_lookup.get_func(node['function'])(*args)
            else:
                value = node['function'](*args)
        return value
    def get_lower_node_value(self, function_lookup, lower_node):
        if lower_node['node']['node_type'] == 'terminal':
            return self.execute_node(lower_node['node'], function_lookup)
        else:
            result_list = map(lambda x: self.get_lower_node_value(function_lookup, x), lower_node['lower_nodes'])
            return self.execute_node(lower_node['node'], function_lookup, result_list)
    def execute(self, function_lookup):
        result_list = map(lambda x: self.get_lower_node_value(function_lookup, x), self.tree['lower_nodes'])
        return self.execute_node(self.tree['node'], function_lookup, result_list)
    def iterate_tree(self, nodes, callback):
        for node in nodes:
            callback(node)
            self.iterate_tree(node['lower_nodes'], callback)
    def recalculate_depth(self, nodes, depth):
        for node in nodes:
            node['depth'] = depth
            self.recalculate_depth(node['lower_nodes'], depth + 1)
    def _get_node_list(self, nodes, node_list):
        for node in nodes:
            node_list.append(node)
            self._get_node_list(node['lower_nodes'], node_list)
    def get_node_list(self):
        node_list = []
        self._get_node_list(self.tree['lower_nodes'], node_list)
        return node_list
    def _simplify(self, node, function_lookup):
        if len(node['lower_nodes']) == 0:
            return
        terminal_value_count = len(filter(lambda x: TerminalSet.is_terminal_value(x['node']), node['lower_nodes']))
        if node['node']['arity'] == terminal_value_count:
            value = self.execute_node(node, function_lookup, args=map(lambda x: x['node']['value'], node['lower_nodes']))
            self.log().debug('Replacing existing node: %s' % pformat(node['node']))
            node['lower_nodes'] = []
            node['node'] = TerminalSet.terminal_value(value)
            self.log().debug(' -- with node: %s' % pformat(node['node']))
            self.is_simplified = False
        else:
            map(lambda x: self._simplify(x, function_lookup), node['lower_nodes'])
    def simplify(self, function_lookup):
        self.is_simplified = False
        simplify_loop_count = 1
        while not self.is_simplified:
            self.log().debug('Simplification %d' % (simplify_loop_count))
            self.is_simplified = True
            self._simplify(self.tree, function_lookup)
            simplify_loop_count += 1
| [] |
wilsonGmn/pyrin | src/pyrin/packaging/__init__.py | 25dbe3ce17e80a43eee7cfc7140b4c268a6948e0 | # -*- coding: utf-8 -*-
"""
packaging package.
"""
from pyrin.packaging.base import Package
class PackagingPackage(Package):
"""
packaging package class.
"""
NAME = __name__
COMPONENT_NAME = 'packaging.component'
CONFIG_STORE_NAMES = ['packaging']
| [] |
marble-git/python-laoqi | chap7/heapq_merge.py | 74c4bb5459113e54ce64443e5da5a9c6a3052d6a | #coding:utf-8
'''
filename:heapq_merge.py
chap:7
subject:4-2
conditions:heapq.merge,sorted_list:lst1,lst2
lst3=merged_list(lst1,lst2) is sorted
solution:heapq.merge
'''
import heapq
lst1 = [1,3,5,7,9]
lst2 = [2,4,6,8]
if __name__ == '__main__':
lst3 = heapq.merge(lst1,lst2)
print('lst3',lst3)
print(list(lst3))
| [((22, 11, 22, 33), 'heapq.merge', 'heapq.merge', ({(22, 23, 22, 27): 'lst1', (22, 28, 22, 32): 'lst2'}, {}), '(lst1, lst2)', False, 'import heapq\n')] |
kustodian/google-cloud-sdk | lib/googlecloudsdk/third_party/apis/serviceuser/v1/serviceuser_v1_client.py | b6bae4137d4b58030adb3dcb1271216dfb19f96d | """Generated client library for serviceuser version v1."""
# NOTE: This file is autogenerated and should not be edited by hand.
from apitools.base.py import base_api
from googlecloudsdk.third_party.apis.serviceuser.v1 import serviceuser_v1_messages as messages
class ServiceuserV1(base_api.BaseApiClient):
"""Generated client library for service serviceuser version v1."""
MESSAGES_MODULE = messages
BASE_URL = u'https://serviceuser.googleapis.com/'
_PACKAGE = u'serviceuser'
_SCOPES = [u'https://www.googleapis.com/auth/cloud-platform', u'https://www.googleapis.com/auth/cloud-platform.read-only', u'https://www.googleapis.com/auth/service.management']
_VERSION = u'v1'
_CLIENT_ID = '1042881264118.apps.googleusercontent.com'
_CLIENT_SECRET = 'x_Tw5K8nnjoRAqULM9PFAC2b'
_USER_AGENT = 'x_Tw5K8nnjoRAqULM9PFAC2b'
_CLIENT_CLASS_NAME = u'ServiceuserV1'
_URL_VERSION = u'v1'
_API_KEY = None
def __init__(self, url='', credentials=None,
get_credentials=True, http=None, model=None,
log_request=False, log_response=False,
credentials_args=None, default_global_params=None,
additional_http_headers=None, response_encoding=None):
"""Create a new serviceuser handle."""
url = url or self.BASE_URL
super(ServiceuserV1, self).__init__(
url, credentials=credentials,
get_credentials=get_credentials, http=http, model=model,
log_request=log_request, log_response=log_response,
credentials_args=credentials_args,
default_global_params=default_global_params,
additional_http_headers=additional_http_headers,
response_encoding=response_encoding)
self.projects_services = self.ProjectsServicesService(self)
self.projects = self.ProjectsService(self)
self.services = self.ServicesService(self)
class ProjectsServicesService(base_api.BaseApiService):
"""Service class for the projects_services resource."""
_NAME = u'projects_services'
def __init__(self, client):
super(ServiceuserV1.ProjectsServicesService, self).__init__(client)
self._upload_configs = {
}
def Disable(self, request, global_params=None):
r"""Disable a service so it can no longer be used with a.
project. This prevents unintended usage that may cause unexpected billing
charges or security leaks.
Operation<response: google.protobuf.Empty>
Args:
request: (ServiceuserProjectsServicesDisableRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Disable')
return self._RunMethod(
config, request, global_params=global_params)
Disable.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'serviceuser.projects.services.disable',
ordered_params=[u'projectsId', u'servicesId'],
path_params=[u'projectsId', u'servicesId'],
query_params=[],
relative_path=u'v1/projects/{projectsId}/services/{servicesId}:disable',
request_field=u'disableServiceRequest',
request_type_name=u'ServiceuserProjectsServicesDisableRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Enable(self, request, global_params=None):
r"""Enable a service so it can be used with a project.
See [Cloud Auth Guide](https://cloud.google.com/docs/authentication) for
more information.
Operation<response: google.protobuf.Empty>
Args:
request: (ServiceuserProjectsServicesEnableRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Enable')
return self._RunMethod(
config, request, global_params=global_params)
Enable.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'serviceuser.projects.services.enable',
ordered_params=[u'projectsId', u'servicesId'],
path_params=[u'projectsId', u'servicesId'],
query_params=[],
relative_path=u'v1/projects/{projectsId}/services/{servicesId}:enable',
request_field=u'enableServiceRequest',
request_type_name=u'ServiceuserProjectsServicesEnableRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
r"""List enabled services for the specified consumer.
Args:
request: (ServiceuserProjectsServicesListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ListEnabledServicesResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'serviceuser.projects.services.list',
ordered_params=[u'projectsId'],
path_params=[u'projectsId'],
query_params=[u'pageSize', u'pageToken'],
relative_path=u'v1/projects/{projectsId}/services',
request_field='',
request_type_name=u'ServiceuserProjectsServicesListRequest',
response_type_name=u'ListEnabledServicesResponse',
supports_download=False,
)
class ProjectsService(base_api.BaseApiService):
"""Service class for the projects resource."""
_NAME = u'projects'
def __init__(self, client):
super(ServiceuserV1.ProjectsService, self).__init__(client)
self._upload_configs = {
}
class ServicesService(base_api.BaseApiService):
"""Service class for the services resource."""
_NAME = u'services'
def __init__(self, client):
super(ServiceuserV1.ServicesService, self).__init__(client)
self._upload_configs = {
}
def Search(self, request, global_params=None):
r"""Search available services.
When no filter is specified, returns all accessible services. For
authenticated users, also returns all services the calling user has
"servicemanagement.services.bind" permission for.
Args:
request: (ServiceuserServicesSearchRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(SearchServicesResponse) The response message.
"""
config = self.GetMethodConfig('Search')
return self._RunMethod(
config, request, global_params=global_params)
Search.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'serviceuser.services.search',
ordered_params=[],
path_params=[],
query_params=[u'pageSize', u'pageToken'],
relative_path=u'v1/services:search',
request_field='',
request_type_name=u'ServiceuserServicesSearchRequest',
response_type_name=u'SearchServicesResponse',
supports_download=False,
)
| [((69, 36, 80, 5), 'apitools.base.py.base_api.ApiMethodInfo', 'base_api.ApiMethodInfo', (), '', False, 'from apitools.base.py import base_api\n'), ((99, 35, 110, 5), 'apitools.base.py.base_api.ApiMethodInfo', 'base_api.ApiMethodInfo', (), '', False, 'from apitools.base.py import base_api\n'), ((125, 33, 136, 5), 'apitools.base.py.base_api.ApiMethodInfo', 'base_api.ApiMethodInfo', (), '', False, 'from apitools.base.py import base_api\n'), ((175, 35, 186, 5), 'apitools.base.py.base_api.ApiMethodInfo', 'base_api.ApiMethodInfo', (), '', False, 'from apitools.base.py import base_api\n')] |
lithathampan/wav2letter | bindings/python/examples/feature_example.py | 8abf8431d99da147cc4aefc289ad33626e13de6f | #!/usr/bin/env python3
# adapted from wav2letter/src/feature/test/MfccTest.cpp
import itertools as it
import os
import sys
from wav2letter.feature import FeatureParams, Mfcc
def load_data(filename):
path = os.path.join(data_path, filename)
path = os.path.abspath(path)
with open(path) as f:
return [float(x) for x in it.chain.from_iterable(line.split() for line in f)]
if __name__ == "__main__":
if len(sys.argv) != 2:
print(f"usage: {sys.argv[0]} feature_test_data_path", file=sys.stderr)
print(" (usually: <wav2letter_root>/src/feature/test/data)", file=sys.stderr)
sys.exit(1)
data_path = sys.argv[1]
wavinput = load_data("sa1.dat")
# golden features to compare
htkfeatures = load_data("sa1-mfcc.htk")
assert len(wavinput) > 0
assert len(htkfeatures) > 0
params = FeatureParams()
# define parameters of the featurization
params.sampling_freq = 16000
params.low_freq_filterbank = 0
params.high_freq_filterbank = 8000
params.num_filterbank_chans = 20
params.num_cepstral_coeffs = 13
params.use_energy = False
params.zero_mean_frame = False
params.use_power = False
# apply MFCC featurization
mfcc = Mfcc(params)
features = mfcc.apply(wavinput)
    # check that obtained features are the same as the golden ones
assert len(features) == len(htkfeatures)
assert len(features) % 39 == 0
numframes = len(features) // 39
featurescopy = features.copy()
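    # reorder each 13-coefficient group of every 39-dim frame from
    # [c0, c1, ..., c12] to [c1, ..., c12, c0] so the layout matches the
    # HTK-generated reference features compared against below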
for f in range(numframes):
for i in range(1, 39):
features[f * 39 + i - 1] = features[f * 39 + i]
features[f * 39 + 12] = featurescopy[f * 39 + 0]
features[f * 39 + 25] = featurescopy[f * 39 + 13]
features[f * 39 + 38] = featurescopy[f * 39 + 26]
differences = [abs(x[0] - x[1]) for x in zip(features, htkfeatures)]
print(f"max_diff={max(differences)}")
print(f"avg_diff={sum(differences)/len(differences)}")
| [((13, 11, 13, 44), 'os.path.join', 'os.path.join', ({(13, 24, 13, 33): 'data_path', (13, 35, 13, 43): 'filename'}, {}), '(data_path, filename)', False, 'import os\n'), ((14, 11, 14, 32), 'os.path.abspath', 'os.path.abspath', ({(14, 27, 14, 31): 'path'}, {}), '(path)', False, 'import os\n'), ((34, 13, 34, 28), 'wav2letter.feature.FeatureParams', 'FeatureParams', ({}, {}), '()', False, 'from wav2letter.feature import FeatureParams, Mfcc\n'), ((46, 11, 46, 23), 'wav2letter.feature.Mfcc', 'Mfcc', ({(46, 16, 46, 22): 'params'}, {}), '(params)', False, 'from wav2letter.feature import FeatureParams, Mfcc\n'), ((23, 8, 23, 19), 'sys.exit', 'sys.exit', ({(23, 17, 23, 18): '(1)'}, {}), '(1)', False, 'import sys\n')] |
shreyashack/PY_Message_Decryption | app.py | 251a82ee26c529ff63668328230c9d494f4c9cfa | from tkinter import *
import onetimepad
class Message_Decrypt:
def __init__(self,root):
self.root=root
self.root.title("Message Decryption")
self.root.geometry("400x475")
self.root.iconbitmap("logo368.ico")
self.root.resizable(0,0)
def on_enter1(e):
but_decrypt['background']="black"
but_decrypt['foreground']="cyan"
def on_leave1(e):
but_decrypt['background']="SystemButtonFace"
but_decrypt['foreground']="SystemButtonText"
def on_enter2(e):
but_clear['background']="black"
but_clear['foreground']="cyan"
def on_leave2(e):
but_clear['background']="SystemButtonFace"
but_clear['foreground']="SystemButtonText"
def clear():
text_decrypt.delete('1.0',"end")
text_decrypt_output.delete('1.0',"end")
def decrypt():
try:
s=text_decrypt.get('1.0','end')
b=s.strip()
x=onetimepad.decrypt(b,'random')
text_decrypt_output.insert('end',x)
except Exception as e:
print(e)
#===========frame==================================#
mainframe=Frame(self.root,width=400,height=475,relief="ridge",bd=4)
mainframe.place(x=0,y=0)
firstframe=Frame(mainframe,width=393,height=207,relief="ridge",bd=4)
firstframe.place(x=0,y=0)
secondframe=Frame(mainframe,width=393,height=207,relief="ridge",bd=4)
secondframe.place(x=0,y=207)
thirdframe=Frame(mainframe,width=393,height=52,relief="ridge",bd=4,bg="gray77")
thirdframe.place(x=0,y=415)
#===================firstframe==============================#
scol=Scrollbar(firstframe,orient="vertical")
scol.place(relx=1, rely=0, relheight=1, anchor='ne')
text_decrypt=Text(firstframe,height=10,width=45,font=('times new roman',12),yscrollcommand=scol.set,relief="sunken",bd=3,fg="black")
text_decrypt.place(x=0,y=0)
scol.config(command=text_decrypt.yview)
#====================secondframe============================#
scol=Scrollbar(secondframe,orient="vertical")
scol.place(relx=1, rely=0, relheight=1, anchor='ne')
text_decrypt_output=Text(secondframe,height=10,width=45,font=('times new roman',12),yscrollcommand=scol.set,relief="sunken",bd=3,fg="black")
text_decrypt_output.place(x=0,y=0)
scol.config(command=text_decrypt_output.yview)
#==================third====================================#
but_decrypt=Button(thirdframe,text="Decrypt",width=13,font=('times new roman',14),cursor="hand2",command=decrypt)
but_decrypt.place(x=20,y=3)
but_decrypt.bind("<Enter>",on_enter1)
but_decrypt.bind("<Leave>",on_leave1)
but_clear=Button(thirdframe,text="Clear",width=13,font=('times new roman',14),cursor="hand2",command=clear)
but_clear.place(x=235,y=3)
but_clear.bind("<Enter>",on_enter2)
but_clear.bind("<Leave>",on_leave2)
if __name__ == "__main__":
root=Tk()
Message_Decrypt(root)
root.mainloop()
| [((39, 18, 39, 48), 'onetimepad.decrypt', 'onetimepad.decrypt', ({(39, 37, 39, 38): 'b', (39, 39, 39, 47): '"""random"""'}, {}), "(b, 'random')", False, 'import onetimepad\n')] |
abcamiletto/urdf2optcontrol | examples/rrbot_p2p_low_energy.py | 39b3f761a4685cc7d50b48793b6b2906c89b1694 | #!/usr/bin/env python3
from urdf2optcontrol import optimizer
from matplotlib import pyplot as plt
import pathlib
# URDF options
urdf_path = pathlib.Path(__file__).parent.joinpath('urdf', 'rrbot.urdf').absolute()
root = "link1"
end = "link3"
in_cond = [0] * 4
def my_cost_func(q, qd, qdd, ee_pos, u, t):
return u.T @ u
def my_constraint1(q, qd, qdd, ee_pos, u, t):
return [-30, -30], u, [30, 30]
def my_constraint2(q, qd, qdd, ee_pos, u, t):
return [-4, -4], qd, [4, 4]
my_constraints = [my_constraint1, my_constraint2]
def my_final_constraint1(q, qd, qdd, ee_pos, u):
return [3.14 / 2, 0], q, [3.14 / 2, 0]
def my_final_constraint2(q, qd, qdd, ee_pos, u):
return [0, 0], qd, [0, 0]
my_final_constraints = [my_final_constraint1, my_final_constraint2]
time_horizon = 2.0
steps = 40
# Load the urdf and calculate the differential equations
optimizer.load_robot(urdf_path, root, end)
# Loading the problem conditions
optimizer.load_problem(
my_cost_func,
steps,
in_cond,
time_horizon=time_horizon,
constraints=my_constraints,
final_constraints=my_final_constraints,
max_iter=500
)
# Solving the non linear problem
res = optimizer.solve()
print('u = ', res['u'][0])
print('q = ', res['q'][0])
# Print the results!
fig = optimizer.plot_result(show=True)
| [((42, 0, 42, 42), 'urdf2optcontrol.optimizer.load_robot', 'optimizer.load_robot', ({(42, 21, 42, 30): 'urdf_path', (42, 32, 42, 36): 'root', (42, 38, 42, 41): 'end'}, {}), '(urdf_path, root, end)', False, 'from urdf2optcontrol import optimizer\n'), ((45, 0, 53, 1), 'urdf2optcontrol.optimizer.load_problem', 'optimizer.load_problem', (), '', False, 'from urdf2optcontrol import optimizer\n'), ((56, 6, 56, 23), 'urdf2optcontrol.optimizer.solve', 'optimizer.solve', ({}, {}), '()', False, 'from urdf2optcontrol import optimizer\n'), ((61, 6, 61, 38), 'urdf2optcontrol.optimizer.plot_result', 'optimizer.plot_result', (), '', False, 'from urdf2optcontrol import optimizer\n'), ((7, 12, 7, 34), 'pathlib.Path', 'pathlib.Path', ({(7, 25, 7, 33): '__file__'}, {}), '(__file__)', False, 'import pathlib\n')] |
fqc/SocketSample_Mina_Socket | SocketServer/apps/django-db-pool-master/dbpool/db/backends/postgresql_psycopg2/base.py | f5a7bb9bcd6052fe9e2a419c877073b32be4dc3d | """
Pooled PostgreSQL database backend for Django.
Requires psycopg 2: http://initd.org/projects/psycopg2
"""
from django import get_version as get_django_version
from django.db.backends.postgresql_psycopg2.base import \
DatabaseWrapper as OriginalDatabaseWrapper
from django.db.backends.signals import connection_created
from threading import Lock
import logging
import sys
try:
import psycopg2 as Database
import psycopg2.extensions
except ImportError, e:
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured("Error loading psycopg2 module: %s" % e)
logger = logging.getLogger(__name__)
class PooledConnection():
'''
Thin wrapper around a psycopg2 connection to handle connection pooling.
'''
def __init__(self, pool, test_query=None):
self._pool = pool
# If passed a test query we'll run it to ensure the connection is available
if test_query:
self._wrapped_connection = None
num_attempts = 0
while self._wrapped_connection is None:
                num_attempts += 1
c = pool.getconn()
try:
c.cursor().execute(test_query)
except Database.Error:
pool.putconn(c, close=True)
if num_attempts > self._pool.maxconn:
logger.error("Unable to check out connection from pool %s" % self._pool)
                        raise
else:
logger.info("Closing dead connection from pool %s" % self._pool,
exc_info=sys.exc_info())
else:
if not c.autocommit:
c.rollback()
self._wrapped_connection = c
else:
self._wrapped_connection = pool.getconn()
logger.debug("Checked out connection %s from pool %s" % (self._wrapped_connection, self._pool))
def close(self):
'''
Override to return the connection to the pool rather than closing it.
'''
if self._wrapped_connection and self._pool:
logger.debug("Returning connection %s to pool %s" % (self._wrapped_connection, self._pool))
self._pool.putconn(self._wrapped_connection)
self._wrapped_connection = None
def __getattr__(self, attr):
'''
All other calls proxy through to the "real" connection
'''
return getattr(self._wrapped_connection, attr)
'''
This holds our connection pool instances (for each alias in settings.DATABASES that
uses our PooledDatabaseWrapper.)
'''
connection_pools = {}
connection_pools_lock = Lock()
pool_config_defaults = {
'MIN_CONNS': None,
'MAX_CONNS': 1,
'TEST_ON_BORROW': False,
'TEST_ON_BORROW_QUERY': 'SELECT 1'
}
def _set_up_pool_config(self):
'''
Helper to configure pool options during DatabaseWrapper initialization.
'''
self._max_conns = self.settings_dict['OPTIONS'].get('MAX_CONNS', pool_config_defaults['MAX_CONNS'])
self._min_conns = self.settings_dict['OPTIONS'].get('MIN_CONNS', self._max_conns)
self._test_on_borrow = self.settings_dict["OPTIONS"].get('TEST_ON_BORROW',
pool_config_defaults['TEST_ON_BORROW'])
if self._test_on_borrow:
self._test_on_borrow_query = self.settings_dict["OPTIONS"].get('TEST_ON_BORROW_QUERY',
pool_config_defaults['TEST_ON_BORROW_QUERY'])
else:
self._test_on_borrow_query = None
def _create_connection_pool(self, conn_params):
'''
Helper to initialize the connection pool.
'''
connection_pools_lock.acquire()
try:
# One more read to prevent a read/write race condition (We do this
# here to avoid the overhead of locking each time we get a connection.)
if (self.alias not in connection_pools or
connection_pools[self.alias]['settings'] != self.settings_dict):
logger.info("Creating connection pool for db alias %s" % self.alias)
logger.info(" using MIN_CONNS = %s, MAX_CONNS = %s, TEST_ON_BORROW = %s" % (self._min_conns,
self._max_conns,
self._test_on_borrow))
from psycopg2 import pool
connection_pools[self.alias] = {
'pool': pool.ThreadedConnectionPool(self._min_conns, self._max_conns, **conn_params),
'settings': dict(self.settings_dict),
}
finally:
connection_pools_lock.release()
'''
Simple Postgres pooled connection that uses psycopg2's built-in ThreadedConnectionPool
implementation. In Django, use this by specifying MAX_CONNS and (optionally) MIN_CONNS
in the OPTIONS dictionary for the given db entry in settings.DATABASES.
MAX_CONNS should be equal to the maximum number of threads your app server is configured
for. For example, if you are running Gunicorn or Apache/mod_wsgi (in a multiple *process*
configuration) MAX_CONNS should be set to 1, since you'll have a dedicated python
interpreter per process/worker. If you're running Apache/mod_wsgi in a multiple *thread*
configuration set MAX_CONNS to the number of threads you have configured for each process.
By default MIN_CONNS will be set to MAX_CONNS, which prevents connections from being closed.
If your load is spiky and you want to recycle connections, set MIN_CONNS to something lower
than MAX_CONNS. I suggest it should be no lower than your 95th percentile concurrency for
your app server.
If you wish to validate connections on each check out, specify TEST_ON_BORROW (set to True)
in the OPTIONS dictionary for the given db entry. You can also provide an optional
TEST_ON_BORROW_QUERY, which is "SELECT 1" by default.
'''
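# A minimal illustration (not part of the original module): a settings.DATABASES
# entry for this backend might look like the following, assuming the dbpool
# package is importable and the app server runs 4 threads per worker process.
#
# DATABASES = {
#     'default': {
#         'ENGINE': 'dbpool.db.backends.postgresql_psycopg2',
#         'NAME': 'mydb',
#         'USER': 'myuser',
#         'PASSWORD': 'secret',
#         'HOST': 'localhost',
#         'PORT': '5432',
#         'OPTIONS': {
#             'MAX_CONNS': 4,                      # threads per worker process
#             'MIN_CONNS': 2,                      # let idle connections close
#             'TEST_ON_BORROW': True,
#             'TEST_ON_BORROW_QUERY': 'SELECT 1',
#         },
#     },
# }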
class DatabaseWrapper16(OriginalDatabaseWrapper):
'''
For Django 1.6.x
TODO: See https://github.com/django/django/commit/1893467784deb6cd8a493997e8bac933cc2e4af9
but more importantly https://github.com/django/django/commit/2ee21d9f0d9eaed0494f3b9cd4b5bc9beffffae5
    This code may no longer be needed!
'''
set_up_pool_config = _set_up_pool_config
create_connection_pool = _create_connection_pool
def __init__(self, *args, **kwargs):
super(DatabaseWrapper16, self).__init__(*args, **kwargs)
self.set_up_pool_config()
def get_new_connection(self, conn_params):
# Is this the initial use of the global connection_pools dictionary for
# this python interpreter? Build a ThreadedConnectionPool instance and
# add it to the dictionary if so.
if self.alias not in connection_pools or connection_pools[self.alias]['settings'] != self.settings_dict:
for extra in pool_config_defaults.keys():
if extra in conn_params:
del conn_params[extra]
self.create_connection_pool(conn_params)
return PooledConnection(connection_pools[self.alias]['pool'], test_query=self._test_on_borrow_query)
class DatabaseWrapper14and15(OriginalDatabaseWrapper):
'''
For Django 1.4.x and 1.5.x
'''
set_up_pool_config = _set_up_pool_config
create_connection_pool = _create_connection_pool
def __init__(self, *args, **kwargs):
super(DatabaseWrapper14and15, self).__init__(*args, **kwargs)
self.set_up_pool_config()
def _cursor(self):
settings_dict = self.settings_dict
if self.connection is None or connection_pools[self.alias]['settings'] != settings_dict:
# Is this the initial use of the global connection_pools dictionary for
# this python interpreter? Build a ThreadedConnectionPool instance and
# add it to the dictionary if so.
if self.alias not in connection_pools or connection_pools[self.alias]['settings'] != settings_dict:
if not settings_dict['NAME']:
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured(
"settings.DATABASES is improperly configured. "
"Please supply the NAME value.")
conn_params = {
'database': settings_dict['NAME'],
}
conn_params.update(settings_dict['OPTIONS'])
for extra in ['autocommit'] + pool_config_defaults.keys():
if extra in conn_params:
del conn_params[extra]
if settings_dict['USER']:
conn_params['user'] = settings_dict['USER']
if settings_dict['PASSWORD']:
conn_params['password'] = force_str(settings_dict['PASSWORD'])
if settings_dict['HOST']:
conn_params['host'] = settings_dict['HOST']
if settings_dict['PORT']:
conn_params['port'] = settings_dict['PORT']
self.create_connection_pool(conn_params)
self.connection = PooledConnection(connection_pools[self.alias]['pool'],
test_query=self._test_on_borrow_query)
self.connection.set_client_encoding('UTF8')
tz = 'UTC' if settings.USE_TZ else settings_dict.get('TIME_ZONE')
if tz:
try:
get_parameter_status = self.connection.get_parameter_status
except AttributeError:
# psycopg2 < 2.0.12 doesn't have get_parameter_status
conn_tz = None
else:
conn_tz = get_parameter_status('TimeZone')
if conn_tz != tz:
# Set the time zone in autocommit mode (see #17062)
self.connection.set_isolation_level(
psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
self.connection.cursor().execute(
self.ops.set_time_zone_sql(), [tz])
self.connection.set_isolation_level(self.isolation_level)
self._get_pg_version()
connection_created.send(sender=self.__class__, connection=self)
cursor = self.connection.cursor()
cursor.tzinfo_factory = utc_tzinfo_factory if settings.USE_TZ else None
return CursorWrapper(cursor)
class DatabaseWrapper13(OriginalDatabaseWrapper):
'''
For Django 1.3.x
'''
set_up_pool_config = _set_up_pool_config
create_connection_pool = _create_connection_pool
def __init__(self, *args, **kwargs):
super(DatabaseWrapper13, self).__init__(*args, **kwargs)
self.set_up_pool_config()
def _cursor(self):
'''
Override _cursor to plug in our connection pool code. We'll return a wrapped Connection
which can handle returning itself to the pool when its .close() method is called.
'''
from django.db.backends.postgresql.version import get_version
new_connection = False
set_tz = False
settings_dict = self.settings_dict
if self.connection is None or connection_pools[self.alias]['settings'] != settings_dict:
new_connection = True
set_tz = settings_dict.get('TIME_ZONE')
# Is this the initial use of the global connection_pools dictionary for
# this python interpreter? Build a ThreadedConnectionPool instance and
# add it to the dictionary if so.
if self.alias not in connection_pools or connection_pools[self.alias]['settings'] != settings_dict:
if settings_dict['NAME'] == '':
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured("You need to specify NAME in your Django settings file.")
conn_params = {
'database': settings_dict['NAME'],
}
conn_params.update(settings_dict['OPTIONS'])
for extra in ['autocommit'] + pool_config_defaults.keys():
if extra in conn_params:
del conn_params[extra]
if settings_dict['USER']:
conn_params['user'] = settings_dict['USER']
if settings_dict['PASSWORD']:
conn_params['password'] = settings_dict['PASSWORD']
if settings_dict['HOST']:
conn_params['host'] = settings_dict['HOST']
if settings_dict['PORT']:
conn_params['port'] = settings_dict['PORT']
self.create_connection_pool(conn_params)
self.connection = PooledConnection(connection_pools[self.alias]['pool'],
test_query=self._test_on_borrow_query)
self.connection.set_client_encoding('UTF8')
self.connection.set_isolation_level(self.isolation_level)
# We'll continue to emulate the old signal frequency in case any code depends upon it
connection_created.send(sender=self.__class__, connection=self)
cursor = self.connection.cursor()
cursor.tzinfo_factory = None
if new_connection:
if set_tz:
cursor.execute("SET TIME ZONE %s", [settings_dict['TIME_ZONE']])
if not hasattr(self, '_version'):
self.__class__._version = get_version(cursor)
if self._version[0:2] < (8, 0):
# No savepoint support for earlier version of PostgreSQL.
self.features.uses_savepoints = False
if self.features.uses_autocommit:
if self._version[0:2] < (8, 2):
# FIXME: Needs extra code to do reliable model insert
# handling, so we forbid it for now.
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured("You cannot use autocommit=True with PostgreSQL prior to 8.2 at the moment.")
else:
# FIXME: Eventually we're enable this by default for
# versions that support it, but, right now, that's hard to
# do without breaking other things (#10509).
self.features.can_return_id_from_insert = True
return CursorWrapper(cursor)
'''
Choose a version of the DatabaseWrapper class to use based on the Django
version. This is a bit hacky, what's a more elegant way?
'''
django_version = get_django_version()
if django_version.startswith('1.3'):
from django.db.backends.postgresql_psycopg2.base import CursorWrapper
class DatabaseWrapper(DatabaseWrapper13):
pass
elif django_version.startswith('1.4') or django_version.startswith('1.5'):
from django.conf import settings
from django.db.backends.postgresql_psycopg2.base import utc_tzinfo_factory, \
CursorWrapper
# The force_str call around the password seems to be the only change from
# 1.4 to 1.5, so we'll use the same DatabaseWrapper class and make
# force_str a no-op.
try:
from django.utils.encoding import force_str
except ImportError:
force_str = lambda x: x
class DatabaseWrapper(DatabaseWrapper14and15):
pass
elif django_version.startswith('1.6'):
class DatabaseWrapper(DatabaseWrapper16):
pass
else:
raise ImportError("Unsupported Django version %s" % django_version)
| [] |
abodacs/fastapi-ml-skeleton | backend/tests/test_api/test_api_auth.py | fa9a013d06e70cbaff9b9469db32246e41ce7e0f | # Skeleton
from fastapi_skeleton.core import messages
def test_auth_using_prediction_api_no_apikey_header(test_client) -> None:
response = test_client.post("/api/model/predict")
assert response.status_code == 400
assert response.json() == {"detail": messages.NO_API_KEY}
def test_auth_using_prediction_api_wrong_apikey_header(test_client) -> None:
response = test_client.post(
"/api/model/predict", json={"image": "test"}, headers={"token": "WRONG_TOKEN"}
)
assert response.status_code == 401
assert response.json() == {"detail": messages.AUTH_REQ}
| [] |
hashnfv/hashnfv-nfvbench | docker/cleanup_generators.py | 8da439b932537748d379c7bd3bdf560ef739b203 | # Copyright 2016 Cisco Systems, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import shutil
TREX_OPT = '/opt/trex'
TREX_UNUSED = [
'_t-rex-64-debug', '_t-rex-64-debug-o', 'bp-sim-64', 'bp-sim-64-debug',
't-rex-64-debug', 't-rex-64-debug-o', 'automation/__init__.py',
'automation/graph_template.html',
'automation/config', 'automation/h_avc.py', 'automation/phantom',
'automation/readme.txt', 'automation/regression', 'automation/report_template.html',
'automation/sshpass.exp', 'automation/trex_perf.py', 'wkhtmltopdf-amd64'
]
def remove_unused_libs(path, files):
"""
Remove files not used by traffic generator.
"""
for f in files:
f = os.path.join(path, f)
try:
if os.path.isdir(f):
shutil.rmtree(f)
else:
os.remove(f)
except OSError:
print "Skipped file:"
print f
continue
def get_dir_size(start_path='.'):
"""
Computes size of directory.
    :return: size of directory with subdirectories
"""
total_size = 0
for dirpath, dirnames, filenames in os.walk(start_path):
for f in filenames:
try:
fp = os.path.join(dirpath, f)
total_size += os.path.getsize(fp)
except OSError:
continue
return total_size
if __name__ == "__main__":
versions = os.listdir(TREX_OPT)
for version in versions:
trex_path = os.path.join(TREX_OPT, version)
print 'Cleaning TRex', version
try:
size_before = get_dir_size(trex_path)
remove_unused_libs(trex_path, TREX_UNUSED)
size_after = get_dir_size(trex_path)
print '==== Saved Space ===='
print size_before - size_after
except OSError:
import traceback
print traceback.print_exc()
print 'Cleanup was not finished.'
| [] |
ophirSarusi/TF_Object_Detection | object_detection/box_coders/mean_stddev_box_coder.py | e08ccd18c6f14586e048048a445cf5a10dbc7c4d | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Mean stddev box coder.
This box coder uses the following coding schema to encode boxes:
rel_code = (box_corner - anchor_corner_mean) / anchor_corner_stddev.
"""
from object_detection.core import box_coder
from object_detection.core import box_list
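# Worked example (not part of the original file): for a single anchor with
# corner means [0., 0., 1., 1.] and stddevs [0.1, 0.1, 0.2, 0.2], a box
# [0.1, 0.1, 1.1, 1.1] encodes to
#   ([0.1, 0.1, 1.1, 1.1] - [0., 0., 1., 1.]) / [0.1, 0.1, 0.2, 0.2]
#     = [1.0, 1.0, 0.5, 0.5],
# and _decode inverts this by multiplying by the stddevs and adding the means back.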
class MeanStddevBoxCoder(box_coder.BoxCoder):
"""Mean stddev box coder."""
@property
def code_size(self):
return 4
def _encode(self, boxes, anchors):
"""Encode a box collection with respect to anchor collection.
Args:
boxes: BoxList holding N boxes to be encoded.
anchors: BoxList of N anchors. We assume that anchors has an associated
stddev field.
Returns:
a tensor representing N anchor-encoded boxes
Raises:
ValueError: if the anchors BoxList does not have a stddev field
"""
if not anchors.has_field('stddev'):
raise ValueError('anchors must have a stddev field')
box_corners = boxes.get()
means = anchors.get()
stddev = anchors.get_field('stddev')
return (box_corners - means) / stddev
def _decode(self, rel_codes, anchors):
"""Decode.
Args:
rel_codes: a tensor representing N anchor-encoded boxes.
anchors: BoxList of anchors. We assume that anchors has an associated
stddev field.
Returns:
boxes: BoxList holding N bounding boxes
Raises:
ValueError: if the anchors BoxList does not have a stddev field
"""
if not anchors.has_field('stddev'):
raise ValueError('anchors must have a stddev field')
means = anchors.get()
stddevs = anchors.get_field('stddev')
box_corners = rel_codes * stddevs + means
return box_list.BoxList(box_corners)
| [((70, 11, 70, 40), 'object_detection.core.box_list.BoxList', 'box_list.BoxList', ({(70, 28, 70, 39): 'box_corners'}, {}), '(box_corners)', False, 'from object_detection.core import box_list\n')] |
nsortur/equi_rl | storage/aug_buffer.py | 83bd2ee9dfaab715e51b71ffff90ab990aaed5f8 | from storage.buffer import QLearningBuffer
from utils.torch_utils import ExpertTransition, augmentTransition
from utils.parameters import buffer_aug_type
class QLearningBufferAug(QLearningBuffer):
def __init__(self, size, aug_n=9):
super().__init__(size)
self.aug_n = aug_n
def add(self, transition: ExpertTransition):
super().add(transition)
for _ in range(self.aug_n):
super().add(augmentTransition(transition, buffer_aug_type))
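# Illustrative note (not part of the original file): with the default aug_n=9,
# each add() stores the original transition plus 9 augmented copies, so the
# buffer grows by 10 entries per transition added.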
| [((13, 24, 13, 70), 'utils.torch_utils.augmentTransition', 'augmentTransition', ({(13, 42, 13, 52): 'transition', (13, 54, 13, 69): 'buffer_aug_type'}, {}), '(transition, buffer_aug_type)', False, 'from utils.torch_utils import ExpertTransition, augmentTransition\n')] |
Chainso/HLRL | hlrl/torch/agents/wrappers/agent.py | 584f4ed2fa4d8b311a21dbd862ec9434833dd7cd | import torch
from typing import Any, Dict, List, OrderedDict, Tuple
from hlrl.core.agents import RLAgent
from hlrl.core.common.wrappers import MethodWrapper
class TorchRLAgent(MethodWrapper):
"""
A torch agent that wraps its experiences as torch tensors.
"""
def __init__(self,
agent: RLAgent,
batch_state: bool = True):
"""
Creates torch agent that can wrap experiences as tensors.
Args:
agent: The agent to wrap.
batch_state: If the state should be batched with a batch size of 1
when transformed.
"""
super().__init__(agent)
self.batch_state = batch_state
def make_tensor(self, data):
"""
        Creates a float tensor from the data, adding a batch dimension of size 1 when batch_state is set.
"""
if self.batch_state:
data = [data]
return torch.FloatTensor(data).to(self.algo.device)
def transform_state(self, state):
state_dict = self.om.transform_state(state)
state_dict["state"] = self.make_tensor(state_dict["state"])
return state_dict
def transform_reward(
self,
state: Any,
algo_step: OrderedDict[str, Any],
reward: Any,
terminal: Any,
next_state: Any
) -> Any:
"""
Creates a tensor from the reward.
Args:
state: The state of the environment.
algo_step: The transformed algorithm step of the state.
reward: The reward from the environment.
terminal: If the next state is a terminal state.
next_state: The new state of the environment.
Returns:
The reward as a tensor.
"""
reward = self.om.transform_reward(
state, algo_step, reward, terminal, next_state
)
if self.batch_state:
reward = [reward]
return self.make_tensor(reward)
def transform_terminal(self, terminal: Any, info: Any) -> Any:
"""
Transforms the terminal of an environment step.
Args:
terminal: The terminal value to transform.
info: Additional environment information for the step.
Returns:
The transformed terminal.
"""
terminal = self.om.transform_terminal(terminal, info)
if self.batch_state:
terminal = [terminal]
return self.make_tensor(terminal)
def transform_action(self, action):
return self.om.transform_action(action).squeeze().cpu().numpy()
def reward_to_float(self, reward: torch.Tensor) -> float:
"""
Converts the reward to a single float value.
Args:
reward: The reward to turn into a float.
Returns:
The float value of the reward tensor.
"""
reward = reward[0].detach().cpu()
reward = reward.item()
return reward
def create_batch(
self,
ready_experiences: Dict[str, List[Any]],
) -> Dict[str, torch.Tensor]:
"""
Creates a batch of experiences to be trained on from the ready
experiences.
Args:
ready_experiences: The experiences to be trained on.
Returns:
A dictionary of each field necessary for training.
"""
batch = {
key: torch.cat(ready_experiences[key]) for key in ready_experiences
}
return self.om.create_batch(batch)
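# Illustrative note (not part of the original class): with batch_state=True a
# stored reward tensor has shape (1, 1), so torch.cat over n such rewards
# yields an (n, 1) training batch per key before it is passed on through
# self.om.create_batch.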
| [((123, 17, 123, 50), 'torch.cat', 'torch.cat', ({(123, 27, 123, 49): 'ready_experiences[key]'}, {}), '(ready_experiences[key])', False, 'import torch\n'), ((34, 15, 34, 38), 'torch.FloatTensor', 'torch.FloatTensor', ({(34, 33, 34, 37): 'data'}, {}), '(data)', False, 'import torch\n')] |
NHOrus/PixivUtil2 | PixivConstant.py | facd6b1a21e4adf5edf1de4d4809e94e834246b6 | # -*- coding: utf-8 -*-
PIXIVUTIL_VERSION = '20191220-beta1'
PIXIVUTIL_LINK = 'https://github.com/Nandaka/PixivUtil2/releases'
PIXIVUTIL_DONATE = 'https://bit.ly/PixivUtilDonation'
# Log Settings
PIXIVUTIL_LOG_FILE = 'pixivutil.log'
PIXIVUTIL_LOG_SIZE = 10485760
PIXIVUTIL_LOG_COUNT = 10
PIXIVUTIL_LOG_FORMAT = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
# Download Results
PIXIVUTIL_NOT_OK = -1
PIXIVUTIL_OK = 0
PIXIVUTIL_SKIP_OLDER = 1
PIXIVUTIL_SKIP_BLACKLIST = 2
PIXIVUTIL_KEYBOARD_INTERRUPT = 3
PIXIVUTIL_SKIP_DUPLICATE = 4
PIXIVUTIL_SKIP_LOCAL_LARGER = 5
PIXIVUTIL_CHECK_DOWNLOAD = 6
PIXIVUTIL_ABORTED = 9999
BUFFER_SIZE = 8192
| [] |
Threemusketeerz/DSystems | dynamic_schemas/views.py | cd03ad2fa6b55872d57bfd01a4ac781aa5cbed8c | from django.http import Http404
from django.shortcuts import render, redirect, reverse
from django.views.generic import ListView
from django.contrib.auth.decorators import login_required
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.auth.models import User
from rest_framework import status
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework.renderers import TemplateHTMLRenderer
from .models import Schema, SchemaColumn, SchemaResponse, SchemaUrl
from .forms import SchemaResponseForm, ResponseUpdateForm
from .serializers import SchemaResponseSerializer
from .prepare_data import getcolumns
import pytz
class SchemaIndexView(LoginRequiredMixin, ListView):
# login_url = '/accounts/login.html/'
template_name = 'dynamic_schemas/index.html'
context_object_name = 'all_schemas'
def get_queryset(self):
return Schema.objects.all()
@login_required
def form_view(request, pk):
schema = Schema.objects.get(pk=pk)
urls = schema.help_field.all()
if request.method == 'POST':
form = SchemaResponseForm(schema, request.POST)
if form.is_valid():
instance = form.save(commit=False)
instance.user = request.user
instance.save()
return redirect(reverse('dynamic_schemas:schema_view',
kwargs={'pk': pk}))
else:
form = SchemaResponseForm(schema)
return render(request, f'dynamic_schemas/create-form.html', \
{
'form': form,
'schema': schema,
'help_urls': urls,
})
@login_required
def form_update_view(request, pk, r_pk):
schema = Schema.objects.get(pk=pk)
instance = SchemaResponse.objects.get(schema=schema, pk=r_pk)
columns = SchemaColumn.objects.filter(schema=schema)
###################################################
# This little snippet checks if the responses can be edited. If they can
# the submit button will be provided. There is no restriction on
# has_been_edited, but since the data cant be saved we're good for now.
load_button = False
aggr_editables = [c.is_editable_once for c in columns]
if True in aggr_editables:
load_button = True
###################################################
form = ResponseUpdateForm(instance, pk)
if request.method == 'POST':
form = ResponseUpdateForm(instance, pk, request.POST or None)
if form.is_valid():
form.update()
return redirect(reverse('dynamic_schemas:schema_view',
kwargs={'pk': pk}))
return render(request, f'dynamic_schemas/update-form.html',
{'form_update': form,
'load_button': load_button}
)
""" API Views """
class MakeDataPrettyMixin:
def _make_date_tz(self, instance=None, tz=None):
""" Takes an instance, and sets its timezone.
TODO:
Should this be a classmethod? Will a classmethod complicate the
view in its context?
"""
# Can this be moved to SETTINGS instead? Same for _make_date_readable.
# Problem is probably that the UTC format gets overridden.
        if instance:
            if tz:
                tz = pytz.timezone(tz)
                return instance.pub_date.astimezone(tz)
            # fall back to the unconverted timestamp so callers can still format it
            return instance.pub_date
def _make_date_readable(self, instances):
"""
Helper function to change the dates to a format pleasing to the
eyes, takes a bundle of instances and converts their time.
How extensible do we want this?
Function is kept private for now, since in Denmark the timezone is CET.
"""
for instance in instances:
inst_as_cet = self._make_date_tz(
instance=instance
# tz='Europe/Copenhagen'
)
instance.pub_date = inst_as_cet \
.strftime('%d-%m/%Y %H:%M:%S')
return instances
def _make_user_readable(self, serializer):
""" Helper to return the correct attributes to the front-end
"""
for data in serializer.data:
# import ipdb; ipdb.set_trace()
user = data['user']
instance = User.objects.get(id=user)
            user = instance.first_name + ' ' + instance.last_name
if instance.first_name == '':
user = instance.username
data['user'] = user
# __import__('ipdb').set_trace()
# import ipdb; ipdb.set_trace()
return serializer
    def _make_instruction_links_readable(self, serializer):
for data in serializer.data:
instr = data['instruction']
instance = SchemaUrl.objects.get(id=instr)
instr = '<a href="'+ instance.url +'">'+ instance.name +'</a>'
data['instruction'] = instr
return serializer
class ResponseList(MakeDataPrettyMixin, APIView):
"""
Lists responses according to schema.
    Purely for APIView for now. Not being used in the actual rendering of the
tables.
"""
default_order = [
('desc', '-'),
('asc', ''),
]
def get_orderprefix(self, order):
for tup in self.default_order:
if order in tup:
return tup[1]
def get(self, request, pk, format=None, *args):
req = request.GET
# Amount of data to fetch each pull
start = int(req.get('start', 0))
length = int(req.get('length', 30))
        end = start + length
order = req.get('order[0][dir]')
order_column = req.get('order[0][column]')
order_by_pre = self.get_orderprefix(order)
order_column_name = req.get('columns['+order_column+'][data]')
# __import__('ipdb').set_trace()
order_str = order_by_pre + order_column_name
draw = req.get('draw')
        # TODO Gonna require some thinking. Also need to use recordsFiltered.
# search = req.get('search[value]')
schema = Schema.objects.get(pk=pk)
responses_count = SchemaResponse.objects.filter(schema=schema).count()
responses = SchemaResponse \
.objects \
.filter(schema=schema) \
.order_by(order_str)[start:end]
# __import__('ipdb').set_trace()
responses = self._make_date_readable(responses)
serializer = SchemaResponseSerializer(responses, many=True)
serializer = self._make_user_readable(serializer)
        serializer = self._make_instruction_links_readable(serializer)
return_data = {
'draw': int(draw),
'recordsTotal': responses_count,
'recordsFiltered': responses_count,
'data': serializer.data,
}
# __import__('ipdb').set_trace()
return Response(return_data)
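# Illustrative note (not part of the original file): ResponseList.get consumes
# DataTables-style server-side parameters, e.g. a query string such as
#   ?draw=1&start=0&length=30&order[0][column]=2&order[0][dir]=desc&columns[2][data]=pub_date
# (hypothetical column name) would produce order_str == '-pub_date' above.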
class ResponseColumns(APIView):
def get(self, request, pk, format=None, *args):
req = request.GET
schema = Schema.objects.get(pk=pk)
sr = SchemaResponse.objects.filter(schema=schema).first()
columns = getcolumns(sr).getvalue()
return Response(columns)
class SchemaView(LoginRequiredMixin, APIView):
"""
    Fetches the FIRST object from ResponseList. Makes it available
    as a template for the table in main.html.
Excludes schema.id, and the placeholder qa_set in the template.
"""
renderer_classes = [TemplateHTMLRenderer]
template_name = 'dynamic_schemas/table_dev.html'
def get_object(self, pk):
try:
schema = Schema.objects.get(pk=pk)
if SchemaColumn.objects.filter(schema=schema).count() != 0:
all_responses = SchemaResponse.objects.filter(schema=schema)
single_response = all_responses.first()
serializer = SchemaResponseSerializer(single_response)
return serializer.data
        except (Schema.DoesNotExist, SchemaResponse.DoesNotExist):
raise Http404
def get(self, request, pk):
schema = Schema.objects.get(pk=pk)
schema_help_urls = schema.help_field.all()
schema_obsolete = schema.obsolete.all()
schema_new = schema.new.all()
all_responses = SchemaResponse.objects.filter(schema=schema)
# self._make_date_readable(all_responses)
serializer = SchemaResponseSerializer(all_responses, many=True)
data = {'single_response': self.get_object(pk),
'all_responses': serializer.data,
'pk': pk,
'schema': schema,
'help_urls': schema_help_urls,
'schema_obsolete': schema_obsolete,
'schema_new': schema_new,
}
# __import__('ipdb').set_trace()
return Response(data)
| [((47, 11, 52, 10), 'django.shortcuts.render', 'render', ({(47, 18, 47, 25): 'request', (47, 27, 47, 62): 'f"""dynamic_schemas/create-form.html"""', (48, 8, 52, 9): "{'form': form, 'schema': schema, 'help_urls': urls}"}, {}), "(request, f'dynamic_schemas/create-form.html', {'form': form,\n 'schema': schema, 'help_urls': urls})", False, 'from django.shortcuts import render, redirect, reverse\n'), ((82, 11, 85, 13), 'django.shortcuts.render', 'render', ({(82, 18, 82, 25): 'request', (82, 27, 82, 62): 'f"""dynamic_schemas/update-form.html"""', (83, 12, 84, 39): "{'form_update': form, 'load_button': load_button}"}, {}), "(request, f'dynamic_schemas/update-form.html', {'form_update': form,\n 'load_button': load_button})", False, 'from django.shortcuts import render, redirect, reverse\n'), ((211, 15, 211, 36), 'rest_framework.response.Response', 'Response', ({(211, 24, 211, 35): 'return_data'}, {}), '(return_data)', False, 'from rest_framework.response import Response\n'), ((220, 15, 220, 32), 'rest_framework.response.Response', 'Response', ({(220, 24, 220, 31): 'columns'}, {}), '(columns)', False, 'from rest_framework.response import Response\n'), ((273, 15, 273, 29), 'rest_framework.response.Response', 'Response', ({(273, 24, 273, 28): 'data'}, {}), '(data)', False, 'from rest_framework.response import Response\n'), ((42, 24, 43, 50), 'django.shortcuts.reverse', 'reverse', (), '', False, 'from django.shortcuts import render, redirect, reverse\n'), ((79, 24, 80, 58), 'django.shortcuts.reverse', 'reverse', (), '', False, 'from django.shortcuts import render, redirect, reverse\n'), ((131, 23, 131, 48), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', (), '', False, 'from django.contrib.auth.models import User\n'), ((103, 21, 103, 38), 'pytz.timezone', 'pytz.timezone', ({(103, 35, 103, 37): 'tz'}, {}), '(tz)', False, 'import pytz\n')] |
minefarmer/deep-Dive-1 | my_classes/.history/ModulesPackages_PackageNamespaces/example3a/main_20210725220637.py | b0675b853180c5b5781888266ea63a3793b8d855 |
import os.path
import types
import sys
| [] |
conscience99/lyriko | api/views.py | 0ecc9e4d5ec8e3d746fcb286209a1e7993548a66 | from django.shortcuts import render
from rest_framework import response
from rest_framework.serializers import Serializer
from . import serializers
from rest_framework.response import Response
from rest_framework.views import APIView
from django.views import View
from rest_framework import status
from . models import SaveList, User, Lyrics, SearchHistory, VerificationCode, SubmitLyrics
from rest_framework.permissions import BasePermission, IsAuthenticated, SAFE_METHODS, IsAdminUser
from rest_framework.authtoken.models import Token
from django.contrib.auth.hashers import make_password, check_password
from django.contrib.auth import login, authenticate
import requests
from django.db.models import Q
from bs4 import BeautifulSoup
import json
from datetime import datetime
import random
from django.core.mail import EmailMessage, EmailMultiAlternatives
from django.conf import settings
from django.template.loader import get_template
from django.urls import reverse
import jwt
from django.utils.encoding import force_bytes, force_text, DjangoUnicodeDecodeError
from django.utils.http import urlsafe_base64_decode, urlsafe_base64_encode
from django.contrib.sites.shortcuts import get_current_site
from .utils import Util
from rest_framework_simplejwt.tokens import RefreshToken
from django.template import Context
from django.http import HttpResponse, HttpResponseNotFound
import os
import re
import urllib
from datetime import datetime
import random
import time
now = datetime.now()
import json
class SignupView(APIView):
now = datetime.now()
def post(self, request, *args,**kwargs):
user=User()
try:
User.objects.get(email=request.data['email'])
return Response({"email":"already taken"})
except:
serializer=serializers.UserSerializer(data=request.data)
if serializer.is_valid():
password=make_password(request.data['password'])
username=request.data['username']
user.username=username
user.first_name=request.data['first_name']
user.last_name=request.data['last_name']
user.email=request.data['email']
user.email_username=request.data['email']
user.password=password
user.is_verified = False
user.save()
new_user=User.objects.get(id=user.id)
token=Token.objects.create(user=new_user)
verification = VerificationCode()
code = random.randint(199999,999999)
verification.code=code
verification.user_id=new_user.id
verification._year = now.year
verification._month = now.month
verification._day = now.day
verification._hour = now.hour
verification._minute = now.minute
verification.save()
from_e = settings.EMAIL_HOST_USER
to=request.data['email']
html = get_template('api/code.html')
html_content = html.render({'username':new_user.username, 'code':code})
                text = f'Hi {new_user.username}, \n Please use {code} to continue with Lyriko.'
subject = 'Confirm your email'
email = EmailMultiAlternatives(
subject,
text,
from_e,
[to]
)
email.attach_alternative(html_content, 'text/html')
try:
email.send()
except:
pass
token=Token.objects.get(user=user)
response={'token':token.key, 'user':serializer.data}
return Response(response)
else:
return Response(serializer.errors)
class SendCode(APIView):
def post(self, request, *args, **kwargs):
try:
user = User.objects.get(email=request.data['email'])
except:
return Response({"error":"User not found."})
try:
v = VerificationCode.objects.get(user_id=user.id)
v.delete()
except:
pass
verification = VerificationCode()
code = random.randint(199999,999999)
verification.code=code
verification.user_id=user.id
verification._year = now.year
verification._month = now.month
verification._day = now.day
verification._hour = now.hour
verification._minute = now.minute
verification.save()
from_e = settings.EMAIL_HOST_USER
to=request.data['email']
html = get_template('api/code.html')
html_content = html.render({'username':user.username, 'code':code})
        text = f'Hi {user.username}, \n Please use {code} to continue with Lyriko.'
subject = 'Action Required'
email = EmailMultiAlternatives(
subject,
text,
from_e,
[to]
)
email.attach_alternative(html_content, 'text/html')
try:
email.send()
except:
return Response({"error":"Error occured"})
return Response({"success":"Success"})
class AccountActivation(APIView):
permission_classes=[IsAuthenticated]
def post(self, request, *args, **kwargs):
user=User.objects.get(username=request.user.username)
code=request.data['code']
try:
verification = VerificationCode.objects.get(user_id=user.id, code=int(code))
user.is_verified=True
user.save()
verification.delete()
return Response({'msg':'success'})
except:
return Response({'error':'Invalid code.'})
class VerifyUser(APIView):
def post(self, request, *args, **kwargs):
user = User.objects.get(email=request.data['email'])
code = request.data['code']
try:
_code = VerificationCode.objects.get(code=int(code), user_id=user.id)
_code.delete()
return Response({"msg":"success"})
except:
return Response({"error":"invalid code"})
class CheckSaveList(APIView):
permission_classes=[IsAuthenticated]
def post(self, request, *args, **kwargs):
try:
if SaveList.objects.get(owner_username=request.user.username, lyrics_id=request.data['lyrics_id']):
return Response({"watchlisted":'true'})
except:
return Response({"watchlisted":'false'})
class LyricsView(APIView):
def get(self, request, *args, **kwargs):
if request.method=='GET':
lyrics_items=Lyrics.objects.all()
serializer = serializers.LyricsSerializer(lyrics_items,many=True)
response={'lyrics':serializer.data}
return Response(response, status=status.HTTP_200_OK)
else:
response={'error':'Forbidden'}
return Response(response, status=status.HTTP_400_BAD_REQUEST)
class AddLyricsView(APIView):
permission_classes=[IsAuthenticated]
def post(self, request, *args, **kwargs):
if request.method=='POST':
data=request.data
lyrics=Lyrics()
serializer=serializers.LyricsSerializer(data=data)
if serializer.is_valid():
                lyrics.title=request.POST['title']
                lyrics.artist=request.POST['artist']
                lyrics.body=request.POST['body']
                lyrics.title_slug=request.POST['title'].replace(' ', '-').lower()
                lyrics.artist_slug=request.POST['artist'].replace(' ', '-').lower()
                # persist the new lyrics record before returning it
                lyrics.save()
                response={'lyrics':serializer.data}
return Response(response,status=status.HTTP_200_OK )
else:
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class SingleLyricsView(APIView):
def post(self, request, *args, **kwargs ):
artist = request.data['artist'].strip().replace("-"," ").title()
title=request.data['title'].strip().replace("-"," ").title()
search_history=SearchHistory()
title_clean1=request.data['title'].strip().replace("ain-t", "aint")
title_clean2=title_clean1.replace('you-re', 'youre')
        title_clean3 = title_clean2.replace('isn-t', 'isnt')
        title_clean4 = title_clean3.replace('aren-t', 'arent')
title_clean_5= title_clean4.replace("weren-t","werent")
title_clean6 = title_clean_5.replace("can-t", "cant")
title_clean7 = title_clean6.replace('don-t', 'dont')
title_clean8 = title_clean7.replace('i-d', 'id').replace('i-ve', 'ive').replace('we-ve','weve',).replace('you-ve', 'youve').replace('he-s', 'hes').replace('she-s', 'shes').replace('it-s', 'its',).replace('you-d', 'youd').replace('i-ll', 'ill').replace("you-ll", "youll").replace('let-s', "lets").replace("amn't", "amnt").replace("haven-t","havent")
try:
lyrics_item=Lyrics.objects.get(artist_slug=request.data['artist'], title_slug__icontains=title_clean8)
views = lyrics_item.views
updt_views=views+1
lyrics_item.views = updt_views
lyrics_item.save()
serializer=serializers.LyricsSerializer(lyrics_item, many=False)
response={'lyrics':serializer.data}
### Record activities ###
search_history.searcher_username = request.data['username']
search_history.artist=artist.replace('-',' ')
search_history.title=title.replace('-',' ')
search_history.save()
return Response(response,status=status.HTTP_200_OK)
except Lyrics.DoesNotExist:
return Response({"error":"Not Found"})
class SearchHistoryView(APIView):
permission_classes=[IsAuthenticated]
def get(self, request, *args, **kwargs ):
search_history_items=SearchHistory.objects.filter(searcher_username=request.user.username).order_by('-moment').all()
serializer=serializers.SearchHistorySerializer(search_history_items, many=True)
response={"search_history":serializer.data}
return Response(response,status=status.HTTP_200_OK)
class DeleteHistory(APIView):
permission_classes=[IsAuthenticated]
def post(self, request, *args, **kwargs):
searcher_username = request.user.username
history_item_id = request.data['id']
try:
SearchHistory.objects.get(searcher_username=searcher_username, id=history_item_id).delete()
return Response({"msg":"OK"})
except:
return Response({"msg":"Something went wrong"})
class TrendingView(APIView):
def get(self, request, *args, **kwargs):
lyrics=Lyrics.objects.order_by('-views')[0:35]
serializer=serializers.LyricsSerializer(lyrics, many=True)
response={"top":serializer.data}
return Response(response)
class RandomView(APIView):
    def get(self, request, *args, **kwargs):
        lyrics_items = list(Lyrics.objects.all())
        random_lyrics = random.choice(lyrics_items)
serializer=serializers.LyricsSerializer(random_lyrics)
resp={"lyrics":serializer.data}
return Response(resp)
class RecentView(APIView):
def get(self, request, *args, **kwargs):
        recent_items = SearchHistory.objects.order_by('-moment').all()[:20]
        recent = list(recent_items)
serializer=serializers.SearchHistorySerializer(recent, many=True)
resp={"recent":serializer.data}
return Response(resp)
class SuggestionView(APIView):
def post(self, request, *args, **kwargs):
_type=request.data['type']
if _type=="title":
lyrics=Lyrics.objects.filter(title__contains=request.data['title'])
serializer=serializers.LyricsSerializer(lyrics, many=True)
resp={'suggestions':serializer.data}
return Response(resp)
else:
lyrics=Lyrics.objects.filter(artist__contains=request.data['artist'])
serializer=serializers.LyricsSerializer(lyrics, many=True)
resp={'suggestions':serializer.data}
return Response(resp)
class ChangePassword(APIView):
def post(self, request, *args, **kwargs):
        if request.data['access'] == "code":
            try:
                user = User.objects.get(email=request.data['email'])
            except User.DoesNotExist:
                return Response({"error": "User not found"})
            user.password = make_password(request.data['new_password'])
            user.save()
            return Response({"msg": "success"})
else:
user = User.objects.get(username=request.user.username)
current_password = request.data['current_password']
if check_password(current_password, user.password):
user.password = make_password(request.data['new_password'])
user.save()
return Response({"success":"Password changed"})
else:
return Response({"error":"Incorrect password"})
class modifyUser(APIView):
permission_classes=[IsAuthenticated]
def post(self, request, *args, **kwargs):
user = User.objects.get(pk=request.user.id)
new_email = request.data['email']
old_email = user.email
if new_email != old_email:
user.is_verified = False
user.username = request.data['username']
user.email = new_email
user.first_name = request.data['first_name']
user.last_name = request.data['last_name']
user.save()
        serializer = serializers.UserSerializer(user, many=False)
        response = {'user': serializer.data}
return Response(response)
''' class EditLyricsView(APIView):
def post(self, request, pk, *args, **kwargs ):
data=request.data
lyrics=Lyrics.objects.get(pk=pk)
lyrics.title=request.POST['title']
lyrics.artist=request.POST['artist']
lyrics.body=request.POST['body']
Lyrics.objects.get(pk=pk)
lyrics.save()
lyrics_item=Lyrics.objects.get(pk=pk)
serializer=serializers.LyricsSerializer(lyrics_item,many=False)
response={'lyrics':serializer.data}
return Response(response,status=status.HTTP_200_OK ) '''
class SaveListView(APIView):
permission_classes=[IsAuthenticated]
def get(self, request, *args, **kwargs):
save_list_items=SaveList.objects.filter(owner_username=request.user.username)
save_list=[]
for i in save_list_items:
lyrics = Lyrics.objects.get(pk=i.lyrics_id)
save_list.append(lyrics)
serializer = serializers.LyricsSerializer(save_list, many=True)
return Response({'lyrics':serializer.data}, status=status.HTTP_200_OK)
class AddSaveListView(APIView):
permission_classes=[IsAuthenticated]
def post(self, request, *args, **kwargs):
items=SaveList.objects.filter(owner_username=request.user.username)
data=request.data
username=request.user.username
savelist=SaveList()
try:
if SaveList.objects.get(owner_username=request.user.username, lyrics_id=request.data['lyrics_id']):
return Response({"Error":"Cannot add lyrics to Save List twice or more."})
except:
savelist.lyrics_id=request.data['lyrics_id']
savelist.owner_username=username
savelist.save()
save_list_items=SaveList.objects.filter(owner_username=request.user.username)
save_list = []
for save_list_item in save_list_items:
sl = Lyrics.objects.get(pk=save_list_item.lyrics_id)
save_list.append(sl)
serializer = serializers.LyricsSerializer(save_list, many=True)
response={'save_list':serializer.data}
return Response(response, status=status.HTTP_200_OK)
class RemoveSaveListView(APIView):
permission_classes=[IsAuthenticated]
def post(self, request, *args, **kwargs):
owner_username=request.user.username
lyrics_id=request.data['lyrics_id']
save_list_item=SaveList.objects.get(owner_username=owner_username, lyrics_id=lyrics_id)
save_list_item.delete()
save_list_items=SaveList.objects.filter(owner_username=request.user.username)
save_list = []
for save_list_item in save_list_items:
sl = Lyrics.objects.get(pk=save_list_item.lyrics_id)
save_list.append(sl)
serializer = serializers.LyricsSerializer(save_list, many=True)
response={'save_list':serializer.data}
return Response(response, status=status.HTTP_200_OK)
class CheckUserView(APIView):
def post(self, request, *args, **kwargs):
try:
User.objects.get(username=request.data['username'])
return Response({'true'}, status=status.HTTP_200_OK)
except User.DoesNotExist:
return Response({'false'})
""" class SignupView(APIView):
def post(self, request, *args, **kwargs):
user=User()
serializer=serializers.UserSerializer(data=request.data)
print(request.data)
if serializer.is_valid():
password=make_password(request.data['password'])
username=request.data['username']
user.username=username
user.first_name=request.data['first_name']
user.last_name=request.data['last_name']
user.email=request.data['email']
user.email_username=request.data['email']
user.password=password
user.save()
new_user=User.objects.get(username=username)
print(new_user)
token=Token.objects.create(user=new_user)
response={'token':token.key, 'user':serializer.data}
return Response(response, status=status.HTTP_200_OK)
else:
return Response(serializer.errors) """
class UserDataView(APIView):
permission_classes=[IsAuthenticated]
def get(self, request, *args, **kwargs):
user=User.objects.get(username=request.user.username)
serializer=serializers.UserSerializer(user, many=False)
response={'user':serializer.data}
return Response(response, status=status.HTTP_200_OK)
class SigninView(APIView):
def post(self, request, *args, **kwargs):
password=request.data['password']
username=request.data['username']
try:
            if '@' not in username:
                user = User.objects.get(username=username)
            else:
                user = User.objects.get(email_username=username)
except:
return Response({'error':'User not found.'})
if check_password(password, user.password):
login(self.request, user)
token=Token.objects.get(user=user)
serializer=serializers.UserSerializer(user, many=False)
response={'user':serializer.data, 'token':token.key}
return Response(response, status=status.HTTP_200_OK)
else:
return Response({'error':'Incorrect password'})
class SubmitLyricsv(APIView):
def post(self, request, *args, **kwargs):
        serializer = serializers.SubmitLyricsSerializer(data=request.data)
        if serializer.is_valid():
            sl = SubmitLyrics()
            sl.title = request.data['title']
            sl.artist = request.data['artist']
            sl.body = request.data['body']
            sl.save()
            response = {"msg": "OK"}
            return Response(response)
        else:
            return Response(serializer.errors)
class ApproveSubmitLyrics(APIView):
permission_classes=[IsAuthenticated]
def post(self, request, *args, **kwargs):
user = request.user
        if not user.is_lyrics_admin:
return Response({"Error":"Forbidden"})
else:
lyrics = Lyrics()
lyrics.artist = request.data['artist']
lyrics.artist_slug = request.data['artist'].strip().replace(" ","-").lower()
lyrics.title = request.data['title']
lyrics.title_slug=request.data['title'].strip().replace(" ","-").lower()
lyrics.body = request.data['body']
lyrics.save()
            SubmitLyrics.objects.get(id=request.data['id']).delete()
return Response({"msg":"OK"})
class SubmitLyricsListView(APIView):
permission_classes=[IsAuthenticated]
def get(self, request, *args, **kwargs):
user=request.user
        if not user.is_lyrics_admin:
return Response({"Error":"Forbidden"})
else:
sub = SubmitLyrics.objects.all()
serializer = serializers.SubmitLyricsSerializer(sub, many=True)
res = {"submit_lyrics_view":serializer.data}
return Response(res)
class SubmitLyricsView(APIView):
permission_classes=[IsAuthenticated]
def post(self, request, *args, **kwargs):
user = request.user
        if not user.is_lyrics_admin:
return Response({"Error":"Forbidden"})
else:
item = SubmitLyrics.objects.get(id=request.data['id'])
serializer = serializers.SubmitLyricsSerializer(item, many=False)
res = {"submit_lyrics_item":serializer.data}
return Response(res)
class DeclineSubmitLyrics(APIView):
def post(self, request, *args, **kwargs):
user = request.user
        if not user.is_lyrics_admin:
return Response({"Error":"Forbidden"})
else:
item = SubmitLyrics.objects.get(id=request.data['id'])
item.delete()
return Response({"msg":"OK"})
class RelatedView(APIView):
def post(self, request, *args, **kwargs):
lyrics = Lyrics.objects.filter(artist_slug=request.data['artist'])[0:10]
serializer=serializers.LyricsSerializer(lyrics, many=True)
response={"top":serializer.data}
return Response(response)
class SearchViewv(APIView):
def post(self, request, *args, **kwargs):
        if request.data.get('term'):
term=request.data['term']
terms = term.split()
results =[]
for i in terms:
if i!="by":
for j in Lyrics.objects.filter(title__icontains=i):
results.append(j)
for k in Lyrics.objects.filter(artist__icontains=i):
results.append(k)
search_res = [i for j, i in enumerate(results) if i not in results[:j]]
serializer=serializers.LyricsSerializer(search_res, many=True)
response={"result":serializer.data}
return Response(response)
else:
return Response({"error":"Unavailable"})
""" data = requests.get(f"https://api.lyrics.ovh/v1/{artistSlug}/{titleSlug}/")
lyric = data.json()
if data.status_code == 200:
lyrics.title=title
lyrics.artist=artist
lyrics.title_slug=titleSlug
lyrics.artist_slug=artistSlug
lyrics.body=lyric['lyrics']
lyrics.save()
lyrics_item=Lyrics.objects.get(title_slug=title_slug, artist_slug=artist_slug)
searchHistory.lyrics_id = lyrics_item.id
searchHistory.searcher_username = request.user.username
searchHistory.moment=now.strftime('%Y-%m-%d %H:%M:%S')
searchHistory.save()
serializer=serializers.LyricsSerializer(lyrics_item, many=False)
response={'lyrics':serializer.data}
return Response(response,status=status.HTTP_200_OK ) """
| [((40, 6, 40, 20), 'datetime.datetime.now', 'datetime.now', ({}, {}), '()', False, 'from datetime import datetime\n'), ((49, 10, 49, 24), 'datetime.datetime.now', 'datetime.now', ({}, {}), '()', False, 'from datetime import datetime\n'), ((119, 15, 119, 44), 'random.randint', 'random.randint', ({(119, 30, 119, 36): '199999', (119, 37, 119, 43): '999999'}, {}), '(199999, 999999)', False, 'import random\n'), ((130, 15, 130, 44), 'django.template.loader.get_template', 'get_template', ({(130, 28, 130, 43): '"""api/code.html"""'}, {}), "('api/code.html')", False, 'from django.template.loader import get_template\n'), ((134, 16, 140, 9), 'django.core.mail.EmailMultiAlternatives', 'EmailMultiAlternatives', ({(135, 12, 135, 19): 'subject', (136, 12, 136, 16): 'text', (137, 12, 137, 18): 'from_e', (138, 12, 138, 16): '[to]'}, {}), '(subject, text, from_e, [to])', False, 'from django.core.mail import EmailMessage, EmailMultiAlternatives\n'), ((146, 15, 146, 46), 'rest_framework.response.Response', 'Response', ({(146, 24, 146, 45): "{'success': 'Success'}"}, {}), "({'success': 'Success'})", False, 'from rest_framework.response import Response\n'), ((261, 15, 261, 59), 'rest_framework.response.Response', 'Response', (), '', False, 'from rest_framework.response import Response\n'), ((282, 15, 282, 33), 'rest_framework.response.Response', 'Response', ({(282, 24, 282, 32): 'response'}, {}), '(response)', False, 'from rest_framework.response import Response\n'), ((291, 22, 291, 49), 'random.choice', 'random.choice', ({(291, 36, 291, 48): 'lyrics_items'}, {}), '(lyrics_items)', False, 'import random\n'), ((294, 15, 294, 29), 'rest_framework.response.Response', 'Response', ({(294, 24, 294, 28): 'resp'}, {}), '(resp)', False, 'from rest_framework.response import Response\n'), ((304, 15, 304, 29), 'rest_framework.response.Response', 'Response', ({(304, 24, 304, 28): 'resp'}, {}), '(resp)', False, 'from rest_framework.response import Response\n'), ((358, 15, 358, 33), 'rest_framework.response.Response', 'Response', ({(358, 24, 358, 32): 'response'}, {}), '(response)', False, 'from rest_framework.response import Response\n'), ((395, 15, 395, 78), 'rest_framework.response.Response', 'Response', (), '', False, 'from rest_framework.response import Response\n'), ((439, 15, 439, 60), 'rest_framework.response.Response', 'Response', (), '', False, 'from rest_framework.response import Response\n'), ((480, 15, 480, 60), 'rest_framework.response.Response', 'Response', (), '', False, 'from rest_framework.response import Response\n'), ((493, 11, 493, 50), 'django.contrib.auth.hashers.check_password', 'check_password', ({(493, 26, 493, 34): 'password', (493, 36, 493, 49): 'user.password'}, {}), '(password, user.password)', False, 'from django.contrib.auth.hashers import make_password, check_password\n'), ((573, 15, 573, 33), 'rest_framework.response.Response', 'Response', ({(573, 24, 573, 32): 'response'}, {}), '(response)', False, 'from rest_framework.response import Response\n'), ((55, 19, 55, 54), 'rest_framework.response.Response', 'Response', ({(55, 28, 55, 53): "{'email': 'already taken'}"}, {}), "({'email': 'already taken'})", False, 'from rest_framework.response import Response\n'), ((160, 19, 160, 46), 'rest_framework.response.Response', 'Response', ({(160, 28, 160, 45): "{'msg': 'success'}"}, {}), "({'msg': 'success'})", False, 'from rest_framework.response import Response\n'), ((172, 19, 172, 46), 'rest_framework.response.Response', 'Response', ({(172, 28, 172, 45): "{'msg': 'success'}"}, {}), "({'msg': 
'success'})", False, 'from rest_framework.response import Response\n'), ((194, 19, 194, 64), 'rest_framework.response.Response', 'Response', (), '', False, 'from rest_framework.response import Response\n'), ((197, 19, 197, 73), 'rest_framework.response.Response', 'Response', (), '', False, 'from rest_framework.response import Response\n'), ((245, 19, 245, 63), 'rest_framework.response.Response', 'Response', (), '', False, 'from rest_framework.response import Response\n'), ((271, 19, 271, 41), 'rest_framework.response.Response', 'Response', ({(271, 28, 271, 40): "{'msg': 'OK'}"}, {}), "({'msg': 'OK'})", False, 'from rest_framework.response import Response\n'), ((313, 19, 313, 33), 'rest_framework.response.Response', 'Response', ({(313, 28, 313, 32): 'resp'}, {}), '(resp)', False, 'from rest_framework.response import Response\n'), ((318, 19, 318, 33), 'rest_framework.response.Response', 'Response', ({(318, 28, 318, 32): 'resp'}, {}), '(resp)', False, 'from rest_framework.response import Response\n'), ((328, 28, 328, 71), 'django.contrib.auth.hashers.make_password', 'make_password', ({(328, 42, 328, 70): "request.data['new_password']"}, {}), "(request.data['new_password'])", False, 'from django.contrib.auth.hashers import make_password, check_password\n'), ((330, 19, 330, 46), 'rest_framework.response.Response', 'Response', ({(330, 28, 330, 45): "{'msg': 'success'}"}, {}), "({'msg': 'success'})", False, 'from rest_framework.response import Response\n'), ((334, 15, 334, 62), 'django.contrib.auth.hashers.check_password', 'check_password', ({(334, 30, 334, 46): 'current_password', (334, 48, 334, 61): 'user.password'}, {}), '(current_password, user.password)', False, 'from django.contrib.auth.hashers import make_password, check_password\n'), ((445, 19, 445, 64), 'rest_framework.response.Response', 'Response', (), '', False, 'from rest_framework.response import Response\n'), ((494, 12, 494, 37), 'django.contrib.auth.login', 'login', ({(494, 18, 494, 30): 'self.request', (494, 32, 494, 36): 'user'}, {}), '(self.request, user)', False, 'from django.contrib.auth import login, authenticate\n'), ((495, 18, 495, 46), 'rest_framework.authtoken.models.Token.objects.get', 'Token.objects.get', (), '', False, 'from rest_framework.authtoken.models import Token\n'), ((498, 19, 498, 64), 'rest_framework.response.Response', 'Response', (), '', False, 'from rest_framework.response import Response\n'), ((500, 19, 500, 59), 'rest_framework.response.Response', 'Response', ({(500, 28, 500, 58): "{'error': 'Incorrect password'}"}, {}), "({'error': 'Incorrect password'})", False, 'from rest_framework.response import Response\n'), ((512, 19, 512, 37), 'rest_framework.response.Response', 'Response', ({(512, 28, 512, 36): 'response'}, {}), '(response)', False, 'from rest_framework.response import Response\n'), ((514, 19, 514, 49), 'rest_framework.response.Response', 'Response', ({(514, 28, 514, 48): '{serializers.errors}'}, {}), '({serializers.errors})', False, 'from rest_framework.response import Response\n'), ((521, 19, 521, 50), 'rest_framework.response.Response', 'Response', ({(521, 28, 521, 49): "{'Error': 'Forbidden'}"}, {}), "({'Error': 'Forbidden'})", False, 'from rest_framework.response import Response\n'), ((531, 19, 531, 41), 'rest_framework.response.Response', 'Response', ({(531, 28, 531, 40): "{'msg': 'OK'}"}, {}), "({'msg': 'OK'})", False, 'from rest_framework.response import Response\n'), ((538, 19, 538, 50), 'rest_framework.response.Response', 'Response', ({(538, 28, 538, 49): "{'Error': 'Forbidden'}"}, 
{}), "({'Error': 'Forbidden'})", False, 'from rest_framework.response import Response\n'), ((543, 19, 543, 32), 'rest_framework.response.Response', 'Response', ({(543, 28, 543, 31): 'res'}, {}), '(res)', False, 'from rest_framework.response import Response\n'), ((550, 19, 550, 50), 'rest_framework.response.Response', 'Response', ({(550, 28, 550, 49): "{'Error': 'Forbidden'}"}, {}), "({'Error': 'Forbidden'})", False, 'from rest_framework.response import Response\n'), ((555, 19, 555, 32), 'rest_framework.response.Response', 'Response', ({(555, 28, 555, 31): 'res'}, {}), '(res)', False, 'from rest_framework.response import Response\n'), ((561, 19, 561, 50), 'rest_framework.response.Response', 'Response', ({(561, 28, 561, 49): "{'Error': 'Forbidden'}"}, {}), "({'Error': 'Forbidden'})", False, 'from rest_framework.response import Response\n'), ((565, 19, 565, 41), 'rest_framework.response.Response', 'Response', ({(565, 28, 565, 40): "{'msg': 'OK'}"}, {}), "({'msg': 'OK'})", False, 'from rest_framework.response import Response\n'), ((592, 19, 592, 37), 'rest_framework.response.Response', 'Response', ({(592, 28, 592, 36): 'response'}, {}), '(response)', False, 'from rest_framework.response import Response\n'), ((594, 19, 594, 52), 'rest_framework.response.Response', 'Response', ({(594, 28, 594, 51): "{'error': 'Unavailable'}"}, {}), "({'error': 'Unavailable'})", False, 'from rest_framework.response import Response\n'), ((112, 19, 112, 56), 'rest_framework.response.Response', 'Response', ({(112, 28, 112, 55): "{'error': 'User not found.'}"}, {}), "({'error': 'User not found.'})", False, 'from rest_framework.response import Response\n'), ((145, 19, 145, 54), 'rest_framework.response.Response', 'Response', ({(145, 28, 145, 53): "{'error': 'Error occured'}"}, {}), "({'error': 'Error occured'})", False, 'from rest_framework.response import Response\n'), ((162, 19, 162, 54), 'rest_framework.response.Response', 'Response', ({(162, 28, 162, 53): "{'error': 'Invalid code.'}"}, {}), "({'error': 'Invalid code.'})", False, 'from rest_framework.response import Response\n'), ((174, 19, 174, 53), 'rest_framework.response.Response', 'Response', ({(174, 28, 174, 52): "{'error': 'invalid code'}"}, {}), "({'error': 'invalid code'})", False, 'from rest_framework.response import Response\n'), ((182, 23, 182, 55), 'rest_framework.response.Response', 'Response', ({(182, 32, 182, 54): "{'watchlisted': 'true'}"}, {}), "({'watchlisted': 'true'})", False, 'from rest_framework.response import Response\n'), ((184, 18, 184, 51), 'rest_framework.response.Response', 'Response', ({(184, 27, 184, 50): "{'watchlisted': 'false'}"}, {}), "({'watchlisted': 'false'})", False, 'from rest_framework.response import Response\n'), ((214, 23, 214, 68), 'rest_framework.response.Response', 'Response', (), '', False, 'from rest_framework.response import Response\n'), ((216, 23, 216, 86), 'rest_framework.response.Response', 'Response', (), '', False, 'from rest_framework.response import Response\n'), ((248, 19, 248, 50), 'rest_framework.response.Response', 'Response', ({(248, 28, 248, 49): "{'error': 'Not Found'}"}, {}), "({'error': 'Not Found'})", False, 'from rest_framework.response import Response\n'), ((273, 19, 273, 59), 'rest_framework.response.Response', 'Response', ({(273, 28, 273, 58): "{'msg': 'Something went wrong'}"}, {}), "({'msg': 'Something went wrong'})", False, 'from rest_framework.response import Response\n'), ((335, 32, 335, 75), 'django.contrib.auth.hashers.make_password', 'make_password', ({(335, 46, 335, 74): 
"request.data['new_password']"}, {}), "(request.data['new_password'])", False, 'from django.contrib.auth.hashers import make_password, check_password\n'), ((337, 23, 337, 63), 'rest_framework.response.Response', 'Response', ({(337, 32, 337, 62): "{'success': 'Password changed'}"}, {}), "({'success': 'Password changed'})", False, 'from rest_framework.response import Response\n'), ((339, 23, 339, 63), 'rest_framework.response.Response', 'Response', ({(339, 32, 339, 62): "{'error': 'Incorrect password'}"}, {}), "({'error': 'Incorrect password'})", False, 'from rest_framework.response import Response\n'), ((407, 23, 407, 90), 'rest_framework.response.Response', 'Response', ({(407, 32, 407, 89): "{'Error': 'Cannot add lyrics to Save List twice or more.'}"}, {}), "({'Error': 'Cannot add lyrics to Save List twice or more.'})", False, 'from rest_framework.response import Response\n'), ((421, 19, 421, 64), 'rest_framework.response.Response', 'Response', (), '', False, 'from rest_framework.response import Response\n'), ((447, 19, 447, 38), 'rest_framework.response.Response', 'Response', ({(447, 28, 447, 37): "{'false'}"}, {}), "({'false'})", False, 'from rest_framework.response import Response\n'), ((492, 22, 492, 59), 'rest_framework.response.Response', 'Response', ({(492, 31, 492, 58): "{'error': 'User not found.'}"}, {}), "({'error': 'User not found.'})", False, 'from rest_framework.response import Response\n'), ((60, 25, 60, 64), 'django.contrib.auth.hashers.make_password', 'make_password', ({(60, 39, 60, 63): "request.data['password']"}, {}), "(request.data['password'])", False, 'from django.contrib.auth.hashers import make_password, check_password\n'), ((71, 22, 71, 57), 'rest_framework.authtoken.models.Token.objects.create', 'Token.objects.create', (), '', False, 'from rest_framework.authtoken.models import Token\n'), ((73, 23, 73, 52), 'random.randint', 'random.randint', ({(73, 38, 73, 44): '199999', (73, 45, 73, 51): '999999'}, {}), '(199999, 999999)', False, 'import random\n'), ((84, 23, 84, 52), 'django.template.loader.get_template', 'get_template', ({(84, 36, 84, 51): '"""api/code.html"""'}, {}), "('api/code.html')", False, 'from django.template.loader import get_template\n'), ((88, 24, 93, 13), 'django.core.mail.EmailMultiAlternatives', 'EmailMultiAlternatives', ({(89, 20, 89, 27): 'subject', (90, 20, 90, 24): 'text', (91, 20, 91, 26): 'from_e', (92, 20, 92, 24): '[to]'}, {}), '(subject, text, from_e, [to])', False, 'from django.core.mail import EmailMessage, EmailMultiAlternatives\n'), ((99, 22, 99, 50), 'rest_framework.authtoken.models.Token.objects.get', 'Token.objects.get', (), '', False, 'from rest_framework.authtoken.models import Token\n'), ((101, 23, 101, 41), 'rest_framework.response.Response', 'Response', ({(101, 32, 101, 40): 'response'}, {}), '(response)', False, 'from rest_framework.response import Response\n'), ((103, 23, 103, 50), 'rest_framework.response.Response', 'Response', ({(103, 32, 103, 49): 'serializer.errors'}, {}), '(serializer.errors)', False, 'from rest_framework.response import Response\n')] |
AbhilashDatta/InstagramBot | __dm__.py | 21916fcfc621ae3185df8494b12aa35743c165f8 | from selenium import webdriver
from time import sleep
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from selenium.webdriver.support.wait import WebDriverWait
def Dm(driver,user,message):
''' This function is used to direct message a single user/group '''
driver.get('https://www.instagram.com/direct/inbox/')
send_message_button = WebDriverWait(driver, 20).until(EC.element_to_be_clickable((By.XPATH, '//*[@id="react-root"]/section/div/div[2]/div/div/div[2]/div/div[3]/div/button'))).click()
search_user = WebDriverWait(driver, 10).until(EC.element_to_be_clickable((By.XPATH, '/html/body/div[5]/div/div/div[2]/div[1]/div/div[2]/input')))
search_user.send_keys(user)
selector = WebDriverWait(driver, 10).until(EC.element_to_be_clickable((By.XPATH, '/html/body/div[5]/div/div/div[2]/div[2]/div/div/div[3]/button/span'))).click()
next_button = WebDriverWait(driver, 10).until(EC.element_to_be_clickable((By.XPATH, '/html/body/div[5]/div/div/div[1]/div/div[2]/div/button/div'))).click()
try:
text = WebDriverWait(driver, 10).until(EC.element_to_be_clickable((By.XPATH, '//*[@id="react-root"]/section/div/div[2]/div/div/div[2]/div[2]/div/div[2]/div/div/div[2]/textarea')))
text.send_keys(message)
send = WebDriverWait(driver, 10).until(EC.element_to_be_clickable((By.XPATH, '//*[@id="react-root"]/section/div/div[2]/div/div/div[2]/div[2]/div/div[2]/div/div/div[3]/button'))).click()
driver.get('https://www.instagram.com/direct/inbox/')
    except Exception:
print('No message sent to '+user)
driver.get('https://www.instagram.com/direct/inbox/') | [((16, 50, 16, 148), 'selenium.webdriver.support.expected_conditions.element_to_be_clickable', 'EC.element_to_be_clickable', ({(16, 77, 16, 147): "(By.XPATH, '/html/body/div[5]/div/div/div[2]/div[1]/div/div[2]/input')"}, {}), "((By.XPATH,\n '/html/body/div[5]/div/div/div[2]/div[1]/div/div[2]/input'))", True, 'from selenium.webdriver.support import expected_conditions as EC\n'), ((16, 18, 16, 43), 'selenium.webdriver.support.wait.WebDriverWait', 'WebDriverWait', ({(16, 32, 16, 38): 'driver', (16, 40, 16, 42): '10'}, {}), '(driver, 10)', False, 'from selenium.webdriver.support.wait import WebDriverWait\n'), ((24, 47, 24, 186), 'selenium.webdriver.support.expected_conditions.element_to_be_clickable', 'EC.element_to_be_clickable', ({(24, 74, 24, 185): '(By.XPATH,\n \'//*[@id="react-root"]/section/div/div[2]/div/div/div[2]/div[2]/div/div[2]/div/div/div[2]/textarea\'\n )'}, {}), '((By.XPATH,\n \'//*[@id="react-root"]/section/div/div[2]/div/div/div[2]/div[2]/div/div[2]/div/div/div[2]/textarea\'\n ))', True, 'from selenium.webdriver.support import expected_conditions as EC\n'), ((14, 58, 14, 177), 'selenium.webdriver.support.expected_conditions.element_to_be_clickable', 'EC.element_to_be_clickable', ({(14, 85, 14, 176): '(By.XPATH,\n \'//*[@id="react-root"]/section/div/div[2]/div/div/div[2]/div/div[3]/div/button\'\n )'}, {}), '((By.XPATH,\n \'//*[@id="react-root"]/section/div/div[2]/div/div/div[2]/div/div[3]/div/button\'\n ))', True, 'from selenium.webdriver.support import expected_conditions as EC\n'), ((19, 47, 19, 155), 'selenium.webdriver.support.expected_conditions.element_to_be_clickable', 'EC.element_to_be_clickable', ({(19, 74, 19, 154): "(By.XPATH, '/html/body/div[5]/div/div/div[2]/div[2]/div/div/div[3]/button/span'\n )"}, {}), "((By.XPATH,\n '/html/body/div[5]/div/div/div[2]/div[2]/div/div/div[3]/button/span'))", True, 'from selenium.webdriver.support import expected_conditions as EC\n'), ((21, 50, 21, 150), 'selenium.webdriver.support.expected_conditions.element_to_be_clickable', 'EC.element_to_be_clickable', ({(21, 77, 21, 149): "(By.XPATH, '/html/body/div[5]/div/div/div[1]/div/div[2]/div/button/div')"}, {}), "((By.XPATH,\n '/html/body/div[5]/div/div/div[1]/div/div[2]/div/button/div'))", True, 'from selenium.webdriver.support import expected_conditions as EC\n'), ((24, 15, 24, 40), 'selenium.webdriver.support.wait.WebDriverWait', 'WebDriverWait', ({(24, 29, 24, 35): 'driver', (24, 37, 24, 39): '10'}, {}), '(driver, 10)', False, 'from selenium.webdriver.support.wait import WebDriverWait\n'), ((14, 26, 14, 51), 'selenium.webdriver.support.wait.WebDriverWait', 'WebDriverWait', ({(14, 40, 14, 46): 'driver', (14, 48, 14, 50): '20'}, {}), '(driver, 20)', False, 'from selenium.webdriver.support.wait import WebDriverWait\n'), ((19, 15, 19, 40), 'selenium.webdriver.support.wait.WebDriverWait', 'WebDriverWait', ({(19, 29, 19, 35): 'driver', (19, 37, 19, 39): '10'}, {}), '(driver, 10)', False, 'from selenium.webdriver.support.wait import WebDriverWait\n'), ((21, 18, 21, 43), 'selenium.webdriver.support.wait.WebDriverWait', 'WebDriverWait', ({(21, 32, 21, 38): 'driver', (21, 40, 21, 42): '10'}, {}), '(driver, 10)', False, 'from selenium.webdriver.support.wait import WebDriverWait\n'), ((26, 47, 26, 184), 'selenium.webdriver.support.expected_conditions.element_to_be_clickable', 'EC.element_to_be_clickable', ({(26, 74, 26, 183): '(By.XPATH,\n \'//*[@id="react-root"]/section/div/div[2]/div/div/div[2]/div[2]/div/div[2]/div/div/div[3]/button\'\n 
)'}, {}), '((By.XPATH,\n \'//*[@id="react-root"]/section/div/div[2]/div/div/div[2]/div[2]/div/div[2]/div/div/div[3]/button\'\n ))', True, 'from selenium.webdriver.support import expected_conditions as EC\n'), ((26, 15, 26, 40), 'selenium.webdriver.support.wait.WebDriverWait', 'WebDriverWait', ({(26, 29, 26, 35): 'driver', (26, 37, 26, 39): '10'}, {}), '(driver, 10)', False, 'from selenium.webdriver.support.wait import WebDriverWait\n')] |
Yotamefr/BeitBiram | mashov.py | 84bd6abddf6ac865b502e0692561ee48d510ef7c | import requests
from datetime import datetime
import json
from extras import Day, Lesson
class PasswordError(Exception):
pass
class LoginFailed(Exception):
pass
class MashovAPI:
"""
MashovAPI
Originally made by Xiddoc. Project can be found here: https://github.com/Xiddoc/MashovAPI
Modifications were made by me, Yotamefr.
"""
def __init__(self, username, **kwargs):
"""
Parameters
------------
username -> Represents the username
------------
        Note: several of the attributes set below are aliases for the same
        user id; they are kept for compatibility with the original project.
"""
self.url = "https://web.mashov.info/api/{}/"
self.session = requests.Session()
self.session.headers.update({'Accept': 'application/json, text/plain, */*',
'Referer': 'https://web.mashov.info/students/login',
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.61 Safari/537.36',
'Content-Type': 'application/json'})
self.username = username
self.auth_ID = 0
self.user_ID = self.auth_ID
self.uid = self.auth_ID
self.uID = self.auth_ID
self.guid = self.auth_ID
self.guID = self.auth_ID
self.school_site = ""
self.moodle_site = ""
self.school_name = ""
self.last_name = ""
self.first_name = ""
self.class_name = ""
self.last_pass = ""
self.last_login = ""
self.school_years = []
self.csrf_token = ""
self.user_children = {}
# Kwargs password
if "password" in kwargs:
self.password = kwargs["password"]
else:
self.password = False
# Kwargs schoolData
if "schoolData" in kwargs:
self.school_data = kwargs["schoolData"]
else:
self.school_data = False
# Kwargs schoolID
if "schoolID" in kwargs:
self.school_ID = kwargs["schoolID"]
elif not self.school_data:
self.school_data = self.get_schools()
self.school_ID = self.get_school_ID_by_name(kwargs["schoolName"])
self.current_year = datetime.now().year + 1
def login(self):
"""
Parameters
------------
------------
"""
if not self.password:
raise PasswordError("No password entered.")
self.login_data = {'semel': self.school_ID,
'username': self.username,
'password': self.password,
'year': self.current_year}
self.ret_data = self.send("login", "post", self.login_data)
self.ret_text = json.loads(self.ret_data.text)
if not self.ret_data.status_code == 200:
self.is_logged_in = False
raise LoginFailed()
self.is_logged_in = True
self.auth_ID = self.ret_text["credential"]["userId"]
self.user_ID = self.auth_ID
self.uid = self.auth_ID
self.uID = self.auth_ID
self.guid = self.auth_ID
self.guID = self.auth_ID
self.school_site = self.ret_text["accessToken"]["schoolOptions"]["schoolSite"]
self.moodle_site = self.ret_text["accessToken"]["schoolOptions"]["moodleSite"]
self.school_name = self.ret_text["accessToken"]["schoolOptions"]["schoolName"]
self.last_name = self.ret_text["accessToken"]["children"][0]["familyName"]
self.first_name = self.ret_text["accessToken"]["children"][0]["privateName"]
self.class_name = f'{self.ret_text["accessToken"]["children"][0]["classNum"]}{self.ret_text["accessToken"]["children"][0]["classCode"]}'
self.last_pass = self.ret_text["accessToken"]["lastPassSet"]
self.last_login = self.ret_text["accessToken"]["lastLogin"]
self.school_years = self.ret_text["accessToken"]["userSchoolYears"]
self.csrf_token = self.ret_data.cookies["Csrf-Token"]
self.session.headers.update({"x-csrf-token": self.csrf_token})
self.user_children = self.ret_text["accessToken"]["children"]
del self.username
del self.password
@property
def timetable(self):
return self.form_return(self.send(f"students/{self.user_ID}/timetable", "get"))
def update_school_data(self):
"""
Parameters
------------
------------
"""
self.school_data = self.form_return(self.send("schools", "get"))
def get_schools(self):
"""
Parameters
------------
------------
"""
self.update_school_data()
        return self.school_data
def get_school_ID_by_name(self, school):
"""
Parameters
------------
school -> Represents the school name
------------
"""
        if not self.school_data:
            self.update_school_data()
        schoolData = self.school_data
for schools in schoolData:
if schools["name"].find(school) == 0:
return schools["semel"]
def clear_session(self):
"""
Parameters
------------
------------
"""
return self.form_return(self.send("clearSession", "get"))
def get_special_lessons(self):
"""
Parameters
------------
------------
"""
return self.get_private_lessons()
def get_private_lessons(self):
"""
Parameters
------------
------------
"""
return self.form_return(self.send("students/{}/specialHoursLessons".format(self.auth_ID), "get"))
def get_private_lesson_types(self):
"""
Parameters
------------
------------
"""
return self.form_return(self.send("lessonsTypes", "get"))
@property
def classes(self):
return self.groups
@property
def groups(self):
return self.form_return(self.send("students/{}/groups".format(self.auth_ID), "get"))
@property
def teachers(self):
recipents = self.recipents
teachers = []
for i in recipents:
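            # "הורים/" means "parents/" in Hebrew; skip parent accounts.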
if "הורים/" not in i["displayName"]:
teachers.append(i)
return teachers
@property
def recipents(self):
return self.form_return(self.send("mail/recipients", "get"))
def form_return(self, response):
"""
Parameters
------------
response -> Represents the response from the website
------------
"""
if response.status_code != 200:
return False
else:
try:
return json.loads(response.text)
except:
return response.text
def send(self, url, method="get", params={}, files={}):
"""
Parameters
------------
url -> Represents the url to go to
method -> Represents the method to use. Can be either `get` or `post`
params -> Represents the parameters to send to the website. Only use it on `post`
files -> Pretty much the same as for the params
------------
"""
return getattr(self.session, str(method).strip().lower())(self.url.format(url), data=json.dumps(params),
files=files)
def __str__(self):
return json.dumps({
"MashovAPI": {
"url": self.url,
"sessionH": dict(self.session.headers),
"sessionC": self.session.cookies.get_dict(),
"username": self.username,
"password": self.password,
"schoolData": self.school_data,
"schoolID": self.school_ID,
"currentYear": self.current_year,
"loginData": self.login_data,
"isLoggedIn": self.is_logged_in,
"authID": self.auth_ID,
"userID": self.user_ID,
"uid": self.uid,
"uID": self.uID,
"guid": self.guid,
"guID": self.guID,
"schoolSite": self.school_site,
"moodleSite": self.moodle_site,
"schoolName": self.school_name,
"lastName": self.last_name,
"firstName": self.first_name,
"className": self.class_name,
"lastPass": self.last_pass,
"lastLogin": self.last_login,
"schoolYears": self.school_years,
"csrfToken": self.csrf_token,
"userChildren": self.user_children
}})
def get_day(self, day_num: int):
"""
Parameters
------------
day -> Represents the day number
------------
"""
day = []
timetable = []
for i in self.timetable:
if i["timeTable"]["day"] == day_num:
timetable.append(i)
        timetable.sort(key=lambda entry: entry["timeTable"]["lesson"])
for i in timetable:
if not "קפ'" in i["groupDetails"]["subjectName"]: # We don't need that. It's useless.
if len(day) > 0:
while i["timeTable"]["lesson"] > day[-1].number + 1:
day.append(Lesson(
lesson="",
lesson_number=day[-1].number + 1,
lesson_time="",
classroom="",
teacher="",
)
)
i["groupDetails"]["groupTeachers"][0]["teacherName"] = i["groupDetails"]["groupTeachers"][0]["teacherName"].replace("-", " ")
day.append(Lesson(
lesson=i["groupDetails"]["subjectName"],
lesson_number=i["timeTable"]["lesson"],
lesson_time="",
classroom=i["timeTable"]["roomNum"],
teacher=i["groupDetails"]["groupTeachers"][0]["teacherName"]
)
)
return Day(day_num, day)
def get_today(self):
"""
Parameters
------------
------------
"""
today = datetime.now().weekday()
today += 2
if today > 7:
today -= 7
return self.get_day(today)
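
# Usage sketch (illustrative only; the credentials and school name below are
# placeholders, not values from the project):
if __name__ == "__main__":
    api = MashovAPI("student_user", password="secret", schoolName="Example School")
    api.login()
    print(api.first_name, api.last_name, api.class_name)
    print(api.get_today())  # an extras.Day built from today's timetable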
| [((33, 23, 33, 41), 'requests.Session', 'requests.Session', ({}, {}), '()', False, 'import requests\n'), ((87, 24, 87, 54), 'json.loads', 'json.loads', ({(87, 35, 87, 53): 'self.ret_data.text'}, {}), '(self.ret_data.text)', False, 'import json\n'), ((304, 15, 304, 32), 'extras.Day', 'Day', ({(304, 19, 304, 26): 'day_num', (304, 28, 304, 31): 'day'}, {}), '(day_num, day)', False, 'from extras import Day, Lesson\n'), ((72, 28, 72, 42), 'datetime.datetime.now', 'datetime.now', ({}, {}), '()', False, 'from datetime import datetime\n'), ((213, 23, 213, 48), 'json.loads', 'json.loads', ({(213, 34, 213, 47): 'response.text'}, {}), '(response.text)', False, 'import json\n'), ((230, 93, 230, 111), 'json.dumps', 'json.dumps', ({(230, 104, 230, 110): 'params'}, {}), '(params)', False, 'import json\n'), ((312, 16, 312, 30), 'datetime.datetime.now', 'datetime.now', ({}, {}), '()', False, 'from datetime import datetime\n'), ((296, 27, 302, 34), 'extras.Lesson', 'Lesson', (), '', False, 'from extras import Day, Lesson\n'), ((287, 35, 293, 41), 'extras.Lesson', 'Lesson', (), '', False, 'from extras import Day, Lesson\n')] |
jschmidtnj/CS115 | lab6.py | fa2374f1ae9c9b63e572850a97af6086112d7a36 | '''
Created on 10/11/2017
@author: [email protected]
Pledge: I pledge my honor that I have abided by the Stevens Honor System -Joshua Schmidt
CS115 - Lab 6
'''
def isOdd(n):
'''Returns whether or not the integer argument is odd.'''
#question 1: base_2 of 42: 101010
if n == 0:
return False
if n % 2 != 0:
return True
return False
#question 2: if given an odd base-10 number, the least-significant bit of its base-2 representation will be a 1.
#question 3: if given an even base-10 number, the least-significant bit of its base-2 representation will be a 0.
#This is because 2^0 = 1, and that is the only way to make an odd number, by having a 1 in the least significant bit.
#question 4: By eliminating the least significant bit, the original number decreases by a factor of 2, if the bit is a 0.
#if the least significant bit is a 1, the original number is decreased by a factor of 2, - 1.
#question 5: If N is odd, the base-2 of N is Y + "1". If N is even, the base-2 of N is Y + "0".
#This is because to get from N base-10 to N base-2 you do successive division by 2, keeping the remainder, so given
#the base-2 of all of the division except for the first, one must put that remainder in front, hence the answer given.
def numToBinary(n):
'''Precondition: integer argument is non-negative.
Returns the string with the binary representation of non-negative integer n.
If n is 0, the empty string is returned.'''
if n == 0:
return ""
elif isOdd(n):
return numToBinary(n // 2) + "1"
else: return numToBinary(n // 2) + "0"
#print(numToBinary(15))
def binaryToNum(s):
'''Precondition: s is a string of 0s and 1s.
Returns the integer corresponding to the binary representation in s.
Note: the empty string represents 0.'''
if s == "":
return 0
return int(s[0])*(2**(len(s)-1)) + binaryToNum(s[1:])
#print(binaryToNum("1111"))
def addBin(s, numAdd, carry = 0):
"""adds 2 binary numbers"""
if s == "" or numAdd == "":
if carry == 0:
return s + numAdd
place = carry
carry = 0
if s != "" and s[-1] == "1":
carry = place
place = 1 - place
if numAdd != "" and numAdd[-1] == "1":
carry += place
place = 1 - place
return addBin(s[:-1], numAdd[:-1], carry) + str(place)
#print(addBin("100", "001", 0))
def makeEightBit(a):
    """Pads or truncates a binary string to exactly 8 bits."""
    if len(a) == 8:
        return str(a)
    elif len(a) > 8:
        return makeEightBit(a[(len(a)-8):])  # keep the 8 least-significant bits
    else:
        return makeEightBit("0" + a)  # pad with a leading zero
def increment(s):
'''Precondition: s is a string of 8 bits.
Returns the binary representation of binaryToNum(s) + 1.'''
#numAdd = "00000001"
dec = binaryToNum(s)
dec += 1
answer = numToBinary(dec)
#print(answer)
if len(answer) > 8:
return answer[(len(answer)-8):]
answer = (8-len(answer))*"0" + answer
return answer
#print(increment("1110100000"))
def count(s, n):
'''Precondition: s is an 8-bit string and n >= 0.
Prints s and its n successors.'''
if n == 0:
print(s)
return ""
print(s)
return count(increment(s), n-1)
#print(count("11111110", 5))
#print("a")
def numToTernary(n):
'''Precondition: integer argument is non-negative.
Returns the string with the ternary representation of non-negative integer
n. If n is 0, the empty string is returned.'''
if n == 0:
return ""
return numToTernary(n // 3) + str(n % 3)
#print(numToTernary(42))
def ternaryToNum(s):
'''Precondition: s is a string of 0s, 1s, and 2s.
Returns the integer corresponding to the ternary representation in s.
Note: the empty string represents 0.'''
if s == "":
return 0
return int(s[0])*(3**(len(s)-1)) + ternaryToNum(s[1:])
#print(ternaryToNum('12211010'))
| [] |
imyz/25000 | clover.py | 909b6ceaf326138b0684e6600f347a38fe68f9f0 | #!/usr/bin/env python
from math import *
import sys
def rotate(x, y, degrees):
c = cos(pi * degrees / 180.0)
s = sin(pi * degrees / 180.0)
return x * c + y * s, y * c - x * s
def move(verb, **kwargs):
keys = kwargs.keys()
keys.sort()
words = [verb.upper()]
for key in keys:
words.append('%s%g' % (key.upper(), kwargs[key]))
print ' '.join(words)
def travel(**kwargs): move('G0', **kwargs)
def linear(**kwargs): move('G1', **kwargs)
def clockwise(**kwargs): move('G2', **kwargs)
def up(): travel(z=8)
def down(): linear(z=-2)
def jump(**kwargs):
up()
travel(**kwargs)
down()
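# Illustration: jump(x=10, y=-5) prints, in order:
#   G0 Z8        (lift)
#   G0 X10 Y-5   (rapid travel; move() sorts kwargs and uppercases them)
#   G1 Z-2       (plunge back down)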
frame_width = 200
frame_height = 75
drill = 1.6 # 1/16 inch radius.
extrusion = 15
motor_screw_grid = 31
motor_cutout_diameter = 22
motor_width = 42.2
motor_offset = 35 # Motor face to extrusion.
motor_side, motor_bend = rotate(0, motor_offset + extrusion, 30)
motor_side += extrusion/2
motor_side += extrusion/cos(pi/6)
mc = motor_cutout_diameter/2 + drill
#nema23 = 47.14 # Mounting screws center-to-center
clover = 6
thickness = 0.0478 * 25.4 # 18 gauge steel.
enable_perimeter = False
print >> sys.stderr, 'thickness', thickness
print >> sys.stderr, 'motor_bend', motor_bend
print >> sys.stderr, 'motor_side', motor_side
print >> sys.stderr, 'mc', mc
print >> sys.stderr, 'extrusion-to-extrusion', frame_width
print >> sys.stderr, 'edge-to-edge', frame_width + 2*extrusion
xa = motor_side - drill # Outside wings start
xb = motor_side + motor_bend + drill
xs1 = xa + extrusion/2 # Extrusion screws
xs2 = xb - extrusion/2
# xe = frame_width/2 # Extrusion corner
xt = motor_width/2
xms = motor_screw_grid/sqrt(2)
xgs = 19
ya = frame_height/2 + drill # Top without flange
yb = frame_height/2 + drill - extrusion
ys = frame_height/2 - extrusion/2 # Extrusion screws
yt = motor_width/2
yt2 = yt + 4
yms = xms
ygs = xgs
s2 = sqrt(2)
print 'G17 ; Select XY plane for arcs'
print 'G90 ; Absolute coordinates'
move('G92', x=0, y=0, z=0)
linear(x=0, y=0, z=0)
print '; Gasket screw holes'
for x in (-xgs, xgs):
for y in (-x, x):
jump(x=x, y=y)
# clockwise(i=1)
if enable_perimeter:
print '; Horizontal extrusion screw holes'
for x in (xs1, xs2):
jump(x=x, y=ys)
for x in (xs2, xs1, -xs1, -xs2):
jump(x=x, y=-ys)
for x in (-xs2, -xs1):
jump(x=x, y=ys)
#print '; 22mm dia cutout for reference'
#jump(x=0, y=11)
#clockwise(j=-11)
#print '; NEMA17 square for reference'
#jump(x=0, y=yt*s2)
#linear(x=xt*s2, y=0)
#linear(x=0, y=-yt*s2)
#linear(x=-xt*s2, y=0)
#linear(x=0, y=yt*s2)
def clovercut(z):
up()
travel(x=-clover+1, y=yms-clover-1)
linear(z=z)
print '; Motor cutout clover leaf'
linear(x=-clover, y=yms-clover)
clockwise(x=clover, i=clover, j=clover)
#clockwise(x=xms-clover, y=clover, r=mc)
linear(x=xms-clover, y=clover, r=mc)
clockwise(y=-clover, i=clover, j=-clover)
#clockwise(x=clover, y=-yms+clover, r=mc)
linear(x=clover, y=-yms+clover, r=mc)
clockwise(x=-clover, i=-clover, j=-clover)
#clockwise(x=-xms+clover, y=-clover, r=mc)
linear(x=-xms+clover, y=-clover, r=mc)
clockwise(y=clover, i=-clover, j=clover)
#clockwise(x=-clover, y=yms-clover, r=mc)
linear(x=-clover, y=yms-clover, r=mc)
linear(x=-clover+1, y=yms-clover+1)
for z in (-1, -2.5):
clovercut(z)
def perimeter(z):
up()
travel(x=xa, y=yb)
linear(z=z)
print '; Right wing (outside horizontal extrusions)'
clockwise(x=xa+extrusion, y=ya, i=extrusion)
linear(x=xb)
linear(y=-ya)
linear(x=xa+extrusion)
clockwise(x=xa, y=-yb, j=extrusion)
print '; Extrusion pass-through and motor mounting plate'
linear(x=xa-20)
clockwise(x=-xa+20, i=-xa+20, j=yb)
linear(x=-xa, y=-yb)
print '; Left wing (outside horizontal extrusions)'
clockwise(x=-xa-extrusion, y=-ya, i=-extrusion)
linear(x=-xb)
linear(y=ya)
linear(x=-xa-extrusion)
clockwise(x=-xa, y=yb, j=-extrusion)
print '; Extrusion pass-through and motor mounting plate'
linear(x=-xa+20)
clockwise(x=xa-20, i=xa-20, j=-yb)
linear(x=xa, y=yb)
if enable_perimeter:
for z in (-1, -2.5):
perimeter(z)
print '; All done'
up()
| [] |
asiakaczmar/noise2self | scripts/mnist_inference.py | 75daaf188c49bff0da22c235540da20f4eca9614 | import torch
from torchvision.datasets import MNIST
from torchvision import transforms
from torch.utils.data import DataLoader
import numpy as np
from scripts.utils import SyntheticNoiseDataset
from models.babyunet import BabyUnet
CHECKPOINTS_PATH = '../checkpoints/'
mnist_test = MNIST('../inferred_data/MNIST', download=True,
transform=transforms.Compose([
transforms.ToTensor(),
]), train=False)
noisy_mnist_test = SyntheticNoiseDataset(mnist_test, 'test')
data_loader = DataLoader(noisy_mnist_test, batch_size=256, shuffle=True)
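# Each batch is whatever SyntheticNoiseDataset yields; the loop below assumes
# the batch can be fed to the model as-is.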
for x in range(0, 200, 10):
    trained_model = BabyUnet()
    # load_state_dict expects a state dict, so read the checkpoint file first
    trained_model.load_state_dict(torch.load(CHECKPOINTS_PATH + 'model' + str(x)))
    trained_model.eval()
    for i, batch in enumerate(data_loader):
        with torch.no_grad():
            denoised = trained_model(batch)
        break  # one batch is enough here
    np.save('../inferred_data/model' + str(x) + '.npz', denoised.numpy())
| [] |
sdcubber/kaggle_carvana | src/processing/augmentation.py | 44f6c7f1e80be2caa3c7ad4c7fb69067af45fe8f | # Script for data augmentation functions
import numpy as np
from collections import deque
from PIL import Image
import cv2
import torch
from data.config import *
def imread_cv2(image_path):
"""
Read image_path with cv2 format (H, W, C)
    if the image is a '.gif', the output is a numpy array of {0,1}
"""
image_format = image_path[-3:]
if image_format == 'jpg':
image = cv2.imread(image_path)
else:
image = np.array(Image.open(image_path))
return image
def resize_cv2(image, height=1280, width=1918):
    return cv2.resize(image, (width, height), interpolation=cv2.INTER_LINEAR)
def image_to_tensor(image, mean=0, std=1.):
"""Transform image (input is numpy array, read in by cv2) """
if len(image.shape) == 2:
image = image.reshape(image.shape[0], image.shape[1], 1)
image = image.astype(np.float32)
image = (image-mean)/std
image = image.transpose((2,0,1))
tensor = torch.from_numpy(image)
return tensor
# --- Data Augmentation functions --- #
# A lot of functions can be found here:
# https://github.com/fchollet/keras/blob/master/keras/preprocessing/image.py#L223
# transform image and label
def randomHorizontalFlip(image, mask, p=0.5):
"""Do a random horizontal flip with probability p"""
if np.random.random() < p:
image = np.fliplr(image)
mask = np.fliplr(mask)
return image, mask
def randomVerticalFlip(image, mask, p=0.5):
"""Do a random vertical flip with probability p"""
if np.random.random() < p:
image = np.flipud(image)
mask = np.flipud(mask)
return image, mask
def randomHorizontalShift(image, mask, max_shift=0.05, p=0.5):
"""Do random horizontal shift with max proportion shift and with probability p
Elements that roll beyond the last position are re-introduced at the first."""
max_shift_pixels = int(max_shift*image.shape[1])
shift = np.random.choice(np.arange(-max_shift_pixels, max_shift_pixels+1))
if np.random.random() < p:
image = np.roll(image, shift, axis=1)
mask = np.roll(mask, shift, axis=1)
return image, mask
def randomVerticalShift(image, mask, max_shift=0.05, p=0.5):
"""Do random vertical shift with max proportion shift and probability p
Elements that roll beyond the last position are re-introduced at the first."""
max_shift_pixels = int(max_shift*image.shape[0])
shift = np.random.choice(np.arange(-max_shift_pixels, max_shift_pixels+1))
if np.random.random() < p:
image = np.roll(image, shift, axis=0)
mask = np.roll(mask, shift, axis=0)
return image, mask
def randomInvert(image, mask, p=0.5):
"""Randomly invert image with probability p"""
if np.random.random() < p:
image = 255 - image
mask = mask
return image, mask
def randomBrightness(image, mask, p=0.75):
"""With probability p, randomly increase or decrease brightness.
See https://stackoverflow.com/questions/37822375/python-opencv-increasing-image-brightness-without-overflowing-uint8-array"""
if np.random.random() < p:
max_value = np.percentile(255-image, q=25) # avoid burning out white cars, so take image-specific maximum
value = np.random.choice(np.arange(-max_value, max_value))
if value > 0:
image = np.where((255 - image) < value,255,image+value).astype(np.uint8)
else:
image = np.where(image < -value,0,image+value).astype(np.uint8)
return image, mask
def randomHue(image, mask, p=0.25, max_value=75):
"""With probability p, randomly increase or decrease hue.
See https://stackoverflow.com/questions/32609098/how-to-fast-change-image-brightness-with-python-opencv"""
if np.random.random() < p:
value = np.random.choice(np.arange(-max_value, max_value))
hsv = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)
hsv[:,:,0] = hsv[:,:,0] + value
hsv = np.clip(hsv, a_min=0, a_max=255).astype(np.uint8)
image = cv2.cvtColor(hsv, cv2.COLOR_HSV2BGR)
return image, mask
def GaussianBlur(image, mask, kernel=(1, 1), sigma=1, p=0.5):
    """With probability p, apply Gaussian blur to the image (the mask stays crisp)"""
    # minimal sketch: blur only the image so the {0,1} mask is unaffected
    if np.random.random() < p:
        image = cv2.GaussianBlur(image, kernel, sigma)
    return image, mask
def randomRotate(image, mask, max_angle, p=0.5):
    """Sketch: rotate image and mask together by up to +/-max_angle degrees with probability p"""
    if np.random.random() < p:
        angle = np.random.uniform(-max_angle, max_angle)
        M = cv2.getRotationMatrix2D((image.shape[1] / 2, image.shape[0] / 2), angle, 1.0)
        image = cv2.warpAffine(image, M, (image.shape[1], image.shape[0]))
        mask = cv2.warpAffine(mask, M, (image.shape[1], image.shape[0]))
    return image, mask
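# Example usage (sketch; the file paths below are placeholders):
if __name__ == '__main__':
    image = imread_cv2('car.jpg')        # hypothetical input image
    mask = imread_cv2('car_mask.gif')    # hypothetical {0,1} mask
    image, mask = randomHorizontalFlip(image, mask)
    image, mask = randomHorizontalShift(image, mask, max_shift=0.05)
    image, mask = randomBrightness(image, mask)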
| [((23, 11, 23, 62), 'cv2.resize', 'cv2.resize', ({(23, 22, 23, 27): 'image', (23, 29, 23, 43): '(width, heigh)', (23, 45, 23, 61): 'cv2.INTER_LINEAR'}, {}), '(image, (width, heigh), cv2.INTER_LINEAR)', False, 'import cv2\n'), ((16, 16, 16, 38), 'cv2.imread', 'cv2.imread', ({(16, 27, 16, 37): 'image_path'}, {}), '(image_path)', False, 'import cv2\n'), ((43, 7, 43, 25), 'numpy.random.random', 'np.random.random', ({}, {}), '()', True, 'import numpy as np\n'), ((44, 16, 44, 32), 'numpy.fliplr', 'np.fliplr', ({(44, 26, 44, 31): 'image'}, {}), '(image)', True, 'import numpy as np\n'), ((45, 15, 45, 30), 'numpy.fliplr', 'np.fliplr', ({(45, 25, 45, 29): 'mask'}, {}), '(mask)', True, 'import numpy as np\n'), ((50, 7, 50, 25), 'numpy.random.random', 'np.random.random', ({}, {}), '()', True, 'import numpy as np\n'), ((51, 16, 51, 32), 'numpy.flipud', 'np.flipud', ({(51, 26, 51, 31): 'image'}, {}), '(image)', True, 'import numpy as np\n'), ((52, 15, 52, 30), 'numpy.flipud', 'np.flipud', ({(52, 25, 52, 29): 'mask'}, {}), '(mask)', True, 'import numpy as np\n'), ((59, 29, 59, 77), 'numpy.arange', 'np.arange', ({(59, 39, 59, 56): '-max_shift_pixels', (59, 58, 59, 76): 'max_shift_pixels + 1'}, {}), '(-max_shift_pixels, max_shift_pixels + 1)', True, 'import numpy as np\n'), ((60, 7, 60, 25), 'numpy.random.random', 'np.random.random', ({}, {}), '()', True, 'import numpy as np\n'), ((61, 16, 61, 45), 'numpy.roll', 'np.roll', (), '', True, 'import numpy as np\n'), ((62, 15, 62, 43), 'numpy.roll', 'np.roll', (), '', True, 'import numpy as np\n'), ((69, 29, 69, 77), 'numpy.arange', 'np.arange', ({(69, 39, 69, 56): '-max_shift_pixels', (69, 58, 69, 76): 'max_shift_pixels + 1'}, {}), '(-max_shift_pixels, max_shift_pixels + 1)', True, 'import numpy as np\n'), ((70, 7, 70, 25), 'numpy.random.random', 'np.random.random', ({}, {}), '()', True, 'import numpy as np\n'), ((71, 20, 71, 49), 'numpy.roll', 'np.roll', (), '', True, 'import numpy as np\n'), ((72, 19, 72, 47), 'numpy.roll', 'np.roll', (), '', True, 'import numpy as np\n'), ((77, 7, 77, 25), 'numpy.random.random', 'np.random.random', ({}, {}), '()', True, 'import numpy as np\n'), ((85, 7, 85, 25), 'numpy.random.random', 'np.random.random', ({}, {}), '()', True, 'import numpy as np\n'), ((86, 20, 86, 50), 'numpy.percentile', 'np.percentile', (), '', True, 'import numpy as np\n'), ((98, 7, 98, 25), 'numpy.random.random', 'np.random.random', ({}, {}), '()', True, 'import numpy as np\n'), ((100, 14, 100, 52), 'cv2.cvtColor', 'cv2.cvtColor', ({(100, 27, 100, 32): 'image', (100, 34, 100, 51): 'cv2.COLOR_BGR2HSV'}, {}), '(image, cv2.COLOR_BGR2HSV)', False, 'import cv2\n'), ((103, 16, 103, 52), 'cv2.cvtColor', 'cv2.cvtColor', ({(103, 29, 103, 32): 'hsv', (103, 34, 103, 51): 'cv2.COLOR_HSV2BGR'}, {}), '(hsv, cv2.COLOR_HSV2BGR)', False, 'import cv2\n'), ((18, 25, 18, 47), 'PIL.Image.open', 'Image.open', ({(18, 36, 18, 46): 'image_path'}, {}), '(image_path)', False, 'from PIL import Image\n'), ((87, 33, 87, 65), 'numpy.arange', 'np.arange', ({(87, 43, 87, 53): '-max_value', (87, 55, 87, 64): 'max_value'}, {}), '(-max_value, max_value)', True, 'import numpy as np\n'), ((99, 33, 99, 65), 'numpy.arange', 'np.arange', ({(99, 43, 99, 53): '-max_value', (99, 55, 99, 64): 'max_value'}, {}), '(-max_value, max_value)', True, 'import numpy as np\n'), ((102, 14, 102, 46), 'numpy.clip', 'np.clip', (), '', True, 'import numpy as np\n'), ((89, 20, 89, 67), 'numpy.where', 'np.where', ({(89, 29, 89, 50): '255 - image < value', (89, 51, 89, 54): '255', (89, 55, 89, 66): 'image + 
value'}, {}), '(255 - image < value, 255, image + value)', True, 'import numpy as np\n'), ((91, 20, 91, 58), 'numpy.where', 'np.where', ({(91, 29, 91, 43): 'image < -value', (91, 44, 91, 45): '0', (91, 46, 91, 57): 'image + value'}, {}), '(image < -value, 0, image + value)', True, 'import numpy as np\n')] |
tohugaby/pur_beurre_web | substitute_finder/templatetags/substitute_finder_extra.py | c3bdacee50907eea79821e7a8b3fe0f349719d88 | """
substitute_finder app custom templatetags module
"""
from django import template
register = template.Library()
@register.filter
def range_tag(value, min_value=0):
"""
tag that return a range
"""
if value:
return range(min_value, value)
return range(min_value)
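# Example template usage (illustrative):
#   {% load substitute_finder_extra %}
#   {% for i in 5|range_tag %}{{ i }}{% endfor %}    renders 01234
#   {% for i in 5|range_tag:2 %}{{ i }}{% endfor %}  renders 234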
| [((6, 11, 6, 29), 'django.template.Library', 'template.Library', ({}, {}), '()', False, 'from django import template\n')] |
AlexandraAlter/django-terrafirma | terrafirma/core/views/env.py | afce5946f173aded2b4bfea78cf1b1034ec32272 | from django.shortcuts import render, redirect, get_object_or_404
from django.urls import reverse_lazy
from django import views
from django.views import generic as g_views
from django.views.generic import base as b_views, edit as e_views
from .. import forms, models
class NewEnvView(e_views.CreateView):
model = models.Environment
fields = ['name', 'abbrev']
success_url = reverse_lazy('home')
class EnvMixin(b_views.ContextMixin):
def setup(self, request, *args, **kwargs):
super().setup(request, *args, **kwargs)
self.env = get_object_or_404(models.Environment, abbrev=kwargs['env_abbrev'])
def url_vars(self):
return {'env_abbrev': self.env.abbrev}
def get_context_data(self, **kwargs):
return super().get_context_data(env=self.env, **kwargs)
class MaybeEnvMixin(b_views.ContextMixin):
def setup(self, request, *args, **kwargs):
super().setup(request, *args, **kwargs)
        # 'env' is optional in the query string, so avoid KeyError/DoesNotExist
        abbrev = request.GET.get('env')
        self.env = models.Environment.objects.filter(abbrev=abbrev).first() if abbrev else None
def url_vars(self):
return {'env_abbrev': self.env.abbrev if self.env else None}
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
if self.env:
context.update(env=self.env)
return context
class EnvView(EnvMixin, g_views.DetailView):
model = models.Environment
slug_field = 'abbrev'
slug_url_kwarg = 'env_abbrev'
class EditEnvView(EnvMixin, e_views.UpdateView):
model = models.Environment
fields = ['name', 'abbrev']
slug_field = 'abbrev'
slug_url_kwarg = 'env_abbrev'
def setup(self, request, *args, **kwargs):
super().setup(request, *args, **kwargs)
self.object = self.env
def form_valid(self, form):
self.object = form.save(commit=False)
self.object.save()
return redirect('env', env_abbrev=self.env.abbrev)
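# Hypothetical URL wiring for these views (paths and route names assumed,
# except 'env', which form_valid() above redirects to):
# urlpatterns = [
#     path('env/new/', NewEnvView.as_view(), name='new-env'),
#     path('env/<slug:env_abbrev>/', EnvView.as_view(), name='env'),
#     path('env/<slug:env_abbrev>/edit/', EditEnvView.as_view(), name='edit-env'),
# ]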
| [((13, 18, 13, 38), 'django.urls.reverse_lazy', 'reverse_lazy', ({(13, 31, 13, 37): '"""home"""'}, {}), "('home')", False, 'from django.urls import reverse_lazy\n'), ((19, 19, 19, 85), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (), '', False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((62, 15, 62, 58), 'django.shortcuts.redirect', 'redirect', (), '', False, 'from django.shortcuts import render, redirect, get_object_or_404\n')] |
JustinGOSSES/geoviz | geoviz/__init__.py | 159b0665d9efcffe46061313c15ad09ced840d2d | from load_las_data import LoadLasData
from altair_log_plot import AltAirLogPlot
from load_shapefile_data import LoadShpData
from alitair_well_location_map import WellLocationMap
| [] |
ahmad-PH/auto_lcc | core/data/utils.py | 55a6ac0e92994f4eed9951a27b7aa0d834f9d804 | import pickle
import pandas as pd
from typing import List, Tuple
def load_libofc_df(data_path):
def tuple_to_df(data: List[Tuple]) -> pd.DataFrame:
return pd.DataFrame(data, columns=["class", "title", "synopsis", "id"])
with open(data_path, 'rb') as f:
classes = pickle.load(f)
train = pickle.load(f)
test = pickle.load(f)
return classes, tuple_to_df(train), tuple_to_df(test)
| [((7, 15, 7, 79), 'pandas.DataFrame', 'pd.DataFrame', (), '', True, 'import pandas as pd\n'), ((10, 18, 10, 32), 'pickle.load', 'pickle.load', ({(10, 30, 10, 31): 'f'}, {}), '(f)', False, 'import pickle\n'), ((11, 16, 11, 30), 'pickle.load', 'pickle.load', ({(11, 28, 11, 29): 'f'}, {}), '(f)', False, 'import pickle\n'), ((12, 15, 12, 29), 'pickle.load', 'pickle.load', ({(12, 27, 12, 28): 'f'}, {}), '(f)', False, 'import pickle\n')] |
hhdMrLion/mxshop-api | apps/users/adminx.py | 1472ad0d959439ea80c1f8d8bfd3629c15d3017d | import xadmin
from users.models import VerifyCode
from xadmin import views
class BaseSetting(object):
    # Enable the theme-switching feature
enable_themes = True
user_bootswatch = True
class GlobalSettings(object):
    # Global settings: admin site title and footer
    site_title = '天天生鲜后台管理'  # i.e. "Tiantian Fresh back-office admin"
    site_footer = 'https://www.qnmlgb.top/'
    # Collapse the menu (accordion style)
menu_style = 'accordion'
class VerifyCodeAdmin(object):
list_display = ['code', 'mobile', 'add_time']
xadmin.site.register(VerifyCode, VerifyCodeAdmin)
xadmin.site.register(views.BaseAdminView, BaseSetting)
xadmin.site.register(views.CommAdminView, GlobalSettings)
| [((24, 0, 24, 49), 'xadmin.site.register', 'xadmin.site.register', ({(24, 21, 24, 31): 'VerifyCode', (24, 33, 24, 48): 'VerifyCodeAdmin'}, {}), '(VerifyCode, VerifyCodeAdmin)', False, 'import xadmin\n'), ((25, 0, 25, 54), 'xadmin.site.register', 'xadmin.site.register', ({(25, 21, 25, 40): 'views.BaseAdminView', (25, 42, 25, 53): 'BaseSetting'}, {}), '(views.BaseAdminView, BaseSetting)', False, 'import xadmin\n'), ((26, 0, 26, 57), 'xadmin.site.register', 'xadmin.site.register', ({(26, 21, 26, 40): 'views.CommAdminView', (26, 42, 26, 56): 'GlobalSettings'}, {}), '(views.CommAdminView, GlobalSettings)', False, 'import xadmin\n')] |
Yeok-c/Urban-Sound-Classification | Archive/train_cnn.py | 98c46eb54266ef7b859d192e9bebe8a5d48e1708 | ### Load necessary libraries ###
import numpy as np
from sklearn.model_selection import KFold
from sklearn.metrics import accuracy_score
import tensorflow as tf
from tensorflow import keras
from sklearn.metrics import ConfusionMatrixDisplay
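# NOTE: get_network() is assumed to be defined elsewhere in this project
# (a helper that builds and compiles the CNN); it is not imported in this file.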
model = get_network()
model.summary()
### Train and evaluate via 10-Folds cross-validation ###
accuracies = []
folds = np.array(['fold1','fold2','fold3','fold4',
'fold5','fold6','fold7','fold8',
'fold9','fold10'])
load_dir = "UrbanSounds8K/processed/"
kf = KFold(n_splits=10)
for train_index, test_index in kf.split(folds):
x_train, y_train = [], []
for ind in train_index:
# read features or segments of an audio file
train_data = np.load("{0}/{1}.npz".format(load_dir,folds[ind]),
allow_pickle=True)
# for training stack all the segments so that they are treated as an example/instance
features = np.concatenate(train_data["features"], axis=0)
labels = np.concatenate(train_data["labels"], axis=0)
x_train.append(features)
y_train.append(labels)
# stack x,y pairs of all training folds
x_train = np.concatenate(x_train, axis = 0).astype(np.float32)
y_train = np.concatenate(y_train, axis = 0).astype(np.float32)
# for testing we will make predictions on each segment and average them to
# produce single label for an entire sound clip.
test_data = np.load("{0}/{1}.npz".format(load_dir,
folds[test_index][0]), allow_pickle=True)
x_test = test_data["features"]
y_test = test_data["labels"]
log_dir="logs/fit/" + folds[test_index][0]
tensorboard_callback = tf.keras.callbacks.TensorBoard(log_dir=log_dir, histogram_freq=1)
model = get_network()
model.fit(x_train, y_train, epochs = 20, batch_size = 64, verbose = 1, validation_split=0.2,
use_multiprocessing=True, workers=8, callbacks=[tensorboard_callback])
# evaluate on test set/fold
y_true, y_pred = [], []
for x, y in zip(x_test, y_test):
# average predictions over segments of a sound clip
avg_p = np.argmax(np.mean(model.predict(x), axis = 0))
y_pred.append(avg_p)
# pick single label via np.unique for a sound clip
y_true.append(np.unique(y)[0])
accuracies.append(accuracy_score(y_true, y_pred))
print("Fold n accuracy: {0}".format(accuracy_score(y_true, y_pred)))
cm = ConfusionMatrixDisplay.from_predictions(y_true, y_pred)
cm.figure_.savefig('conf_mat_' + str(test_index) + '_acc_' + str(accuracy_score(y_true, y_pred)) + '.png',dpi=1000)
print("Average 10 Folds Accuracy: {0}".format(np.mean(accuracies)))
| [((17, 8, 19, 36), 'numpy.array', 'np.array', ({(17, 17, 19, 35): "['fold1', 'fold2', 'fold3', 'fold4', 'fold5', 'fold6', 'fold7', 'fold8',\n 'fold9', 'fold10']"}, {}), "(['fold1', 'fold2', 'fold3', 'fold4', 'fold5', 'fold6', 'fold7',\n 'fold8', 'fold9', 'fold10'])", True, 'import numpy as np\n'), ((21, 5, 21, 23), 'sklearn.model_selection.KFold', 'KFold', (), '', False, 'from sklearn.model_selection import KFold\n'), ((46, 27, 46, 92), 'tensorflow.keras.callbacks.TensorBoard', 'tf.keras.callbacks.TensorBoard', (), '', True, 'import tensorflow as tf\n'), ((63, 9, 63, 64), 'sklearn.metrics.ConfusionMatrixDisplay.from_predictions', 'ConfusionMatrixDisplay.from_predictions', ({(63, 49, 63, 55): 'y_true', (63, 57, 63, 63): 'y_pred'}, {}), '(y_true, y_pred)', False, 'from sklearn.metrics import ConfusionMatrixDisplay\n'), ((29, 19, 29, 65), 'numpy.concatenate', 'np.concatenate', (), '', True, 'import numpy as np\n'), ((30, 17, 30, 61), 'numpy.concatenate', 'np.concatenate', (), '', True, 'import numpy as np\n'), ((60, 22, 60, 52), 'sklearn.metrics.accuracy_score', 'accuracy_score', ({(60, 37, 60, 43): 'y_true', (60, 45, 60, 51): 'y_pred'}, {}), '(y_true, y_pred)', False, 'from sklearn.metrics import accuracy_score\n'), ((66, 46, 66, 65), 'numpy.mean', 'np.mean', ({(66, 54, 66, 64): 'accuracies'}, {}), '(accuracies)', True, 'import numpy as np\n'), ((35, 14, 35, 47), 'numpy.concatenate', 'np.concatenate', (), '', True, 'import numpy as np\n'), ((36, 14, 36, 47), 'numpy.concatenate', 'np.concatenate', (), '', True, 'import numpy as np\n'), ((61, 40, 61, 70), 'sklearn.metrics.accuracy_score', 'accuracy_score', ({(61, 55, 61, 61): 'y_true', (61, 63, 61, 69): 'y_pred'}, {}), '(y_true, y_pred)', False, 'from sklearn.metrics import accuracy_score\n'), ((59, 22, 59, 34), 'numpy.unique', 'np.unique', ({(59, 32, 59, 33): 'y'}, {}), '(y)', True, 'import numpy as np\n'), ((64, 69, 64, 99), 'sklearn.metrics.accuracy_score', 'accuracy_score', ({(64, 84, 64, 90): 'y_true', (64, 92, 64, 98): 'y_pred'}, {}), '(y_true, y_pred)', False, 'from sklearn.metrics import accuracy_score\n')] |
wyaadarsh/LeetCode-Solutions | Python3/1436-Destination-City/soln.py | 3719f5cb059eefd66b83eb8ae990652f4b7fd124 | from typing import List
class Solution:
def destCity(self, paths: List[List[str]]) -> str:
bads = set()
cities = set()
for u, v in paths:
cities.add(u)
cities.add(v)
bads.add(u)
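        # the destination is the only city that never appears as a path's start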
ans = cities - bads
return list(ans)[0]
| [] |
zillow/metaflow | metaflow/plugins/kfp/tests/flows/resources_flow.py | a42dc9eab04695f2b0a429874e607ed67d5a2b45 | import os
import pprint
import subprocess
import time
from typing import Dict, List
from kubernetes.client import (
V1EnvVar,
V1EnvVarSource,
V1ObjectFieldSelector,
V1ResourceFieldSelector,
)
from metaflow import FlowSpec, step, environment, resources, current
def get_env_vars(env_resources: Dict[str, str]) -> List[V1EnvVar]:
res = []
for name, resource in env_resources.items():
res.append(
V1EnvVar(
                # expose the container's resource requests/limits to the step
                # as environment variables via the Kubernetes downward API
name=name,
value_from=V1EnvVarSource(
resource_field_ref=V1ResourceFieldSelector(
container_name="main",
resource=resource,
divisor="1m" if "cpu" in resource else "1",
)
),
)
)
return res
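# For example, get_env_vars({"CPU": "requests.cpu"}) yields a single V1EnvVar
# named "CPU" whose value the kubelet injects via the downward API
# (divisor "1m" because the resource name contains "cpu").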
kubernetes_vars = get_env_vars(
{
"LOCAL_STORAGE": "requests.ephemeral-storage",
"LOCAL_STORAGE_LIMIT": "limits.ephemeral-storage",
"CPU": "requests.cpu",
"CPU_LIMIT": "limits.cpu",
"MEMORY": "requests.memory",
"MEMORY_LIMIT": "limits.memory",
}
)
kubernetes_vars.append(
V1EnvVar(
name="MY_POD_NAME",
value_from=V1EnvVarSource(
field_ref=V1ObjectFieldSelector(field_path="metadata.name")
),
)
)
annotations = {
"metaflow.org/flow_name": "MF_NAME",
"metaflow.org/step": "MF_STEP",
"metaflow.org/run_id": "MF_RUN_ID",
"metaflow.org/experiment": "MF_EXPERIMENT",
"metaflow.org/tag_metaflow_test": "MF_TAG_METAFLOW_TEST",
"metaflow.org/tag_test_t1": "MF_TAG_TEST_T1",
}
for annotation, env_name in annotations.items():
kubernetes_vars.append(
V1EnvVar(
name=env_name,
value_from=V1EnvVarSource(
field_ref=V1ObjectFieldSelector(
field_path=f"metadata.annotations['{annotation}']"
)
),
)
)
labels = {
"aip.zillowgroup.net/kfp-pod-default": "KF_POD_DEFAULT",
"tags.ledger.zgtools.net/ai-flow-name": "AI_FLOW_NAME",
"tags.ledger.zgtools.net/ai-step-name": "AI_STEP_NAME",
"tags.ledger.zgtools.net/ai-experiment-name": "AI_EXPERIMENT_NAME",
}
for label, env_name in labels.items():
kubernetes_vars.append(
V1EnvVar(
name=env_name,
value_from=V1EnvVarSource(
field_ref=V1ObjectFieldSelector(
field_path=f"metadata.labels['{label}']"
)
),
)
)
class ResourcesFlow(FlowSpec):
@resources(
local_storage="242",
cpu="0.6",
memory="1G",
)
@environment( # pylint: disable=E1102
vars={"MY_ENV": "value"}, kubernetes_vars=kubernetes_vars
)
@step
def start(self):
pprint.pprint(dict(os.environ))
print("=====")
# test simple environment var
assert os.environ.get("MY_ENV") == "value"
# test kubernetes_vars
assert "resourcesflow" in os.environ.get("MY_POD_NAME")
assert os.environ.get("CPU") == "600"
assert os.environ.get("CPU_LIMIT") == "600"
assert os.environ.get("LOCAL_STORAGE") == "242000000"
assert os.environ.get("LOCAL_STORAGE_LIMIT") == "242000000"
assert os.environ.get("MEMORY") == "1000000000"
assert os.environ.get("MEMORY_LIMIT") == "1000000000"
assert os.environ.get("MF_NAME") == current.flow_name
assert os.environ.get("MF_STEP") == current.step_name
assert os.environ.get("MF_RUN_ID") == current.run_id
assert os.environ.get("MF_EXPERIMENT") == "metaflow_test"
assert os.environ.get("MF_TAG_METAFLOW_TEST") == "true"
assert os.environ.get("MF_TAG_TEST_T1") == "true"
assert os.environ.get("KF_POD_DEFAULT") == "true"
assert os.environ.get("AI_FLOW_NAME") == current.flow_name
assert os.environ.get("AI_STEP_NAME") == current.step_name
assert os.environ.get("AI_EXPERIMENT_NAME") == "metaflow_test"
self.items = [1, 2]
self.next(self.foreach_step, foreach="items")
@environment(vars={"MY_ENV": "value"}) # pylint: disable=E1102
@resources(volume="11G")
@step
def foreach_step(self):
# test simple environment var
assert os.environ.get("MY_ENV") == "value"
output = subprocess.check_output(
"df -h | grep /opt/metaflow_volume", shell=True
)
assert "11G" in str(output)
self.next(self.join_step)
@resources(volume="12G")
@step
def join_step(self, inputs):
output = subprocess.check_output(
"df -h | grep /opt/metaflow_volume", shell=True
)
assert "12G" in str(output)
self.next(self.split_step)
@step
def split_step(self):
self.items = [1, 2]
self.next(self.shared_volume_foreach_step, foreach="items")
@resources(volume="13G", volume_mode="ReadWriteMany")
@step
def shared_volume_foreach_step(self):
output = subprocess.check_output(
"df -h | grep /opt/metaflow_volume", shell=True
)
assert "13G" in str(output)
file_path = "/opt/metaflow_volume/test.txt"
message = "hello world!"
# validate the volume is shared across the foreach splits
if self.input == 1:
with open(file_path, "w") as f:
f.write(message)
else:
while not os.path.exists(file_path):
time.sleep(1)
print(".")
with open(file_path, "r") as f:
read_lines = f.readlines()
print("read_lines", read_lines)
assert message == read_lines[0]
self.next(self.shared_volume_join_step)
@step
def shared_volume_join_step(self, inputs):
self.next(self.end)
@step
def end(self):
print("All done.")
if __name__ == "__main__":
ResourcesFlow()
| [((96, 5, 100, 5), 'metaflow.resources', 'resources', (), '', False, 'from metaflow import FlowSpec, step, environment, resources, current\n'), ((101, 5, 103, 5), 'metaflow.environment', 'environment', (), '', False, 'from metaflow import FlowSpec, step, environment, resources, current\n'), ((137, 5, 137, 42), 'metaflow.environment', 'environment', (), '', False, 'from metaflow import FlowSpec, step, environment, resources, current\n'), ((138, 5, 138, 28), 'metaflow.resources', 'resources', (), '', False, 'from metaflow import FlowSpec, step, environment, resources, current\n'), ((151, 5, 151, 28), 'metaflow.resources', 'resources', (), '', False, 'from metaflow import FlowSpec, step, environment, resources, current\n'), ((165, 5, 165, 57), 'metaflow.resources', 'resources', (), '', False, 'from metaflow import FlowSpec, step, environment, resources, current\n'), ((144, 17, 146, 9), 'subprocess.check_output', 'subprocess.check_output', (), '', False, 'import subprocess\n'), ((154, 17, 156, 9), 'subprocess.check_output', 'subprocess.check_output', (), '', False, 'import subprocess\n'), ((168, 17, 170, 9), 'subprocess.check_output', 'subprocess.check_output', (), '', False, 'import subprocess\n'), ((110, 15, 110, 39), 'os.environ.get', 'os.environ.get', ({(110, 30, 110, 38): '"""MY_ENV"""'}, {}), "('MY_ENV')", False, 'import os\n'), ((113, 34, 113, 63), 'os.environ.get', 'os.environ.get', ({(113, 49, 113, 62): '"""MY_POD_NAME"""'}, {}), "('MY_POD_NAME')", False, 'import os\n'), ((114, 15, 114, 36), 'os.environ.get', 'os.environ.get', ({(114, 30, 114, 35): '"""CPU"""'}, {}), "('CPU')", False, 'import os\n'), ((115, 15, 115, 42), 'os.environ.get', 'os.environ.get', ({(115, 30, 115, 41): '"""CPU_LIMIT"""'}, {}), "('CPU_LIMIT')", False, 'import os\n'), ((116, 15, 116, 46), 'os.environ.get', 'os.environ.get', ({(116, 30, 116, 45): '"""LOCAL_STORAGE"""'}, {}), "('LOCAL_STORAGE')", False, 'import os\n'), ((117, 15, 117, 52), 'os.environ.get', 'os.environ.get', ({(117, 30, 117, 51): '"""LOCAL_STORAGE_LIMIT"""'}, {}), "('LOCAL_STORAGE_LIMIT')", False, 'import os\n'), ((118, 15, 118, 39), 'os.environ.get', 'os.environ.get', ({(118, 30, 118, 38): '"""MEMORY"""'}, {}), "('MEMORY')", False, 'import os\n'), ((119, 15, 119, 45), 'os.environ.get', 'os.environ.get', ({(119, 30, 119, 44): '"""MEMORY_LIMIT"""'}, {}), "('MEMORY_LIMIT')", False, 'import os\n'), ((121, 15, 121, 40), 'os.environ.get', 'os.environ.get', ({(121, 30, 121, 39): '"""MF_NAME"""'}, {}), "('MF_NAME')", False, 'import os\n'), ((122, 15, 122, 40), 'os.environ.get', 'os.environ.get', ({(122, 30, 122, 39): '"""MF_STEP"""'}, {}), "('MF_STEP')", False, 'import os\n'), ((123, 15, 123, 42), 'os.environ.get', 'os.environ.get', ({(123, 30, 123, 41): '"""MF_RUN_ID"""'}, {}), "('MF_RUN_ID')", False, 'import os\n'), ((124, 15, 124, 46), 'os.environ.get', 'os.environ.get', ({(124, 30, 124, 45): '"""MF_EXPERIMENT"""'}, {}), "('MF_EXPERIMENT')", False, 'import os\n'), ((125, 15, 125, 53), 'os.environ.get', 'os.environ.get', ({(125, 30, 125, 52): '"""MF_TAG_METAFLOW_TEST"""'}, {}), "('MF_TAG_METAFLOW_TEST')", False, 'import os\n'), ((126, 15, 126, 47), 'os.environ.get', 'os.environ.get', ({(126, 30, 126, 46): '"""MF_TAG_TEST_T1"""'}, {}), "('MF_TAG_TEST_T1')", False, 'import os\n'), ((128, 15, 128, 47), 'os.environ.get', 'os.environ.get', ({(128, 30, 128, 46): '"""KF_POD_DEFAULT"""'}, {}), "('KF_POD_DEFAULT')", False, 'import os\n'), ((130, 15, 130, 45), 'os.environ.get', 'os.environ.get', ({(130, 30, 130, 44): '"""AI_FLOW_NAME"""'}, {}), "('AI_FLOW_NAME')", False, 'import os\n'), ((131, 15, 131, 45), 'os.environ.get', 'os.environ.get', ({(131, 30, 131, 44): '"""AI_STEP_NAME"""'}, {}), "('AI_STEP_NAME')", False, 'import os\n'), ((132, 15, 132, 51), 'os.environ.get', 'os.environ.get', ({(132, 30, 132, 50): '"""AI_EXPERIMENT_NAME"""'}, {}), "('AI_EXPERIMENT_NAME')", False, 'import os\n'), ((142, 15, 142, 39), 'os.environ.get', 'os.environ.get', ({(142, 30, 142, 38): '"""MY_ENV"""'}, {}), "('MY_ENV')", False, 'import os\n'), ((181, 22, 181, 47), 'os.path.exists', 'os.path.exists', ({(181, 37, 181, 46): 'file_path'}, {}), '(file_path)', False, 'import os\n'), ((182, 16, 182, 29), 'time.sleep', 'time.sleep', ({(182, 27, 182, 28): '(1)'}, {}), '(1)', False, 'import time\n'), ((51, 22, 51, 71), 'kubernetes.client.V1ObjectFieldSelector', 'V1ObjectFieldSelector', (), '', False, 'from kubernetes.client import V1EnvVar, V1EnvVarSource, V1ObjectFieldSelector, V1ResourceFieldSelector\n'), ((69, 26, 71, 17), 'kubernetes.client.V1ObjectFieldSelector', 'V1ObjectFieldSelector', (), '', False, 'from kubernetes.client import V1EnvVar, V1EnvVarSource, V1ObjectFieldSelector, V1ResourceFieldSelector\n'), ((87, 26, 89, 17), 'kubernetes.client.V1ObjectFieldSelector', 'V1ObjectFieldSelector', (), '', False, 'from kubernetes.client import V1EnvVar, V1EnvVarSource, V1ObjectFieldSelector, V1ResourceFieldSelector\n'), ((26, 39, 30, 21), 'kubernetes.client.V1ResourceFieldSelector', 'V1ResourceFieldSelector', (), '', False, 'from kubernetes.client import V1EnvVar, V1EnvVarSource, V1ObjectFieldSelector, V1ResourceFieldSelector\n')] |
redfrexx/osm_association_rules | src/nb_utils/general.py | 33975ce25047f9ab3b21e890bc5ed9bab59a0a2f | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Functions used for data handling
"""
__author__ = "Christina Ludwig, GIScience Research Group, Heidelberg University"
__email__ = "[email protected]"
import os
import yaml
from shapely.geometry import box
import numpy as np
import pandas as pd
import geopandas as gpd
import json
from nb_utils.utils import create_bbox, reproject_to_utm
CONTEXT_NAMES = {"area": "Area", "building_density": "Building density", "age": "Days since creation",
"n_tags": "Number of tags", "changes": "Number of changes", "max_version": "Version number",
"user_count_inner": "Inner user count", "user_density_inner": "Inner user density",
"user_count_outer": "Outer user count", "user_density_outer": "Outer user density",
"feature_count": "Feature count", "random": "Random"}
rules_colnames = ['antecedents', 'consequents', 'antecedent support',
'consequent support', 'support', 'confidence', 'lift', 'leverage',
'conviction', "context", "context_min", "context_max", "context_p_min", "context_p_max", "nfeatures", "rule"]
pretty_names_units = {"area": "Area [ha]", "building_density": "Building density", "feature_count": "Feature count", "age": "Days since creation", "n_tags": "Number of tags", "changes": "Number of changes", "max_version": "Version number", "user_count_inner": "Inner user count", "user_density_inner": "Inner user density", "user_count_outer": "Outer user count",
"user_density_outer": "Outer user density", "random": "Random"}
def load_config(config_file, cities):
"""
    Load config parameters from a YAML file.
    :param config_file: path to the YAML configuration file
    :param cities: iterable of city names to keep from the config
    :return: dict mapping each requested city to its configuration
    """
    if not os.path.exists(config_file):
        print("ERROR: Config file {} does not exist.".format(config_file))
        return None
    with open(config_file, 'r') as src:
        config = yaml.load(src, Loader=yaml.FullLoader)
    config_cities = config["locations"]
    config_cities = {city: config_cities[city] for city in cities}
    return config_cities
def load_data(cities, data_dir):
"""
Load data into notebook from file
:return:
"""
loaded_tags_dfs = []
loaded_context_dfs = []
for city in cities:
print("Loading {}...".format(city))
# Check paths
tags_file = os.path.join(data_dir, city, "{}_tags.json".format(city))
context_file = os.path.join(data_dir, city, "{}_context.geojson".format(city))
if (not os.path.exists(tags_file)) or (not os.path.exists(context_file)):
print("{}: Input files not found.".format(city))
            return None  # keep the single-value return type used on success
# Read data and set index
tags_df = pd.read_json(tags_file).set_index("@osmId")
context_df = gpd.read_file(context_file).set_index("@osmId")
# Calculate area (should be moved to data_extraction)
context_df["area"] = reproject_to_utm(context_df).area #/ 10000. # conversion to ha
# Add column holding the city name
context_df["city"] = city
loaded_tags_dfs.append(tags_df)
loaded_context_dfs.append(context_df)
# Convert list of dataframes to dataframe
all_tags_df = pd.concat(loaded_tags_dfs, axis=0)
all_tags_df = all_tags_df.fillna(False)
all_context_df = pd.concat(loaded_context_dfs, axis=0)
all_features = all_context_df.join(all_tags_df, sort=False)
# Add dummy columns for "no antecedent" and random context variable
all_features["none"] = True
all_features["random"] = np.random.rand(len(all_features))
# The park iteself is always counted as an objects inside of it. Therefore, subtract 1.
all_features["feature_count"] = all_features["feature_count"] - 1
# Delete unnecessary columns
unnecessary_cols = list(filter(lambda x: x.startswith("gt:"), all_features.columns)) + ["leisure=park"]
all_features.drop(unnecessary_cols, axis=1, inplace=True)
return all_features
def create_city_bboxes(config_cities):
"""
    Create bounding boxes for the configured cities.
    :return: GeoDataFrame with one bounding-box geometry per city
"""
bboxes = {c: box(*create_bbox(config_cities[c]["center"], config_cities[c]["width"])) for c in config_cities.keys()}
    bbox_df = pd.DataFrame.from_dict(bboxes, orient="index", columns=["geometry"])
return gpd.GeoDataFrame(bbox_df)
def dump_city_rules(city_rules, interim_dir):
"""
Write results from context based association rule analysis to file
:param city_rules:
:param interim_dir:
:return:
"""
city_rules_dir = os.path.join(interim_dir, "city_rules")
if not os.path.exists(city_rules_dir):
os.mkdir(city_rules_dir)
for k, v in city_rules.items():
print(k)
v["heatmap"].to_json(os.path.join(city_rules_dir, "{}_heatmap.json".format(k)))
v["valid_rules"].reset_index().to_json(os.path.join(city_rules_dir, "{}_valid_rules.json".format(k)))
with open(os.path.join(city_rules_dir, "{}_sel_features.json".format(k)), "w") as dst:
json.dump(list(v["sel_features"].index), dst)
def load_city_rules(cities, interim_dir, all_features):
"""
Load results from context based association rule analysis to file
:param cities:
:param interim_dir:
:param all_features:
:return:
"""
city_rules = {}
for city in cities:
with open(os.path.join(interim_dir, "city_rules", "{}_sel_features.json".format(city))) as dst:
selected_ids = json.load(dst)
sel_features = all_features.loc[selected_ids]
city_rules[city] = {
"heatmap": pd.read_json(os.path.join(interim_dir, "city_rules", "{}_heatmap.json".format(city))),
"valid_rules": pd.read_json(
os.path.join(interim_dir, "city_rules", "{}_valid_rules.json".format(city))).set_index("index"),
"sel_features": sel_features}
return city_rules
| [((82, 18, 82, 52), 'pandas.concat', 'pd.concat', (), '', True, 'import pandas as pd\n'), ((84, 21, 84, 58), 'pandas.concat', 'pd.concat', (), '', True, 'import pandas as pd\n'), ((106, 11, 106, 36), 'geopandas.GeoDataFrame', 'gpd.GeoDataFrame', ({(106, 28, 106, 35): 'bbox_df'}, {}), '(bbox_df)', True, 'import geopandas as gpd\n'), ((116, 21, 116, 60), 'os.path.join', 'os.path.join', ({(116, 34, 116, 45): 'interim_dir', (116, 47, 116, 59): '"""city_rules"""'}, {}), "(interim_dir, 'city_rules')", False, 'import os\n'), ((42, 11, 42, 38), 'os.path.exists', 'os.path.exists', ({(42, 26, 42, 37): 'config_file'}, {}), '(config_file)', False, 'import os\n'), ((117, 11, 117, 41), 'os.path.exists', 'os.path.exists', ({(117, 26, 117, 40): 'city_rules_dir'}, {}), '(city_rules_dir)', False, 'import os\n'), ((118, 8, 118, 32), 'os.mkdir', 'os.mkdir', ({(118, 17, 118, 31): 'city_rules_dir'}, {}), '(city_rules_dir)', False, 'import os\n'), ((46, 21, 46, 59), 'yaml.load', 'yaml.load', (), '', False, 'import yaml\n'), ((74, 29, 74, 57), 'nb_utils.utils.reproject_to_utm', 'reproject_to_utm', ({(74, 46, 74, 56): 'context_df'}, {}), '(context_df)', False, 'from nb_utils.utils import create_bbox, reproject_to_utm\n'), ((105, 14, 105, 28), 'pandas.DataFrame', 'pd.DataFrame', ({}, {}), '()', True, 'import pandas as pd\n'), ((138, 27, 138, 41), 'json.load', 'json.load', ({(138, 37, 138, 40): 'dst'}, {}), '(dst)', False, 'import json\n'), ((65, 16, 65, 41), 'os.path.exists', 'os.path.exists', ({(65, 31, 65, 40): 'tags_file'}, {}), '(tags_file)', False, 'import os\n'), ((65, 51, 65, 79), 'os.path.exists', 'os.path.exists', ({(65, 66, 65, 78): 'context_file'}, {}), '(context_file)', False, 'import os\n'), ((70, 18, 70, 41), 'pandas.read_json', 'pd.read_json', ({(70, 31, 70, 40): 'tags_file'}, {}), '(tags_file)', True, 'import pandas as pd\n'), ((71, 21, 71, 48), 'geopandas.read_file', 'gpd.read_file', ({(71, 35, 71, 47): 'context_file'}, {}), '(context_file)', True, 'import geopandas as gpd\n'), ((104, 22, 104, 88), 'nb_utils.utils.create_bbox', 'create_bbox', ({(104, 34, 104, 60): "config_cities[c]['center']", (104, 62, 104, 87): "config_cities[c]['width']"}, {}), "(config_cities[c]['center'], config_cities[c]['width'])", False, 'from nb_utils.utils import create_bbox, reproject_to_utm\n')] |
darren-wang/ksc | keystoneclient/auth/identity/v3/federated.py | fd096540e8e57b6bd7c923f4cb4ad6616d103cc8 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
from oslo_config import cfg
import six
from keystoneclient.auth.identity.v3 import base
from keystoneclient.auth.identity.v3 import token
__all__ = ['FederatedBaseAuth']
@six.add_metaclass(abc.ABCMeta)
class FederatedBaseAuth(base.BaseAuth):
rescoping_plugin = token.Token
def __init__(self, auth_url, identity_provider, protocol, **kwargs):
"""Class constructor accepting following parameters:
:param auth_url: URL of the Identity Service
:type auth_url: string
:param identity_provider: name of the Identity Provider the client
will authenticate against. This parameter
will be used to build a dynamic URL used to
obtain unscoped OpenStack token.
        :type identity_provider: string
        :param protocol: name of the federation protocol used to build the
                         federated authentication URL (e.g. saml2)
        :type protocol: string
        """
super(FederatedBaseAuth, self).__init__(auth_url=auth_url, **kwargs)
self.identity_provider = identity_provider
self.protocol = protocol
@classmethod
def get_options(cls):
options = super(FederatedBaseAuth, cls).get_options()
options.extend([
cfg.StrOpt('identity-provider',
help="Identity Provider's name"),
cfg.StrOpt('protocol',
help='Protocol for federated plugin'),
])
return options
@property
def federated_token_url(self):
"""Full URL where authorization data is sent."""
values = {
'host': self.auth_url.rstrip('/'),
'identity_provider': self.identity_provider,
'protocol': self.protocol
}
url = ("%(host)s/OS-FEDERATION/identity_providers/"
"%(identity_provider)s/protocols/%(protocol)s/auth")
url = url % values
return url
def _get_scoping_data(self):
return {'trust_id': self.trust_id,
'domain_id': self.domain_id,
'domain_name': self.domain_name,
'project_id': self.project_id,
'project_name': self.project_name,
'project_domain_id': self.project_domain_id,
'project_domain_name': self.project_domain_name}
def get_auth_ref(self, session, **kwargs):
"""Authenticate retrieve token information.
This is a multi-step process where a client does federated authn
receives an unscoped token.
If an unscoped token is successfully received and scoping information
is present then the token is rescoped to that target.
:param session: a session object to send out HTTP requests.
:type session: keystoneclient.session.Session
:returns: a token data representation
:rtype: :py:class:`keystoneclient.access.AccessInfo`
"""
auth_ref = self.get_unscoped_auth_ref(session)
scoping = self._get_scoping_data()
if any(scoping.values()):
token_plugin = self.rescoping_plugin(self.auth_url,
token=auth_ref.auth_token,
**scoping)
auth_ref = token_plugin.get_auth_ref(session)
return auth_ref
@abc.abstractmethod
def get_unscoped_auth_ref(self, session, **kwargs):
"""Fetch unscoped federated token."""
| [((24, 1, 24, 31), 'six.add_metaclass', 'six.add_metaclass', ({(24, 19, 24, 30): 'abc.ABCMeta'}, {}), '(abc.ABCMeta)', False, 'import six\n'), ((50, 12, 51, 55), 'oslo_config.cfg.StrOpt', 'cfg.StrOpt', (), '', False, 'from oslo_config import cfg\n'), ((52, 12, 53, 60), 'oslo_config.cfg.StrOpt', 'cfg.StrOpt', (), '', False, 'from oslo_config import cfg\n')] |
lefevre-fraser/openmeta-mms | bin/Python27/Lib/site-packages/tables/utilsExtension.py | 08f3115e76498df1f8d70641d71f5c52cab4ce5f | from warnings import warn
from tables.utilsextension import *
_warnmsg = ("utilsExtension is pending deprecation, import utilsextension instead. "
"You may use the pt2to3 tool to update your source code.")
warn(_warnmsg, DeprecationWarning, stacklevel=2)
| [((6, 0, 6, 48), 'warnings.warn', 'warn', (), '', False, 'from warnings import warn\n')] |
iDevHank/i18n | config.py | ec731b5d6fab330a868ebb9f9e11ff1caef629ef | #!/usr/bin/env python3
# The pattern of your project's localizable call.
# The settings below match the '"string".localized' style.
SUFFIX = '.localized'
KEY = r'"(?:\\.|[^"\\])*"'
LOCALIZABLE_RE = r'%s%s' % (KEY, SUFFIX)
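# e.g. LOCALIZABLE_RE matches occurrences such as "Hello".localized in source files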
# Specify the path of localizable files in project.
LOCALIZABLE_FILE_PATH = ''
LOCALIZABLE_FILE_NAMES = ['Localizable']
LOCALIZABLE_FILE_TYPES = ['strings']
# File types of source file.
SEARCH_TYPES = ['swift', 'm', 'json']
SOURCE_FILE_EXCLUSIVE_PATHS = [
'Assets.xcassets', 'Carthage', 'ThirdParty',
'Pods', 'Media.xcassets', 'Framework', 'bin']
LOCALIZABLE_FILE_EXCLUSIVE_PATHS = ['Carthage', 'ThirdParty',
'Pods', 'Framework', 'bin']
LOCALIZABLE_FORMAT_RE = r'"(?:\\.|[^"\\])*"\s*=\s*"(?:\\.|[^"\\])*";\n'
DEFAULT_TARGET_PATH = 'generated.strings'
| [] |
Astewart1510/pvt-algoranddashboard | dashboard_analytics/tasks/transaction_processor.py | 6fb6cf37b483339f24cc86f0a95fb2245be492ca | from dashboard_analytics.models import AccountType, InstrumentType, Account, Transaction
def process_json_transactions(transactions):
for txn in transactions:
print(txn["pk"]) | [] |
hschwane/offline_production | MuonGun/resources/scripts/histreduce.py | e14a6493782f613b8bbe64217559765d5213dc1e | #!/usr/bin/env python
"""
Add all (potentially gigantic) histograms in a group of files.
"""
import dashi
import tables
import os, sys, operator, shutil
from optparse import OptionParser
parser = OptionParser(usage="%prog [OPTIONS] infiles outfile", description=__doc__)
parser.add_option("--blocksize", dest="blocksize", type=int, default=2048)
opts, args = parser.parse_args()
if len(args) < 2:
parser.error("You must specify at least one output and one input file")
infiles, outfile = args[:-1], args[-1]
if os.path.exists(outfile):
parser.error("%s already exists!" % outfile)
shutil.copy(infiles[0], outfile)
from collections import defaultdict
paths = defaultdict(list)
for fname in infiles[1:]:
with tables.openFile(fname) as hdf:
for group in hdf.walkNodes(where='/', classname='Group'):
if 'ndim' in group._v_attrs: # a dashi histogram
path = group._v_pathname
paths[path].append(fname)
def histadd(sourceGroup, destGroup, blocksize=1):
"""
Add dashi histograms stored in HDF5 groups
:param blocksize: operate on blocksize I/O chunks at a time
"""
for arr in '_h_bincontent', '_h_squaredweights':
source = sourceGroup._v_children[arr]
dest = destGroup._v_children[arr]
chunksize = blocksize*reduce(operator.mul, dest.chunkshape)
size = reduce(operator.mul, dest.shape)
for i in range(0, size, chunksize):
dest[i:i+chunksize] += source[i:i+chunksize]
for prop in 'nentries', 'nans', 'nans_wgt', 'nans_sqwgt':
destGroup._v_attrs[prop] += sourceGroup._v_attrs[prop]
with tables.openFile(outfile, 'a') as ohdf:
for path, fnames in paths.iteritems():
print(path)
destGroup = ohdf.getNode(path)
for fname in fnames:
with tables.openFile(fname) as hdf:
histadd(hdf.getNode(path), destGroup, opts.blocksize)
| [((12, 9, 12, 83), 'optparse.OptionParser', 'OptionParser', (), '', False, 'from optparse import OptionParser\n'), ((20, 3, 20, 26), 'os.path.exists', 'os.path.exists', ({(20, 18, 20, 25): 'outfile'}, {}), '(outfile)', False, 'import os, sys, operator, shutil\n'), ((23, 0, 23, 32), 'shutil.copy', 'shutil.copy', ({(23, 12, 23, 22): 'infiles[0]', (23, 24, 23, 31): 'outfile'}, {}), '(infiles[0], outfile)', False, 'import os, sys, operator, shutil\n'), ((26, 8, 26, 25), 'collections.defaultdict', 'defaultdict', ({(26, 20, 26, 24): 'list'}, {}), '(list)', False, 'from collections import defaultdict\n'), ((50, 5, 50, 34), 'tables.openFile', 'tables.openFile', ({(50, 21, 50, 28): 'outfile', (50, 30, 50, 33): '"""a"""'}, {}), "(outfile, 'a')", False, 'import tables\n'), ((28, 6, 28, 28), 'tables.openFile', 'tables.openFile', ({(28, 22, 28, 27): 'fname'}, {}), '(fname)', False, 'import tables\n'), ((55, 8, 55, 30), 'tables.openFile', 'tables.openFile', ({(55, 24, 55, 29): 'fname'}, {}), '(fname)', False, 'import tables\n')] |
ignaciocabeza/procrastinate | procrastinate/exceptions.py | 95ba8c7acdf39aa7a1216c19903802b4f65b65d1 | import datetime
class ProcrastinateException(Exception):
"""
Unexpected Procrastinate error.
"""
def __init__(self, message=None):
if not message:
message = self.__doc__
super().__init__(message)
class TaskNotFound(ProcrastinateException):
"""
Task cannot be imported.
"""
class JobError(ProcrastinateException):
"""
Job ended with an exception.
"""
class LoadFromPathError(ImportError, ProcrastinateException):
"""
App was not found at the provided path, or the loaded object is not an App.
"""
class JobRetry(ProcrastinateException):
"""
Job should be retried.
"""
def __init__(self, scheduled_at: datetime.datetime):
self.scheduled_at = scheduled_at
super().__init__()
class AppNotOpen(ProcrastinateException):
"""
App was not open. Procrastinate App needs to be opened using:
- ``app.open()``,
- ``await app.open_async()``,
- ``with app.open():``,
- ``async with app.open_async():``.
"""
class ConnectorException(ProcrastinateException):
"""
Database error.
"""
# The precise error can be seen with ``exception.__cause__``.
class AlreadyEnqueued(ProcrastinateException):
"""
There is already a job waiting in the queue with the same queueing lock.
"""
class UniqueViolation(ConnectorException):
"""
A unique constraint is violated. The constraint name is available in
``exception.constraint_name``.
"""
def __init__(self, *args, constraint_name: str):
super().__init__(*args)
self.constraint_name = constraint_name
class MissingApp(ProcrastinateException):
"""
Missing app. This most probably happened because procrastinate needs an
app via --app or the PROCRASTINATE_APP environment variable.
"""
class SyncConnectorConfigurationError(ProcrastinateException):
"""
A synchronous connector (probably Psycopg2Connector) was used, but the operation
needs an asynchronous connector (AiopgConnector). Please check your App
configuration.
"""
| [] |
vyshakTs/STORE_MANAGEMENT_SYSTEM | config/settings/local.py | b6b82a02c0b512083c35a8656e191436552569a9 | from .base import *
DEBUG = True
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'SMS',
'USER': 'postgres',
'PASSWORD': 'password',
'HOST': 'localhost',
'PORT': '',
}
}
INSTALLED_APPS += [
'debug_toolbar.apps.DebugToolbarConfig',
'django_extensions',
]
ALLOWED_HOSTS += ['.herokuapp.com']
# Loads SECRET_KEY from .env file
# SECRET_KEY = get_env_variable('SECRET_KEY')
| [] |
haojunsng/foodpanda-dataeng | question3.py | b1b9a5c615113a1b8727c9c7dfe7ad3e50059428 | from functions import get_df, write_df
import geopy
from geopy import distance
"""
The function question3 takes in the latitude and longitude of potential distress locations,
and returns the nearest port with essential provisions such as water, fuel_oil and diesel.
"""
def question3(dataset_name, latitude, longitude):
df = get_df()
distress_location = (latitude, longitude)
    ports_with_provisions = df[(df['provisions'] == True) & (df['water'] == True) & (df['fuel_oil'] == True) & (df['diesel'] == True)].copy()  # .copy() avoids pandas SettingWithCopyWarning when adding 'dist' below
results = []
for each in ports_with_provisions.itertuples(index=False):
each_coords = (float(each[4]), float(each[5]))
dist = geopy.distance.geodesic(distress_location, each_coords)
results.append(dist.km)
ports_with_provisions['dist'] = results
answer3 = ports_with_provisions.sort_values(by='dist', ascending=True)[['country', 'port_name', 'port_latitude', 'port_longitude']].head(1)
write_df(answer3, dataset_name, 'Table for Question 3')
if __name__ == "__main__":
question3("foodpanda_tables", 32.610982, -38.706256)
| [((12, 9, 12, 17), 'functions.get_df', 'get_df', ({}, {}), '()', False, 'from functions import get_df, write_df\n'), ((26, 4, 26, 59), 'functions.write_df', 'write_df', ({(26, 13, 26, 20): 'answer3', (26, 22, 26, 34): 'dataset_name', (26, 36, 26, 58): '"""Table for Question 3"""'}, {}), "(answer3, dataset_name, 'Table for Question 3')", False, 'from functions import get_df, write_df\n'), ((20, 15, 20, 70), 'geopy.distance.geodesic', 'geopy.distance.geodesic', ({(20, 39, 20, 56): 'distress_location', (20, 58, 20, 69): 'each_coords'}, {}), '(distress_location, each_coords)', False, 'import geopy\n')] |
valory-xyz/agents-aea | plugins/aea-cli-benchmark/aea_cli_benchmark/case_acn_communication/case.py | 8f38efa96041b0156ed1ae328178e395dbabf2fc | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# ------------------------------------------------------------------------------
#
# Copyright 2022 Valory AG
# Copyright 2018-2021 Fetch.AI Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ------------------------------------------------------------------------------
"""Check amount of time for acn connection communications."""
import asyncio
import logging
import os
import time
from contextlib import contextmanager
from tempfile import TemporaryDirectory
from typing import Callable, List, Tuple, Union
from aea_cli_benchmark.case_acn_communication.utils import (
DEFAULT_DELEGATE_PORT,
DEFAULT_MAILBOX_PORT,
DEFAULT_NODE_PORT,
_make_libp2p_client_connection,
_make_libp2p_connection,
_make_libp2p_mailbox_connection,
)
from aea.connections.base import Connection
from aea.mail.base import Envelope
from packages.fetchai.protocols.default.message import DefaultMessage
class TimeMeasure:
"""Time measure data class."""
def __init__(self):
"""Init data class instance."""
self.time = -1
@contextmanager
def time_measure():
"""Get time measure context."""
start = time.time()
m = TimeMeasure()
try:
yield m
finally:
m.time = time.time() - start
def make_envelope(from_addr: str, to_addr: str) -> Envelope:
"""Construct an envelope."""
msg = DefaultMessage(
dialogue_reference=("", ""),
message_id=1,
target=0,
performative=DefaultMessage.Performative.BYTES,
content=b"hello",
)
envelope = Envelope(
to=to_addr,
sender=from_addr,
message=msg,
)
return envelope
async def _run(con_maker: Callable[..., Connection]) -> Tuple[float, float]:
"""Run test case and return times for the first and the second messages sent over ACN."""
try:
connections = []
genesis_node = _make_libp2p_connection(".", relay=True)
await genesis_node.connect()
connections.append(genesis_node)
genesis_multiaddr = genesis_node.node.multiaddrs[0]
relay_node1 = _make_libp2p_connection(
".",
relay=True,
entry_peers=[genesis_multiaddr],
port=DEFAULT_NODE_PORT + 1,
mailbox=True,
delegate=True,
mailbox_port=DEFAULT_MAILBOX_PORT + 1,
delegate_port=DEFAULT_DELEGATE_PORT + 1,
)
await relay_node1.connect()
connections.append(relay_node1)
relay_node2 = _make_libp2p_connection(
".",
relay=True,
entry_peers=[genesis_multiaddr],
port=DEFAULT_NODE_PORT + 2,
mailbox=True,
delegate=True,
mailbox_port=DEFAULT_MAILBOX_PORT + 2,
delegate_port=DEFAULT_DELEGATE_PORT + 2,
)
await relay_node2.connect()
connections.append(relay_node2)
relay_node1_multiaddr = relay_node1.node.multiaddrs[0]
relay_node2_multiaddr = relay_node2.node.multiaddrs[0]
await asyncio.sleep(1)
con1 = con_maker(
port=DEFAULT_NODE_PORT + 10,
entry_peer=relay_node1_multiaddr,
mailbox_port=DEFAULT_MAILBOX_PORT + 1,
delegate_port=DEFAULT_DELEGATE_PORT + 1,
pub_key=relay_node1.node.pub,
)
await con1.connect()
connections.append(con1)
con2 = con_maker(
port=DEFAULT_NODE_PORT + 20,
entry_peer=relay_node2_multiaddr,
mailbox_port=DEFAULT_MAILBOX_PORT + 2,
delegate_port=DEFAULT_DELEGATE_PORT + 2,
pub_key=relay_node2.node.pub,
)
await con2.connect()
connections.append(con2)
envelope = make_envelope(con1.address, con2.address)
with time_measure() as tm:
await con1.send(envelope)
envelope = await con2.receive()
first_time = tm.time
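        # The second round-trip reuses the already-established route, so it
        # should approximate steady-state latency (the first send likely
        # includes peer discovery overhead).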
with time_measure() as tm:
await con1.send(envelope)
envelope = await con2.receive()
second_time = tm.time
return first_time, second_time
finally:
for con in reversed(connections):
await con.disconnect()
def run(connection: str, run_times: int = 10) -> List[Tuple[str, Union[int, float]]]:
"""Check construction time and memory usage."""
logging.basicConfig(level=logging.CRITICAL)
cwd = os.getcwd()
try:
if connection == "p2pnode":
def con_maker(
port: int,
entry_peer: str,
mailbox_port: int,
delegate_port: int,
pub_key: str,
):
return _make_libp2p_connection(".", port=port, entry_peers=[entry_peer])
elif connection == "client":
def con_maker(
port: int,
entry_peer: str,
mailbox_port: int,
delegate_port: int,
pub_key: str,
):
return _make_libp2p_client_connection(
peer_public_key=pub_key, data_dir=".", node_port=delegate_port
)
elif connection == "mailbox":
def con_maker(
port: int,
entry_peer: str,
mailbox_port: int,
delegate_port: int,
pub_key: str,
):
return _make_libp2p_mailbox_connection(
peer_public_key=pub_key, data_dir=".", node_port=mailbox_port
)
else:
raise ValueError(f"Unsupported connection: {connection}")
with TemporaryDirectory() as tmp_dir:
os.chdir(tmp_dir)
coro = _run(con_maker)
first_time, second_time = asyncio.get_event_loop().run_until_complete(coro)
return [
("first time (seconds)", first_time),
("second time (seconds)", second_time),
]
finally:
os.chdir(cwd)
| [((56, 12, 56, 23), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((66, 10, 72, 5), 'packages.fetchai.protocols.default.message.DefaultMessage', 'DefaultMessage', (), '', False, 'from packages.fetchai.protocols.default.message import DefaultMessage\n'), ((73, 15, 77, 5), 'aea.mail.base.Envelope', 'Envelope', (), '', False, 'from aea.mail.base import Envelope\n'), ((160, 4, 160, 47), 'logging.basicConfig', 'logging.basicConfig', (), '', False, 'import logging\n'), ((161, 10, 161, 21), 'os.getcwd', 'os.getcwd', ({}, {}), '()', False, 'import os\n'), ((85, 23, 85, 63), 'aea_cli_benchmark.case_acn_communication.utils._make_libp2p_connection', '_make_libp2p_connection', (), '', False, 'from aea_cli_benchmark.case_acn_communication.utils import DEFAULT_DELEGATE_PORT, DEFAULT_MAILBOX_PORT, DEFAULT_NODE_PORT, _make_libp2p_client_connection, _make_libp2p_connection, _make_libp2p_mailbox_connection\n'), ((90, 22, 99, 9), 'aea_cli_benchmark.case_acn_communication.utils._make_libp2p_connection', '_make_libp2p_connection', (), '', False, 'from aea_cli_benchmark.case_acn_communication.utils import DEFAULT_DELEGATE_PORT, DEFAULT_MAILBOX_PORT, DEFAULT_NODE_PORT, _make_libp2p_client_connection, _make_libp2p_connection, _make_libp2p_mailbox_connection\n'), ((102, 22, 111, 9), 'aea_cli_benchmark.case_acn_communication.utils._make_libp2p_connection', '_make_libp2p_connection', (), '', False, 'from aea_cli_benchmark.case_acn_communication.utils import DEFAULT_DELEGATE_PORT, DEFAULT_MAILBOX_PORT, DEFAULT_NODE_PORT, _make_libp2p_client_connection, _make_libp2p_connection, _make_libp2p_mailbox_connection\n'), ((213, 8, 213, 21), 'os.chdir', 'os.chdir', ({(213, 17, 213, 20): 'cwd'}, {}), '(cwd)', False, 'import os\n'), ((61, 17, 61, 28), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((117, 14, 117, 30), 'asyncio.sleep', 'asyncio.sleep', ({(117, 28, 117, 29): '(1)'}, {}), '(1)', False, 'import asyncio\n'), ((203, 13, 203, 33), 'tempfile.TemporaryDirectory', 'TemporaryDirectory', ({}, {}), '()', False, 'from tempfile import TemporaryDirectory\n'), ((204, 12, 204, 29), 'os.chdir', 'os.chdir', ({(204, 21, 204, 28): 'tmp_dir'}, {}), '(tmp_dir)', False, 'import os\n'), ((172, 23, 172, 88), 'aea_cli_benchmark.case_acn_communication.utils._make_libp2p_connection', '_make_libp2p_connection', (), '', False, 'from aea_cli_benchmark.case_acn_communication.utils import DEFAULT_DELEGATE_PORT, DEFAULT_MAILBOX_PORT, DEFAULT_NODE_PORT, _make_libp2p_client_connection, _make_libp2p_connection, _make_libp2p_mailbox_connection\n'), ((183, 23, 185, 17), 'aea_cli_benchmark.case_acn_communication.utils._make_libp2p_client_connection', '_make_libp2p_client_connection', (), '', False, 'from aea_cli_benchmark.case_acn_communication.utils import DEFAULT_DELEGATE_PORT, DEFAULT_MAILBOX_PORT, DEFAULT_NODE_PORT, _make_libp2p_client_connection, _make_libp2p_connection, _make_libp2p_mailbox_connection\n'), ((206, 38, 206, 62), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ({}, {}), '()', False, 'import asyncio\n'), ((196, 23, 198, 17), 'aea_cli_benchmark.case_acn_communication.utils._make_libp2p_mailbox_connection', '_make_libp2p_mailbox_connection', (), '', False, 'from aea_cli_benchmark.case_acn_communication.utils import DEFAULT_DELEGATE_PORT, DEFAULT_MAILBOX_PORT, DEFAULT_NODE_PORT, _make_libp2p_client_connection, _make_libp2p_connection, _make_libp2p_mailbox_connection\n')] |
q4a/bullet3 | examples/pybullet/vr_kuka_setup.py | b077f74f5675fb9ca7bafd238f097f87bf6c0367 | import pybullet as p
#p.connect(p.UDP,"192.168.86.100")
p.connect(p.SHARED_MEMORY)
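# Connecting via SHARED_MEMORY assumes a physics server (e.g. pybullet's VR app) is already running.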
p.resetSimulation()
objects = [p.loadURDF("plane.urdf", 0.000000,0.000000,0.000000,0.000000,0.000000,0.000000,1.000000)]
objects = [p.loadURDF("samurai.urdf", 0.000000,0.000000,0.000000,0.000000,0.000000,0.000000,1.000000)]
objects = [p.loadURDF("pr2_gripper.urdf", 0.500000,0.300006,0.700000,-0.000000,-0.000000,-0.000031,1.000000)]
pr2_gripper = objects[0]
print ("pr2_gripper=")
print (pr2_gripper)
jointPositions=[ 0.550569, 0.000000, 0.549657, 0.000000 ]
for jointIndex in range (p.getNumJoints(pr2_gripper)):
p.resetJointState(pr2_gripper,jointIndex,jointPositions[jointIndex])
pr2_cid = p.createConstraint(pr2_gripper,-1,-1,-1,p.JOINT_FIXED,[0,0,0],[0.2,0,0],[0.500000,0.300006,0.700000])
print ("pr2_cid")
print (pr2_cid)
objects = [p.loadURDF("kuka_iiwa/model_vr_limits.urdf", 1.400000,-0.200000,0.600000,0.000000,0.000000,0.000000,1.000000)]
kuka = objects[0]
jointPositions=[ -0.000000, -0.000000, 0.000000, 1.570793, 0.000000, -1.036725, 0.000001 ]
for jointIndex in range (p.getNumJoints(kuka)):
p.resetJointState(kuka,jointIndex,jointPositions[jointIndex])
p.setJointMotorControl2(kuka,jointIndex,p.POSITION_CONTROL,jointPositions[jointIndex],0)
objects = [p.loadURDF("lego/lego.urdf", 1.000000,-0.200000,0.700000,0.000000,0.000000,0.000000,1.000000)]
objects = [p.loadURDF("lego/lego.urdf", 1.000000,-0.200000,0.800000,0.000000,0.000000,0.000000,1.000000)]
objects = [p.loadURDF("lego/lego.urdf", 1.000000,-0.200000,0.900000,0.000000,0.000000,0.000000,1.000000)]
objects = p.loadSDF("gripper/wsg50_one_motor_gripper_new_free_base.sdf")
kuka_gripper = objects[0]
print ("kuka gripper=")
print(kuka_gripper)
p.resetBasePositionAndOrientation(kuka_gripper,[0.923103,-0.200000,1.250036],[-0.000000,0.964531,-0.000002,-0.263970])
jointPositions=[ 0.000000, -0.011130, -0.206421, 0.205143, -0.009999, 0.000000, -0.010055, 0.000000 ]
for jointIndex in range (p.getNumJoints(kuka_gripper)):
p.resetJointState(kuka_gripper,jointIndex,jointPositions[jointIndex])
p.setJointMotorControl2(kuka_gripper,jointIndex,p.POSITION_CONTROL,jointPositions[jointIndex],0)
kuka_cid = p.createConstraint(kuka, 6, kuka_gripper,0,p.JOINT_FIXED, [0,0,0], [0,0,0.05],[0,0,0])
objects = [p.loadURDF("jenga/jenga.urdf", 1.300000,-0.700000,0.750000,0.000000,0.707107,0.000000,0.707107)]
objects = [p.loadURDF("jenga/jenga.urdf", 1.200000,-0.700000,0.750000,0.000000,0.707107,0.000000,0.707107)]
objects = [p.loadURDF("jenga/jenga.urdf", 1.100000,-0.700000,0.750000,0.000000,0.707107,0.000000,0.707107)]
objects = [p.loadURDF("jenga/jenga.urdf", 1.000000,-0.700000,0.750000,0.000000,0.707107,0.000000,0.707107)]
objects = [p.loadURDF("jenga/jenga.urdf", 0.900000,-0.700000,0.750000,0.000000,0.707107,0.000000,0.707107)]
objects = [p.loadURDF("jenga/jenga.urdf", 0.800000,-0.700000,0.750000,0.000000,0.707107,0.000000,0.707107)]
objects = [p.loadURDF("table/table.urdf", 1.000000,-0.200000,0.000000,0.000000,0.000000,0.707107,0.707107)]
objects = [p.loadURDF("teddy_vhacd.urdf", 1.050000,-0.500000,0.700000,0.000000,0.000000,0.707107,0.707107)]
objects = [p.loadURDF("cube_small.urdf", 0.950000,-0.100000,0.700000,0.000000,0.000000,0.707107,0.707107)]
objects = [p.loadURDF("sphere_small.urdf", 0.850000,-0.400000,0.700000,0.000000,0.000000,0.707107,0.707107)]
objects = [p.loadURDF("duck_vhacd.urdf", 0.850000,-0.400000,0.900000,0.000000,0.000000,0.707107,0.707107)]
objects = p.loadSDF("kiva_shelf/model.sdf")
ob = objects[0]
p.resetBasePositionAndOrientation(ob,[0.000000,1.000000,1.204500],[0.000000,0.000000,0.000000,1.000000])
objects = [p.loadURDF("teddy_vhacd.urdf", -0.100000,0.600000,0.850000,0.000000,0.000000,0.000000,1.000000)]
objects = [p.loadURDF("sphere_small.urdf", -0.100000,0.955006,1.169706,0.633232,-0.000000,-0.000000,0.773962)]
objects = [p.loadURDF("cube_small.urdf", 0.300000,0.600000,0.850000,0.000000,0.000000,0.000000,1.000000)]
objects = [p.loadURDF("table_square/table_square.urdf", -1.000000,0.000000,0.000000,0.000000,0.000000,0.000000,1.000000)]
ob = objects[0]
jointPositions=[ 0.000000 ]
for jointIndex in range (p.getNumJoints(ob)):
p.resetJointState(ob,jointIndex,jointPositions[jointIndex])
objects = [p.loadURDF("husky/husky.urdf", 2.000000,-5.000000,1.000000,0.000000,0.000000,0.000000,1.000000)]
ob = objects[0]
jointPositions=[ 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000 ]
for jointIndex in range (p.getNumJoints(ob)):
p.resetJointState(ob,jointIndex,jointPositions[jointIndex])
p.setGravity(0.000000,0.000000,0.000000)
p.setGravity(0,0,-10)
p.stepSimulation()
p.disconnect()
| [((3, 0, 3, 26), 'pybullet.connect', 'p.connect', ({(3, 10, 3, 25): 'p.SHARED_MEMORY'}, {}), '(p.SHARED_MEMORY)', True, 'import pybullet as p\n'), ((4, 0, 4, 19), 'pybullet.resetSimulation', 'p.resetSimulation', ({}, {}), '()', True, 'import pybullet as p\n'), ((17, 10, 17, 111), 'pybullet.createConstraint', 'p.createConstraint', ({(17, 29, 17, 40): 'pr2_gripper', (17, 41, 17, 43): '-1', (17, 44, 17, 46): '-1', (17, 47, 17, 49): '-1', (17, 50, 17, 63): 'p.JOINT_FIXED', (17, 64, 17, 71): '[0, 0, 0]', (17, 72, 17, 81): '[0.2, 0, 0]', (17, 82, 17, 110): '[0.5, 0.300006, 0.7]'}, {}), '(pr2_gripper, -1, -1, -1, p.JOINT_FIXED, [0, 0, 0], [0.2,\n 0, 0], [0.5, 0.300006, 0.7])', True, 'import pybullet as p\n'), ((31, 10, 31, 72), 'pybullet.loadSDF', 'p.loadSDF', ({(31, 20, 31, 71): '"""gripper/wsg50_one_motor_gripper_new_free_base.sdf"""'}, {}), "('gripper/wsg50_one_motor_gripper_new_free_base.sdf')", True, 'import pybullet as p\n'), ((36, 0, 36, 118), 'pybullet.resetBasePositionAndOrientation', 'p.resetBasePositionAndOrientation', ({(36, 34, 36, 46): 'kuka_gripper', (36, 47, 36, 76): '[0.923103, -0.2, 1.250036]', (36, 77, 36, 117): '[-0.0, 0.964531, -2e-06, -0.26397]'}, {}), '(kuka_gripper, [0.923103, -0.2, 1.250036],\n [-0.0, 0.964531, -2e-06, -0.26397])', True, 'import pybullet as p\n'), ((43, 11, 43, 100), 'pybullet.createConstraint', 'p.createConstraint', ({(43, 30, 43, 34): 'kuka', (43, 38, 43, 39): '6', (43, 42, 43, 54): 'kuka_gripper', (43, 55, 43, 56): '0', (43, 57, 43, 70): 'p.JOINT_FIXED', (43, 72, 43, 79): '[0, 0, 0]', (43, 81, 43, 91): '[0, 0, 0.05]', (43, 92, 43, 99): '[0, 0, 0]'}, {}), '(kuka, 6, kuka_gripper, 0, p.JOINT_FIXED, [0, 0, 0], [0, \n 0, 0.05], [0, 0, 0])', True, 'import pybullet as p\n'), ((56, 10, 56, 43), 'pybullet.loadSDF', 'p.loadSDF', ({(56, 20, 56, 42): '"""kiva_shelf/model.sdf"""'}, {}), "('kiva_shelf/model.sdf')", True, 'import pybullet as p\n'), ((58, 0, 58, 104), 'pybullet.resetBasePositionAndOrientation', 'p.resetBasePositionAndOrientation', ({(58, 34, 58, 36): 'ob', (58, 37, 58, 65): '[0.0, 1.0, 1.2045]', (58, 66, 58, 103): '[0.0, 0.0, 0.0, 1.0]'}, {}), '(ob, [0.0, 1.0, 1.2045], [0.0, 0.0, 0.0, 1.0])', True, 'import pybullet as p\n'), ((74, 0, 74, 40), 'pybullet.setGravity', 'p.setGravity', ({(74, 13, 74, 21): '(0.0)', (74, 22, 74, 30): '(0.0)', (74, 31, 74, 39): '(0.0)'}, {}), '(0.0, 0.0, 0.0)', True, 'import pybullet as p\n'), ((75, 0, 75, 21), 'pybullet.setGravity', 'p.setGravity', ({(75, 13, 75, 14): '(0)', (75, 15, 75, 16): '(0)', (75, 17, 75, 20): '(-10)'}, {}), '(0, 0, -10)', True, 'import pybullet as p\n'), ((77, 0, 77, 18), 'pybullet.stepSimulation', 'p.stepSimulation', ({}, {}), '()', True, 'import pybullet as p\n'), ((79, 0, 79, 14), 'pybullet.disconnect', 'p.disconnect', ({}, {}), '()', True, 'import pybullet as p\n'), ((6, 11, 6, 99), 'pybullet.loadURDF', 'p.loadURDF', ({(6, 22, 6, 34): '"""plane.urdf"""', (6, 36, 6, 44): '(0.0)', (6, 45, 6, 53): '(0.0)', (6, 54, 6, 62): '(0.0)', (6, 63, 6, 71): '(0.0)', (6, 72, 6, 80): '(0.0)', (6, 81, 6, 89): '(0.0)', (6, 90, 6, 98): '(1.0)'}, {}), "('plane.urdf', 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0)", True, 'import pybullet as p\n'), ((7, 11, 7, 101), 'pybullet.loadURDF', 'p.loadURDF', ({(7, 22, 7, 36): '"""samurai.urdf"""', (7, 38, 7, 46): '(0.0)', (7, 47, 7, 55): '(0.0)', (7, 56, 7, 64): '(0.0)', (7, 65, 7, 73): '(0.0)', (7, 74, 7, 82): '(0.0)', (7, 83, 7, 91): '(0.0)', (7, 92, 7, 100): '(1.0)'}, {}), "('samurai.urdf', 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0)", True, 'import pybullet as p\n'), ((8, 11, 8, 108), 'pybullet.loadURDF', 'p.loadURDF', ({(8, 22, 8, 40): '"""pr2_gripper.urdf"""', (8, 42, 8, 50): '(0.5)', (8, 51, 8, 59): '(0.300006)', (8, 60, 8, 68): '(0.7)', (8, 69, 8, 78): '(-0.0)', (8, 79, 8, 88): '(-0.0)', (8, 89, 8, 98): '(-3.1e-05)', (8, 99, 8, 107): '(1.0)'}, {}), "('pr2_gripper.urdf', 0.5, 0.300006, 0.7, -0.0, -0.0, -3.1e-05, 1.0)", True, 'import pybullet as p\n'), ((14, 25, 14, 52), 'pybullet.getNumJoints', 'p.getNumJoints', ({(14, 40, 14, 51): 'pr2_gripper'}, {}), '(pr2_gripper)', True, 'import pybullet as p\n'), ((15, 1, 15, 69), 'pybullet.resetJointState', 'p.resetJointState', ({(15, 19, 15, 30): 'pr2_gripper', (15, 31, 15, 41): 'jointIndex', (15, 42, 15, 68): 'jointPositions[jointIndex]'}, {}), '(pr2_gripper, jointIndex, jointPositions[jointIndex])', True, 'import pybullet as p\n'), ((21, 11, 21, 120), 'pybullet.loadURDF', 'p.loadURDF', ({(21, 22, 21, 54): '"""kuka_iiwa/model_vr_limits.urdf"""', (21, 56, 21, 64): '(1.4)', (21, 65, 21, 74): '(-0.2)', (21, 75, 21, 83): '(0.6)', (21, 84, 21, 92): '(0.0)', (21, 93, 21, 101): '(0.0)', (21, 102, 21, 110): '(0.0)', (21, 111, 21, 119): '(1.0)'}, {}), "('kuka_iiwa/model_vr_limits.urdf', 1.4, -0.2, 0.6, 0.0, 0.0, 0.0, 1.0\n )", True, 'import pybullet as p\n'), ((24, 25, 24, 45), 'pybullet.getNumJoints', 'p.getNumJoints', ({(24, 40, 24, 44): 'kuka'}, {}), '(kuka)', True, 'import pybullet as p\n'), ((25, 1, 25, 62), 'pybullet.resetJointState', 'p.resetJointState', ({(25, 19, 25, 23): 'kuka', (25, 24, 25, 34): 'jointIndex', (25, 35, 25, 61): 'jointPositions[jointIndex]'}, {}), '(kuka, jointIndex, jointPositions[jointIndex])', True, 'import pybullet as p\n'), ((26, 1, 26, 89), 'pybullet.setJointMotorControl2', 'p.setJointMotorControl2', ({(26, 25, 26, 29): 'kuka', (26, 30, 26, 40): 'jointIndex', (26, 41, 26, 59): 'p.POSITION_CONTROL', (26, 60, 26, 86): 'jointPositions[jointIndex]', (26, 87, 26, 88): '(0)'}, {}), '(kuka, jointIndex, p.POSITION_CONTROL,\n jointPositions[jointIndex], 0)', True, 'import pybullet as p\n'), ((28, 11, 28, 104), 'pybullet.loadURDF', 'p.loadURDF', ({(28, 22, 28, 38): '"""lego/lego.urdf"""', (28, 40, 28, 48): '(1.0)', (28, 49, 28, 58): '(-0.2)', (28, 59, 28, 67): '(0.7)', (28, 68, 28, 76): '(0.0)', (28, 77, 28, 85): '(0.0)', (28, 86, 28, 94): '(0.0)', (28, 95, 28, 103): '(1.0)'}, {}), "('lego/lego.urdf', 1.0, -0.2, 0.7, 0.0, 0.0, 0.0, 1.0)", True, 'import pybullet as p\n'), ((29, 11, 29, 104), 'pybullet.loadURDF', 'p.loadURDF', ({(29, 22, 29, 38): '"""lego/lego.urdf"""', (29, 40, 29, 48): '(1.0)', (29, 49, 29, 58): '(-0.2)', (29, 59, 29, 67): '(0.8)', (29, 68, 29, 76): '(0.0)', (29, 77, 29, 85): '(0.0)', (29, 86, 29, 94): '(0.0)', (29, 95, 29, 103): '(1.0)'}, {}), "('lego/lego.urdf', 1.0, -0.2, 0.8, 0.0, 0.0, 0.0, 1.0)", True, 'import pybullet as p\n'), ((30, 11, 30, 104), 'pybullet.loadURDF', 'p.loadURDF', ({(30, 22, 30, 38): '"""lego/lego.urdf"""', (30, 40, 30, 48): '(1.0)', (30, 49, 30, 58): '(-0.2)', (30, 59, 30, 67): '(0.9)', (30, 68, 30, 76): '(0.0)', (30, 77, 30, 85): '(0.0)', (30, 86, 30, 94): '(0.0)', (30, 95, 30, 103): '(1.0)'}, {}), "('lego/lego.urdf', 1.0, -0.2, 0.9, 0.0, 0.0, 0.0, 1.0)", True, 'import pybullet as p\n'), ((38, 25, 38, 53), 'pybullet.getNumJoints', 'p.getNumJoints', ({(38, 40, 38, 52): 'kuka_gripper'}, {}), '(kuka_gripper)', True, 'import pybullet as p\n'), ((39, 1, 39, 70), 'pybullet.resetJointState', 'p.resetJointState', ({(39, 19, 39, 31): 'kuka_gripper', (39, 32, 39, 42): 'jointIndex', (39, 43, 39, 69): 'jointPositions[jointIndex]'}, {}), '(kuka_gripper, jointIndex, jointPositions[jointIndex])', True, 'import pybullet as p\n'), ((40, 1, 40, 97), 'pybullet.setJointMotorControl2', 'p.setJointMotorControl2', ({(40, 25, 40, 37): 'kuka_gripper', (40, 38, 40, 48): 'jointIndex', (40, 49, 40, 67): 'p.POSITION_CONTROL', (40, 68, 40, 94): 'jointPositions[jointIndex]', (40, 95, 40, 96): '(0)'}, {}), '(kuka_gripper, jointIndex, p.POSITION_CONTROL,\n jointPositions[jointIndex], 0)', True, 'import pybullet as p\n'), ((45, 11, 45, 106), 'pybullet.loadURDF', 'p.loadURDF', ({(45, 22, 45, 40): '"""jenga/jenga.urdf"""', (45, 42, 45, 50): '(1.3)', (45, 51, 45, 60): '(-0.7)', (45, 61, 45, 69): '(0.75)', (45, 70, 45, 78): '(0.0)', (45, 79, 45, 87): '(0.707107)', (45, 88, 45, 96): '(0.0)', (45, 97, 45, 105): '(0.707107)'}, {}), "('jenga/jenga.urdf', 1.3, -0.7, 0.75, 0.0, 0.707107, 0.0, 0.707107)", True, 'import pybullet as p\n'), ((46, 11, 46, 106), 'pybullet.loadURDF', 'p.loadURDF', ({(46, 22, 46, 40): '"""jenga/jenga.urdf"""', (46, 42, 46, 50): '(1.2)', (46, 51, 46, 60): '(-0.7)', (46, 61, 46, 69): '(0.75)', (46, 70, 46, 78): '(0.0)', (46, 79, 46, 87): '(0.707107)', (46, 88, 46, 96): '(0.0)', (46, 97, 46, 105): '(0.707107)'}, {}), "('jenga/jenga.urdf', 1.2, -0.7, 0.75, 0.0, 0.707107, 0.0, 0.707107)", True, 'import pybullet as p\n'), ((47, 11, 47, 106), 'pybullet.loadURDF', 'p.loadURDF', ({(47, 22, 47, 40): '"""jenga/jenga.urdf"""', (47, 42, 47, 50): '(1.1)', (47, 51, 47, 60): '(-0.7)', (47, 61, 47, 69): '(0.75)', (47, 70, 47, 78): '(0.0)', (47, 79, 47, 87): '(0.707107)', (47, 88, 47, 96): '(0.0)', (47, 97, 47, 105): '(0.707107)'}, {}), "('jenga/jenga.urdf', 1.1, -0.7, 0.75, 0.0, 0.707107, 0.0, 0.707107)", True, 'import pybullet as p\n'), ((48, 11, 48, 106), 'pybullet.loadURDF', 'p.loadURDF', ({(48, 22, 48, 40): '"""jenga/jenga.urdf"""', (48, 42, 48, 50): '(1.0)', (48, 51, 48, 60): '(-0.7)', (48, 61, 48, 69): '(0.75)', (48, 70, 48, 78): '(0.0)', (48, 79, 48, 87): '(0.707107)', (48, 88, 48, 96): '(0.0)', (48, 97, 48, 105): '(0.707107)'}, {}), "('jenga/jenga.urdf', 1.0, -0.7, 0.75, 0.0, 0.707107, 0.0, 0.707107)", True, 'import pybullet as p\n'), ((49, 11, 49, 106), 'pybullet.loadURDF', 'p.loadURDF', ({(49, 22, 49, 40): '"""jenga/jenga.urdf"""', (49, 42, 49, 50): '(0.9)', (49, 51, 49, 60): '(-0.7)', (49, 61, 49, 69): '(0.75)', (49, 70, 49, 78): '(0.0)', (49, 79, 49, 87): '(0.707107)', (49, 88, 49, 96): '(0.0)', (49, 97, 49, 105): '(0.707107)'}, {}), "('jenga/jenga.urdf', 0.9, -0.7, 0.75, 0.0, 0.707107, 0.0, 0.707107)", True, 'import pybullet as p\n'), ((50, 11, 50, 106), 'pybullet.loadURDF', 'p.loadURDF', ({(50, 22, 50, 40): '"""jenga/jenga.urdf"""', (50, 42, 50, 50): '(0.8)', (50, 51, 50, 60): '(-0.7)', (50, 61, 50, 69): '(0.75)', (50, 70, 50, 78): '(0.0)', (50, 79, 50, 87): '(0.707107)', (50, 88, 50, 96): '(0.0)', (50, 97, 50, 105): '(0.707107)'}, {}), "('jenga/jenga.urdf', 0.8, -0.7, 0.75, 0.0, 0.707107, 0.0, 0.707107)", True, 'import pybullet as p\n'), ((51, 11, 51, 106), 'pybullet.loadURDF', 'p.loadURDF', ({(51, 22, 51, 40): '"""table/table.urdf"""', (51, 42, 51, 50): '(1.0)', (51, 51, 51, 60): '(-0.2)', (51, 61, 51, 69): '(0.0)', (51, 70, 51, 78): '(0.0)', (51, 79, 51, 87): '(0.0)', (51, 88, 51, 96): '(0.707107)', (51, 97, 51, 105): '(0.707107)'}, {}), "('table/table.urdf', 1.0, -0.2, 0.0, 0.0, 0.0, 0.707107, 0.707107)", True, 'import pybullet as p\n'), ((52, 11, 52, 106), 'pybullet.loadURDF', 'p.loadURDF', ({(52, 22, 52, 40): '"""teddy_vhacd.urdf"""', (52, 42, 52, 50): '(1.05)', (52, 51, 52, 60): '(-0.5)', (52, 61, 52, 69): '(0.7)', (52, 70, 52, 78): '(0.0)', (52, 79, 
52, 87): '(0.0)', (52, 88, 52, 96): '(0.707107)', (52, 97, 52, 105): '(0.707107)'}, {}), "('teddy_vhacd.urdf', 1.05, -0.5, 0.7, 0.0, 0.0, 0.707107, 0.707107)", True, 'import pybullet as p\n'), ((53, 11, 53, 105), 'pybullet.loadURDF', 'p.loadURDF', ({(53, 22, 53, 39): '"""cube_small.urdf"""', (53, 41, 53, 49): '(0.95)', (53, 50, 53, 59): '(-0.1)', (53, 60, 53, 68): '(0.7)', (53, 69, 53, 77): '(0.0)', (53, 78, 53, 86): '(0.0)', (53, 87, 53, 95): '(0.707107)', (53, 96, 53, 104): '(0.707107)'}, {}), "('cube_small.urdf', 0.95, -0.1, 0.7, 0.0, 0.0, 0.707107, 0.707107)", True, 'import pybullet as p\n'), ((54, 11, 54, 107), 'pybullet.loadURDF', 'p.loadURDF', ({(54, 22, 54, 41): '"""sphere_small.urdf"""', (54, 43, 54, 51): '(0.85)', (54, 52, 54, 61): '(-0.4)', (54, 62, 54, 70): '(0.7)', (54, 71, 54, 79): '(0.0)', (54, 80, 54, 88): '(0.0)', (54, 89, 54, 97): '(0.707107)', (54, 98, 54, 106): '(0.707107)'}, {}), "('sphere_small.urdf', 0.85, -0.4, 0.7, 0.0, 0.0, 0.707107, 0.707107)", True, 'import pybullet as p\n'), ((55, 11, 55, 105), 'pybullet.loadURDF', 'p.loadURDF', ({(55, 22, 55, 39): '"""duck_vhacd.urdf"""', (55, 41, 55, 49): '(0.85)', (55, 50, 55, 59): '(-0.4)', (55, 60, 55, 68): '(0.9)', (55, 69, 55, 77): '(0.0)', (55, 78, 55, 86): '(0.0)', (55, 87, 55, 95): '(0.707107)', (55, 96, 55, 104): '(0.707107)'}, {}), "('duck_vhacd.urdf', 0.85, -0.4, 0.9, 0.0, 0.0, 0.707107, 0.707107)", True, 'import pybullet as p\n'), ((59, 11, 59, 106), 'pybullet.loadURDF', 'p.loadURDF', ({(59, 22, 59, 40): '"""teddy_vhacd.urdf"""', (59, 42, 59, 51): '(-0.1)', (59, 52, 59, 60): '(0.6)', (59, 61, 59, 69): '(0.85)', (59, 70, 59, 78): '(0.0)', (59, 79, 59, 87): '(0.0)', (59, 88, 59, 96): '(0.0)', (59, 97, 59, 105): '(1.0)'}, {}), "('teddy_vhacd.urdf', -0.1, 0.6, 0.85, 0.0, 0.0, 0.0, 1.0)", True, 'import pybullet as p\n'), ((60, 11, 60, 109), 'pybullet.loadURDF', 'p.loadURDF', ({(60, 22, 60, 41): '"""sphere_small.urdf"""', (60, 43, 60, 52): '(-0.1)', (60, 53, 60, 61): '(0.955006)', (60, 62, 60, 70): '(1.169706)', (60, 71, 60, 79): '(0.633232)', (60, 80, 60, 89): '(-0.0)', (60, 90, 60, 99): '(-0.0)', (60, 100, 60, 108): '(0.773962)'}, {}), "('sphere_small.urdf', -0.1, 0.955006, 1.169706, 0.633232, -0.0, -\n 0.0, 0.773962)", True, 'import pybullet as p\n'), ((61, 11, 61, 104), 'pybullet.loadURDF', 'p.loadURDF', ({(61, 22, 61, 39): '"""cube_small.urdf"""', (61, 41, 61, 49): '(0.3)', (61, 50, 61, 58): '(0.6)', (61, 59, 61, 67): '(0.85)', (61, 68, 61, 76): '(0.0)', (61, 77, 61, 85): '(0.0)', (61, 86, 61, 94): '(0.0)', (61, 95, 61, 103): '(1.0)'}, {}), "('cube_small.urdf', 0.3, 0.6, 0.85, 0.0, 0.0, 0.0, 1.0)", True, 'import pybullet as p\n'), ((62, 11, 62, 120), 'pybullet.loadURDF', 'p.loadURDF', ({(62, 22, 62, 54): '"""table_square/table_square.urdf"""', (62, 56, 62, 65): '(-1.0)', (62, 66, 62, 74): '(0.0)', (62, 75, 62, 83): '(0.0)', (62, 84, 62, 92): '(0.0)', (62, 93, 62, 101): '(0.0)', (62, 102, 62, 110): '(0.0)', (62, 111, 62, 119): '(1.0)'}, {}), "('table_square/table_square.urdf', -1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0\n )", True, 'import pybullet as p\n'), ((65, 25, 65, 43), 'pybullet.getNumJoints', 'p.getNumJoints', ({(65, 40, 65, 42): 'ob'}, {}), '(ob)', True, 'import pybullet as p\n'), ((66, 1, 66, 60), 'pybullet.resetJointState', 'p.resetJointState', ({(66, 19, 66, 21): 'ob', (66, 22, 66, 32): 'jointIndex', (66, 33, 66, 59): 'jointPositions[jointIndex]'}, {}), '(ob, jointIndex, jointPositions[jointIndex])', True, 'import pybullet as p\n'), ((68, 11, 68, 106), 'pybullet.loadURDF', 'p.loadURDF', ({(68, 22, 68, 40): 
'"""husky/husky.urdf"""', (68, 42, 68, 50): '(2.0)', (68, 51, 68, 60): '(-5.0)', (68, 61, 68, 69): '(1.0)', (68, 70, 68, 78): '(0.0)', (68, 79, 68, 87): '(0.0)', (68, 88, 68, 96): '(0.0)', (68, 97, 68, 105): '(1.0)'}, {}), "('husky/husky.urdf', 2.0, -5.0, 1.0, 0.0, 0.0, 0.0, 1.0)", True, 'import pybullet as p\n'), ((71, 25, 71, 43), 'pybullet.getNumJoints', 'p.getNumJoints', ({(71, 40, 71, 42): 'ob'}, {}), '(ob)', True, 'import pybullet as p\n'), ((72, 1, 72, 60), 'pybullet.resetJointState', 'p.resetJointState', ({(72, 19, 72, 21): 'ob', (72, 22, 72, 32): 'jointIndex', (72, 33, 72, 59): 'jointPositions[jointIndex]'}, {}), '(ob, jointIndex, jointPositions[jointIndex])', True, 'import pybullet as p\n')] |
SvoONs/genomics_algo | genomics_algo/utilities/string_cmp.py | 3174c1e9e685db12c5849ce5c7e3411f1922a4be | def longest_common_prefix(s1: str, s2: str) -> str:
"""
Finds the longest common prefix (substring) given two strings
s1: First string to compare
s2: Second string to compare
Returns:
Longest common prefix between s1 and s2
>>> longest_common_prefix("ACTA", "GCCT")
''
>>> longest_common_prefix("ACTA", "ACT")
'ACT'
>>> longest_common_prefix("ACT", "ACTA")
'ACT'
>>> longest_common_prefix("GATA", "GAAT")
'GA'
>>> longest_common_prefix("ATGA", "")
''
>>> longest_common_prefix("", "GCCT")
''
>>> longest_common_prefix("GCCT", "GCCT")
'GCCT'
"""
i = 0
while i < min(len(s1), len(s2)):
if s1[i] != s2[i]:
break
i += 1
return s1[:i]
def longest_common_suffix(s1: str, s2: str) -> str:
"""
Finds the longest common suffix (substring) given two strings
s1: First string to compare
s2: Second string to compare
Returns:
Longest common suffix between s1 and s2
>>> longest_common_suffix("ACTA", "GCCT")
''
>>> longest_common_suffix("ACTA", "CTA")
'CTA'
>>> longest_common_suffix("CTA", "ACTA")
'CTA'
>>> longest_common_suffix("GATAT", "GAATAT")
'ATAT'
>>> longest_common_suffix("ACTA", "")
''
>>> longest_common_suffix("", "GCCT")
''
>>> longest_common_suffix("GCCT", "GCCT")
'GCCT'
"""
return longest_common_prefix(s1[::-1], s2[::-1])[::-1]
def find_hamming_distance(s1: str, s2: str) -> int:
"""Compute the Hamming distance between two strings of equal length
>>> find_hamming_distance("ATG", "ATC")
1
>>> find_hamming_distance("ATG", "TGA")
3
>>> find_hamming_distance("A", "A")
0
>>> find_hamming_distance("ATG", "ATG")
0
>>> find_hamming_distance("", "")
0
>>> find_hamming_distance("GAGGTAGCGGCGTTTAAC", "GTGGTAACGGGGTTTAAC")
3
"""
assert len(s1) == len(s2)
return sum(1 for i in range(len(s1)) if s1[i] != s2[i])
def find_levenshtein_distance(s1: str, s2: str) -> int:
"""Compute the Levenshtein distance between two strings (i.e., minimum number
of edits including substitution, insertion and deletion needed in a string to
turn it into another)
>>> find_levenshtein_distance("AT", "")
2
>>> find_levenshtein_distance("AT", "ATC")
1
>>> find_levenshtein_distance("ATG", "ATC")
1
>>> find_levenshtein_distance("ATG", "TGA")
2
>>> find_levenshtein_distance("ATG", "ATG")
0
>>> find_levenshtein_distance("", "")
0
>>> find_levenshtein_distance("GAGGTAGCGGCGTTTAAC", "GTGGTAACGGGGTTTAAC")
3
>>> find_levenshtein_distance("TGGCCGCGCAAAAACAGC", "TGACCGCGCAAAACAGC")
2
>>> find_levenshtein_distance("GCGTATGCGGCTAACGC", "GCTATGCGGCTATACGC")
2
"""
# initializing a matrix for with `len(s1) + 1` rows and `len(s2) + 1` columns
D = [[0 for x in range(len(s2) + 1)] for y in range(len(s1) + 1)]
# fill first column
for i in range(len(s1) + 1):
D[i][0] = i
# fill first row
for j in range(len(s2) + 1):
D[0][j] = j
# fill rest of the matrix
for i in range(1, len(s1) + 1):
for j in range(1, len(s2) + 1):
distance_left = D[i][j - 1] + 1 # deletion in pattern
distance_above = D[i - 1][j] + 1 # insertion in pattern
distance_diagonal = D[i - 1][j - 1] + (
s1[i - 1] != s2[j - 1]
) # substitution
D[i][j] = min(distance_left, distance_above, distance_diagonal)
# return the last value (i.e., right most bottom value)
return D[-1][-1]
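

# A minimal sketch for running the doctests above when executing this module directly:
if __name__ == "__main__":
    import doctest

    doctest.testmod()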
| [] |
whythawk/whyqd | whyqd/parsers/wrangling_parser.py | 8ee41768d6788318458d41831200594b61777ccc | from __future__ import annotations
from typing import Optional, Dict, List, Union, Type, TYPE_CHECKING
from datetime import date, datetime
import pandas as pd
import numpy as np
import re
import locale
try:
locale.setlocale(locale.LC_ALL, "en_US.UTF-8")
except locale.Error:
# Readthedocs has a problem, but difficult to replicate
locale.setlocale(locale.LC_ALL, "")
from . import CoreScript
from ..models import ColumnModel
from ..types import MimeType
if TYPE_CHECKING:
from ..schema import Schema
from ..models import DataSourceModel
class WranglingScript:
"""Get, review and restructure tabular data."""
def __init__(self):
self.check_source = CoreScript().check_source
self.core = CoreScript()
self.DATE_FORMATS = {
"date": {"fmt": ["%Y-%m-%d"], "txt": ["YYYY-MM-DD"]},
"datetime": {
"fmt": ["%Y-%m-%d %H:%M:%S", "%Y-%m-%d %H:%M:%S %Z%z"],
"txt": ["YYYY-MM-DD hh:mm:ss", "YYYY-MM-DD hh:mm:ss UTC+0000"],
},
"year": {"fmt": ["%Y"], "txt": ["YYYY"]},
}
def get_dataframe(
self,
source: str,
        preserve: Optional[Union[str, List[str]]] = None,
filetype: MimeType = MimeType.CSV,
names: Optional[List[str]] = None,
nrows: Optional[int] = None,
) -> Union[Dict[str, pd.DataFrame], pd.DataFrame]:
"""Return a Pandas dataframe from a given source.
        Accepts default pandas parameters for Excel and CSV, but the objective is to preserve the source data with
        little data conversion outside of the data wrangling process. With this in mind, any columns listed in
        `preserve` are read as pandas string dtype so that their original formatting is kept.
Parameters
----------
source: str
Source filename.
preserve: str or list of str, default None
Column names where variable type guessing must be prevented and the original data preserved.
Critical for foreign key references with weird formats, like integers with leading `0`.
filetype: MimeType, default MimeType.CSV
Pandas can read a diversity of filetypes, but whyqd has only been tested on `xls`, `xlsx` and `csv`.
names: list of str, default None
If the source data has no header row, explicitly pass a list of names - in the correct order - to address
the data.
nrows: int, default None
A specified number of rows to return. For review, it is faster to load only a small number.
Returns
-------
DataFrame or dict of DataFrame
"""
self.check_source(source)
# If the dtypes have not been set, then ensure that any provided preserved columns remain untouched
# i.e. no forcing of text to numbers
# defaulting to `dtype = object` ...
kwargs = {}
if preserve:
if not isinstance(preserve, list):
preserve = [preserve]
# kwargs["dtype"] = {k: object for k in preserve}
kwargs["dtype"] = {k: pd.StringDtype() for k in preserve}
if names:
kwargs["header"] = None
kwargs["names"] = names
if nrows:
kwargs["nrows"] = nrows
# Check filetype
if filetype in [MimeType.XLS, MimeType.XLSX]:
# This will default to returning a dictionary of dataframes for each sheet
kwargs["sheet_name"] = None
df = pd.read_excel(source, **kwargs)
keys = list(df.keys())
for k in keys:
if df[k].empty:
del df[k]
if len(df.keys()) == 1:
df = df[keys[0]]
if filetype == MimeType.CSV:
# New in pandas 1.3: will ignore encoding errors - perfect for this initial wrangling process
kwargs["encoding_errors"] = "ignore"
            # Supposed to help with fruity separator guessing
kwargs["engine"] = "python"
if not nrows:
df = pd.read_csv(source, **kwargs)
else:
kwargs["iterator"] = True
kwargs["chunksize"] = 10000
df_iterator = pd.read_csv(source, **kwargs)
df = pd.concat(df_iterator, ignore_index=True)
return df
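
    # A hypothetical usage sketch (the file name and column name are assumptions):
    #   ws = WranglingScript()
    #   df = ws.get_dataframe("data.csv", preserve="local_authority_code", nrows=50)
    #   # `preserve` keeps values like "0010" as text instead of coercing them to the int 10.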
def get_dataframe_from_datasource(self, data: DataSourceModel) -> pd.DataFrame:
"""Return the dataframe for a data source.
Parameters
----------
data: DataSourceModel
Returns
-------
pd.DataFrame
"""
path = data.path
try:
self.core.check_source(path)
except FileNotFoundError:
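            # NOTE: assumes `self.directory` has been assigned by the caller; it is not set in __init__ here.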
path = str(self.directory / data.source)
self.core.check_source(path)
df_columns = [d.name for d in data.columns]
names = [d.name for d in data.names] if data.names else None
df = self.get_dataframe(
source=path,
filetype=data.mime,
names=names,
preserve=[d.name for d in data.preserve if d.name in df_columns],
)
if isinstance(df, dict):
if df:
df = df[data.sheet_name]
else:
# It's an empty df for some reason. Maybe excessive filtering.
df = pd.DataFrame()
if df.empty:
raise ValueError(
f"Data source contains no data ({data.path}). Review actions to see if any were more destructive than expected."
)
return df
    def get_dataframe_columns(self, df: pd.DataFrame) -> List[ColumnModel]:
"""Returns a list of ColumnModels from a source DataFrame.
Parameters
----------
df: pd.DataFrame
Should be derived from `get_dataframe` with a sensible default for `nrows` being 50.
Returns
-------
List of ColumnModel
"""
# Prepare summary
        columns = []
        for name, dtype_name in df.dtypes.apply(lambda x: x.name).to_dict().items():
            if dtype_name in ["float64", "int64"]:
                column_type = "number"
            elif dtype_name == "datetime64[ns]":
                column_type = "date"
            else:
                column_type = "string"
            columns.append({"name": name, "type": column_type})
return [ColumnModel(**c) for c in columns]
def deduplicate_columns(self, df: pd.DataFrame, schema: Type[Schema]) -> pd.Index:
"""
Source: https://stackoverflow.com/a/65254771/295606
Source: https://stackoverflow.com/a/55405151
Returns a new column list permitting deduplication of dataframes which may result from merge.
Parameters
----------
df: pd.DataFrame
fields: list of FieldModel
Destination Schema fields
Returns
-------
pd.Index
Updated column names
"""
column_index = pd.Series(df.columns.tolist())
if df.columns.has_duplicates:
duplicates = column_index[column_index.duplicated()].unique()
for name in duplicates:
dups = column_index == name
replacements = [f"{name}{i}" if i != 0 else name for i in range(dups.sum())]
column_index.loc[dups] = replacements
# Fix any fields with the same name as any of the target fields
# Do this to 'force' schema assignment
for name in [f.name for f in schema.get.fields]:
dups = column_index == name
replacements = [f"{name}{i}__dd" if i != 0 else f"{name}__dd" for i in range(dups.sum())]
column_index.loc[dups] = replacements
return pd.Index(column_index)
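
    # Worked example of the renaming above, assuming the schema has a field "name":
    #   input columns:          ["id", "name", "name", "value"]
    #   after de-duplication:   ["id", "name", "name1", "value"]
    #   after schema-clash fix: ["id", "name__dd", "name1", "value"]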
# def check_column_unique(self, source: str, key: str) -> bool:
# """
# Test a column in a dataframe to ensure all values are unique.
# Parameters
# ----------
# source: Source filename
# key: Column name of field where data are to be tested for uniqueness
# Raises
# ------
# ValueError if not unique
# Returns
# -------
# bool, True if unique
# """
# df = self.get_dataframe(source, key)
# if len(df[key]) != len(df[key].unique()):
# import warnings
# filename = source.split("/")[-1] # Obfuscate the path
# e = "'{}' contains non-unique rows in column `{}`".format(filename, key)
# # raise ValueError(e)
# warnings.warn(e)
# return True
# def check_date_format(self, date_type: str, date_value: str) -> bool:
# # https://stackoverflow.com/a/37045601
# # https://www.saltycrane.com/blog/2009/05/converting-time-zones-datetime-objects-python/
# for fmt in self.DATE_FORMATS[date_type]["fmt"]:
# try:
# if date_value == datetime.strptime(date_value, fmt).strftime(fmt):
# return True
# except ValueError:
# continue
# raise ValueError(f"Incorrect date format, should be: `{self.DATE_FORMATS[date_type]['txt']}`")
###################################################################################################
### Pandas type parsers
###################################################################################################
def parse_dates(self, x: Union[None, str]) -> Union[pd.NaT, date.isoformat]:
"""
This is the hard-won 'trust nobody', certainly not Americans, date parser.
TODO: Replace with https://github.com/scrapinghub/dateparser
The only concern is that dateparser.parse(x).date().isoformat() will coerce *any* string to a date,
no matter *what* it is.
"""
if pd.isnull(x):
return pd.NaT
        # Check if to_datetime can handle things
        parsed = pd.to_datetime(x, errors="coerce", dayfirst=True)
        if not pd.isnull(parsed):
            return date.isoformat(parsed)
# Manually see if coersion will work
x = str(x).strip()[:10]
x = re.sub(r"[\\/,\.]", "-", x)
try:
y, m, d = x.split("-")
except ValueError:
return pd.NaT
if len(y) < 4:
# Swap the day and year positions
# Ignore US dates
d, m, y = x.split("-")
# Fat finger on 1999 ... not going to check for other date errors as no way to figure out
if y[0] == "9":
y = "1" + y[1:]
x = "{}-{}-{}".format(y, m, d)
try:
x = datetime.strptime(x, "%Y-%m-%d")
except ValueError:
return pd.NaT
x = date.isoformat(x)
try:
pd.Timestamp(x)
return x
except pd.errors.OutOfBoundsDatetime:
return pd.NaT
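
    # A worked trace of the fat-finger path above (illustrative input):
    #   parse_dates("03-04-9999") -> pd.to_datetime coerces to NaT (year out of bounds),
    #   the manual pass swaps day/year, rewrites the leading "9" to "1",
    #   and the result is "1999-04-03".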
def parse_float(self, x: Union[str, int, float]) -> Union[np.nan, float]:
"""
Regex to extract wrecked floats: https://stackoverflow.com/a/385597
Checked against: https://regex101.com/
"""
try:
return float(x)
except ValueError:
re_float = re.compile(
r"""(?x)
^
\D* # first, match an optional sign *and space*
( # then match integers or f.p. mantissas:
\d+ # start out with a ...
(
\.\d* # mantissa of the form a.b or a.
)? # ? takes care of integers of the form a
|\.\d+ # mantissa of the form .b
)
([eE][+-]?\d+)? # finally, optionally match an exponent
$"""
)
try:
x = re_float.match(x).group(1)
x = re.sub(r"[^e0-9,-\.]", "", str(x))
return locale.atof(x)
except (ValueError, AttributeError):
return np.nan
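
    # A small usage sketch of the fallback branch (illustrative values):
    #   parse_float("£12.34")  -> 12.34   (the regex strips the leading non-digits)
    #   parse_float("abc")     -> np.nan  (no mantissa to match)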
| [((10, 4, 10, 50), 'locale.setlocale', 'locale.setlocale', ({(10, 21, 10, 34): 'locale.LC_ALL', (10, 36, 10, 49): '"""en_US.UTF-8"""'}, {}), "(locale.LC_ALL, 'en_US.UTF-8')", False, 'import locale\n'), ((148, 57, 148, 74), 'typing.List', 'List', ({(148, 62, 148, 73): 'ColumnModel'}, {}), '(ColumnModel)', False, 'from typing import Optional, Dict, List, Union, Type, TYPE_CHECKING\n'), ((13, 4, 13, 39), 'locale.setlocale', 'locale.setlocale', ({(13, 21, 13, 34): 'locale.LC_ALL', (13, 36, 13, 38): '""""""'}, {}), "(locale.LC_ALL, '')", False, 'import locale\n'), ((201, 15, 201, 37), 'pandas.Index', 'pd.Index', ({(201, 24, 201, 36): 'column_index'}, {}), '(column_index)', True, 'import pandas as pd\n'), ((253, 11, 253, 23), 'pandas.isnull', 'pd.isnull', ({(253, 21, 253, 22): 'x'}, {}), '(x)', True, 'import pandas as pd\n'), ((260, 12, 260, 39), 're.sub', 're.sub', ({(260, 19, 260, 30): '"""[\\\\\\\\/,\\\\.]"""', (260, 32, 260, 35): '"""-"""', (260, 37, 260, 38): 'x'}, {}), "('[\\\\\\\\/,\\\\.]', '-', x)", False, 'import re\n'), ((277, 12, 277, 29), 'datetime.date.isoformat', 'date.isoformat', ({(277, 27, 277, 28): 'x'}, {}), '(x)', False, 'from datetime import date, datetime\n'), ((90, 17, 90, 48), 'pandas.read_excel', 'pd.read_excel', ({(90, 31, 90, 37): 'source'}, {}), '(source, **kwargs)', True, 'import pandas as pd\n'), ((274, 16, 274, 48), 'datetime.datetime.strptime', 'datetime.strptime', ({(274, 34, 274, 35): 'x', (274, 37, 274, 47): '"""%Y-%m-%d"""'}, {}), "(x, '%Y-%m-%d')", False, 'from datetime import date, datetime\n'), ((279, 12, 279, 27), 'pandas.Timestamp', 'pd.Timestamp', ({(279, 25, 279, 26): 'x'}, {}), '(x)', True, 'import pandas as pd\n'), ((80, 34, 80, 50), 'pandas.StringDtype', 'pd.StringDtype', ({}, {}), '()', True, 'import pandas as pd\n'), ((103, 21, 103, 50), 'pandas.read_csv', 'pd.read_csv', ({(103, 33, 103, 39): 'source'}, {}), '(source, **kwargs)', True, 'import pandas as pd\n'), ((107, 30, 107, 59), 'pandas.read_csv', 'pd.read_csv', ({(107, 42, 107, 48): 'source'}, {}), '(source, **kwargs)', True, 'import pandas as pd\n'), ((108, 21, 108, 62), 'pandas.concat', 'pd.concat', (), '', True, 'import pandas as pd\n'), ((141, 21, 141, 35), 'pandas.DataFrame', 'pd.DataFrame', ({}, {}), '()', True, 'import pandas as pd\n'), ((256, 25, 256, 74), 'pandas.to_datetime', 'pd.to_datetime', (), '', True, 'import pandas as pd\n'), ((257, 34, 257, 83), 'pandas.to_datetime', 'pd.to_datetime', (), '', True, 'import pandas as pd\n'), ((292, 23, 305, 13), 're.compile', 're.compile', ({(293, 16, 304, 16): '"""(?x)\n ^\n \\\\D* \t\t# first, match an optional sign *and space*\n ( # then match integers or f.p. mantissas:\n \\\\d+ # start out with a ...\n (\n \\\\.\\\\d* # mantissa of the form a.b or a.\n )? # ? takes care of integers of the form a\n |\\\\.\\\\d+ # mantissa of the form .b\n )\n ([eE][+-]?\\\\d+)? # finally, optionally match an exponent\n $"""'}, {}), '(\n """(?x)\n ^\n \\\\D* \t\t# first, match an optional sign *and space*\n ( # then match integers or f.p. mantissas:\n \\\\d+ # start out with a ...\n (\n \\\\.\\\\d* # mantissa of the form a.b or a.\n )? # ? takes care of integers of the form a\n |\\\\.\\\\d+ # mantissa of the form .b\n )\n ([eE][+-]?\\\\d+)? # finally, optionally match an exponent\n $"""\n )', False, 'import re\n'), ((309, 23, 309, 37), 'locale.atof', 'locale.atof', ({(309, 35, 309, 36): 'x'}, {}), '(x)', False, 'import locale\n')] |
FatChicken277/holbertonschool-higher_level_programming | 0x02-python-import_modules/2-args.py | 520d6310a5e2a874f8c5f5185d0fb769b6412e7c | #!/usr/bin/python3
def args(args):
lenn = len(args) - 1
if lenn == 0:
print("0 arguments.")
elif lenn == 1:
print("{0} argument:".format(lenn))
print("{0}: {1}".format(lenn, args[lenn]))
elif lenn > 1:
print("{0} arguments:".format(lenn))
for i in range(lenn):
print("{0}: {1}".format(i+1, args[i+1]))
if __name__ == "__main__":
import sys
args(sys.argv)
| [] |
rychallener/TauREx3_public | taurex/data/profiles/__init__.py | eb0eeeeca8f47e5e7d64d8d70b43a3af370b7677 | """
These modules contain sub-modules related to defining various profiles in a model
""" | [] |
yuetsin/AoC | day-2/part_b.py | a7c5aea245ee6e77312352907fc4d1ac8eac2d3a | #!/usr/bin/env python3
import re
def get_input() -> list:
with open('./input', 'r') as f:
return [v for v in [v.strip() for v in f.readlines()] if v]
lines = get_input()
count = 0
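# Policy for part two: the character must appear at exactly one of the two 1-indexed positions.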
for line in lines:
lower, upper, char, password = re.split(r'-|: | ', line)
lower, upper = int(lower) - 1, int(upper) - 1
try:
if (password[lower] == char) ^ (password[upper] == char):
count += 1
    except IndexError:
# don't care about boundaries
pass
print(count)
| [((15, 35, 15, 60), 're.split', 're.split', ({(15, 44, 15, 53): '"""-|: | """', (15, 55, 15, 59): 'line'}, {}), "('-|: | ', line)", False, 'import re\n')] |
devanshslnk/HelpOff | src/tone.py | bbeddc8bbb9d26bbc85f572d4769fc9fc92d5c4a | from __future__ import print_function
import json
from os.path import join, dirname
from watson_developer_cloud import ToneAnalyzerV3
from watson_developer_cloud.tone_analyzer_v3 import ToneInput
from pprint import pprint
# If service instance provides API key authentication
# service = ToneAnalyzerV3(
# ## url is optional, and defaults to the URL below. Use the correct URL for your region.
# url='https://gateway.watsonplatform.net/tone-analyzer/api',
# version='2017-09-21',
# iam_apikey='your_apikey')
service = ToneAnalyzerV3(
## url is optional, and defaults to the URL below. Use the correct URL for your region.
# url='https://gateway.watsonplatform.net/tone-analyzer/api',
username='f0ec47cc-5191-4421-8fca-2395917e1640',
password='q7JOpjOabiY5',
version='2017-09-21')
# print("\ntone_chat() example 1:\n")
# utterances = [{
# 'text': 'I am very happy.',
# 'user': 'glenn'
# }, {
# 'text': 'It is a good day.',
# 'user': 'glenn'
# }]
# tone_chat = service.tone_chat(utterances).get_result()
# print(json.dumps(tone_chat, indent=2))
# print("\ntone() example 1:\n")
# print(
# json.dumps(
# service.tone(
# tone_input='I am very happy. It is a good day.',
# content_type="text/plain").get_result(),
# indent=2))
# print("\ntone() example 2:\n")
# with open(join(dirname(__file__),
# '../resources/tone-example.json')) as tone_json:
# tone = service.tone(json.load(tone_json)['text'], "text/plain").get_result()
# print(json.dumps(tone, indent=2))
# print("\ntone() example 3:\n")
# with open(join(dirname(__file__),
# '../resources/tone-example.json')) as tone_json:
# tone = service.tone(
# tone_input=json.load(tone_json)['text'],
# content_type='text/plain',
# sentences=True).get_result()
# print(json.dumps(tone, indent=2))
# print("\ntone() example 4:\n")
# with open(join(dirname(__file__),
# '../resources/tone-example.json')) as tone_json:
# tone = service.tone(
# tone_input=json.load(tone_json),
# content_type='application/json').get_result()
# print(json.dumps(tone, indent=2))
# print("\ntone() example 5:\n")
# with open(join(dirname(__file__),
# '../resources/tone-example-html.json')) as tone_html:
# tone = service.tone(
# json.load(tone_html)['text'], content_type='text/html').get_result()
# print(json.dumps(tone, indent=2))
# print("\ntone() example 6 with GDPR support:\n")
# service.set_detailed_response(True)
# with open(join(dirname(__file__),
# '../resources/tone-example-html.json')) as tone_html:
# tone = service.tone(
# json.load(tone_html)['text'],
# content_type='text/html',
# headers={
# 'Custom-Header': 'custom_value'
# })
# print(tone)
# print(tone.get_headers())
# print(tone.get_result())
# print(tone.get_status_code())
# service.set_detailed_response(False)
# print("\ntone() example 7:\n")
test_tone="Hi Team, The times are difficult! Our sales have been disappointing for the past three quarters for our data analytics product suite. We have a competitive data analytics product suite in the industry. However, we are not doing a good job at selling it, and this is really frustrating.We are missing critical sales opportunities. We cannot blame the economy for our lack of execution. Our clients need analytical tools to change their current business outcomes. In fact, it is in times such as this, our clients want to get the insights they need to turn their businesses around. It is disheartening to see that we are failing at closing deals, in such a hungry market. Let's buckle up and execute.Jennifer BakerSales Leader, North-East region"
tone_input = ToneInput(test_tone)
result = service.tone(tone_input=tone_input, content_type="application/json").get_result()
# print(type(json.dumps(tone, indent=2)))
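# The result is typically a dict keyed by "document_tone" (plus "sentences_tone" when
# sentence-level analysis applies); shape shown for illustration only:
#   {"document_tone": {"tones": [{"score": 0.61, "tone_id": "sadness", "tone_name": "Sadness"}, ...]}}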
pprint(result) | [((16, 10, 21, 25), 'watson_developer_cloud.ToneAnalyzerV3', 'ToneAnalyzerV3', (), '', False, 'from watson_developer_cloud import ToneAnalyzerV3\n'), ((92, 13, 92, 33), 'watson_developer_cloud.tone_analyzer_v3.ToneInput', 'ToneInput', ({(92, 23, 92, 32): 'test_tone'}, {}), '(test_tone)', False, 'from watson_developer_cloud.tone_analyzer_v3 import ToneInput\n'), ((95, 0, 95, 14), 'pprint.pprint', 'pprint', ({(95, 7, 95, 13): 'result'}, {}), '(result)', False, 'from pprint import pprint\n')] |
usmannasir/hcloud-python | hcloud/servers/domain.py | 2a90551fb1c4d9d8a6aea5d8b6601a7c1360494d | # -*- coding: utf-8 -*-
from hcloud.core.domain import BaseDomain
from hcloud.helpers.descriptors import ISODateTime
class Server(BaseDomain):
"""Server Domain
:param id: int
ID of the server
:param name: str
Name of the server (must be unique per project and a valid hostname as per RFC 1123)
:param status: str
           Status of the server. Choices: `running`, `initializing`, `starting`, `stopping`, `off`, `deleting`, `migrating`, `rebuilding`, `unknown`
:param created: datetime
Point in time when the server was created
:param public_net: :class:`PublicNetwork <hcloud.servers.domain.PublicNetwork>`
Public network information.
:param server_type: :class:`BoundServerType <hcloud.server_types.client.BoundServerType>`
:param datacenter: :class:`BoundDatacenter <hcloud.datacenters.client.BoundDatacenter>`
:param image: :class:`BoundImage <hcloud.images.client.BoundImage>`, None
:param iso: :class:`BoundIso <hcloud.isos.client.BoundIso>`, None
:param rescue_enabled: bool
True if rescue mode is enabled: Server will then boot into rescue system on next reboot.
:param locked: bool
True if server has been locked and is not available to user.
:param backup_window: str, None
Time window (UTC) in which the backup will run, or None if the backups are not enabled
:param outgoing_traffic: int, None
Outbound Traffic for the current billing period in bytes
:param ingoing_traffic: int, None
Inbound Traffic for the current billing period in bytes
:param included_traffic: int
Free Traffic for the current billing period in bytes
:param protection: dict
Protection configuration for the server
:param labels: dict
User-defined labels (key-value pairs)
:param volumes: List[:class:`BoundVolume <hcloud.volumes.client.BoundVolume>`]
Volumes assigned to this server.
"""
STATUS_RUNNING = "running"
"""Server Status running"""
STATUS_INIT = "initializing"
"""Server Status initializing"""
STATUS_STARTING = "starting"
"""Server Status starting"""
STATUS_STOPPING = "stopping"
"""Server Status stopping"""
STATUS_OFF = "off"
"""Server Status off"""
STATUS_DELETING = "deleting"
"""Server Status deleting"""
STATUS_MIGRATING = "migrating"
"""Server Status migrating"""
STATUS_REBUILDING = "rebuilding"
"""Server Status rebuilding"""
STATUS_UNKNOWN = "unknown"
"""Server Status unknown"""
__slots__ = (
"id",
"name",
"status",
"public_net",
"server_type",
"datacenter",
"image",
"iso",
"rescue_enabled",
"locked",
"backup_window",
"outgoing_traffic",
"ingoing_traffic",
"included_traffic",
"protection",
"labels",
"volumes",
)
created = ISODateTime()
supported_fields = ("created",)
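
    # `created` is a class-level ISODateTime descriptor rather than a slot; it presumably
    # parses ISO 8601 timestamp strings from the API into datetime objects on assignment,
    # and `supported_fields` flags it for that handling.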
def __init__(
self,
id,
name=None,
status=None,
created=None,
public_net=None,
server_type=None,
datacenter=None,
image=None,
iso=None,
rescue_enabled=None,
locked=None,
backup_window=None,
outgoing_traffic=None,
ingoing_traffic=None,
included_traffic=None,
protection=None,
labels=None,
volumes=None,
):
self.id = id
self.name = name
self.status = status
self.created = created
self.public_net = public_net
self.server_type = server_type
self.datacenter = datacenter
self.image = image
self.iso = iso
self.rescue_enabled = rescue_enabled
self.locked = locked
self.backup_window = backup_window
self.outgoing_traffic = outgoing_traffic
self.ingoing_traffic = ingoing_traffic
self.included_traffic = included_traffic
self.protection = protection
self.labels = labels
self.volumes = volumes
class CreateServerResponse(BaseDomain):
"""Create Server Response Domain
    :param server: :class:`BoundServer <hcloud.servers.client.BoundServer>`
The created server
:param action: :class:`BoundAction <hcloud.actions.client.BoundAction>`
Shows the progress of the server creation
:param next_actions: List[:class:`BoundAction <hcloud.actions.client.BoundAction>`]
Additional actions like a `start_server` action after the server creation
:param root_password: str, None
The root password of the server if no SSH-Key was given on server creation
"""
__slots__ = (
"server",
"action",
"next_actions",
"root_password"
)
def __init__(
self,
server, # type: BoundServer
action, # type: BoundAction
next_actions, # type: List[Action]
root_password # type: str
):
self.server = server
self.action = action
self.next_actions = next_actions
self.root_password = root_password
class ResetPasswordResponse(BaseDomain):
"""Reset Password Response Domain
:param action: :class:`BoundAction <hcloud.actions.client.BoundAction>`
Shows the progress of the server passwort reset action
:param root_password: str
The root password of the server
"""
__slots__ = (
"action",
"root_password"
)
def __init__(
self,
action, # type: BoundAction
root_password # type: str
):
self.action = action
self.root_password = root_password
class EnableRescueResponse(BaseDomain):
"""Enable Rescue Response Domain
:param action: :class:`BoundAction <hcloud.actions.client.BoundAction>`
Shows the progress of the server enable rescue action
:param root_password: str
The root password of the server in the rescue mode
"""
__slots__ = (
"action",
"root_password"
)
def __init__(
self,
action, # type: BoundAction
root_password # type: str
):
self.action = action
self.root_password = root_password
class RequestConsoleResponse(BaseDomain):
"""Request Console Response Domain
:param action: :class:`BoundAction <hcloud.actions.client.BoundAction>`
Shows the progress of the server request console action
:param wss_url: str
URL of websocket proxy to use. This includes a token which is valid for a limited time only.
:param password: str
VNC password to use for this connection. This password only works in combination with a wss_url with valid token.
"""
__slots__ = (
"action",
"wss_url",
"password"
)
def __init__(
self,
action, # type: BoundAction
wss_url, # type: str
password, # type: str
):
self.action = action
self.wss_url = wss_url
self.password = password
class PublicNetwork(BaseDomain):
"""Public Network Domain
:param ipv4: :class:`IPv4Address <hcloud.servers.domain.IPv4Address>`
:param ipv6: :class:`IPv6Network <hcloud.servers.domain.IPv6Network>`
:param floating_ips: List[:class:`BoundFloatingIP <hcloud.floating_ips.client.BoundFloatingIP>`]
"""
__slots__ = (
"ipv4",
"ipv6",
"floating_ips"
)
def __init__(self,
ipv4, # type: IPv4Address
ipv6, # type: IPv6Network
floating_ips, # type: List[BoundFloatingIP]
):
self.ipv4 = ipv4
self.ipv6 = ipv6
self.floating_ips = floating_ips
class IPv4Address(BaseDomain):
"""IPv4 Address Domain
:param ip: str
The IPv4 Address
:param blocked: bool
Determine if the IP is blocked
:param dns_ptr: str
DNS PTR for the ip
"""
__slots__ = (
"ip",
"blocked",
"dns_ptr"
)
def __init__(self,
ip, # type: str
blocked, # type: bool
dns_ptr, # type: str
):
self.ip = ip
self.blocked = blocked
self.dns_ptr = dns_ptr
class IPv6Network(BaseDomain):
"""IPv6 Network Domain
:param ip: str
The IPv6 Network as CIDR Notation
:param blocked: bool
Determine if the Network is blocked
:param dns_ptr: dict
DNS PTR Records for the Network as Dict
:param network: str
The network without the network mask
:param network_mask: str
The network mask
"""
__slots__ = (
"ip",
"blocked",
"dns_ptr",
"network",
"network_mask"
)
def __init__(self,
ip, # type: str
blocked, # type: bool
dns_ptr, # type: list
):
self.ip = ip
self.blocked = blocked
self.dns_ptr = dns_ptr
ip_parts = self.ip.split("/") # 2001:db8::/64 to 2001:db8:: and 64
self.network = ip_parts[0]
self.network_mask = ip_parts[1]
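

# A minimal usage sketch with hypothetical values:
#   net = IPv6Network(ip="2001:db8::/64", blocked=False, dns_ptr=[])
#   net.network       # "2001:db8::"
#   net.network_mask  # "64"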
| [((81, 14, 81, 27), 'hcloud.helpers.descriptors.ISODateTime', 'ISODateTime', ({}, {}), '()', False, 'from hcloud.helpers.descriptors import ISODateTime\n')] |
sbarguil/Testing-framework | AutomationFramework/tests/interfaces/test_if_subif.py | f3ef69f1c4f0aeafd02e222d846162c711783b15 | import pytest
from AutomationFramework.page_objects.interfaces.interfaces import Interfaces
from AutomationFramework.tests.base_test import BaseTest
class TestInterfacesSubInterfaces(BaseTest):
test_case_file = 'if_subif.yml'
@pytest.mark.parametrize('create_page_object_arg', [{'test_case_file': test_case_file,
'test_case_name': 'if_subif_description',
'page_object_class': Interfaces}])
def test_if_subif_description(self, create_page_object):
create_page_object.execute_generic_interfaces_edit_config_test_case()
assert create_page_object.generic_validate_test_case_params(), create_page_object.get_test_case_description()
@pytest.mark.parametrize('create_page_object_arg', [{'test_case_file': test_case_file,
'test_case_name': 'if_subif_enabled',
'page_object_class': Interfaces}])
def test_if_subif_enabled(self, create_page_object):
create_page_object.execute_generic_interfaces_edit_config_test_case()
assert create_page_object.generic_validate_test_case_params(), create_page_object.get_test_case_description()
@pytest.mark.parametrize('create_page_object_arg', [{'test_case_file': test_case_file,
'test_case_name': 'if_subif_ip_prefix_length',
'page_object_class': Interfaces}])
def test_if_subif_ip_prefix_length(self, create_page_object):
create_page_object.execute_generic_interfaces_edit_config_test_case()
assert create_page_object.generic_validate_test_case_params(), create_page_object.get_test_case_description()
@pytest.mark.parametrize('multiple_create_page_objects_arg', [{'test_case_file': test_case_file,
'test_case_name': 'if_subif_ip_state',
'page_object_rpcs_classes': [Interfaces, Interfaces],
'rpc_clean_order': None,
}])
def test_if_subif_ip_state(self, multiple_create_page_objects):
for page_object in multiple_create_page_objects:
page_object.execute_interface_rpc()
assert page_object.validate_rpc(), page_object.get_test_case_description()
@pytest.mark.parametrize('multiple_create_page_objects_arg', [{'test_case_file': test_case_file,
'test_case_name': 'if_subif_origin',
'page_object_rpcs_classes': [Interfaces, Interfaces],
'rpc_clean_order': None,
}])
def test_if_subif_origin(self, multiple_create_page_objects):
for page_object in multiple_create_page_objects:
page_object.execute_interface_rpc()
assert page_object.validate_rpc(), page_object.get_test_case_description()
@pytest.mark.parametrize('create_page_object_arg', [{'test_case_file': test_case_file,
'test_case_name': 'if_subif_dhcp_client',
'page_object_class': Interfaces}])
def test_if_subif_dhcp_client(self, create_page_object):
create_page_object.execute_generic_interfaces_edit_config_test_case()
assert create_page_object.generic_validate_test_case_params(), create_page_object.get_test_case_description()
@pytest.mark.parametrize('create_page_object_arg', [{'test_case_file': test_case_file,
'test_case_name': 'if_subif_mtu',
'page_object_class': Interfaces}])
def test_if_subif_mtu(self, create_page_object):
create_page_object.execute_generic_interfaces_edit_config_test_case()
assert create_page_object.generic_validate_test_case_params(), create_page_object.get_test_case_description()
@pytest.mark.parametrize('create_page_object_arg', [{'test_case_file': test_case_file,
'test_case_name': 'if_subif_vlan_id',
'page_object_class': Interfaces}])
def test_if_subif_vlan_id(self, create_page_object):
create_page_object.execute_generic_interfaces_edit_config_test_case()
assert create_page_object.generic_validate_test_case_params(), create_page_object.get_test_case_description()
@pytest.mark.parametrize('create_page_object_arg', [{'test_case_file': test_case_file,
'test_case_name': 'if_subif_inner_outer_vlan_id',
'page_object_class': Interfaces}])
def test_if_subif_inner_outer_vlan_id(self, create_page_object):
create_page_object.execute_generic_interfaces_edit_config_test_case()
assert create_page_object.generic_validate_test_case_params(), create_page_object.get_test_case_description()
@pytest.mark.parametrize('create_page_object_arg', [{'test_case_file': test_case_file,
'test_case_name': 'if_subif_match_vlan_id',
'page_object_class': Interfaces}])
def test_if_subif_match_vlan_id(self, create_page_object):
create_page_object.execute_generic_interfaces_edit_config_test_case()
assert create_page_object.generic_validate_test_case_params(), create_page_object.get_test_case_description()
| [((9, 5, 11, 91), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(9, 29, 9, 53): '"""create_page_object_arg"""', (9, 55, 11, 90): "[{'test_case_file': test_case_file, 'test_case_name':\n 'if_subif_description', 'page_object_class': Interfaces}]"}, {}), "('create_page_object_arg', [{'test_case_file':\n test_case_file, 'test_case_name': 'if_subif_description',\n 'page_object_class': Interfaces}])", False, 'import pytest\n'), ((16, 5, 18, 91), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(16, 29, 16, 53): '"""create_page_object_arg"""', (16, 55, 18, 90): "[{'test_case_file': test_case_file, 'test_case_name': 'if_subif_enabled',\n 'page_object_class': Interfaces}]"}, {}), "('create_page_object_arg', [{'test_case_file':\n test_case_file, 'test_case_name': 'if_subif_enabled',\n 'page_object_class': Interfaces}])", False, 'import pytest\n'), ((23, 5, 25, 91), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(23, 29, 23, 53): '"""create_page_object_arg"""', (23, 55, 25, 90): "[{'test_case_file': test_case_file, 'test_case_name':\n 'if_subif_ip_prefix_length', 'page_object_class': Interfaces}]"}, {}), "('create_page_object_arg', [{'test_case_file':\n test_case_file, 'test_case_name': 'if_subif_ip_prefix_length',\n 'page_object_class': Interfaces}])", False, 'import pytest\n'), ((30, 5, 34, 70), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(30, 29, 30, 63): '"""multiple_create_page_objects_arg"""', (30, 65, 34, 69): "[{'test_case_file': test_case_file, 'test_case_name': 'if_subif_ip_state',\n 'page_object_rpcs_classes': [Interfaces, Interfaces], 'rpc_clean_order':\n None}]"}, {}), "('multiple_create_page_objects_arg', [{\n 'test_case_file': test_case_file, 'test_case_name': 'if_subif_ip_state',\n 'page_object_rpcs_classes': [Interfaces, Interfaces], 'rpc_clean_order':\n None}])", False, 'import pytest\n'), ((40, 5, 44, 70), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(40, 29, 40, 63): '"""multiple_create_page_objects_arg"""', (40, 65, 44, 69): "[{'test_case_file': test_case_file, 'test_case_name': 'if_subif_origin',\n 'page_object_rpcs_classes': [Interfaces, Interfaces], 'rpc_clean_order':\n None}]"}, {}), "('multiple_create_page_objects_arg', [{\n 'test_case_file': test_case_file, 'test_case_name': 'if_subif_origin',\n 'page_object_rpcs_classes': [Interfaces, Interfaces], 'rpc_clean_order':\n None}])", False, 'import pytest\n'), ((50, 5, 52, 91), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(50, 29, 50, 53): '"""create_page_object_arg"""', (50, 55, 52, 90): "[{'test_case_file': test_case_file, 'test_case_name':\n 'if_subif_dhcp_client', 'page_object_class': Interfaces}]"}, {}), "('create_page_object_arg', [{'test_case_file':\n test_case_file, 'test_case_name': 'if_subif_dhcp_client',\n 'page_object_class': Interfaces}])", False, 'import pytest\n'), ((57, 5, 59, 91), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(57, 29, 57, 53): '"""create_page_object_arg"""', (57, 55, 59, 90): "[{'test_case_file': test_case_file, 'test_case_name': 'if_subif_mtu',\n 'page_object_class': Interfaces}]"}, {}), "('create_page_object_arg', [{'test_case_file':\n test_case_file, 'test_case_name': 'if_subif_mtu', 'page_object_class':\n Interfaces}])", False, 'import pytest\n'), ((64, 5, 66, 91), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(64, 29, 64, 53): '"""create_page_object_arg"""', (64, 55, 66, 90): "[{'test_case_file': test_case_file, 'test_case_name': 'if_subif_vlan_id',\n 'page_object_class': Interfaces}]"}, {}), 
"('create_page_object_arg', [{'test_case_file':\n test_case_file, 'test_case_name': 'if_subif_vlan_id',\n 'page_object_class': Interfaces}])", False, 'import pytest\n'), ((71, 5, 73, 91), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(71, 29, 71, 53): '"""create_page_object_arg"""', (71, 55, 73, 90): "[{'test_case_file': test_case_file, 'test_case_name':\n 'if_subif_inner_outer_vlan_id', 'page_object_class': Interfaces}]"}, {}), "('create_page_object_arg', [{'test_case_file':\n test_case_file, 'test_case_name': 'if_subif_inner_outer_vlan_id',\n 'page_object_class': Interfaces}])", False, 'import pytest\n'), ((78, 5, 80, 91), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(78, 29, 78, 53): '"""create_page_object_arg"""', (78, 55, 80, 90): "[{'test_case_file': test_case_file, 'test_case_name':\n 'if_subif_match_vlan_id', 'page_object_class': Interfaces}]"}, {}), "('create_page_object_arg', [{'test_case_file':\n test_case_file, 'test_case_name': 'if_subif_match_vlan_id',\n 'page_object_class': Interfaces}])", False, 'import pytest\n')] |
sanket4373/keystone | keystone/common/sql/migrate_repo/versions/001_add_initial_tables.py | 7cf7e7497729803f0470167315af9349b88fe0ec | # Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sqlalchemy as sql
def upgrade(migrate_engine):
# Upgrade operations go here. Don't create your own engine; bind
# migrate_engine to your metadata
meta = sql.MetaData()
meta.bind = migrate_engine
# catalog
service_table = sql.Table(
'service',
meta,
sql.Column('id', sql.String(64), primary_key=True),
sql.Column('type', sql.String(255)),
sql.Column('extra', sql.Text()))
service_table.create(migrate_engine, checkfirst=True)
endpoint_table = sql.Table(
'endpoint',
meta,
sql.Column('id', sql.String(64), primary_key=True),
sql.Column('region', sql.String(255)),
sql.Column('service_id',
sql.String(64),
sql.ForeignKey('service.id'),
nullable=False),
sql.Column('extra', sql.Text()))
endpoint_table.create(migrate_engine, checkfirst=True)
# identity
role_table = sql.Table(
'role',
meta,
sql.Column('id', sql.String(64), primary_key=True),
sql.Column('name', sql.String(255), unique=True, nullable=False))
role_table.create(migrate_engine, checkfirst=True)
if migrate_engine.name == 'ibm_db_sa':
# NOTE(blk-u): SQLAlchemy for PostgreSQL picks the name tenant_name_key
# for the unique constraint, but for DB2 doesn't give the UC a name
# unless we tell it to and there is no DDL to alter a column to drop
# an unnamed unique constraint, so this code creates a named unique
# constraint on the name column rather than an unnamed one.
# (This is used in migration 16.)
tenant_table = sql.Table(
'tenant',
meta,
sql.Column('id', sql.String(64), primary_key=True),
sql.Column('name', sql.String(64), nullable=False),
sql.Column('extra', sql.Text()),
sql.UniqueConstraint('name', name='tenant_name_key'))
else:
tenant_table = sql.Table(
'tenant',
meta,
sql.Column('id', sql.String(64), primary_key=True),
sql.Column('name', sql.String(64), unique=True, nullable=False),
sql.Column('extra', sql.Text()))
tenant_table.create(migrate_engine, checkfirst=True)
metadata_table = sql.Table(
'metadata',
meta,
sql.Column('user_id', sql.String(64), primary_key=True),
sql.Column('tenant_id', sql.String(64), primary_key=True),
sql.Column('data', sql.Text()))
metadata_table.create(migrate_engine, checkfirst=True)
ec2_credential_table = sql.Table(
'ec2_credential',
meta,
sql.Column('access', sql.String(64), primary_key=True),
sql.Column('secret', sql.String(64)),
sql.Column('user_id', sql.String(64)),
sql.Column('tenant_id', sql.String(64)))
ec2_credential_table.create(migrate_engine, checkfirst=True)
if migrate_engine.name == 'ibm_db_sa':
# NOTE(blk-u): SQLAlchemy for PostgreSQL picks the name user_name_key
# for the unique constraint, but for DB2 doesn't give the UC a name
# unless we tell it to and there is no DDL to alter a column to drop
# an unnamed unique constraint, so this code creates a named unique
# constraint on the name column rather than an unnamed one.
# (This is used in migration 16.)
user_table = sql.Table(
'user',
meta,
sql.Column('id', sql.String(64), primary_key=True),
sql.Column('name', sql.String(64), nullable=False),
sql.Column('extra', sql.Text()),
sql.UniqueConstraint('name', name='user_name_key'))
else:
user_table = sql.Table(
'user',
meta,
sql.Column('id', sql.String(64), primary_key=True),
sql.Column('name', sql.String(64), unique=True, nullable=False),
sql.Column('extra', sql.Text()))
user_table.create(migrate_engine, checkfirst=True)
user_tenant_membership_table = sql.Table(
'user_tenant_membership',
meta,
sql.Column(
'user_id',
sql.String(64),
sql.ForeignKey('user.id'),
primary_key=True),
sql.Column(
'tenant_id',
sql.String(64),
sql.ForeignKey('tenant.id'),
primary_key=True))
user_tenant_membership_table.create(migrate_engine, checkfirst=True)
# token
token_table = sql.Table(
'token',
meta,
sql.Column('id', sql.String(64), primary_key=True),
sql.Column('expires', sql.DateTime()),
sql.Column('extra', sql.Text()))
token_table.create(migrate_engine, checkfirst=True)
def downgrade(migrate_engine):
# Operations to reverse the above upgrade go here.
meta = sql.MetaData()
meta.bind = migrate_engine
tables = ['user_tenant_membership', 'token', 'user', 'tenant', 'role',
'metadata', 'ec2_credential', 'endpoint', 'service']
for t in tables:
table = sql.Table(t, meta, autoload=True)
table.drop(migrate_engine, checkfirst=True)
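

# A rough manual-invocation sketch (sqlalchemy-migrate normally calls these entry
# points itself; a throwaway in-memory SQLite engine is assumed):
#   from sqlalchemy import create_engine
#   engine = create_engine('sqlite://')
#   upgrade(engine)
#   downgrade(engine)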
| [((21, 11, 21, 25), 'sqlalchemy.MetaData', 'sql.MetaData', ({}, {}), '()', True, 'import sqlalchemy as sql\n'), ((148, 11, 148, 25), 'sqlalchemy.MetaData', 'sql.MetaData', ({}, {}), '()', True, 'import sqlalchemy as sql\n'), ((154, 16, 154, 49), 'sqlalchemy.Table', 'sql.Table', (), '', True, 'import sqlalchemy as sql\n'), ((29, 25, 29, 39), 'sqlalchemy.String', 'sql.String', ({(29, 36, 29, 38): '64'}, {}), '(64)', True, 'import sqlalchemy as sql\n'), ((30, 27, 30, 42), 'sqlalchemy.String', 'sql.String', ({(30, 38, 30, 41): '255'}, {}), '(255)', True, 'import sqlalchemy as sql\n'), ((31, 28, 31, 38), 'sqlalchemy.Text', 'sql.Text', ({}, {}), '()', True, 'import sqlalchemy as sql\n'), ((37, 25, 37, 39), 'sqlalchemy.String', 'sql.String', ({(37, 36, 37, 38): '64'}, {}), '(64)', True, 'import sqlalchemy as sql\n'), ((38, 29, 38, 44), 'sqlalchemy.String', 'sql.String', ({(38, 40, 38, 43): '255'}, {}), '(255)', True, 'import sqlalchemy as sql\n'), ((40, 19, 40, 33), 'sqlalchemy.String', 'sql.String', ({(40, 30, 40, 32): '64'}, {}), '(64)', True, 'import sqlalchemy as sql\n'), ((41, 19, 41, 47), 'sqlalchemy.ForeignKey', 'sql.ForeignKey', ({(41, 34, 41, 46): '"""service.id"""'}, {}), "('service.id')", True, 'import sqlalchemy as sql\n'), ((43, 28, 43, 38), 'sqlalchemy.Text', 'sql.Text', ({}, {}), '()', True, 'import sqlalchemy as sql\n'), ((51, 25, 51, 39), 'sqlalchemy.String', 'sql.String', ({(51, 36, 51, 38): '64'}, {}), '(64)', True, 'import sqlalchemy as sql\n'), ((52, 27, 52, 42), 'sqlalchemy.String', 'sql.String', ({(52, 38, 52, 41): '255'}, {}), '(255)', True, 'import sqlalchemy as sql\n'), ((68, 12, 68, 64), 'sqlalchemy.UniqueConstraint', 'sql.UniqueConstraint', (), '', True, 'import sqlalchemy as sql\n'), ((82, 30, 82, 44), 'sqlalchemy.String', 'sql.String', ({(82, 41, 82, 43): '64'}, {}), '(64)', True, 'import sqlalchemy as sql\n'), ((83, 32, 83, 46), 'sqlalchemy.String', 'sql.String', ({(83, 43, 83, 45): '64'}, {}), '(64)', True, 'import sqlalchemy as sql\n'), ((84, 27, 84, 37), 'sqlalchemy.Text', 'sql.Text', ({}, {}), '()', True, 'import sqlalchemy as sql\n'), ((90, 29, 90, 43), 'sqlalchemy.String', 'sql.String', ({(90, 40, 90, 42): '64'}, {}), '(64)', True, 'import sqlalchemy as sql\n'), ((91, 29, 91, 43), 'sqlalchemy.String', 'sql.String', ({(91, 40, 91, 42): '64'}, {}), '(64)', True, 'import sqlalchemy as sql\n'), ((92, 30, 92, 44), 'sqlalchemy.String', 'sql.String', ({(92, 41, 92, 43): '64'}, {}), '(64)', True, 'import sqlalchemy as sql\n'), ((93, 32, 93, 46), 'sqlalchemy.String', 'sql.String', ({(93, 43, 93, 45): '64'}, {}), '(64)', True, 'import sqlalchemy as sql\n'), ((109, 12, 109, 62), 'sqlalchemy.UniqueConstraint', 'sql.UniqueConstraint', (), '', True, 'import sqlalchemy as sql\n'), ((125, 12, 125, 26), 'sqlalchemy.String', 'sql.String', ({(125, 23, 125, 25): '64'}, {}), '(64)', True, 'import sqlalchemy as sql\n'), ((126, 12, 126, 37), 'sqlalchemy.ForeignKey', 'sql.ForeignKey', ({(126, 27, 126, 36): '"""user.id"""'}, {}), "('user.id')", True, 'import sqlalchemy as sql\n'), ((130, 12, 130, 26), 'sqlalchemy.String', 'sql.String', ({(130, 23, 130, 25): '64'}, {}), '(64)', True, 'import sqlalchemy as sql\n'), ((131, 12, 131, 39), 'sqlalchemy.ForeignKey', 'sql.ForeignKey', ({(131, 27, 131, 38): '"""tenant.id"""'}, {}), "('tenant.id')", True, 'import sqlalchemy as sql\n'), ((140, 25, 140, 39), 'sqlalchemy.String', 'sql.String', ({(140, 36, 140, 38): '64'}, {}), '(64)', True, 'import sqlalchemy as sql\n'), ((141, 30, 141, 44), 'sqlalchemy.DateTime', 'sql.DateTime', ({}, {}), '()', 
True, 'import sqlalchemy as sql\n'), ((142, 28, 142, 38), 'sqlalchemy.Text', 'sql.Text', ({}, {}), '()', True, 'import sqlalchemy as sql\n'), ((65, 29, 65, 43), 'sqlalchemy.String', 'sql.String', ({(65, 40, 65, 42): '64'}, {}), '(64)', True, 'import sqlalchemy as sql\n'), ((66, 31, 66, 45), 'sqlalchemy.String', 'sql.String', ({(66, 42, 66, 44): '64'}, {}), '(64)', True, 'import sqlalchemy as sql\n'), ((67, 32, 67, 42), 'sqlalchemy.Text', 'sql.Text', ({}, {}), '()', True, 'import sqlalchemy as sql\n'), ((73, 29, 73, 43), 'sqlalchemy.String', 'sql.String', ({(73, 40, 73, 42): '64'}, {}), '(64)', True, 'import sqlalchemy as sql\n'), ((74, 31, 74, 45), 'sqlalchemy.String', 'sql.String', ({(74, 42, 74, 44): '64'}, {}), '(64)', True, 'import sqlalchemy as sql\n'), ((75, 32, 75, 42), 'sqlalchemy.Text', 'sql.Text', ({}, {}), '()', True, 'import sqlalchemy as sql\n'), ((106, 29, 106, 43), 'sqlalchemy.String', 'sql.String', ({(106, 40, 106, 42): '64'}, {}), '(64)', True, 'import sqlalchemy as sql\n'), ((107, 31, 107, 45), 'sqlalchemy.String', 'sql.String', ({(107, 42, 107, 44): '64'}, {}), '(64)', True, 'import sqlalchemy as sql\n'), ((108, 32, 108, 42), 'sqlalchemy.Text', 'sql.Text', ({}, {}), '()', True, 'import sqlalchemy as sql\n'), ((114, 29, 114, 43), 'sqlalchemy.String', 'sql.String', ({(114, 40, 114, 42): '64'}, {}), '(64)', True, 'import sqlalchemy as sql\n'), ((115, 31, 115, 45), 'sqlalchemy.String', 'sql.String', ({(115, 42, 115, 44): '64'}, {}), '(64)', True, 'import sqlalchemy as sql\n'), ((116, 32, 116, 42), 'sqlalchemy.Text', 'sql.Text', ({}, {}), '()', True, 'import sqlalchemy as sql\n')] |
DanPopa46/neo3-boa | boa3_test/examples/ico.py | e4ef340744b5bd25ade26f847eac50789b97f3e9 | from typing import Any, List, Union
from boa3.builtin import NeoMetadata, metadata, public
from boa3.builtin.contract import Nep17TransferEvent
from boa3.builtin.interop.blockchain import get_contract
from boa3.builtin.interop.contract import GAS, NEO, call_contract
from boa3.builtin.interop.runtime import calling_script_hash, check_witness
from boa3.builtin.interop.storage import delete, get, put
from boa3.builtin.type import UInt160
# -------------------------------------------
# METADATA
# -------------------------------------------
@metadata
def manifest_metadata() -> NeoMetadata:
"""
Defines this smart contract's metadata information
"""
meta = NeoMetadata()
meta.author = "Mirella Medeiros, Ricardo Prado and Lucas Uezu. COZ in partnership with Simpli"
meta.description = "ICO Example"
meta.email = "[email protected]"
return meta
# -------------------------------------------
# Storage Key Prefixes
# -------------------------------------------
KYC_WHITELIST_PREFIX = b'KYCWhitelistApproved'
TOKEN_TOTAL_SUPPLY_PREFIX = b'TokenTotalSupply'
TRANSFER_ALLOWANCE_PREFIX = b'TransferAllowancePrefix_'
# -------------------------------------------
# TOKEN SETTINGS
# -------------------------------------------
# Script hash of the contract owner
TOKEN_OWNER = UInt160()
# Symbol of the Token
TOKEN_SYMBOL = 'ICO'
# Number of decimal places
TOKEN_DECIMALS = 8
# Initial Supply of tokens in the system
TOKEN_INITIAL_SUPPLY = 10_000_000 * 100_000_000 # 10m total supply * 10^8 (decimals)
# -------------------------------------------
# Events
# -------------------------------------------
on_transfer = Nep17TransferEvent
# -------------------------------------------
# Methods
# -------------------------------------------
@public
def verify() -> bool:
"""
When this contract address is included in the transaction signature,
this method will be triggered as a VerificationTrigger to verify that the signature is correct.
For example, this method needs to be called when withdrawing token from the contract.
:return: whether the transaction signature is correct
"""
return is_administrator()
def is_administrator() -> bool:
"""
Validates if the invoker has administrative rights
:return: whether the contract's invoker is an administrator
"""
return check_witness(TOKEN_OWNER)
def is_valid_address(address: UInt160) -> bool:
"""
Validates if the address passed through the kyc.
:return: whether the given address is validated by kyc
"""
return get(KYC_WHITELIST_PREFIX + address).to_int() > 0
@public
def deploy() -> bool:
"""
Initializes the storage when the smart contract is deployed.
:return: whether the deploy was successful. This method must return True only during the smart contract's deploy.
"""
if not check_witness(TOKEN_OWNER):
return False
if get(TOKEN_TOTAL_SUPPLY_PREFIX).to_int() > 0:
return False
put(TOKEN_TOTAL_SUPPLY_PREFIX, TOKEN_INITIAL_SUPPLY)
put(TOKEN_OWNER, TOKEN_INITIAL_SUPPLY)
on_transfer(None, TOKEN_OWNER, TOKEN_INITIAL_SUPPLY)
return True
@public
def mint(amount: int) -> bool:
"""
Mints new tokens
    :param amount: the amount of tokens to be minted
    :type amount: int
    :return: whether the minting was successful
"""
assert amount >= 0
if not is_administrator():
return False
if amount > 0:
current_total_supply = totalSupply()
owner_balance = balanceOf(TOKEN_OWNER)
put(TOKEN_TOTAL_SUPPLY_PREFIX, current_total_supply + amount)
put(TOKEN_OWNER, owner_balance + amount)
on_transfer(None, TOKEN_OWNER, amount)
post_transfer(None, TOKEN_OWNER, amount, None)
return True
@public
def refund(address: UInt160, neo_amount: int, gas_amount: int) -> bool:
"""
Refunds an address with given Neo and Gas
    :param address: the address to be refunded
:type address: UInt160
:param neo_amount: the amount of neo to be refunded
:type neo_amount: int
:param gas_amount: the amount of gas to be refunded
:type gas_amount: int
:return: whether the refund was successful
"""
assert len(address) == 20
assert neo_amount > 0 or gas_amount > 0
if not is_administrator():
return False
if neo_amount > 0:
result = call_contract(NEO, 'transfer', [calling_script_hash, address, neo_amount, None])
if result != True:
            # due to a current limitation of neo3-boa, changing the condition to `not result`
# will result in a compiler error
return False
if gas_amount > 0:
result = call_contract(GAS, 'transfer', [calling_script_hash, address, gas_amount, None])
if result != True:
            # due to a current limitation of neo3-boa, changing the condition to `not result`
# will result in a compiler error
return False
return True
# -------------------------------------------
# Public methods from NEP5.1
# -------------------------------------------
@public
def symbol() -> str:
"""
    Gets the symbol of the token.
This symbol should be short (3-8 characters is recommended), with no whitespace characters or new-lines and should
be limited to the uppercase latin alphabet (i.e. the 26 letters used in English).
This method must always return the same value every time it is invoked.
:return: a short string symbol of the token managed in this contract.
"""
return TOKEN_SYMBOL
@public
def decimals() -> int:
"""
Gets the amount of decimals used by the token.
E.g. 8, means to divide the token amount by 100,000,000 (10 ^ 8) to get its user representation.
This method must always return the same value every time it is invoked.
:return: the number of decimals used by the token.
"""
return TOKEN_DECIMALS
@public
def totalSupply() -> int:
"""
Gets the total token supply deployed in the system.
This number mustn't be in its user representation. E.g. if the total supply is 10,000,000 tokens, this method
must return 10,000,000 * 10 ^ decimals.
:return: the total token supply deployed in the system.
"""
return get(TOKEN_TOTAL_SUPPLY_PREFIX).to_int()
@public
def balanceOf(account: UInt160) -> int:
"""
    Gets the current balance of an address
The parameter account should be a 20-byte address.
:param account: the account address to retrieve the balance for
:type account: UInt160
:return: the token balance of the `account`
:raise AssertionError: raised if `account` length is not 20.
"""
assert len(account) == 20
return get(account).to_int()
@public
def transfer(from_address: UInt160, to_address: UInt160, amount: int, data: Any) -> bool:
"""
Transfers a specified amount of NEP17 tokens from one account to another
If the method succeeds, it must fire the `transfer` event and must return true, even if the amount is 0,
or from and to are the same address.
:param from_address: the address to transfer from
:type from_address: UInt160
:param to_address: the address to transfer to
:type to_address: UInt160
:param amount: the amount of NEP17 tokens to transfer
:type amount: int
:param data: whatever data is pertinent to the onPayment method
:type data: Any
:return: whether the transfer was successful
    :raise AssertionError: raised if `from_address` or `to_address` length is not 20 or if `amount` is less than zero.
"""
# the parameters from and to should be 20-byte addresses. If not, this method should throw an exception.
assert len(from_address) == 20 and len(to_address) == 20
# the parameter amount must be greater than or equal to 0. If not, this method should throw an exception.
assert amount >= 0
# The function MUST return false if the from account balance does not have enough tokens to spend.
from_balance = get(from_address).to_int()
if from_balance < amount:
return False
# The function should check whether the from address equals the caller contract hash.
# If so, the transfer should be processed;
# If not, the function should use the check_witness to verify the transfer.
if from_address != calling_script_hash:
if not check_witness(from_address):
return False
# skip balance changes if transferring to yourself or transferring 0 cryptocurrency
if from_address != to_address and amount != 0:
if from_balance == amount:
delete(from_address)
else:
put(from_address, from_balance - amount)
to_balance = get(to_address).to_int()
put(to_address, to_balance + amount)
# if the method succeeds, it must fire the transfer event
on_transfer(from_address, to_address, amount)
# if the to_address is a smart contract, it must call the contracts onPayment
post_transfer(from_address, to_address, amount, data)
# and then it must return true
return True
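# Illustrative invocation sketch (commented out; not part of the contract). A wallet or
# test harness would exercise `transfer` roughly as below; the `engine.run` helper and
# the two 20-byte script hashes are assumptions for the example, not APIs defined here.
#   engine.run(ico_contract_path, 'transfer', sender_hash, receiver_hash, 1_00000000, None)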
def post_transfer(from_address: Union[UInt160, None], to_address: Union[UInt160, None], amount: int, data: Any):
"""
    Checks whether the receiver of the NEP17 tokens is a smart contract; if it is, its onPayment method is called
:param from_address: the address of the sender
:type from_address: UInt160
:param to_address: the address of the receiver
:type to_address: UInt160
:param amount: the amount of cryptocurrency that is being sent
:type amount: int
:param data: any pertinent data that might validate the transaction
:type data: Any
"""
if not isinstance(to_address, None): # TODO: change to 'is not None' when `is` semantic is implemented
contract = get_contract(to_address)
if not isinstance(contract, None): # TODO: change to 'is not None' when `is` semantic is implemented
call_contract(to_address, 'onPayment', [from_address, amount, data])
@public
def allowance(from_address: UInt160, to_address: UInt160) -> int:
"""
Returns the amount of tokens that the to account can transfer from the from account.
:param from_address: the address that have the tokens
:type from_address: UInt160
:param to_address: the address that is authorized to use the tokens
:type to_address: UInt160
:return: the amount of tokens that the `to` account can transfer from the `from` account
:raise AssertionError: raised if `from_address` or `to_address` length is not 20.
"""
# the parameters from and to should be 20-byte addresses. If not, this method should throw an exception.
assert len(from_address) == 20 and len(to_address) == 20
return get(TRANSFER_ALLOWANCE_PREFIX + from_address + to_address).to_int()
@public
def transferFrom(originator: UInt160, from_address: UInt160, to_address: UInt160, amount: int, data: Any) -> bool:
"""
    Transfers an amount from the `originator` account to the `to` account, provided the `from` account has been
    approved to transfer the requested amount on the originator's behalf.
    :param originator: the address that actually holds the tokens
    :type originator: UInt160
    :param from_address: the address approved to spend the originator's tokens
:type from_address: UInt160
:param to_address: the address to transfer to
:type to_address: UInt160
:param amount: the amount of NEP17 tokens to transfer
:type amount: int
:param data: any pertinent data that might validate the transaction
:type data: Any
:return: whether the transfer was successful
    :raise AssertionError: raised if `originator`, `from_address` or `to_address` length is not 20 or if `amount` is less than zero.
"""
# the parameters from and to should be 20-byte addresses. If not, this method should throw an exception.
assert len(originator) == 20 and len(from_address) == 20 and len(to_address) == 20
# the parameter amount must be greater than or equal to 0. If not, this method should throw an exception.
assert amount >= 0
# The function should check whether the from address equals the caller contract hash.
# If so, the transfer should be processed;
# If not, the function should use the check_witness to verify the transfer.
if from_address != calling_script_hash:
if not check_witness(from_address):
return False
approved_transfer_amount = allowance(originator, from_address)
if approved_transfer_amount < amount:
return False
originator_balance = balanceOf(originator)
if originator_balance < amount:
return False
# update allowance between originator and from
if approved_transfer_amount == amount:
delete(TRANSFER_ALLOWANCE_PREFIX + originator + from_address)
else:
put(TRANSFER_ALLOWANCE_PREFIX + originator + from_address, approved_transfer_amount - amount)
# skip balance changes if transferring to yourself or transferring 0 cryptocurrency
if amount != 0 and from_address != to_address:
# update originator's balance
if originator_balance == amount:
delete(originator)
else:
put(originator, originator_balance - amount)
# updates to's balance
to_balance = get(to_address).to_int()
put(to_address, to_balance + amount)
# if the method succeeds, it must fire the transfer event
on_transfer(from_address, to_address, amount)
# if the to_address is a smart contract, it must call the contracts onPayment
post_transfer(from_address, to_address, amount, data)
# and then it must return true
return True
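# Allowance flow at a glance (sketch with hypothetical addresses alice/bob/carol):
#   approve(alice, bob, 100)                   -> bob may spend up to 100 of alice's tokens
#   transferFrom(alice, bob, carol, 40, None)  -> moves 40 from alice's balance to carol,
#                                                 authorized by bob's witness; the allowance drops to 60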
@public
def approve(originator: UInt160, to_address: UInt160, amount: int) -> bool:
"""
Approves the to account to transfer amount tokens from the originator account.
    :param originator: the address that holds the tokens
:type originator: UInt160
:param to_address: the address that is authorized to use the tokens
:type to_address: UInt160
:param amount: the amount of NEP17 tokens to transfer
:type amount: int
:return: whether the approval was successful
    :raise AssertionError: raised if `originator` or `to_address` length is not 20 or if `amount` is less than zero.
"""
assert len(originator) == 20 and len(to_address) == 20
assert amount >= 0
if not check_witness(originator):
return False
if originator == to_address:
return False
if not is_valid_address(originator) or not is_valid_address(to_address):
        # one of the addresses hasn't passed the kyc yet
return False
if balanceOf(originator) < amount:
return False
put(TRANSFER_ALLOWANCE_PREFIX + originator + to_address, amount)
return True
# -------------------------------------------
# Public methods from KYC
# -------------------------------------------
@public
def kyc_register(addresses: List[UInt160]) -> int:
"""
    Adds the given addresses to the kyc whitelist
:param addresses: a list with the addresses to be included
:return: the number of included addresses
"""
included_addresses = 0
if is_administrator():
for address in addresses:
if len(address) == 20:
kyc_key = KYC_WHITELIST_PREFIX + address
put(kyc_key, True)
included_addresses += 1
return included_addresses
@public
def kyc_remove(addresses: List[UInt160]) -> int:
"""
Removes the given addresses from the kyc whitelist
:param addresses: a list with the addresses to be removed
:return: the number of removed addresses
"""
removed_addresses = 0
if is_administrator():
for address in addresses:
if len(address) == 20:
kyc_key = KYC_WHITELIST_PREFIX + address
delete(kyc_key)
removed_addresses += 1
return removed_addresses
| [((45, 14, 45, 23), 'boa3.builtin.type.UInt160', 'UInt160', ({}, {}), '()', False, 'from boa3.builtin.type import UInt160\n'), ((22, 11, 22, 24), 'boa3.builtin.NeoMetadata', 'NeoMetadata', ({}, {}), '()', False, 'from boa3.builtin import NeoMetadata, metadata, public\n'), ((88, 11, 88, 37), 'boa3.builtin.interop.runtime.check_witness', 'check_witness', ({(88, 25, 88, 36): 'TOKEN_OWNER'}, {}), '(TOKEN_OWNER)', False, 'from boa3.builtin.interop.runtime import calling_script_hash, check_witness\n'), ((113, 4, 113, 56), 'boa3.builtin.interop.storage.put', 'put', ({(113, 8, 113, 33): 'TOKEN_TOTAL_SUPPLY_PREFIX', (113, 35, 113, 55): 'TOKEN_INITIAL_SUPPLY'}, {}), '(TOKEN_TOTAL_SUPPLY_PREFIX, TOKEN_INITIAL_SUPPLY)', False, 'from boa3.builtin.interop.storage import delete, get, put\n'), ((114, 4, 114, 42), 'boa3.builtin.interop.storage.put', 'put', ({(114, 8, 114, 19): 'TOKEN_OWNER', (114, 21, 114, 41): 'TOKEN_INITIAL_SUPPLY'}, {}), '(TOKEN_OWNER, TOKEN_INITIAL_SUPPLY)', False, 'from boa3.builtin.interop.storage import delete, get, put\n'), ((432, 4, 432, 68), 'boa3.builtin.interop.storage.put', 'put', ({(432, 8, 432, 59): '(TRANSFER_ALLOWANCE_PREFIX + originator + to_address)', (432, 61, 432, 67): 'amount'}, {}), '(TRANSFER_ALLOWANCE_PREFIX + originator + to_address, amount)', False, 'from boa3.builtin.interop.storage import delete, get, put\n'), ((107, 11, 107, 37), 'boa3.builtin.interop.runtime.check_witness', 'check_witness', ({(107, 25, 107, 36): 'TOKEN_OWNER'}, {}), '(TOKEN_OWNER)', False, 'from boa3.builtin.interop.runtime import calling_script_hash, check_witness\n'), ((137, 8, 137, 69), 'boa3.builtin.interop.storage.put', 'put', ({(137, 12, 137, 37): 'TOKEN_TOTAL_SUPPLY_PREFIX', (137, 39, 137, 68): '(current_total_supply + amount)'}, {}), '(TOKEN_TOTAL_SUPPLY_PREFIX, current_total_supply + amount)', False, 'from boa3.builtin.interop.storage import delete, get, put\n'), ((138, 8, 138, 48), 'boa3.builtin.interop.storage.put', 'put', ({(138, 12, 138, 23): 'TOKEN_OWNER', (138, 25, 138, 47): '(owner_balance + amount)'}, {}), '(TOKEN_OWNER, owner_balance + amount)', False, 'from boa3.builtin.interop.storage import delete, get, put\n'), ((165, 17, 165, 97), 'boa3.builtin.interop.contract.call_contract', 'call_contract', ({(165, 31, 165, 34): 'NEO', (165, 36, 165, 46): '"""transfer"""', (165, 48, 165, 96): '[calling_script_hash, address, neo_amount, None]'}, {}), "(NEO, 'transfer', [calling_script_hash, address, neo_amount, None]\n )", False, 'from boa3.builtin.interop.contract import GAS, NEO, call_contract\n'), ((172, 17, 172, 97), 'boa3.builtin.interop.contract.call_contract', 'call_contract', ({(172, 31, 172, 34): 'GAS', (172, 36, 172, 46): '"""transfer"""', (172, 48, 172, 96): '[calling_script_hash, address, gas_amount, None]'}, {}), "(GAS, 'transfer', [calling_script_hash, address, gas_amount, None]\n )", False, 'from boa3.builtin.interop.contract import GAS, NEO, call_contract\n'), ((288, 8, 288, 44), 'boa3.builtin.interop.storage.put', 'put', ({(288, 12, 288, 22): 'to_address', (288, 24, 288, 43): '(to_balance + amount)'}, {}), '(to_address, to_balance + amount)', False, 'from boa3.builtin.interop.storage import delete, get, put\n'), ((312, 19, 312, 43), 'boa3.builtin.interop.blockchain.get_contract', 'get_contract', ({(312, 32, 312, 42): 'to_address'}, {}), '(to_address)', False, 'from boa3.builtin.interop.blockchain import get_contract\n'), ((377, 8, 377, 69), 'boa3.builtin.interop.storage.delete', 'delete', ({(377, 15, 377, 68): '(TRANSFER_ALLOWANCE_PREFIX + originator + from_address)'}, {}), '(TRANSFER_ALLOWANCE_PREFIX + originator + from_address)', False, 'from boa3.builtin.interop.storage import delete, get, put\n'), ((379, 8, 379, 101), 'boa3.builtin.interop.storage.put', 'put', ({(379, 12, 379, 65): '(TRANSFER_ALLOWANCE_PREFIX + originator + from_address)', (379, 67, 379, 100): '(approved_transfer_amount - amount)'}, {}), '(TRANSFER_ALLOWANCE_PREFIX + originator + from_address, \n approved_transfer_amount - amount)', False, 'from boa3.builtin.interop.storage import delete, get, put\n'), ((391, 8, 391, 44), 'boa3.builtin.interop.storage.put', 'put', ({(391, 12, 391, 22): 'to_address', (391, 24, 391, 43): '(to_balance + amount)'}, {}), '(to_address, to_balance + amount)', False, 'from boa3.builtin.interop.storage import delete, get, put\n'), ((419, 11, 419, 36), 'boa3.builtin.interop.runtime.check_witness', 'check_witness', ({(419, 25, 419, 35): 'originator'}, {}), '(originator)', False, 'from boa3.builtin.interop.runtime import calling_script_hash, check_witness\n'), ((223, 11, 223, 41), 'boa3.builtin.interop.storage.get', 'get', ({(223, 15, 223, 40): 'TOKEN_TOTAL_SUPPLY_PREFIX'}, {}), '(TOKEN_TOTAL_SUPPLY_PREFIX)', False, 'from boa3.builtin.interop.storage import delete, get, put\n'), ((240, 11, 240, 23), 'boa3.builtin.interop.storage.get', 'get', ({(240, 15, 240, 22): 'account'}, {}), '(account)', False, 'from boa3.builtin.interop.storage import delete, get, put\n'), ((269, 19, 269, 36), 'boa3.builtin.interop.storage.get', 'get', ({(269, 23, 269, 35): 'from_address'}, {}), '(from_address)', False, 'from boa3.builtin.interop.storage import delete, get, put\n'), ((277, 15, 277, 42), 'boa3.builtin.interop.runtime.check_witness', 'check_witness', ({(277, 29, 277, 41): 'from_address'}, {}), '(from_address)', False, 'from boa3.builtin.interop.runtime import calling_script_hash, check_witness\n'), ((283, 12, 283, 32), 'boa3.builtin.interop.storage.delete', 'delete', ({(283, 19, 283, 31): 'from_address'}, {}), '(from_address)', False, 'from boa3.builtin.interop.storage import delete, get, put\n'), ((285, 12, 285, 52), 'boa3.builtin.interop.storage.put', 'put', ({(285, 16, 285, 28): 'from_address', (285, 30, 285, 51): '(from_balance - amount)'}, {}), '(from_address, from_balance - amount)', False, 'from boa3.builtin.interop.storage import delete, get, put\n'), ((314, 12, 314, 80), 'boa3.builtin.interop.contract.call_contract', 'call_contract', ({(314, 26, 314, 36): 'to_address', (314, 38, 314, 49): '"""onPayment"""', (314, 51, 314, 79): '[from_address, amount, data]'}, {}), "(to_address, 'onPayment', [from_address, amount, data])", False, 'from boa3.builtin.interop.contract import GAS, NEO, call_contract\n'), ((332, 11, 332, 69), 'boa3.builtin.interop.storage.get', 'get', ({(332, 15, 332, 68): '(TRANSFER_ALLOWANCE_PREFIX + from_address + to_address)'}, {}), '(TRANSFER_ALLOWANCE_PREFIX + from_address + to_address)', False, 'from boa3.builtin.interop.storage import delete, get, put\n'), ((364, 15, 364, 42), 'boa3.builtin.interop.runtime.check_witness', 'check_witness', ({(364, 29, 364, 41): 'from_address'}, {}), '(from_address)', False, 'from boa3.builtin.interop.runtime import calling_script_hash, check_witness\n'), ((385, 12, 385, 30), 'boa3.builtin.interop.storage.delete', 'delete', ({(385, 19, 385, 29): 'originator'}, {}), '(originator)', False, 'from boa3.builtin.interop.storage import delete, get, put\n'), ((387, 12, 387, 56), 'boa3.builtin.interop.storage.put', 'put', ({(387, 16, 387, 26): 'originator', (387, 28, 387, 55): '(originator_balance - amount)'}, {}), '(originator, originator_balance - amount)', False, 'from boa3.builtin.interop.storage import delete, get, put\n'), ((97, 11, 97, 46), 'boa3.builtin.interop.storage.get', 'get', ({(97, 15, 97, 45): '(KYC_WHITELIST_PREFIX + address)'}, {}), '(KYC_WHITELIST_PREFIX + address)', False, 'from boa3.builtin.interop.storage import delete, get, put\n'), ((110, 7, 110, 37), 'boa3.builtin.interop.storage.get', 'get', ({(110, 11, 110, 36): 'TOKEN_TOTAL_SUPPLY_PREFIX'}, {}), '(TOKEN_TOTAL_SUPPLY_PREFIX)', False, 'from boa3.builtin.interop.storage import delete, get, put\n'), ((287, 21, 287, 36), 'boa3.builtin.interop.storage.get', 'get', ({(287, 25, 287, 35): 'to_address'}, {}), '(to_address)', False, 'from boa3.builtin.interop.storage import delete, get, put\n'), ((390, 21, 390, 36), 'boa3.builtin.interop.storage.get', 'get', ({(390, 25, 390, 35): 'to_address'}, {}), '(to_address)', False, 'from boa3.builtin.interop.storage import delete, get, put\n'), ((454, 16, 454, 34), 'boa3.builtin.interop.storage.put', 'put', ({(454, 20, 454, 27): 'kyc_key', (454, 29, 454, 33): '(True)'}, {}), '(kyc_key, True)', False, 'from boa3.builtin.interop.storage import delete, get, put\n'), ((473, 16, 473, 31), 'boa3.builtin.interop.storage.delete', 'delete', ({(473, 23, 473, 30): 'kyc_key'}, {}), '(kyc_key)', False, 'from boa3.builtin.interop.storage import delete, get, put\n')]
Partaourides/SERN | emotion_recognition.py | e6cc0a9a0cc3ac4b9a87e3ccdf5781792f85d718 | import os
# Restrict the script to run on CPU
os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"] = ""
# Import the Keras TensorFlow backend
# from keras import backend as K
import tensorflow as tf
# Configure it to use only specific CPU Cores
config = tf.ConfigProto(intra_op_parallelism_threads=4,
inter_op_parallelism_threads=4,
device_count={"CPU": 1, "GPU": 0},
allow_soft_placement=True)
# import tensorflow as tf
import numpy as np
from IEOMAP_dataset_AC import dataset, IeomapSentenceIterator
from sklearn.metrics import confusion_matrix
from models_AC import SentenceModel
import json
import os
def emotion_recognition(n_run, epochs, batch_size, embedding_size, first_rnn_size, dropout, embedding, num_speakers):
########################################################################################################################
# Hyper-parameters
########################################################################################################################
split_size = 0.8 # Split proportion of train and test data
#log_dir = './logs_AC/RNN_without_ID/1'
log_dir = './logs_AC/RNN_' \
+ str(num_speakers) + '/' + str(n_run) + '/'
#log_dir = './logs_AC/RNN_' + embedding + 'Emb' + str(embedding_size) + '_1layer' + str(2*first_rnn_size) + '/' + str(n_run)
train_log_dir = log_dir + 'train'
val_log_dir = log_dir + 'val'
########################################################################################################################
# Initialize the Data set
########################################################################################################################
sentences, targets, data_info, speakers = dataset(mode='sentences', embedding=embedding, embedding_size=embedding_size)
train_data = IeomapSentenceIterator(sentences[0], targets[0], data_info['sentences_length'][0], speakers[0])
val_data = IeomapSentenceIterator(sentences[1], targets[1], data_info['sentences_length'][1], speakers[1])
test_data = IeomapSentenceIterator(sentences[2], targets[2], data_info['sentences_length'][2], speakers[2])
########################################################################################################################
# Initialize the model
########################################################################################################################
g = SentenceModel(vocab_size=(data_info['vocabulary_size'] + 1),
embedding_size=embedding_size,
first_rnn_size=first_rnn_size,
num_classes=data_info['num_classes'],
dropout=dropout,
embedding=embedding,
num_speakers=num_speakers)
# Store model setup
model_setup = {'vocab_size': (data_info['vocabulary_size'] + 1),
'embedding_size': embedding_size,
'first_rnn_size': first_rnn_size,
'num_classes': data_info['num_classes'],
'dropout': dropout,
'embedding': embedding,
'num_speakers': num_speakers}
dirname = os.path.dirname(log_dir)
if not os.path.exists(dirname):
os.makedirs(dirname)
with open(log_dir + 'model_setup.p', 'w') as file:
json.dump(model_setup, file, indent=4)
########################################################################################################################
# Initialize the parameters
########################################################################################################################
sess = tf.Session(config=config)
sess.run(tf.global_variables_initializer())
sess.run(tf.local_variables_initializer())
saver = tf.train.Saver()
epoch = 0
best_epoch = 0
train_conf_matrix = 0
val_conf_matrix = 0
test_conf_matrix = 0
best_acc = 0
########################################################################################################################
# Performance Indicators
########################################################################################################################
writer_train = tf.summary.FileWriter(train_log_dir, sess.graph)
writer_val = tf.summary.FileWriter(val_log_dir)
accuracy_tf = tf.placeholder(tf.float32, [])
precision_tf = tf.placeholder(tf.float32, [])
recall_tf = tf.placeholder(tf.float32, [])
summary_op = tf.summary.scalar('accuracy', accuracy_tf)
summary_op = tf.summary.scalar('precision', precision_tf)
summary_op = tf.summary.scalar('recall', recall_tf)
########################################################################################################################
# Model training procedure
########################################################################################################################
while train_data.epoch < epochs: # and train_data.epoch < best_epoch + 20:
sentences_batch, sentences_length_batch, targets_batch, speakers_batch = train_data.next_batch(batch_size)
preds, _ = sess.run([g['preds'],
g['ts']],
feed_dict={g['x']: np.array(sentences_batch),
g['y']: np.array(targets_batch).reshape(len(targets_batch)),
g['speaker']: np.array(speakers_batch),
g['seqlen']: np.array(sentences_length_batch).reshape(len(targets_batch))})
####################################################################################################################
# Calculate the Train data Confusion Matrix
####################################################################################################################
train_conf_matrix += confusion_matrix(targets_batch, preds, labels=range(data_info['num_classes']))
####################################################################################################################
# Add the end of each training epoch compute the validation results and store the relevant information
####################################################################################################################
if train_data.epoch != epoch:
while val_data.epoch == epoch:
sentences_batch, sentences_length_batch, targets_batch, speakers_batch = val_data.next_batch(batch_size)
preds = sess.run([g['preds']],
feed_dict={g['x']: np.array(sentences_batch),
g['y']: np.array(targets_batch).reshape(len(targets_batch)),
g['speaker']: np.array(speakers_batch),
g['seqlen']: np.array(sentences_length_batch).reshape(
len(targets_batch))})
############################################################################################################
# Calculate the Test data Confusion Matrix
############################################################################################################
val_conf_matrix += confusion_matrix(targets_batch, preds[0], labels=range(data_info['num_classes']))
################################################################################################################
# Compute Accuracy, Precision and Recall
################################################################################################################
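            # Per-class metrics are read off the confusion matrix C in the usual way:
            #   TP_i = C[i, i]; FP_i = sum(C[:, i]) - TP_i; FN_i = sum(C[i, :]) - TP_i
            #   precision_i = TP_i / (TP_i + FP_i); recall_i = TP_i / (TP_i + FN_i)
            # Accuracy is total correct over total samples; the "total" precision and
            # recall below are unweighted (macro) averages over the classes.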
train_CM_size = len(train_conf_matrix)
total_train = sum(sum(train_conf_matrix))
train_TP = np.diagonal(train_conf_matrix)
train_FP = [sum(train_conf_matrix[:, i]) - train_TP[i] for i in range(train_CM_size)]
train_FN = [sum(train_conf_matrix[i, :]) - train_TP[i] for i in range(train_CM_size)]
            train_TN = total_train - train_TP - train_FP - train_FN  # TN is the remainder of all samples, not of the matrix size
            train_precision = train_TP / (train_TP + train_FP)  # aka positive predictive value
train_recall = train_TP / (train_TP + train_FN)
total_train_correct = sum(train_TP)
total_train_accuracy = total_train_correct / total_train
total_train_precision = sum(train_precision) / train_CM_size
total_train_recall = sum(train_recall) / train_CM_size
val_CM_size = len(val_conf_matrix)
total_val = sum(sum(val_conf_matrix))
val_TP = np.diagonal(val_conf_matrix)
val_FP = [sum(val_conf_matrix[:, i]) - val_TP[i] for i in range(val_CM_size)]
val_FN = [sum(val_conf_matrix[i, :]) - val_TP[i] for i in range(val_CM_size)]
            val_TN = total_val - val_TP - val_FP - val_FN
val_precision = val_TP / (val_TP + val_FP)
val_recall = val_TP / (val_TP + val_FN)
total_val_correct = sum(val_TP)
total_val_accuracy = total_val_correct / total_val
total_val_precision = sum(val_precision) / val_CM_size
total_val_recall = sum(val_recall) / val_CM_size
################################################################################################################
# Store Accuracy Precision Recall
################################################################################################################
train_acc_summary = tf.Summary(
value=[tf.Summary.Value(tag="accuracy", simple_value=total_train_accuracy), ])
train_prec_summary = tf.Summary(
value=[tf.Summary.Value(tag="precision", simple_value=total_train_precision), ])
train_rec_summary = tf.Summary(value=[tf.Summary.Value(tag="recall", simple_value=total_train_recall), ])
val_acc_summary = tf.Summary(value=[tf.Summary.Value(tag="accuracy", simple_value=total_val_accuracy), ])
val_prec_summary = tf.Summary(
value=[tf.Summary.Value(tag="precision", simple_value=total_val_precision), ])
val_rec_summary = tf.Summary(value=[tf.Summary.Value(tag="recall", simple_value=total_val_recall), ])
writer_train.add_summary(train_acc_summary, epoch)
writer_train.add_summary(train_prec_summary, epoch)
writer_train.add_summary(train_rec_summary, epoch)
writer_val.add_summary(val_acc_summary, epoch)
writer_val.add_summary(val_prec_summary, epoch)
writer_val.add_summary(val_rec_summary, epoch)
writer_train.flush()
writer_val.flush()
################################################################################################################
# Print the confusion matrix and store important information
################################################################################################################
print(train_conf_matrix)
print(val_conf_matrix)
if best_acc < total_val_accuracy:
saver.save(sess, log_dir + "acc_best_validation_model.ckpt")
best_acc = total_val_accuracy
best_epoch = epoch
store_info = {'epoch': best_epoch,
'train_conf_matrix': list([list(x) for x in train_conf_matrix]),
'train_accuracy': total_train_accuracy,
'train_precision': list(train_precision),
'total_train_precision': total_train_precision,
'train_recall': list(train_recall),
'total_train_recall': total_train_recall,
'val_conf_matrix': list([list(x) for x in val_conf_matrix]),
'val_accuracy': total_val_accuracy,
'val_precision': list(val_precision),
'total_val_precision': total_val_precision,
'val_recall': list(val_recall),
'total_val_recall': total_val_recall}
store_convergence_info = {'epoch': train_data.epoch,
'train_conf_matrix': list([list(x) for x in train_conf_matrix]),
'train_accuracy': total_train_accuracy,
'train_precision': list(train_precision),
'total_train_precision': total_train_precision,
'train_recall': list(train_recall),
'total_train_recall': total_train_recall,
'val_conf_matrix': list([list(x) for x in val_conf_matrix]),
'val_accuracy': total_val_accuracy,
'val_precision': list(val_precision),
'total_val_precision': total_val_precision,
'val_recall': list(val_recall),
'total_val_recall': total_val_recall}
################################################################################################################
# Get ready for the next epoch
################################################################################################################
epoch += 1
train_conf_matrix = 0
val_conf_matrix = 0
################################################################################################################
####################################################################################################################
# Add the end of training compute the test results and store the relevant information
####################################################################################################################
while test_data.epoch == 0:
sentences_batch, sentences_length_batch, targets_batch, speakers_batch = test_data.next_batch(batch_size)
preds = sess.run([g['preds']],
feed_dict={g['x']: np.array(sentences_batch),
g['y']: np.array(targets_batch).reshape(len(targets_batch)),
g['speaker']: np.array(speakers_batch),
g['seqlen']: np.array(sentences_length_batch).reshape(
len(targets_batch))})
############################################################################################################
# Calculate the Test data Confusion Matrix
############################################################################################################
test_conf_matrix += confusion_matrix(targets_batch, preds[0], labels=range(data_info['num_classes']))
################################################################################################################
# Compute Accuracy, Precision and Recall
################################################################################################################
test_CM_size = len(test_conf_matrix)
total_test = sum(sum(test_conf_matrix))
test_TP = np.diagonal(test_conf_matrix)
test_FP = [sum(test_conf_matrix[:, i]) - test_TP[i] for i in range(test_CM_size)]
test_FN = [sum(test_conf_matrix[i, :]) - test_TP[i] for i in range(test_CM_size)]
    test_TN = total_test - test_TP - test_FP - test_FN
test_precision = test_TP / (test_TP + test_FP)
test_recall = test_TP / (test_TP + test_FN)
total_test_correct = sum(test_TP)
total_test_accuracy = total_test_correct / total_test
total_test_precision = sum(test_precision) / test_CM_size
total_test_recall = sum(test_recall) / test_CM_size
################################################################################################################
# Print the confusion matrix and store important information
################################################################################################################
print(test_conf_matrix)
store_convergence_info['test_conf_matrix'] = list([list(x) for x in test_conf_matrix])
store_convergence_info['test_accuracy'] = total_test_accuracy
store_convergence_info['test_precision'] = list(test_precision)
store_convergence_info['total_test_precision'] = total_test_precision
store_convergence_info['test_recall'] = list(test_recall)
store_convergence_info['total_test_recall'] = total_test_recall
# trick to be able to save numpy.int64 into json
def default(o):
if isinstance(o, np.int64): return int(o)
raise TypeError
with open(log_dir + 'convergence_results.p', 'w') as file:
json.dump(store_convergence_info, file, default=default, indent=4)
saver.save(sess, log_dir + "convergence_model.ckpt")
####################################################################################################################
# Add the end of training compute the test results of the best validation model and store the relevant information
####################################################################################################################
saver.restore(sess, log_dir + "acc_best_validation_model.ckpt")
test_conf_matrix = 0
while test_data.epoch == 1:
sentences_batch, sentences_length_batch, targets_batch, speakers_batch = test_data.next_batch(batch_size)
preds = sess.run([g['preds']],
feed_dict={g['x']: np.array(sentences_batch),
g['y']: np.array(targets_batch).reshape(len(targets_batch)),
g['speaker']: np.array(speakers_batch),
g['seqlen']: np.array(sentences_length_batch).reshape(
len(targets_batch))})
############################################################################################################
# Calculate the Test data Confusion Matrix
############################################################################################################
test_conf_matrix += confusion_matrix(targets_batch, preds[0], labels=range(data_info['num_classes']))
################################################################################################################
# Compute Accuracy, Precision and Recall
################################################################################################################
test_CM_size = len(test_conf_matrix)
total_test = sum(sum(test_conf_matrix))
test_TP = np.diagonal(test_conf_matrix)
test_FP = [sum(test_conf_matrix[:, i]) - test_TP[i] for i in range(test_CM_size)]
test_FN = [sum(test_conf_matrix[i, :]) - test_TP[i] for i in range(test_CM_size)]
    test_TN = total_test - test_TP - test_FP - test_FN
test_precision = test_TP / (test_TP + test_FP)
test_recall = test_TP / (test_TP + test_FN)
total_test_correct = sum(test_TP)
total_test_accuracy = total_test_correct / total_test
total_test_precision = sum(test_precision) / test_CM_size
total_test_recall = sum(test_recall) / test_CM_size
################################################################################################################
# Print the confusion matrix and store important information
################################################################################################################
print(test_conf_matrix)
store_info['test_conf_matrix'] = list([list(x) for x in test_conf_matrix])
store_info['test_accuracy'] = total_test_accuracy
store_info['test_precision'] = list(test_precision)
store_info['total_test_precision'] = total_test_precision
store_info['test_recall'] = list(test_recall)
store_info['total_test_recall'] = total_test_recall
with open(log_dir + 'acc_best_validation_results.p', 'w') as file:
json.dump(store_info, file, default=default, indent=4)
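# Minimal invocation sketch (the hyper-parameter values below are illustrative
# placeholders, not tuned settings from this project):
# if __name__ == '__main__':
#     emotion_recognition(n_run=1, epochs=50, batch_size=32, embedding_size=300,
#                         first_rnn_size=128, dropout=0.5, embedding='glove', num_speakers=2)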
| [((10, 9, 13, 50), 'tensorflow.ConfigProto', 'tf.ConfigProto', (), '', True, 'import tensorflow as tf\n'), ((40, 46, 40, 123), 'IEOMAP_dataset_AC.dataset', 'dataset', (), '', False, 'from IEOMAP_dataset_AC import dataset, IeomapSentenceIterator\n'), ((42, 17, 42, 112), 'IEOMAP_dataset_AC.IeomapSentenceIterator', 'IeomapSentenceIterator', ({(42, 40, 42, 52): 'sentences[0]', (42, 54, 42, 64): 'targets[0]', (42, 66, 42, 98): "data_info['sentences_length'][0]", (42, 100, 42, 111): 'speakers[0]'}, {}), "(sentences[0], targets[0], data_info['sentences_length'][0], speakers[0])", False, 'from IEOMAP_dataset_AC import dataset, IeomapSentenceIterator\n'), ((43, 15, 43, 110), 'IEOMAP_dataset_AC.IeomapSentenceIterator', 'IeomapSentenceIterator', ({(43, 38, 43, 50): 'sentences[1]', (43, 52, 43, 62): 'targets[1]', (43, 64, 43, 96): "data_info['sentences_length'][1]", (43, 98, 43, 109): 'speakers[1]'}, {}), "(sentences[1], targets[1], data_info['sentences_length'][1], speakers[1])", False, 'from IEOMAP_dataset_AC import dataset, IeomapSentenceIterator\n'), ((44, 16, 44, 111), 'IEOMAP_dataset_AC.IeomapSentenceIterator', 'IeomapSentenceIterator', ({(44, 39, 44, 51): 'sentences[2]', (44, 53, 44, 63): 'targets[2]', (44, 65, 44, 97): "data_info['sentences_length'][2]", (44, 99, 44, 110): 'speakers[2]'}, {}), "(sentences[2], targets[2], data_info['sentences_length'][2], speakers[2])", False, 'from IEOMAP_dataset_AC import dataset, IeomapSentenceIterator\n'), ((49, 8, 55, 48), 'models_AC.SentenceModel', 'SentenceModel', (), '', False, 'from models_AC import SentenceModel\n'), ((66, 14, 66, 38), 'os.path.dirname', 'os.path.dirname', ({(66, 30, 66, 37): 'log_dir'}, {}), '(log_dir)', False, 'import os\n'), ((76, 11, 76, 36), 'tensorflow.Session', 'tf.Session', (), '', True, 'import tensorflow as tf\n'), ((79, 12, 79, 28), 'tensorflow.train.Saver', 'tf.train.Saver', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((90, 19, 90, 67), 'tensorflow.summary.FileWriter', 'tf.summary.FileWriter', ({(90, 41, 90, 54): 'train_log_dir', (90, 56, 90, 66): 'sess.graph'}, {}), '(train_log_dir, sess.graph)', True, 'import tensorflow as tf\n'), ((91, 17, 91, 51), 'tensorflow.summary.FileWriter', 'tf.summary.FileWriter', ({(91, 39, 91, 50): 'val_log_dir'}, {}), '(val_log_dir)', True, 'import tensorflow as tf\n'), ((93, 18, 93, 48), 'tensorflow.placeholder', 'tf.placeholder', ({(93, 33, 93, 43): 'tf.float32', (93, 45, 93, 47): '[]'}, {}), '(tf.float32, [])', True, 'import tensorflow as tf\n'), ((94, 19, 94, 49), 'tensorflow.placeholder', 'tf.placeholder', ({(94, 34, 94, 44): 'tf.float32', (94, 46, 94, 48): '[]'}, {}), '(tf.float32, [])', True, 'import tensorflow as tf\n'), ((95, 16, 95, 46), 'tensorflow.placeholder', 'tf.placeholder', ({(95, 31, 95, 41): 'tf.float32', (95, 43, 95, 45): '[]'}, {}), '(tf.float32, [])', True, 'import tensorflow as tf\n'), ((97, 17, 97, 59), 'tensorflow.summary.scalar', 'tf.summary.scalar', ({(97, 35, 97, 45): '"""accuracy"""', (97, 47, 97, 58): 'accuracy_tf'}, {}), "('accuracy', accuracy_tf)", True, 'import tensorflow as tf\n'), ((98, 17, 98, 61), 'tensorflow.summary.scalar', 'tf.summary.scalar', ({(98, 35, 98, 46): '"""precision"""', (98, 48, 98, 60): 'precision_tf'}, {}), "('precision', precision_tf)", True, 'import tensorflow as tf\n'), ((99, 17, 99, 55), 'tensorflow.summary.scalar', 'tf.summary.scalar', ({(99, 35, 99, 43): '"""recall"""', (99, 45, 99, 54): 'recall_tf'}, {}), "('recall', recall_tf)", True, 'import tensorflow as tf\n'), ((263, 14, 263, 43), 'numpy.diagonal', 'np.diagonal', ({(263, 26, 263, 42): 'test_conf_matrix'}, {}), '(test_conf_matrix)', True, 'import numpy as np\n'), ((323, 14, 323, 43), 'numpy.diagonal', 'np.diagonal', ({(323, 26, 323, 42): 'test_conf_matrix'}, {}), '(test_conf_matrix)', True, 'import numpy as np\n'), ((67, 11, 67, 34), 'os.path.exists', 'os.path.exists', ({(67, 26, 67, 33): 'dirname'}, {}), '(dirname)', False, 'import os\n'), ((68, 8, 68, 28), 'os.makedirs', 'os.makedirs', ({(68, 20, 68, 27): 'dirname'}, {}), '(dirname)', False, 'import os\n'), ((71, 8, 71, 46), 'json.dump', 'json.dump', (), '', False, 'import json\n'), ((77, 13, 77, 46), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((78, 13, 78, 45), 'tensorflow.local_variables_initializer', 'tf.local_variables_initializer', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((294, 8, 294, 74), 'json.dump', 'json.dump', (), '', False, 'import json\n'), ((349, 8, 349, 62), 'json.dump', 'json.dump', (), '', False, 'import json\n'), ((141, 23, 141, 53), 'numpy.diagonal', 'np.diagonal', ({(141, 35, 141, 52): 'train_conf_matrix'}, {}), '(train_conf_matrix)', True, 'import numpy as np\n'), ((157, 21, 157, 49), 'numpy.diagonal', 'np.diagonal', ({(157, 33, 157, 48): 'val_conf_matrix'}, {}), '(val_conf_matrix)', True, 'import numpy as np\n'), ((108, 47, 108, 72), 'numpy.array', 'np.array', ({(108, 56, 108, 71): 'sentences_batch'}, {}), '(sentences_batch)', True, 'import numpy as np\n'), ((110, 53, 110, 77), 'numpy.array', 'np.array', ({(110, 62, 110, 76): 'speakers_batch'}, {}), '(speakers_batch)', True, 'import numpy as np\n'), ((247, 44, 247, 69), 'numpy.array', 'np.array', ({(247, 53, 247, 68): 'sentences_batch'}, {}), '(sentences_batch)', True, 'import numpy as np\n'), ((249, 50, 249, 74), 'numpy.array', 'np.array', ({(249, 59, 249, 73): 'speakers_batch'}, {}), '(speakers_batch)', True, 'import numpy as np\n'), ((307, 44, 307, 69), 'numpy.array', 'np.array', ({(307, 53, 307, 68): 'sentences_batch'}, {}), '(sentences_batch)', True, 'import numpy as np\n'), ((309, 50, 309, 74), 'numpy.array', 'np.array', ({(309, 59, 309, 73): 'speakers_batch'}, {}), '(speakers_batch)', True, 'import numpy as np\n'), ((174, 23, 174, 90), 'tensorflow.Summary.Value', 'tf.Summary.Value', (), '', True, 'import tensorflow as tf\n'), ((176, 23, 176, 92), 'tensorflow.Summary.Value', 'tf.Summary.Value', (), '', True, 'import tensorflow as tf\n'), ((177, 50, 177, 113), 'tensorflow.Summary.Value', 'tf.Summary.Value', (), '', True, 'import tensorflow as tf\n'), ((179, 48, 179, 113), 'tensorflow.Summary.Value', 'tf.Summary.Value', (), '', True, 'import tensorflow as tf\n'), ((181, 23, 181, 90), 'tensorflow.Summary.Value', 'tf.Summary.Value', (), '', True, 'import tensorflow as tf\n'), ((182, 48, 182, 109), 'tensorflow.Summary.Value', 'tf.Summary.Value', (), '', True, 'import tensorflow as tf\n'), ((109, 47, 109, 70), 'numpy.array', 'np.array', ({(109, 56, 109, 69): 'targets_batch'}, {}), '(targets_batch)', True, 'import numpy as np\n'), ((111, 52, 111, 84), 'numpy.array', 'np.array', ({(111, 61, 111, 83): 'sentences_length_batch'}, {}), '(sentences_length_batch)', True, 'import numpy as np\n'), ((125, 52, 125, 77), 'numpy.array', 'np.array', ({(125, 61, 125, 76): 'sentences_batch'}, {}), '(sentences_batch)', True, 'import numpy as np\n'), ((127, 58, 127, 82), 'numpy.array', 'np.array', ({(127, 67, 127, 81): 'speakers_batch'}, {}), '(speakers_batch)', True, 'import numpy as np\n'), ((248, 44, 248, 67), 'numpy.array', 'np.array', ({(248, 53, 248, 66): 'targets_batch'}, {}), '(targets_batch)', True, 'import numpy as np\n'), ((250, 49, 250, 81), 'numpy.array', 'np.array', ({(250, 58, 250, 80): 'sentences_length_batch'}, {}), '(sentences_length_batch)', True, 'import numpy as np\n'), ((308, 44, 308, 67), 'numpy.array', 'np.array', ({(308, 53, 308, 66): 'targets_batch'}, {}), '(targets_batch)', True, 'import numpy as np\n'), ((310, 49, 310, 81), 'numpy.array', 'np.array', ({(310, 58, 310, 80): 'sentences_length_batch'}, {}), '(sentences_length_batch)', True, 'import numpy as np\n'), ((126, 52, 126, 75), 'numpy.array', 'np.array', ({(126, 61, 126, 74): 'targets_batch'}, {}), '(targets_batch)', True, 'import numpy as np\n'), ((128, 57, 128, 89), 'numpy.array', 'np.array', ({(128, 66, 128, 88): 'sentences_length_batch'}, {}), '(sentences_length_batch)', True, 'import numpy as np\n')]
flaree/Toxic-Cogs | dashboard/rpc/alias.py | e33c3fe3a81c86ef3c89928b0a977fae13b916a9 | import discord
from redbot.core.bot import Red
from redbot.core.commands import commands
from redbot.core.utils.chat_formatting import humanize_list
from .utils import permcheck, rpccheck
class DashboardRPC_AliasCC:
def __init__(self, cog: commands.Cog):
self.bot: Red = cog.bot
self.cog: commands.Cog = cog
# Initialize RPC handlers
self.bot.register_rpc_handler(self.fetch_aliases)
def unload(self):
self.bot.unregister_rpc_handler(self.fetch_aliases)
@staticmethod
def safe(string):
return (
            string.replace("&", "&amp;")
            .replace("<", "&lt;")
            .replace(">", "&gt;")
            .replace('"', "&quot;")
)
@rpccheck()
@permcheck("Alias", ["aliascc"])
async def fetch_aliases(self, guild: discord.Guild, member: discord.Member):
aliascog = self.bot.get_cog("Alias")
aliases = await aliascog._aliases.get_guild_aliases(guild)
ida = {}
for alias in aliases:
if len(alias.command) > 50:
command = alias.command[:47] + "..."
else:
command = alias.command
if alias.command not in ida:
ida[alias.command] = {"aliases": [], "shortened": command}
ida[alias.command]["aliases"].append(f"{self.safe(alias.name)}")
data = {}
for command, aliases in ida.items():
data[command] = {
"humanized": humanize_list(
list(map(lambda x: f"<code>{x}</code>", aliases["aliases"]))
),
"raw": aliases["aliases"],
"shortened": aliases["shortened"],
}
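        # The resulting payload maps each full command to a dict with three keys:
        #   "humanized": an HTML-ready listing of the aliases (each wrapped in <code> tags),
        #   "raw": the plain alias names, and
        #   "shortened": the command text truncated to at most 50 characters for display.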
return data
| [] |
hafezgh/music_classification | train.py | 68fa398b7d4455475d07ae17c3b6b94459a96ac7 | import torch
DEVICE = 'cuda'
import math
import numpy as np  # np.inf is used by the early-stopping logic below
import torch.optim as optim
from model import *  # expected to also provide torch.nn (as nn) and CRNN_Base
import os
import copy, gzip, pickle, time
data_dir = './drive/MyDrive/music_classification/Data'
classes = os.listdir(data_dir+'/images_original')
def fit(model, train_loader, train_len, optimizer, criterion):
model.train()
batch_size = train_loader.batch_size
n_batches = math.ceil(train_len/batch_size)
#print('Batch Size:', batch_size,'Number of Batches:', n_batches)
model.train()
train_running_loss = 0.0
train_running_correct = 0
counter = 0
total = 0
#prog_bar = tqdm(enumerate(train_loader), total=int(train_len/batch_size))
for i, data in enumerate(train_loader):
counter += 1
data, target = data[0].to(DEVICE), data[1].to(DEVICE)
total += target.size(0)
optimizer.zero_grad()
outputs = model(data)
loss = criterion(outputs, target)
train_running_loss += loss.item()
_, preds = torch.max(outputs.data, 1)
train_running_correct += (preds == target).sum().item()
loss.backward()
optimizer.step()
train_loss = train_running_loss / counter
train_accuracy = 100. * train_running_correct / total
return train_loss, train_accuracy
def validate(model, val_loader, val_len, criterion):
model.eval()
val_running_loss = 0.0
val_running_correct = 0
counter = 0
total = 0
batch_size = val_len
#prog_bar = tqdm(enumerate(val_loader), total=int(val_len/batch_size))
with torch.no_grad():
for i, data in enumerate(val_loader):
counter += 1
data, target = data[0].to(DEVICE), data[1].to(DEVICE)
total += target.size(0)
outputs = model(data)
loss = criterion(outputs, target)
val_running_loss += loss.item()
_, preds = torch.max(outputs.data, 1)
val_running_correct += (preds == target).sum().item()
val_loss = val_running_loss / counter
val_accuracy = 100. * val_running_correct / total
return val_loss, val_accuracy
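# Usage sketch for `train` (the file name is illustrative): each epoch writes a
# gzip-pickled checkpoint to `checkpoint_path`, and passing the same file back via the
# `path` keyword resumes training from it.
#   model, train_loss, train_acc, val_loss, val_acc = train(
#       hparams, train_loader, val_loader, train_len, val_len,
#       checkpoint_path='ckpt.pkl.gz', path='ckpt.pkl.gz',
#       lr_scheduler=True, early_stopping=True)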
def train(hparams, train_loader, val_loader, train_len, val_len, checkpoint_path=None, **kwargs):
model = CRNN_Base(len(classes), hparams['c'], hparams['h'], hparams['w'], hparams['k'], hparams['filters'],\
hparams['poolings'], hparams['dropout_rate'], gru_units=hparams['gru_units'])
model.to(DEVICE)
optimizer = optim.Adam(model.parameters(), lr=hparams['lr'])
try:
path = kwargs['path']
stream = gzip.open(path, "rb")
checkpoint = pickle.load(stream)
stream.close()
train_loss = checkpoint['train_loss']
train_accuracy = checkpoint['train_accuracy']
val_loss = checkpoint['val_loss']
val_accuracy = checkpoint['val_accuracy']
model.load_state_dict(checkpoint['model_state_dict'])
optimizer.load_state_dict(checkpoint['optimizer_state_dict'])
epoch_load = checkpoint['epoch']
print(f'Checkpoint found! Training will resume from epoch {epoch_load+1}')
print('Last epoch results: ')
print(f"Train Loss: {train_loss[-1]:.4f}, Train Acc: {train_accuracy[-1]:.2f}")
print(f'Val Loss: {val_loss[-1]:.4f}, Val Acc: {val_accuracy[-1]:.2f}')
if 'lr_scheduler' in kwargs.keys() and 'scheduler_state_dict' in checkpoint.keys():
if kwargs['lr_scheduler'] == True:
                print('Learning rate scheduler is active.\n')
scheduler = optim.lr_scheduler.StepLR(optimizer, step_size=20, gamma=0.1, last_epoch=-1, verbose=True)
scheduler.load_state_dict(checkpoint['scheduler_state_dict'])
else:
scheduler = False
else:
scheduler = False
except:
print('No checkpoints found! Training will start from the beginning.\n')
train_loss, train_accuracy = [], []
val_loss, val_accuracy = [], []
epoch_load = 0
scheduler = None
es = False
if 'lr_scheduler' in kwargs.keys():
if kwargs['lr_scheduler'] == True:
                print('Learning rate scheduler is active.\n')
scheduler = optim.lr_scheduler.StepLR(optimizer, step_size=20, gamma=0.1, last_epoch=-1, verbose=True)
else:
scheduler = False
else:
scheduler = False
es = False
if 'early_stopping' in kwargs.keys():
print('Early stopping is active.')
print()
es = True
min_val_loss = np.inf
patience = 30
epochs_no_improve = 0
best_model = None
criterion = nn.CrossEntropyLoss()
start = time.time()
for epoch in range(hparams['epochs']-epoch_load):
print(f"Epoch {epoch+epoch_load+1} of {hparams['epochs']}")
train_epoch_loss, train_epoch_accuracy = fit(
model, train_loader, train_len, optimizer, criterion
)
val_epoch_loss, val_epoch_accuracy = validate(
model, val_loader, val_len, criterion
)
if scheduler:
scheduler.step()
train_loss.append(train_epoch_loss)
train_accuracy.append(train_epoch_accuracy)
val_loss.append(val_epoch_loss)
val_accuracy.append(val_epoch_accuracy)
if es:
if val_epoch_loss < min_val_loss:
#Saving the model
min_val_loss = val_epoch_loss
best_model = copy.deepcopy(model.state_dict())
epochs_no_improve = 0
else:
epochs_no_improve += 1
# Check early stopping condition
if epochs_no_improve == patience:
print(f'Early stopping after {epoch+epoch_load+1} epochs!')
model.load_state_dict(best_model)
break
print(f"Train Loss: {train_epoch_loss:.4f}, Train Acc: {train_epoch_accuracy:.2f}")
print(f'Val Loss: {val_epoch_loss:.4f}, Val Acc: {val_epoch_accuracy:.2f}')
checkpoint_to_save = {'model_state_dict': model.state_dict(),
'optimizer_state_dict': optimizer.state_dict(),
'epoch': epoch+epoch_load,
'train_loss': train_loss,
'val_loss': val_loss,
'train_accuracy': train_accuracy,
'val_accuracy': val_accuracy
}
if scheduler:
checkpoint_to_save['scheduler_state_dict'] = scheduler.state_dict()
## Saving the model
if checkpoint_path != None:
stream = gzip.open(checkpoint_path, "wb")
pickle.dump(checkpoint_to_save, stream)
stream.close()
end = time.time()
print(f"Training time: {(end-start)/60:.3f} minutes")
return model, train_loss, train_accuracy, val_loss, val_accuracy | [((9, 10, 9, 49), 'os.listdir', 'os.listdir', ({(9, 21, 9, 48): "data_dir + '/images_original'"}, {}), "(data_dir + '/images_original')", False, 'import os\n'), ((15, 16, 15, 47), 'math.ceil', 'math.ceil', ({(15, 26, 15, 46): 'train_len / batch_size'}, {}), '(train_len / batch_size)', False, 'import math\n'), ((122, 12, 122, 23), 'time.time', 'time.time', ({}, {}), '()', False, 'import copy, gzip, pickle, time\n'), ((169, 10, 169, 21), 'time.time', 'time.time', ({}, {}), '()', False, 'import copy, gzip, pickle, time\n'), ((31, 19, 31, 45), 'torch.max', 'torch.max', ({(31, 29, 31, 41): 'outputs.data', (31, 43, 31, 44): '1'}, {}), '(outputs.data, 1)', False, 'import torch\n'), ((48, 9, 48, 24), 'torch.no_grad', 'torch.no_grad', ({}, {}), '()', False, 'import torch\n'), ((71, 17, 71, 38), 'gzip.open', 'gzip.open', ({(71, 27, 71, 31): 'path', (71, 33, 71, 37): '"""rb"""'}, {}), "(path, 'rb')", False, 'import copy, gzip, pickle, time\n'), ((72, 21, 72, 40), 'pickle.load', 'pickle.load', ({(72, 33, 72, 39): 'stream'}, {}), '(stream)', False, 'import copy, gzip, pickle, time\n'), ((57, 23, 57, 49), 'torch.max', 'torch.max', ({(57, 33, 57, 45): 'outputs.data', (57, 47, 57, 48): '1'}, {}), '(outputs.data, 1)', False, 'import torch\n'), ((166, 21, 166, 53), 'gzip.open', 'gzip.open', ({(166, 31, 166, 46): 'checkpoint_path', (166, 48, 166, 52): '"""wb"""'}, {}), "(checkpoint_path, 'wb')", False, 'import copy, gzip, pickle, time\n'), ((167, 12, 167, 51), 'pickle.dump', 'pickle.dump', ({(167, 24, 167, 42): 'checkpoint_to_save', (167, 44, 167, 50): 'stream'}, {}), '(checkpoint_to_save, stream)', False, 'import copy, gzip, pickle, time\n'), ((88, 28, 88, 118), 'torch.optim.lr_scheduler.StepLR', 'optim.lr_scheduler.StepLR', (), '', True, 'import torch.optim as optim\n'), ((104, 28, 104, 118), 'torch.optim.lr_scheduler.StepLR', 'optim.lr_scheduler.StepLR', (), '', True, 'import torch.optim as optim\n')] |
cstsunfu/dlkit | dlk/core/schedulers/__init__.py | 69e0efd372fa5c0ae5313124d0ba1ef55b535196 | # Copyright 2021 cstsunfu. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""schedulers"""
import importlib
import os
from dlk.utils.register import Register
from torch.optim import Optimizer
from torch.optim.lr_scheduler import LambdaLR
import math
scheduler_config_register = Register("Schedule config register.")
scheduler_register = Register("Schedule register.")
class BaseScheduler(object):
"""interface for Schedule"""
    def get_scheduler(self) -> LambdaLR:
        """Return the initialized scheduler.
        Returns:
            the configured LambdaLR scheduler
        """
raise NotImplementedError
def __call__(self):
"""the same as self.get_scheduler()
"""
return self.get_scheduler()
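# A minimal sketch of a concrete scheduler, kept commented out so importing
# this module registers nothing extra. The decorator usage and constructor
# signature are illustrative assumptions; the real implementations live in
# the modules pulled in by `import_schedulers` below.
#
#     @scheduler_register("constant")
#     class ConstantScheduler(BaseScheduler):
#         def __init__(self, optimizer: Optimizer):
#             super().__init__()
#             self.optimizer = optimizer
#
#         def get_scheduler(self) -> LambdaLR:
#             # a constant multiplicative factor of 1.0 for every step
#             return LambdaLR(self.optimizer, lambda step: 1.0)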
def import_schedulers(schedulers_dir, namespace):
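    """Import every module in `schedulers_dir` so each scheduler registers
    itself with the registries above as an import side effect.
    """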
for file in os.listdir(schedulers_dir):
path = os.path.join(schedulers_dir, file)
if (
not file.startswith("_")
and not file.startswith(".")
and (file.endswith(".py") or os.path.isdir(path))
):
scheduler_name = file[: file.find(".py")] if file.endswith(".py") else file
importlib.import_module(namespace + "." + scheduler_name)
# automatically import any Python files in the schedulers directory
schedulers_dir = os.path.dirname(__file__)
import_schedulers(schedulers_dir, "dlk.core.schedulers")
| [((24, 28, 24, 65), 'dlk.utils.register.Register', 'Register', ({(24, 37, 24, 64): '"""Schedule config register."""'}, {}), "('Schedule config register.')", False, 'from dlk.utils.register import Register\n'), ((25, 21, 25, 51), 'dlk.utils.register.Register', 'Register', ({(25, 30, 25, 50): '"""Schedule register."""'}, {}), "('Schedule register.')", False, 'from dlk.utils.register import Register\n'), ((59, 17, 59, 42), 'os.path.dirname', 'os.path.dirname', ({(59, 33, 59, 41): '__file__'}, {}), '(__file__)', False, 'import os\n'), ((47, 16, 47, 42), 'os.listdir', 'os.listdir', ({(47, 27, 47, 41): 'schedulers_dir'}, {}), '(schedulers_dir)', False, 'import os\n'), ((48, 15, 48, 49), 'os.path.join', 'os.path.join', ({(48, 28, 48, 42): 'schedulers_dir', (48, 44, 48, 48): 'file'}, {}), '(schedulers_dir, file)', False, 'import os\n'), ((55, 12, 55, 69), 'importlib.import_module', 'importlib.import_module', ({(55, 36, 55, 68): "(namespace + '.' + scheduler_name)"}, {}), "(namespace + '.' + scheduler_name)", False, 'import importlib\n'), ((52, 41, 52, 60), 'os.path.isdir', 'os.path.isdir', ({(52, 55, 52, 59): 'path'}, {}), '(path)', False, 'import os\n')] |
m4ta1l/doit | doc/samples/pos.py | d1a1b7b3abc7641d977d3b78b580d97aea4e27ea | def task_pos_args():
def show_params(param1, pos):
print('param1 is: {0}'.format(param1))
for index, pos_arg in enumerate(pos):
print('positional-{0}: {1}'.format(index, pos_arg))
return {'actions':[(show_params,)],
'params':[{'name':'param1',
'short':'p',
'default':'default value'},
],
'pos_arg': 'pos',
'verbosity': 2,
}
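# Invocation sketch (assuming this sample lives in a dodo.py task file):
#     $ doit pos_args -p hello one two
# prints the parameter value followed by each positional argument.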
| [] |
vm6502q/ProjectQ | projectq/backends/_qracksim/_simulator_test.py | 1eac4b1f529551dfc1668443eba0c68dee54120b | # Copyright 2017 ProjectQ-Framework (www.projectq.ch)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Tests for projectq.backends._sim._simulator.py, using both the Python
and the C++ simulator as backends.
"""
import copy
import math
import cmath
import numpy
import pytest
import random
import scipy
import scipy.sparse
import scipy.sparse.linalg
from projectq import MainEngine
from projectq.cengines import (BasicEngine, BasicMapperEngine, DummyEngine,
LocalOptimizer, NotYetMeasuredError)
from projectq.ops import (All, Allocate, BasicGate, BasicMathGate, CNOT, C,
Command, H, Measure, QubitOperator, Rx, Ry, Rz, S,
TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap,
UniformlyControlledRy, UniformlyControlledRz)
from projectq.libs.math import (AddConstant,
AddConstantModN,
SubConstant,
SubConstantModN,
MultiplyByConstantModN)
from projectq.meta import Compute, Uncompute, Control, Dagger, LogicalQubitIDTag
from projectq.types import WeakQubitRef
from projectq.backends import Simulator
tolerance = 1e-6
def test_is_qrack_simulator_present():
_qracksim = pytest.importorskip("projectq.backends._qracksim._qracksim")
import projectq.backends._qracksim._qracksim as _
def get_available_simulators():
result = []
try:
test_is_qrack_simulator_present()
result.append("qrack_simulator_qengine")
result.append("qrack_simulator_qunit")
    except:
        # pytest.importorskip raises a Skipped outcome (a BaseException in
        # recent pytest) rather than ImportError, so a bare except is kept
        # here to detect that the Qrack bindings are unavailable.
        pass
return result
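# Per the parameter names below, the third QrackSimulator constructor
# argument (1 vs. 2) appears to select the Qrack layer (QEngine vs. QUnit);
# the fixture runs every test against both configurations.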
@pytest.fixture(params=get_available_simulators())
def sim(request):
if request.param == "qrack_simulator_qengine":
from projectq.backends._qracksim._qracksim import QrackSimulator as QrackSim
sim = Simulator()
sim._simulator = QrackSim(1, -1, 1)
elif request.param == "qrack_simulator_qunit":
from projectq.backends._qracksim._qracksim import QrackSimulator as QrackSim
sim = Simulator()
sim._simulator = QrackSim(1, -1, 2)
return sim
@pytest.fixture(params=["mapper", "no_mapper"])
def mapper(request):
"""
Adds a mapper which changes qubit ids by adding 1
"""
if request.param == "mapper":
class TrivialMapper(BasicMapperEngine):
def __init__(self):
BasicEngine.__init__(self)
self.current_mapping = dict()
def receive(self, command_list):
for cmd in command_list:
for qureg in cmd.all_qubits:
for qubit in qureg:
if qubit.id == -1:
continue
elif qubit.id not in self.current_mapping:
previous_map = self.current_mapping
previous_map[qubit.id] = qubit.id + 1
self.current_mapping = previous_map
self._send_cmd_with_mapped_ids(cmd)
return TrivialMapper()
if request.param == "no_mapper":
return None
class Mock1QubitGate(BasicGate):
def __init__(self):
BasicGate.__init__(self)
self.cnt = 0
@property
def matrix(self):
self.cnt += 1
return numpy.matrix([[0, 1],
[1, 0]])
class Mock6QubitGate(BasicGate):
def __init__(self):
BasicGate.__init__(self)
self.cnt = 0
@property
def matrix(self):
self.cnt += 1
return numpy.eye(2 ** 6)
class MockNoMatrixGate(BasicGate):
def __init__(self):
BasicGate.__init__(self)
self.cnt = 0
@property
def matrix(self):
self.cnt += 1
raise AttributeError
def test_simulator_is_available(sim):
backend = DummyEngine(save_commands=True)
eng = MainEngine(backend, [])
qubit = eng.allocate_qubit()
Measure | qubit
qubit[0].__del__()
assert len(backend.received_commands) == 3
# Test that allocate, measure, basic math, and deallocate are available.
for cmd in backend.received_commands:
assert sim.is_available(cmd)
new_cmd = backend.received_commands[-1]
new_cmd.gate = Mock6QubitGate()
assert not sim.is_available(new_cmd)
new_cmd.gate = MockNoMatrixGate()
assert not sim.is_available(new_cmd)
new_cmd.gate = Mock1QubitGate()
assert sim.is_available(new_cmd)
new_cmd = backend.received_commands[-2]
assert len(new_cmd.qubits) == 1
new_cmd.gate = AddConstantModN(1, 2)
assert sim.is_available(new_cmd)
new_cmd.gate = MultiplyByConstantModN(1, 2)
assert sim.is_available(new_cmd)
#new_cmd.gate = DivideByConstantModN(1, 2)
#assert sim.is_available(new_cmd)
def test_simulator_cheat(sim):
# cheat function should return a tuple
assert isinstance(sim.cheat(), tuple)
# first entry is the qubit mapping.
# should be empty:
assert len(sim.cheat()[0]) == 0
# state vector should only have 1 entry:
assert len(sim.cheat()[1]) == 1
eng = MainEngine(sim, [])
qubit = eng.allocate_qubit()
# one qubit has been allocated
assert len(sim.cheat()[0]) == 1
assert sim.cheat()[0][0] == 0
assert len(sim.cheat()[1]) == 2
assert 1. == pytest.approx(abs(sim.cheat()[1][0]))
qubit[0].__del__()
# should be empty:
assert len(sim.cheat()[0]) == 0
# state vector should only have 1 entry:
assert len(sim.cheat()[1]) == 1
def test_simulator_functional_measurement(sim):
eng = MainEngine(sim, [])
qubits = eng.allocate_qureg(5)
# entangle all qubits:
H | qubits[0]
for qb in qubits[1:]:
CNOT | (qubits[0], qb)
All(Measure) | qubits
bit_value_sum = sum([int(qubit) for qubit in qubits])
assert bit_value_sum == 0 or bit_value_sum == 5
def test_simulator_measure_mapped_qubit(sim):
eng = MainEngine(sim, [])
qb1 = WeakQubitRef(engine=eng, idx=1)
qb2 = WeakQubitRef(engine=eng, idx=2)
cmd0 = Command(engine=eng, gate=Allocate, qubits=([qb1],))
cmd1 = Command(engine=eng, gate=X, qubits=([qb1],))
cmd2 = Command(engine=eng, gate=Measure, qubits=([qb1],), controls=[],
tags=[LogicalQubitIDTag(2)])
with pytest.raises(NotYetMeasuredError):
int(qb1)
with pytest.raises(NotYetMeasuredError):
int(qb2)
eng.send([cmd0, cmd1, cmd2])
eng.flush()
with pytest.raises(NotYetMeasuredError):
int(qb1)
assert int(qb2) == 1
def test_simulator_kqubit_exception(sim):
m1 = Rx(0.3).matrix
m2 = Rx(0.8).matrix
m3 = Ry(0.1).matrix
m4 = Rz(0.9).matrix.dot(Ry(-0.1).matrix)
m = numpy.kron(m4, numpy.kron(m3, numpy.kron(m2, m1)))
class KQubitGate(BasicGate):
@property
def matrix(self):
return m
eng = MainEngine(sim, [])
qureg = eng.allocate_qureg(3)
with pytest.raises(Exception):
KQubitGate() | qureg
with pytest.raises(Exception):
H | qureg
def test_simulator_swap(sim):
eng = MainEngine(sim, [])
qubits1 = eng.allocate_qureg(1)
qubits2 = eng.allocate_qureg(1)
X | qubits1
Swap | (qubits1, qubits2)
All(Measure) | qubits1
All(Measure) | qubits2
assert (int(qubits1[0]) == 0) and (int(qubits2[0]) == 1)
SqrtSwap | (qubits1, qubits2)
SqrtSwap | (qubits1, qubits2)
All(Measure) | qubits1
All(Measure) | qubits2
assert (int(qubits1[0]) == 1) and (int(qubits2[0]) == 0)
def test_simulator_math(sim):
eng = MainEngine(sim, [])
qubits = eng.allocate_qureg(8)
    AddConstant(1) | qubits
All(Measure) | qubits
value = 0
for i in range(len(qubits)):
value += int(qubits[i]) << i
assert value == 1
    AddConstantModN(10, 256) | qubits
All(Measure) | qubits
value = 0
for i in range(len(qubits)):
value += int(qubits[i]) << i
assert value == 11
controls = eng.allocate_qureg(1)
# Control is off
C(AddConstantModN(10, 256)) | (controls, qubits)
All(Measure) | qubits
value = 0
for i in range(len(qubits)):
value += int(qubits[i]) << i
assert value == 11
# Turn control on
X | controls
C(AddConstantModN(10, 256)) | (controls, qubits)
All(Measure) | qubits
value = 0
for i in range(len(qubits)):
value += int(qubits[i]) << i
assert value == 21
    SubConstant(5) | qubits
All(Measure) | qubits
value = 0
for i in range(len(qubits)):
value += int(qubits[i]) << i
assert value == 16
C(SubConstantModN(10, 256)) | (controls, qubits)
All(Measure) | qubits
value = 0
for i in range(len(qubits)):
value += int(qubits[i]) << i
assert value == 6
# Turn control off
X | controls
C(SubConstantModN(10, 256)) | (controls, qubits)
All(Measure) | qubits
value = 0
for i in range(len(qubits)):
value += int(qubits[i]) << i
assert value == 6
    MultiplyByConstantModN(2, 256) | qubits
All(Measure) | qubits
value = 0
for i in range(len(qubits)):
value += int(qubits[i]) << i
assert value == 12
# Control is off
C(MultiplyByConstantModN(2, 256)) | (controls, qubits)
All(Measure) | qubits
value = 0
for i in range(len(qubits)):
value += int(qubits[i]) << i
assert value == 12
# Turn control on
X | controls
C(MultiplyByConstantModN(10, 256)) | (controls, qubits)
All(Measure) | qubits
value = 0
for i in range(len(qubits)):
value += int(qubits[i]) << i
assert value == 120
def test_simulator_probability(sim, mapper):
engine_list = [LocalOptimizer()]
if mapper is not None:
engine_list.append(mapper)
eng = MainEngine(sim, engine_list=engine_list)
qubits = eng.allocate_qureg(6)
All(H) | qubits
eng.flush()
bits = [0, 0, 1, 0, 1, 0]
for i in range(6):
assert (eng.backend.get_probability(bits[:i], qubits[:i]) ==
pytest.approx(0.5**i))
extra_qubit = eng.allocate_qubit()
with pytest.raises(RuntimeError):
eng.backend.get_probability([0], extra_qubit)
del extra_qubit
All(H) | qubits
Ry(2 * math.acos(math.sqrt(0.3))) | qubits[0]
eng.flush()
assert eng.backend.get_probability([0], [qubits[0]]) == pytest.approx(0.3)
Ry(2 * math.acos(math.sqrt(0.4))) | qubits[2]
eng.flush()
assert eng.backend.get_probability([0], [qubits[2]]) == pytest.approx(0.4)
assert (numpy.isclose(0.12, eng.backend.get_probability([0, 0], qubits[:3:2]), rtol=tolerance, atol=tolerance))
assert (numpy.isclose(0.18, eng.backend.get_probability([0, 1], qubits[:3:2]), rtol=tolerance, atol=tolerance))
assert (numpy.isclose(0.28, eng.backend.get_probability([1, 0], qubits[:3:2]), rtol=tolerance, atol=tolerance))
All(Measure) | qubits
def test_simulator_amplitude(sim, mapper):
engine_list = [LocalOptimizer()]
if mapper is not None:
engine_list.append(mapper)
eng = MainEngine(sim, engine_list=engine_list)
qubits = eng.allocate_qureg(6)
All(X) | qubits
All(H) | qubits
eng.flush()
bits = [0, 0, 1, 0, 1, 0]
polR, polPhi = cmath.polar(eng.backend.get_amplitude(bits, qubits))
while polPhi < 0:
polPhi += 2 * math.pi
assert polR == pytest.approx(1. / 8.)
bits = [0, 0, 0, 0, 1, 0]
polR2, polPhi2 = cmath.polar(eng.backend.get_amplitude(bits, qubits))
while polPhi2 < math.pi:
polPhi2 += 2 * math.pi
assert polR2 == pytest.approx(polR)
assert (polPhi2 - math.pi) == pytest.approx(polPhi)
bits = [0, 1, 1, 0, 1, 0]
polR3, polPhi3 = cmath.polar(eng.backend.get_amplitude(bits, qubits))
while polPhi3 < math.pi:
polPhi3 += 2 * math.pi
assert polR3 == pytest.approx(polR)
assert (polPhi3 - math.pi) == pytest.approx(polPhi)
All(H) | qubits
All(X) | qubits
Ry(2 * math.acos(0.3)) | qubits[0]
eng.flush()
bits = [0] * 6
polR, polPhi = cmath.polar(eng.backend.get_amplitude(bits, qubits))
assert polR == pytest.approx(0.3)
bits[0] = 1
polR, polPhi = cmath.polar(eng.backend.get_amplitude(bits, qubits))
assert (polR ==
pytest.approx(math.sqrt(0.91)))
All(Measure) | qubits
# raises if not all qubits are in the list:
with pytest.raises(RuntimeError):
eng.backend.get_amplitude(bits, qubits[:-1])
# doesn't just check for length:
with pytest.raises(RuntimeError):
eng.backend.get_amplitude(bits, qubits[:-1] + [qubits[0]])
extra_qubit = eng.allocate_qubit()
eng.flush()
# there is a new qubit now!
with pytest.raises(RuntimeError):
eng.backend.get_amplitude(bits, qubits)
def test_simulator_set_wavefunction(sim, mapper):
engine_list = [LocalOptimizer()]
if mapper is not None:
engine_list.append(mapper)
eng = MainEngine(sim, engine_list=engine_list)
qubits = eng.allocate_qureg(2)
wf = [0., 0., math.sqrt(0.2), math.sqrt(0.8)]
with pytest.raises(RuntimeError):
eng.backend.set_wavefunction(wf, qubits)
eng.flush()
eng.backend.set_wavefunction(wf, qubits)
assert pytest.approx(eng.backend.get_probability('1', [qubits[0]])) == .8
assert pytest.approx(eng.backend.get_probability('01', qubits)) == .2
assert pytest.approx(eng.backend.get_probability('1', [qubits[1]])) == 1.
All(Measure) | qubits
def test_simulator_set_wavefunction_always_complex(sim):
""" Checks that wavefunction is always complex """
eng = MainEngine(sim)
qubit = eng.allocate_qubit()
eng.flush()
wf = [1., 0]
eng.backend.set_wavefunction(wf, qubit)
Y | qubit
eng.flush()
amplitude = eng.backend.get_amplitude('1', qubit)
assert amplitude == pytest.approx(1j) or amplitude == pytest.approx(-1j)
def test_simulator_collapse_wavefunction(sim, mapper):
engine_list = [LocalOptimizer()]
if mapper is not None:
engine_list.append(mapper)
eng = MainEngine(sim, engine_list=engine_list)
qubits = eng.allocate_qureg(4)
# unknown qubits: raises
with pytest.raises(RuntimeError):
eng.backend.collapse_wavefunction(qubits, [0] * 4)
eng.flush()
eng.backend.collapse_wavefunction(qubits, [0] * 4)
assert pytest.approx(eng.backend.get_probability([0] * 4, qubits)) == 1.
All(H) | qubits[1:]
eng.flush()
assert pytest.approx(eng.backend.get_probability([0] * 4, qubits)) == .125
# impossible outcome: raises
with pytest.raises(RuntimeError):
eng.backend.collapse_wavefunction(qubits, [1] + [0] * 3)
eng.backend.collapse_wavefunction(qubits[:-1], [0, 1, 0])
probability = eng.backend.get_probability([0, 1, 0, 1], qubits)
assert probability == pytest.approx(.5)
eng.backend.set_wavefunction([1.] + [0.] * 15, qubits)
H | qubits[0]
CNOT | (qubits[0], qubits[1])
eng.flush()
eng.backend.collapse_wavefunction([qubits[0]], [1])
probability = eng.backend.get_probability([1, 1], qubits[0:2])
assert probability == pytest.approx(1.)
def test_simulator_no_uncompute_exception(sim):
eng = MainEngine(sim, [])
qubit = eng.allocate_qubit()
H | qubit
with pytest.raises(RuntimeError):
qubit[0].__del__()
# If you wanted to keep using the qubit, you shouldn't have deleted it.
assert qubit[0].id == -1
def test_simulator_functional_entangle(sim):
eng = MainEngine(sim, [])
qubits = eng.allocate_qureg(5)
# entangle all qubits:
H | qubits[0]
for qb in qubits[1:]:
CNOT | (qubits[0], qb)
# check the state vector:
assert .5 == pytest.approx(abs(sim.cheat()[1][0])**2, rel=tolerance, abs=tolerance)
assert .5 == pytest.approx(abs(sim.cheat()[1][31])**2, rel=tolerance, abs=tolerance)
for i in range(1, 31):
assert 0. == pytest.approx(abs(sim.cheat()[1][i]), rel=tolerance, abs=tolerance)
# unentangle all except the first 2
for qb in qubits[2:]:
CNOT | (qubits[0], qb)
# entangle using Toffolis
for qb in qubits[2:]:
Toffoli | (qubits[0], qubits[1], qb)
# check the state vector:
assert .5 == pytest.approx(abs(sim.cheat()[1][0])**2, rel=tolerance, abs=tolerance)
assert .5 == pytest.approx(abs(sim.cheat()[1][31])**2, rel=tolerance, abs=tolerance)
for i in range(1, 31):
assert 0. == pytest.approx(abs(sim.cheat()[1][i]), rel=tolerance, abs=tolerance)
# uncompute using multi-controlled NOTs
with Control(eng, qubits[0:-1]):
X | qubits[-1]
with Control(eng, qubits[0:-2]):
X | qubits[-2]
with Control(eng, qubits[0:-3]):
X | qubits[-3]
CNOT | (qubits[0], qubits[1])
H | qubits[0]
# check the state vector:
assert 1. == pytest.approx(abs(sim.cheat()[1][0])**2, rel=tolerance, abs=tolerance)
for i in range(1, 32):
assert 0. == pytest.approx(abs(sim.cheat()[1][i]), rel=tolerance, abs=tolerance)
All(Measure) | qubits
def test_simulator_convert_logical_to_mapped_qubits(sim):
mapper = BasicMapperEngine()
def receive(command_list):
pass
mapper.receive = receive
eng = MainEngine(sim, [mapper])
qubit0 = eng.allocate_qubit()
qubit1 = eng.allocate_qubit()
mapper.current_mapping = {qubit0[0].id: qubit1[0].id,
qubit1[0].id: qubit0[0].id}
assert (sim._convert_logical_to_mapped_qureg(qubit0 + qubit1) ==
qubit1 + qubit0)
def slow_implementation(angles, control_qubits, target_qubit, eng, gate_class):
"""
Assumption is that control_qubits[0] is lowest order bit
We apply angles[0] to state |0>
"""
assert len(angles) == 2**len(control_qubits)
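    # Iterate over every basis state of the control register: for basis
    # state `index`, temporarily flip the controls that should read 0 so
    # the controlled rotation below fires exactly for that state.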
for index in range(2**len(control_qubits)):
with Compute(eng):
for bit_pos in range(len(control_qubits)):
if not (index >> bit_pos) & 1:
X | control_qubits[bit_pos]
with Control(eng, control_qubits):
gate_class(angles[index]) | target_qubit
Uncompute(eng)
@pytest.mark.parametrize("gate_classes", [(Ry, UniformlyControlledRy),
(Rz, UniformlyControlledRz)])
def test_uniformly_controlled_r(sim, gate_classes):
n = 2
random_angles = [3.0, 0.8, 1.2, 0.7]
basis_state_index = 2
basis_state = [0] * 2**(n+1)
basis_state[basis_state_index] = 1.
correct_eng = MainEngine(backend=Simulator())
test_eng = MainEngine(backend=sim)
correct_sim = correct_eng.backend
correct_qb = correct_eng.allocate_qubit()
correct_ctrl_qureg = correct_eng.allocate_qureg(n)
correct_eng.flush()
test_sim = test_eng.backend
test_qb = test_eng.allocate_qubit()
test_ctrl_qureg = test_eng.allocate_qureg(n)
test_eng.flush()
correct_sim.set_wavefunction(basis_state, correct_qb + correct_ctrl_qureg)
test_sim.set_wavefunction(basis_state, test_qb + test_ctrl_qureg)
test_eng.flush()
correct_eng.flush()
gate_classes[1](random_angles) | (test_ctrl_qureg, test_qb)
slow_implementation(angles=random_angles,
control_qubits=correct_ctrl_qureg,
target_qubit=correct_qb,
eng=correct_eng,
gate_class=gate_classes[0])
test_eng.flush()
correct_eng.flush()
for fstate in range(2**(n+1)):
binary_state = format(fstate, '0' + str(n+1) + 'b')
test = test_sim.get_amplitude(binary_state,
test_qb + test_ctrl_qureg)
correct = correct_sim.get_amplitude(binary_state, correct_qb +
correct_ctrl_qureg)
print(test, "==", correct)
assert correct == pytest.approx(test, rel=tolerance, abs=tolerance)
All(Measure) | test_qb + test_ctrl_qureg
All(Measure) | correct_qb + correct_ctrl_qureg
test_eng.flush(deallocate_qubits=True)
correct_eng.flush(deallocate_qubits=True)
def test_qubit_operator(sim):
test_eng = MainEngine(sim)
test_qureg = test_eng.allocate_qureg(1)
test_eng.flush()
qubit_op = QubitOperator("X0 X1", 1)
with pytest.raises(Exception):
sim.get_expectation_value(qubit_op, test_qureg)
test_eng.backend.set_wavefunction([1, 0],
test_qureg)
test_eng.flush()
qubit_op = QubitOperator("X0", 1)
qubit_op | test_qureg[0]
test_eng.flush()
amplitude = test_eng.backend.get_amplitude('0', test_qureg)
assert amplitude == pytest.approx(0.)
amplitude = test_eng.backend.get_amplitude('1', test_qureg)
assert amplitude == pytest.approx(1.)
def test_get_expectation_value(sim):
num_qubits = 2
test_eng = MainEngine(sim)
test_qureg = test_eng.allocate_qureg(num_qubits)
test_eng.flush()
qubit_op = QubitOperator("X0 X1 X2", 1)
with pytest.raises(Exception):
sim.get_expectation_value(qubit_op, test_qureg)
qubit_op = QubitOperator("X0", 1)
test_eng.backend.set_wavefunction([1 / math.sqrt(2), 1 / math.sqrt(2), 0, 0],
test_qureg)
test_eng.flush()
assert(sim.get_expectation_value(qubit_op, test_qureg) == pytest.approx(1, rel=tolerance, abs=tolerance))
test_eng.backend.set_wavefunction([1 / math.sqrt(2), -1 / math.sqrt(2), 0, 0],
test_qureg)
test_eng.flush()
assert(sim.get_expectation_value(qubit_op, test_qureg) == pytest.approx(-1, rel=tolerance, abs=tolerance))
qubit_op = QubitOperator("Y0", 1)
test_eng.backend.set_wavefunction([1 / math.sqrt(2), 1j / math.sqrt(2), 0, 0],
test_qureg)
test_eng.flush()
assert(sim.get_expectation_value(qubit_op, test_qureg) == pytest.approx(1, rel=tolerance, abs=tolerance))
test_eng.backend.set_wavefunction([1 / math.sqrt(2), -1j / math.sqrt(2), 0, 0],
test_qureg)
test_eng.flush()
assert(sim.get_expectation_value(qubit_op, test_qureg) == pytest.approx(-1, rel=tolerance, abs=tolerance))
qubit_op = QubitOperator("Z0", 1)
test_eng.backend.set_wavefunction([1, 0, 0, 0],
test_qureg)
test_eng.flush()
assert(sim.get_expectation_value(qubit_op, test_qureg) == pytest.approx(1, rel=tolerance, abs=tolerance))
test_eng.backend.set_wavefunction([0, 1, 0, 0],
test_qureg)
test_eng.flush()
assert(sim.get_expectation_value(qubit_op, test_qureg) == pytest.approx(-1, rel=tolerance, abs=tolerance))
qubit_op = QubitOperator("Z0", 0.25)
test_eng.backend.set_wavefunction([1, 0, 0, 0],
test_qureg)
test_eng.flush()
assert(sim.get_expectation_value(qubit_op, test_qureg) == pytest.approx(0.25, rel=tolerance, abs=tolerance))
test_eng.backend.set_wavefunction([0, 1, 0, 0],
test_qureg)
test_eng.flush()
assert(sim.get_expectation_value(qubit_op, test_qureg) == pytest.approx(-0.25, rel=tolerance, abs=tolerance))
qubit_op = QubitOperator("Z0 Z1", 1)
test_eng.backend.set_wavefunction([1, 0, 0, 0],
test_qureg)
test_eng.flush()
assert(sim.get_expectation_value(qubit_op, test_qureg) == pytest.approx(1, rel=tolerance, abs=tolerance))
X | test_qureg[0]
test_eng.flush()
assert(sim.get_expectation_value(qubit_op, test_qureg) == pytest.approx(-1, rel=tolerance, abs=tolerance))
X | test_qureg[1]
test_eng.flush()
assert(sim.get_expectation_value(qubit_op, test_qureg) == pytest.approx(1, rel=tolerance, abs=tolerance))
X | test_qureg[0]
test_eng.flush()
assert(sim.get_expectation_value(qubit_op, test_qureg) == pytest.approx(-1, rel=tolerance, abs=tolerance))
| [((80, 1, 80, 47), 'pytest.fixture', 'pytest.fixture', (), '', False, 'import pytest\n'), ((593, 1, 594, 71), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(593, 25, 593, 39): '"""gate_classes"""', (593, 41, 594, 70): '[(Ry, UniformlyControlledRy), (Rz, UniformlyControlledRz)]'}, {}), "('gate_classes', [(Ry, UniformlyControlledRy), (Rz,\n UniformlyControlledRz)])", False, 'import pytest\n'), ((52, 16, 52, 76), 'pytest.importorskip', 'pytest.importorskip', ({(52, 36, 52, 75): '"""projectq.backends._qracksim._qracksim"""'}, {}), "('projectq.backends._qracksim._qracksim')", False, 'import pytest\n'), ((144, 14, 144, 45), 'projectq.cengines.DummyEngine', 'DummyEngine', (), '', False, 'from projectq.cengines import BasicEngine, BasicMapperEngine, DummyEngine, LocalOptimizer, NotYetMeasuredError\n'), ((145, 10, 145, 33), 'projectq.MainEngine', 'MainEngine', ({(145, 21, 145, 28): 'backend', (145, 30, 145, 32): '[]'}, {}), '(backend, [])', False, 'from projectq import MainEngine\n'), ((169, 19, 169, 40), 'projectq.libs.math.AddConstantModN', 'AddConstantModN', ({(169, 35, 169, 36): '1', (169, 38, 169, 39): '2'}, {}), '(1, 2)', False, 'from projectq.libs.math import AddConstant, AddConstantModN, SubConstant, SubConstantModN, MultiplyByConstantModN\n'), ((172, 19, 172, 47), 'projectq.libs.math.MultiplyByConstantModN', 'MultiplyByConstantModN', ({(172, 42, 172, 43): '1', (172, 45, 172, 46): '2'}, {}), '(1, 2)', False, 'from projectq.libs.math import AddConstant, AddConstantModN, SubConstant, SubConstantModN, MultiplyByConstantModN\n'), ((188, 10, 188, 29), 'projectq.MainEngine', 'MainEngine', ({(188, 21, 188, 24): 'sim', (188, 26, 188, 28): '[]'}, {}), '(sim, [])', False, 'from projectq import MainEngine\n'), ((205, 10, 205, 29), 'projectq.MainEngine', 'MainEngine', ({(205, 21, 205, 24): 'sim', (205, 26, 205, 28): '[]'}, {}), '(sim, [])', False, 'from projectq import MainEngine\n'), ((219, 10, 219, 29), 'projectq.MainEngine', 'MainEngine', ({(219, 21, 219, 24): 'sim', (219, 26, 219, 28): '[]'}, {}), '(sim, [])', False, 'from projectq import MainEngine\n'), ((220, 10, 220, 41), 'projectq.types.WeakQubitRef', 'WeakQubitRef', (), '', False, 'from projectq.types import WeakQubitRef\n'), ((221, 10, 221, 41), 'projectq.types.WeakQubitRef', 'WeakQubitRef', (), '', False, 'from projectq.types import WeakQubitRef\n'), ((222, 11, 222, 62), 'projectq.ops.Command', 'Command', (), '', False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, UniformlyControlledRy, UniformlyControlledRz\n'), ((223, 11, 223, 55), 'projectq.ops.Command', 'Command', (), '', False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, UniformlyControlledRy, UniformlyControlledRz\n'), ((249, 10, 249, 29), 'projectq.MainEngine', 'MainEngine', ({(249, 21, 249, 24): 'sim', (249, 26, 249, 28): '[]'}, {}), '(sim, [])', False, 'from projectq import MainEngine\n'), ((258, 10, 258, 29), 'projectq.MainEngine', 'MainEngine', ({(258, 21, 258, 24): 'sim', (258, 26, 258, 28): '[]'}, {}), '(sim, [])', False, 'from projectq import MainEngine\n'), ((277, 10, 277, 29), 'projectq.MainEngine', 'MainEngine', ({(277, 21, 277, 24): 'sim', (277, 26, 277, 28): '[]'}, {}), '(sim, [])', False, 'from projectq import MainEngine\n'), ((369, 10, 369, 50), 'projectq.MainEngine', 'MainEngine', (), '', False, 
'from projectq import MainEngine\n'), ((398, 10, 398, 50), 'projectq.MainEngine', 'MainEngine', (), '', False, 'from projectq import MainEngine\n'), ((449, 10, 449, 50), 'projectq.MainEngine', 'MainEngine', (), '', False, 'from projectq import MainEngine\n'), ((464, 10, 464, 25), 'projectq.MainEngine', 'MainEngine', ({(464, 21, 464, 24): 'sim'}, {}), '(sim)', False, 'from projectq import MainEngine\n'), ((479, 10, 479, 50), 'projectq.MainEngine', 'MainEngine', (), '', False, 'from projectq import MainEngine\n'), ((506, 10, 506, 29), 'projectq.MainEngine', 'MainEngine', ({(506, 21, 506, 24): 'sim', (506, 26, 506, 28): '[]'}, {}), '(sim, [])', False, 'from projectq import MainEngine\n'), ((516, 10, 516, 29), 'projectq.MainEngine', 'MainEngine', ({(516, 21, 516, 24): 'sim', (516, 26, 516, 28): '[]'}, {}), '(sim, [])', False, 'from projectq import MainEngine\n'), ((562, 13, 562, 32), 'projectq.cengines.BasicMapperEngine', 'BasicMapperEngine', ({}, {}), '()', False, 'from projectq.cengines import BasicEngine, BasicMapperEngine, DummyEngine, LocalOptimizer, NotYetMeasuredError\n'), ((568, 10, 568, 35), 'projectq.MainEngine', 'MainEngine', ({(568, 21, 568, 24): 'sim', (568, 26, 568, 34): '[mapper]'}, {}), '(sim, [mapper])', False, 'from projectq import MainEngine\n'), ((604, 15, 604, 38), 'projectq.MainEngine', 'MainEngine', (), '', False, 'from projectq import MainEngine\n'), ((645, 15, 645, 30), 'projectq.MainEngine', 'MainEngine', ({(645, 26, 645, 29): 'sim'}, {}), '(sim)', False, 'from projectq import MainEngine\n'), ((649, 15, 649, 40), 'projectq.ops.QubitOperator', 'QubitOperator', ({(649, 29, 649, 36): '"""X0 X1"""', (649, 38, 649, 39): '1'}, {}), "('X0 X1', 1)", False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, UniformlyControlledRy, UniformlyControlledRz\n'), ((656, 15, 656, 37), 'projectq.ops.QubitOperator', 'QubitOperator', ({(656, 29, 656, 33): '"""X0"""', (656, 35, 656, 36): '1'}, {}), "('X0', 1)", False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, UniformlyControlledRy, UniformlyControlledRz\n'), ((667, 15, 667, 30), 'projectq.MainEngine', 'MainEngine', ({(667, 26, 667, 29): 'sim'}, {}), '(sim)', False, 'from projectq import MainEngine\n'), ((671, 15, 671, 43), 'projectq.ops.QubitOperator', 'QubitOperator', ({(671, 29, 671, 39): '"""X0 X1 X2"""', (671, 41, 671, 42): '1'}, {}), "('X0 X1 X2', 1)", False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, UniformlyControlledRy, UniformlyControlledRz\n'), ((675, 15, 675, 37), 'projectq.ops.QubitOperator', 'QubitOperator', ({(675, 29, 675, 33): '"""X0"""', (675, 35, 675, 36): '1'}, {}), "('X0', 1)", False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, UniformlyControlledRy, UniformlyControlledRz\n'), ((685, 15, 685, 37), 'projectq.ops.QubitOperator', 'QubitOperator', ({(685, 29, 685, 33): '"""Y0"""', (685, 35, 685, 36): '1'}, {}), "('Y0', 1)", False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, 
UniformlyControlledRy, UniformlyControlledRz\n'), ((695, 15, 695, 37), 'projectq.ops.QubitOperator', 'QubitOperator', ({(695, 29, 695, 33): '"""Z0"""', (695, 35, 695, 36): '1'}, {}), "('Z0', 1)", False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, UniformlyControlledRy, UniformlyControlledRz\n'), ((705, 15, 705, 40), 'projectq.ops.QubitOperator', 'QubitOperator', ({(705, 29, 705, 33): '"""Z0"""', (705, 35, 705, 39): '0.25'}, {}), "('Z0', 0.25)", False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, UniformlyControlledRy, UniformlyControlledRz\n'), ((715, 15, 715, 40), 'projectq.ops.QubitOperator', 'QubitOperator', ({(715, 29, 715, 36): '"""Z0 Z1"""', (715, 38, 715, 39): '1'}, {}), "('Z0 Z1', 1)", False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, UniformlyControlledRy, UniformlyControlledRz\n'), ((71, 14, 71, 25), 'projectq.backends.Simulator', 'Simulator', ({}, {}), '()', False, 'from projectq.backends import Simulator\n'), ((72, 25, 72, 43), 'projectq.backends._qracksim._qracksim.QrackSimulator', 'QrackSim', ({(72, 34, 72, 35): '1', (72, 37, 72, 39): '-1', (72, 41, 72, 42): '1'}, {}), '(1, -1, 1)', True, 'from projectq.backends._qracksim._qracksim import QrackSimulator as QrackSim\n'), ((111, 12, 111, 36), 'projectq.ops.BasicGate.__init__', 'BasicGate.__init__', ({(111, 31, 111, 35): 'self'}, {}), '(self)', False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, UniformlyControlledRy, UniformlyControlledRz\n'), ((117, 19, 118, 41), 'numpy.matrix', 'numpy.matrix', ({(117, 32, 118, 40): '[[0, 1], [1, 0]]'}, {}), '([[0, 1], [1, 0]])', False, 'import numpy\n'), ((123, 12, 123, 36), 'projectq.ops.BasicGate.__init__', 'BasicGate.__init__', ({(123, 31, 123, 35): 'self'}, {}), '(self)', False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, UniformlyControlledRy, UniformlyControlledRz\n'), ((129, 19, 129, 36), 'numpy.eye', 'numpy.eye', ({(129, 29, 129, 35): '(2 ** 6)'}, {}), '(2 ** 6)', False, 'import numpy\n'), ((134, 12, 134, 36), 'projectq.ops.BasicGate.__init__', 'BasicGate.__init__', ({(134, 31, 134, 35): 'self'}, {}), '(self)', False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, UniformlyControlledRy, UniformlyControlledRz\n'), ((212, 4, 212, 16), 'projectq.ops.All', 'All', ({(212, 8, 212, 15): 'Measure'}, {}), '(Measure)', False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, UniformlyControlledRy, UniformlyControlledRz\n'), ((226, 9, 226, 43), 'pytest.raises', 'pytest.raises', ({(226, 23, 226, 42): 'NotYetMeasuredError'}, {}), '(NotYetMeasuredError)', False, 'import pytest\n'), ((228, 9, 228, 43), 'pytest.raises', 'pytest.raises', ({(228, 23, 228, 42): 'NotYetMeasuredError'}, {}), 
'(NotYetMeasuredError)', False, 'import pytest\n'), ((232, 9, 232, 43), 'pytest.raises', 'pytest.raises', ({(232, 23, 232, 42): 'NotYetMeasuredError'}, {}), '(NotYetMeasuredError)', False, 'import pytest\n'), ((238, 9, 238, 16), 'projectq.ops.Rx', 'Rx', ({(238, 12, 238, 15): '(0.3)'}, {}), '(0.3)', False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, UniformlyControlledRy, UniformlyControlledRz\n'), ((239, 9, 239, 16), 'projectq.ops.Rx', 'Rx', ({(239, 12, 239, 15): '(0.8)'}, {}), '(0.8)', False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, UniformlyControlledRy, UniformlyControlledRz\n'), ((240, 9, 240, 16), 'projectq.ops.Ry', 'Ry', ({(240, 12, 240, 15): '(0.1)'}, {}), '(0.1)', False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, UniformlyControlledRy, UniformlyControlledRz\n'), ((251, 9, 251, 33), 'pytest.raises', 'pytest.raises', ({(251, 23, 251, 32): 'Exception'}, {}), '(Exception)', False, 'import pytest\n'), ((253, 9, 253, 33), 'pytest.raises', 'pytest.raises', ({(253, 23, 253, 32): 'Exception'}, {}), '(Exception)', False, 'import pytest\n'), ((265, 4, 265, 16), 'projectq.ops.All', 'All', ({(265, 8, 265, 15): 'Measure'}, {}), '(Measure)', False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, UniformlyControlledRy, UniformlyControlledRz\n'), ((266, 4, 266, 16), 'projectq.ops.All', 'All', ({(266, 8, 266, 15): 'Measure'}, {}), '(Measure)', False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, UniformlyControlledRy, UniformlyControlledRz\n'), ((271, 4, 271, 16), 'projectq.ops.All', 'All', ({(271, 8, 271, 15): 'Measure'}, {}), '(Measure)', False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, UniformlyControlledRy, UniformlyControlledRz\n'), ((272, 4, 272, 16), 'projectq.ops.All', 'All', ({(272, 8, 272, 15): 'Measure'}, {}), '(Measure)', False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, UniformlyControlledRy, UniformlyControlledRz\n'), ((280, 4, 280, 18), 'projectq.libs.math.AddConstant', 'AddConstant', ({(280, 16, 280, 17): '(1)'}, {}), '(1)', False, 'from projectq.libs.math import AddConstant, AddConstantModN, SubConstant, SubConstantModN, MultiplyByConstantModN\n'), ((281, 4, 281, 16), 'projectq.ops.All', 'All', ({(281, 8, 281, 15): 'Measure'}, {}), '(Measure)', False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, UniformlyControlledRy, UniformlyControlledRz\n'), ((287, 4, 287, 28), 'projectq.libs.math.AddConstantModN', 'AddConstantModN', ({(287, 20, 287, 22): '(10)', (287, 24, 287, 27): '(256)'}, {}), '(10, 256)', False, 'from projectq.libs.math import AddConstant, 
AddConstantModN, SubConstant, SubConstantModN, MultiplyByConstantModN\n'), ((288, 4, 288, 16), 'projectq.ops.All', 'All', ({(288, 8, 288, 15): 'Measure'}, {}), '(Measure)', False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, UniformlyControlledRy, UniformlyControlledRz\n'), ((298, 4, 298, 16), 'projectq.ops.All', 'All', ({(298, 8, 298, 15): 'Measure'}, {}), '(Measure)', False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, UniformlyControlledRy, UniformlyControlledRz\n'), ((308, 4, 308, 16), 'projectq.ops.All', 'All', ({(308, 8, 308, 15): 'Measure'}, {}), '(Measure)', False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, UniformlyControlledRy, UniformlyControlledRz\n'), ((314, 4, 314, 18), 'projectq.libs.math.SubConstant', 'SubConstant', ({(314, 16, 314, 17): '(5)'}, {}), '(5)', False, 'from projectq.libs.math import AddConstant, AddConstantModN, SubConstant, SubConstantModN, MultiplyByConstantModN\n'), ((315, 4, 315, 16), 'projectq.ops.All', 'All', ({(315, 8, 315, 15): 'Measure'}, {}), '(Measure)', False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, UniformlyControlledRy, UniformlyControlledRz\n'), ((322, 4, 322, 16), 'projectq.ops.All', 'All', ({(322, 8, 322, 15): 'Measure'}, {}), '(Measure)', False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, UniformlyControlledRy, UniformlyControlledRz\n'), ((332, 4, 332, 16), 'projectq.ops.All', 'All', ({(332, 8, 332, 15): 'Measure'}, {}), '(Measure)', False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, UniformlyControlledRy, UniformlyControlledRz\n'), ((338, 4, 338, 34), 'projectq.libs.math.MultiplyByConstantModN', 'MultiplyByConstantModN', ({(338, 27, 338, 28): '(2)', (338, 30, 338, 33): '(256)'}, {}), '(2, 256)', False, 'from projectq.libs.math import AddConstant, AddConstantModN, SubConstant, SubConstantModN, MultiplyByConstantModN\n'), ((339, 4, 339, 16), 'projectq.ops.All', 'All', ({(339, 8, 339, 15): 'Measure'}, {}), '(Measure)', False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, UniformlyControlledRy, UniformlyControlledRz\n'), ((348, 4, 348, 16), 'projectq.ops.All', 'All', ({(348, 8, 348, 15): 'Measure'}, {}), '(Measure)', False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, UniformlyControlledRy, UniformlyControlledRz\n'), ((358, 4, 358, 16), 'projectq.ops.All', 'All', ({(358, 8, 358, 15): 'Measure'}, {}), '(Measure)', False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, 
UniformlyControlledRy, UniformlyControlledRz\n'), ((366, 19, 366, 35), 'projectq.cengines.LocalOptimizer', 'LocalOptimizer', ({}, {}), '()', False, 'from projectq.cengines import BasicEngine, BasicMapperEngine, DummyEngine, LocalOptimizer, NotYetMeasuredError\n'), ((371, 4, 371, 10), 'projectq.ops.All', 'All', ({(371, 8, 371, 9): 'H'}, {}), '(H)', False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, UniformlyControlledRy, UniformlyControlledRz\n'), ((378, 9, 378, 36), 'pytest.raises', 'pytest.raises', ({(378, 23, 378, 35): 'RuntimeError'}, {}), '(RuntimeError)', False, 'import pytest\n'), ((381, 4, 381, 10), 'projectq.ops.All', 'All', ({(381, 8, 381, 9): 'H'}, {}), '(H)', False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, UniformlyControlledRy, UniformlyControlledRz\n'), ((384, 60, 384, 78), 'pytest.approx', 'pytest.approx', ({(384, 74, 384, 77): '(0.3)'}, {}), '(0.3)', False, 'import pytest\n'), ((387, 60, 387, 78), 'pytest.approx', 'pytest.approx', ({(387, 74, 387, 77): '(0.4)'}, {}), '(0.4)', False, 'import pytest\n'), ((391, 4, 391, 16), 'projectq.ops.All', 'All', ({(391, 8, 391, 15): 'Measure'}, {}), '(Measure)', False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, UniformlyControlledRy, UniformlyControlledRz\n'), ((395, 19, 395, 35), 'projectq.cengines.LocalOptimizer', 'LocalOptimizer', ({}, {}), '()', False, 'from projectq.cengines import BasicEngine, BasicMapperEngine, DummyEngine, LocalOptimizer, NotYetMeasuredError\n'), ((400, 4, 400, 10), 'projectq.ops.All', 'All', ({(400, 8, 400, 9): 'X'}, {}), '(X)', False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, UniformlyControlledRy, UniformlyControlledRz\n'), ((401, 4, 401, 10), 'projectq.ops.All', 'All', ({(401, 8, 401, 9): 'H'}, {}), '(H)', False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, UniformlyControlledRy, UniformlyControlledRz\n'), ((407, 19, 407, 41), 'pytest.approx', 'pytest.approx', ({(407, 33, 407, 40): '(1.0 / 8.0)'}, {}), '(1.0 / 8.0)', False, 'import pytest\n'), ((412, 20, 412, 39), 'pytest.approx', 'pytest.approx', ({(412, 34, 412, 38): 'polR'}, {}), '(polR)', False, 'import pytest\n'), ((413, 34, 413, 55), 'pytest.approx', 'pytest.approx', ({(413, 48, 413, 54): 'polPhi'}, {}), '(polPhi)', False, 'import pytest\n'), ((418, 20, 418, 39), 'pytest.approx', 'pytest.approx', ({(418, 34, 418, 38): 'polR'}, {}), '(polR)', False, 'import pytest\n'), ((419, 34, 419, 55), 'pytest.approx', 'pytest.approx', ({(419, 48, 419, 54): 'polPhi'}, {}), '(polPhi)', False, 'import pytest\n'), ((420, 4, 420, 10), 'projectq.ops.All', 'All', ({(420, 8, 420, 9): 'H'}, {}), '(H)', False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, UniformlyControlledRy, UniformlyControlledRz\n'), ((421, 4, 421, 10), 'projectq.ops.All', 'All', ({(421, 8, 421, 9): 'X'}, {}), '(X)', False, 
'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, UniformlyControlledRy, UniformlyControlledRz\n'), ((426, 19, 426, 37), 'pytest.approx', 'pytest.approx', ({(426, 33, 426, 36): '(0.3)'}, {}), '(0.3)', False, 'import pytest\n'), ((431, 4, 431, 16), 'projectq.ops.All', 'All', ({(431, 8, 431, 15): 'Measure'}, {}), '(Measure)', False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, UniformlyControlledRy, UniformlyControlledRz\n'), ((433, 9, 433, 36), 'pytest.raises', 'pytest.raises', ({(433, 23, 433, 35): 'RuntimeError'}, {}), '(RuntimeError)', False, 'import pytest\n'), ((436, 9, 436, 36), 'pytest.raises', 'pytest.raises', ({(436, 23, 436, 35): 'RuntimeError'}, {}), '(RuntimeError)', False, 'import pytest\n'), ((441, 9, 441, 36), 'pytest.raises', 'pytest.raises', ({(441, 23, 441, 35): 'RuntimeError'}, {}), '(RuntimeError)', False, 'import pytest\n'), ((446, 19, 446, 35), 'projectq.cengines.LocalOptimizer', 'LocalOptimizer', ({}, {}), '()', False, 'from projectq.cengines import BasicEngine, BasicMapperEngine, DummyEngine, LocalOptimizer, NotYetMeasuredError\n'), ((451, 18, 451, 32), 'math.sqrt', 'math.sqrt', ({(451, 28, 451, 31): '(0.2)'}, {}), '(0.2)', False, 'import math\n'), ((451, 34, 451, 48), 'math.sqrt', 'math.sqrt', ({(451, 44, 451, 47): '(0.8)'}, {}), '(0.8)', False, 'import math\n'), ((452, 9, 452, 36), 'pytest.raises', 'pytest.raises', ({(452, 23, 452, 35): 'RuntimeError'}, {}), '(RuntimeError)', False, 'import pytest\n'), ((459, 4, 459, 16), 'projectq.ops.All', 'All', ({(459, 8, 459, 15): 'Measure'}, {}), '(Measure)', False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, UniformlyControlledRy, UniformlyControlledRz\n'), ((476, 19, 476, 35), 'projectq.cengines.LocalOptimizer', 'LocalOptimizer', ({}, {}), '()', False, 'from projectq.cengines import BasicEngine, BasicMapperEngine, DummyEngine, LocalOptimizer, NotYetMeasuredError\n'), ((482, 9, 482, 36), 'pytest.raises', 'pytest.raises', ({(482, 23, 482, 35): 'RuntimeError'}, {}), '(RuntimeError)', False, 'import pytest\n'), ((487, 4, 487, 10), 'projectq.ops.All', 'All', ({(487, 8, 487, 9): 'H'}, {}), '(H)', False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, UniformlyControlledRy, UniformlyControlledRz\n'), ((491, 9, 491, 36), 'pytest.raises', 'pytest.raises', ({(491, 23, 491, 35): 'RuntimeError'}, {}), '(RuntimeError)', False, 'import pytest\n'), ((495, 26, 495, 43), 'pytest.approx', 'pytest.approx', ({(495, 40, 495, 42): '(0.5)'}, {}), '(0.5)', False, 'import pytest\n'), ((502, 26, 502, 43), 'pytest.approx', 'pytest.approx', ({(502, 40, 502, 42): '(1.0)'}, {}), '(1.0)', False, 'import pytest\n'), ((509, 9, 509, 36), 'pytest.raises', 'pytest.raises', ({(509, 23, 509, 35): 'RuntimeError'}, {}), '(RuntimeError)', False, 'import pytest\n'), ((544, 9, 544, 35), 'projectq.meta.Control', 'Control', ({(544, 17, 544, 20): 'eng', (544, 22, 544, 34): 'qubits[0:-1]'}, {}), '(eng, qubits[0:-1])', False, 'from projectq.meta import Compute, Uncompute, Control, Dagger, LogicalQubitIDTag\n'), ((546, 9, 546, 35), 'projectq.meta.Control', 
'Control', ({(546, 17, 546, 20): 'eng', (546, 22, 546, 34): 'qubits[0:-2]'}, {}), '(eng, qubits[0:-2])', False, 'from projectq.meta import Compute, Uncompute, Control, Dagger, LogicalQubitIDTag\n'), ((548, 9, 548, 35), 'projectq.meta.Control', 'Control', ({(548, 17, 548, 20): 'eng', (548, 22, 548, 34): 'qubits[0:-3]'}, {}), '(eng, qubits[0:-3])', False, 'from projectq.meta import Compute, Uncompute, Control, Dagger, LogicalQubitIDTag\n'), ((558, 4, 558, 16), 'projectq.ops.All', 'All', ({(558, 8, 558, 15): 'Measure'}, {}), '(Measure)', False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, UniformlyControlledRy, UniformlyControlledRz\n'), ((590, 8, 590, 22), 'projectq.meta.Uncompute', 'Uncompute', ({(590, 18, 590, 21): 'eng'}, {}), '(eng)', False, 'from projectq.meta import Compute, Uncompute, Control, Dagger, LogicalQubitIDTag\n'), ((639, 4, 639, 16), 'projectq.ops.All', 'All', ({(639, 8, 639, 15): 'Measure'}, {}), '(Measure)', False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, UniformlyControlledRy, UniformlyControlledRz\n'), ((640, 4, 640, 16), 'projectq.ops.All', 'All', ({(640, 8, 640, 15): 'Measure'}, {}), '(Measure)', False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, UniformlyControlledRy, UniformlyControlledRz\n'), ((650, 9, 650, 33), 'pytest.raises', 'pytest.raises', ({(650, 23, 650, 32): 'Exception'}, {}), '(Exception)', False, 'import pytest\n'), ((661, 24, 661, 41), 'pytest.approx', 'pytest.approx', ({(661, 38, 661, 40): '(0.0)'}, {}), '(0.0)', False, 'import pytest\n'), ((663, 24, 663, 41), 'pytest.approx', 'pytest.approx', ({(663, 38, 663, 40): '(1.0)'}, {}), '(1.0)', False, 'import pytest\n'), ((672, 9, 672, 33), 'pytest.raises', 'pytest.raises', ({(672, 23, 672, 32): 'Exception'}, {}), '(Exception)', False, 'import pytest\n'), ((679, 62, 679, 108), 'pytest.approx', 'pytest.approx', (), '', False, 'import pytest\n'), ((683, 62, 683, 109), 'pytest.approx', 'pytest.approx', (), '', False, 'import pytest\n'), ((689, 62, 689, 108), 'pytest.approx', 'pytest.approx', (), '', False, 'import pytest\n'), ((693, 62, 693, 109), 'pytest.approx', 'pytest.approx', (), '', False, 'import pytest\n'), ((699, 62, 699, 108), 'pytest.approx', 'pytest.approx', (), '', False, 'import pytest\n'), ((703, 62, 703, 109), 'pytest.approx', 'pytest.approx', (), '', False, 'import pytest\n'), ((709, 62, 709, 111), 'pytest.approx', 'pytest.approx', (), '', False, 'import pytest\n'), ((713, 62, 713, 112), 'pytest.approx', 'pytest.approx', (), '', False, 'import pytest\n'), ((719, 62, 719, 108), 'pytest.approx', 'pytest.approx', (), '', False, 'import pytest\n'), ((722, 62, 722, 109), 'pytest.approx', 'pytest.approx', (), '', False, 'import pytest\n'), ((725, 62, 725, 108), 'pytest.approx', 'pytest.approx', (), '', False, 'import pytest\n'), ((728, 62, 728, 109), 'pytest.approx', 'pytest.approx', (), '', False, 'import pytest\n'), ((75, 14, 75, 25), 'projectq.backends.Simulator', 'Simulator', ({}, {}), '()', False, 'from projectq.backends import Simulator\n'), ((76, 25, 76, 43), 'projectq.backends._qracksim._qracksim.QrackSimulator', 'QrackSim', ({(76, 34, 76, 35): '1', (76, 37, 76, 39): '-1', (76, 41, 76, 42): '2'}, {}), 
'(1, -1, 2)', True, 'from projectq.backends._qracksim._qracksim import QrackSimulator as QrackSim\n'), ((241, 28, 241, 36), 'projectq.ops.Ry', 'Ry', ({(241, 31, 241, 35): '-0.1'}, {}), '(-0.1)', False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, UniformlyControlledRy, UniformlyControlledRz\n'), ((242, 38, 242, 56), 'numpy.kron', 'numpy.kron', ({(242, 49, 242, 51): 'm2', (242, 53, 242, 55): 'm1'}, {}), '(m2, m1)', False, 'import numpy\n'), ((297, 6, 297, 30), 'projectq.libs.math.AddConstantModN', 'AddConstantModN', ({(297, 22, 297, 24): '(10)', (297, 26, 297, 29): '(256)'}, {}), '(10, 256)', False, 'from projectq.libs.math import AddConstant, AddConstantModN, SubConstant, SubConstantModN, MultiplyByConstantModN\n'), ((307, 6, 307, 30), 'projectq.libs.math.AddConstantModN', 'AddConstantModN', ({(307, 22, 307, 24): '(10)', (307, 26, 307, 29): '(256)'}, {}), '(10, 256)', False, 'from projectq.libs.math import AddConstant, AddConstantModN, SubConstant, SubConstantModN, MultiplyByConstantModN\n'), ((321, 6, 321, 30), 'projectq.libs.math.SubConstantModN', 'SubConstantModN', ({(321, 22, 321, 24): '(10)', (321, 26, 321, 29): '(256)'}, {}), '(10, 256)', False, 'from projectq.libs.math import AddConstant, AddConstantModN, SubConstant, SubConstantModN, MultiplyByConstantModN\n'), ((331, 6, 331, 30), 'projectq.libs.math.SubConstantModN', 'SubConstantModN', ({(331, 22, 331, 24): '(10)', (331, 26, 331, 29): '(256)'}, {}), '(10, 256)', False, 'from projectq.libs.math import AddConstant, AddConstantModN, SubConstant, SubConstantModN, MultiplyByConstantModN\n'), ((347, 6, 347, 36), 'projectq.libs.math.MultiplyByConstantModN', 'MultiplyByConstantModN', ({(347, 29, 347, 30): '(2)', (347, 32, 347, 35): '(256)'}, {}), '(2, 256)', False, 'from projectq.libs.math import AddConstant, AddConstantModN, SubConstant, SubConstantModN, MultiplyByConstantModN\n'), ((357, 6, 357, 37), 'projectq.libs.math.MultiplyByConstantModN', 'MultiplyByConstantModN', ({(357, 29, 357, 31): '(10)', (357, 33, 357, 36): '(256)'}, {}), '(10, 256)', False, 'from projectq.libs.math import AddConstant, AddConstantModN, SubConstant, SubConstantModN, MultiplyByConstantModN\n'), ((376, 16, 376, 37), 'pytest.approx', 'pytest.approx', ({(376, 30, 376, 36): '(0.5 ** i)'}, {}), '(0.5 ** i)', False, 'import pytest\n'), ((430, 26, 430, 41), 'math.sqrt', 'math.sqrt', ({(430, 36, 430, 40): '(0.91)'}, {}), '(0.91)', False, 'import math\n'), ((472, 24, 472, 41), 'pytest.approx', 'pytest.approx', ({(472, 38, 472, 40): '(1.0j)'}, {}), '(1.0j)', False, 'import pytest\n'), ((472, 58, 472, 76), 'pytest.approx', 'pytest.approx', ({(472, 72, 472, 75): '(-1.0j)'}, {}), '(-1.0j)', False, 'import pytest\n'), ((584, 13, 584, 25), 'projectq.meta.Compute', 'Compute', ({(584, 21, 584, 24): 'eng'}, {}), '(eng)', False, 'from projectq.meta import Compute, Uncompute, Control, Dagger, LogicalQubitIDTag\n'), ((588, 13, 588, 41), 'projectq.meta.Control', 'Control', ({(588, 21, 588, 24): 'eng', (588, 26, 588, 40): 'control_qubits'}, {}), '(eng, control_qubits)', False, 'from projectq.meta import Compute, Uncompute, Control, Dagger, LogicalQubitIDTag\n'), ((603, 37, 603, 48), 'projectq.backends.Simulator', 'Simulator', ({}, {}), '()', False, 'from projectq.backends import Simulator\n'), ((637, 26, 637, 75), 'pytest.approx', 'pytest.approx', (), '', False, 'import pytest\n'), ((89, 16, 89, 42), 'projectq.cengines.BasicEngine.__init__', 
'BasicEngine.__init__', ({(89, 37, 89, 41): 'self'}, {}), '(self)', False, 'from projectq.cengines import BasicEngine, BasicMapperEngine, DummyEngine, LocalOptimizer, NotYetMeasuredError\n'), ((225, 25, 225, 45), 'projectq.meta.LogicalQubitIDTag', 'LogicalQubitIDTag', ({(225, 43, 225, 44): '2'}, {}), '(2)', False, 'from projectq.meta import Compute, Uncompute, Control, Dagger, LogicalQubitIDTag\n'), ((241, 9, 241, 16), 'projectq.ops.Rz', 'Rz', ({(241, 12, 241, 15): '0.9'}, {}), '(0.9)', False, 'from projectq.ops import All, Allocate, BasicGate, BasicMathGate, CNOT, C, Command, H, Measure, QubitOperator, Rx, Ry, Rz, S, TimeEvolution, Toffoli, X, Y, Z, Swap, SqrtSwap, UniformlyControlledRy, UniformlyControlledRz\n'), ((422, 11, 422, 25), 'math.acos', 'math.acos', ({(422, 21, 422, 24): '(0.3)'}, {}), '(0.3)', False, 'import math\n'), ((676, 43, 676, 55), 'math.sqrt', 'math.sqrt', ({(676, 53, 676, 54): '(2)'}, {}), '(2)', False, 'import math\n'), ((676, 61, 676, 73), 'math.sqrt', 'math.sqrt', ({(676, 71, 676, 72): '(2)'}, {}), '(2)', False, 'import math\n'), ((680, 43, 680, 55), 'math.sqrt', 'math.sqrt', ({(680, 53, 680, 54): '(2)'}, {}), '(2)', False, 'import math\n'), ((680, 62, 680, 74), 'math.sqrt', 'math.sqrt', ({(680, 72, 680, 73): '(2)'}, {}), '(2)', False, 'import math\n'), ((686, 43, 686, 55), 'math.sqrt', 'math.sqrt', ({(686, 53, 686, 54): '(2)'}, {}), '(2)', False, 'import math\n'), ((686, 62, 686, 74), 'math.sqrt', 'math.sqrt', ({(686, 72, 686, 73): '(2)'}, {}), '(2)', False, 'import math\n'), ((690, 43, 690, 55), 'math.sqrt', 'math.sqrt', ({(690, 53, 690, 54): '(2)'}, {}), '(2)', False, 'import math\n'), ((690, 63, 690, 75), 'math.sqrt', 'math.sqrt', ({(690, 73, 690, 74): '(2)'}, {}), '(2)', False, 'import math\n'), ((382, 21, 382, 35), 'math.sqrt', 'math.sqrt', ({(382, 31, 382, 34): '(0.3)'}, {}), '(0.3)', False, 'import math\n'), ((385, 21, 385, 35), 'math.sqrt', 'math.sqrt', ({(385, 31, 385, 34): '(0.4)'}, {}), '(0.4)', False, 'import math\n')] |
jshwi/jss | app/deps.py | b9f29d47c63cd57d0efc1abec37152e97a92049f | """
app.deps
========
Register dependencies that are not part of a ``Flask`` extension.
"""
from flask import Flask
from redis import Redis
from rq import Queue
def init_app(app: Flask) -> None:
"""Register application helpers that are not ``Flask-`` extensions.
    As these are not ``Flask`` extensions, they do not have an
``init_app`` method, and so can be attached to the app by declaring
them as instance attributes.
.. todo:: These are not declared in ``__init__`` and are a bit of a
code-smell. Using ``flask.g`` may be more appropriate...
:param app: Application factory object.
"""
app.redis = Redis.from_url(app.config["REDIS_URL"]) # type: ignore
app.task_queue = Queue("jss-tasks", connection=app.redis) # type: ignore
| [((24, 16, 24, 55), 'redis.Redis.from_url', 'Redis.from_url', ({(24, 31, 24, 54): "app.config['REDIS_URL']"}, {}), "(app.config['REDIS_URL'])", False, 'from redis import Redis\n'), ((25, 21, 25, 61), 'rq.Queue', 'Queue', (), '', False, 'from rq import Queue\n')] |
sangdon/intern2020_cocal | uncertainty/util/__init__.py | 2f434b76fbf3426c6685fb92c5bbc2d32fcba7ba | from util.args import *
from util.logger import Logger
| [] |
plusterm/plusterm | com_reader.py | 45e9382accdaae7d51c65cab77e571bc6d264936 | # from wx.lib.pubsub import pub
from pubsub import pub
import serial
import threading
import queue
import time
class ComReaderThread(threading.Thread):
'''
    Creates a thread that continuously reads from the serial connection.
    Puts the result as a tuple (timestamp, data) in a queue.
'''
def __init__(self, ser, error_que):
threading.Thread.__init__(self)
self.ser = ser
self.error_que = error_que
self.alive = threading.Event()
self.alive.set()
def run(self):
        while self.alive.is_set():
try:
if self.ser.in_waiting > 0:
timestamp = time.time()
data = self.ser.read(self.ser.in_waiting)
pub.sendMessage('serial.data', data=(timestamp, data))
except serial.SerialException as e:
reconnected = False
print('Serial connection lost, trying to reconnect.')
ts = time.time()
self.error_que.put((ts, str(e)))
                while not reconnected and self.alive.is_set():
try:
                        # if ser still thinks it's open, close it before reopening
if self.ser.is_open:
self.ser.close()
self.ser.open()
except Exception as e:
                        # reconnection failed; let some time pass before retrying
time.sleep(0.1)
else:
reconnected = True
print('Reconnected')
def stop(self, timeout=0.5):
self.alive.clear()
threading.Thread.join(self, timeout)
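# Minimal usage sketch (hypothetical port name and queue, not part of the original file):
#   ser = serial.Serial('/dev/ttyUSB0', 115200)
#   errors = queue.Queue()
#   reader = ComReaderThread(ser, errors)
#   reader.start()   # data arrives via pub.sendMessage('serial.data', ...)
#   reader.stop()    # signal the loop to exit and join the thread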
| [((16, 8, 16, 39), 'threading.Thread.__init__', 'threading.Thread.__init__', ({(16, 34, 16, 38): 'self'}, {}), '(self)', False, 'import threading\n'), ((20, 21, 20, 38), 'threading.Event', 'threading.Event', ({}, {}), '()', False, 'import threading\n'), ((56, 8, 56, 44), 'threading.Thread.join', 'threading.Thread.join', ({(56, 30, 56, 34): 'self', (56, 36, 56, 43): 'timeout'}, {}), '(self, timeout)', False, 'import threading\n'), ((27, 32, 27, 43), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((31, 20, 31, 74), 'pubsub.pub.sendMessage', 'pub.sendMessage', (), '', False, 'from pubsub import pub\n'), ((36, 21, 36, 32), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((48, 24, 48, 39), 'time.sleep', 'time.sleep', ({(48, 35, 48, 38): '(0.1)'}, {}), '(0.1)', False, 'import time\n')] |
ganeshkumarsv/datadog-cloudfoundry-buildpack | docker/app/app.py | 7c622dfc7990da83e5dfa4f474878a642fd40fd3 | from flask import Flask
from datadog import statsd
import logging
import os
# This is a small example application
# It uses tracing and DogStatsD on a sample Flask application
log = logging.getLogger("app")
app = Flask(__name__)
# The app has two routes, a basic endpoint and an exception endpoint
@app.route("/")
def hello():
statsd.increment('request.number', 1, tags=["test", "foo:bar", "my:app"])
log.info("Got a request at hello")
return "Hello World!"
@app.route("/error")
def error():
statsd.increment('request.error.number', 1, tags=["test", "foo:bar", "my:app"])
log.info("Got a request at error")
raise Exception()
# This is meant to be run directly, instead of executed through flask run
if __name__ == '__main__':
# It grabs the host and port from the environment
port = 5001
host = '0.0.0.0'
if os.environ.get('HOST'):
host = os.environ.get('HOST')
if os.environ.get('PORT'):
        port = int(os.environ.get('PORT'))  # env vars are strings; keep the port numeric
app.run(debug=True, host=host, port=port)
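# Quick local check (hypothetical invocation, not part of the original file):
#   HOST=127.0.0.1 PORT=5001 python app.py
#   curl http://127.0.0.1:5001/    # -> "Hello World!" and increments request.number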
| [((10, 6, 10, 30), 'logging.getLogger', 'logging.getLogger', ({(10, 24, 10, 29): '"""app"""'}, {}), "('app')", False, 'import logging\n'), ((12, 6, 12, 21), 'flask.Flask', 'Flask', ({(12, 12, 12, 20): '__name__'}, {}), '(__name__)', False, 'from flask import Flask\n'), ((17, 4, 17, 77), 'datadog.statsd.increment', 'statsd.increment', (), '', False, 'from datadog import statsd\n'), ((23, 4, 23, 83), 'datadog.statsd.increment', 'statsd.increment', (), '', False, 'from datadog import statsd\n'), ((32, 7, 32, 29), 'os.environ.get', 'os.environ.get', ({(32, 22, 32, 28): '"""HOST"""'}, {}), "('HOST')", False, 'import os\n'), ((34, 7, 34, 29), 'os.environ.get', 'os.environ.get', ({(34, 22, 34, 28): '"""PORT"""'}, {}), "('PORT')", False, 'import os\n'), ((33, 15, 33, 37), 'os.environ.get', 'os.environ.get', ({(33, 30, 33, 36): '"""HOST"""'}, {}), "('HOST')", False, 'import os\n'), ((35, 15, 35, 37), 'os.environ.get', 'os.environ.get', ({(35, 30, 35, 36): '"""PORT"""'}, {}), "('PORT')", False, 'import os\n')] |
shubhamsah/OpenEDU | Data Structure using Python/Linked_List/2linked_list1.py | a4c68d05f67e7ce6d2305f4ca1567b8f4e95b835 | # Let's create a linked list that has the following elements
'''
1. FE
2. SE
3. TE
4. BE
'''
# Creating a Node class to create individual Nodes
class Node:
    def __init__(self, data):
self.__data = data
self.__next = None
def get_data(self):
return self.__data
def set_data(self, data):
self.__data = data
def get_next(self):
return self.__next
    def set_next(self, next_node):
self.__next = next_node
class LinkedList:
def __init__(self):
self.__head = None
self.__tail = None
def get_head(self):
return self.__head
def get_tail(self):
return self.__tail
    # Add an element at the tail of the linked list
    def add(self, data):
        new_node = Node(data)
        if self.__head is None:
            self.__head = self.__tail = new_node
        else:
            self.__tail.set_next(new_node)
            self.__tail = new_node
number_list = LinkedList()
number_list.add("FE")
number_list.add("SE")
number_list.add("TE")
number_list.add("BE")
| [] |
function2-llx/MONAI | monai/networks/blocks/selfattention.py | 4cddaa830b61b88ec78e089bb5f21e05bb1a78f4 | # Copyright (c) MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
import torch.nn as nn
from monai.utils import optional_import
Rearrange, _ = optional_import("einops.layers.torch", name="Rearrange")
class SABlock(nn.Module):
"""
A self-attention block, based on: "Dosovitskiy et al.,
An Image is Worth 16x16 Words: Transformers for Image Recognition at Scale <https://arxiv.org/abs/2010.11929>"
"""
def __init__(self, hidden_size: int, num_heads: int, dropout_rate: float = 0.0) -> None:
"""
Args:
hidden_size: dimension of hidden layer.
num_heads: number of attention heads.
            dropout_rate: fraction of the input units to drop.
"""
super().__init__()
if not (0 <= dropout_rate <= 1):
raise ValueError("dropout_rate should be between 0 and 1.")
if hidden_size % num_heads != 0:
raise ValueError("hidden size should be divisible by num_heads.")
self.num_heads = num_heads
self.out_proj = nn.Linear(hidden_size, hidden_size)
self.qkv = nn.Linear(hidden_size, hidden_size * 3, bias=False)
self.input_rearrange = Rearrange("b h (qkv l d) -> qkv b l h d", qkv=3, l=num_heads)
self.out_rearrange = Rearrange("b h l d -> b l (h d)")
self.drop_output = nn.Dropout(dropout_rate)
self.drop_weights = nn.Dropout(dropout_rate)
self.head_dim = hidden_size // num_heads
self.scale = self.head_dim**-0.5
def forward(self, x):
output = self.input_rearrange(self.qkv(x))
q, k, v = output[0], output[1], output[2]
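        # scaled dot-product attention: pairwise token similarities, softmax-normalized per query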
att_mat = (torch.einsum("blxd,blyd->blxy", q, k) * self.scale).softmax(dim=-1)
att_mat = self.drop_weights(att_mat)
x = torch.einsum("bhxy,bhyd->bhxd", att_mat, v)
x = self.out_rearrange(x)
x = self.out_proj(x)
x = self.drop_output(x)
return x
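# Minimal smoke test (assumed shapes, not part of the original module):
#   block = SABlock(hidden_size=384, num_heads=6, dropout_rate=0.1)
#   out = block(torch.randn(2, 196, 384))   # -> torch.Size([2, 196, 384]), same as the input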
| [((17, 15, 17, 71), 'monai.utils.optional_import', 'optional_import', (), '', False, 'from monai.utils import optional_import\n'), ((44, 24, 44, 59), 'torch.nn.Linear', 'nn.Linear', ({(44, 34, 44, 45): 'hidden_size', (44, 47, 44, 58): 'hidden_size'}, {}), '(hidden_size, hidden_size)', True, 'import torch.nn as nn\n'), ((45, 19, 45, 70), 'torch.nn.Linear', 'nn.Linear', (), '', True, 'import torch.nn as nn\n'), ((48, 27, 48, 51), 'torch.nn.Dropout', 'nn.Dropout', ({(48, 38, 48, 50): 'dropout_rate'}, {}), '(dropout_rate)', True, 'import torch.nn as nn\n'), ((49, 28, 49, 52), 'torch.nn.Dropout', 'nn.Dropout', ({(49, 39, 49, 51): 'dropout_rate'}, {}), '(dropout_rate)', True, 'import torch.nn as nn\n'), ((58, 12, 58, 55), 'torch.einsum', 'torch.einsum', ({(58, 25, 58, 42): '"""bhxy,bhyd->bhxd"""', (58, 44, 58, 51): 'att_mat', (58, 53, 58, 54): 'v'}, {}), "('bhxy,bhyd->bhxd', att_mat, v)", False, 'import torch\n'), ((56, 19, 56, 56), 'torch.einsum', 'torch.einsum', ({(56, 32, 56, 49): '"""blxd,blyd->blxy"""', (56, 51, 56, 52): 'q', (56, 54, 56, 55): 'k'}, {}), "('blxd,blyd->blxy', q, k)", False, 'import torch\n')] |
mrakitin/opentrons | api/tests/opentrons/commands/test_protocol_commands.py | d9c7ed23d13cdb62bd1bc397dc2871d4bd5b77e9 | import pytest
from opentrons.commands import protocol_commands
@pytest.mark.parametrize(
argnames="seconds,"
"minutes,"
"expected_seconds,"
"expected_minutes,"
"expected_text",
argvalues=[
[10, 0, 10, 0, "Delaying for 0 minutes and 10.0 seconds"],
[10, 9, 10, 9, "Delaying for 9 minutes and 10.0 seconds"],
[100, 0, 40, 1, "Delaying for 1 minutes and 40.0 seconds"],
[105, 5.25, 0, 7, "Delaying for 7 minutes and 0.0 seconds"],
[0.5, 0, 0.5, 0, "Delaying for 0 minutes and 0.5 seconds"],
[105.5, 5.25, 0.5, 7, "Delaying for 7 minutes and 0.5 seconds"],
[0.998, 0, 0.998, 0, "Delaying for 0 minutes and 0.998 seconds"],
[0.9998, 0, 0.9998, 0, "Delaying for 0 minutes and 1.0 seconds"],
[1.0001, 0, 1.0001, 0, "Delaying for 0 minutes and 1.0 seconds"],
]
)
def test_delay(seconds,
minutes,
expected_seconds,
expected_minutes,
expected_text
):
command = protocol_commands.delay(seconds, minutes)
name = command['name']
payload = command['payload']
assert name == 'command.DELAY'
assert payload['seconds'] == expected_seconds
assert payload['minutes'] == expected_minutes
assert payload['text'] == expected_text
def test_delay_with_message():
"""It should allow a message to be appended to the delay text."""
command = protocol_commands.delay(seconds=1, minutes=1, msg="Waiting...")
assert command["payload"]["text"] == (
"Delaying for 1 minutes and 1.0 seconds. Waiting..."
)
| [((5, 1, 22, 1), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (), '', False, 'import pytest\n'), ((29, 14, 29, 55), 'opentrons.commands.protocol_commands.delay', 'protocol_commands.delay', ({(29, 38, 29, 45): 'seconds', (29, 47, 29, 54): 'minutes'}, {}), '(seconds, minutes)', False, 'from opentrons.commands import protocol_commands\n'), ((41, 14, 41, 77), 'opentrons.commands.protocol_commands.delay', 'protocol_commands.delay', (), '', False, 'from opentrons.commands import protocol_commands\n')] |
ess-dmsc/just-bin-it | tests/test_histogram_source.py | 8fcd03337a8a88087f25c510c589d482bdd9e4ad | from unittest.mock import patch
import pytest
from just_bin_it.endpoints.sources import HistogramSource
from tests.doubles.consumer import StubConsumer
TEST_MESSAGE = b"this is a byte message"
INVALID_FB = b"this is an invalid fb message"
class TestHistogramSource:
@pytest.fixture(autouse=True)
def prepare(self):
pass
def test_if_no_consumer_supplied_then_raises(self):
with pytest.raises(Exception):
HistogramSource(None)
def test_if_no_new_messages_then_no_data(self):
mock_consumer = StubConsumer(["broker1"], ["topic1"])
mock_consumer.add_messages([])
hs = HistogramSource(mock_consumer)
data = hs.get_new_data()
assert len(data) == 0
@patch("just_bin_it.endpoints.sources.deserialise_hs00", return_value=TEST_MESSAGE)
def test_if_five_new_messages_on_one_topic_then_data_has_five_items(
self, mock_method
):
mock_consumer = StubConsumer(["broker1"], ["topic1"])
mock_consumer.add_messages([TEST_MESSAGE] * 5)
hs = HistogramSource(mock_consumer)
data = hs.get_new_data()
_, _, message = data[0]
assert len(data) == 5
assert message == TEST_MESSAGE
def test_deserialising_invalid_fb_does_not_throw(self):
mock_consumer = StubConsumer(["broker1"], ["topic1"])
mock_consumer.add_messages([INVALID_FB])
hs = HistogramSource(mock_consumer)
hs.get_new_data()
| [((13, 5, 13, 33), 'pytest.fixture', 'pytest.fixture', (), '', False, 'import pytest\n'), ((28, 5, 28, 87), 'unittest.mock.patch', 'patch', (), '', False, 'from unittest.mock import patch\n'), ((22, 24, 22, 61), 'tests.doubles.consumer.StubConsumer', 'StubConsumer', ({(22, 37, 22, 48): "['broker1']", (22, 50, 22, 60): "['topic1']"}, {}), "(['broker1'], ['topic1'])", False, 'from tests.doubles.consumer import StubConsumer\n'), ((24, 13, 24, 43), 'just_bin_it.endpoints.sources.HistogramSource', 'HistogramSource', ({(24, 29, 24, 42): 'mock_consumer'}, {}), '(mock_consumer)', False, 'from just_bin_it.endpoints.sources import HistogramSource\n'), ((32, 24, 32, 61), 'tests.doubles.consumer.StubConsumer', 'StubConsumer', ({(32, 37, 32, 48): "['broker1']", (32, 50, 32, 60): "['topic1']"}, {}), "(['broker1'], ['topic1'])", False, 'from tests.doubles.consumer import StubConsumer\n'), ((34, 13, 34, 43), 'just_bin_it.endpoints.sources.HistogramSource', 'HistogramSource', ({(34, 29, 34, 42): 'mock_consumer'}, {}), '(mock_consumer)', False, 'from just_bin_it.endpoints.sources import HistogramSource\n'), ((43, 24, 43, 61), 'tests.doubles.consumer.StubConsumer', 'StubConsumer', ({(43, 37, 43, 48): "['broker1']", (43, 50, 43, 60): "['topic1']"}, {}), "(['broker1'], ['topic1'])", False, 'from tests.doubles.consumer import StubConsumer\n'), ((45, 13, 45, 43), 'just_bin_it.endpoints.sources.HistogramSource', 'HistogramSource', ({(45, 29, 45, 42): 'mock_consumer'}, {}), '(mock_consumer)', False, 'from just_bin_it.endpoints.sources import HistogramSource\n'), ((18, 13, 18, 37), 'pytest.raises', 'pytest.raises', ({(18, 27, 18, 36): 'Exception'}, {}), '(Exception)', False, 'import pytest\n'), ((19, 12, 19, 33), 'just_bin_it.endpoints.sources.HistogramSource', 'HistogramSource', ({(19, 28, 19, 32): 'None'}, {}), '(None)', False, 'from just_bin_it.endpoints.sources import HistogramSource\n')] |
novel/lc-tools | lctools/shortcuts.py | 1b9032357e2e87aebd76d87664077caa5747c220 | import getopt
import sys
from libcloud.compute.types import NodeState
from lc import get_lc
from printer import Printer
def lister_main(what, resource=None,
extension=False, supports_location=False, **kwargs):
"""Shortcut for main() routine for lister
tools, e.g. lc-SOMETHING-list
    @param what: what we are listing, e.g. 'nodes'
    @type what: C{string}
    @param extension: is it an extension of core libcloud functionality?
    @param kwargs: additional arguments for the call
    @param supports_location: tells whether the objects we are
        listing can be filtered by location
    @type supports_location: C{bool}
"""
list_method = "%slist_%s" % ({True: 'ex_', False: ''}[extension], what)
profile = "default"
format = location = None
options = "f:p:"
if supports_location:
options += "l:"
try:
opts, args = getopt.getopt(sys.argv[1:], options)
    except getopt.GetoptError as err:
sys.stderr.write("%s\n" % str(err))
sys.exit(1)
for o, a in opts:
if o == "-f":
format = a
if o == "-p":
profile = a
if o == "-l":
location = a
try:
conn = get_lc(profile, resource=resource)
list_kwargs = kwargs
if supports_location and location is not None:
            nodelocation = [loc for loc in conn.list_locations()
                            if str(loc.id) == location][0]
list_kwargs["location"] = nodelocation
for node in getattr(conn, list_method)(**list_kwargs):
Printer.do(node, format)
    except Exception as err:
sys.stderr.write("Error: %s\n" % str(err))
def save_image_main():
"""Shortcut for main() routine for provider
specific image save tools.
"""
def usage(progname):
sys.stdout.write("%s -i <node_id> -n <image_name> [-p <profile]\n\n" % progname)
profile = 'default'
name = node_id = None
try:
opts, args = getopt.getopt(sys.argv[1:], "i:n:p:")
    except getopt.GetoptError as err:
sys.stderr.write("%s\n" % str(err))
sys.exit(1)
for o, a in opts:
if o == "-i":
node_id = a
if o == "-n":
name = a
if o == "-p":
profile = a
if node_id is None or name is None:
usage(sys.argv[0])
sys.exit(1)
conn = get_lc(profile)
node = get_node_or_fail(conn, node_id, print_error_and_exit,
("Error: cannot find node with id '%s'." % node_id,))
Printer.do(conn.ex_save_image(node, name))
def get_node_or_fail(conn, node_id, coroutine=None, cargs=(), ckwargs={}):
"""Shortcut to get a single node by its id. In case when
such node could not be found, coroutine could be called
to handle such case. Typically coroutine will output an
error message and exit from application.
@param conn: libcloud connection handle
@param node_id: id of the node to search for
@param coroutine: a callable object to handle case
when node cannot be found
@param cargs: positional arguments for coroutine
    @param ckwargs: keyword arguments for coroutine
@return: node object if found, None otherwise"""
try:
node = [node for node in conn.list_nodes()
if str(node.id) == str(node_id)][0]
return node
except IndexError:
if callable(coroutine):
coroutine(*cargs, **ckwargs)
return None
def print_error_and_exit(message):
sys.stderr.write("%s\n" % message)
sys.exit(1)
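# Example wiring in a hypothetical lister tool (sketch, not part of the original module):
#   if __name__ == "__main__":
#       lister_main('nodes', supports_location=True)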
| [] |
andr1976/thermo | tests/test_flash_vl.py | 42d10b3702373aacc88167d4046ea9af92abd570 | # -*- coding: utf-8 -*-
'''Chemical Engineering Design Library (ChEDL). Utilities for process modeling.
Copyright (C) 2020, Caleb Bell <[email protected]>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.'''
import pytest
from fluids.core import C2K
import thermo
from chemicals.utils import *
from thermo import *
from fluids.numerics import *
from math import *
import json
import os
import numpy as np
def test_C2_C5_PR():
T, P = 300, 3e6
constants = ChemicalConstantsPackage(Tcs=[305.32, 469.7], Pcs=[4872000.0, 3370000.0],
omegas=[0.098, 0.251], Tms=[90.3, 143.15],
Tbs=[184.55, 309.21], CASs=['74-84-0', '109-66-0'],
names=['ethane', 'pentane'], MWs=[30.06904, 72.14878])
HeatCapacityGases = [HeatCapacityGas(poly_fit=(50.0, 1000.0, [7.115386645067898e-21, -3.2034776773408394e-17, 5.957592282542187e-14, -5.91169369931607e-11, 3.391209091071677e-08, -1.158730780040934e-05, 0.002409311277400987, -0.18906638711444712, 37.94602410497228])),
HeatCapacityGas(poly_fit=(200.0, 1000.0, [7.537198394065234e-22, -4.946850205122326e-18, 1.4223747507170372e-14, -2.3451318313798008e-11, 2.4271676873997662e-08, -1.6055220805830093e-05, 0.006379734000450042, -1.0360272314628292, 141.84695243411866]))]
correlations = PropertyCorrelationsPackage(constants, HeatCapacityGases=HeatCapacityGases)
zs = ws_to_zs(MWs=constants.MWs, ws=[.5, .5])
eos_kwargs = {'Pcs': constants.Pcs, 'Tcs': constants.Tcs, 'omegas': constants.omegas}
gas = CEOSGas(PRMIX, eos_kwargs, HeatCapacityGases=HeatCapacityGases, T=T, P=P, zs=zs)
liq = CEOSLiquid(PRMIX, eos_kwargs, HeatCapacityGases=HeatCapacityGases, T=T, P=P, zs=zs)
flasher = FlashVL(constants, correlations, liquid=liq, gas=gas)
    # Check there are two phases near the dew point. Don't bother checking the composition most of the time.
# When this test was written, case is still valid for a dP of 0.00000001 Pa
# Issue here was that (sum_criteria < 1e-7) was the check in the stability test result interpretation
# Fixed it by decreasing the tolerance 10x (1e-8)
res = flasher.flash(P=5475649.470049857+15, T=123.3+273.15, zs=zs)
assert_close1d(res.betas, [0.9999995457838572, 4.5421614280893863e-07], rtol=1e-4)
assert_close1d(res.gas.zs, [0.7058337751720506, 0.29416622482794935], rtol=1e-4)
assert_close1d(res.liquid0.zs, [0.49517964670906095, 0.504820353290939], rtol=1e-4)
# # In this case, the tolerance had to be decreased 10x more - to 1e-9! Triggered at a dP of 0.5
res = flasher.flash(P=5475649.470049857+0.5, T=123.3+273.15, zs=zs)
assert_close1d(res.betas, [0.999999984859061, 1.5140938947055815e-08], rtol=1e-4)
assert_close1d(res.gas.zs, [0.7058336826506021, 0.29416631734939785])
assert_close1d(res.liquid0.zs, [0.4951780663825745, 0.5048219336174254])
# # This one is too close to the border - the VF from SS is less than 0,
# # but if the tolerance is increased, it is positive (and should be)
res = flasher.flash(P=5475649.470049857+0.001, T=123.3+273.15, zs=zs)
assert_close1d(res.betas, [0.9999999999697144, 3.028555184414472e-11], rtol=3e-3)
assert_close1d(res.gas.zs, [0.7058336794959247, 0.29416632050407526])
assert_close1d(res.liquid0.zs, [0.49517801199759515, 0.5048219880024049])
# This one is presently identified as a LL... just check the number of phases
assert flasher.flash(zs=zs, P=6.615e6, T=386).phase_count == 2
def test_flash_TP_K_composition_independent_unhappiness():
constants = ChemicalConstantsPackage(Tcs=[508.1, 536.2, 512.5], Pcs=[4700000.0, 5330000.0, 8084000.0], omegas=[0.309, 0.21600000000000003, 0.5589999999999999],
MWs=[58.07914, 119.37764000000001, 32.04186], CASs=['67-64-1', '67-66-3', '67-56-1'], names=['acetone', 'chloroform', 'methanol'])
HeatCapacityGases = [HeatCapacityGas(poly_fit=(200.0, 1000.0, [-1.3320002425347943e-21, 6.4063345232664645e-18, -1.251025808150141e-14, 1.2265314167534311e-11, -5.535306305509636e-09, -4.32538332013644e-08, 0.0010438724775716248, -0.19650919978971002, 63.84239495676709])),
HeatCapacityGas(poly_fit=(200.0, 1000.0, [1.5389278550737367e-21, -8.289631533963465e-18, 1.9149760160518977e-14, -2.470836671137373e-11, 1.9355882067011222e-08, -9.265600540761629e-06, 0.0024825718663005762, -0.21617464276832307, 48.149539665907696])),
HeatCapacityGas(poly_fit=(50.0, 1000.0, [2.3511458696647882e-21, -9.223721411371584e-18, 1.3574178156001128e-14, -8.311274917169928e-12, 4.601738891380102e-10, 1.78316202142183e-06, -0.0007052056417063217, 0.13263597297874355, 28.44324970462924]))]
VolumeLiquids = [VolumeLiquid(poly_fit=(178.51, 498.1, [6.564241965071999e-23, -1.6568522275506375e-19, 1.800261692081815e-16, -1.0988731296761538e-13, 4.118691518070104e-11, -9.701938804617744e-09, 1.4022905458596618e-06, -0.00011362923883050033, 0.0040109650220160956])),
VolumeLiquid(poly_fit=(209.63, 509.5799999999999, [2.034047306563089e-23, -5.45567626310959e-20, 6.331811062990084e-17, -4.149759318710192e-14, 1.6788970104955462e-11, -4.291900093120011e-09, 6.769385838271721e-07, -6.0166473220815445e-05, 0.0023740769479069054])),
VolumeLiquid(poly_fit=(175.7, 502.5, [3.5725079384600736e-23, -9.031033742820083e-20, 9.819637959370411e-17, -5.993173551565636e-14, 2.2442465416964825e-11, -5.27776114586072e-09, 7.610461006178106e-07, -6.148574498547711e-05, 0.00216398089328537])),]
VaporPressures = [VaporPressure(exp_poly_fit=(178.51, 508.09000000000003, [-1.3233111115238975e-19, 4.2217134794609376e-16, -5.861832547132719e-13, 4.6488594950801467e-10, -2.3199079844570237e-07, 7.548290741523459e-05, -0.015966705328994194, 2.093003523977292, -125.39006100979816])),
VaporPressure(exp_poly_fit=(207.15, 536.4, [-8.714046553871422e-20, 2.910491615051279e-16, -4.2588796020294357e-13, 3.580003116042944e-10, -1.902612144361103e-07, 6.614096470077095e-05, -0.01494801055978542, 2.079082613726621, -130.24643185169472])),
VaporPressure(exp_poly_fit=(175.7, 512.49, [-1.446088049406911e-19, 4.565038519454878e-16, -6.278051259204248e-13, 4.935674274379539e-10, -2.443464113936029e-07, 7.893819658700523e-05, -0.016615779444332356, 2.1842496316772264, -134.19766175812708]))]
liquid = GibbsExcessLiquid(VaporPressures=VaporPressures, VolumeLiquids=VolumeLiquids,
HeatCapacityGases=HeatCapacityGases, use_Poynting=True,
use_phis_sat=False)
correlations = PropertyCorrelationsPackage(constants=constants, skip_missing=True, HeatCapacityGases=HeatCapacityGases,
VolumeLiquids=VolumeLiquids, VaporPressures=VaporPressures)
T, P = 350.0, 1e6
zs = [0.2, 0.0, 0.8]
eos_kwargs = {'Pcs': constants.Pcs, 'Tcs': constants.Tcs, 'omegas':constants.omegas}
gas = IdealGas(HeatCapacityGases=HeatCapacityGases, T=T, P=P, zs=zs)
flashN = FlashVLN(constants, correlations, liquids=[liquid], gas=gas)
    # Low T - all Ks under 1
res = flashN.flash(T=T, P=P, zs=zs)
assert_close(res.rho_mass(), 733.1047159397776)
assert 1 == res.phase_count
assert res.liquid0 is not None
    # High T - all Ks above 1
res = flashN.flash(T=430, P=1e4, zs=zs)
assert 1 == res.phase_count
assert res.gas is not None
assert_close(res.rho_mass(), 0.10418751067559757)
# One K value is under 1, rest are above - but that component has mole frac of zero
res = flashN.flash(T=420, P=1e4, zs=zs)
assert 1 == res.phase_count
assert res.gas is not None
    # phis_sat for liquids was broken, breaking this calculation
res = flashN.flash(T=285.5, P=1e4, zs=zs)
assert_close1d(res.betas, [0.21860038882559643, 0.7813996111744036])
assert res.phase_count == 2
    # Two cases where RR was working on Ks less than 1 and coming up with a made-up VF
# Need to check Ks first
res = flashN.flash(T=300.0000, P=900000.0000, zs=[0.5, 0.1, 0.4, 0.0],)
assert 1 == res.phase_count
assert res.gas is None
res = flashN.flash(T=300.0000, P=900000.0000, zs=[.5, 0, 0, .5])
assert 1 == res.phase_count
assert res.gas is None
def test_flash_combustion_products():
P = 1e5
T = 794.5305048838037
zs = [0.5939849621247668, 0.112781954982051, 0.0676691730155464, 0.2255639098776358]
constants = ChemicalConstantsPackage(atomss=[{'N': 2}, {'C': 1, 'O': 2}, {'O': 2}, {'H': 2, 'O': 1}], CASs=['7727-37-9', '124-38-9', '7782-44-7', '7732-18-5'], MWs=[28.0134, 44.0095, 31.9988, 18.01528], names=['nitrogen', 'carbon dioxide', 'oxygen', 'water'], omegas=[0.04, 0.2252, 0.021, 0.344], Pcs=[3394387.5, 7376460.0, 5042945.25, 22048320.0], Tbs=[77.355, 194.67, 90.18799999999999, 373.124], Tcs=[126.2, 304.2, 154.58, 647.14], Tms=[63.15, 216.65, 54.36, 273.15])
correlations = PropertyCorrelationsPackage(constants=constants, skip_missing=True,
HeatCapacityGases=[HeatCapacityGas(poly_fit=(50.0, 1000.0, [-6.496329615255804e-23, 2.1505678500404716e-19, -2.2204849352453665e-16, 1.7454757436517406e-14, 9.796496485269412e-11, -4.7671178529502835e-08, 8.384926355629239e-06, -0.0005955479316119903, 29.114778709934264])),
HeatCapacityGas(poly_fit=(50.0, 1000.0, [-3.1115474168865828e-21, 1.39156078498805e-17, -2.5430881416264243e-14, 2.4175307893014295e-11, -1.2437314771044867e-08, 3.1251954264658904e-06, -0.00021220221928610925, 0.000884685506352987, 29.266811602924644])),
HeatCapacityGas(poly_fit=(50.0, 1000.0, [7.682842888382947e-22, -3.3797331490434755e-18, 6.036320672021355e-15, -5.560319277907492e-12, 2.7591871443240986e-09, -7.058034933954475e-07, 9.350023770249747e-05, -0.005794412013028436, 29.229215579932934])),
HeatCapacityGas(poly_fit=(50.0, 1000.0, [5.543665000518528e-22, -2.403756749600872e-18, 4.2166477594350336e-15, -3.7965208514613565e-12, 1.823547122838406e-09, -4.3747690853614695e-07, 5.437938301211039e-05, -0.003220061088723078, 33.32731489750759]))])
kijs = [[0.0, -0.0122, -0.0159, 0.0], [-0.0122, 0.0, 0.0, 0.0952], [-0.0159, 0.0, 0.0, 0.0], [0.0, 0.0952, 0.0, 0.0]]
eos_kwargs = {'Pcs': constants.Pcs, 'Tcs': constants.Tcs, 'omegas': constants.omegas, 'kijs': kijs}
gas = CEOSGas(PRMIX, eos_kwargs, HeatCapacityGases=correlations.HeatCapacityGases, T=T, P=P, zs=zs)
liq = CEOSLiquid(PRMIX, eos_kwargs, HeatCapacityGases=correlations.HeatCapacityGases, T=T, P=P, zs=zs)
flasher = FlashVL(constants, correlations, liquid=liq, gas=gas)
res = flasher.flash(T=T, P=P, zs=zs)
assert res.gas
assert res.phase == 'V'
def test_bubble_T_PR_VL():
# Last point at 8e6 Pa not yet found.
constants = ChemicalConstantsPackage(CASs=['124-38-9', '110-54-3'], MWs=[44.0095, 86.17536], names=['carbon dioxide', 'hexane'], omegas=[0.2252, 0.2975], Pcs=[7376460.0, 3025000.0], Tbs=[194.67, 341.87], Tcs=[304.2, 507.6], Tms=[216.65, 178.075])
correlations = PropertyCorrelationsPackage(constants=constants, skip_missing=True,
HeatCapacityGases=[HeatCapacityGas(poly_fit=(50.0, 1000.0, [-3.1115474168865828e-21, 1.39156078498805e-17, -2.5430881416264243e-14, 2.4175307893014295e-11, -1.2437314771044867e-08, 3.1251954264658904e-06, -0.00021220221928610925, 0.000884685506352987, 29.266811602924644])),
HeatCapacityGas(poly_fit=(200.0, 1000.0, [1.3740654453881647e-21, -8.344496203280677e-18, 2.2354782954548568e-14, -3.4659555330048226e-11, 3.410703030634579e-08, -2.1693611029230923e-05, 0.008373280796376588, -1.356180511425385, 175.67091124888998]))])
zs = [.5, .5]
T = 300.0
P = 1e6
eos_kwargs = {'Pcs': constants.Pcs, 'Tcs': constants.Tcs, 'omegas': constants.omegas}
gas = CEOSGas(PRMIX, eos_kwargs, HeatCapacityGases=correlations.HeatCapacityGases, T=T, P=P, zs=zs)
liq = CEOSLiquid(PRMIX, eos_kwargs, HeatCapacityGases=correlations.HeatCapacityGases, T=T, P=P, zs=zs)
flasher = FlashVL(constants, correlations, liquid=liq, gas=gas)
res = flasher.flash(P=7.93e6, VF=0, zs=zs)
assert_close(res.T, 419.0621213529388, rtol=1e-6)
def test_PR_four_bubble_dew_cases_VL():
zs=[.5, .5]
T=300.0
P=1E6
constants = ChemicalConstantsPackage(CASs=['98-01-1', '98-00-0'], MWs=[96.08406000000001, 98.09994], names=['2-furaldehyde', 'furfuryl alcohol'], omegas=[0.4522, 0.7340000000000001], Pcs=[5510000.0, 5350000.0], Tbs=[434.65, 441.15], Tcs=[670.0, 632.0], Tms=[235.9, 250.35])
correlations = PropertyCorrelationsPackage(constants=constants, skip_missing=True,
HeatCapacityGases=[HeatCapacityGas(poly_fit=(298, 1000, [4.245751608816354e-21, -2.470461837781697e-17, 6.221823690784335e-14, -8.847967216702641e-11, 7.749899297737877e-08, -4.250059888737765e-05, 0.013882452355067994, -2.1404621487165327, 185.84988012691903])),
HeatCapacityGas(poly_fit=(250.35, 632.0, [-9.534610090167143e-20, 3.4583416772306854e-16, -5.304513883184021e-13, 4.410937690059558e-10, -2.0905505018557675e-07, 5.20661895325169e-05, -0.004134468659764938, -0.3746374641720497, 114.90130267531933]))])
eos_kwargs = {'Pcs': constants.Pcs, 'Tcs': constants.Tcs, 'omegas': constants.omegas}
gas = CEOSGas(PRMIX, eos_kwargs, HeatCapacityGases=correlations.HeatCapacityGases, T=T, P=P, zs=zs)
liq = CEOSLiquid(PRMIX, eos_kwargs, HeatCapacityGases=correlations.HeatCapacityGases, T=T, P=P, zs=zs)
flasher = FlashVL(constants, correlations, liquid=liq, gas=gas)
assert_close(flasher.flash(P=1e6, VF=0, zs=zs).T, 539.1838522423529, rtol=1e-6)
assert_close(flasher.flash(P=1e6, VF=1, zs=zs).T, 540.2081697501809, rtol=1e-6)
assert_close(flasher.flash(T=600.0, VF=0, zs=zs).P, 2766476.7473238464, rtol=1e-6)
assert_close(flasher.flash(T=600.0, VF=1, zs=zs).P, 2702616.6490743402, rtol=1e-6)
def test_C1_C10_PT_flash_VL():
IDs = ['methane', 'C2', 'C3', 'C4', 'C5', 'C6', 'C7', 'C8', 'C9', 'C10']
zs=[.1]*10
T=300.0
P=1E5
constants = ChemicalConstantsPackage(CASs=['74-82-8', '74-84-0', '74-98-6', '106-97-8', '109-66-0', '110-54-3', '142-82-5', '111-65-9', '111-84-2', '124-18-5'], MWs=[16.04246, 30.06904, 44.09562, 58.1222, 72.14878, 86.17536, 100.20194000000001, 114.22852, 128.2551, 142.28168], names=['methane', 'ethane', 'propane', 'butane', 'pentane', 'hexane', 'heptane', 'octane', 'nonane', 'decane'], omegas=[0.008, 0.098, 0.152, 0.193, 0.251, 0.2975, 0.3457, 0.39399999999999996, 0.444, 0.49], Pcs=[4599000.0, 4872000.0, 4248000.0, 3796000.0, 3370000.0, 3025000.0, 2740000.0, 2490000.0, 2290000.0, 2110000.0], Tbs=[111.65, 184.55, 231.04, 272.65, 309.21, 341.87, 371.53, 398.77, 423.95, 447.25], Tcs=[190.56400000000002, 305.32, 369.83, 425.12, 469.7, 507.6, 540.2, 568.7, 594.6, 611.7], Tms=[90.75, 90.3, 85.5, 135.05, 143.15, 178.075, 182.15, 216.3, 219.9, 243.225])
correlations = PropertyCorrelationsPackage(constants=constants, skip_missing=True,
HeatCapacityGases=[HeatCapacityGas(poly_fit=(50.0, 1000.0, [6.7703235945157e-22, -2.496905487234175e-18, 3.141019468969792e-15, -8.82689677472949e-13, -1.3709202525543862e-09, 1.232839237674241e-06, -0.0002832018460361874, 0.022944239587055416, 32.67333514157593])),
HeatCapacityGas(poly_fit=(50.0, 1000.0, [7.115386645067898e-21, -3.2034776773408394e-17, 5.957592282542187e-14, -5.91169369931607e-11, 3.391209091071677e-08, -1.158730780040934e-05, 0.002409311277400987, -0.18906638711444712, 37.94602410497228])),
HeatCapacityGas(poly_fit=(50.0, 1000.0, [7.008452174279456e-22, -1.7927920989992578e-18, 1.1218415948991092e-17, 4.23924157032547e-12, -5.279987063309569e-09, 2.5119646468572195e-06, -0.0004080663744697597, 0.1659704314379956, 26.107282495650367])),
HeatCapacityGas(poly_fit=(200.0, 1000.0, [-2.608494166540452e-21, 1.3127902917979555e-17, -2.7500977814441112e-14, 3.0563338307642794e-11, -1.866070373718589e-08, 5.4505831355984375e-06, -0.00024022110003950325, 0.04007078628096955, 55.70646822218319])),
HeatCapacityGas(poly_fit=(200.0, 1000.0, [7.537198394065234e-22, -4.946850205122326e-18, 1.4223747507170372e-14, -2.3451318313798008e-11, 2.4271676873997662e-08, -1.6055220805830093e-05, 0.006379734000450042, -1.0360272314628292, 141.84695243411866])),
HeatCapacityGas(poly_fit=(200.0, 1000.0, [1.3740654453881647e-21, -8.344496203280677e-18, 2.2354782954548568e-14, -3.4659555330048226e-11, 3.410703030634579e-08, -2.1693611029230923e-05, 0.008373280796376588, -1.356180511425385, 175.67091124888998])),
HeatCapacityGas(poly_fit=(200.0, 1000.0, [-1.4046935863496273e-21, 5.8024177500786575e-18, -7.977871529098155e-15, 7.331444047402207e-13, 9.954400606484495e-09, -1.2112107913343475e-05, 0.0062964696142858104, -1.0843106737278825, 173.87692850911935])),
HeatCapacityGas(poly_fit=(200.0, 1000.0, [-1.069661592422583e-22, -1.2992882995593864e-18, 8.808066659263286e-15, -2.1690080247294972e-11, 2.8519221306107026e-08, -2.187775092823544e-05, 0.009432620102532702, -1.5719488702446165, 217.60587499269303])),
HeatCapacityGas(poly_fit=(200.0, 1000.0, [6.513870466670624e-22, -5.318305817618858e-18, 1.8015815307749625e-14, -3.370046452151828e-11, 3.840755097595374e-08, -2.7203677889897072e-05, 0.011224516822410626, -1.842793858054514, 247.3628627781443])),
HeatCapacityGas(poly_fit=(200.0, 1000.0, [-1.702672546011891e-21, 6.6751002084997075e-18, -7.624102919104147e-15, -4.071140876082743e-12, 1.863822577724324e-08, -1.9741705032236747e-05, 0.009781408958916831, -1.6762677829939379, 252.8975930305735]))])
eos_kwargs = {'Pcs': constants.Pcs, 'Tcs': constants.Tcs, 'omegas': constants.omegas}
gas = CEOSGas(PRMIX, eos_kwargs, HeatCapacityGases=correlations.HeatCapacityGases, T=T, P=P, zs=zs)
liq = CEOSLiquid(PRMIX, eos_kwargs, HeatCapacityGases=correlations.HeatCapacityGases, T=T, P=P, zs=zs)
flasher = FlashVL(constants, correlations, liquid=liq, gas=gas)
res = flasher.flash(T=T, P=P, zs=zs)
assert_close(res.VF, 0.3933480634014041, rtol=1e-5)
def test_combustion_products():
from chemicals.combustion import fuel_air_spec_solver
IDs = ['methane', 'carbon dioxide', 'ethane', 'propane',
'isobutane', 'butane', '2-methylbutane', 'pentane',
'hexane', 'nitrogen', 'oxygen', 'water']
T = C2K(15)
P = 1e5
zs_fuel = [0.9652228316853225, 0.0059558310220860665, 0.018185509193506685, 0.004595963476244076,
0.0009769695915451998, 0.001006970610302194, 0.000472984762445398, 0.0003239924667435125,
0.0006639799746946288, 0.002594967217109564, 0.0, 0.0]
zs_fuel = normalize(zs_fuel)
zs_air = [0.0]*9 + [0.79, 0.21] + [0.0]
constants, properties = ChemicalConstantsPackage.from_IDs(IDs)
combustion = fuel_air_spec_solver(zs_air=zs_air, zs_fuel=zs_fuel, CASs=constants.CASs,
atomss=constants.atomss, n_fuel=1.0, O2_excess=0.1)
zs = combustion['zs_out']
eos_kwargs = {'Pcs': constants.Pcs, 'Tcs': constants.Tcs, 'omegas': constants.omegas}
gas = CEOSGas(PRMIX, eos_kwargs, T=T, P=P, zs=zs, HeatCapacityGases=properties.HeatCapacityGases)
liquid = CEOSLiquid(PRMIX, eos_kwargs, T=T, P=P, zs=zs, HeatCapacityGases=properties.HeatCapacityGases)
flasher = FlashVL(constants, properties, liquid=liquid, gas=gas)
res = flasher.flash(T=400.0, P=1e5, zs=zs)
assert res.phase_count == 1
assert res.gas is not None
def test_furfuryl_alcohol_high_TP():
# Legacy bug, don't even remember what the original issue was
constants = ChemicalConstantsPackage(MWs=[98.09994, 18.01528], Tcs=[632.0, 647.14], Pcs=[5350000.0, 22048320.0], omegas=[0.734, 0.344], names=['furfuryl alcohol', 'water'], CASs=['98-00-0', '7732-18-5'])
correlations = PropertyCorrelationsPackage(constants=constants, skip_missing=True,
HeatCapacityGases=[HeatCapacityGas(load_data=False, poly_fit=(250.35, 632.0, [-9.534610090167143e-20, 3.4583416772306854e-16, -5.304513883184021e-13, 4.410937690059558e-10, -2.0905505018557675e-07, 5.20661895325169e-05, -0.004134468659764938, -0.3746374641720497, 114.90130267531933])),
HeatCapacityGas(load_data=False, poly_fit=(50.0, 1000.0, [5.543665000518528e-22, -2.403756749600872e-18, 4.2166477594350336e-15, -3.7965208514613565e-12, 1.823547122838406e-09, -4.3747690853614695e-07, 5.437938301211039e-05, -0.003220061088723078, 33.32731489750759]))])
eos_kwargs = dict(Tcs=constants.Tcs, Pcs=constants.Pcs, omegas=constants.omegas)
zs = [0.4444445555555555, 1-0.4444445555555555]
T, P = 5774.577777777778, 220483199.99999997
gas = CEOSGas(eos_class=PRMIX, eos_kwargs=eos_kwargs, T=T, P=P, zs=zs, HeatCapacityGases=correlations.HeatCapacityGases)
liquid = CEOSLiquid(eos_class=PRMIX, eos_kwargs=eos_kwargs, T=T, P=P, zs=zs, HeatCapacityGases=correlations.HeatCapacityGases)
flasher = FlashVL(constants, correlations, liquid=liquid, gas=gas)
assert_close(flasher.flash(T=T, P=P, zs=zs).rho_mass(), 227.52709151903954)
def test_flash_GibbsExcessLiquid_ideal_Psat():
# Binary water-ethanol
T = 230.0
P = 1e5
zs = [.4, .6]
MWs = [18.01528, 46.06844]
Tcs = [647.086, 514.7]
Pcs = [22048320.0, 6137000.0]
omegas = [0.344, 0.635]
VaporPressures = [VaporPressure(extrapolation='DIPPR101_ABC|DIPPR101_ABC', exp_poly_fit=(273.17, 647.086, [-2.8478502840358144e-21, 1.7295186670575222e-17, -4.034229148562168e-14, 5.0588958391215855e-11, -3.861625996277003e-08, 1.886271475957639e-05, -0.005928371869421494, 1.1494956887882308, -96.74302379151317])),
VaporPressure(extrapolation='DIPPR101_ABC|DIPPR101_ABC', exp_poly_fit=(159.11, 514.7, [-2.3617526481119e-19, 7.318686894378096e-16, -9.835941684445551e-13, 7.518263303343784e-10, -3.598426432676194e-07, 0.00011171481063640762, -0.022458952185007635, 2.802615041941912, -166.43524219017118]))]
HeatCapacityGases = [HeatCapacityGas(poly_fit=(50.0, 1000.0, [5.543665000518528e-22, -2.403756749600872e-18, 4.2166477594350336e-15, -3.7965208514613565e-12, 1.823547122838406e-09, -4.3747690853614695e-07, 5.437938301211039e-05, -0.003220061088723078, 33.32731489750759])),
HeatCapacityGas(poly_fit=(50.0, 1000.0, [-1.162767978165682e-20, 5.4975285700787494e-17, -1.0861242757337942e-13, 1.1582703354362728e-10, -7.160627710867427e-08, 2.5392014654765875e-05, -0.004732593693568646, 0.5072291035198603, 20.037826650765965]))]
VolumeLiquids = [VolumeLiquid(poly_fit=(273.17, 637.096, [9.00307261049824e-24, -3.097008950027417e-20, 4.608271228765265e-17, -3.8726692841874345e-14, 2.0099220218891486e-11, -6.596204729785676e-09, 1.3368112879131157e-06, -0.00015298762503607717, 0.007589247005014652]),
Psat=VaporPressures[0], Tc=Tcs[0], Pc=Pcs[0], omega=omegas[0]),
VolumeLiquid(poly_fit=(159.11, 504.71000000000004, [5.388587987308587e-23, -1.331077476340645e-19, 1.4083880805283782e-16, -8.327187308842775e-14, 3.006387047487587e-11, -6.781931902982022e-09, 9.331209920256822e-07, -7.153268618320437e-05, 0.0023871634205665524]),
Psat=VaporPressures[1], Tc=Tcs[1], Pc=Pcs[1], omega=omegas[1])]
EnthalpyVaporizations = [EnthalpyVaporization(Tc=647.14, poly_fit_ln_tau=(273.17, 647.095, 647.14, [0.010220675607316746, 0.5442323619614213, 11.013674729940819, 110.72478547661254, 591.3170172192005, 1716.4863395285283, 4063.5975524922624, 17960.502354189244, 53916.28280689388])),
EnthalpyVaporization(Tc=514.0, poly_fit_ln_tau=(159.11, 513.9999486, 514.0, [-0.002197958699297133, -0.1583773493009195, -4.716256555877727, -74.79765793302774, -675.8449382004112, -3387.5058752252276, -7531.327682252346, 5111.75264050548, 50774.16034043739]))]
constants = ChemicalConstantsPackage(Tcs=Tcs, Pcs=Pcs, omegas=omegas, MWs=MWs, CASs=['7732-18-5', '64-17-5'])
correlations = PropertyCorrelationsPackage(constants, HeatCapacityGases=HeatCapacityGases, EnthalpyVaporizations=EnthalpyVaporizations,
VolumeLiquids=VolumeLiquids, VaporPressures=VaporPressures, skip_missing=True)
liquid = GibbsExcessLiquid(VaporPressures=VaporPressures,
HeatCapacityGases=HeatCapacityGases,
VolumeLiquids=VolumeLiquids,
EnthalpyVaporizations=EnthalpyVaporizations,
caloric_basis='Psat', equilibrium_basis='Psat',
T=T, P=P, zs=zs)
gas = IdealGas(T=T, P=P, zs=zs, HeatCapacityGases=HeatCapacityGases)
flasher = FlashVL(constants, correlations, liquid=liquid, gas=gas)
# All points were missing because G_dep was missing
res = flasher.flash(T=300, P=1e5, zs=zs)
assert res.liquid_count == 1
# Failing when two K values were under 1e-10
res = flasher.flash(T=100, P=1e5, zs=zs)
assert res.phase_count == 1
assert res.liquid_count == 1
    # Wilson guesses are hard zeros
res = flasher.flash(T=5, P=1e5, zs=zs)
assert res.phase_count == 1
assert res.liquid_count == 1
# Wilson guesses inf, nan, and all zero
res = flasher.flash(T=6.2, P=5e4, zs=zs)
assert res.phase_count == 1
assert res.liquid_count == 1
# One (but not both) fugacity became zero
res = flasher.flash(T=8.4, P=1e-5, zs=zs)
assert res.phase_count == 1
assert res.liquid_count == 1
# Vapor fraction flashes
for VF_value in (0.0, 1e-5, .3, .5, .7, 1-1e-5, 1.0):
VF = flasher.flash(T=T, VF=VF_value, zs=zs)
check = flasher.flash(T=T, P=VF.P, zs=zs)
assert_close(VF.VF, check.VF, rtol=1e-9)
    # Not exactly sure where the numerical challenge is occurring, but this is to be expected.
# The tolerance decays at very small numbers
for VF_value in (1e-7, 1e-8, 1-1e-7, 1-1e-8):
VF = flasher.flash(T=T, VF=VF_value, zs=zs)
check = flasher.flash(T=T, P=VF.P, zs=zs)
assert_close(VF.VF, check.VF, rtol=1e-5)
def test_flash_GibbsExcessLiquid_ideal_PsatPoynting():
# Binary water-ethanol
T = 230.0
P = 1e5
zs = [.4, .6]
MWs = [18.01528, 46.06844]
Tcs = [647.086, 514.7]
Pcs = [22048320.0, 6137000.0]
omegas = [0.344, 0.635]
VaporPressures = [VaporPressure(exp_poly_fit=(273.17, 647.086, [-2.8478502840358144e-21, 1.7295186670575222e-17, -4.034229148562168e-14, 5.0588958391215855e-11, -3.861625996277003e-08, 1.886271475957639e-05, -0.005928371869421494, 1.1494956887882308, -96.74302379151317])),
VaporPressure(exp_poly_fit=(159.11, 514.7, [-2.3617526481119e-19, 7.318686894378096e-16, -9.835941684445551e-13, 7.518263303343784e-10, -3.598426432676194e-07, 0.00011171481063640762, -0.022458952185007635, 2.802615041941912, -166.43524219017118]))]
HeatCapacityGases = [HeatCapacityGas(poly_fit=(50.0, 1000.0, [5.543665000518528e-22, -2.403756749600872e-18, 4.2166477594350336e-15, -3.7965208514613565e-12, 1.823547122838406e-09, -4.3747690853614695e-07, 5.437938301211039e-05, -0.003220061088723078, 33.32731489750759])),
HeatCapacityGas(poly_fit=(50.0, 1000.0, [-1.162767978165682e-20, 5.4975285700787494e-17, -1.0861242757337942e-13, 1.1582703354362728e-10, -7.160627710867427e-08, 2.5392014654765875e-05, -0.004732593693568646, 0.5072291035198603, 20.037826650765965]))]
VolumeLiquids = [VolumeLiquid(poly_fit=(273.17, 637.096, [9.00307261049824e-24, -3.097008950027417e-20, 4.608271228765265e-17, -3.8726692841874345e-14, 2.0099220218891486e-11, -6.596204729785676e-09, 1.3368112879131157e-06, -0.00015298762503607717, 0.007589247005014652]),
Psat=VaporPressures[0], Tc=Tcs[0], Pc=Pcs[0], omega=omegas[0]),
VolumeLiquid(poly_fit=(159.11, 504.71000000000004, [5.388587987308587e-23, -1.331077476340645e-19, 1.4083880805283782e-16, -8.327187308842775e-14, 3.006387047487587e-11, -6.781931902982022e-09, 9.331209920256822e-07, -7.153268618320437e-05, 0.0023871634205665524]),
Psat=VaporPressures[1], Tc=Tcs[1], Pc=Pcs[1], omega=omegas[1])]
EnthalpyVaporizations = [EnthalpyVaporization(Tc=647.14, poly_fit_ln_tau=(273.17, 647.095, 647.14, [0.010220675607316746, 0.5442323619614213, 11.013674729940819, 110.72478547661254, 591.3170172192005, 1716.4863395285283, 4063.5975524922624, 17960.502354189244, 53916.28280689388])),
EnthalpyVaporization(Tc=514.0, poly_fit_ln_tau=(159.11, 513.9999486, 514.0, [-0.002197958699297133, -0.1583773493009195, -4.716256555877727, -74.79765793302774, -675.8449382004112, -3387.5058752252276, -7531.327682252346, 5111.75264050548, 50774.16034043739]))]
constants = ChemicalConstantsPackage(Tcs=Tcs, Pcs=Pcs, omegas=omegas, MWs=MWs, CASs=['7732-18-5', '64-17-5'])
correlations = PropertyCorrelationsPackage(constants, HeatCapacityGases=HeatCapacityGases, EnthalpyVaporizations=EnthalpyVaporizations,
VolumeLiquids=VolumeLiquids, VaporPressures=VaporPressures, skip_missing=True)
eoss = [PR(Tc=Tcs[0], Pc=Pcs[0], omega=omegas[0], T=T, P=P),
PR(Tc=Tcs[1], Pc=Pcs[1], omega=omegas[1], T=T, P=P)]
liquid = GibbsExcessLiquid(VaporPressures=VaporPressures,
HeatCapacityGases=HeatCapacityGases,
VolumeLiquids=VolumeLiquids,
EnthalpyVaporizations=EnthalpyVaporizations,
caloric_basis='PhiSat', equilibrium_basis='PhiSat',
eos_pure_instances=eoss,
T=T, P=P, zs=zs)
gas = IdealGas(T=T, P=P, zs=zs, HeatCapacityGases=HeatCapacityGases)
flasher = FlashVL(constants, correlations, liquid=liquid, gas=gas)
    # This was failing in PyPy for a while, but not in CPython
res = flasher.flash(T=15, P=1e5, zs=zs)
assert res.phase_count == 1
assert res.liquid_count == 1
| [((232, 8, 232, 15), 'fluids.core.C2K', 'C2K', ({(232, 12, 232, 14): '15'}, {}), '(15)', False, 'from fluids.core import C2K\n'), ((241, 17, 242, 89), 'chemicals.combustion.fuel_air_spec_solver', 'fuel_air_spec_solver', (), '', False, 'from chemicals.combustion import fuel_air_spec_solver\n')] |
YunMeMeThaw/python_exercises | ex38.py | 151d5d3695d578059611ac09c94b3677442197d7 | ten_things = "Apples Oranges cows Telephone Light Sugar"
print ("Wait there are not 10 things in that list. Let's fix")
stuff = ten_things.split(' ')
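# NOTE: more_stuff is kept as a list (not a set) so pop() removes items in a predictable order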
more_stuff = {"Day", "Night", "Song", "Firebee",
"Corn", "Banana", "Girl", "Boy"}
while len(stuff) != 10:
next_one = more_stuff.pop()
print("Adding: ", next_one)
stuff.append(next_one)
print (f"There are {len(stuff)} items n ow.")
print ("There we go : ", stuff)
print ("Let's do some things with stuff.")
print (stuff[1])
print (stuff[-1]) # whoa! cool!
print (stuff.pop())
print (' '.join(stuff)) # what? cool!
print ('#'.join(stuff[3:5])) # super stellar!
| [] |
player1537-forks/spack | var/spack/repos/builtin/packages/diffmark/package.py | 822b7632222ec5a91dc7b7cda5fc0e08715bd47c | # Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Diffmark(AutotoolsPackage):
"""Diffmark is a DSL for transforming one string to another."""
homepage = "https://github.com/vbar/diffmark"
git = "https://github.com/vbar/diffmark.git"
version('master', branch='master')
depends_on('autoconf', type='build')
depends_on('automake', type='build')
depends_on('libtool', type='build')
depends_on('m4', type='build')
depends_on('pkgconfig', type='build')
depends_on('libxml2')
| [] |
ZhangHCFJEA/bbp | bbp/comps/irikura_gen_srf.py | 33bd999cf8d719c49f9a904872c62f02eb5850d1 | #!/usr/bin/env python
"""
Copyright 2010-2019 University Of Southern California
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import division, print_function
# Import Python modules
import os
import sys
import math
import shutil
# Import Broadband modules
import plot_srf
import bband_utils
from irikura_gen_srf_cfg import IrikuraGenSrfCfg
from install_cfg import InstallCfg
class IrikuraGenSrf(object):
"""
Implements Arben's gen_srf.csh script in Python
"""
def __init__(self, i_r_velmodel, i_r_srcfile,
o_r_srffile, i_vmodel_name, sim_id=0,
**kwargs):
self.sim_id = sim_id
self.r_velmodel = i_r_velmodel
self.r_srcfile = i_r_srcfile
self.r_srffile = o_r_srffile
self.vmodel_name = i_vmodel_name
self.r_srcfiles = []
# Get all src files that were passed to us
if kwargs is not None and len(kwargs) > 0:
for idx in range(len(kwargs)):
self.r_srcfiles.append(kwargs['src%d' % (idx)])
else:
# Not a multisegment run, just use the single src file
self.r_srcfiles.append(i_r_srcfile)
def run(self):
"""
        This function prepares the parameters for Irikura's gen_srf and then calls it
"""
print("IrikuraGenSrf".center(80, '-'))
# Load configuration, set sim_id
install = InstallCfg.getInstance()
sim_id = self.sim_id
# Build directory paths
a_tmpdir = os.path.join(install.A_TMP_DATA_DIR, str(sim_id))
a_indir = os.path.join(install.A_IN_DATA_DIR, str(sim_id))
a_outdir = os.path.join(install.A_OUT_DATA_DIR, str(sim_id))
a_logdir = os.path.join(install.A_OUT_LOG_DIR, str(sim_id))
a_param_outdir = os.path.join(a_outdir, "param_files")
# Make sure the output and tmp directories exist
bband_utils.mkdirs([a_tmpdir, a_indir, a_outdir,
a_logdir, a_param_outdir])
# Now, file paths
self.log = os.path.join(a_logdir, "%d.gen_srf.log" % (sim_id))
a_srcfiles = [os.path.join(a_indir,
srcfile) for srcfile in self.r_srcfiles]
# Read src file
cfg = IrikuraGenSrfCfg(a_srcfiles)
# Define location of input velocity model and output srf file
if cfg.num_srcfiles > 1:
a_srffile = os.path.join(a_tmpdir, self.r_srffile)
a_final_srffile = os.path.join(a_indir, self.r_srffile)
else:
a_srffile = os.path.join(a_indir, self.r_srffile)
a_velmod = os.path.join(install.A_IN_DATA_DIR, str(sim_id),
self.r_velmodel)
        # Run in tmpdir subdir to isolate temp Fortran files
# Save cwd, change back to it at the end
old_cwd = os.getcwd()
os.chdir(a_tmpdir)
# Read parameters from the src(s) file(s)
# The following parameters should be common to all SRC files
# So we just read from the first one
simulation_seed = int(cfg.CFGDICT[0]['seed'])
dip = cfg.CFGDICT[0]['dip']
rake = cfg.CFGDICT[0]['rake']
dlen = cfg.CFGDICT[0]['dlen']
dwid = cfg.CFGDICT[0]['dwid']
lon_top_center = cfg.CFGDICT[0]['lon_top_center']
lat_top_center = cfg.CFGDICT[0]['lat_top_center']
depth_to_top = cfg.CFGDICT[0]['depth_to_top']
if cfg.num_srcfiles > 1:
fault_len = cfg.CFGDICT[0]['max_fault_length']
else:
fault_len = cfg.CFGDICT[0]['fault_length']
fault_width = cfg.CFGDICT[0]['fault_width']
# Average strike of all SRC files
strike = 0.0
for segment in range(cfg.num_srcfiles):
strike = strike + cfg.CFGDICT[segment]['strike']
strike = math.ceil(strike / cfg.num_srcfiles)
# Hypocenter (down_dip is common to all src files)
hypo_down_dip = cfg.CFGDICT[0]['hypo_down_dip']
if cfg.num_srcfiles > 1:
hypo_along_stk = 0.0
for segment in range(cfg.num_srcfiles):
current_fault_len = cfg.CFGDICT[segment]['fault_length']
current_hypo_along_stk = cfg.CFGDICT[segment]['hypo_along_stk']
if abs(current_hypo_along_stk) <= current_fault_len:
# Hypocenter in this segment!
hypo_along_stk = hypo_along_stk + (current_fault_len / 2.0) + current_hypo_along_stk
break
else:
# Not here yet, just add the total length of this segment
hypo_along_stk = hypo_along_stk + current_fault_len
# Now convert hypo_along_stk so that 0.0 is the middle of the fault
hypo_along_stk = hypo_along_stk - (fault_len / 2.0)
else:
hypo_along_stk = cfg.CFGDICT[0]['hypo_along_stk']
#
# Run gen_srf code
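        # (the binary reads its parameters from standard input via a shell here-document)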
#
progstring = ("%s >> %s 2>&1 << END\n" %
(os.path.join(install.A_IRIKURA_BIN_DIR, cfg.GENSRF),
self.log) +
"%s\n" % a_srffile +
"%f %f %f %f %f\n" %
(fault_len, fault_width,
strike, dip, rake) +
"%f %f %f\n" %
(lon_top_center, lat_top_center, depth_to_top) +
"%f %f\n" % (dlen, dwid) +
"%f %f %f %f\n" %
(hypo_along_stk, hypo_down_dip,
cfg.DENS, cfg.VS) +
"%f\n" % (cfg.DT) +
"%d\n" % (simulation_seed) +
"%s\n" % (a_velmod) +
"%f\n" % (cfg.VEL_RUP_FRAC) +
"END")
bband_utils.runprog(progstring)
if cfg.num_srcfiles > 1:
# Assign the slip from the planar fault to each segment's SRF file
a_segs_file = os.path.join(a_tmpdir, "segments.midpoint.txt")
# Write segments' file
seg_file = open(a_segs_file, 'w')
seg_file.write("segm lon lat depth fleng fwidth shypo zhypo strike dip rake\n")
seg_file.write("%d\n" % (cfg.num_srcfiles))
total_length = 0.0
for segment in range(cfg.num_srcfiles):
if abs(cfg.CFGDICT[segment]['hypo_along_stk']) <= cfg.CFGDICT[segment]['fault_length']:
hypo_along_stk = cfg.CFGDICT[segment]['hypo_along_stk']
hypo_down_dip = cfg.CFGDICT[segment]['hypo_down_dip']
else:
hypo_along_stk = 999.0
hypo_down_dip = 999.0
seg_file.write("seg%d %.6f %.6f %.1f %.1f %.1f %.1f %.1f %.1f %d %d %d\n" %
(segment + 1,
cfg.CFGDICT[segment]['lon_top_center'],
cfg.CFGDICT[segment]['lat_top_center'],
cfg.CFGDICT[segment]['depth_to_top'],
total_length,
(total_length + cfg.CFGDICT[segment]['fault_length']),
cfg.CFGDICT[segment]['fault_width'],
hypo_along_stk, hypo_down_dip,
cfg.CFGDICT[segment]['strike'],
cfg.CFGDICT[segment]['dip'],
cfg.CFGDICT[segment]['rake']))
total_length = total_length + cfg.CFGDICT[segment]['fault_length']
seg_file.close()
#
# Run gen_srf_segment code
#
for segment in range(cfg.num_srcfiles):
progstring = ("%s >> %s 2>&1 << END\n" %
(os.path.join(install.A_IRIKURA_BIN_DIR,
cfg.GENSRFSEGMENT), self.log) +
".\n" +
"%s\n" % (self.r_srffile) +
"./segments.midpoint.txt\n" +
"%d\n" % (segment + 1) +
"%f %f\n" % (dlen, dwid) +
"END")
# Run code
bband_utils.runprog(progstring)
#
# Now add the segments together
#
progstring = ("%s >> %s 2>&1 << END\n" %
(os.path.join(install.A_IRIKURA_BIN_DIR,
cfg.SUMSEG), self.log) +
".\n" +
"%s\n" % (self.r_srffile) +
"./segments.midpoint.txt\n" +
"%d\n" % (cfg.num_srcfiles) +
"%f %f\n" % (dlen, dwid) +
"END")
# Run code
bband_utils.runprog(progstring)
# Copy file to final location
progstring = "cp %s %s" % (os.path.join(a_tmpdir,
"all_seg.%s" %
(self.r_srffile)),
a_final_srffile)
bband_utils.runprog(progstring)
# Use copied file from now on
a_srffile = a_final_srffile
# Restore working directory
os.chdir(old_cwd)
#
# Move results to outputfile
#
progstring = "cp %s %s" % (a_srffile,
os.path.join(a_tmpdir, self.r_srffile))
bband_utils.runprog(progstring)
progstring = "cp %s %s" % (a_srffile,
os.path.join(a_outdir, self.r_srffile))
bband_utils.runprog(progstring)
shutil.copy2(os.path.join(a_tmpdir, "stress_drop.out"),
os.path.join(a_param_outdir,
"stress_drop.out"))
# Plot SRF
plot_srf.run(self.r_srffile, sim_id=self.sim_id)
print("IrikuraGenSrf Completed".center(80, '-'))
if __name__ == "__main__":
print("Testing Module: %s" % os.path.basename((sys.argv[0])))
ME = IrikuraGenSrf(sys.argv[1], sys.argv[2], sys.argv[3],
sys.argv[4], sim_id=int(sys.argv[5]))
ME.run()
| [((61, 18, 61, 42), 'install_cfg.InstallCfg.getInstance', 'InstallCfg.getInstance', ({}, {}), '()', False, 'from install_cfg import InstallCfg\n'), ((69, 25, 69, 62), 'os.path.join', 'os.path.join', ({(69, 38, 69, 46): 'a_outdir', (69, 48, 69, 61): '"""param_files"""'}, {}), "(a_outdir, 'param_files')", False, 'import os\n'), ((72, 8, 73, 54), 'bband_utils.mkdirs', 'bband_utils.mkdirs', ({(72, 27, 73, 53): '[a_tmpdir, a_indir, a_outdir, a_logdir, a_param_outdir]'}, {}), '([a_tmpdir, a_indir, a_outdir, a_logdir, a_param_outdir])', False, 'import bband_utils\n'), ((76, 19, 76, 70), 'os.path.join', 'os.path.join', ({(76, 32, 76, 40): 'a_logdir', (76, 42, 76, 69): "'%d.gen_srf.log' % sim_id"}, {}), "(a_logdir, '%d.gen_srf.log' % sim_id)", False, 'import os\n'), ((81, 14, 81, 42), 'irikura_gen_srf_cfg.IrikuraGenSrfCfg', 'IrikuraGenSrfCfg', ({(81, 31, 81, 41): 'a_srcfiles'}, {}), '(a_srcfiles)', False, 'from irikura_gen_srf_cfg import IrikuraGenSrfCfg\n'), ((94, 18, 94, 29), 'os.getcwd', 'os.getcwd', ({}, {}), '()', False, 'import os\n'), ((95, 8, 95, 26), 'os.chdir', 'os.chdir', ({(95, 17, 95, 25): 'a_tmpdir'}, {}), '(a_tmpdir)', False, 'import os\n'), ((117, 17, 117, 53), 'math.ceil', 'math.ceil', ({(117, 27, 117, 52): 'strike / cfg.num_srcfiles'}, {}), '(strike / cfg.num_srcfiles)', False, 'import math\n'), ((157, 8, 157, 39), 'bband_utils.runprog', 'bband_utils.runprog', ({(157, 28, 157, 38): 'progstring'}, {}), '(progstring)', False, 'import bband_utils\n'), ((233, 8, 233, 25), 'os.chdir', 'os.chdir', ({(233, 17, 233, 24): 'old_cwd'}, {}), '(old_cwd)', False, 'import os\n'), ((240, 8, 240, 39), 'bband_utils.runprog', 'bband_utils.runprog', ({(240, 28, 240, 38): 'progstring'}, {}), '(progstring)', False, 'import bband_utils\n'), ((243, 8, 243, 39), 'bband_utils.runprog', 'bband_utils.runprog', ({(243, 28, 243, 38): 'progstring'}, {}), '(progstring)', False, 'import bband_utils\n'), ((250, 8, 250, 56), 'plot_srf.run', 'plot_srf.run', (), '', False, 'import plot_srf\n'), ((77, 22, 78, 43), 'os.path.join', 'os.path.join', ({(77, 35, 77, 42): 'a_indir', (78, 35, 78, 42): 'srcfile'}, {}), '(a_indir, srcfile)', False, 'import os\n'), ((85, 24, 85, 62), 'os.path.join', 'os.path.join', ({(85, 37, 85, 45): 'a_tmpdir', (85, 47, 85, 61): 'self.r_srffile'}, {}), '(a_tmpdir, self.r_srffile)', False, 'import os\n'), ((86, 30, 86, 67), 'os.path.join', 'os.path.join', ({(86, 43, 86, 50): 'a_indir', (86, 52, 86, 66): 'self.r_srffile'}, {}), '(a_indir, self.r_srffile)', False, 'import os\n'), ((88, 24, 88, 61), 'os.path.join', 'os.path.join', ({(88, 37, 88, 44): 'a_indir', (88, 46, 88, 60): 'self.r_srffile'}, {}), '(a_indir, self.r_srffile)', False, 'import os\n'), ((161, 26, 161, 73), 'os.path.join', 'os.path.join', ({(161, 39, 161, 47): 'a_tmpdir', (161, 49, 161, 72): '"""segments.midpoint.txt"""'}, {}), "(a_tmpdir, 'segments.midpoint.txt')", False, 'import os\n'), ((220, 12, 220, 43), 'bband_utils.runprog', 'bband_utils.runprog', ({(220, 32, 220, 42): 'progstring'}, {}), '(progstring)', False, 'import bband_utils\n'), ((227, 12, 227, 43), 'bband_utils.runprog', 'bband_utils.runprog', ({(227, 32, 227, 42): 'progstring'}, {}), '(progstring)', False, 'import bband_utils\n'), ((244, 21, 244, 62), 'os.path.join', 'os.path.join', ({(244, 34, 244, 42): 'a_tmpdir', (244, 44, 244, 61): '"""stress_drop.out"""'}, {}), "(a_tmpdir, 'stress_drop.out')", False, 'import os\n'), ((245, 21, 246, 52), 'os.path.join', 'os.path.join', ({(245, 34, 245, 48): 'a_param_outdir', (246, 34, 246, 51): '"""stress_drop.out"""'}, {}), "(a_param_outdir, 'stress_drop.out')", False, 'import os\n'), ((255, 33, 255, 64), 'os.path.basename', 'os.path.basename', ({(255, 51, 255, 62): 'sys.argv[0]'}, {}), '(sys.argv[0])', False, 'import os\n'), ((204, 16, 204, 47), 'bband_utils.runprog', 'bband_utils.runprog', ({(204, 36, 204, 46): 'progstring'}, {}), '(progstring)', False, 'import bband_utils\n'), ((239, 35, 239, 73), 'os.path.join', 'os.path.join', ({(239, 48, 239, 56): 'a_tmpdir', (239, 58, 239, 72): 'self.r_srffile'}, {}), '(a_tmpdir, self.r_srffile)', False, 'import os\n'), ((242, 35, 242, 73), 'os.path.join', 'os.path.join', ({(242, 48, 242, 56): 'a_outdir', (242, 58, 242, 72): 'self.r_srffile'}, {}), '(a_outdir, self.r_srffile)', False, 'import os\n'), ((223, 39, 225, 69), 'os.path.join', 'os.path.join', ({(223, 52, 223, 60): 'a_tmpdir', (224, 52, 225, 68): "('all_seg.%s' % self.r_srffile)"}, {}), "(a_tmpdir, 'all_seg.%s' % self.r_srffile)", False, 'import os\n'), ((210, 27, 211, 51), 'os.path.join', 'os.path.join', ({(210, 40, 210, 65): 'install.A_IRIKURA_BIN_DIR', (211, 40, 211, 50): 'cfg.SUMSEG'}, {}), '(install.A_IRIKURA_BIN_DIR, cfg.SUMSEG)', False, 'import os\n'), ((194, 31, 195, 62), 'os.path.join', 'os.path.join', ({(194, 44, 194, 69): 'install.A_IRIKURA_BIN_DIR', (195, 44, 195, 61): 'cfg.GENSRFSEGMENT'}, {}), '(install.A_IRIKURA_BIN_DIR, cfg.GENSRFSEGMENT)', False, 'import os\n'), ((140, 23, 140, 74), 'os.path.join', 'os.path.join', ({(140, 36, 140, 61): 'install.A_IRIKURA_BIN_DIR', (140, 63, 140, 73): 'cfg.GENSRF'}, {}), '(install.A_IRIKURA_BIN_DIR, cfg.GENSRF)', False, 'import os\n')]
EthanMarrs/digit2 | core/tests/test_models.py | 207569a3b7a61282a2d0bd5f354a837ad81ef55d | """test_models.py: runs tests on the models for digit."""
import pytest
from core.models import (Grade,
Subject,
Question,
Comment,
Option,
Topic,
Block,
Syllabus,
StateException,
)
from django.test import TestCase
from django.contrib.auth.models import User
class TestQuestion(TestCase):
"""Test the Question Model."""
def setUp(self):
"""Create questions for testing."""
grade_test = Grade(name="Grade Example")
grade_test.save()
subject_test = Subject(name="addition",
grade=grade_test)
subject_test.save()
question1 = Question(question_content='what is 1 + 1?',
answer_content='This is an addition question',
subject=subject_test)
question1.save()
def test_question_default_state(self):
"""Confirm that default state is Incomplete."""
question1 = Question.objects.all()[0]
assert(question1.state == question1.INCOMPLETE)
def test_question_state_from_incomplete(self):
"""Check that question state.
Confirm that state can only go from 'incomplete' to
'ready for review'.
"""
question1 = Question.objects.all()[0]
with pytest.raises(StateException) as exception_info:
question1.change_to_needs_reworking()
assert(exception_info.value.__str__() ==
"Incorrect state change. Current state is 0")
assert(question1.state == question1.INCOMPLETE)
with pytest.raises(StateException) as exception_info:
question1.change_to_complete()
assert(exception_info.value.__str__() ==
"Incorrect state change. Current state is 0")
assert(question1.state == question1.INCOMPLETE)
with pytest.raises(StateException) as exception_info:
question1.change_to_flagged()
assert(exception_info.value.__str__() ==
"Incorrect state change. Current state is 0")
assert(question1.state == question1.INCOMPLETE)
question1.change_to_review_ready()
assert(question1.state == question1.REVIEW_READY)
def test_question_state_from_ready_for_review(self):
"""Check that question state.
Confirm that state can only go from 'ready to review' to
'complete' or 'needs reworking'.
"""
question1 = Question.objects.all()[0]
question1.state = question1.REVIEW_READY
with pytest.raises(StateException) as exception_info:
question1.change_to_review_ready()
assert(exception_info.value.__str__() ==
"Incorrect state change. Current state is 1")
with pytest.raises(StateException) as exception_info:
question1.change_to_flagged()
assert(exception_info.value.__str__() ==
"Incorrect state change. Current state is 1")
assert(question1.state == question1.REVIEW_READY)
question1.change_to_complete()
assert(question1.state == question1.COMPLETE)
question1.state = question1.REVIEW_READY
question1.change_to_needs_reworking()
assert(question1.state == question1.NEEDS_REWORKING)
def test_question_state_from_needs_reworking(self):
"""Check that question state.
Confirm that state can only go from 'needs reworking' to
'ready for review'.
"""
question1 = Question.objects.all()[0]
question1.state = question1.NEEDS_REWORKING
with pytest.raises(StateException) as exception_info:
question1.change_to_needs_reworking()
assert(exception_info.value.__str__() ==
"Incorrect state change. Current state is 2")
assert(question1.state == question1.NEEDS_REWORKING)
with pytest.raises(StateException) as exception_info:
question1.change_to_complete()
assert(exception_info.value.__str__() ==
"Incorrect state change. Current state is 2")
assert(question1.state == question1.NEEDS_REWORKING)
with pytest.raises(StateException) as exception_info:
question1.change_to_flagged()
assert(exception_info.value.__str__() ==
"Incorrect state change. Current state is 2")
assert(question1.state == question1.NEEDS_REWORKING)
question1.change_to_review_ready()
assert(question1.state == question1.REVIEW_READY)
def test_question_state_from_complete(self):
"""Check that question state.
Confirm that state can only go from 'complete' to
'flagged for review'.
"""
question1 = Question.objects.all()[0]
question1.state = question1.COMPLETE
with pytest.raises(StateException) as exception_info:
question1.change_to_review_ready()
assert(exception_info.value.__str__() ==
"Incorrect state change. Current state is 3")
assert(question1.state == question1.COMPLETE)
with pytest.raises(StateException) as exception_info:
question1.change_to_complete()
assert(exception_info.value.__str__() ==
"Incorrect state change. Current state is 3")
assert(question1.state == question1.COMPLETE)
with pytest.raises(StateException) as exception_info:
question1.change_to_needs_reworking()
assert(exception_info.value.__str__() ==
"Incorrect state change. Current state is 3")
assert(question1.state == question1.COMPLETE)
question1.change_to_flagged()
assert(question1.state == question1.FLAGGED)
def test_question_state_from_flagged_for_review(self):
"""Check that question state.
Confirm that state can only go from 'flagged for review' to
'complete'.
"""
question1 = Question.objects.all()[0]
question1.state = question1.FLAGGED
with pytest.raises(StateException) as exception_info:
question1.change_to_review_ready()
assert(exception_info.value.__str__() ==
"Incorrect state change. Current state is 4")
assert(question1.state == question1.FLAGGED)
with pytest.raises(StateException) as exception_info:
question1.change_to_needs_reworking()
assert(exception_info.value.__str__() ==
"Incorrect state change. Current state is 4")
assert(question1.state == question1.FLAGGED)
with pytest.raises(StateException) as exception_info:
question1.change_to_flagged()
assert(exception_info.value.__str__() ==
"Incorrect state change. Current state is 4")
assert(question1.state == question1.FLAGGED)
question1.change_to_complete()
assert(question1.state == question1.COMPLETE)
def test_question_option_save(self):
"""Test that question cannot have option with correct answer."""
question1 = Question.objects.all()[0]
option = Option.objects.first()
option.correct = True
option.save()
assert(len(question1.option_set.all()) == 3)
assert(len(Option.objects.all()) == 3)
def test_get_comments(self):
"""
Test that the get_comments() function returns all comments
relating to a question.
"""
user = User.objects.create(username="testuser")
question1 = Question.objects.all()[0]
Comment.objects.create(text="Test comment!", question=question1, user=user)
Comment.objects.create(text="Another comment!", question=question1, user=user)
assert(len(question1.get_comments()) == 2)
assert(question1.get_comments()[0].text == "Test comment!")
assert(question1.get_comments()[0].created_at < question1.get_comments()[1].created_at)
def test_get_options(self):
"""
Test that the get_options() function returns all options
relating to a question.
"""
question1 = Question.objects.all()[0]
assert(question1.get_number_of_options() == 3)
def test_get_state(self):
question1 = Question.objects.all()[0]
assert(question1.state == question1.INCOMPLETE)
assert(question1.get_state() == "Incomplete")
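# A summary of the state machine the transition tests above pin down (derived
# from the assertions in this file, not read out of core.models itself):
#
#   INCOMPLETE      -> REVIEW_READY
#   REVIEW_READY    -> COMPLETE or NEEDS_REWORKING
#   NEEDS_REWORKING -> REVIEW_READY
#   COMPLETE        -> FLAGGED
#   FLAGGED         -> COMPLETE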
class TestTopic(TestCase):
"""Test the Topic Model."""
def setUp(self):
"""Create Topic for testing."""
grade_test = Grade.objects.create(name="Grade Example")
syllabus_test = Syllabus.objects.create(grade=grade_test)
Topic.objects.create(name="Financial Mathematics",
description="Topic that involves sinking funds "
"and loan calculations",
syllabus=syllabus_test, week_start=1,
duration=3)
def test_topic_creates_blocks(self):
"""
Confirm that blocks are created automatically and associated with the
topic.
"""
blocks = Block.objects.all()
assert(len(blocks) == 3)
assert(blocks[0].topic.name == "Financial Mathematics")
def test_topic_creates_questions(self):
"""
Confirm that questions are created automatically and associated with the
correct block and topic.
"""
questions = Question.objects.all()
assert(len(questions) == 3 * 15)
assert(questions[0].block.topic.name == "Financial Mathematics")
def test_topic_number_of_questions(self):
"""
Confirm that the correct number of questions is returned by the helper
function.
"""
questions = Question.objects.all()
topics = Topic.objects.all()
assert(len(questions) == topics[0].get_number_of_questions())
def test_topic_number_of_blocks(self):
"""
Confirm that the correct number of blocks is returned by the helper
function.
"""
blocks = Block.objects.all()
topics = Topic.objects.all()
assert(len(blocks) == topics[0].get_number_of_blocks())
def test_topic_save_does_not_duplicate_questions(self):
already_created_topic = Topic.objects.get(name="Financial Mathematics")
count = 0
for block in Block.objects.filter(topic=already_created_topic):
for question in Question.objects.filter(block=block):
count += 1
assert(count == 45)
new_description = "This is a new description"
already_created_topic.description = new_description
already_created_topic.save()
edited_topic = Topic.objects.get(name="Financial Mathematics")
count = 0
for block in Block.objects.filter(topic=edited_topic):
for question in Question.objects.filter(block=block):
count += 1
assert(count == 45)
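# Note: the 45 above is duration=3 Blocks x 15 auto-created Questions per
# Block, as exercised by test_topic_creates_questions; the generation itself
# presumably happens in Topic.save() or a post_save signal in core.models.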
| [((22, 21, 22, 48), 'core.models.Grade', 'Grade', (), '', False, 'from core.models import Grade, Subject, Question, Comment, Option, Topic, Block, Syllabus, StateException\n'), ((24, 23, 25, 48), 'core.models.Subject', 'Subject', (), '', False, 'from core.models import Grade, Subject, Question, Comment, Option, Topic, Block, Syllabus, StateException\n'), ((27, 20, 29, 50), 'core.models.Question', 'Question', (), '', False, 'from core.models import Grade, Subject, Question, Comment, Option, Topic, Block, Syllabus, StateException\n'), ((188, 17, 188, 39), 'core.models.Option.objects.first', 'Option.objects.first', ({}, {}), '()', False, 'from core.models import Grade, Subject, Question, Comment, Option, Topic, Block, Syllabus, StateException\n'), ((200, 15, 200, 55), 'django.contrib.auth.models.User.objects.create', 'User.objects.create', (), '', False, 'from django.contrib.auth.models import User\n'), ((202, 8, 202, 83), 'core.models.Comment.objects.create', 'Comment.objects.create', (), '', False, 'from core.models import Grade, Subject, Question, Comment, Option, Topic, Block, Syllabus, StateException\n'), ((203, 8, 203, 86), 'core.models.Comment.objects.create', 'Comment.objects.create', (), '', False, 'from core.models import Grade, Subject, Question, Comment, Option, Topic, Block, Syllabus, StateException\n'), ((231, 21, 231, 63), 'core.models.Grade.objects.create', 'Grade.objects.create', (), '', False, 'from core.models import Grade, Subject, Question, Comment, Option, Topic, Block, Syllabus, StateException\n'), ((232, 24, 232, 65), 'core.models.Syllabus.objects.create', 'Syllabus.objects.create', (), '', False, 'from core.models import Grade, Subject, Question, Comment, Option, Topic, Block, Syllabus, StateException\n'), ((233, 8, 237, 40), 'core.models.Topic.objects.create', 'Topic.objects.create', (), '', False, 'from core.models import Grade, Subject, Question, Comment, Option, Topic, Block, Syllabus, StateException\n'), ((244, 17, 244, 36), 'core.models.Block.objects.all', 'Block.objects.all', ({}, {}), '()', False, 'from core.models import Grade, Subject, Question, Comment, Option, Topic, Block, Syllabus, StateException\n'), ((253, 20, 253, 42), 'core.models.Question.objects.all', 'Question.objects.all', ({}, {}), '()', False, 'from core.models import Grade, Subject, Question, Comment, Option, Topic, Block, Syllabus, StateException\n'), ((262, 20, 262, 42), 'core.models.Question.objects.all', 'Question.objects.all', ({}, {}), '()', False, 'from core.models import Grade, Subject, Question, Comment, Option, Topic, Block, Syllabus, StateException\n'), ((263, 17, 263, 36), 'core.models.Topic.objects.all', 'Topic.objects.all', ({}, {}), '()', False, 'from core.models import Grade, Subject, Question, Comment, Option, Topic, Block, Syllabus, StateException\n'), ((271, 17, 271, 36), 'core.models.Block.objects.all', 'Block.objects.all', ({}, {}), '()', False, 'from core.models import Grade, Subject, Question, Comment, Option, Topic, Block, Syllabus, StateException\n'), ((272, 17, 272, 36), 'core.models.Topic.objects.all', 'Topic.objects.all', ({}, {}), '()', False, 'from core.models import Grade, Subject, Question, Comment, Option, Topic, Block, Syllabus, StateException\n'), ((276, 32, 276, 79), 'core.models.Topic.objects.get', 'Topic.objects.get', (), '', False, 'from core.models import Grade, Subject, Question, Comment, Option, Topic, Block, Syllabus, StateException\n'), ((278, 21, 278, 70), 'core.models.Block.objects.filter', 'Block.objects.filter', (), '', False, 'from core.models import Grade, Subject, Question, Comment, Option, Topic, Block, Syllabus, StateException\n'), ((286, 23, 286, 70), 'core.models.Topic.objects.get', 'Topic.objects.get', (), '', False, 'from core.models import Grade, Subject, Question, Comment, Option, Topic, Block, Syllabus, StateException\n'), ((288, 21, 288, 61), 'core.models.Block.objects.filter', 'Block.objects.filter', (), '', False, 'from core.models import Grade, Subject, Question, Comment, Option, Topic, Block, Syllabus, StateException\n'), ((34, 20, 34, 42), 'core.models.Question.objects.all', 'Question.objects.all', ({}, {}), '()', False, 'from core.models import Grade, Subject, Question, Comment, Option, Topic, Block, Syllabus, StateException\n'), ((43, 20, 43, 42), 'core.models.Question.objects.all', 'Question.objects.all', ({}, {}), '()', False, 'from core.models import Grade, Subject, Question, Comment, Option, Topic, Block, Syllabus, StateException\n'), ((45, 13, 45, 42), 'pytest.raises', 'pytest.raises', ({(45, 27, 45, 41): 'StateException'}, {}), '(StateException)', False, 'import pytest\n'), ((51, 13, 51, 42), 'pytest.raises', 'pytest.raises', ({(51, 27, 51, 41): 'StateException'}, {}), '(StateException)', False, 'import pytest\n'), ((57, 13, 57, 42), 'pytest.raises', 'pytest.raises', ({(57, 27, 57, 41): 'StateException'}, {}), '(StateException)', False, 'import pytest\n'), ((72, 20, 72, 42), 'core.models.Question.objects.all', 'Question.objects.all', ({}, {}), '()', False, 'from core.models import Grade, Subject, Question, Comment, Option, Topic, Block, Syllabus, StateException\n'), ((75, 13, 75, 42), 'pytest.raises', 'pytest.raises', ({(75, 27, 75, 41): 'StateException'}, {}), '(StateException)', False, 'import pytest\n'), ((80, 13, 80, 42), 'pytest.raises', 'pytest.raises', ({(80, 27, 80, 41): 'StateException'}, {}), '(StateException)', False, 'import pytest\n'), ((100, 20, 100, 42), 'core.models.Question.objects.all', 'Question.objects.all', ({}, {}), '()', False, 'from core.models import Grade, Subject, Question, Comment, Option, Topic, Block, Syllabus, StateException\n'), ((103, 13, 103, 42), 'pytest.raises', 'pytest.raises', ({(103, 27, 103, 41): 'StateException'}, {}), '(StateException)', False, 'import pytest\n'), ((109, 13, 109, 42), 'pytest.raises', 'pytest.raises', ({(109, 27, 109, 41): 'StateException'}, {}), '(StateException)', False, 'import pytest\n'), ((115, 13, 115, 42), 'pytest.raises', 'pytest.raises', ({(115, 27, 115, 41): 'StateException'}, {}), '(StateException)', False, 'import pytest\n'), ((130, 20, 130, 42), 'core.models.Question.objects.all', 'Question.objects.all', ({}, {}), '()', False, 'from core.models import Grade, Subject, Question, Comment, Option, Topic, Block, Syllabus, StateException\n'), ((133, 13, 133, 42), 'pytest.raises', 'pytest.raises', ({(133, 27, 133, 41): 'StateException'}, {}), '(StateException)', False, 'import pytest\n'), ((139, 13, 139, 42), 'pytest.raises', 'pytest.raises', ({(139, 27, 139, 41): 'StateException'}, {}), '(StateException)', False, 'import pytest\n'), ((145, 13, 145, 42), 'pytest.raises', 'pytest.raises', ({(145, 27, 145, 41): 'StateException'}, {}), '(StateException)', False, 'import pytest\n'), ((160, 20, 160, 42), 'core.models.Question.objects.all', 'Question.objects.all', ({}, {}), '()', False, 'from core.models import Grade, Subject, Question, Comment, Option, Topic, Block, Syllabus, StateException\n'), ((163, 13, 163, 42), 'pytest.raises', 'pytest.raises', ({(163, 27, 163, 41): 'StateException'}, {}), '(StateException)', False, 'import pytest\n'), ((169, 13, 169, 42), 'pytest.raises', 'pytest.raises', ({(169, 27, 169, 41): 'StateException'}, {}), '(StateException)', False, 'import pytest\n'), ((175, 13, 175, 42), 'pytest.raises', 'pytest.raises', ({(175, 27, 175, 41): 'StateException'}, {}), '(StateException)', False, 'import pytest\n'), ((186, 20, 186, 42), 'core.models.Question.objects.all', 'Question.objects.all', ({}, {}), '()', False, 'from core.models import Grade, Subject, Question, Comment, Option, Topic, Block, Syllabus, StateException\n'), ((201, 20, 201, 42), 'core.models.Question.objects.all', 'Question.objects.all', ({}, {}), '()', False, 'from core.models import Grade, Subject, Question, Comment, Option, Topic, Block, Syllabus, StateException\n'), ((214, 20, 214, 42), 'core.models.Question.objects.all', 'Question.objects.all', ({}, {}), '()', False, 'from core.models import Grade, Subject, Question, Comment, Option, Topic, Block, Syllabus, StateException\n'), ((219, 20, 219, 42), 'core.models.Question.objects.all', 'Question.objects.all', ({}, {}), '()', False, 'from core.models import Grade, Subject, Question, Comment, Option, Topic, Block, Syllabus, StateException\n'), ((279, 28, 279, 64), 'core.models.Question.objects.filter', 'Question.objects.filter', (), '', False, 'from core.models import Grade, Subject, Question, Comment, Option, Topic, Block, Syllabus, StateException\n'), ((289, 28, 289, 64), 'core.models.Question.objects.filter', 'Question.objects.filter', (), '', False, 'from core.models import Grade, Subject, Question, Comment, Option, Topic, Block, Syllabus, StateException\n'), ((193, 19, 193, 39), 'core.models.Option.objects.all', 'Option.objects.all', ({}, {}), '()', False, 'from core.models import Grade, Subject, Question, Comment, Option, Topic, Block, Syllabus, StateException\n')]
tbabej/betterbib | betterbib/__init__.py | 80a3c9040232d9988f9a1e4c40724b40b9b9ed85 | # -*- coding: utf-8 -*-
#
from __future__ import print_function
from betterbib.__about__ import (
__version__,
__author__,
__author_email__,
__website__,
)
from betterbib.tools import (
create_dict,
decode,
pybtex_to_dict,
pybtex_to_bibtex_string,
write,
update,
JournalNameUpdater,
translate_month
)
from betterbib.crossref import Crossref
from betterbib.dblp import Dblp
try:
import pipdate
except ImportError:
pass
else:
if pipdate.needs_checking(__name__):
print(pipdate.check(__name__, __version__), end='')
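# pipdate checks PyPI for a newer release of this package and prints an
# upgrade hint; the guarded import above keeps the check optional when
# pipdate is not installed.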
| [((30, 7, 30, 39), 'pipdate.needs_checking', 'pipdate.needs_checking', ({(30, 30, 30, 38): '__name__'}, {}), '(__name__)', False, 'import pipdate\n'), ((31, 14, 31, 50), 'pipdate.check', 'pipdate.check', ({(31, 28, 31, 36): '__name__', (31, 38, 31, 49): '__version__'}, {}), '(__name__, __version__)', False, 'import pipdate\n')] |
omololevy/my_portfolio | base/views.py | 29f8892c3a6e40a9c05c85110301987005d2c5c1 | from django.shortcuts import render, redirect
from django.http import HttpResponse
from django.contrib.auth.decorators import login_required
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.core.mail import EmailMessage
from django.conf import settings
from django.template.loader import render_to_string
from django.contrib.auth.models import User
from django.contrib import messages
from django.contrib.auth import logout, login, authenticate
from django.contrib.auth.forms import UserCreationForm
from .decorators import *
from .forms import PostForm, CustomUserCreationForm, ProfileForm, UserForm
from .filters import PostFilter
from .models import *
# Create your views here.
def home(request):
posts = Post.objects.filter(active=True, featured=True)[0:3]
context = {'posts':posts}
return render(request, 'base/index.html', context)
def posts(request):
posts = Post.objects.filter(active=True)
myFilter = PostFilter(request.GET, queryset=posts)
posts = myFilter.qs
page = request.GET.get('page')
paginator = Paginator(posts, 5)
try:
posts = paginator.page(page)
except PageNotAnInteger:
posts = paginator.page(1)
except EmptyPage:
posts = paginator.page(paginator.num_pages)
context = {'posts':posts, 'myFilter':myFilter}
return render(request, 'base/posts.html', context)
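# A sketch of how the filtering and pagination above combine (this assumes
# PostFilter from .filters exposes the filtered queryset as .qs, exactly as
# posts() uses it):
#
#   GET /posts/?page=2  ->  myFilter.qs narrows Post.objects.filter(active=True),
#                           then Paginator(posts, 5).page(2) yields items 6-10.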
def post(request, slug):
post = Post.objects.get(slug=slug)
if request.method == 'POST':
PostComment.objects.create(
author=request.user.profile,
post=post,
body=request.POST['comment']
)
messages.success(request, "Your comment has been posted successfully!")
return redirect('post', slug=post.slug)
context = {'post':post}
return render(request, 'base/post.html', context)
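# The redirect after a successful comment above implements POST/redirect/GET,
# so refreshing the page re-renders the post instead of resubmitting the form.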
def profile(request):
return render(request, 'base/profile.html')
#CRUD VIEWS
@admin_only
@login_required(login_url="home")
def createPost(request):
form = PostForm()
if request.method == 'POST':
form = PostForm(request.POST, request.FILES)
if form.is_valid():
form.save()
return redirect('posts')
context = {'form':form}
return render(request, 'base/post_form.html', context)
@admin_only
@login_required(login_url="home")
def updatePost(request, slug):
post = Post.objects.get(slug=slug)
form = PostForm(instance=post)
if request.method == 'POST':
form = PostForm(request.POST, request.FILES, instance=post)
if form.is_valid():
form.save()
return redirect('posts')
context = {'form':form}
return render(request, 'base/post_form.html', context)
@admin_only
@login_required(login_url="home")
def deletePost(request, slug):
post = Post.objects.get(slug=slug)
if request.method == 'POST':
post.delete()
return redirect('posts')
context = {'item':post}
return render(request, 'base/delete.html', context)
def sendEmail(request):
if request.method == 'POST':
template = render_to_string('base/email_template.html', {
'name':request.POST['name'],
'email':request.POST['email'],
'message':request.POST['message'],
})
email = EmailMessage(
request.POST['subject'],
template,
settings.EMAIL_HOST_USER,
['[email protected]']
)
email.fail_silently=False
email.send()
return render(request, 'base/email_sent.html')
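# Minimal SMTP settings the EmailMessage above relies on (placeholder values,
# not taken from this project's settings.py):
#
#   EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
#   EMAIL_HOST = 'smtp.gmail.com'
#   EMAIL_PORT = 587
#   EMAIL_USE_TLS = True
#   EMAIL_HOST_USER = '[email protected]'
#   EMAIL_HOST_PASSWORD = '<app password>'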
def loginPage(request):
if request.user.is_authenticated:
return redirect('home')
if request.method == 'POST':
email = request.POST.get('email')
password =request.POST.get('password')
        # Little hack to work around re-building the user model:
        # look the user up by email, then authenticate with the username.
try:
user = User.objects.get(email=email)
user = authenticate(request, username=user.username, password=password)
        except User.DoesNotExist:
            messages.error(request, 'User with this email does not exist')
return redirect('login')
if user is not None:
login(request, user)
return redirect('home')
else:
messages.error(request, 'Email OR password is incorrect')
context = {}
return render(request, 'base/login.html', context)
def registerPage(request):
form = CustomUserCreationForm()
if request.method == 'POST':
form = CustomUserCreationForm(request.POST)
if form.is_valid():
user = form.save(commit=False)
user.save()
            messages.success(request, 'Account successfully created!')
user = authenticate(request, username=user.username, password=request.POST['password1'])
if user is not None:
login(request, user)
next_url = request.GET.get('next')
if next_url == '' or next_url == None:
next_url = 'home'
return redirect(next_url)
else:
            messages.error(request, 'An error has occurred with registration')
context = {'form':form}
return render(request, 'base/register.html', context)
def logoutUser(request):
logout(request)
return redirect('home')
@admin_only
@login_required(login_url="home")
def userAccount(request):
profile = request.user.profile
context = {'profile':profile}
return render(request, 'base/account.html', context)
@login_required(login_url="home")
def updateProfile(request):
user = request.user
profile = user.profile
form = ProfileForm(instance=profile)
if request.method == 'POST':
user_form = UserForm(request.POST, instance=user)
if user_form.is_valid():
user_form.save()
form = ProfileForm(request.POST, request.FILES, instance=profile)
if form.is_valid():
form.save()
return redirect('account')
context = {'form':form}
return render(request, 'base/profile_form.html', context)
def myEducation(request):
return render(request, 'base/education.html')
def myExperience(request):
return render(request, 'base/experience.html')
def myAchievements(request):
return render(request, 'base/achievements.html')
def myAbout(request):
return render(request, 'base/about.html')
def myContact(request):
return render(request, 'base/contact.html')
def mySkills(request):
return render(request, 'base/skills.html')
| [((65, 1, 65, 33), 'django.contrib.auth.decorators.login_required', 'login_required', (), '', False, 'from django.contrib.auth.decorators import login_required\n'), ((80, 1, 80, 33), 'django.contrib.auth.decorators.login_required', 'login_required', (), '', False, 'from django.contrib.auth.decorators import login_required\n'), ((95, 1, 95, 33), 'django.contrib.auth.decorators.login_required', 'login_required', (), '', False, 'from django.contrib.auth.decorators import login_required\n'), ((182, 1, 182, 33), 'django.contrib.auth.decorators.login_required', 'login_required', (), '', False, 'from django.contrib.auth.decorators import login_required\n'), ((189, 1, 189, 33), 'django.contrib.auth.decorators.login_required', 'login_required', (), '', False, 'from django.contrib.auth.decorators import login_required\n'), ((23, 8, 23, 51), 'django.shortcuts.render', 'render', ({(23, 15, 23, 22): 'request', (23, 24, 23, 41): '"""base/index.html"""', (23, 43, 23, 50): 'context'}, {}), "(request, 'base/index.html', context)", False, 'from django.shortcuts import render, redirect\n'), ((31, 13, 31, 32), 'django.core.paginator.Paginator', 'Paginator', ({(31, 23, 31, 28): 'posts', (31, 30, 31, 31): '5'}, {}), '(posts, 5)', False, 'from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger\n'), ((41, 8, 41, 51), 'django.shortcuts.render', 'render', ({(41, 15, 41, 22): 'request', (41, 24, 41, 41): '"""base/posts.html"""', (41, 43, 41, 50): 'context'}, {}), "(request, 'base/posts.html', context)", False, 'from django.shortcuts import render, redirect\n'), ((58, 8, 58, 50), 'django.shortcuts.render', 'render', ({(58, 15, 58, 22): 'request', (58, 24, 58, 40): '"""base/post.html"""', (58, 42, 58, 49): 'context'}, {}), "(request, 'base/post.html', context)", False, 'from django.shortcuts import render, redirect\n'), ((61, 8, 61, 44), 'django.shortcuts.render', 'render', ({(61, 15, 61, 22): 'request', (61, 24, 61, 43): '"""base/profile.html"""'}, {}), "(request, 'base/profile.html')", False, 'from django.shortcuts import render, redirect\n'), ((76, 8, 76, 55), 'django.shortcuts.render', 'render', ({(76, 15, 76, 22): 'request', (76, 24, 76, 45): '"""base/post_form.html"""', (76, 47, 76, 54): 'context'}, {}), "(request, 'base/post_form.html', context)", False, 'from django.shortcuts import render, redirect\n'), ((92, 8, 92, 55), 'django.shortcuts.render', 'render', ({(92, 15, 92, 22): 'request', (92, 24, 92, 45): '"""base/post_form.html"""', (92, 47, 92, 54): 'context'}, {}), "(request, 'base/post_form.html', context)", False, 'from django.shortcuts import render, redirect\n'), ((103, 8, 103, 52), 'django.shortcuts.render', 'render', ({(103, 15, 103, 22): 'request', (103, 24, 103, 42): '"""base/delete.html"""', (103, 44, 103, 51): 'context'}, {}), "(request, 'base/delete.html', context)", False, 'from django.shortcuts import render, redirect\n'), ((127, 8, 127, 47), 'django.shortcuts.render', 'render', ({(127, 15, 127, 22): 'request', (127, 24, 127, 46): '"""base/email_sent.html"""'}, {}), "(request, 'base/email_sent.html')", False, 'from django.shortcuts import render, redirect\n'), ((152, 8, 152, 51), 'django.shortcuts.render', 'render', ({(152, 15, 152, 22): 'request', (152, 24, 152, 41): '"""base/login.html"""', (152, 43, 152, 50): 'context'}, {}), "(request, 'base/login.html', context)", False, 'from django.shortcuts import render, redirect\n'), ((175, 8, 175, 54), 'django.shortcuts.render', 'render', ({(175, 15, 175, 22): 'request', (175, 24, 175, 44): '"""base/register.html"""', (175, 46, 175, 53): 'context'}, {}), "(request, 'base/register.html', context)", False, 'from django.shortcuts import render, redirect\n'), ((178, 1, 178, 16), 'django.contrib.auth.logout', 'logout', ({(178, 8, 178, 15): 'request'}, {}), '(request)', False, 'from django.contrib.auth import logout, login, authenticate\n'), ((179, 8, 179, 24), 'django.shortcuts.redirect', 'redirect', ({(179, 17, 179, 23): '"""home"""'}, {}), "('home')", False, 'from django.shortcuts import render, redirect\n'), ((187, 8, 187, 53), 'django.shortcuts.render', 'render', ({(187, 15, 187, 22): 'request', (187, 24, 187, 43): '"""base/account.html"""', (187, 45, 187, 52): 'context'}, {}), "(request, 'base/account.html', context)", False, 'from django.shortcuts import render, redirect\n'), ((206, 8, 206, 58), 'django.shortcuts.render', 'render', ({(206, 15, 206, 22): 'request', (206, 24, 206, 48): '"""base/profile_form.html"""', (206, 50, 206, 57): 'context'}, {}), "(request, 'base/profile_form.html', context)", False, 'from django.shortcuts import render, redirect\n'), ((209, 8, 209, 46), 'django.shortcuts.render', 'render', ({(209, 15, 209, 22): 'request', (209, 24, 209, 45): '"""base/education.html"""'}, {}), "(request, 'base/education.html')", False, 'from django.shortcuts import render, redirect\n'), ((212, 8, 212, 47), 'django.shortcuts.render', 'render', ({(212, 15, 212, 22): 'request', (212, 24, 212, 46): '"""base/experience.html"""'}, {}), "(request, 'base/experience.html')", False, 'from django.shortcuts import render, redirect\n'), ((215, 8, 215, 49), 'django.shortcuts.render', 'render', ({(215, 15, 215, 22): 'request', (215, 24, 215, 48): '"""base/achievements.html"""'}, {}), "(request, 'base/achievements.html')", False, 'from django.shortcuts import render, redirect\n'), ((218, 11, 218, 45), 'django.shortcuts.render', 'render', ({(218, 18, 218, 25): 'request', (218, 27, 218, 44): '"""base/about.html"""'}, {}), "(request, 'base/about.html')", False, 'from django.shortcuts import render, redirect\n'), ((221, 11, 221, 47), 'django.shortcuts.render', 'render', ({(221, 18, 221, 25): 'request', (221, 27, 221, 46): '"""base/contact.html"""'}, {}), "(request, 'base/contact.html')", False, 'from django.shortcuts import render, redirect\n'), ((224, 11, 224, 46), 'django.shortcuts.render', 'render', ({(224, 18, 224, 25): 'request', (224, 27, 224, 45): '"""base/skills.html"""'}, {}), "(request, 'base/skills.html')", False, 'from django.shortcuts import render, redirect\n'), ((52, 2, 52, 73), 'django.contrib.messages.success', 'messages.success', ({(52, 19, 52, 26): 'request', (52, 28, 52, 72): '"""Your comment has been posted successfully!"""'}, {}), "(request, 'Your comment has been posted successfully!')", False, 'from django.contrib import messages\n'), ((54, 9, 54, 41), 'django.shortcuts.redirect', 'redirect', (), '', False, 'from django.shortcuts import render, redirect\n'), ((73, 9, 73, 26), 'django.shortcuts.redirect', 'redirect', ({(73, 18, 73, 25): '"""posts"""'}, {}), "('posts')", False, 'from django.shortcuts import render, redirect\n'), ((89, 9, 89, 26), 'django.shortcuts.redirect', 'redirect', ({(89, 18, 89, 25): '"""posts"""'}, {}), "('posts')", False, 'from django.shortcuts import render, redirect\n'), ((101, 9, 101, 26), 'django.shortcuts.redirect', 'redirect', ({(101, 18, 101, 25): '"""posts"""'}, {}), "('posts')", False, 'from django.shortcuts import render, redirect\n'), ((111, 13, 115, 5), 'django.template.loader.render_to_string', 'render_to_string', ({(111, 30, 111, 56): '"""base/email_template.html"""', (111, 58, 115, 4): "{'name': request.POST['name'], 'email': request.POST['email'], 'message':\n request.POST['message']}"}, {}), "('base/email_template.html', {'name': request.POST['name'],\n 'email': request.POST['email'], 'message': request.POST['message']})", False, 'from django.template.loader import render_to_string\n'), ((117, 10, 122, 4), 'django.core.mail.EmailMessage', 'EmailMessage', ({(118, 3, 118, 26): "request.POST['subject']", (119, 3, 119, 11): 'template', (120, 3, 120, 27): 'settings.EMAIL_HOST_USER', (121, 3, 121, 27): "['[email protected]']"}, {}), "(request.POST['subject'], template, settings.EMAIL_HOST_USER, [\n '[email protected]'])", False, 'from django.core.mail import EmailMessage\n'), ((131, 9, 131, 25), 'django.shortcuts.redirect', 'redirect', ({(131, 18, 131, 24): '"""home"""'}, {}), "('home')", False, 'from django.shortcuts import render, redirect\n'), ((139, 10, 139, 39), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', (), '', False, 'from django.contrib.auth.models import User\n'), ((140, 10, 140, 74), 'django.contrib.auth.authenticate', 'authenticate', (), '', False, 'from django.contrib.auth import logout, login, authenticate\n'), ((146, 3, 146, 23), 'django.contrib.auth.login', 'login', ({(146, 9, 146, 16): 'request', (146, 18, 146, 22): 'user'}, {}), '(request, user)', False, 'from django.contrib.auth import logout, login, authenticate\n'), ((147, 10, 147, 26), 'django.shortcuts.redirect', 'redirect', ({(147, 19, 147, 25): '"""home"""'}, {}), "('home')", False, 'from django.shortcuts import render, redirect\n'), ((149, 3, 149, 60), 'django.contrib.messages.error', 'messages.error', ({(149, 18, 149, 25): 'request', (149, 27, 149, 59): '"""Email OR password is incorrect"""'}, {}), "(request, 'Email OR password is incorrect')", False, 'from django.contrib import messages\n'), ((161, 3, 161, 60), 'django.contrib.messages.success', 'messages.success', ({(161, 20, 161, 27): 'request', (161, 29, 161, 59): '"""Account successfuly created!"""'}, {}), "(request, 'Account successfuly created!')", False, 'from django.contrib import messages\n'), ((163, 10, 163, 91), 'django.contrib.auth.authenticate', 'authenticate', (), '', False, 'from django.contrib.auth import logout, login, authenticate\n'), ((171, 10, 171, 28), 'django.shortcuts.redirect', 'redirect', ({(171, 19, 171, 27): 'next_url'}, {}), '(next_url)', False, 'from django.shortcuts import render, redirect\n'), ((173, 3, 173, 68), 'django.contrib.messages.error', 'messages.error', ({(173, 18, 173, 25): 'request', (173, 27, 173, 67): '"""An error has occured with registration"""'}, {}), "(request, 'An error has occured with registration')", False, 'from django.contrib import messages\n'), ((202, 10, 202, 29), 'django.shortcuts.redirect', 'redirect', ({(202, 19, 202, 28): '"""account"""'}, {}), "('account')", False, 'from django.shortcuts import render, redirect\n'), ((142, 3, 142, 66), 'django.contrib.messages.error', 'messages.error', ({(142, 18, 142, 25): 'request', (142, 27, 142, 65): '"""User with this email does not exists"""'}, {}), "(request, 'User with this email does not exists')", False, 'from django.contrib import messages\n'), ((143, 10, 143, 27), 'django.shortcuts.redirect', 'redirect', ({(143, 19, 143, 26): '"""login"""'}, {}), "('login')", False, 'from django.shortcuts import render, redirect\n'), ((166, 4, 166, 24), 'django.contrib.auth.login', 'login', ({(166, 10, 166, 17): 'request', (166, 19, 166, 23): 'user'}, {}), '(request, user)', False, 'from django.contrib.auth import logout, login, authenticate\n')]
hephaestus9/Radio | radioLib/pastebin/pastebin.py | c1560c25def211ab6354fb0aa5cc935e2851c8f0 | #!/usr/bin/env python
#############################################################################
# Pastebin.py - Python 3.2 Pastebin API.
# Copyright (C) 2012 Ian Havelock
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#############################################################################
# This software is a derivative work of:
# http://winappdbg.sourceforge.net/blog/pastebin.py
#############################################################################
__all__ = ['delete_paste', 'user_details', 'trending', 'pastes_by_user', 'generate_user_key',
           'legacy_paste', 'paste', 'PastebinAPI', 'PastebinError']
import urllib.parse
import urllib.request
class PastebinError(RuntimeError):
"""Pastebin API error.
The error message returned by the web application is stored as the Python exception message."""
class PastebinAPI(object):
"""Pastebin API interaction object.
Public functions:
paste -- Pastes a user-specified file or string using the new API-key POST method.
legacy_paste -- Pastes a user-specified file or string using the old anonymous POST method.
generate_user_key -- Generates a session-key that is required for other functions.
pastes_by_user -- Returns all public pastes submitted by the specified login credentials.
trending -- Returns the top trending paste.
user_details -- Returns details about the user for the specified API user key.
    delete_paste -- Deletes the paste specified by the api_paste_key."""
# String to determine bad API requests
_bad_request = 'Bad API request'
# Base domain name
_base_domain = 'pastebin.com'
    # Valid Pastebin URLs begin with this string (kind of obvious)
_prefix_url = 'http://%s/' % _base_domain
# Valid Pastebin URLs with a custom subdomain begin with this string
_subdomain_url = 'http://%%s.%s/' % _base_domain
    # URL to the LEGACY POST API
    _legacy_api_url = 'http://%s/api_public.php' % _base_domain
    # URL to the POST API
    _api_url = 'http://%s/api/api_post.php' % _base_domain
    # URL to the login POST API
    _api_login_url = 'http://%s/api/api_login.php' % _base_domain
# Valid paste_expire_date values (Never, 10 minutes, 1 Hour, 1 Day, 1 Month)
paste_expire_date = ('N', '10M', '1H', '1D', '1M')
# Valid paste_expire_date values (0 = public, 1 = unlisted, 2 = private)
paste_private = ('public', 'unlisted', 'private')
# Valid parse_format values
paste_format = (
'4cs', # 4CS
'6502acme', # 6502 ACME Cross Assembler
'6502kickass', # 6502 Kick Assembler
'6502tasm', # 6502 TASM/64TASS
'abap', # ABAP
'actionscript', # ActionScript
'actionscript3', # ActionScript 3
'ada', # Ada
'algol68', # ALGOL 68
'apache', # Apache Log
'applescript', # AppleScript
'apt_sources', # APT Sources
'asm', # ASM (NASM)
'asp', # ASP
'autoconf', # autoconf
'autohotkey', # Autohotkey
'autoit', # AutoIt
'avisynth', # Avisynth
'awk', # Awk
'bascomavr', # BASCOM AVR
'bash', # Bash
'basic4gl', # Basic4GL
'bibtex', # BibTeX
'blitzbasic', # Blitz Basic
'bnf', # BNF
'boo', # BOO
'bf', # BrainFuck
'c', # C
'c_mac', # C for Macs
'cil', # C Intermediate Language
'csharp', # C#
'cpp', # C++
'cpp-qt', # C++ (with QT extensions)
'c_loadrunner', # C: Loadrunner
'caddcl', # CAD DCL
'cadlisp', # CAD Lisp
'cfdg', # CFDG
'chaiscript', # ChaiScript
'clojure', # Clojure
'klonec', # Clone C
'klonecpp', # Clone C++
'cmake', # CMake
'cobol', # COBOL
'coffeescript', # CoffeeScript
'cfm', # ColdFusion
'css', # CSS
'cuesheet', # Cuesheet
'd', # D
'dcs', # DCS
'delphi', # Delphi
'oxygene', # Delphi Prism (Oxygene)
'diff', # Diff
'div', # DIV
'dos', # DOS
'dot', # DOT
'e', # E
'ecmascript', # ECMAScript
'eiffel', # Eiffel
'email', # Email
'epc', # EPC
'erlang', # Erlang
'fsharp', # F#
'falcon', # Falcon
'fo', # FO Language
'f1', # Formula One
'fortran', # Fortran
'freebasic', # FreeBasic
'freeswitch', # FreeSWITCH
'gambas', # GAMBAS
'gml', # Game Maker
'gdb', # GDB
'genero', # Genero
'genie', # Genie
'gettext', # GetText
'go', # Go
'groovy', # Groovy
'gwbasic', # GwBasic
'haskell', # Haskell
'hicest', # HicEst
'hq9plus', # HQ9 Plus
'html4strict', # HTML
'html5', # HTML 5
'icon', # Icon
'idl', # IDL
'ini', # INI file
'inno', # Inno Script
'intercal', # INTERCAL
'io', # IO
'j', # J
'java', # Java
'java5', # Java 5
'javascript', # JavaScript
'jquery', # jQuery
'kixtart', # KiXtart
'latex', # Latex
'lb', # Liberty BASIC
'lsl2', # Linden Scripting
'lisp', # Lisp
'llvm', # LLVM
'locobasic', # Loco Basic
'logtalk', # Logtalk
'lolcode', # LOL Code
'lotusformulas', # Lotus Formulas
'lotusscript', # Lotus Script
'lscript', # LScript
'lua', # Lua
'm68k', # M68000 Assembler
'magiksf', # MagikSF
'make', # Make
'mapbasic', # MapBasic
'matlab', # MatLab
'mirc', # mIRC
'mmix', # MIX Assembler
'modula2', # Modula 2
'modula3', # Modula 3
'68000devpac', # Motorola 68000 HiSoft Dev
'mpasm', # MPASM
'mxml', # MXML
'mysql', # MySQL
'newlisp', # newLISP
'text', # None
'nsis', # NullSoft Installer
'oberon2', # Oberon 2
        'objeck', # Objeck Programming Language
        'objc', # Objective C
        'ocaml-brief', # OCaml Brief
'ocaml', # OCaml
'pf', # OpenBSD PACKET FILTER
'glsl', # OpenGL Shading
'oobas', # Openoffice BASIC
'oracle11', # Oracle 11
'oracle8', # Oracle 8
'oz', # Oz
'pascal', # Pascal
'pawn', # PAWN
'pcre', # PCRE
'per', # Per
'perl', # Perl
'perl6', # Perl 6
'php', # PHP
'php-brief', # PHP Brief
'pic16', # Pic 16
'pike', # Pike
'pixelbender', # Pixel Bender
'plsql', # PL/SQL
'postgresql', # PostgreSQL
'povray', # POV-Ray
'powershell', # Power Shell
'powerbuilder', # PowerBuilder
'proftpd', # ProFTPd
'progress', # Progress
'prolog', # Prolog
'properties', # Properties
'providex', # ProvideX
'purebasic', # PureBasic
'pycon', # PyCon
'python', # Python
'q', # q/kdb+
'qbasic', # QBasic
'rsplus', # R
'rails', # Rails
'rebol', # REBOL
'reg', # REG
'robots', # Robots
'rpmspec', # RPM Spec
'ruby', # Ruby
'gnuplot', # Ruby Gnuplot
'sas', # SAS
'scala', # Scala
'scheme', # Scheme
'scilab', # Scilab
'sdlbasic', # SdlBasic
'smalltalk', # Smalltalk
'smarty', # Smarty
'sql', # SQL
'systemverilog', # SystemVerilog
'tsql', # T-SQL
'tcl', # TCL
'teraterm', # Tera Term
'thinbasic', # thinBasic
'typoscript', # TypoScript
'unicon', # Unicon
'uscript', # UnrealScript
'vala', # Vala
'vbnet', # VB.NET
'verilog', # VeriLog
'vhdl', # VHDL
'vim', # VIM
'visualprolog', # Visual Pro Log
'vb', # VisualBasic
'visualfoxpro', # VisualFoxPro
'whitespace', # WhiteSpace
'whois', # WHOIS
'winbatch', # Winbatch
'xbasic', # XBasic
'xml', # XML
'xorg_conf', # Xorg Config
'xpp', # XPP
'yaml', # YAML
'z80', # Z80 Assembler
'zxbasic', # ZXBasic
)
def __init__(self):
pass
def delete_paste(self, api_dev_key, api_user_key, api_paste_key):
"""Delete the paste specified by the api_paste_key.
Usage Example::
>>> from pastebin import PastebinAPI
>>> x = PastebinAPI()
>>> paste_to_delete = x.delete_paste('453a994e0e2f1efae07f8759e59e075b',
... 'c57a18e6c0ae228cd4bd16fe36da381a',
... 'WkgcTFtv')
        >>> print(paste_to_delete)
Paste Removed
@type api_dev_key: string
@param api_dev_key: The API Developer Key of a registered U{http://pastebin.com} account.
@type api_user_key: string
@param api_user_key: The API User Key of a U{http://pastebin.com} registered user.
@type api_paste_key: string
@param api_paste_key: The Paste Key of the paste to be deleted (string after final / in U{http://pastebin.com} URL).
@rtype: string
@returns: A successful deletion returns 'Paste Removed'.
"""
# Valid api developer key
argv = {'api_dev_key' : str(api_dev_key) }
# Requires pre-registered account
if api_user_key is not None:
argv['api_user_key'] = str(api_user_key)
# Key of the paste to be deleted.
if api_paste_key is not None:
argv['api_paste_key'] = str(api_paste_key)
# Valid API option - 'user_details' in this instance
argv['api_option'] = str('delete')
        # let's try to read the URL that we've just built.
        request_string = urllib.request.urlopen(self._api_url, urllib.parse.urlencode(argv).encode('utf-8'))
        response = request_string.read().decode('utf-8')
        # do some basic error checking here so we can gracefully handle any errors we are likely to encounter
        if response.startswith(self._bad_request):
            raise PastebinError(response)
return response
def user_details(self, api_dev_key, api_user_key):
"""Return user details of the user specified by the api_user_key.
Usage Example::
>>> from pastebin import PastebinAPI
>>> x = PastebinAPI()
>>> details = x.user_details('453a994e0e2f1efae07f8759e59e075b',
... 'c57a18e6c0ae228cd4bd16fe36da381a')
        >>> print(details)
<user>
<user_name>MonkeyPuzzle</user_name>
<user_format_short>python</user_format_short>
<user_expiration>N</user_expiration>
<user_avatar_url>http://pastebin.com/i/guest.gif</user_avatar_url>
<user_private>0</user_private>
<user_website></user_website>
<user_email>[email protected]</user_email>
<user_location></user_location>
<user_account_type>0</user_account_type>
</user>
@type api_dev_key: string
@param api_dev_key: The API Developer Key of a registered U{http://pastebin.com} account.
@type api_user_key: string
@param api_user_key: The API User Key of a U{http://pastebin.com} registered user.
@rtype: string
@returns: Returns an XML string containing user information.
"""
# Valid api developer key
argv = {'api_dev_key' : str(api_dev_key) }
# Requires pre-registered account to generate an api_user_key (see generate_user_key)
if api_user_key is not None:
argv['api_user_key'] = str(api_user_key)
# Valid API option - 'user_details' in this instance
argv['api_option'] = str('userdetails')
        # let's try to read the URL that we've just built.
        request_string = urllib.request.urlopen(self._api_url, urllib.parse.urlencode(argv).encode('utf-8'))
        response = request_string.read().decode('utf-8')
# do some basic error checking here so we can gracefully handle any errors we are likely to encounter
if response.startswith(self._bad_request):
raise PastebinError(response)
elif not response.startswith('<user>'):
raise PastebinError(response)
return response
def trending(self, api_dev_key):
"""Returns the top trending paste details.
Usage Example::
>>> from pastebin import PastebinAPI
>>> x = PastebinAPI()
>>> details = x.trending('453a994e0e2f1efae07f8759e59e075b')
        >>> print(details)
<paste>
<paste_key>jjMRFDH6</paste_key>
<paste_date>1333230838</paste_date>
<paste_title></paste_title>
<paste_size>6416</paste_size>
<paste_expire_date>0</paste_expire_date>
<paste_private>0</paste_private>
<paste_format_long>None</paste_format_long>
<paste_format_short>text</paste_format_short>
<paste_url>http://pastebin.com/jjMRFDH6</paste_url>
<paste_hits>6384</paste_hits>
</paste>
Note: Returns multiple trending pastes, not just 1.
@type api_dev_key: string
@param api_dev_key: The API Developer Key of a registered U{http://pastebin.com} account.
@rtype: string
@return: Returns the string (XML formatted) containing the top trending pastes.
"""
# Valid api developer key
argv = {'api_dev_key' : str(api_dev_key) }
# Valid API option - 'trends' is returns trending pastes
argv['api_option'] = str('trends')
        # let's try to read the URL that we've just built.
        request_string = urllib.request.urlopen(self._api_url, urllib.parse.urlencode(argv).encode('utf-8'))
        response = request_string.read().decode('utf-8')
# do some basic error checking here so we can gracefully handle any errors we are likely to encounter
if response.startswith(self._bad_request):
raise PastebinError(response)
elif not response.startswith('<paste>'):
raise PastebinError(response)
return response
def pastes_by_user(self, api_dev_key, api_user_key, results_limit = None):
"""Returns all pastes for the provided api_user_key.
Usage Example::
>>> from pastebin import PastebinAPI
>>> x = PastebinAPI()
        >>> details = x.pastes_by_user('453a994e0e2f1efae07f8759e59e075b',
        ... 'c57a18e6c0ae228cd4bd16fe36da381a',
        ... 100)
        >>> print(details)
<paste>
<paste_key>DLiSspYT</paste_key>
<paste_date>1332714730</paste_date>
<paste_title>Pastebin.py - Python 3.2 Pastebin.com API</paste_title>
<paste_size>25300</paste_size>
<paste_expire_date>0</paste_expire_date>
<paste_private>0</paste_private>
<paste_format_long>Python</paste_format_long>
<paste_format_short>python</paste_format_short>
<paste_url>http://pastebin.com/DLiSspYT</paste_url>
<paste_hits>70</paste_hits>
</paste>
Note: Returns multiple pastes, not just 1.
@type api_dev_key: string
@param api_dev_key: The API Developer Key of a registered U{http://pastebin.com} account.
@type api_user_key: string
@param api_user_key: The API User Key of a U{http://pastebin.com} registered user.
@type results_limit: number
@param results_limit: The number of pastes to return between 1 - 1000.
@rtype: string
@returns: Returns an XML string containing number of specified pastes by user.
"""
# Valid api developer key
argv = {'api_dev_key' : str(api_dev_key) }
# Requires pre-registered account
if api_user_key is not None:
argv['api_user_key'] = str(api_user_key)
# Number of results to return - between 1 & 1000, default = 50
if results_limit is None:
argv['api_results_limit'] = 50
if results_limit is not None:
if results_limit < 1:
argv['api_results_limit'] = 50
elif results_limit > 1000:
argv['api_results_limit'] = 1000
else:
argv['api_results_limit'] = int(results_limit)
# Valid API option - 'paste' is default for new paste
argv['api_option'] = str('list')
        # let's try to read the URL that we've just built.
        request_string = urllib.request.urlopen(self._api_url, urllib.parse.urlencode(argv).encode('utf-8'))
        response = request_string.read().decode('utf-8')
# do some basic error checking here so we can gracefully handle any errors we are likely to encounter
if response.startswith(self._bad_request):
raise PastebinError(response)
elif not response.startswith('<paste>'):
raise PastebinError(response)
return response
def generate_user_key(self, api_dev_key, username, password):
"""Generate a user session key - needed for other functions.
Usage Example::
>>> from pastebin import PastebinAPI
>>> x = PastebinAPI()
>>> my_key = x.generate_user_key('453a994e0e2f1efae07f8759e59e075b',
... 'MonkeyPuzzle',
... '12345678')
        >>> print(my_key)
c57a18e6c0ae228cd4bd16fe36da381a
@type api_dev_key: string
@param api_dev_key: The API Developer Key of a registered U{http://pastebin.com} account.
@type username: string
@param username: The username of a registered U{http://pastebin.com} account.
@type password: string
@param password: The password of a registered U{http://pastebin.com} account.
@rtype: string
@returns: Session key (api_user_key) to allow authenticated interaction to the API.
"""
# Valid api developer key
argv = {'api_dev_key' : str(api_dev_key) }
# Requires pre-registered pastebin account
if username is not None:
argv['api_user_name'] = str(username)
# Requires pre-registered pastebin account
if password is not None:
argv['api_user_password'] = str(password)
        # let's try to read the URL that we've just built.
        request_string = urllib.request.urlopen(self._api_login_url, urllib.parse.urlencode(argv).encode('utf-8'))
        response = request_string.read().decode('utf-8')
# do some basic error checking here so we can gracefully handle any errors we are likely to encounter
if response.startswith(self._bad_request):
raise PastebinError(response)
return response
def paste(self, api_dev_key, api_paste_code,
api_user_key = None, paste_name = None, paste_format = None,
paste_private = None, paste_expire_date = None):
"""Submit a code snippet to Pastebin using the new API.
Usage Example::
>>> from pastebin import PastebinAPI
>>> x = PastebinAPI()
>>> url = x.paste('453a994e0e2f1efae07f8759e59e075b' ,
... 'Snippet of code to paste goes here',
... paste_name = 'title of paste',
... api_user_key = 'c57a18e6c0ae228cd4bd16fe36da381a',
... paste_format = 'python',
... paste_private = 'unlisted',
... paste_expire_date = '10M')
        >>> print(url)
http://pastebin.com/tawPUgqY
@type api_dev_key: string
@param api_dev_key: The API Developer Key of a registered U{http://pastebin.com} account.
@type api_paste_code: string
@param api_paste_code: The file or string to paste to body of the U{http://pastebin.com} paste.
@type api_user_key: string
@param api_user_key: The API User Key of a U{http://pastebin.com} registered user.
If none specified, paste is made as a guest.
@type paste_name: string
@param paste_name: (Optional) Title of the paste.
Default is to paste anonymously.
@type paste_format: string
@param paste_format: (Optional) Programming language of the code being
pasted. This enables syntax highlighting when reading the code in
U{http://pastebin.com}. Default is no syntax highlighting (text is
just text and not source code).
@type paste_private: string
@param paste_private: (Optional) C{'public'} if the paste is public (visible
by everyone), C{'unlisted'} if it's public but not searchable.
C{'private'} if the paste is private and not searchable or indexed.
The Pastebin FAQ (U{http://pastebin.com/faq}) claims
private pastes are not indexed by search engines (aka Google).
@type paste_expire_date: str
@param paste_expire_date: (Optional) Expiration date for the paste.
Once past this date the paste is deleted automatically. Valid
values are found in the L{PastebinAPI.paste_expire_date} class member.
If not provided, the paste never expires.
@rtype: string
@return: Returns the URL to the newly created paste.
"""
# Valid api developer key
argv = {'api_dev_key' : str(api_dev_key) }
# Code snippet to submit
if api_paste_code is not None:
argv['api_paste_code'] = str(api_paste_code)
# Valid API option - 'paste' is default for new paste
argv['api_option'] = str('paste')
# API User Key
if api_user_key is not None:
argv['api_user_key'] = str(api_user_key)
elif api_user_key is None:
argv['api_user_key'] = str('')
# Name of the poster
if paste_name is not None:
argv['api_paste_name'] = str(paste_name)
# Syntax highlighting
if paste_format is not None:
paste_format = str(paste_format).strip().lower()
argv['api_paste_format'] = paste_format
# Is the snippet private?
if paste_private is not None:
if paste_private == 'public':
argv['api_paste_private'] = int(0)
elif paste_private == 'unlisted':
argv['api_paste_private'] = int(1)
elif paste_private == 'private':
argv['api_paste_private'] = int(2)
# Expiration for the snippet
if paste_expire_date is not None:
paste_expire_date = str(paste_expire_date).strip().upper()
argv['api_paste_expire_date'] = paste_expire_date
        # let's try to read the URL that we've just built.
        request_string = urllib.request.urlopen(self._api_url, urllib.parse.urlencode(argv).encode('utf-8'))
        response = request_string.read().decode('utf-8')
# do some basic error checking here so we can gracefully handle any errors we are likely to encounter
if response.startswith(self._bad_request):
raise PastebinError(response)
elif not response.startswith(self._prefix_url):
raise PastebinError(response)
return response
def legacy_paste(self, paste_code,
paste_name = None, paste_private = None,
paste_expire_date = None, paste_format = None):
"""Unofficial python interface to the Pastebin legacy API.
Unlike the official API, this one doesn't require an API key, so it's
virtually anonymous.
Usage Example::
>>> from pastebin import PastebinAPI
>>> x = PastebinAPI()
>>> url = x.legacy_paste('Snippet of code to paste goes here',
... paste_name = 'title of paste',
... paste_private = 'unlisted',
... paste_expire_date = '10M',
... paste_format = 'python')
        >>> print(url)
http://pastebin.com/tawPUgqY
@type paste_code: string
@param paste_code: The file or string to paste to body of the U{http://pastebin.com} paste.
@type paste_name: string
@param paste_name: (Optional) Title of the paste.
Default is to paste with no title.
@type paste_private: string
@param paste_private: (Optional) C{'public'} if the paste is public (visible
by everyone), C{'unlisted'} if it's public but not searchable.
C{'private'} if the paste is private and not searchable or indexed.
The Pastebin FAQ (U{http://pastebin.com/faq}) claims
private pastes are not indexed by search engines (aka Google).
@type paste_expire_date: string
@param paste_expire_date: (Optional) Expiration date for the paste.
Once past this date the paste is deleted automatically. Valid
values are found in the L{PastebinAPI.paste_expire_date} class member.
If not provided, the paste never expires.
@type paste_format: string
@param paste_format: (Optional) Programming language of the code being
pasted. This enables syntax highlighting when reading the code in
U{http://pastebin.com}. Default is no syntax highlighting (text is
just text and not source code).
@rtype: string
@return: Returns the URL to the newly created paste.
"""
# Code snippet to submit
argv = { 'paste_code' : str(paste_code) }
# Name of the poster
if paste_name is not None:
argv['paste_name'] = str(paste_name)
        # Is the snippet private? The legacy API only distinguishes public (0)
        # from non-public (1), so 'unlisted' and 'private' both map here to 1.
        if paste_private is not None:
            if str(paste_private).strip().lower() == 'public':
                argv['paste_private'] = 0
            else:
                argv['paste_private'] = 1
# Expiration for the snippet
if paste_expire_date is not None:
paste_expire_date = str(paste_expire_date).strip().upper()
argv['paste_expire_date'] = paste_expire_date
# Syntax highlighting
if paste_format is not None:
paste_format = str(paste_format).strip().lower()
argv['paste_format'] = paste_format
        # Let's try to read the URL that we've just built.
request_string = urllib.urlopen(self._legacy_api_url, urllib.urlencode(argv))
response = request_string.read()
        # Do some basic error checking so we can gracefully handle the errors we are likely to encounter.
if response.startswith(self._bad_request):
raise PastebinError(response)
elif not response.startswith(self._prefix_url):
raise PastebinError(response)
return response
######################################################
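# Module-level aliases for the PastebinAPI methods. In Python 2 these are
# unbound methods, so a PastebinAPI instance must still be supplied as the
# first argument; instantiating the class directly is simpler (a sketch):
#
#     pb = PastebinAPI()
#     url = pb.legacy_paste('code to share', paste_format='python')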
delete_paste = PastebinAPI.delete_paste
user_details = PastebinAPI.user_details
trending = PastebinAPI.trending
pastes_by_user = PastebinAPI.pastes_by_user
generate_user_key = PastebinAPI.generate_user_key
legacy_paste = PastebinAPI.legacy_paste
paste = PastebinAPI.paste
######################################################
if __name__ == "__main__":
main()
| [((338, 48, 338, 70), 'urllib.urlencode', 'urllib.urlencode', ({(338, 65, 338, 69): 'argv'}, {}), '(argv)', False, 'import urllib\n'), ((388, 55, 388, 77), 'urllib.urlencode', 'urllib.urlencode', ({(388, 72, 388, 76): 'argv'}, {}), '(argv)', False, 'import urllib\n'), ((441, 55, 441, 77), 'urllib.urlencode', 'urllib.urlencode', ({(441, 72, 441, 76): 'argv'}, {}), '(argv)', False, 'import urllib\n'), ((517, 55, 517, 77), 'urllib.urlencode', 'urllib.urlencode', ({(517, 72, 517, 76): 'argv'}, {}), '(argv)', False, 'import urllib\n'), ((569, 61, 569, 83), 'urllib.urlencode', 'urllib.urlencode', ({(569, 78, 569, 82): 'argv'}, {}), '(argv)', False, 'import urllib\n'), ((678, 55, 678, 77), 'urllib.urlencode', 'urllib.urlencode', ({(678, 72, 678, 76): 'argv'}, {}), '(argv)', False, 'import urllib\n'), ((763, 62, 763, 84), 'urllib.urlencode', 'urllib.urlencode', ({(763, 79, 763, 83): 'argv'}, {}), '(argv)', False, 'import urllib\n')] |
seron-ux/News-app | app/requests.py | d22b256b26fb9fa2bb77658952139b9ddebb8f8c | import urllib.request,json
from .models import News
import requests
# API key and base URLs; populated by configure_request() at app start-up
api_key = None
base_url = None
base_url2 = None
def configure_request(app):
global api_key,base_url,base_url2
api_key = app.config['NEWS_API_KEY']
base_url = app.config['NEWS_API_BASE_URL']
base_url2 = app.config['ARTICLE_API_BASE_URL']
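# Sketch of the expected wiring (the factory/config names are assumptions;
# they live outside this module):
#
#     from app import create_app
#     app = create_app('production')   # loads NEWS_API_KEY and the base URLs
#     configure_request(app)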
def get_news(category):
'''
    Function that gets the JSON response to our news URL request.
'''
get_news_url = base_url.format(category,api_key)
print(get_news_url)
get_news_response = requests.get(get_news_url).json()
print(get_news_response)
news_results = None
    # Use .get() so error payloads without an 'articles' key don't raise KeyError
    if get_news_response.get('articles'):
news_results_list = get_news_response['articles']
news_results = process_results(news_results_list)
return news_results
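# Example (illustrative): get_news('business') returns a list of News objects
# for the 'business' category, or None when the response carries no articles.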
def search_news(news_name):
search_news_url = 'https://api.thenewsdb.org/3/search/news?api_key={}&query={}'.format(api_key,news_name)
search_news_response = requests.get(search_news_url).json()
search_news_results = None
    if search_news_response.get('results'):
search_news_list = search_news_response['results']
search_news_results = process_results(search_news_list)
return search_news_results
def process_results(news_list):
'''
Function that processes the news result and transform them to a list of Objects
Args:
news_list: A list of dictionaries that contain news details
Returns :
news_results: A list of news objects
'''
news_results = []
for news_item in news_list:
title = news_item.get('title')
image = news_item.get('urlToImage')
description = news_item.get('description')
date = news_item.get('publishedAt')
article = news_item.get('url')
if image:
news_object = News(title,image,description,date,article)
news_results.append(news_object)
return news_results
def get_article(source):
'''
    Function that gets the JSON response for a single article request.
    '''
    # base_url2 is the article endpoint configured from ARTICLE_API_BASE_URL
    get_news_url = base_url2.format(source, api_key)
with urllib.request.urlopen(get_news_url) as url:
get_news_data = url.read()
get_news_response = json.loads(get_news_data)
news_results = None
    if get_news_response.get('articles'):
news_results_list = get_news_response['articles']
news_results = process_results(news_results_list)
return news_results
| [((82, 31, 82, 56), 'json.loads', 'json.loads', ({(82, 42, 82, 55): 'get_news_data'}, {}), '(get_news_data)', False, 'import urllib.request, json\n'), ((26, 24, 26, 50), 'requests.get', 'requests.get', ({(26, 37, 26, 49): 'get_news_url'}, {}), '(get_news_url)', False, 'import requests\n'), ((42, 27, 42, 56), 'requests.get', 'requests.get', ({(42, 40, 42, 55): 'search_news_url'}, {}), '(search_news_url)', False, 'import requests\n')] |
caoxudong/code_practice | leetcode/151_reverse _words_in_a_string.py | cb960cf69d67ae57b35f0691d35e15c11989e6d2 | """
Given an input string, reverse the string word by word.
For example,
Given s = "the sky is blue",
return "blue is sky the".
For C programmers: Try to solve it in-place in O(1) space.
Clarification:
* What constitutes a word?
A sequence of non-space characters constitutes a word.
* Could the input string contain leading or trailing spaces?
Yes. However, your reversed string should not contain leading or trailing spaces.
* How about multiple spaces between two words?
Reduce them to a single space in the reversed string.
https://leetcode.com/problems/reverse-words-in-a-string/
"""
class Solution:
# @param s, a string
# @return a string
def reverseWords(self, s):
elements = s.split(" ")
elements = [x for x in elements if x != ""]
elements = elements[::-1]
return " ".join(elements) | [] |
LittleNed/toontown-stride | toontown/uberdog/DistributedInGameNewsMgr.py | 1252a8f9a8816c1810106006d09c8bdfe6ad1e57 | import socket, datetime, os
from direct.distributed.DistributedObjectGlobal import DistributedObjectGlobal
from direct.distributed.DistributedObject import DistributedObject
from toontown.toonbase import ToontownGlobals
from toontown.uberdog import InGameNewsResponses
class DistributedInGameNewsMgr(DistributedObject):
notify = directNotify.newCategory('InGameNewsMgr')
neverDisable = 1
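    # Note: base, messenger and directNotify are not imported here; Panda3D's
    # ShowBase injects them into the builtins at engine start-up.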
def __init__(self, cr):
DistributedObject.__init__(self, cr)
base.cr.inGameNewsMgr = self
def delete(self):
DistributedObject.delete(self)
self.cr.inGameNewsMgr = None
return
def disable(self):
self.notify.debug("i'm disabling InGameNewsMgr rightnow.")
DistributedObject.disable(self)
def generate(self):
self.notify.debug('BASE: generate')
DistributedObject.generate(self)
def setLatestIssueStr(self, issueStr):
self.latestIssueStr = issueStr
self.latestIssue = base.cr.toontownTimeManager.convertUtcStrToToontownTime(issueStr)
messenger.send('newIssueOut')
self.notify.info('latestIssue=%s' % self.latestIssue)
def getLatestIssueStr(self):
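        # Stub: the latest-issue string is pushed from the server via
        # setLatestIssueStr(), so the client-side getter has nothing to do.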
pass
def getLatestIssue(self):
return self.latestIssue
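    # Listeners elsewhere can react to the 'newIssueOut' event fired by
    # setLatestIssueStr(); a sketch, assuming the listener is a DirectObject
    # (the handler name is hypothetical):
    #
    #     self.accept('newIssueOut', self.__handleNewIssue)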
| [((12, 8, 12, 44), 'direct.distributed.DistributedObject.DistributedObject.__init__', 'DistributedObject.__init__', ({(12, 35, 12, 39): 'self', (12, 41, 12, 43): 'cr'}, {}), '(self, cr)', False, 'from direct.distributed.DistributedObject import DistributedObject\n'), ((16, 8, 16, 38), 'direct.distributed.DistributedObject.DistributedObject.delete', 'DistributedObject.delete', ({(16, 33, 16, 37): 'self'}, {}), '(self)', False, 'from direct.distributed.DistributedObject import DistributedObject\n'), ((22, 8, 22, 39), 'direct.distributed.DistributedObject.DistributedObject.disable', 'DistributedObject.disable', ({(22, 34, 22, 38): 'self'}, {}), '(self)', False, 'from direct.distributed.DistributedObject import DistributedObject\n'), ((26, 8, 26, 40), 'direct.distributed.DistributedObject.DistributedObject.generate', 'DistributedObject.generate', ({(26, 35, 26, 39): 'self'}, {}), '(self)', False, 'from direct.distributed.DistributedObject import DistributedObject\n')] |
azeemchaudhrry/30DaysofPython | Day10/loops.py | 8aa80c81967d87e4bc70254a41517d0303ca0599 | # Day 10 Loops
from countries import *
# While Loop
# count = 0
# while count < 5:
# if count == 3:
# break
# print(count)
# count = count + 1
# numbers = [0,2,3,4,5,6,7,8,9,10]
# for number in numbers:
# print(number)
# language = 'Python'
# for letter in language:
# print(letter)
# tpl = ('python','updates','wow')
# for number in tpl:
# print(number)
# person = {
# 'first_name':'Asabeneh',
# 'last_name':'Yetayeh',
# 'age':250,
# 'country':'Finland',
# 'is_marred':True,
# 'skills':['JavaScript', 'React', 'Node', 'MongoDB', 'Python'],
# 'address':{
# 'street':'Space street',
# 'zipcode':'02210'
# }
# }
# print('------------------------------------')
# for key in person:
# print(key)
# print('------------------------------------')
# for key,value in person.items():
# print(key, value)
# print('--------------------------------------')
# it_companies = {'Facebook', 'Google', 'Microsoft', 'Apple', 'IBM', 'Oracle', 'Amazon'}
# for company in it_companies:
# print(company)
# print('--------------------------------------')
# numbers = (0,1,2,3,4,5,6,7)
# for number in numbers:
# print(number)
# if(number == 3):
# break
# print('--------------------------------------')
# for number in numbers:
# print(number)
# if(number == 3):
# continue
# print('--------------------------------------')
# numbers = (0,1,2,3,4,5)
# for number in numbers:
# print(number)
# if number == 3:
# continue
# print('Next number should be ', number + 1) if number != 5 else print("loop's end") # for short hand conditions need both if and else statements
# print('outside the loop')
# print('--------------------------------------')
# lst = list(range(11))
# print(lst)
# st = set(range(1,11))
# print(st)
# lst = list(range(0,11,2))
# print(lst)
# st = set(range(0,11,2))
# print(st)
# Exercises: Day 10
# Iterate 0 to 10 using for loop, do the same using while loop.
# numbers = [0,1,2,3,4,5,6,7,8,9,10]
# for number in numbers:
# print(number)
# count = 0
# while count < 10:
# print(count)
# count += 1
# Iterate 10 to 0 using for loop, do the same using while loop.
# for number in range(10,-1,-1):
# print(number)
# count = 10
# while count > -1:
# print(count)
# count -= 1
# Write a loop that makes seven calls to print(), so we get on the output the following triangle:
for index in range(1, 8):  # seven calls to print(), rows of 1..7 '#'s
    print(index * '#')
limit = 9
for i in range(0,limit):
for j in range(0,limit):
print('# ', end='')
print('')
for i in range(0, 11):
print(f'{i} x {i} = {i * i}')
frameworks = ['Python', 'Numpy','Pandas','Django', 'Flask']
for framework in frameworks:
print(framework)
for i in range(0,101):
if i % 2 == 0:
print(i)
for i in range(0,101):
if i % 2 != 0:
print(i)
total = 0  # use 'total' to avoid shadowing the built-in sum()
for i in range(0, 101):
    total += i
print('The sum of all numbers is:', total)
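# Sanity check: 0 + 1 + ... + 100 = 100 * 101 / 2 = 5050, the printed total.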
even_sum = odd_sum = 0
for i in range(0, 101):
    if i % 2 == 0:
        even_sum += i
    else:
        odd_sum += i
print(f'The sum of all evens is {even_sum}. And the sum of all odds is {odd_sum}.')
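# Expected output: evens sum to 2550 and odds to 2500 (2550 + 2500 = 5050).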
for country in countries:
if 'land' in country:
print(country)
fruits = ['banana', 'orange', 'mango', 'lemon']
total_elements = len(fruits) - 1
for i in range(len(fruits) // 2):
    # Tuple assignment swaps the mirrored elements without a temp variable
    fruits[i], fruits[total_elements - i] = fruits[total_elements - i], fruits[i]
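# The swap loop above is equivalent to fruits.reverse() (in place) or
# fruits[::-1] (a new list); the explicit loop is kept for practice.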
print(fruits) | [] |
Dimas625/tessera | tessera-server/tessera/views_api.py | 8e554f217220228fb8a0662fb5075cb839e9f1b1 | # -*- mode:python -*-
import flask
import json
import logging
from datetime import datetime
import inflection
from functools import wraps
from flask import request, url_for
from werkzeug.exceptions import HTTPException
from .client.api.model import *
from . import database
from . import helpers
from .application import db
mgr = database.DatabaseManager(db)
log = logging.getLogger(__name__)
api = flask.Blueprint('api', __name__)
# =============================================================================
# API Helpers
# =============================================================================
def route_api(application, *args, **kwargs):
def decorator(fn):
@application.route(*args, **kwargs)
@wraps(fn)
def wrapper(*args, **kwargs):
headers = None
status_code = 200
try:
value = fn(*args, **kwargs)
except HTTPException as e:
raise helpers.set_exception_response(e)
if isinstance(value, tuple):
if len(value) > 2:
headers = value[2]
status_code = value[1]
value = value[0]
return helpers.jsonify(value, status_code, headers)
return fn
return decorator
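# Views wrapped by route_api may return either a bare JSON-serializable value
# or a (value, status_code) / (value, status_code, headers) tuple, mirroring
# Flask's own view-return conventions.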
def _dashboard_sort_column():
"""Return a SQLAlchemy column descriptor to sort results by, based on
the 'sort' and 'order' request parameters.
"""
columns = {
'created' : database.DashboardRecord.creation_date,
'modified' : database.DashboardRecord.last_modified_date,
'category' : database.DashboardRecord.category,
'id' : database.DashboardRecord.id,
'title' : database.DashboardRecord.title
}
colname = helpers.get_param('sort', 'created')
order = helpers.get_param('order')
column = database.DashboardRecord.creation_date
if colname in columns:
column = columns[colname]
    if order == 'desc':
return column.desc()
else:
return column.asc()
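# Example (illustrative): GET /api/dashboard/?sort=title&order=desc lists
# dashboards by title, descending; unknown sort names fall back to the
# creation date.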
def _set_dashboard_hrefs(dash):
"""Add the various ReSTful hrefs to an outgoing dashboard
representation. dash should be the dictionary for of the dashboard,
not the model object.
"""
id = dash['id']
dash['href'] = url_for('api.dashboard_get', id=id)
dash['definition_href'] = url_for('api.dashboard_get_definition', id=id)
dash['view_href'] = url_for('ui.dashboard_with_slug',
id=id,
slug=inflection.parameterize(dash['title']))
if 'definition' in dash:
definition = dash['definition']
definition['href'] = url_for('api.dashboard_get_definition', id=id)
return dash
def _dashboards_response(dashboards):
"""Return a Flask response object for a list of dashboards in API
format. dashboards must be a list of dashboard model objects, which
will be converted to their JSON representation.
"""
if not isinstance(dashboards, list):
dashboards = [dashboards]
include_definition = helpers.get_param_boolean('definition', False)
return [ _set_dashboard_hrefs(d.to_json(include_definition=include_definition)) for d in dashboards]
def _set_tag_hrefs(tag):
"""Add ReSTful href attributes to a tag's dictionary
representation.
"""
id = tag['id']
tag['href'] = url_for('api.tag_get', id=id)
return tag
def _tags_response(tags):
"""Return a Flask response object for a list of tags in API
format. tags must be a list of tag model objects, which
will be converted to their JSON representation.
"""
if not isinstance(tags, list):
tags = [tags]
return [_set_tag_hrefs(t.to_json()) for t in tags]
# =============================================================================
# Dashboards
# =============================================================================
@route_api(api, '/dashboard/')
def dashboard_list():
"""Listing for all dashboards. Returns just the metadata, not the
definitions.
"""
imported_from = request.args.get('imported_from')
if imported_from:
query = database.DashboardRecord.query.filter_by(imported_from=imported_from) \
.order_by(_dashboard_sort_column())
else:
query = database.DashboardRecord.query.order_by(_dashboard_sort_column())
    dashboards = query.all()
return _dashboards_response(dashboards)
@route_api(api, '/dashboard/tagged/<tag>')
def dashboard_list_tagged(tag):
"""Listing for a set of dashboards with a tag applied. Returns just
the metadata, not the definitions.
"""
tag = database.TagRecord.query.filter_by(name=tag).first()
if not tag:
return _dashboards_response([])
    dashboards = tag.dashboards.order_by(_dashboard_sort_column()).all()
return _dashboards_response(dashboards)
@route_api(api, '/dashboard/category/<category>')
def dashboard_list_dashboards_in_category(category):
"""Listing for a set of dashboards in a specified category. Returns
just the metadata, not the definitions.
"""
    dashboards = (database.DashboardRecord.query
                  .filter_by(category=category)
                  .order_by(_dashboard_sort_column())
                  .all())
return _dashboards_response(dashboards)
@route_api(api, '/dashboard/category/')
def dashboard_list_all_dashboard_categories():
result = db.session.query(
database.DashboardRecord.category,
db.func.count(database.DashboardRecord.category)
).group_by(database.DashboardRecord.category).all()
categories = []
for (name, count) in result:
categories.append({
'name' : name,
'count' : count,
})
return categories
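# Example response shape for the category listing (values are illustrative):
#   [{"name": "ops", "count": 3}, {"name": "dev", "count": 7}]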
@route_api(api, '/dashboard/<id>')
def dashboard_get(id):
"""Get the metadata for a single dashboard.
"""
dashboard = database.DashboardRecord.query.get_or_404(id)
rendering = helpers.get_param('rendering', False)
include_definition = helpers.get_param_boolean('definition', False)
dash = _set_dashboard_hrefs(dashboard.to_json(rendering or include_definition))
if rendering:
dash['preferences'] = helpers.get_preferences()
return dash
@route_api(api, '/dashboard/<id>/for-rendering')
def dashboard_get_for_rendering(id):
"""Get a dashboard with its definition, and current settings necessary
for rendering.
"""
dashboard = database.DashboardRecord.query.get_or_404(id)
dash = _set_dashboard_hrefs(dashboard.to_json(True))
return {
'dashboard' : dash,
'preferences' : helpers.get_preferences()
}
@route_api(api, '/dashboard/', methods=['POST'])
def dashboard_create():
"""Create a new dashboard with an empty definition.
"""
dashboard = database.DashboardRecord.from_json(request.json)
if not dashboard.title:
return {
'error_message': "Missing required field 'title'"
}, 400
if 'definition' in request.json:
dashboard.definition = database.DefinitionRecord(dumps(request.json['definition']))
else:
dashboard.definition = database.DefinitionRecord(dumps(DashboardDefinition()))
mgr.store_dashboard(dashboard)
href = url_for('api.dashboard_get', id=dashboard.id)
return {
'dashboard_href' : href,
'view_href' : url_for('ui.dashboard_with_slug',
id=dashboard.id,
slug=inflection.parameterize(dashboard.title))
}, 201, { 'Location' : href }
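# Note: dashboard_create follows the usual ReST creation convention: a 201
# status with a Location header pointing at the new dashboard resource.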
@route_api(api, '/dashboard/<id>', methods=['PUT'])
def dashboard_update(id):
"""Update the metadata for an existing dashboard.
"""
body = request.json
dashboard = database.DashboardRecord.query.get_or_404(id)
dashboard.merge_from_json(body)
mgr.store_dashboard(dashboard)
# TODO - return similar to create, above
return {}
@route_api(api, '/dashboard/<id>', methods=['DELETE'])
def dashboard_delete(id):
"""Delete a dashboard. Use with caution.
"""
dashboard = database.DashboardRecord.query.get_or_404(id)
db.session.delete(dashboard)
db.session.commit()
return {}, 204
@route_api(api, '/dashboard/<id>/definition')
def dashboard_get_definition(id):
"""Fetch the definition for a dashboard. This returns the
    representation to use when modifying a dashboard.
"""
    definition = database.DashboardRecord.query.get_or_404(id).definition.to_json()
definition['href'] = url_for('api.dashboard_get_definition', id=id)
definition['dashboard_href'] = url_for('api.dashboard_get', id=id)
return definition
@route_api(api, '/dashboard/<id>/definition', methods=['PUT'])
def dashboard_update_definition(id):
"""Update the definition of the dashboard. This should use the
representation returned by /api/dashboard/<id>/definition, and
should NOT have any embedded variables expanded, nor should it
have complete graphite URLs in the queries.
"""
dashboard = database.DashboardRecord.query.get_or_404(id)
# Validate the payload
definition = DashboardDefinition.from_json(json.loads(request.data.decode('utf-8')))
if dashboard.definition:
dashboard.definition.definition = dumps(definition)
else:
        dashboard.definition = database.DefinitionRecord(request.data)
mgr.store_dashboard(dashboard)
return {}
# =============================================================================
# Tags
# =============================================================================
@route_api(api, '/tag/')
def tag_list():
"""Listing for all tags.
"""
tags = db.session.query(database.TagRecord).all()
return _tags_response(tags)
@route_api(api, '/tag/<id>')
def tag_get(id):
tag = database.TagRecord.query.get_or_404(id)
return _tags_response(tag)
# =============================================================================
# Miscellany
# =============================================================================
@route_api(api, '/preferences/')
def preferences_get():
return helpers.get_preferences()
@route_api(api, '/preferences/', methods=['PUT'])
def preferences_put():
helpers.set_preferences(request.json)
return helpers.get_preferences()
| [((19, 6, 19, 33), 'logging.getLogger', 'logging.getLogger', ({(19, 24, 19, 32): '__name__'}, {}), '(__name__)', False, 'import logging\n'), ((21, 6, 21, 38), 'flask.Blueprint', 'flask.Blueprint', ({(21, 22, 21, 27): '"""api"""', (21, 29, 21, 37): '__name__'}, {}), "('api', __name__)", False, 'import flask\n'), ((75, 30, 75, 65), 'flask.url_for', 'url_for', (), '', False, 'from flask import request, url_for\n'), ((76, 30, 76, 76), 'flask.url_for', 'url_for', (), '', False, 'from flask import request, url_for\n'), ((101, 18, 101, 47), 'flask.url_for', 'url_for', (), '', False, 'from flask import request, url_for\n'), ((123, 20, 123, 53), 'flask.request.args.get', 'request.args.get', ({(123, 37, 123, 52): '"""imported_from"""'}, {}), "('imported_from')", False, 'from flask import request, url_for\n'), ((211, 11, 211, 56), 'flask.url_for', 'url_for', (), '', False, 'from flask import request, url_for\n'), ((249, 25, 249, 71), 'flask.url_for', 'url_for', (), '', False, 'from flask import request, url_for\n'), ((250, 35, 250, 70), 'flask.url_for', 'url_for', (), '', False, 'from flask import request, url_for\n'), ((30, 9, 30, 18), 'functools.wraps', 'wraps', ({(30, 15, 30, 17): 'fn'}, {}), '(fn)', False, 'from functools import wraps\n'), ((82, 29, 82, 75), 'flask.url_for', 'url_for', (), '', False, 'from flask import request, url_for\n'), ((79, 43, 79, 81), 'inflection.parameterize', 'inflection.parameterize', ({(79, 67, 79, 80): "dash['title']"}, {}), "(dash['title'])", False, 'import inflection\n'), ((264, 58, 264, 86), 'flask.request.data.decode', 'request.data.decode', ({(264, 78, 264, 85): '"""utf-8"""'}, {}), "('utf-8')", False, 'from flask import request, url_for\n'), ((216, 35, 216, 75), 'inflection.parameterize', 'inflection.parameterize', ({(216, 59, 216, 74): 'dashboard.title'}, {}), '(dashboard.title)', False, 'import inflection\n')] |