#!/usr/bin/env python
# Copyright 2015 Criteo. All rights reserved.
#
# The contents of this file are licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import re
import argparse
import yaml
from ciscoconfparse import CiscoConfParse
def cli_parser(argv=None):
parser = argparse.ArgumentParser(
        description='Generate configuration commands by finding differences'
        ' between two Cisco IOS-style configuration files')
parser.add_argument('--origin', metavar='origin',
type=str, help='Origin configuration file')
parser.add_argument('--target', metavar='target',
type=str, help='Target configuration file')
parser.add_argument('--vendor', help='Vendor or OS definition',
type=str, metavar='vendor')
parser.add_argument('--config', metavar='config',
type=str, help='config file name',
default='etc/netcompare.yml')
return parser.parse_args(argv)
def clean_line(line, vendor):
cleaned_lines = []
if vendor == 'tmsh':
        # Remove text after a '#' (because CiscoConfParse crashes if there is a
        # bracket in a comment)
remove_comment = re.search('(?P<before_comment>[^\#]*)\#', line)
if remove_comment:
line = remove_comment.group('before_comment')
# match " begin } end"
tmsh_curly_bracket_left = re.search(
'^(?P<space>\s*)(?P<begin>.*)'
'(?P<bracket>[\}\{])(?'
'P<end>[^\}\{]*)$',
line)
if tmsh_curly_bracket_left:
# replace
# " begin } end"
# by
# " begin }
# end
cleaned_lines = clean_line(tmsh_curly_bracket_left.
group('begin'), vendor)
cleaned_lines.append(tmsh_curly_bracket_left.group('bracket'))
cleaned_lines.append(tmsh_curly_bracket_left.group('end').
rstrip(' \t\r\n\0'))
else:
cleaned_lines.append(line.rstrip(' \t\r\n\0'))
else:
cleaned_lines.append(line.rstrip(' \t\r\n\0'))
return cleaned_lines
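# Example (illustrative tmsh input, not from the original source):
#   clean_line('ltm pool p { members none }', 'tmsh')
#   -> ['ltm pool p', '{', ' members none', '}', '']
# i.e. each curly bracket is split onto its own element so that CiscoConfParse
# can treat the brackets as block delimiters.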
def clean_file(file, vendor, config):
with open(file) as file_opened:
list = file_opened.readlines()
list_clean = []
try:
config[vendor]['dont_compare']
for line in list:
for dont_compare in config[vendor]['dont_compare']:
if dont_compare in line:
break
else:
list_clean = (list_clean +
clean_line(line, vendor))
return list_clean
    except (KeyError, TypeError):
for line in list:
list_clean = (list_clean +
clean_line(line, vendor))
return list_clean
def get_one_line(line, vendor, config):
if line[0] == 'NO':
line_text_no = re.match("^(\s*)" +
config[vendor]['no_command'] +
" (.*)", line[1])
if line_text_no:
cmd = (line_text_no.group(1) + line_text_no.group(2))
else:
line_text_without_no = re.match("^(\s*)(.*)", line[1])
cmd = (line_text_without_no.group(1) +
config[vendor]['no_command'] + " " +
line_text_without_no.group(2))
return cmd
else:
return line[1]
def get_diff_lines(d, vendor, config, depth=0):
result = []
for k, v in sorted(d.items(), key=lambda x: x[0]):
result.append(get_one_line(k, vendor, config))
result.extend(get_diff_lines(v, vendor, config, depth+1))
return result
def netcompare(origin, target, vendor, config):
origin_file = CiscoConfParse(origin,
comment=config[vendor]
['CiscoConfParse_comment'],
syntax=config[vendor]
['CiscoConfParse_syntax'],
factory=False)
target_file = CiscoConfParse(target,
comment=config[vendor]
['CiscoConfParse_comment'],
syntax=config[vendor]
['CiscoConfParse_syntax'],
factory=False)
result = {}
for line_origin in origin_file.objs:
eq_lines = (target_file.find_objects(
'^' + re.escape(line_origin.text) + '$'))
for line_target in eq_lines:
if line_origin.geneology_text == line_target.geneology_text:
break
else: # Delete needed
pointer = result
index = len(line_origin.geneology_text)
for cmd in line_origin.geneology_text:
index = index - 1
if ('NO', cmd) in pointer:
break
if ('_CR', cmd) in pointer:
pointer = pointer.get(('_CR', cmd))
elif index == 0:
pointer[('NO', cmd)] = {}
pointer = pointer.get(('NO', cmd))
else:
pointer[('_CR', cmd)] = {}
pointer = pointer.get(('_CR', cmd))
for line_target in target_file.objs:
find = 0
eq_lines = (origin_file.find_objects(
'^' + re.escape(line_target.text) + '$'))
for line_origin in eq_lines:
if line_origin.geneology_text == line_target.geneology_text:
find = 1
if find == 0: # Create needed
pointer = result
for cmd in line_target.geneology_text:
if not ('_CR', cmd) in pointer:
pointer[('_CR', cmd)] = {}
pointer = pointer.get(('_CR', cmd))
return result
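# Illustrative shape of the result returned above (not taken from the original
# source): a nested dict keyed by ('_CR', cmd) for commands that must be
# (re)entered and ('NO', cmd) for commands that must be negated, e.g.
#   {('_CR', 'interface Ethernet1'): {('NO', ' shutdown'): {}}}
# get_one_line() turns each key into a CLI command and get_diff_lines()
# flattens the tree into an ordered list of commands.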
def main(argv=None):
args = cli_parser(argv)
with open(args.config, 'r') as f:
        config = yaml.safe_load(f)
origin_list = clean_file(args.origin, args.vendor, config)
target_list = clean_file(args.target, args.vendor, config)
display_commands = netcompare(origin_list,
target_list, args.vendor, config)
result = get_diff_lines(display_commands, args.vendor, config)
for line in result:
        print(line)
if __name__ == '__main__':
main()
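# Example invocation (file names and the vendor key are illustrative
# assumptions; valid vendor keys come from the YAML config file):
#   python netcompare.py --origin running.conf --target intended.conf \
#       --vendor ios --config etc/netcompare.yml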
|
from enum import Enum
import matplotlib as mpl
from cycler import cycler
import seaborn
#[x for x in list(rcParams.keys()) if 'font' in x or 'size' in x]
class Project(Enum):
THESIS = 1
SLIDE = 2
PAPER = 3
def cm2inch(value):
return value / 2.54
class KAOBOOK():
TEXTWIDTH = cm2inch(9)
FULLWIDTH = cm2inch(13)
MARGINWIDTH = cm2inch(4)
FONTSIZE = 7
# a colorblind-friendly color cycle
#COLOR_CYCLE = cycler(color = [(230,159,0), (86,180,233), (0,158,115), (0,114,178),
# (213,94,0), (0,0,0), (204,121,167), (240,228,66)])
COLOR_CYCLE = cycler(color=seaborn.color_palette('colorblind'))
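# Example usage of these constants (a sketch; assumes matplotlib.pyplot is
# imported as plt and set_kaobook_settings() below has been called):
#   fig, ax = plt.subplots(figsize=(KAOBOOK.TEXTWIDTH, cm2inch(6)))
#   fig.savefig('figure.pdf', bbox_inches='tight')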
def set_kaobook_settings():
font = {'family': 'Open Sans', 'size': KAOBOOK.FONTSIZE}
mpl.rc('font', **font)
mpl.rc('legend', handlelength=1)
mpl.rcParams['legend.fontsize'] = KAOBOOK.FONTSIZE
mpl.rcParams['figure.titlesize'] = KAOBOOK.FONTSIZE
mpl.rcParams['axes.titlesize'] = KAOBOOK.FONTSIZE
mpl.rcParams['xtick.direction'] = 'in'
mpl.rcParams['ytick.direction'] = 'in'
mpl.rcParams['axes.labelpad'] = 4
mpl.rcParams["legend.borderaxespad"] = 0.2
mpl.rcParams["legend.handlelength"] = 0.8
mpl.rcParams['lines.markersize'] = 3
mpl.rcParams['lines.linewidth'] = 0.8
    mpl.rc('axes', prop_cycle=KAOBOOK.COLOR_CYCLE)
|
from . import *
class AWS_AppStream_ImageBuilder_VpcConfig(CloudFormationProperty):
def write(self, w):
with w.block("vpc_config"):
self.property(w, "SecurityGroupIds", "security_group_ids", ListValueConverter(StringValueConverter()))
self.property(w, "SubnetIds", "subnet_ids", ListValueConverter(StringValueConverter()))
class AWS_AppStream_ImageBuilder_AccessEndpoint(CloudFormationProperty):
def write(self, w):
with w.block("access_endpoint"):
self.property(w, "EndpointType", "endpoint_type", StringValueConverter())
self.property(w, "VpceId", "vpce_id", StringValueConverter())
class AWS_AppStream_Stack_ApplicationSettings(CloudFormationProperty):
def write(self, w):
with w.block("application_settings"):
self.property(w, "SettingsGroup", "settings_group", StringValueConverter())
self.property(w, "Enabled", "enabled", BasicValueConverter())
class AWS_AppStream_Fleet_DomainJoinInfo(CloudFormationProperty):
def write(self, w):
with w.block("domain_join_info"):
self.property(w, "OrganizationalUnitDistinguishedName", "organizational_unit_distinguished_name", StringValueConverter())
self.property(w, "DirectoryName", "directory_name", StringValueConverter())
class AWS_AppStream_DirectoryConfig_ServiceAccountCredentials(CloudFormationProperty):
def write(self, w):
with w.block("service_account_credentials"):
self.property(w, "AccountName", "account_name", StringValueConverter())
self.property(w, "AccountPassword", "account_password", StringValueConverter())
class AWS_AppStream_Stack_AccessEndpoint(CloudFormationProperty):
def write(self, w):
with w.block("access_endpoint"):
self.property(w, "EndpointType", "endpoint_type", StringValueConverter())
self.property(w, "VpceId", "vpce_id", StringValueConverter())
class AWS_AppStream_Fleet_ComputeCapacity(CloudFormationProperty):
def write(self, w):
with w.block("compute_capacity"):
self.property(w, "DesiredInstances", "desired_instances", BasicValueConverter())
class AWS_AppStream_ImageBuilder_DomainJoinInfo(CloudFormationProperty):
def write(self, w):
with w.block("domain_join_info"):
self.property(w, "OrganizationalUnitDistinguishedName", "organizational_unit_distinguished_name", StringValueConverter())
self.property(w, "DirectoryName", "directory_name", StringValueConverter())
class AWS_AppStream_Stack_StorageConnector(CloudFormationProperty):
def write(self, w):
with w.block("storage_connector"):
self.property(w, "Domains", "domains", ListValueConverter(StringValueConverter()))
self.property(w, "ResourceIdentifier", "resource_identifier", StringValueConverter())
self.property(w, "ConnectorType", "connector_type", StringValueConverter())
class AWS_AppStream_Fleet_VpcConfig(CloudFormationProperty):
def write(self, w):
with w.block("vpc_config"):
self.property(w, "SubnetIds", "subnet_ids", ListValueConverter(StringValueConverter()))
self.property(w, "SecurityGroupIds", "security_group_ids", ListValueConverter(StringValueConverter()))
class AWS_AppStream_Stack_UserSetting(CloudFormationProperty):
def write(self, w):
with w.block("user_setting"):
self.property(w, "Action", "action", StringValueConverter())
self.property(w, "Permission", "permission", StringValueConverter())
class AWS_AppStream_Stack(CloudFormationResource):
cfn_type = "AWS::AppStream::Stack"
tf_type = "aws_app_stream_stack" # TODO: Most likely not working
ref = "arn"
attrs = {}
def write(self, w):
with self.resource_block(w):
self.property(w, "Description", "description", StringValueConverter())
self.repeated_block(w, "StorageConnectors", AWS_AppStream_Stack_StorageConnector)
self.property(w, "DeleteStorageConnectors", "delete_storage_connectors", BasicValueConverter())
self.property(w, "EmbedHostDomains", "embed_host_domains", ListValueConverter(StringValueConverter()))
self.repeated_block(w, "UserSettings", AWS_AppStream_Stack_UserSetting)
self.property(w, "AttributesToDelete", "attributes_to_delete", ListValueConverter(StringValueConverter()))
self.property(w, "RedirectURL", "redirect_url", StringValueConverter())
self.property(w, "Name", "name", StringValueConverter())
self.property(w, "FeedbackURL", "feedback_url", StringValueConverter())
self.block(w, "ApplicationSettings", AWS_AppStream_Stack_ApplicationSettings)
self.property(w, "DisplayName", "display_name", StringValueConverter())
self.property(w, "Tags", "tags", ListValueConverter(ResourceTag()))
self.repeated_block(w, "AccessEndpoints", AWS_AppStream_Stack_AccessEndpoint)
class AWS_AppStream_User(CloudFormationResource):
cfn_type = "AWS::AppStream::User"
tf_type = "aws_app_stream_user" # TODO: Most likely not working
ref = "arn"
attrs = {}
def write(self, w):
with self.resource_block(w):
self.property(w, "UserName", "user_name", StringValueConverter())
self.property(w, "FirstName", "first_name", StringValueConverter())
self.property(w, "MessageAction", "message_action", StringValueConverter())
self.property(w, "LastName", "last_name", StringValueConverter())
self.property(w, "AuthenticationType", "authentication_type", StringValueConverter())
class AWS_AppStream_Fleet(CloudFormationResource):
cfn_type = "AWS::AppStream::Fleet"
tf_type = "aws_app_stream_fleet" # TODO: Most likely not working
ref = "arn"
attrs = {}
def write(self, w):
with self.resource_block(w):
self.property(w, "Description", "description", StringValueConverter())
self.block(w, "ComputeCapacity", AWS_AppStream_Fleet_ComputeCapacity)
self.block(w, "VpcConfig", AWS_AppStream_Fleet_VpcConfig)
self.property(w, "FleetType", "fleet_type", StringValueConverter())
self.property(w, "EnableDefaultInternetAccess", "enable_default_internet_access", BasicValueConverter())
self.block(w, "DomainJoinInfo", AWS_AppStream_Fleet_DomainJoinInfo)
self.property(w, "Name", "name", StringValueConverter())
self.property(w, "ImageName", "image_name", StringValueConverter())
self.property(w, "MaxUserDurationInSeconds", "max_user_duration_in_seconds", BasicValueConverter())
self.property(w, "IdleDisconnectTimeoutInSeconds", "idle_disconnect_timeout_in_seconds", BasicValueConverter())
self.property(w, "DisconnectTimeoutInSeconds", "disconnect_timeout_in_seconds", BasicValueConverter())
self.property(w, "DisplayName", "display_name", StringValueConverter())
self.property(w, "InstanceType", "instance_type", StringValueConverter())
self.property(w, "Tags", "tags", ListValueConverter(ResourceTag()))
self.property(w, "ImageArn", "image_arn", StringValueConverter())
class AWS_AppStream_ImageBuilder(CloudFormationResource):
cfn_type = "AWS::AppStream::ImageBuilder"
tf_type = "aws_app_stream_image_builder" # TODO: Most likely not working
ref = "arn"
attrs = {
"StreamingUrl": "streaming_url",
}
def write(self, w):
with self.resource_block(w):
self.property(w, "ImageName", "image_name", StringValueConverter())
self.property(w, "Description", "description", StringValueConverter())
self.block(w, "VpcConfig", AWS_AppStream_ImageBuilder_VpcConfig)
self.property(w, "EnableDefaultInternetAccess", "enable_default_internet_access", BasicValueConverter())
self.property(w, "DisplayName", "display_name", StringValueConverter())
self.block(w, "DomainJoinInfo", AWS_AppStream_ImageBuilder_DomainJoinInfo)
self.property(w, "AppstreamAgentVersion", "appstream_agent_version", StringValueConverter())
self.property(w, "InstanceType", "instance_type", StringValueConverter())
self.property(w, "Tags", "tags", ListValueConverter(ResourceTag()))
self.property(w, "Name", "name", StringValueConverter())
self.property(w, "ImageArn", "image_arn", StringValueConverter())
self.repeated_block(w, "AccessEndpoints", AWS_AppStream_ImageBuilder_AccessEndpoint)
class AWS_AppStream_DirectoryConfig(CloudFormationResource):
cfn_type = "AWS::AppStream::DirectoryConfig"
tf_type = "aws_app_stream_directory_config" # TODO: Most likely not working
ref = "arn"
attrs = {}
def write(self, w):
with self.resource_block(w):
self.property(w, "OrganizationalUnitDistinguishedNames", "organizational_unit_distinguished_names", ListValueConverter(StringValueConverter()))
self.block(w, "ServiceAccountCredentials", AWS_AppStream_DirectoryConfig_ServiceAccountCredentials)
self.property(w, "DirectoryName", "directory_name", StringValueConverter())
class AWS_AppStream_StackFleetAssociation(CloudFormationResource):
cfn_type = "AWS::AppStream::StackFleetAssociation"
tf_type = "aws_app_stream_stack_fleet_association" # TODO: Most likely not working
ref = "arn"
attrs = {}
def write(self, w):
with self.resource_block(w):
self.property(w, "FleetName", "fleet_name", StringValueConverter())
self.property(w, "StackName", "stack_name", StringValueConverter())
class AWS_AppStream_StackUserAssociation(CloudFormationResource):
cfn_type = "AWS::AppStream::StackUserAssociation"
tf_type = "aws_app_stream_stack_user_association" # TODO: Most likely not working
ref = "arn"
attrs = {}
def write(self, w):
with self.resource_block(w):
self.property(w, "SendEmailNotification", "send_email_notification", BasicValueConverter())
self.property(w, "UserName", "user_name", StringValueConverter())
self.property(w, "StackName", "stack_name", StringValueConverter())
self.property(w, "AuthenticationType", "authentication_type", StringValueConverter())
|
from threading import Thread
import time
# IO
def music():
print('begin to listen music {}'.format(time.ctime()))
time.sleep(3)
print('stop to listen music {}'.format(time.ctime()))
def game():
print('begin to play game {}'.format(time.ctime()))
time.sleep(5)
print('stop to play game {}'.format(time.ctime()))
# CPU
def add():
sum = 0
i = 1
while i<=1000000:
sum += i
i += 1
print('sum:',sum)
def mul():
sum2 = 1
i = 1
while i<=100000:
sum2 = sum2 * i
i += 1
print('sum2:',sum2)
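# Note: in CPython the GIL prevents the two CPU-bound threads below (add/mul)
# from running in parallel, so the threaded time stays close to the sequential
# ~5 s noted below; threads mainly help the I/O-bound music()/game() examples.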
if __name__ == '__main__':
start = time.time()
t1 = Thread(target=add)
t2 = Thread(target=mul)
# sequence cost time 4.990154027938843
# add()
# mul()
t1.start()
t2.start()
    t1.join()  # wait for both worker threads before measuring elapsed time
    t2.join()
print('cost time {}'.format(time.time()-start))
    # t1 = Thread(target=music)  # create thread object t1 (child thread)
    # t2 = Thread(target=game)   # create thread object t2 (child thread)
#
# t1.start()
# t2.start()
#
    # # t1.join()  # wait for the child thread: nothing proceeds until t1 finishes
# t2.join()
    # print('ending.......')  # main thread
# print(time.ctime())
|
'''
Created on 15.04.2019
@author: mayers
'''
#
import os, shutil,sys
#
lib_path = os.path.dirname( os.path.dirname( os.path.abspath(__file__) ) )
sys.path.append(lib_path)
print (lib_path)
from etc.settings import GIT_ROOT, SOURCE_DIR, GIT_BASE_DIR, TEMPLATE_DIR, LIBRARY_URL
from tkinter import filedialog
from tkinter import messagebox
from tkinter import *
import tkinter.ttk as ttk
#
from tkinter.simpledialog import Dialog
#
import git
#
from ui.inputpanels import InputFrame
from ui.getData import getData
#
from django.template.defaultfilters import slugify
#
from jinja2 import Template
#
import datetime
#
from six import text_type
from docutils.utils import column_width
#
class SpInputFrame(InputFrame):
'''
Add local functions to InputPanelUpdate
'''
def InputPanelUpdate(self, tkVar, key, value):
#print(tkVar, key, tkVar.get(),'#')
#
if type(self.datadict['values'][key])==type(True):
self.datadict['values'][key] = True if tkVar.get()=='1' else False
else:
self.datadict['values'][key] = tkVar.get()
if key=='project':
self.datadict['values']['project_fn']=slugify(self.datadict['values'][key])
self.datadict['callback_vars']['project_fn'].set(self.datadict['values']['project_fn'])
class gui_startpanel(Dialog):
'''
classdocs
'''
def __init__(self, parent, title=None, data=None):
'''
Constructor
'''
self.parent=parent
self.data=data
#
self.Row1Frame = LabelFrame(parent, relief=GROOVE, text=' 1.) Enter project name',bd=5,font=("Arial", 10, "bold"))
self.Row1Frame.grid(row=1,column=1,padx=8,pady=5,sticky=W+E, columnspan=3)
#
self.Row2Frame = LabelFrame(parent, relief=GROOVE, text=' 2.) Choose base directory and template directory' ,bd=5,font=("Arial", 10, "bold"))
self.Row2Frame.grid(row=2,column=1,padx=8,pady=5,sticky=W+E, columnspan=3 )
#
self.Row3Frame = LabelFrame(parent, relief=GROOVE, text=' 3.) Enter main parameters',bd=5,font=("Arial", 10, "bold"))
self.Row3Frame.grid(row=3,column=1,padx=8,pady=5,sticky=W)
#
self.Row4Frame = LabelFrame(parent, relief=GROOVE, text=' 4.) Run quickstart',bd=5,font=("Arial", 10, "bold"))
self.Row4Frame.grid(row=4,column=1,padx=8,pady=5,sticky=W)
#
self.Row1IFrame=SpInputFrame(self.Row1Frame, title='Project Name',datadict=self.data,order=['project'])
#
self.b2=Button(self.Row2Frame,text="Choose location for project directory")
self.b2.grid(row=1,column=1,padx=8,pady=5,stick=W+E, columnspan=3)
self.b2.bind("<ButtonRelease-1>", self.Button_2_Click)
#
self.b2a=Button(self.Row2Frame,text="Choose location of template files")
self.b2a.grid(row=2,column=1,padx=8,pady=5,stick=W+E, columnspan=3)
self.b2a.bind("<ButtonRelease-1>", self.Button_2a_Click)
#
self.Row3IFrame=SpInputFrame(self.Row3Frame, title='Main configuration',datadict=self.data)
#
self.b4=Button(self.Row4Frame,text="Run this configuration and build the project from templates")
self.b4.grid(row=1,column=1,padx=8,pady=5,stick=W+E, columnspan=3)
self.b4.bind("<ButtonRelease-1>", self.runQuickstart)
#
def Button_2_Click(self,event):
'''
'''
START_DIR = os.path.dirname(os.path.abspath(__file__) )
#
BASE_DIR = filedialog.askdirectory(parent=self.parent, initialdir=GIT_BASE_DIR ,title="Choose base directory")
        self.data['values']['BASE_DIR'] = BASE_DIR  # store the directory chosen in the dialog, not the default
self.data['callback_vars']['BASE_DIR'].set(self.data['values']['BASE_DIR'])
#
self.data['values']['path']=os.path.join(BASE_DIR,self.data['values']['project_fn'])
self.data['callback_vars']['path'].set(self.data['values']['path'])
#
def Button_2a_Click(self,event):
'''
'''
START_DIR = os.path.dirname(os.path.abspath(__file__) )
#
LOCAL_TEMPLATE_DIR = filedialog.askdirectory(parent=self.parent, initialdir=TEMPLATE_DIR ,title="Choose Templatedirectory")
#
self.data['values']['TEMPLATE_DIR']=LOCAL_TEMPLATE_DIR
self.data['callback_vars']['TEMPLATE_DIR'].set(self.data['values']['TEMPLATE_DIR'])
#
def runQuickstart(self,event):
'''
run template build with gathered information
'''
#
self.data['values']['project_underline'] = column_width(self.data['values']['project']) * '='
#
if self.data['values']['path']=='.' :
print('path is not configured')
else :
if not os.path.exists(self.data['values']['path']):
os.makedirs(self.data['values']['path'])
#
root_src_dir = self.data['values']['TEMPLATE_DIR']
root_dst_dir = self.data['values']['path']
#
for src_dir, dirs, files in os.walk(self.data['values']['TEMPLATE_DIR']):
#print(src_dir, dirs, files)
dst_dir = src_dir.replace(root_src_dir, root_dst_dir, 1)
if not os.path.exists(dst_dir):
os.makedirs(dst_dir)
if dst_dir.split('/')[-1]=='source':
pass
# create link from library
#dst_link=os.path.join(dst_dir,'library')
#src_link=os.path.join(self.data['values']['BASE_DIR'],'library')
#print('creating link from {} to {} '.format(src_link,dst_link))
#os.symlink(src_link, dst_link)
for file_ in files:
src_file = os.path.join(src_dir, file_)
dst_file = os.path.join(dst_dir, file_)
indexfile=True
if os.path.exists(dst_file):
if file_ == 'index.rst':
indexfile=False
else:
print('Deleting : ', dst_file)
os.remove(dst_file)
if file_ in ['.gitignore',]:
if os.path.exists(dst_file):
pass
else:
shutil.copy(src_file, dst_dir)
else :
if file_ == 'index.rst' and not indexfile:
pass
else:
if file_.endswith('.py') or file_.endswith('.rst') or file_.endswith('.sh') or file_=='Makefile':
print('Templating : ', dst_file)
with open(src_file) as file_:
template = Template(file_.read())
output=template.render(self.data)
#
with open(dst_file,'w') as file__:
file__.writelines(output)
else :
print('Copying : ', dst_file)
shutil.copy(src_file, dst_dir)
#
print('Init of git repo')
#
repo = git.Repo.init(self.data['values']['path'])
repo.git.add(A=True)
repo.git.commit('-m','Initial creation by startpanel')
print('Finished runQuickstart',self.data['values']['path'])
runonce='''
#!/bin/bash
#
# Run this once if you have a repository for common before you push the directory
#
cd {}
git submodule add {} library/common
git submodule init
git submodule update
#
echo "if you need a fresh copy do a git pull in the library/common directory"
#
'''.format(self.data['values']['path'],LIBRARY_URL)
#
dst_file=os.path.join(self.data['values']['path'], 'run_me_once.sh')
if os.path.exists(dst_file):
os.remove(dst_file)
with open(dst_file,'w') as file__:
file__.writelines(runonce)
class GUI_Dialog:
def __init__(self, master):
frame = Frame(master)
frame.grid()
dummyvar = gui_startpanel(master,data=getData())
def gui_main():
root = Tk()
app = GUI_Dialog(root)
root.mainloop()
def tui_main():
from ui.tui_curses import TUI_Dialog
TUI_Dialog()
print('using urwid/curses')
def X_is_running():
from subprocess import Popen, PIPE
p = Popen(["xset", "-q"], stdout=PIPE, stderr=PIPE)
p.communicate()
return p.returncode == 0
if __name__ == '__main__':
if X_is_running():
gui_main()
else:
tui_main()
|
from moai.modules.lightning.highres.highres import HighResolution
from moai.modules.lightning.highres.transition import (
StartTransition,
StageTransition
)
from moai.modules.lightning.highres.head import (
TopBranch as TopBranchHead,
AllBranches as AllBranchesHead,
Higher as HigherHead
)
__all__ = [
"HighResolution",
"StartTransition",
"StageTransition",
"TopBranchHead",
"AllBranchesHead",
"HigherHead",
]
|
from django.urls import path
from . import views
urlpatterns = [
path('add/', views.add_task, name='add_task'),
]
|
#! /usr/bin/env python
# coding=utf-8
#================================================================
# Copyright (C) 2019 * Ltd. All rights reserved.
#
# Editor : VIM
# File name : demo.py
# Author : YunYang1994
# Created date: 2019-10-20 15:06:46
# Description :
#
#================================================================
import cv2
import numpy as np
import tensorflow as tf
from PIL import Image
from rpn import RPNplus
from utils import compute_iou, plot_boxes_on_image, wandhG, load_gt_boxes, compute_regression, decode_output
pos_thresh = 0.5
neg_thresh = 0.1
iou_thresh = 0.5
grid_width = 16
grid_height = 16
image_height = 720
image_width = 960
wandhG = np.array(wandhG, dtype=np.float32)
image_path = "/home/yang/dataset/synthetic_dataset/image/1.jpg"
gt_boxes = load_gt_boxes("/home/yang/dataset/synthetic_dataset/imageAno/1.txt")
raw_image = cv2.imread(image_path)
image_with_gt_boxes = np.copy(raw_image)
plot_boxes_on_image(image_with_gt_boxes, gt_boxes)
Image.fromarray(image_with_gt_boxes).show()
encoded_image = np.copy(raw_image)
target_scores = np.zeros(shape=[45, 60, 9, 2]) # 0: background, 1: foreground
target_bboxes = np.zeros(shape=[45, 60, 9, 4]) # t_x, t_y, t_w, t_h
target_masks = np.zeros(shape=[45, 60, 9]) # negative_samples: -1, positive_samples: 1
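# Anchor layout implied by the shapes above: the 960x720 image is divided into a
# 60x45 grid of 16-pixel cells, and each cell carries the 9 anchor shapes listed
# in wandhG, hence the [45, 60, 9, ...] target tensors.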
################################### ENCODE INPUT #################################
for i in range(45):
for j in range(60):
for k in range(9):
center_x = j * grid_width + grid_width * 0.5
center_y = i * grid_height + grid_height * 0.5
xmin = center_x - wandhG[k][0] * 0.5
ymin = center_y - wandhG[k][1] * 0.5
xmax = center_x + wandhG[k][0] * 0.5
ymax = center_y + wandhG[k][1] * 0.5
# ignore cross-boundary anchors
if (xmin > -5) & (ymin > -5) & (xmax < (image_width+5)) & (ymax < (image_height+5)):
anchor_boxes = np.array([xmin, ymin, xmax, ymax])
anchor_boxes = np.expand_dims(anchor_boxes, axis=0)
# compute iou between this anchor and all ground-truth boxes in image.
ious = compute_iou(anchor_boxes, gt_boxes)
positive_masks = ious > pos_thresh
negative_masks = ious < neg_thresh
if np.any(positive_masks):
plot_boxes_on_image(encoded_image, anchor_boxes, thickness=1)
print("=> Encoding positive sample: %d, %d, %d" %(i, j, k))
cv2.circle(encoded_image, center=(int(0.5*(xmin+xmax)), int(0.5*(ymin+ymax))),
radius=1, color=[255,0,0], thickness=4)
target_scores[i, j, k, 1] = 1.
target_masks[i, j, k] = 1 # labeled as a positive sample
# find out which ground-truth box matches this anchor
max_iou_idx = np.argmax(ious)
selected_gt_boxes = gt_boxes[max_iou_idx]
target_bboxes[i, j, k] = compute_regression(selected_gt_boxes, anchor_boxes[0])
if np.all(negative_masks):
target_scores[i, j, k, 0] = 1.
target_masks[i, j, k] = -1 # labeled as a negative sample
cv2.circle(encoded_image, center=(int(0.5*(xmin+xmax)), int(0.5*(ymin+ymax))),
radius=1, color=[0,0,0], thickness=4)
Image.fromarray(encoded_image).show()
################################### DECODE OUTPUT #################################
decode_image = np.copy(raw_image)
pred_boxes = []
pred_score = []
for i in range(45):
for j in range(60):
for k in range(9):
            # coordinates of the predicted boxes
center_x = j * grid_width + 0.5 * grid_width
center_y = i * grid_height + 0.5 * grid_height
anchor_xmin = center_x - 0.5 * wandhG[k, 0]
anchor_ymin = center_y - 0.5 * wandhG[k, 1]
xmin = target_bboxes[i, j, k, 0] * wandhG[k, 0] + anchor_xmin
ymin = target_bboxes[i, j, k, 1] * wandhG[k, 1] + anchor_ymin
xmax = tf.exp(target_bboxes[i, j, k, 2]) * wandhG[k, 0] + xmin
ymax = tf.exp(target_bboxes[i, j, k, 3]) * wandhG[k, 1] + ymin
if target_scores[i, j, k, 1] > 0: # it is a positive sample
print("=> Decoding positive sample: %d, %d, %d" %(i, j, k))
cv2.circle(decode_image, center=(int(0.5*(xmin+xmax)), int(0.5*(ymin+ymax))),
radius=1, color=[255,0,0], thickness=4)
pred_boxes.append(np.array([xmin, ymin, xmax, ymax]))
pred_score.append(target_scores[i, j, k, 1])
pred_boxes = np.array(pred_boxes)
plot_boxes_on_image(decode_image, pred_boxes, color=[0, 255, 0])
Image.fromarray(np.uint8(decode_image)).show()
############################## FASTER DECODE OUTPUT ###############################
faster_decode_image = np.copy(raw_image)
# pred_bboxes = target_bboxes
# pred_scores = target_scores.astype(np.float32)
pred_bboxes = np.expand_dims(target_bboxes, 0).astype(np.float32)
pred_scores = np.expand_dims(target_scores, 0).astype(np.float32)
pred_scores, pred_bboxes = decode_output(pred_bboxes, pred_scores)
plot_boxes_on_image(faster_decode_image, pred_bboxes, color=[255, 0, 0]) # red bounding box
Image.fromarray(np.uint8(faster_decode_image)).show()
## bboxes
# grid_x, grid_y = tf.range(60, dtype=tf.int32), tf.range(45, dtype=tf.int32)
# grid_x, grid_y = tf.meshgrid(grid_x, grid_y)
# grid_x, grid_y = tf.expand_dims(grid_x, -1), tf.expand_dims(grid_y, -1)
# grid_xy = tf.stack([grid_x, grid_y], axis=-1)
# center_xy = grid_xy * 16 + 8
# center_xy = tf.cast(center_xy, tf.float32)
# anchor_xymin = center_xy - 0.5 * wandhG
# xy_min = pred_bboxes[..., 0:2] * wandhG[:, 0:2] + anchor_xymin
# xy_max = tf.exp(pred_bboxes[..., 2:4]) * wandhG[:, 0:2] + xy_min
# pred_bboxes = tf.concat([xy_min, xy_max], axis=-1)
# score_mask = pred_scores > 0.
# pred_bboxes = tf.reshape(pred_bboxes[score_mask], shape=[-1,4]).numpy()
# pred_scores = tf.reshape(pred_scores[score_mask], shape=[-1,]).numpy()
|
from GreedyGRASP.Solver import Solver
from GreedyGRASP.Solution import Solution
from GreedyGRASP.LocalSearch import LocalSearch
# Inherits from a parent abstract solver.
class Solver_Greedy(Solver):
def greedyFunctionCost(self, solution, remainCap, busesAssignments):
for busAssi in busesAssignments:
bus = solution.getBuses()[busAssi.bus]
service = solution.getServices()[busAssi.service]
if (remainCap <= bus.getCapacity()):
cost = busAssi.cost + busAssi.cost*(bus.getCapacity()-remainCap)/bus.getCapacity()
else:
cost = busAssi.cost + (busAssi.cost + service.getMinutes()*solution.inputData.CBM) * remainCap / bus.getCapacity()
busAssi.greedyCost = cost
return busesAssignments
def greedyConstruction(self, config, problem):
# get an empty solution for the problem
solution = Solution.createEmptySolution(config, problem)
        # get services and sort them by passenger demand and number of
        # overlapping services, in descending order
services = problem.getServices()
sortedServices = sorted(services,
key=lambda service: (service.getPassengers(), service.getNumOverlappingServices()),
reverse=True)
elapsedEvalTime = 0
evaluatedCandidates = 0
# for each task taken in sorted order
for service in sortedServices:
serviceId = service.getId()
busesAssignments, driversAssignments = solution.findFeasibleAssignments(serviceId)
remainCap = service.getPassengers()
selBuses = []
while (remainCap > 0 and len(busesAssignments) > 0):
busesAssignments = self.greedyFunctionCost(solution, remainCap, busesAssignments)
busesAssignments = sorted(busesAssignments, key=lambda busAssi: busAssi.greedyCost)
candidate = busesAssignments[0]
if (candidate is None):
solution.makeInfeasible()
break
selBuses.append(candidate)
busesAssignments.remove(candidate)
remainCap -= problem.getBuses()[candidate.bus].getCapacity()
if (remainCap > 0):
solution.makeInfeasible()
break
sortedDriversAssignments = sorted(driversAssignments, key=lambda driverAssi: driverAssi.cost)
if (len(sortedDriversAssignments) < len(selBuses)):
solution.makeInfeasible()
break
for i in range(0,len(selBuses)):
solution.assign(sortedDriversAssignments[i], selBuses[i])
return(solution, elapsedEvalTime, evaluatedCandidates)
def solve(self, config, problem):
self.startTimeMeasure()
self.writeLogLine(float('infinity'), 0)
solution, elapsedEvalTime, evaluatedCandidates = self.greedyConstruction(config, problem)
self.writeLogLine((solution.cost), 1)
localSearch = LocalSearch(config)
solution = localSearch.run(solution)
self.writeLogLine(solution.cost, 1)
avg_evalTimePerCandidate = 0.0
if (evaluatedCandidates != 0):
avg_evalTimePerCandidate = 1000.0 * elapsedEvalTime / float(evaluatedCandidates)
print ('')
print ('Greedy Candidate Evaluation Performance:')
print (' Num. Candidates Eval.', evaluatedCandidates)
print (' Total Eval. Time ', elapsedEvalTime, 's')
print (' Avg. Time / Candidate', avg_evalTimePerCandidate, 'ms')
localSearch.printPerformance()
return(solution)
|
from pathlib import Path
import json
import pandas as pd
import geopandas as gpd
from requests import Response
from shapely.geometry import Point
from utils import get_raw_data, create_folder, write_df_to_json, get_overpass_gdf, transform_dataframe
def test_get_raw_data(query_fixture):
response = get_raw_data(query_fixture)
assert isinstance(response, Response)
assert response.ok
def test_create_folder(path_fixture):
create_folder(path_fixture)
assert path_fixture.parent.exists()
path_fixture.parent.rmdir()
assert not path_fixture.parent.exists()
def test_get_overpass_gdf(response_fixture):
result = get_overpass_gdf(response_fixture)
assert result["type"] is not None
assert result["id"] is not None
assert result["lat"] is not None
assert result["lon"] is not None
assert result["tags"] is not None
assert result["geometry"] is not None
    assert isinstance(result, pd.DataFrame)
    assert isinstance(result["geometry"][0], Point)
def test_transform_dataframe(response_fixture):
gdf = get_overpass_gdf(response_fixture)
cleaned_gdf = transform_dataframe(gdf)
assert cleaned_gdf["pump:status"][1] == "defekt"
assert cleaned_gdf["pump:status"][0] == "unbekannt"
assert cleaned_gdf["pump:status"][2] == "defekt"
assert cleaned_gdf["pump:status"][3] == "defekt"
assert cleaned_gdf["pump:status"][4] == "funktionsfähig"
assert cleaned_gdf["pump:status"][5] == "verriegelt"
assert cleaned_gdf["pump:status"][6] == "verriegelt"
assert cleaned_gdf["check_date"][3] == "unbekannt"
assert cleaned_gdf["addr:full"][0] == "unbekannt"
assert cleaned_gdf["pump:style"][0] == "unbekannt"
assert cleaned_gdf["check_date"][1] != "unbekannt"
assert cleaned_gdf["addr:full"][1] != "unbekannt"
assert cleaned_gdf["pump:style"][1] != "unbekannt"
assert "geometry" in cleaned_gdf.columns
assert "has_no_lat_lon" not in cleaned_gdf.values
def test_write_df_to_json(path_fixture, dataframe_fixture):
json_path = path_fixture
min_json_path = Path(str(path_fixture)+".min.json")
create_folder(json_path)
write_df_to_json(dataframe_fixture, str(json_path))
    assert json_path.is_file()
    assert min_json_path.is_file()
assert dataframe_fixture.equals(gpd.read_file(str(json_path)))
json_path.unlink()
min_json_path.unlink()
json_path.parent.rmdir()
    assert not json_path.parent.exists()
|
'''
Starting from train_latent_batching.py, a much more modularized generation script.
These runs all use scenes with no objects.
'''
import sys
sys.path.append('curiosity')
sys.path.append('tfutils')
import tensorflow as tf
from curiosity.interaction import train, environment, data, cfg_generation
from curiosity.interaction.models import mario_world_model_config
from tfutils import base, optimizer
import numpy as np
import os
#EXP_IDS = dict(((arch, lr, opt), 'actopt_' + str(arch) + str(lr) + str(opt)) for arch in range(4) for lr in range(6) for opt in range(2)
BATCH_SIZE = 32
STATE_DESC = 'depths1'
arch_idx = int(sys.argv[2])
lr_idx = int(sys.argv[3])
opt_idx = int(sys.argv[4])
EXP_ID = 'actoptfix_' + str(arch_idx) + str(lr_idx) + str(opt_idx)
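# Expected invocation (inferred from the argv handling here and the
# CUDA_VISIBLE_DEVICES assignment in __main__):
#   python <this_script>.py <gpu_id> <arch_idx 0-3> <lr_idx 0-5> <opt_idx 0-1>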
noobj_scene_info = [
{
'type' : 'SHAPENET',
'scale' : .4,
'mass' : 1.,
'scale_var' : .01,
'num_items' : 0,
}
]
wm_cfg_gen_params = [
{
'encode_deets' : {'sizes' : [3, 3, 3, 3], 'strides' : [2, 2, 2, 2], 'nf' : [32, 32, 32, 32]},
'action_deets' : {'nf' : [256]},
'future_deets' : {'nf' : [512]}
},
{
'encode_deets' : {'sizes' : [5, 5], 'strides' : [2, 2], 'nf' : [4, 4]},
'action_deets' : {'nf' : [256]},
'future_deets' : {'nf' : [512]}
},
{
'encode_deets' : {'sizes' : [3, 3, 3, 3], 'strides' : [2, 2, 2, 2], 'nf' : [32, 32, 32, 32]},
'action_deets' : {'nf' : [256, 256]},
'future_deets' : {'nf' : [512]}
},
{
'encode_deets' : {'sizes' : [5, 5, 3], 'strides' : [2, 2, 2], 'nf' : [4, 4, 4]},
'action_deets' : {'nf' : [256, 256]},
'future_deets' : {'nf' : [256]}
}
]
wm_params = wm_cfg_gen_params[arch_idx]
lrs = [1e-2, 1e-3, 1e-4, 1e-5, 1e-6, 1e-7]
lr = lrs[lr_idx]
opts = [tf.train.AdamOptimizer, tf.train.RMSPropOptimizer]
opt = opts[opt_idx]
dp_config = cfg_generation.generate_batching_data_provider(batch_size = BATCH_SIZE, image_scale = (64, 64), scene_info = noobj_scene_info)
save_params_config = cfg_generation.generate_latent_save_params(EXP_ID, location = 'freud', state_desc = STATE_DESC)
um_cfg = cfg_generation.generate_uncertainty_model_cfg(image_shape = (64, 64), state_desc = STATE_DESC, loss_factor = 1/ float(BATCH_SIZE))
wm_cfg= cfg_generation.generate_latent_marioish_world_model_cfg(image_shape = (64, 64), act_loss_factor = 1/float(BATCH_SIZE), act_loss_type = 'one_l2', **wm_params)
print('printing future model!')
print(wm_cfg['future_model'])
model_cfg = cfg_generation.generate_latent_model_cfg(world_cfg = wm_cfg, uncertainty_cfg = um_cfg)
params = cfg_generation.generate_latent_standards(model_cfg = model_cfg, learning_rate = lr, optimizer_class = opt)
params.update(save_params_config)
params['data_params'] = dp_config
if __name__ == '__main__':
os.environ['CUDA_VISIBLE_DEVICES'] = sys.argv[1]
train.train_from_params(**params)
|
import json
from functools import reduce
class Example(object):
"""Defines a single training or test example.
Stores each column of the example as an attribute.
"""
@classmethod
def fromJSON(cls, data, fields):
ex = cls()
obj = json.loads(data)
for key, vals in fields.items():
if vals is not None:
if not isinstance(vals, list):
vals = [vals]
for val in vals:
# for processing the key likes 'foo.bar'
name, field = val
ks = key.split('.')
def reducer(obj, key):
if isinstance(obj, list):
results = []
for data in obj:
if key not in data:
# key error
raise ValueError("Specified key {} was not found in "
"the input data".format(key))
else:
results.append(data[key])
return results
else:
# key error
if key not in obj:
raise ValueError("Specified key {} was not found in "
"the input data".format(key))
else:
return obj[key]
v = reduce(reducer, ks, obj)
setattr(ex, name, field.preprocess(v))
return ex
@classmethod
def fromdict(cls, data, fields):
ex = cls()
for key, vals in fields.items():
if key not in data:
raise ValueError("Specified key {} was not found in "
"the input data".format(key))
if vals is not None:
if not isinstance(vals, list):
vals = [vals]
for val in vals:
name, field = val
setattr(ex, name, field.preprocess(data[key]))
return ex
@classmethod
def fromCSV(cls, data, fields, field_to_index=None):
if field_to_index is None:
return cls.fromlist(data, fields)
else:
assert(isinstance(fields, dict))
data_dict = {f: data[idx] for f, idx in field_to_index.items()}
return cls.fromdict(data_dict, fields)
@classmethod
def fromlist(cls, data, fields):
ex = cls()
for (name, field), val in zip(fields, data):
if field is not None:
if isinstance(val, str):
val = val.rstrip('\n')
# Handle field tuples
if isinstance(name, tuple):
for n, f in zip(name, field):
setattr(ex, n, f.preprocess(val))
else:
setattr(ex, name, field.preprocess(val))
return ex
@classmethod
def fromtree(cls, data, fields, subtrees=False):
try:
from nltk.tree import Tree
except ImportError:
print("Please install NLTK. "
"See the docs at http://nltk.org for more information.")
raise
tree = Tree.fromstring(data)
if subtrees:
return [cls.fromlist(
[' '.join(t.leaves()), t.label()], fields) for t in tree.subtrees()]
return cls.fromlist([' '.join(tree.leaves()), tree.label()], fields)
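
# Minimal usage sketch (the Field-like object and its preprocess method are
# hypothetical stand-ins, not part of this module):
if __name__ == '__main__':
    class _LowerField:
        def preprocess(self, x):
            return x.lower().split()

    ex = Example.fromdict({'text': 'Hello World'},
                          {'text': ('text', _LowerField())})
    print(ex.text)  # ['hello', 'world']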
|
from typing import Dict
import asyncpg
async def create_pg_connection(pg_config: Dict[str, str]) -> asyncpg.pool.Pool:
    # asyncpg.create_pool returns a connection pool, not a single connection
    return await asyncpg.create_pool(**pg_config)
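# Usage sketch (the key names are common asyncpg.create_pool arguments, shown
# here as an assumption rather than taken from this project's config):
#   pool = await create_pg_connection({
#       'host': 'localhost', 'user': 'app',
#       'password': 'secret', 'database': 'app',
#   })
#   async with pool.acquire() as conn:
#       rows = await conn.fetch('SELECT 1')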
|
#!/usr/bin/python3
import os
from datetime import datetime
log_file = "/home/sysadmin/logs.txt"
backup_dir = "/home/sysadmin/backups"
time = datetime.strftime(datetime.now(), "%Y-%m-%d %H:%M:%S")
php_sessions = []
for f in os.listdir('/tmp'):
    if f.startswith('sess_'):
php_sessions.append(f)
if not os.path.exists(backup_dir):
os.mkdir(backup_dir)
if php_sessions:
with open(log_file, 'a') as log:
log.write('{} => Backing up the following files:\n'.format(time))
for sess in php_sessions:
log.write('\t- {}\n'.format(sess))
os.system('/bin/cp "/tmp/{0}" "{1}/{0}"'.format(sess, backup_dir))
log.write('-'*50 + '\n')
|
# -*- coding: utf-8 -*-
__author__ = 'Brice Olivier'
import os
import scipy.io
from sea import MODWT
from sea import DATA_PATH
from .test_synchronized_eeg_trial import synchronized_eeg_trial_init
from sea import SynchronizedEEGTrial
import pytest
@pytest.fixture(scope="module")
def modwt_init():
eeg_data = scipy.io.loadmat(os.path.join(DATA_PATH, 's01_sample.mat'),
squeeze_me=True, struct_as_record=False)['EEG']
text_id = 0
channel_id = 0
return MODWT(eeg_data.data[channel_id, :, text_id], tmin=400,
tmax=2000, margin=100, nlevels=7, wf='la8')
def test_modwt(modwt_init):
assert modwt_init.wt.shape[0] == 7
assert modwt_init.wt.shape[1] == 1600
def test_plot_modwt(modwt_init):
modwt_init.plot_time_series_and_wavelet_transform()
@pytest.mark.usefixtures("synchronized_eeg_trial_init")
def test_plot_modwt_with_phases_and_tags(synchronized_eeg_trial_init):
fixations_time = synchronized_eeg_trial_init.get_fixations_time(from_zero=True)
tags = synchronized_eeg_trial_init.get_fixed_words()
ts = synchronized_eeg_trial_init.eeg_trial[0, :]
tmin = synchronized_eeg_trial_init.get_first_fixation_time_id()
tmax = synchronized_eeg_trial_init.get_last_fixation_time_id()
margin = - synchronized_eeg_trial_init.eeg_times[0]
phases = synchronized_eeg_trial_init.compute_epoch_phases(from_zero=True,
tmax=synchronized_eeg_trial_init.get_last_fixation_time())
wt = MODWT(ts, tmin=tmin, tmax=tmax, margin=margin, nlevels=7, wf='la8')
wt.plot_time_series_and_wavelet_transform_with_phases(phases, events=fixations_time, tags=tags)
@pytest.mark.usefixtures("synchronized_eeg_trial_init")
def test_standardize_eeg(synchronized_eeg_trial_init):
fixations_time = synchronized_eeg_trial_init.get_fixations_time(from_zero=True)
ts = synchronized_eeg_trial_init.eeg_trial[0, :]
tmin = synchronized_eeg_trial_init.get_first_fixation_time_id()
tmax = synchronized_eeg_trial_init.get_last_fixation_time_id()
margin = - synchronized_eeg_trial_init.eeg_times[0]
wt = MODWT(ts, tmin=tmin, tmax=tmax, margin=margin, nlevels=7, wf='la8')
wt.plot_time_series_and_wavelet_transform(last_x_scales=3, events=fixations_time)
index_zero = synchronized_eeg_trial_init.get_time_index(0)
baseline = synchronized_eeg_trial_init.eeg_trial[:, 0:index_zero]
eeg_trial_additive = SynchronizedEEGTrial.standardize_time_series(synchronized_eeg_trial_init.eeg_trial,
baseline, method='additive')
ts = eeg_trial_additive[0, :]
wt = MODWT(ts, tmin=tmin, tmax=tmax, margin=margin, nlevels=7, wf='la8')
wt.plot_time_series_and_wavelet_transform(last_x_scales=3, events=fixations_time)
eeg_trial_gain = SynchronizedEEGTrial.standardize_time_series(synchronized_eeg_trial_init.eeg_trial,
baseline, method='gain')
ts = eeg_trial_gain[0, :]
wt = MODWT(ts, tmin=tmin, tmax=tmax, margin=margin, nlevels=7, wf='la8')
wt.plot_time_series_and_wavelet_transform(last_x_scales=3, events=fixations_time)
"""
wt = MODWT(ts, tmin=tmin, tmax=tmax, margin=margin, nlevels=7, wf='la8')
eeg_trial_freq_gain = synchronized_eeg_init.eeg_trial[0, :]
baseline_wt = MODWT(eeg_trial_freq_gain, tmin=10, tmax=index_zero, margin=8, nlevels=7, wf='la8')
print(wt.wt.shape, baseline_wt.wt.shape)
wt.wt = SynchronizedEEGTrial.standardize_time_series(wt.wt, baseline_wt.wt, method='gain')
wt.plot_time_series_and_wavelet_transform(last_x_scales=3, events=fixations_time)
"""
wt = MODWT(ts, tmin=synchronized_eeg_trial_init.get_time_index(-100), tmax=tmax,
margin=margin, nlevels=7, wf='la8')
wt_baseline = wt.wt[:, 0:100]
wt_wt = wt.wt[:, tmin-(-synchronized_eeg_trial_init.eeg_times[0] - 100):]
wt_wt = SynchronizedEEGTrial.standardize_time_series(wt_wt, wt_baseline, method='additive')
wt.wt = wt_wt
wt.time_series = ts[tmin:tmax]
wt.plot_time_series_and_wavelet_transform(last_x_scales=3, events=fixations_time)
wt = MODWT(ts, tmin=synchronized_eeg_trial_init.get_time_index(-100), tmax=tmax,
margin=margin, nlevels=7, wf='la8')
wt_baseline = wt.wt[:, 0:100]
wt_wt = wt.wt[:, tmin - (-synchronized_eeg_trial_init.eeg_times[0] - 100):]
wt_wt = SynchronizedEEGTrial.standardize_time_series(wt_wt, wt_baseline, method='gain')
wt.wt = wt_wt
wt.time_series = ts[tmin:tmax]
wt.plot_time_series_and_wavelet_transform(last_x_scales=3, events=fixations_time)
|
#!/usr/bin/env python
# encoding: utf-8
#
######################################################################
## Application file name: greenhouse.py ##
## Description: A component of Ay-yahs-Greenhouse Automation System ##
## Description: Performs the primary greenhouse automation process. ##
## Description: ##
## Version: 1.04 ##
## Project Repository: https://git.io/fhhsY ##
## Copyright (C) 2019 The Groundhog Whisperer ##
######################################################################
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# This is a for-fun project created for the purpose of automating climate
# control and irrigation in a small greenhouse. Climate control and
# irrigation control is achieved by monitoring environmental sensor
# measurements. The environmental sensors measurements are then used to
# control a linear actuator, solenoid valve, small fan, and small heating
# pad. The information produced is displayed on a 16x2 LCD screen,
# broadcast via a wall message to the console, CSV file, and
# a SQLite database file.
# sqlite3 /var/www/html/greenhouse.db table creation command
# CREATE TABLE greenhouse(id INTEGER PRIMARY KEY AUTOINCREMENT, luminosity
# NUMERIC, temperature NUMERIC, humidity NUMERIC, soilmoisture NUMERIC,
# solenoidstatus TEXT, actuatorstatus TEXT, outputonestatus TEXT,
# outputtwostatus TEXT, outputthreestatus TEXT, currentdate DATE,
# currenttime TIME);
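# Equivalent one-off table creation from Python (a sketch of the statement
# above; normally the table is created once with the sqlite3 command-line tool):
#   import sqlite3
#   conn = sqlite3.connect('/var/www/html/greenhouse.db')
#   conn.execute("CREATE TABLE greenhouse(id INTEGER PRIMARY KEY AUTOINCREMENT, "
#                "luminosity NUMERIC, temperature NUMERIC, humidity NUMERIC, "
#                "soilmoisture NUMERIC, solenoidstatus TEXT, actuatorstatus TEXT, "
#                "outputonestatus TEXT, outputtwostatus TEXT, outputthreestatus TEXT, "
#                "currentdate DATE, currenttime TIME)")
#   conn.commit()
#   conn.close()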
# Enable fake sensor input mode during execution
# Set this value to True if you would like to execute this
# script without any sensors (e.g. DHT22, LDR, soil moisture)
# and without a Pimoroni Automation hat.
ENABLE_FAKE_SENSOR_VALUES = True
# Define the fake sensor values
FAKE_SOIL_MOISTURE_SENSOR_VALUE = 1.9
FAKE_LUMINOSITY_SENSOR_VALUE = 4.2
FAKE_HUMIDITY_SENSOR_VALUE = 50.01
FAKE_TEMPERATURE_SENSOR_VALUE = 72.28
# Do not attempt to import the Adafruit_DHT module if fake sensor input mode is enabled
if ENABLE_FAKE_SENSOR_VALUES == False: import Adafruit_DHT
#import Adafruit_DHT
import datetime
import math
import time
# Do not attempt to import the Pimoroni automationhat module if fake sensor input mode is enabled
if ENABLE_FAKE_SENSOR_VALUES == False: import automationhat
# Import automationhat
time.sleep(0.1) # short pause after ads1015 class creation recommended
import serial
import statistics
import subprocess
import sqlite3
import numpy as np
import matplotlib as mpl
# matplotlib is initialized with the 'Agg' backend so that plotting does not
# crash when this script is executed from the console (no display available)
mpl.use('Agg')
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
from matplotlib import style
#style.use('fivethirtyeight') # select the style of graph
from dateutil import parser
# Define the 16x2 RGB LCD device name connect via USB serial backpack kit
SERIAL_LCD_DEVICE_NAME = '/dev/ttyACM0'
# Define the length of time in seconds to display each message on the LCD screen
DISPLAY_LCD_MESSAGE_LENGTH_SECONDS = .9
# Messages broadcast via the wall command are suffixed with this string
WALL_TERMINAL_MESSAGE_SUFFIX_STRING = "Ay-yahs.Greenhouse.Garden.Area.One.Version.1.04"
# Switch to enable or disable LCD screen messages True/False
DISPLAY_LCD_SCREEN_MESSAGES_ACTIVE_SWTICH = False
# Switch to enable or disable broadcasting console wall messages True/False
DISPLAY_CONSOLE_WALL_MESSAGES_ACTIVE_SWTICH = False
# Enable verbose mode during execution
DISPLAY_PROCESS_MESSAGES = True
# Define the model temperature sensor
# TEMPERATURE_SENSOR_MODEL = Adafruit_DHT.AM2302
# TEMPERATURE_SENSOR_MODEL = Adafruit_DHT.DHT11
# TEMPERATURE_SENSOR_MODEL = Adafruit_DHT.DHT22
# Do not attempt to define the temperature sensor model if fake sensor input mode is enabled
if ENABLE_FAKE_SENSOR_VALUES == False: TEMPERATURE_SENSOR_MODEL = Adafruit_DHT.DHT22
# Define which GPIO data pin number the sensors DATA pin two is connected on
TEMPERATURE_SENSOR_GPIO = 25
# Define the minimum and maximum humidity/temperature sensor values
# minimum humidity value
MIMIMUM_HUMIDITY_VALUE = 0
# Maximum humidity value
MAXIMUM_HUMIDITY_VALUE = 100
# Minimum temperature value
MINIMUM_TEMPERATURE_VALUE = -72
# Maximum temperature value
MAXIMUM_TEMPERATURE_VALUE = 176
# Define the minimum luminosity sensor value at 0.01VDC
MINIMUM_LUMINOSITY_SENSOR_VALUE = 0.01
# Define the minimum soil moisture sensor value at 0.01VDC
MINIMUM_SOIL_MOISTURE_SENSOR_VALUE = 0.01
# SQLite database file name
SQLITE_DATABASE_FILE_NAME = '/var/www/html/greenhouse.db'
# Comma separated value output local file name
INDEX_LOG_DATA_CSV_FILE_NAME = "/var/www/html/index.csv"
# Comma separated value web/url file name
INDEX_LOG_DATA_CSV_URL_FILE_NAME = "index.csv"
# Linear actuator status file name (Retracted | Extended)
ACTUATOR_STATUS_FILE_NAME = '/var/www/html/actuator.txt'
# Solenoid valve status file name (Open | Closed)
SOLENOID_STATUS_FILE_NAME = '/var/www/html/solenoid.txt'
# Outputs status file name (On | Off)
OUTPUTS_STATUS_FILE_NAME = '/var/www/html/outputs.txt'
# Linear actuator runtime value file name (seconds)
LINEAR_ACTUATOR_RUNTIME_VALUE_FILE_NAME = '/var/www/html/actuatorruntime.txt'
# Minimum temperature sensor actuator retraction value file name (degrees)
MINIMUM_TEMPERATURE_SENSOR_ACTUATOR_RETRACT_VALUE_FILE_NAME = '/var/www/html/mintemactretract.txt'
# Minimum temperature sensor output one off value file name (degrees)
MINIMUM_TEMPERATURE_SENSOR_OUTPUT_ONE_OFF_VALUE_FILE_NAME = '/var/www/html/mintemoutoneoff.txt'
# Minimum humidity sensor output one off value file name (percent)
MINIMUM_HUMIDITY_SENSOR_OUTPUT_ONE_OFF_VALUE_FILE_NAME = '/var/www/html/minhumoutoneoff.txt'
# Minimum temperature sensor output two off value file name (degrees)
MINIMUM_TEMPERATURE_SENSOR_OUTPUT_TWO_OFF_VALUE_FILE_NAME = '/var/www/html/mintemouttwooff.txt'
# Minimum luminosity sensor output two off value file name (volts)
MINIMUM_LUMINOSITY_SENSOR_OUTPUT_TWO_OFF_VALUE_FILE_NAME = '/var/www/html/minlumouttwooff.txt'
# Minimum soil moisture sensor open solenoid valve value file name (volts)
MINIMUM_SOIL_MOISTURE_SENSOR_SOLENOID_VALVE_OPEN_VALUE_FILE_NAME = '/var/www/html/minsoilsoleopen.txt'
# Output two configuration between using temperature or luminosity value file name (Temperature | Luminosity)
OUTPUT_TWO_CONFIGURATION_BETWEEN_TEMPERATURE_OR_LUMINOSITY_VALUE_FILE_NAME = '/var/www/html/outtwotemlum.txt'
# Linear actuator configuration between off, sensor based operation, or scheduled operation (Off | Sensor | Schedule)
LINEAR_ACTUATOR_CONFIGURATION_BETWEEN_OFF_SENSOR_SCHEDULE_VALUE_FILE_NAME = '/var/www/html/linoffsensch.txt'
# Output one configuration between off, sensor based operation, or scheduled operation (Off | Sensor | Schedule)
OUTPUT_ONE_CONFIGURATION_BETWEEN_OFF_SENSOR_SCHEDULE_VALUE_FILE_NAME = '/var/www/html/outoneoffsensch.txt'
# Output two configuration between off, sensor based operation, or scheduled operation (Off | Sensor | Schedule)
OUTPUT_TWO_CONFIGURATION_BETWEEN_OFF_SENSOR_SCHEDULE_VALUE_FILE_NAME = '/var/www/html/outtwooffsensch.txt'
# Solenoid valve configuration between off, sensor based watering, or scheduled watering (Off | Sensor | Schedule)
SOLENOID_VALVE_CONFIGURATION_BETWEEN_OFF_SENSOR_SCHEDULE_VALUE_FILE_NAME = '/var/www/html/soleoffsensch.txt'
# Luminosity graph image local output file name
GRAPH_IMAGE_LUMINOSITY_FILE_NAME = "/var/www/html/ghouselumi.png"
# Temperature graph image local output file name
GRAPH_IMAGE_TEMPERATURE_FILE_NAME = "/var/www/html/ghousetemp.png"
# Humidity graph image local output file name
GRAPH_IMAGE_HUMIDITY_FILE_NAME = "/var/www/html/ghousehumi.png"
# Soil moisture graph image local output file name
GRAPH_IMAGE_SOIL_MOISTURE_FILE_NAME = "/var/www/html/ghousesoil.png"
# Read control constant values from files on disk
def read_control_values_from_files():
if DISPLAY_PROCESS_MESSAGES == True: print ("Reading control values from files on disk.")
try:
global CURRENT_SOLENOID_VALVE_STATUS
# Read the current solenoid valve status
solenoid_status_file_handle = open(SOLENOID_STATUS_FILE_NAME, 'r')
CURRENT_SOLENOID_VALVE_STATUS = solenoid_status_file_handle.readline()
if DISPLAY_PROCESS_MESSAGES == True: print ("Read CURRENT_SOLENOID_VALVE_STATUS from file", CURRENT_SOLENOID_VALVE_STATUS)
solenoid_status_file_handle.close()
except OSError:
print ("An error occurred reading file name: ", SOLENOID_STATUS_FILE_NAME)
quit()
try:
global CURRENT_ACTUATOR_EXTENSION_STATUS
# Read the current linear actuator status
actuator_status_file_handle = open(ACTUATOR_STATUS_FILE_NAME, 'r')
CURRENT_ACTUATOR_EXTENSION_STATUS = actuator_status_file_handle.readline().strip('\n')
if DISPLAY_PROCESS_MESSAGES == True: print ("Read CURRENT_ACTUATOR_EXTENSION_STATUS from file", CURRENT_ACTUATOR_EXTENSION_STATUS)
actuator_status_file_handle.close()
except OSError:
print ("An error occurred reading file name: ", ACTUATOR_STATUS_FILE_NAME)
quit()
try:
global CURRENT_OUTPUT_STATUS_LIST
# Read the outputs status values
outputs_status_file_handle = open(OUTPUTS_STATUS_FILE_NAME, 'r')
CURRENT_OUTPUT_STATUS_LIST = outputs_status_file_handle.readlines()
outputs_status_file_handle.close()
if DISPLAY_PROCESS_MESSAGES == True: print ("Read CURRENT_OUTPUT_STATUS_LIST[0], CURRENT_OUTPUT_STATUS_LIST[1], CURRENT_OUTPUT_STATUS_LIST[2] from file", CURRENT_OUTPUT_STATUS_LIST[0], CURRENT_OUTPUT_STATUS_LIST[1], CURRENT_OUTPUT_STATUS_LIST[2])
# Remove the \n new line char from the end of the line
CURRENT_OUTPUT_STATUS_LIST[0] = CURRENT_OUTPUT_STATUS_LIST[0].strip('\n')
CURRENT_OUTPUT_STATUS_LIST[1] = CURRENT_OUTPUT_STATUS_LIST[1].strip('\n')
CURRENT_OUTPUT_STATUS_LIST[2] = CURRENT_OUTPUT_STATUS_LIST[2].strip('\n')
except OSError:
print ("An error occurred reading file name: ", OUTPUTS_STATUS_FILE_NAME)
quit()
try:
global LINEAR_ACTUATOR_RUN_TIME_VALUE
# Read the current linear actuator runtime value from a file
actuator_runtime_value_file_handle = open(LINEAR_ACTUATOR_RUNTIME_VALUE_FILE_NAME, 'r')
LINEAR_ACTUATOR_RUN_TIME_VALUE = actuator_runtime_value_file_handle.readline()
actuator_runtime_value_file_handle.close()
if DISPLAY_PROCESS_MESSAGES == True: print ("Read LINEAR_ACTUATOR_RUN_TIME_VALUE from file", LINEAR_ACTUATOR_RUN_TIME_VALUE)
except OSError:
print ("An error occurred reading file name: ", LINEAR_ACTUATOR_RUNTIME_VALUE_FILE_NAME)
quit()
try:
global MINIMUM_TEMPERATURE_SENSOR_VALUE_ACTUATOR_RETRACT
# Read the minimum temperature linear actuator retract value from a file
minimum_temperature_acturator_retract_value_file_handle = open(MINIMUM_TEMPERATURE_SENSOR_ACTUATOR_RETRACT_VALUE_FILE_NAME, 'r')
MINIMUM_TEMPERATURE_SENSOR_VALUE_ACTUATOR_RETRACT = minimum_temperature_acturator_retract_value_file_handle.readline()
minimum_temperature_acturator_retract_value_file_handle.close()
if DISPLAY_PROCESS_MESSAGES == True: print ("Read MINIMUM_TEMPERATURE_SENSOR_VALUE_ACTUATOR_RETRACT from file", MINIMUM_TEMPERATURE_SENSOR_VALUE_ACTUATOR_RETRACT)
except OSError:
print ("An error occurred reading file name: ", MINIMUM_TEMPERATURE_SENSOR_ACTUATOR_RETRACT_VALUE_FILE_NAME)
quit()
try:
global MINIMUM_TEMPERATURE_OUTPUT_ONE_OFF
# Read the minimum temperature sensor output one off value from a file
minimum_temperature_sensor_output_one_off_value_file_handle = open(MINIMUM_TEMPERATURE_SENSOR_OUTPUT_ONE_OFF_VALUE_FILE_NAME, 'r')
MINIMUM_TEMPERATURE_OUTPUT_ONE_OFF = minimum_temperature_sensor_output_one_off_value_file_handle.readline()
minimum_temperature_sensor_output_one_off_value_file_handle.close()
if DISPLAY_PROCESS_MESSAGES == True: print ("Read MINIMUM_TEMPERATURE_OUTPUT_ONE_OFF from file", MINIMUM_TEMPERATURE_OUTPUT_ONE_OFF)
except OSError:
print ("An error occurred reading file name: ", MINIMUM_TEMPERATURE_SENSOR_OUTPUT_ONE_OFF_VALUE_FILE_NAME)
quit()
try:
global MINIMUM_HUMIDITY_OUTPUT_ONE_OFF
# Read the minimum humidity sensor output one off value from a file
minimum_humidity_sensor_output_one_off_value_file_handle = open(MINIMUM_HUMIDITY_SENSOR_OUTPUT_ONE_OFF_VALUE_FILE_NAME, 'r')
MINIMUM_HUMIDITY_OUTPUT_ONE_OFF = minimum_humidity_sensor_output_one_off_value_file_handle.readline()
minimum_humidity_sensor_output_one_off_value_file_handle.close()
if DISPLAY_PROCESS_MESSAGES == True: print ("Read MINIMUM_HUMIDITY_OUTPUT_ONE_OFF from file", MINIMUM_HUMIDITY_OUTPUT_ONE_OFF)
except OSError:
print ("An error occurred reading file name: ", MINIMUM_HUMIDITY_SENSOR_OUTPUT_ONE_OFF_VALUE_FILE_NAME)
quit()
try:
global MINIMUM_TEMPERATURE_OUTPUT_TWO_OFF
# Read the minimum temperature sensor output two off value from a file
minimum_temperature_sensor_output_two_off_value_file_handle = open(MINIMUM_TEMPERATURE_SENSOR_OUTPUT_TWO_OFF_VALUE_FILE_NAME, 'r')
MINIMUM_TEMPERATURE_OUTPUT_TWO_OFF = minimum_temperature_sensor_output_two_off_value_file_handle.readline()
minimum_temperature_sensor_output_two_off_value_file_handle.close()
if DISPLAY_PROCESS_MESSAGES == True: print ("Read MINIMUM_TEMPERATURE_OUTPUT_TWO_OFF from file", MINIMUM_TEMPERATURE_OUTPUT_TWO_OFF)
except OSError:
print ("An error occurred reading file name: ", MINIMUM_TEMPERATURE_SENSOR_OUTPUT_TWO_OFF_VALUE_FILE_NAME)
quit()
try:
global MINIMUM_LUMINOSITY_OUTPUT_TWO_OFF
# Read the minimum luminosity sensor output two off value from a file
minimum_luminosity_sensor_output_two_off_value_file_handle = open(MINIMUM_LUMINOSITY_SENSOR_OUTPUT_TWO_OFF_VALUE_FILE_NAME, 'r')
MINIMUM_LUMINOSITY_OUTPUT_TWO_OFF = minimum_luminosity_sensor_output_two_off_value_file_handle.readline()
minimum_luminosity_sensor_output_two_off_value_file_handle.close()
if DISPLAY_PROCESS_MESSAGES == True: print ("Read MINIMUM_LUMINOSITY_OUTPUT_TWO_OFF from file", MINIMUM_LUMINOSITY_OUTPUT_TWO_OFF)
except OSError:
print ("An error occurred reading file name: ", MINIMUM_LUMINOSITY_SENSOR_OUTPUT_TWO_OFF_VALUE_FILE_NAME)
quit()
try:
global MINIMUM_SOIL_MOISTURE_SENSOR_VALUE_SOLENOID_OPEN
# Read the soil moisture sensor solenoid open value from a file
minimum_soil_moisture_sensor_solenoid_open_value_file_handle = open(MINIMUM_SOIL_MOISTURE_SENSOR_SOLENOID_VALVE_OPEN_VALUE_FILE_NAME, 'r')
MINIMUM_SOIL_MOISTURE_SENSOR_VALUE_SOLENOID_OPEN = minimum_soil_moisture_sensor_solenoid_open_value_file_handle.readline()
minimum_soil_moisture_sensor_solenoid_open_value_file_handle.close()
if DISPLAY_PROCESS_MESSAGES == True: print ("Read MINIMUM_SOIL_MOISTURE_SENSOR_VALUE_SOLENOID_OPEN from file", MINIMUM_SOIL_MOISTURE_SENSOR_VALUE_SOLENOID_OPEN)
except OSError:
print ("An error occurred reading file name: ", MINIMUM_SOIL_MOISTURE_SENSOR_SOLENOID_VALVE_OPEN_VALUE_FILE_NAME)
quit()
try:
global OUTPUT_TWO_CONFIGURATION_VALUE_BETWEEN_TEMPERATURE_OR_LUMINOSITY
# Read the output two control configuration value switching between temperature or luminosity from a file
output_two_configuration_value_file_handle = open(OUTPUT_TWO_CONFIGURATION_BETWEEN_TEMPERATURE_OR_LUMINOSITY_VALUE_FILE_NAME, 'r')
OUTPUT_TWO_CONFIGURATION_VALUE_BETWEEN_TEMPERATURE_OR_LUMINOSITY = output_two_configuration_value_file_handle.readline()
output_two_configuration_value_file_handle.close()
if DISPLAY_PROCESS_MESSAGES == True: print ("Read OUTPUT_TWO_CONFIGURATION_VALUE_BETWEEN_TEMPERATURE_OR_LUMINOSITY from file", OUTPUT_TWO_CONFIGURATION_VALUE_BETWEEN_TEMPERATURE_OR_LUMINOSITY)
except OSError:
print ("An error occurred reading file name: ", OUTPUT_TWO_CONFIGURATION_BETWEEN_TEMPERATURE_OR_LUMINOSITY_VALUE_FILE_NAME)
quit()
try:
global LINEAR_ACTUATOR_CONFIGURATION_BETWEEN_OFF_SENSOR_SCHEDULE_VALUE
# Read the linear actuator control configuration value switching between Off, Sensor, or Schedule from a file
linear_actuator_configuration_between_off_sensor_schedule_value_file_handle = open(LINEAR_ACTUATOR_CONFIGURATION_BETWEEN_OFF_SENSOR_SCHEDULE_VALUE_FILE_NAME, 'r')
LINEAR_ACTUATOR_CONFIGURATION_BETWEEN_OFF_SENSOR_SCHEDULE_VALUE = linear_actuator_configuration_between_off_sensor_schedule_value_file_handle.readline()
linear_actuator_configuration_between_off_sensor_schedule_value_file_handle.close()
if DISPLAY_PROCESS_MESSAGES == True: print ("Read LINEAR_ACTUATOR_CONFIGURATION_BETWEEN_OFF_SENSOR_SCHEDULE_VALUE from file", LINEAR_ACTUATOR_CONFIGURATION_BETWEEN_OFF_SENSOR_SCHEDULE_VALUE)
except OSError:
print ("An error occurred reading file name: ", LINEAR_ACTUATOR_CONFIGURATION_BETWEEN_OFF_SENSOR_SCHEDULE_VALUE_FILE_NAME)
quit()
try:
global OUTPUT_ONE_CONFIGURATION_BETWEEN_OFF_SENSOR_SCHEDULE_VALUE
# Read the output one control configuration value switching between Off, Sensor, or Schedule from a file
output_one_configuration_between_off_sensor_schedule_value_file_handle = open(OUTPUT_ONE_CONFIGURATION_BETWEEN_OFF_SENSOR_SCHEDULE_VALUE_FILE_NAME, 'r')
OUTPUT_ONE_CONFIGURATION_BETWEEN_OFF_SENSOR_SCHEDULE_VALUE = output_one_configuration_between_off_sensor_schedule_value_file_handle.readline()
output_one_configuration_between_off_sensor_schedule_value_file_handle.close()
if DISPLAY_PROCESS_MESSAGES == True: print ("Read OUTPUT_ONE_CONFIGURATION_BETWEEN_OFF_SENSOR_SCHEDULE_VALUE from file", OUTPUT_ONE_CONFIGURATION_BETWEEN_OFF_SENSOR_SCHEDULE_VALUE)
except OSError:
print ("An error occurred reading file name: ", OUTPUT_ONE_CONFIGURATION_BETWEEN_OFF_SENSOR_SCHEDULE_VALUE_FILE_NAME)
quit()
try:
global OUTPUT_TWO_CONFIGURATION_BETWEEN_OFF_SENSOR_SCHEDULE_VALUE
# Read the output two control configuration value switching between Off, Sensor, or Schedule from a file
output_two_configuration_between_off_sensor_schedule_value_file_handle = open(OUTPUT_TWO_CONFIGURATION_BETWEEN_OFF_SENSOR_SCHEDULE_VALUE_FILE_NAME, 'r')
OUTPUT_TWO_CONFIGURATION_BETWEEN_OFF_SENSOR_SCHEDULE_VALUE = output_two_configuration_between_off_sensor_schedule_value_file_handle.readline()
output_two_configuration_between_off_sensor_schedule_value_file_handle.close()
if DISPLAY_PROCESS_MESSAGES == True: print ("Read OUTPUT_TWO_CONFIGURATION_BETWEEN_OFF_SENSOR_SCHEDULE_VALUE from file", OUTPUT_TWO_CONFIGURATION_BETWEEN_OFF_SENSOR_SCHEDULE_VALUE)
except OSError:
print ("An error occurred reading file name: ", OUTPUT_TWO_CONFIGURATION_BETWEEN_OFF_SENSOR_SCHEDULE_VALUE_FILE_NAME)
quit()
try:
global SOLENOID_VALVE_CONFIGURATION_BETWEEN_OFF_SENSOR_SCHEDULE_VALUE
# Read the solenoid valve control configuration value switching between Off, Sensor, or Schedule from a file
solenoid_valve_configuration_between_off_sensor_schedule_value_file_handle = open(SOLENOID_VALVE_CONFIGURATION_BETWEEN_OFF_SENSOR_SCHEDULE_VALUE_FILE_NAME, 'r')
SOLENOID_VALVE_CONFIGURATION_BETWEEN_OFF_SENSOR_SCHEDULE_VALUE = solenoid_valve_configuration_between_off_sensor_schedule_value_file_handle.readline()
solenoid_valve_configuration_between_off_sensor_schedule_value_file_handle.close()
if DISPLAY_PROCESS_MESSAGES == True: print ("Read SOLENOID_VALVE_CONFIGURATION_BETWEEN_OFF_SENSOR_SCHEDULE_VALUE from file", SOLENOID_VALVE_CONFIGURATION_BETWEEN_OFF_SENSOR_SCHEDULE_VALUE)
except OSError:
print ("An error occurred reading file name: ", SOLENOID_VALVE_CONFIGURATION_BETWEEN_OFF_SENSOR_SCHEDULE_VALUE_FILE_NAME)
quit()
# Temperature and humidity value read input subroutine
def read_temperature_humidity_values():
if DISPLAY_PROCESS_MESSAGES == True: print ("Reading temperature and humidity values")
# The sensor may produce an erroneous reading greater or less than
# the possible measuring range of the device
# A for loop retries the read process until values within the scope
# of the possible measuring range are obtained
for i in range(0, 15):
try:
global current_temperature_sensor_value
global current_humidity_sensor_value
if ENABLE_FAKE_SENSOR_VALUES == True: current_humidity_sensor_value = FAKE_HUMIDITY_SENSOR_VALUE
if ENABLE_FAKE_SENSOR_VALUES == True: current_temperature_sensor_value = FAKE_TEMPERATURE_SENSOR_VALUE
if ENABLE_FAKE_SENSOR_VALUES == True: print ("Fake sensor values enabled. Returning current_humidity_sensor_value, current_temperature_sensor_value:", current_humidity_sensor_value, current_temperature_sensor_value)
if ENABLE_FAKE_SENSOR_VALUES == True: return(current_humidity_sensor_value, current_temperature_sensor_value)
# Create an instance of the dht22 class
# Pass the GPIO data pin number connected to the signal line
# (pin #25 is broken out on the Pimoroni Automation HAT)
# Read the temperature and humidity values
current_humidity_sensor_value, current_temperature_sensor_value = Adafruit_DHT.read_retry(
TEMPERATURE_SENSOR_MODEL, TEMPERATURE_SENSOR_GPIO)
if DISPLAY_PROCESS_MESSAGES == True: print ("Reading humidity value:", current_humidity_sensor_value)
if DISPLAY_PROCESS_MESSAGES == True: print ("Reading temperature value:", current_temperature_sensor_value)
if (current_temperature_sensor_value is not None and
current_humidity_sensor_value is not None
):
# Ensure the temperature reading is a floating-point number
current_temperature_sensor_value = float(current_temperature_sensor_value)
# Convert from celsius to fahrenheit
current_temperature_sensor_value = (current_temperature_sensor_value * 1.8) + 32
# Reformat as two decimals
current_temperature_sensor_value = float("{0:.2f}".format(current_temperature_sensor_value))
# Reformat as two decimals
current_humidity_sensor_value = float("{0:.2f}".format(current_humidity_sensor_value))
if DISPLAY_PROCESS_MESSAGES == True: print ("Comparing current_humidity_sensor_value < MIMIMUM_HUMIDITY_VALUE:", current_humidity_sensor_value, MIMIMUM_HUMIDITY_VALUE)
if (current_humidity_sensor_value < MIMIMUM_HUMIDITY_VALUE):
print('DHT sensor error humidity value less than minimum humidity value = %.2f Attempting reread' % current_humidity_sensor_value)
if DISPLAY_PROCESS_MESSAGES == True: print ("Comparing current_humidity_sensor_value > MAXIMUM_HUMIDITY_VALUE:", current_humidity_sensor_value, MAXIMUM_HUMIDITY_VALUE)
if (current_humidity_sensor_value > MAXIMUM_HUMIDITY_VALUE):
print('DHT sensor error humidity value greater than maximum humidity value = %.2f Attempting reread' % current_humidity_sensor_value)
if DISPLAY_PROCESS_MESSAGES == True: print ("Comparing current_temperature_sensor_value < MINIMUM_TEMPERATURE_VALUE:", current_temperature_sensor_value, MINIMUM_TEMPERATURE_VALUE)
if (current_temperature_sensor_value < MINIMUM_TEMPERATURE_VALUE):
print('DHT sensor error temperature value less than minimum temperature value = %.2f Attempting reread' % current_temperature_sensor_value)
if DISPLAY_PROCESS_MESSAGES == True: print ("Comparing current_temperature_sensor_value > MAXIMUM_TEMPERATURE_VALUE:", current_temperature_sensor_value, MAXIMUM_TEMPERATURE_VALUE)
if (current_temperature_sensor_value > MAXIMUM_TEMPERATURE_VALUE):
print('DHT sensor error temperature value greater than maximum temperature value = %.2f Attempting reread' % current_temperature_sensor_value)
if DISPLAY_PROCESS_MESSAGES == True: print ("Comparing current_humidity_sensor_value > MIMIMUM_HUMIDITY_VALUE and current_humidity_sensor_value < MAXIMUM_HUMIDITY_VALUE and current_temperature_sensor_value > MINIMUM_TEMPERATURE_VALUE and current_temperature_sensor_value < MAXIMUM_TEMPERATURE_VALUE")
if (current_humidity_sensor_value > MIMIMUM_HUMIDITY_VALUE and current_humidity_sensor_value < MAXIMUM_HUMIDITY_VALUE and
current_temperature_sensor_value > MINIMUM_TEMPERATURE_VALUE and current_temperature_sensor_value < MAXIMUM_TEMPERATURE_VALUE):
if DISPLAY_PROCESS_MESSAGES == True: print ("Success! Returning current_humidity_sensor_value, current_temperature_sensor_value:",current_humidity_sensor_value, current_temperature_sensor_value)
return(current_humidity_sensor_value, current_temperature_sensor_value)
break
except RuntimeError as e:
# Print an error if the sensor read fails
print ("DHT sensor read failed: ", e.args)
# Enable and disable outputs subroutine
# Output #1 = 0, #2 = 1, #3 = 2
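# Example: control_outputs(0, 'On') drives GPIO 5 high via pigs (when not already on) and records the new state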
def control_outputs(output_number, output_status):
if DISPLAY_PROCESS_MESSAGES == True: print ("Read OUTPUTS_STATUS_FILE_NAME:", OUTPUTS_STATUS_FILE_NAME)
outputs_status_file_handle = open(OUTPUTS_STATUS_FILE_NAME, 'r')
CURRENT_OUTPUT_STATUS_LIST = outputs_status_file_handle.readlines()
outputs_status_file_handle.close()
if DISPLAY_PROCESS_MESSAGES == True: print ("Read CURRENT_OUTPUT_STATUS_LIST:", CURRENT_OUTPUT_STATUS_LIST)
current_output_status = CURRENT_OUTPUT_STATUS_LIST[output_number]
# Remove the \n new line char from the end of the line
CURRENT_OUTPUT_STATUS_LIST[output_number] = CURRENT_OUTPUT_STATUS_LIST[output_number].strip('\n')
if DISPLAY_PROCESS_MESSAGES == True: print ("Comparing CURRENT_OUTPUT_STATUS_LIST[output_number] == output_status:", CURRENT_OUTPUT_STATUS_LIST[output_number], output_status)
if (CURRENT_OUTPUT_STATUS_LIST[output_number] == output_status):
if DISPLAY_PROCESS_MESSAGES == True: print ("Output already in correct state. Returning:", current_output_status)
return(current_output_status)
else:
if (output_status == 'On'):
if DISPLAY_PROCESS_MESSAGES == True: print ("Using pigs to enable the output")
# Toggle output on
if (output_number == 0):
pigs_gpio_command_line = ["/usr/bin/pigs", "w 5 1"]
p = subprocess.Popen(pigs_gpio_command_line)
elif (output_number == 1):
pigs_gpio_command_line = ["/usr/bin/pigs", "w 12 1"]
p = subprocess.Popen(pigs_gpio_command_line)
elif (output_number == 2):
pigs_gpio_command_line = ["/usr/bin/pigs", "w 6 1"]
p = subprocess.Popen(pigs_gpio_command_line)
current_output_status = 'On'
CURRENT_OUTPUT_STATUS_LIST[output_number] = "On\n"
# Write the modified status to a text file
outputs_status_file_handle = open(OUTPUTS_STATUS_FILE_NAME, 'w')
outputs_status_file_handle.writelines(CURRENT_OUTPUT_STATUS_LIST)
outputs_status_file_handle.close()
if DISPLAY_PROCESS_MESSAGES == True: print ("Wrote the output status to:", OUTPUTS_STATUS_FILE_NAME)
if DISPLAY_PROCESS_MESSAGES == True: print ("Returning:", current_output_status)
return(current_output_status)
if (output_status == 'Off'):
if DISPLAY_PROCESS_MESSAGES == True: print ("Using pigs to disable the output")
# Toggle output off
if (output_number == 0):
pigs_gpio_command_line = ["/usr/bin/pigs", "w 5 0"]
p = subprocess.Popen(pigs_gpio_command_line)
elif (output_number == 1):
pigs_gpio_command_line = ["/usr/bin/pigs", "w 12 0"]
p = subprocess.Popen(pigs_gpio_command_line)
elif (output_number == 2):
pigs_gpio_command_line = ["/usr/bin/pigs", "w 6 0"]
p = subprocess.Popen(pigs_gpio_command_line)
current_output_status = 'Off'
CURRENT_OUTPUT_STATUS_LIST[output_number] = "Off\n"
# Write the modified status to a text file
outputs_status_file_handle = open(OUTPUTS_STATUS_FILE_NAME, 'w')
outputs_status_file_handle.writelines(CURRENT_OUTPUT_STATUS_LIST)
outputs_status_file_handle.close()
if DISPLAY_PROCESS_MESSAGES == True: print ("Wrote the output status to:", OUTPUTS_STATUS_FILE_NAME)
if DISPLAY_PROCESS_MESSAGES == True: print ("Returning:", current_output_status)
return(current_output_status)
# Linear actuator extension and retraction subroutine
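# Example: linear_actuator_extension_retraction('Extended') pulses relay #1 for LINEAR_ACTUATOR_RUN_TIME_VALUE seconds and records the new state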
def linear_actuator_extension_retraction(actuator_extension_status):
if DISPLAY_PROCESS_MESSAGES == True: print ("Reading ACTUATOR_STATUS_FILE_NAME:", ACTUATOR_STATUS_FILE_NAME)
global CURRENT_ACTUATOR_EXTENSION_STATUS
actuator_status_file_handle = open(ACTUATOR_STATUS_FILE_NAME, 'r')
CURRENT_ACTUATOR_EXTENSION_STATUS = actuator_status_file_handle.readline().strip('\n')
actuator_status_file_handle.close()
if DISPLAY_PROCESS_MESSAGES == True: print ("Read CURRENT_ACTUATOR_EXTENSION_STATUS:", CURRENT_ACTUATOR_EXTENSION_STATUS)
if DISPLAY_PROCESS_MESSAGES == True: print ("Comparing CURRENT_ACTUATOR_EXTENSION_STATUS == actuator_extension_status:", CURRENT_ACTUATOR_EXTENSION_STATUS, actuator_extension_status)
if (CURRENT_ACTUATOR_EXTENSION_STATUS == actuator_extension_status):
if DISPLAY_PROCESS_MESSAGES == True: print ("Linear actuator already in correct state. Returning CURRENT_ACTUATOR_EXTENSION_STATUS:", CURRENT_ACTUATOR_EXTENSION_STATUS)
return(CURRENT_ACTUATOR_EXTENSION_STATUS)
else:
if (actuator_extension_status == 'Extended'):
if DISPLAY_PROCESS_MESSAGES == True: print ("Extending the linear actuator now")
# Toggle relay #1 (relay.one) on to extend the linear actuator
if ENABLE_FAKE_SENSOR_VALUES == False: automationhat.relay.one.toggle()
time.sleep(float(LINEAR_ACTUATOR_RUN_TIME_VALUE))
# Toggle relay #1 (relay.one) off
if ENABLE_FAKE_SENSOR_VALUES == False: automationhat.relay.one.toggle()
CURRENT_ACTUATOR_EXTENSION_STATUS = 'Extended'
if DISPLAY_PROCESS_MESSAGES == True: print ("Writing the linear actuator status to ACTUATOR_STATUS_FILE_NAME", ACTUATOR_STATUS_FILE_NAME)
# Write the modified status to a text file
actuator_status_file_handle = open(ACTUATOR_STATUS_FILE_NAME, 'w')
actuator_status_file_handle.write(CURRENT_ACTUATOR_EXTENSION_STATUS)
actuator_status_file_handle.close()
if DISPLAY_PROCESS_MESSAGES == True: print ("Returning CURRENT_ACTUATOR_EXTENSION_STATUS", CURRENT_ACTUATOR_EXTENSION_STATUS)
return(CURRENT_ACTUATOR_EXTENSION_STATUS)
if (actuator_extension_status == 'Retracted'):
if DISPLAY_PROCESS_MESSAGES == True: print ("Retracting the linear actuator now")
# Toggle relay #2 (relay.two) on to retract the linear actuator
# only call the automationhat module if fake sensor input is disabled = False
if ENABLE_FAKE_SENSOR_VALUES == False: automationhat.relay.two.toggle()
time.sleep(float(LINEAR_ACTUATOR_RUN_TIME_VALUE))
if DISPLAY_PROCESS_MESSAGES == True: print ("Writing the linear actuator status to ACTUATOR_STATUS_FILE_NAME", ACTUATOR_STATUS_FILE_NAME)
# Toggle relay #2 (relay.two) off
# only call the automationhat module if fake sensor input is disabled = False
if ENABLE_FAKE_SENSOR_VALUES == False: automationhat.relay.two.toggle()
CURRENT_ACTUATOR_EXTENSION_STATUS = 'Retracted'
# Write the modified status to a text file
actuator_status_file_handle = open(ACTUATOR_STATUS_FILE_NAME, 'w')
actuator_status_file_handle.write(CURRENT_ACTUATOR_EXTENSION_STATUS)
actuator_status_file_handle.close()
if DISPLAY_PROCESS_MESSAGES == True: print ("Returning CURRENT_ACTUATOR_EXTENSION_STATUS", CURRENT_ACTUATOR_EXTENSION_STATUS)
return(CURRENT_ACTUATOR_EXTENSION_STATUS)
# Solenoid valve open and close subroutine
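# Example: solenoid_valve_operation('Open') drives GPIO 16 high via pigs (when not already open) and records the new state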
def solenoid_valve_operation(solenoid_valve_status):
if DISPLAY_PROCESS_MESSAGES == True: print ("Opening:", SOLENOID_STATUS_FILE_NAME)
solenoid_status_file_handle = open(SOLENOID_STATUS_FILE_NAME, 'r')
CURRENT_SOLENOID_VALVE_STATUS = solenoid_status_file_handle.readline().strip('\n')
solenoid_status_file_handle.close()
if DISPLAY_PROCESS_MESSAGES == True: print ("Comparing CURRENT_SOLENOID_VALVE_STATUS == Comparing CURRENT_SOLENOID_VALVE_STATUS:", CURRENT_SOLENOID_VALVE_STATUS, solenoid_valve_status)
if (CURRENT_SOLENOID_VALVE_STATUS == solenoid_valve_status):
if DISPLAY_PROCESS_MESSAGES == True: print ("Solenoid already in correct state. Returning CURRENT_SOLENOID_VALVE_STATUS:", CURRENT_SOLENOID_VALVE_STATUS)
return(CURRENT_SOLENOID_VALVE_STATUS)
else:
if (solenoid_valve_status == 'Open'):
if DISPLAY_PROCESS_MESSAGES == True: print ("Calling pigs to open the solenoid valve")
# Toggle relay #3 on to open the solenoid valve
pigs_gpio_command_line = ["/usr/bin/pigs", "w 16 1"]
p = subprocess.Popen(pigs_gpio_command_line)
CURRENT_SOLENOID_VALVE_STATUS = 'Open'
if DISPLAY_PROCESS_MESSAGES == True: print ("Saving the solenoid valve value to:", SOLENOID_STATUS_FILE_NAME)
# Write the modified status to a text file
solenoid_status_file_handle = open(SOLENOID_STATUS_FILE_NAME, 'w')
solenoid_status_file_handle.write(CURRENT_SOLENOID_VALVE_STATUS)
solenoid_status_file_handle.close()
return(CURRENT_SOLENOID_VALVE_STATUS)
if (solenoid_valve_status == 'Closed'):
if DISPLAY_PROCESS_MESSAGES == True: print ("Calling pigs to close the solenoid valve")
# Toggle relay #3 off to close the solenoid valve
pigs_gpio_command_line = ["/usr/bin/pigs", "w 16 0"]
p = subprocess.Popen(pigs_gpio_command_line)
CURRENT_SOLENOID_VALVE_STATUS = 'Closed'
if DISPLAY_PROCESS_MESSAGES == True: print ("Saving the solenoid valve value to:", SOLENOID_STATUS_FILE_NAME)
# Write the modified status to a text file
solenoid_status_file_handle = open(SOLENOID_STATUS_FILE_NAME, 'w')
solenoid_status_file_handle.write(CURRENT_SOLENOID_VALVE_STATUS)
solenoid_status_file_handle.close()
return(CURRENT_SOLENOID_VALVE_STATUS)
# Analog to digital converter #1 read soil moisture sensor value subroutine
def read_soil_moisture_sensor_value():
global current_soil_moisture_sensor_value
if ENABLE_FAKE_SENSOR_VALUES == True: current_soil_moisture_sensor_value = FAKE_SOIL_MOISTURE_SENSOR_VALUE
if ENABLE_FAKE_SENSOR_VALUES == True: print ("Fake sensor values enabled. Returning current_soil_moisture_sensor_value:", current_soil_moisture_sensor_value)
if ENABLE_FAKE_SENSOR_VALUES == True: return(current_soil_moisture_sensor_value)
if DISPLAY_PROCESS_MESSAGES == True: print ("Attempting to read the soil moisture sensor")
# The ADC may produce an erroneous moisture reading less than 0.05VDC
# a for loop retries the read process until a value > 0.05VDC is returned
for i in range(0, 25):
try:
# Initialize the counter variable
read_counter = 0
temporary_value = float()
temporary_values_list = list()
current_soil_moisture_sensor_value = float()
standard_deviation_of_sensor_values = 0
# Loop through multiple data reads
while read_counter < 2:
# Read the moisture value from analog to
# digital converter #1
temporary_value = automationhat.analog[0].read()
# Keep one of the values in case the read is
# consistent
good_temporary_value = temporary_value
time.sleep(.9)
# populate a list of values
temporary_values_list.append(temporary_value)
read_counter = read_counter + 1
# If the standard deviation of the series of
# readings is zero then the sensor produced
# multiple consistent values and we should
# consider the data reliable and take actions
# Return the standard deviation of the list of values
standard_deviation_of_sensor_values = math.sqrt(
statistics.pvariance(temporary_values_list))
# If there is no difference in the values
# use the good_temporary_value they are all
# the same
if (standard_deviation_of_sensor_values == 0):
current_soil_moisture_sensor_value = good_temporary_value
elif (standard_deviation_of_sensor_values != 0):
# If there is a difference set the value
# to zero and try again for a consistent
# data read
current_soil_moisture_sensor_value = 0
if DISPLAY_PROCESS_MESSAGES == True: print ("Comparing current_soil_moisture_sensor_value <= MINIMUM_SOIL_MOISTURE_SENSOR_VALUE", current_soil_moisture_sensor_value, MINIMUM_SOIL_MOISTURE_SENSOR_VALUE)
if (current_soil_moisture_sensor_value <= MINIMUM_SOIL_MOISTURE_SENSOR_VALUE):
print('ADC error read soil moisture value less than 0.05VDC = %.2f Attempting reread' % current_soil_moisture_sensor_value)
if DISPLAY_PROCESS_MESSAGES == True: print ("Comparing current_soil_moisture_sensor_value > MINIMUM_SOIL_MOISTURE_SENSOR_VALUE", current_soil_moisture_sensor_value, MINIMUM_SOIL_MOISTURE_SENSOR_VALUE)
if (current_soil_moisture_sensor_value > MINIMUM_SOIL_MOISTURE_SENSOR_VALUE):
if DISPLAY_PROCESS_MESSAGES == True: print ("Have a good value for current_soil_moisture_sensor_value returning: ", current_soil_moisture_sensor_value)
return(current_soil_moisture_sensor_value)
break
except RuntimeError as e:
# Print an error if the sensor read fails
print("ADC sensor read failed: ", e.args)
# Analog to digital converter #2 read light dependent resistor value subroutine
def read_luminosity_sensor_value():
global current_luminosity_sensor_value
if ENABLE_FAKE_SENSOR_VALUES == True: current_luminosity_sensor_value = FAKE_LUMINOSITY_SENSOR_VALUE
if ENABLE_FAKE_SENSOR_VALUES == True: print ("Fake sensor values enabled. Returning current_luminosity_sensor_value:", current_luminosity_sensor_value)
if ENABLE_FAKE_SENSOR_VALUES == True: return(current_luminosity_sensor_value)
# The ADC may produce an erroneous luminosity reading less than 0.00VDC
# a for loop retries the read process until a value > 0.00VDC is returned
for i in range(0, 25):
try:
if DISPLAY_PROCESS_MESSAGES == True: print ("Attempting to read the luminosity sensor")
# Initialize the counter variable
read_counter = 0
temporary_value = float()
temporary_values_list = list()
current_luminosity_sensor_value = float()
standard_deviation_of_sensor_values = 0
# Loop through multiple data reads
while read_counter < 2:
# Read the light value from analog to digital converter #2
temporary_value = automationhat.analog[1].read()
# Keep one of the values in case the read is
# consistent
good_temporary_value = temporary_value
time.sleep(.9)
# Populate a list of values
temporary_values_list.append(temporary_value)
read_counter = read_counter + 1
# If the standard deviation of the series of
# readings is zero then the sensor produced
# multiple consistent values and we should
# consider the data reliable and take actions
# return the standard deviation of the list of values
standard_deviation_of_sensor_values = math.sqrt(statistics.pvariance(temporary_values_list))
# If there is no difference in the values
# use the good_temporary_value they are all
# the same
if (standard_deviation_of_sensor_values == 0):
current_luminosity_sensor_value = good_temporary_value
elif (standard_deviation_of_sensor_values != 0):
# If there is a difference set the value
# to zero and try again for a consistent
# data read
current_luminosity_sensor_value = 0
if DISPLAY_PROCESS_MESSAGES == True: print ("Comparing current_luminosity_sensor_value < MINIMUM_LUMINOSITY_SENSOR_VALUE", current_luminosity_sensor_value, MINIMUM_LUMINOSITY_SENSOR_VALUE)
if (current_luminosity_sensor_value < MINIMUM_LUMINOSITY_SENSOR_VALUE):
print('ADC error read LDR value less than 0.01VDC = %.3f Attempting reread' %
current_luminosity_sensor_value)
if DISPLAY_PROCESS_MESSAGES == True: print ("Comparing current_luminosity_sensor_value > MINIMUM_LUMINOSITY_SENSOR_VALUE", current_luminosity_sensor_value, MINIMUM_LUMINOSITY_SENSOR_VALUE)
if (current_luminosity_sensor_value > MINIMUM_LUMINOSITY_SENSOR_VALUE):
if DISPLAY_PROCESS_MESSAGES == True: print ("Have a good value for current_luminosity_sensor_value returning: ", current_luminosity_sensor_value)
return(current_luminosity_sensor_value)
break
except RuntimeError as e:
# Print an error if the sensor read fails
print("ADC sensor read failed: ", e.args)
# Write text data to the 16x2 LCD as a serial device subroutine
def write_lcd_messages(write_lcd_message_content):
if DISPLAY_PROCESS_MESSAGES == True: print ("Writing the 16x2 LCD screen:", write_lcd_message_content)
ser = serial.Serial(SERIAL_LCD_DEVICE_NAME, 9600, timeout=1)
# Enable auto scrolling
ser.write("%c%c" % (0xfe, 0x51))
time.sleep(.1)
# Clear the screen
ser.write("%c%c" % (0xfe, 0x58))
time.sleep(.1)
# Change the lcd back light color
# ser.write("%c%c%c%c%c" % (0xfe, 0xd0, 0x0, 0x0, 0xff))
# time.sleep(.5)
# ser.write("%c%c%c%c%c" % (0xfe, 0xd0, 0xff, 0xff, 0xff))
# time.sleep(.5)
ser.write(write_lcd_message_content)
if DISPLAY_PROCESS_MESSAGES == True: print ("LCD screen content active for:", DISPLAY_LCD_MESSAGE_LENGTH_SECONDS)
time.sleep(DISPLAY_LCD_MESSAGE_LENGTH_SECONDS)
ser.write("%c%c" % (0xfe, 0x58))
# Send console broadcast messages via wall
def write_wall_messages(write_wall_message_content):
if DISPLAY_PROCESS_MESSAGES == True: print ("Broadcasting terminal wall message:", write_wall_message_content)
wall_message_text = '%s' % write_wall_message_content
wall_message_text = wall_message_text + ' @' + WALL_TERMINAL_MESSAGE_SUFFIX_STRING
# The wall applications -n no banner
# option requires root thus sudo
wall_message_command_line = ['sudo', 'wall', '-n', wall_message_text]
# Comment out the following line to disable console notifications
p = subprocess.Popen(wall_message_command_line)
# Write CSV output file subroutine
def write_csv_output_file(current_luminosity_sensor_value, current_temperature_sensor_value, current_humidity_sensor_value, current_soil_moisture_sensor_value, CURRENT_SOLENOID_VALVE_STATUS, CURRENT_ACTUATOR_EXTENSION_STATUS, CURRENT_OUTPUT_STATUS_LIST):
# Begin file append of CSV file to the web server root
# "Luminosity","Temperature","Humidity","Moisture",
# "Solenoid","Actuator","Output1","Output2","Output3","Epoch"
if DISPLAY_PROCESS_MESSAGES == True: print ("Opening to append INDEX_LOG_DATA_CSV_FILE_NAME", INDEX_LOG_DATA_CSV_FILE_NAME)
csv_file_handle = open(INDEX_LOG_DATA_CSV_FILE_NAME, "a")
if DISPLAY_PROCESS_MESSAGES == True: print ("Writing: ", current_luminosity_sensor_value, current_temperature_sensor_value, current_humidity_sensor_value, current_soil_moisture_sensor_value, CURRENT_SOLENOID_VALVE_STATUS, CURRENT_ACTUATOR_EXTENSION_STATUS, CURRENT_OUTPUT_STATUS_LIST)
csv_file_handle.write('"')
csv_file_handle.write(str(current_luminosity_sensor_value))
csv_file_handle.write('",\"')
csv_file_handle.write('')
csv_file_handle.write(str(current_temperature_sensor_value))
csv_file_handle.write('","')
csv_file_handle.write('')
csv_file_handle.write(str(current_humidity_sensor_value))
csv_file_handle.write('","')
csv_file_handle.write('')
csv_file_handle.write(str(current_soil_moisture_sensor_value))
csv_file_handle.write('","')
csv_file_handle.write('')
csv_file_handle.write(CURRENT_SOLENOID_VALVE_STATUS)
csv_file_handle.write('","')
csv_file_handle.write('')
csv_file_handle.write(CURRENT_ACTUATOR_EXTENSION_STATUS)
csv_file_handle.write('","')
csv_file_handle.write('')
csv_file_handle.write('%s' % CURRENT_OUTPUT_STATUS_LIST[0])
csv_file_handle.write('","')
csv_file_handle.write('')
csv_file_handle.write('%s' % CURRENT_OUTPUT_STATUS_LIST[1])
csv_file_handle.write('","')
csv_file_handle.write('')
csv_file_handle.write('%s' % CURRENT_OUTPUT_STATUS_LIST[2])
csv_file_handle.write('","')
# Second since the epoch
csv_file_handle.write('')
csv_file_handle.write('%s' % time.time())
csv_file_handle.write('"' + '\n')
csv_file_handle.write('')
if DISPLAY_PROCESS_MESSAGES == True: print ("Closing the file")
csv_file_handle.close()
# Write sqlite database subroutine
def write_database_output(current_luminosity_sensor_value, current_temperature_sensor_value, current_humidity_sensor_value, current_soil_moisture_sensor_value, CURRENT_SOLENOID_VALVE_STATUS, CURRENT_ACTUATOR_EXTENSION_STATUS, CURRENT_OUTPUT_STATUS_LIST):
# Begin file table data insert of row
try:
if DISPLAY_PROCESS_MESSAGES == True: print ("Attempting to access SQLITE_DATABASE_FILE_NAME", SQLITE_DATABASE_FILE_NAME)
# Establish a connection to the database
connection_sqlite_database = sqlite3.connect(SQLITE_DATABASE_FILE_NAME)
curs = connection_sqlite_database.cursor()
if DISPLAY_PROCESS_MESSAGES == True: print ("Performing row INSERT INTO table: ", current_luminosity_sensor_value, current_temperature_sensor_value, current_humidity_sensor_value, current_soil_moisture_sensor_value, CURRENT_SOLENOID_VALVE_STATUS, CURRENT_ACTUATOR_EXTENSION_STATUS, CURRENT_OUTPUT_STATUS_LIST[0], CURRENT_OUTPUT_STATUS_LIST[1], CURRENT_OUTPUT_STATUS_LIST[2])
# Insert data rows into the table
curs.execute("INSERT INTO greenhouse (luminosity, temperature, humidity, soilmoisture, solenoidstatus, actuatorstatus, outputonestatus, outputtwostatus, outputthreestatus, currentdate, currenttime) VALUES((?), (?), (?), (?), (?), (?), (?), (?), (?), date('now','localtime'), time('now','localtime'))",
(current_luminosity_sensor_value, current_temperature_sensor_value, current_humidity_sensor_value, current_soil_moisture_sensor_value, CURRENT_SOLENOID_VALVE_STATUS, CURRENT_ACTUATOR_EXTENSION_STATUS, CURRENT_OUTPUT_STATUS_LIST[0], CURRENT_OUTPUT_STATUS_LIST[1], CURRENT_OUTPUT_STATUS_LIST[2]))
# Commit the changes
connection_sqlite_database.commit()
curs.close()
if DISPLAY_PROCESS_MESSAGES == True: print ("Closing the database connection")
connection_sqlite_database.close()
except sqlite3.IntegrityError as e:
print('Sqlite Error: ', e.args[0]) # error output
# Read sqlite database generate graphs subroutine
def read_database_output_graphs():
# Read the sqlite database table and generate graph image files
try:
if DISPLAY_PROCESS_MESSAGES == True: print ("Attempting to access SQLITE_DATABASE_FILE_NAME", SQLITE_DATABASE_FILE_NAME)
# Establish a connection to the database
connection_sqlite_database = sqlite3.connect(SQLITE_DATABASE_FILE_NAME)
curs = connection_sqlite_database.cursor()
if DISPLAY_PROCESS_MESSAGES == True: print ("Attempting to execute query")
# Select data rows from the table
curs.execute('SELECT luminosity, temperature, humidity, soilmoisture, solenoidstatus, actuatorstatus, outputonestatus, outputtwostatus, outputthreestatus, currentdate, currenttime FROM greenhouse ORDER BY ROWID DESC LIMIT 720 ')
data_row_fetched_all = curs.fetchall()
date_values = []
date_values_no_year = []
values_luminosity = []
values_temperature = []
values_humidity = []
values_soil_moisture = []
for row in data_row_fetched_all:
values_luminosity.append(row[0])
values_temperature.append(row[1])
values_humidity.append(row[2])
values_soil_moisture.append(row[3])
date_values.append(parser.parse(row[9]))
tempString = row[9].split("-", 1)
date_values_no_year.append(tempString[1])
if DISPLAY_PROCESS_MESSAGES == True: print ("Query complete")
if DISPLAY_PROCESS_MESSAGES == True: print ("Generating luminosity graph image file: ", GRAPH_IMAGE_LUMINOSITY_FILE_NAME)
plt.figure(0)
plt.plot(values_luminosity)
plt.ylabel('Luminosity [0.01-5.00 Volts]')
plt.xlabel('720 x two minute read intervals = Last 24 Hours')
#plt.show(block=True)
plt.savefig(GRAPH_IMAGE_LUMINOSITY_FILE_NAME)
if DISPLAY_PROCESS_MESSAGES == True: print ("Generating temperature graph image file: ", GRAPH_IMAGE_TEMPERATURE_FILE_NAME)
plt.figure(1)
plt.plot(values_temperature)
plt.ylabel('Temperature [Degrees Fahrenheit] ')
plt.xlabel('720 x two minute read intervals = Last 24 Hours')
#plt.show(block=True)
plt.savefig(GRAPH_IMAGE_TEMPERATURE_FILE_NAME)
if DISPLAY_PROCESS_MESSAGES == True: print ("Generating humidity graph image file: ", GRAPH_IMAGE_HUMIDITY_FILE_NAME)
plt.figure(2)
plt.plot(values_humidity)
plt.ylabel('Humidity [0%-100%] ')
plt.xlabel('720 x two minute read intervals = Last 24 Hours')
#plt.show(block=True)
plt.savefig(GRAPH_IMAGE_HUMIDITY_FILE_NAME)
if DISPLAY_PROCESS_MESSAGES == True: print ("Generating soil moisture graph image file: ", GRAPH_IMAGE_SOIL_MOISTURE_FILE_NAME)
plt.figure(3)
plt.plot(values_soil_moisture)
plt.ylabel('Soil Moisture [0.01-5.00 Volts] ')
plt.xlabel('720 x two minute read intervals = Last 24 Hours')
#plt.show(block=True)
plt.savefig(GRAPH_IMAGE_SOIL_MOISTURE_FILE_NAME)
# Commit the changes
connection_sqlite_database.commit()
curs.close()
if DISPLAY_PROCESS_MESSAGES == True: print ("Closing the database connection")
connection_sqlite_database.close()
except sqlite3.IntegrityError as e:
print('Sqlite Error: ', e.args[0]) # error output
# Display the current environmental information on the 16x2 LCD screen
def display_lcd_screen_messages():
if (DISPLAY_LCD_SCREEN_MESSAGES_ACTIVE_SWTICH is True):
if DISPLAY_PROCESS_MESSAGES == True: print ("Calling write_lcd_message_content() from within display_lcd_screen_messages()")
# Display the luminosity value on the LCD
write_lcd_message_content = 'Luminosity: %s' % current_luminosity_sensor_value
write_lcd_messages(write_lcd_message_content)
# Display the temperature value on the LCD
write_lcd_message_content = 'Temp: %s' % current_temperature_sensor_value
write_lcd_messages(write_lcd_message_content)
# Display the humidity value on the LCD
write_lcd_message_content = 'Humidity: %s' % current_humidity_sensor_value
write_lcd_messages(write_lcd_message_content)
# Display soil moisture sensor on the LCD
write_lcd_message_content = 'Soil moisture: %s' % current_soil_moisture_sensor_value
write_lcd_messages(write_lcd_message_content)
# Display the linear actuator status on the LCD
write_lcd_message_content = 'Linear actuator: %s' % CURRENT_ACTUATOR_EXTENSION_STATUS
write_lcd_messages(write_lcd_message_content)
# Display the solenoid value status on the LCD
write_lcd_message_content = 'Solenoid: %s' % CURRENT_SOLENOID_VALVE_STATUS
write_lcd_messages(write_lcd_message_content)
# Display the outputs status on the LCD
write_lcd_message_content = 'Output #1 status: %s' % CURRENT_OUTPUT_STATUS_LIST[0]
write_lcd_messages(write_lcd_message_content)
write_lcd_message_content = 'Output #2 status: %s' % CURRENT_OUTPUT_STATUS_LIST[1]
write_lcd_messages(write_lcd_message_content)
write_lcd_message_content = 'Output #3 status: %s' % CURRENT_OUTPUT_STATUS_LIST[2]
write_lcd_messages(write_lcd_message_content)
else:
print ("LCD screen messages disabled in greenhouse.py header: DISPLAY_LCD_SCREEN_MESSAGES_ACTIVE_SWTICH = ", DISPLAY_LCD_SCREEN_MESSAGES_ACTIVE_SWTICH)
# Display the current environmental information in the console window via wall messages
def display_console_wall_messages():
if (DISPLAY_CONSOLE_WALL_MESSAGES_ACTIVE_SWTICH is True):
if DISPLAY_PROCESS_MESSAGES == True: print ("Calling write_wall_message_content() from within display_console_wall_messages()")
# Display the luminosity value via a console broadcast message
write_wall_message_content = 'Luminosity: %s' % current_luminosity_sensor_value
write_wall_messages(write_wall_message_content)
# Display the temperature value via a console broadcast message
write_wall_message_content = 'Temp: %s' % current_temperature_sensor_value
write_wall_messages(write_wall_message_content)
# Display the humidity value via a console broadcast message
write_wall_message_content = 'Humidity: %s' % current_humidity_sensor_value
write_wall_messages(write_wall_message_content)
# Display the soil moisture value via a console broadcast message
write_wall_message_content = 'Soil moisture: %s' % current_soil_moisture_sensor_value
write_wall_messages(write_wall_message_content)
# Display the solenoid value status via a console broadcast message
write_wall_message_content = 'Solenoid: %s' % CURRENT_SOLENOID_VALVE_STATUS
write_wall_messages(write_wall_message_content)
# Display the linear actuator status via a console broadcast message
write_wall_message_content = 'Linear actuator: %s' % CURRENT_ACTUATOR_EXTENSION_STATUS
write_wall_messages(write_wall_message_content)
# Display the outputs status via a console broadcast message
write_wall_message_content = 'Output #1 status: %s' % CURRENT_OUTPUT_STATUS_LIST[0]
write_wall_messages(write_wall_message_content)
write_wall_message_content = 'Output #2 status: %s' % CURRENT_OUTPUT_STATUS_LIST[1]
write_wall_messages(write_wall_message_content)
write_wall_message_content = 'Output #3 status: %s' % CURRENT_OUTPUT_STATUS_LIST[2]
write_wall_messages(write_wall_message_content)
else:
print ("Console wall messages disabled in greenhouse.py header: DISPLAY_CONSOLE_WALL_MESSAGES_ACTIVE_SWTICH = ", DISPLAY_CONSOLE_WALL_MESSAGES_ACTIVE_SWTICH)
# Begin the process reading evaluating environmental data and broadcasting messages
def read_values_display_messages():
if DISPLAY_PROCESS_MESSAGES == True: print ("Calling read_control_values_from_files()")
# Call the read system control values from files on disk subroutine
read_control_values_from_files()
if DISPLAY_PROCESS_MESSAGES == True: print ("Calling read_luminosity_sensor_value()")
# Call the read luminosity sensor value subroutine
current_luminosity_sensor_value = read_luminosity_sensor_value()
if DISPLAY_PROCESS_MESSAGES == True: print ("Calling read_temperature_humidity_values()")
# Call the read temperature and humidity value subroutine
current_humidity_sensor_value, current_temperature_sensor_value = read_temperature_humidity_values()
if DISPLAY_PROCESS_MESSAGES == True: print ("Calling read_soil_moisture_sensor_value()")
# Call the read soil moisture sensor value subroutine
current_soil_moisture_sensor_value = read_soil_moisture_sensor_value()
if DISPLAY_PROCESS_MESSAGES == True: print ("Calling display_lcd_screen_messages()")
# Call the display notifications on the 16x2 LCD screen subroutine
display_lcd_screen_messages()
if DISPLAY_PROCESS_MESSAGES == True: print ("Calling display_console_wall_messages()")
# Call the display notifications in the console as wall messages subroutine
display_console_wall_messages()
# Begin the process of evaluating environmental conditions and
# respond accordingly
def evaluate_environmental_conditions_perform_automated_responses():
# Evaluate whether we should close or open the window
if DISPLAY_PROCESS_MESSAGES == True: print ("Performing evaluate_environmental_conditions_perform_automated_responses() comparison process now")
if DISPLAY_PROCESS_MESSAGES == True: print ("Evaluate the linear actuator is configured in a state of: Off or Schedule or Sensor. Only continue if the value is Sensor.")
if DISPLAY_PROCESS_MESSAGES == True: print ("Comparing OUTPUT_ONE_CONFIGURATION_BETWEEN_OFF_SENSOR_SCHEDULE_VALUE == 'Sensor':", OUTPUT_ONE_CONFIGURATION_BETWEEN_OFF_SENSOR_SCHEDULE_VALUE)
if (LINEAR_ACTUATOR_CONFIGURATION_BETWEEN_OFF_SENSOR_SCHEDULE_VALUE.rstrip() == 'Sensor'):
if DISPLAY_PROCESS_MESSAGES == True: print ("Comparing current_temperature_sensor_value <= float(MINIMUM_TEMPERATURE_SENSOR_VALUE_ACTUATOR_RETRACT) and CURRENT_SOLENOID_VALVE_STATUS == 'Closed':", current_temperature_sensor_value, float(MINIMUM_TEMPERATURE_SENSOR_VALUE_ACTUATOR_RETRACT), CURRENT_SOLENOID_VALVE_STATUS)
if DISPLAY_PROCESS_MESSAGES == True: print ("Comparing current_temperature_sensor_value > float(MINIMUM_TEMPERATURE_SENSOR_VALUE_ACTUATOR_RETRACT) and CURRENT_SOLENOID_VALVE_STATUS == 'Closed':", current_temperature_sensor_value, float(MINIMUM_TEMPERATURE_SENSOR_VALUE_ACTUATOR_RETRACT), CURRENT_SOLENOID_VALVE_STATUS)
if (current_temperature_sensor_value <= float(MINIMUM_TEMPERATURE_SENSOR_VALUE_ACTUATOR_RETRACT) and
CURRENT_SOLENOID_VALVE_STATUS == 'Closed'
):
if DISPLAY_PROCESS_MESSAGES == True: print ("Closing the window now")
# Retract the linear actuator and close the window
actuator_extension_status = 'Retracted'
CURRENT_ACTUATOR_EXTENSION_STATUS = linear_actuator_extension_retraction(actuator_extension_status)
elif (current_temperature_sensor_value > float(MINIMUM_TEMPERATURE_SENSOR_VALUE_ACTUATOR_RETRACT) and
CURRENT_SOLENOID_VALVE_STATUS == 'Closed'
):
if DISPLAY_PROCESS_MESSAGES == True: print ("Opening the window now")
# extend the linear actuator and open the window
actuator_extension_status = 'Extended'
CURRENT_ACTUATOR_EXTENSION_STATUS = linear_actuator_extension_retraction(actuator_extension_status)
if DISPLAY_PROCESS_MESSAGES == True: print ("Evaluate if output one is configured in a state of: Off or Schedule or Sensor. Only continue if the value is Sensor.")
if DISPLAY_PROCESS_MESSAGES == True: print ("Comparing OUTPUT_ONE_CONFIGURATION_BETWEEN_OFF_SENSOR_SCHEDULE_VALUE == 'Sensor':", OUTPUT_ONE_CONFIGURATION_BETWEEN_OFF_SENSOR_SCHEDULE_VALUE)
if (OUTPUT_ONE_CONFIGURATION_BETWEEN_OFF_SENSOR_SCHEDULE_VALUE.rstrip() == 'Sensor'):
if DISPLAY_PROCESS_MESSAGES == True: print ("Comparing current_temperature_sensor_value >= float(MINIMUM_TEMPERATURE_OUTPUT_ONE_OFF) or current_humidity_sensor_value >= float(MINIMUM_HUMIDITY_OUTPUT_ONE_OFF) and CURRENT_SOLENOID_VALVE_STATUS == 'Closed':", current_temperature_sensor_value, float(MINIMUM_TEMPERATURE_OUTPUT_ONE_OFF), current_humidity_sensor_value, float(MINIMUM_HUMIDITY_OUTPUT_ONE_OFF), CURRENT_SOLENOID_VALVE_STATUS)
if DISPLAY_PROCESS_MESSAGES == True: print ("Comparing current_temperature_sensor_value < float(MINIMUM_TEMPERATURE_OUTPUT_ONE_OFF) or current_humidity_sensor_value < float(MINIMUM_HUMIDITY_OUTPUT_ONE_OFF) and CURRENT_SOLENOID_VALVE_STATUS == 'Closed':", current_temperature_sensor_value, float(MINIMUM_TEMPERATURE_OUTPUT_ONE_OFF), current_humidity_sensor_value, float(MINIMUM_HUMIDITY_OUTPUT_ONE_OFF), CURRENT_SOLENOID_VALVE_STATUS)
# Evaulate if we need to enable output #1 turn on the fan
if (current_temperature_sensor_value >= float(MINIMUM_TEMPERATURE_OUTPUT_ONE_OFF) and
current_humidity_sensor_value >= float(MINIMUM_HUMIDITY_OUTPUT_ONE_OFF) and
CURRENT_SOLENOID_VALVE_STATUS == 'Closed'
):
if DISPLAY_PROCESS_MESSAGES == True: print ("Enabling Output #1")
# Enable output one
output_number = 0
output_status = 'On'
current_output_status = control_outputs(output_number, output_status)
elif (current_temperature_sensor_value < float(MINIMUM_TEMPERATURE_OUTPUT_ONE_OFF) and
current_humidity_sensor_value < float(MINIMUM_HUMIDITY_OUTPUT_ONE_OFF)
):
if DISPLAY_PROCESS_MESSAGES == True: print ("Disabling Output #1")
# Disable output one
output_number = 0
output_status = 'Off'
current_output_status = control_outputs(output_number, output_status)
if DISPLAY_PROCESS_MESSAGES == True: print ("Evaluate if output two is configured in a state of: Off or Schedule or Sensor. Only continue if the value is Sensor.")
if DISPLAY_PROCESS_MESSAGES == True: print ("Comparing OUTPUT_TWO_CONFIGURATION_BETWEEN_OFF_SENSOR_SCHEDULE_VALUE == 'Sensor':", OUTPUT_TWO_CONFIGURATION_BETWEEN_OFF_SENSOR_SCHEDULE_VALUE)
if (OUTPUT_TWO_CONFIGURATION_BETWEEN_OFF_SENSOR_SCHEDULE_VALUE.rstrip() == 'Sensor'):
# Evaluate if temperature controls output two
if DISPLAY_PROCESS_MESSAGES == True: print ("Evaluate if temperature or luminosity controls output #2")
if DISPLAY_PROCESS_MESSAGES == True: print ("Comparing OUTPUT_TWO_CONFIGURATION_VALUE_BETWEEN_TEMPERATURE_OR_LUMINOSITY == 'Temperature':", OUTPUT_TWO_CONFIGURATION_VALUE_BETWEEN_TEMPERATURE_OR_LUMINOSITY)
if (OUTPUT_TWO_CONFIGURATION_VALUE_BETWEEN_TEMPERATURE_OR_LUMINOSITY.rstrip() == 'Temperature'):
#if (OUTPUT_TWO_CONFIGURATION_VALUE_BETWEEN_TEMPERATURE_OR_LUMINOSITY == 'Temperature'):
if DISPLAY_PROCESS_MESSAGES == True: print ("Evaluate output #2 turned on by temperature")
if DISPLAY_PROCESS_MESSAGES == True: print ("Comparing current_temperature_sensor_value <= float(MINIMUM_TEMPERATURE_OUTPUT_TWO_OFF:", current_temperature_sensor_value, float(MINIMUM_TEMPERATURE_OUTPUT_TWO_OFF))
if DISPLAY_PROCESS_MESSAGES == True: print ("Evaluate output #2 turn off by temperature")
if DISPLAY_PROCESS_MESSAGES == True: print ("Comparing current_temperature_sensor_value > float(MINIMUM_TEMPERATURE_OUTPUT_TWO_OFF:", current_temperature_sensor_value, float(MINIMUM_TEMPERATURE_OUTPUT_TWO_OFF))
# Evaulate if we need to enable output #2 turn on the USB heating pad
if (float(int(current_temperature_sensor_value)) <= float(MINIMUM_TEMPERATURE_OUTPUT_TWO_OFF)):
if DISPLAY_PROCESS_MESSAGES == True: print ("Enabling Output #2 by temperature")
# Enable output two
output_number = 1
output_status = 'On'
current_output_status = control_outputs(output_number, output_status)
# Evaluate whether we need to disable output #2 and turn off the USB heating pad
elif (float(int(current_temperature_sensor_value)) > float(MINIMUM_TEMPERATURE_OUTPUT_TWO_OFF)):
if DISPLAY_PROCESS_MESSAGES == True: print ("Disable Output #2 by temperature")
# Disable output two
output_number = 1
output_status = 'Off'
current_output_status = control_outputs(output_number, output_status)
if DISPLAY_PROCESS_MESSAGES == True: print ("Evaluate if output two is configured in a state of: Off or Schedule or Sensor. Only continue if the value is Sensor.")
if DISPLAY_PROCESS_MESSAGES == True: print ("Comparing OUTPUT_TWO_CONFIGURATION_BETWEEN_OFF_SENSOR_SCHEDULE_VALUE == 'Sensor':", OUTPUT_TWO_CONFIGURATION_BETWEEN_OFF_SENSOR_SCHEDULE_VALUE)
if (OUTPUT_TWO_CONFIGURATION_BETWEEN_OFF_SENSOR_SCHEDULE_VALUE.rstrip() == 'Sensor'):
if DISPLAY_PROCESS_MESSAGES == True: print ("Comparing OUTPUT_TWO_CONFIGURATION_VALUE_BETWEEN_TEMPERATURE_OR_LUMINOSITY == 'Luminosity':", OUTPUT_TWO_CONFIGURATION_VALUE_BETWEEN_TEMPERATURE_OR_LUMINOSITY)
# Evaluate if luminosity controls output two
if (OUTPUT_TWO_CONFIGURATION_VALUE_BETWEEN_TEMPERATURE_OR_LUMINOSITY.rstrip() == 'Luminosity'):
if DISPLAY_PROCESS_MESSAGES == True: print ("Evaluate output #2 turn on by luminosity")
if DISPLAY_PROCESS_MESSAGES == True: print ("Comparing current_luminosity_sensor_value <= float(MINIMUM_LUMINOSITY_OUTPUT_TWO_OFF:", current_luminosity_sensor_value, float(MINIMUM_LUMINOSITY_OUTPUT_TWO_OFF))
if DISPLAY_PROCESS_MESSAGES == True: print ("Evaluate output #2 turn off by luminosity")
if DISPLAY_PROCESS_MESSAGES == True: print ("Comparing current_luminosity_sensor_value > float(MINIMUM_LUMINOSITY_OUTPUT_TWO_OFF:", current_luminosity_sensor_value, float(MINIMUM_LUMINOSITY_OUTPUT_TWO_OFF))
# Evaulate if we need to enable output #2 turn on the grow light
if (current_luminosity_sensor_value <= float(MINIMUM_LUMINOSITY_OUTPUT_TWO_OFF)):
if DISPLAY_PROCESS_MESSAGES == True: print ("Enable Output #2 by luminosity")
# Enable output two
output_number = 1
output_status = 'On'
current_output_status = control_outputs(output_number, output_status)
# Evaluate whether we need to disable output #2 and turn off the grow light
elif (current_luminosity_sensor_value > float(MINIMUM_LUMINOSITY_OUTPUT_TWO_OFF)):
if DISPLAY_PROCESS_MESSAGES == True: print ("Disable Output #2 by luminosity")
# Disable output two
output_number = 1
output_status = 'Off'
current_output_status = control_outputs(output_number, output_status)
# Evaluate if the soil moisture sensor controls the solenoid valve
if DISPLAY_PROCESS_MESSAGES == True: print ("Evaluate if the solenoid valve is configured in a state of: Off or Schedule or Sensor. Only continue if the value is Sensor.")
if DISPLAY_PROCESS_MESSAGES == True: print ("Comparing SOLENOID_VALVE_CONFIGURATION_BETWEEN_OFF_SENSOR_SCHEDULE_VALUE == 'Sensor':", SOLENOID_VALVE_CONFIGURATION_BETWEEN_OFF_SENSOR_SCHEDULE_VALUE)
if (SOLENOID_VALVE_CONFIGURATION_BETWEEN_OFF_SENSOR_SCHEDULE_VALUE.rstrip() == 'Sensor'):
if DISPLAY_PROCESS_MESSAGES == True: print ("Evaluate if the solenoid valve should be open or closed")
if DISPLAY_PROCESS_MESSAGES == True: print ("Comparing current_soil_moisture_sensor_value >= float(MINIMUM_SOIL_MOISTURE_SENSOR_VALUE_SOLENOID_OPEN", current_soil_moisture_sensor_value, float(MINIMUM_SOIL_MOISTURE_SENSOR_VALUE_SOLENOID_OPEN))
if DISPLAY_PROCESS_MESSAGES == True: print ("Comparing current_soil_moisture_sensor_value < float(MINIMUM_SOIL_MOISTURE_SENSOR_VALUE_SOLENOID_OPEN", current_soil_moisture_sensor_value, float(MINIMUM_SOIL_MOISTURE_SENSOR_VALUE_SOLENOID_OPEN))
# Evaluate if the solenoid valve should be open or closed
if (current_soil_moisture_sensor_value >= float(MINIMUM_SOIL_MOISTURE_SENSOR_VALUE_SOLENOID_OPEN)):
if DISPLAY_PROCESS_MESSAGES == True: print ("Disabling output #1 to conserve power for the solenoid valve")
# Disable output one
output_number = 0
output_status = 'Off'
current_output_status = control_outputs(output_number, output_status)
if DISPLAY_PROCESS_MESSAGES == True: print ("Opening the solenoid valve now")
# Enable relay three opening the solenoid valve
solenoid_valve_status = 'Open'
solenoid_valve_operation(solenoid_valve_status)
elif (current_soil_moisture_sensor_value < float(MINIMUM_SOIL_MOISTURE_SENSOR_VALUE_SOLENOID_OPEN)):
if DISPLAY_PROCESS_MESSAGES == True: print ("Closing the solenoid valve now")
# Disable relay three closing the solenoid valve
solenoid_valve_status = 'Closed'
solenoid_valve_operation(solenoid_valve_status)
# Begin Sqlite database, CSV file, and graph image updates
def perform_write_database_csv_graph_image_update_process():
if DISPLAY_PROCESS_MESSAGES == True: print ("Calling write_database_output() writing: ", current_luminosity_sensor_value, current_temperature_sensor_value, current_humidity_sensor_value, current_soil_moisture_sensor_value, CURRENT_SOLENOID_VALVE_STATUS, CURRENT_ACTUATOR_EXTENSION_STATUS, CURRENT_OUTPUT_STATUS_LIST)
# Call the write database table subroutine
write_database_output(current_luminosity_sensor_value, current_temperature_sensor_value, current_humidity_sensor_value, current_soil_moisture_sensor_value,
CURRENT_SOLENOID_VALVE_STATUS, CURRENT_ACTUATOR_EXTENSION_STATUS, CURRENT_OUTPUT_STATUS_LIST)
if DISPLAY_PROCESS_MESSAGES == True: print ("Calling write_csv_output_file() writing: ", current_luminosity_sensor_value, current_temperature_sensor_value, current_humidity_sensor_value, current_soil_moisture_sensor_value, CURRENT_SOLENOID_VALVE_STATUS, CURRENT_ACTUATOR_EXTENSION_STATUS, CURRENT_OUTPUT_STATUS_LIST)
# Call the write CSV output file subroutine
write_csv_output_file(current_luminosity_sensor_value, current_temperature_sensor_value, current_humidity_sensor_value, current_soil_moisture_sensor_value,
CURRENT_SOLENOID_VALVE_STATUS, CURRENT_ACTUATOR_EXTENSION_STATUS, CURRENT_OUTPUT_STATUS_LIST)
if DISPLAY_PROCESS_MESSAGES == True: print ("Calling read_database_output_graphs()")
# Call the read database table data output graph files subroutine
read_database_output_graphs()
# Begin reading system control values, current sensor values, and
# display system status messages
if DISPLAY_PROCESS_MESSAGES == True: print ("Calling read_values_display_messages()")
read_values_display_messages()
# Begin evaluating environmental conditions and performing
# automated responses as configured
if DISPLAY_PROCESS_MESSAGES == True: print ("Calling evaluate_environmental_conditions_perform_automated_responses()")
evaluate_environmental_conditions_perform_automated_responses()
# Begin Sqlite database file, CSV file, and graph
# image file updates
if DISPLAY_PROCESS_MESSAGES == True: print ("Calling perform_write_database_csv_graph_image_update_process()")
perform_write_database_csv_graph_image_update_process()
|
import os
import click
from mlcommons_box import parse # Do not remove (it registers schemas on import)
from mlcommons_box.common import mlbox_metadata
from mlcommons_box_ssh import ssh_metadata
from mlcommons_box_ssh.ssh_run import SSHRun
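# Load the MLBox and SSH platform definitions, then configure the remote environment over SSH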
def configure_(mlbox: str, platform: str):
mlbox: mlbox_metadata.MLBox = mlbox_metadata.MLBox(path=mlbox)
mlbox.platform = ssh_metadata.Platform(path=platform)
print(mlbox)
runner = SSHRun(mlbox)
runner.configure()
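# Load the MLBox, platform, and task definitions, then run the selected task on the remote host over SSH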
def run_(mlbox: str, platform: str, task: str):
mlbox: mlbox_metadata.MLBox = mlbox_metadata.MLBox(path=mlbox)
mlbox.platform = ssh_metadata.Platform(path=platform)
mlbox.invoke = mlbox_metadata.MLBoxInvoke(task)
mlbox.task = mlbox_metadata.MLBoxTask(os.path.join(mlbox.tasks_path, f'{mlbox.invoke.task_name}.yaml'))
print(mlbox)
runner = SSHRun(mlbox)
runner.run(task_file=task)
@click.group(name='mlcommons_box_ssh')
def cli():
"""
MLCommons-Box SSH Runner runs boxes (packaged Machine Learning (ML) workloads) in the remote environment.
"""
pass
@cli.command(name='configure', help='Configure remote environment for MLCommons-Box ML workload.')
@click.option('--mlbox', required=True, type=click.Path(exists=True), help='Path to MLBox directory.')
@click.option('--platform', required=True, type=click.Path(exists=True), help='Path to MLBox Platform definition file.')
def configure(mlbox: str, platform: str):
configure_(mlbox, platform)
@cli.command(name='run', help='Run MLCommons-Box ML workload in the remote environment.')
@click.option('--mlbox', required=True, type=click.Path(exists=True), help='Path to MLBox directory.')
@click.option('--platform', required=True, type=click.Path(exists=True), help='Path to MLBox Platform definition file.')
@click.option('--task', required=True, type=click.Path(exists=True), help='Path to MLBox Task definition file.')
def run(mlbox: str, platform: str, task: str):
run_(mlbox, platform, task)
if __name__ == '__main__':
cli()
|
import os
from typing import Dict, List
class GlobalInputParser:
def __init__(self, command_args: Dict):
"""
Parses CLI args and environment variables for inputs that appear before the command
:param command_args: command_args is expected to be initialized using docopt
"""
self._args = command_args
@property
def token(self) -> str:
return self._args.get("--token", None) or os.environ.get("TORQUE_TOKEN", None)
@property
def space(self) -> str:
return self._args.get("--space", None) or os.environ.get("TORQUE_SPACE", None)
@property
def account(self) -> str:
return self._args.get("--account", None) or os.environ.get("TORQUE_ACCOUNT", None)
@property
def profile(self) -> str:
return self._args.get("--profile", None)
@property
def debug(self) -> str:
return self._args.get("--debug", None)
@property
def disable_version_check(self) -> str:
return self._args.get("--disable-version-check", None)
@property
def command(self) -> str:
return self._args.get("<command>", None)
@property
def command_args(self) -> List[str]:
return self._args.get("<args>", None)
@staticmethod
def get_config_path() -> str:
return os.environ.get("TORQUE_CONFIG_PATH", None)
@property
def output_json(self) -> bool:
return self._args.get("--output", None) == "json"
|
from typing import Optional
from pincer import Client, command
from pincer.objects import InteractionFlags, TextChannel
from app.bot import Bot
from app.classes.pot import Pot
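# Channel and message used to display the pot (cagnotte); both IDs are specific to the target guild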
CHANNEL_ID: int = 889523568298307594
MESSAGE_ID: int = 890313495030157313
class TicketCog:
"""A simple commands cog template."""
def __init__(self, client: Bot):
"""Link to bot instance."""
self.name = 'Cagnotte'
self.client: Bot = client
self.channel: Optional[TextChannel] = None
self.pot: Optional[Pot] = None
@Client.event
async def on_ready(self):
self.channel = self.client.guild.get_channel(CHANNEL_ID)
self.pot = Pot(
await self.channel.fetch_message(MESSAGE_ID),
)
await self.pot.update()
@command(
name="vente",
description="Ajoute le montant rapporté par la cagnotte"
)
async def sell_command(self, amount: int):
await self.pot.add(amount)
return (
"> Ajouté!",
InteractionFlags.EPHEMERAL
)
@command(
name="erreur",
description="Corrige une erreur sur la cagnotte"
)
async def error_remove_command(self, amount: int):
await self.pot.correct(amount)
return (
"> Corrigé",
InteractionFlags.EPHEMERAL
)
@command(
name="achat",
description="Retire le montant utilisé depuis la cagnotte"
)
async def buy_remove_command(self, amount: int):
await self.pot.remove(amount)
return (
"> Retiré",
InteractionFlags.EPHEMERAL
)
@command(
name="desc-4",
description="Met à jour la description de la cagnotte."
)
async def set_drink_list_description(self, message: str):
with open(
"assets/pot_description.txt",
'w', encoding='utf-8'
) as f:
f.write(message)
await self.pot.update()
return (
f"Description mise à jour\n>>> {message}",
InteractionFlags.EPHEMERAL
)
setup = TicketCog
|
from random import randint
import random
class Player:
def __init__(self, startX = None, startY = None, startDirection = None, color = (50, 255, 50), window = None, apples = None, players = None, ki = True):
        if startX is None:
            startX = randint(0, 640 // 20 - 1)
        if startY is None:
            startY = randint(0, 480 // 20 - 1)
        if startDirection is None:
            startDirection = randint(0, 3)
self.players = players
self.apples = apples
self.window = window
self.direction = startDirection
self.color = color
self.parts = [[startX, startY]]
self.count = 0
self.ki = ki
self.moved = False
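    # Return True if an apple occupies (x, y); when destroy is set, also remove it from the board.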
def checkOnApple(self, x, y, destroy = True):
count = 0
for apple in self.apples.apples:
if apple[0] == x and apple[1] == y:
if destroy:
del self.apples.apples[count]
return True
count += 1
return False
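    # Walk up to 32 cells in the given direction and return the distance to the first apple (None if none is reached).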
def checkAppleDirect(self, direct, only = True, tcoords = None):
if tcoords == None:
coords = self.parts[0].copy()
else:
coords = tcoords
dist = 0
for i in range(32):
dist += 1
if self.checkOnApple(*coords, False):
return dist
if not only:
if self.checkAppleDirect(self.calcDirect(direct + 1), tcoords = coords.copy()) != None or self.checkAppleDirect(self.calcDirect(direct - 1), tcoords = coords.copy()) != None:
return dist - 1
coords = self.calculateDirectCoords(coords, direct)
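    # Walk up to 32 cells in the given direction and return the distance to the first deadly cell (None if the path stays clear).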
def checkDeathDirect(self, direct):
coords = self.parts[0].copy()
dist = 0
for i in range(32):
dist += 1
if self.checkOnDeath(coords):
return dist
coords = self.calculateDirectCoords(coords, direct)
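    # Simple AI: compare the apple distance and the collision distance straight ahead and to either side, then pick a safe direction (randomly among equally good candidates).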
def getBestDirect(self):
direct = self.direction
adistances = []
ddistances = []
directions = [direct, direct + 1, direct - 1]
adistances.append(self.checkAppleDirect(self.direction, False))
ddistances.append(self.checkDeathDirect(self.direction))
self.changeDirection(direct + 1)
adistances.append(self.checkAppleDirect(self.direction))
ddistances.append(self.checkDeathDirect(self.direction))
self.changeDirection(direct - 1)
adistances.append(self.checkAppleDirect(self.direction))
ddistances.append(self.checkDeathDirect(self.direction))
self.changeDirection(directions[0])
highest = 0
for i in ddistances:
if i != None:
if i > highest:
highest = i
lowest = 640
for i in adistances:
if i != None and i < lowest:
lowest = i
apossible = []
adpossible = []
count = 0
for i in adistances:
if i != None:
if i == lowest or i < 10:
apossible.append(directions[count])
adpossible.append(i)
count += 1
dpossible = []
ddpossible = []
count = 0
for i in ddistances:
if i != None:
if i == highest or i > 2:
dpossible.append(directions[count])
ddpossible.append(i)
count += 1
possible = []
count = 0
for d in dpossible:
acount = 0
for a in apossible:
if d == a:
if ddpossible[count] > adpossible[acount]:
possible.append(d)
acount += 1
count += 1
if possible == []:
count = 0
for i in dpossible:
if ddpossible[count] == highest:
possible.append(i)
count += 1
directs = []
for i in possible:
if self.checkDirectFreeBlockSide(i):
directs.append(i)
if directs == []:
directs = possible
return random.choice(directs)
def checkDirectFreeBlockSide(self, direct, coords = None, no = False):
if coords == None:
coords = self.parts[0].copy()
for i in range(32):
coords = self.calculateDirectCoords(coords, direct)
if no:
deaths = [self.checkOnDeath(self.calculateDirectCoords(self.parts[0].copy(), direct)),
self.checkOnDeath(self.calculateDirectCoords(self.parts[0].copy(), self.calcDirect(direct + 1))),
self.checkOnDeath(self.calculateDirectCoords(self.parts[0].copy(), self.calcDirect(direct - 1)))]
else:
deaths = [self.checkOnDeath(self.calculateDirectCoords(self.parts[0].copy(), direct)),
self.checkDirectFreeBlockSide(self.calcDirect(direct + 1), self.calculateDirectCoords(self.parts[0].copy(), self.calcDirect(direct + 1)), True),
self.checkDirectFreeBlockSide(self.calcDirect(direct - 1), self.calculateDirectCoords(self.parts[0].copy(), self.calcDirect(direct - 1)), True)]
if not deaths[1] or not deaths[2]:
return True
elif deaths[0]:
break
return False
def go(self):
self.moved = False
if self.ki:
self.changeDirection(self.getBestDirect())
if self.checkOnDeath(self.calculateDirectCoords(self.parts[0].copy(), self.direction)):
if not self.checkOnDeath(self.calculateDirectCoords(self.parts[0].copy(), 0)):
self.changeDirection(0)
if not self.checkOnDeath(self.calculateDirectCoords(self.parts[0].copy(), 1)):
self.changeDirection(1)
if not self.checkOnDeath(self.calculateDirectCoords(self.parts[0].copy(), 2)):
self.changeDirection(2)
if not self.checkOnDeath(self.calculateDirectCoords(self.parts[0].copy(), 3)):
self.changeDirection(3)
self.addPart(True)
if not self.checkOnApple(*self.parts[0]):
del self.parts[-1]
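    # Advance coords by one cell in the given direction (0: +y, 1: +x, 2: -y, 3: -x).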
def calculateDirectCoords(self, coords, direction):
if direction == 0:
coords[1] += 1
if direction == 1:
coords[0] += 1
if direction == 2:
coords[1] -= 1
if direction == 3:
coords[0] -= 1
return coords
def addPart(self, start = False):
if start:
self.parts.insert(0, self.calculateDirectCoords(self.parts[0].copy(), self.direction))
else:
self.parts.append(self.calculateDirectCoords(self.parts[0].copy(), self.direction))
def calcDirect(self, direct):
while direct < 0:
direct += 4
while direct > 3:
direct -= 4
return direct
def changeDirection(self, number):
if not self.moved or self.ki:
self.moved = True
if self.calcDirect(self.direction + 1) == self.calcDirect(number) or self.calcDirect(self.direction - 1) == self.calcDirect(number) or self.ki:
self.direction = self.calcDirect(number)
def update(self, time):
self.count += self.window.dt
if self.count >= time:
self.count -= time
self.go()
if self.checkOnDeath(self.parts[0]):
self.die()
def checkOnDeath(self, coords):
touch = False
for player in self.players:
if player == self:
parts = self.parts[1:]
else:
parts = player.parts
for part in parts:
if part == coords:
touch = True
break
if touch:
break
if coords[0] < 0 or coords[1] < 0 or coords[0] > 31 or coords[1] > 23:
touch = True
return touch
def die(self):
if self.ki:
for i in range(len(self.players)):
if self.players[i] == self:
del self.players[i]
break
else:
self.window.terminate()
def render(self):
for part in self.parts:
self.window.render.cube(*part, self.color)
|
#!/usr/bin/env python
import sys
for line in sys.stdin:
    sys.stdout.write(line.lower())
|
import os
with open(os.path.join(os.path.dirname(__file__), "VERSION")) as f:
__version__ = f.read().strip()
from nics_fix_pt.consts import QuantizeMethod, RangeMethod
from nics_fix_pt.quant import *
import nics_fix_pt.nn_fix_inner
from nics_fix_pt import nn_fix
from nics_fix_pt.fix_modules import register_fix_module
FIX_NONE = QuantizeMethod.FIX_NONE
FIX_AUTO = QuantizeMethod.FIX_AUTO
FIX_FIXED = QuantizeMethod.FIX_FIXED
RANGE_MAX = RangeMethod.RANGE_MAX
RANGE_3SIGMA = RangeMethod.RANGE_3SIGMA
class nn_auto_register(object):
"""
An auto register helper that automatically register all not-registered modules
by proxing to modules in torch.nn.
NOTE: We do not guarantee all auto-registered fixed nn modules will well behave,
as they are not tested. Although, I thought it will work in normal cases.
Use with care!
Usage: from nics_fix_pt import NAR as nnf
then e.g. `nnf.Bilinear_fix` and `nnf.Bilinear` can all be used as a fixed-point module.
"""
def __getattr__(self, name):
import torch
attr = getattr(nn_fix, name, None)
if attr is None:
if name.endswith("_fix"):
ori_name = name[:-4]
else:
ori_name = name
ori_cls = getattr(torch.nn, ori_name)
register_fix_module(ori_cls, register_name=ori_name + "_fix")
return getattr(nn_fix, ori_name + "_fix", None)
return attr
NAR = nn_auto_register()
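# Usage sketch (names are illustrative; the fixed module is created on first attribute access):
#   from nics_fix_pt import NAR as nnf
#   BilinearFix = nnf.Bilinear_fix  # torch.nn.Bilinear gets registered as Bilinear_fix on demand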
|
import adafruit_ahtx0
import adafruit_sgp40
import board
import click
import datetime
import json
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
import os
import pandas as pd
import pathlib
import sys
import time
import traceback
# Setup I2C Sensor
i2c = board.I2C()
aht = adafruit_ahtx0.AHTx0(i2c)
sgp = adafruit_sgp40.SGP40(i2c)
# Convert Celsius to Fahrenheit
def c_to_f(input):
return (input * 9 / 5) + 32
# Convert to two decimal places cleanly
# round() won't include trailing zeroes
def round_num(input):
return '{:.2f}'.format(input)
# Save climate information locally
def save_entry(location, temperature, humidity, aqi):
# Ensure the file exists before attempting to read
if not os.path.isfile('entries.json'):
pathlib.Path('entries.json').touch(exist_ok=False)
entries = []
else:
# Load any old entries
with open("entries.json", "r") as f:
try:
entries = json.loads(f.read())
except Exception as e:
print('Error: Parsing entries.json failed')
raise e
# Add this new entry to the list
entries.append({
'location': location,
'temperature': temperature,
'humidity': humidity,
'aqi': aqi,
'date': datetime.datetime.now().isoformat()
})
# Save the list
with open("entries.json", "w") as f:
try:
f.write(json.dumps(entries, indent=2))
except Exception as e:
print('Error: Saving entries.json failed')
raise e
def average_date(date, entries):
# Get a list of all entries for this date
events = list(filter(lambda x: (x['date'][0:10] == date), entries))
return {
'date': pd.to_datetime(date),
'temperature': sum(float(e['temperature']) for e in events) / len(events),
'humidity': sum(float(e['humidity']) for e in events) / len(events),
'aqi': sum(e['aqi'] for e in events) / len(events)
}
# Returns entries as DataFrames
def get_entries(location):
# load entries from file
with open("entries.json", "r") as f:
all_entries = json.loads(f.read())
# Filter entries by our location
entries = list(filter(lambda e: e['location'] == location, all_entries))
# Ensure at least one entry is returned for this location
if len(entries) == 0:
print('Error: No entries found for location ({}). Try another?'.format(location))
sys.exit(1)
# Get a set/list of unique dates in YYYY-MM-DD format from the entries
dates = set(map(lambda e: e['date'][0:10], entries))
# Get the average temperature and humidity per day and convert to a DataFrame
df = pd.DataFrame(map(lambda date: average_date(date, entries), dates))
# Sort values by the date and set it as the index
df = df.sort_values('date', ascending=True).set_index('date')
return df
# Plot dataset on a axis with it's display information
def plot_data(data, field, ax, x_label, y_label, color, alpha = 1):
color = 'tab:{}'.format(color)
# Set labels
ax.set_xlabel(x_label)
ax.set_ylabel(y_label, color=color)
ax.tick_params(axis='y', labelcolor=color)
# Plot data
ax.plot(data.index, data[field], marker='o', color=color, alpha=alpha)
# Measure index from sensor
def get_air_quality_index():
return sgp.measure_index(aht.temperature, aht.relative_humidity)
# Return the time in milliseconds
def get_ms():
return round(time.time() * 1000)
# Sample index readings over 3 minutes to ensure the sensor is fully calibrated
def sample_air_quality_index():
# Loop over each second in the range
for x in range(180):
start = get_ms()
# Show an update every 30s
if x % 30 == 0:
print(f'{x}/180 - sampling still in progress, please wait...')
# Sample the index for calibration
get_air_quality_index()
# Only sleep for what time remains in this iteration to achieve 1hz sampling
time.sleep((1000 - (get_ms() - start)) / 1000)
# After the sampling time frame, return a final reading
return get_air_quality_index()
@click.group()
def cli():
pass
@cli.command()
@click.option(
'--chart-path',
required=True,
help='Path to store chart at'
)
@click.option(
'--location',
required=True,
help='Which entry location to export information for'
)
@click.option(
'--export-type',
required=True,
type=click.Choice(['climate', 'air-quality'], case_sensitive=False),
help='Which data to export'
)
def export(chart_path, location, export_type):
# Ensure the entries.json file is not missing
if not os.path.isfile('entries.json'):
print('Error: entries.json file is missing, please run the collect command first.')
sys.exit(1)
# Load entries from JSON file and convert to DataFrames
data = get_entries(location)
# Create the figure and initial axis
fig, ax1 = plt.subplots(figsize=(10, 8))
if export_type == 'climate':
# Plot the data on two separate axes
plot_data(data, 'temperature', ax1, 'Date', 'Temperature (F)', 'red')
plot_data(data, 'humidity', ax1.twinx(), 'Date', 'Humidity %', 'blue', 0.33)
else:
# Plot the data on a separate chart for visibility
plot_data(data, 'aqi', ax1, 'Date', 'Air Quality Index (AQI)', 'green')
# Show the grid
plt.grid()
# Set the date and label formatter for the x-axis
ax1.xaxis.set_major_formatter(mdates.DateFormatter("%Y-%m-%d"))
fig.autofmt_xdate()
# Save the chart
plt.savefig(chart_path)
print('Chart saved to:', chart_path)
@cli.command()
@click.option('--location', required=True, help='Sensor location name')
def collect(location):
# Sample the air quality index
aqi = sample_air_quality_index()
# Collect climate data and convert/round once AQI is calculated
temperature = round_num(c_to_f(aht.temperature))
humidity = round_num(aht.relative_humidity)
# Save entry
try:
save_entry(location, temperature, humidity, aqi)
except:
# Print error traceback
print(traceback.format_exc())
sys.exit(1)
print('Entry saved:', temperature, 'F,', humidity, '% H,', aqi, 'AQI')
if __name__ == '__main__':
cli()
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
import openstack
from otcextensions.tests.functional import base
_logger = openstack._log.setup_logging('openstack')
class TestHost(base.BaseFunctionalTest):
def setUp(self):
super(TestHost, self).setUp()
openstack.enable_logging(debug=True, http_debug=True)
self.client = self.conn.deh
res = self.client.create_host(
name=uuid.uuid4().hex,
availability_zone='eu-de-01',
host_type='general',
quantity=1
)
assert len(res.dedicated_host_ids) == 1
host_id = res.dedicated_host_ids[0]
self.host = self.client.get_host(host_id)
def tearDown(self):
try:
if self.host.id:
self.client.delete_host(self.host)
except openstack.exceptions.SDKException as e:
_logger.warning('Got exception during clearing resources %s'
% e.message)
def test_list(self):
self.hosts = list(self.conn.deh.hosts())
self.assertGreaterEqual(len(self.hosts), 0)
if len(self.hosts) > 0:
host = self.hosts[0]
servers = self.client.servers(host=host.id)
for server in servers:
_logger.debug(server)
def test_host_types(self):
deh = self.conn.deh
host_types = list(deh.host_types('eu-de-01'))
self.assertIsNotNone(host_types)
_logger.debug(host_types)
|
# coding=utf-8
import itertools
import cv2
import numpy as np
class Distortion(object):
def __init__(self):
# K - Intrinsic camera matrix for the raw (distorted) images.
camera_matrix = [
305.5718893575089, 0, 303.0797142544728,
0, 308.8338858195428, 231.8845403702499,
0, 0, 1,
]
self.camera_matrix = np.reshape(camera_matrix, (3, 3))
# distortion parameters - (k1, k2, t1, t2, k3)
distortion_coefs = [
-0.2, 0.0305,
0.0005859930422629722, -0.0006697840226199427, 0
]
self.distortion_coefs = np.reshape(distortion_coefs, (1, 5))
# R - Rectification matrix - stereo cameras only, so identity
self.rectification_matrix = np.eye(3)
# P - Projection Matrix - specifies the intrinsic (camera) matrix
# of the processed (rectified) image
projection_matrix = [
220.2460277141687, 0, 301.8668918355899, 0,
0, 238.6758484095299, 227.0880056118307, 0,
0, 0, 1, 0,
]
self.projection_matrix = np.reshape(projection_matrix, (3, 4))
# Initialize mappings
# Used for rectification
self.mapx = None
self.mapy = None
# Used for distortion
self.rmapx = None
self.rmapy = None
def distort(self, observation):
"""
Distort observation using parameters in constructor
"""
if self.mapx is None:
# Not initialized - initialize all the transformations we'll need
self.mapx = np.zeros(observation.shape)
self.mapy = np.zeros(observation.shape)
H, W, _ = observation.shape
# Initialize self.mapx and self.mapy (updated)
self.mapx, self.mapy = cv2.initUndistortRectifyMap(self.camera_matrix,
self.distortion_coefs, self.rectification_matrix,
self.projection_matrix, (W, H), cv2.CV_32FC1)
# Invert the transformations for the distortion
self.rmapx, self.rmapy = self._invert_map(self.mapx, self.mapy)
return cv2.remap(observation, self.rmapx, self.rmapy, interpolation=cv2.INTER_NEAREST)
def _undistort(self, observation):
"""
Undistorts a distorted image using camera parameters
"""
# If mapx is None, then distort was never called
assert self.mapx is not None, "You cannot call undistort on a rectified image"
return cv2.remap(observation, self.mapx, self.mapy, cv2.INTER_NEAREST)
def _invert_map(self, mapx, mapy):
"""
Utility function for simulating distortion
Source: https://github.com/duckietown/Software/blob/master18/catkin_ws
... /src/10-lane-control/ground_projection/include/ground_projection/
... ground_projection_geometry.py
"""
H, W = mapx.shape[0:2]
rmapx = np.empty_like(mapx)
rmapx.fill(np.nan)
rmapy = np.empty_like(mapx)
rmapy.fill(np.nan)
for y, x in itertools.product(range(H), range(W)):
tx = mapx[y, x]
ty = mapy[y, x]
tx = int(np.round(tx))
ty = int(np.round(ty))
if (0 <= tx < W) and (0 <= ty < H):
rmapx[ty, tx] = x
rmapy[ty, tx] = y
self._fill_holes(rmapx, rmapy)
return rmapx, rmapy
def _fill_holes(self, rmapx, rmapy):
"""
Utility function for simulating distortion
Source: https://github.com/duckietown/Software/blob/master18/catkin_ws
... /src/10-lane-control/ground_projection/include/ground_projection/
... ground_projection_geometry.py
"""
H, W = rmapx.shape[0:2]
R = 2
F = R * 2 + 1
def norm(_):
return np.hypot(_[0], _[1])
deltas0 = [(i - R - 1, j - R - 1) for i, j in itertools.product(range(F), range(F))]
deltas0 = [x for x in deltas0 if norm(x) <= R]
deltas0.sort(key=norm)
def get_deltas():
return deltas0
holes = set()
for i, j in itertools.product(range(H), range(W)):
if np.isnan(rmapx[i, j]):
holes.add((i, j))
while holes:
nholes = len(holes)
nholes_filled = 0
for i, j in list(holes):
# there is nan
nholes += 1
for di, dj in get_deltas():
u = i + di
v = j + dj
if (0 <= u < H) and (0 <= v < W):
if not np.isnan(rmapx[u, v]):
rmapx[i, j] = rmapx[u, v]
rmapy[i, j] = rmapy[u, v]
nholes_filled += 1
holes.remove((i, j))
break
if nholes_filled == 0:
break
|
import nmslib
import numpy as np
def search(system_parameters, DB_features, query_features):
    ####################################################
    # input  -> system_parameters: system parameters
    #           DB_features: list of feature vectors for the images in the DB
    #           query_features: list of feature vectors for the query images
    # output -> result: list holding the ids and distances of the search results
    ####################################################
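    # Note: each nmslib HNSW index is built only on the first iteration (i == 0) and reused for the remaining queries.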
result = []
if system_parameters["sys"]["search_method"] == "concat":
if system_parameters["sys"]["custom"] == False:
if len(system_parameters["sys"]["use_feature"]) == 1:
for i, query_feature in enumerate(query_features[0]):
if i == 0:
index = nmslib.init(method="hnsw", space="l2")
index.addDataPointBatch(DB_features[0])
index.createIndex({"post":2},print_progress=True)
search_ids, distances = index.knnQuery(query_feature, k=system_parameters["sys"]["search_num"])
result.append([])
result[i].append(search_ids)
result[i].append(distances)
elif len(system_parameters["sys"]["use_feature"]) == 2:
for i, (x,y) in enumerate(zip(query_features[0], query_features[1])):
if i == 0:
index = nmslib.init(method="hnsw", space="l2")
index.addDataPointBatch([np.concatenate([x_,y_]) for (x_,y_) in zip(DB_features[0], DB_features[1])])
index.createIndex({"post":2},print_progress=True)
search_ids, distances = index.knnQuery(np.concatenate([x,y]), k=system_parameters["sys"]["search_num"])
result.append([])
result[i].append(search_ids)
result[i].append(distances)
        else:  # custom search
if len(system_parameters["sys"]["use_feature"]) == 2:
for i, pair in enumerate(system_parameters["sys"]["custom_pair"]):
if i == 0:
index = nmslib.init(method="hnsw", space="l2")
index.addDataPointBatch([np.concatenate([x_,y_]) for (x_,y_) in zip(DB_features[0], DB_features[1])])
index.createIndex({"post":2},print_progress=True)
search_ids, distances = index.knnQuery(np.concatenate([query_features[0][pair[0]],query_features[1][pair[1]]]), k=system_parameters["sys"]["search_num"])
result.append([])
result[i].append(search_ids)
result[i].append(distances)
print("-----------------------------------------------------------------------")
else:
print("Please set color and type in use_feature")
return result
def search2(system_parameters, DB_features, query_features):
    ####################################################
    # input  -> system_parameters: system parameters
    #           DB_features: list of feature vectors for the images in the DB
    #           query_features: list of feature vectors for the query images
    # output -> result: list holding the ids and distances of the search results
    ####################################################
    # Dimensionality-reduction concatenation method
    # Compute the average distance of the 10 nearest neighbours
if system_parameters["sys"]["custom"] == False:
dis_aves1 = []
for i, query_feature in enumerate(query_features[0]):
if i == 0:
index = nmslib.init(method="hnsw", space="l2")
index.addDataPointBatch(DB_features[0])
index.createIndex({"post":2},print_progress=False)
search_ids, distances = index.knnQuery(query_feature, k=10)
dis_aves1.append(np.average(distances))
dis_aves2 = []
for i, query_feature in enumerate(query_features[1]):
if i == 0:
index = nmslib.init(method="hnsw", space="l2")
index.addDataPointBatch(DB_features[1])
index.createIndex({"post":2},print_progress=False)
search_ids, distances = index.knnQuery(query_feature, k=10)
dis_aves2.append(np.average(distances))
else:
dis_aves1 = []
for i, pair in enumerate(system_parameters["sys"]["custom_pair"]):
if i == 0:
index = nmslib.init(method="hnsw", space="l2")
index.addDataPointBatch(DB_features[0])
index.createIndex({"post":2},print_progress=False)
search_ids, distances = index.knnQuery(query_features[0][pair[0]], k=10)
dis_aves1.append(np.average(distances))
dis_aves2 = []
for i, pair in enumerate(system_parameters["sys"]["custom_pair"]):
if i == 0:
index = nmslib.init(method="hnsw", space="l2")
index.addDataPointBatch(DB_features[1])
index.createIndex({"post":2},print_progress=False)
search_ids, distances = index.knnQuery(query_features[1][pair[1]], k=10)
dis_aves2.append(np.average(distances))
    # Assign points based on the per-dimension standard deviation of the DB features
db_std1 = np.argsort(np.std(DB_features[0], axis=0))
db_std2 = np.argsort(np.std(DB_features[1], axis=0))
    # Compute the maximum possible point total
point_max = 0
for a in range(len(query_features[0][0])):
point_max += a
    # Get the ratio of the average distances
ave_ratio = {}
for i, (dis_ave1, dis_ave2) in enumerate(zip(dis_aves1, dis_aves2)):
ave_ratio[i] = {}
if dis_ave2 > dis_ave1:
# if (dis_ave1/dis_ave2) <= 0.33:
# ave_ratio[i]["point"] = point_max *0.7
# else:
# ave_ratio[i]["point"] = point_max
# ave_ratio[i]["point"] = point_max * 0.75
ave_ratio[i]["point"] = point_max * point_magnification(dis_ave2, dis_ave1)
ave_ratio[i]["target"] = 1
else:
# if (dis_ave2/dis_ave1) <= 0.33:
# ave_ratio[i]["point"] = point_max * 0.7
# else:
# ave_ratio[i]["point"] = point_max
ave_ratio[i]["point"] = point_max * point_magnification(dis_ave1, dis_ave2)
# ave_ratio[i]["point"] = point_max * 0.75
ave_ratio[i]["target"] = 2
    # Select the feature indices to use for each query
for i in range(len(dis_aves1)):
indexs = []
point_calc = ave_ratio[i]["point"]
if ave_ratio[i]["target"] == 1:
# for j in np.sort(np.arange(np.amax(db_std1)+1)):
for j in np.sort(np.arange(np.amax(db_std1)+1))[::-1]:
if point_calc - j >= 0:
point_calc -= j
indexs.append(int(np.where(db_std1 == j)[0]))
else:
break
indexs.sort()
ave_ratio[i]["indexs"] = indexs
elif ave_ratio[i]["target"] == 2:
# for j in np.sort(np.arange(np.amax(db_std2)+1)):
for j in np.sort(np.arange(np.amax(db_std2)+1))[::-1]:
if point_calc - j >= 0:
point_calc -= j
indexs.append(int(np.where(db_std2 == j)[0]))
else:
break
indexs.sort()
ave_ratio[i]["indexs"] = indexs
result = []
if system_parameters["sys"]["search_method"] == "concat":
if system_parameters["sys"]["custom"] == False:
if len(system_parameters["sys"]["use_feature"]) == 1:
for i, query_feature in enumerate(query_features[0]):
if i == 0:
index = nmslib.init(method="hnsw", space="l2")
index.addDataPointBatch(DB_features[0])
index.createIndex({"post":2},print_progress=False)
search_ids, distances = index.knnQuery(query_feature, k=system_parameters["sys"]["search_num"])
result.append([])
result[i].append(search_ids)
result[i].append(distances)
elif len(system_parameters["sys"]["use_feature"]) == 2:
for i, (x,y) in enumerate(zip(query_features[0], query_features[1])):
index = nmslib.init(method="hnsw", space="l2")
if ave_ratio[i]["target"] == 1:
index.addDataPointBatch([np.concatenate([x_[ave_ratio[i]["indexs"]],y_]) for (x_,y_) in zip(DB_features[0], DB_features[1])])
elif ave_ratio[i]["target"] == 2:
index.addDataPointBatch([np.concatenate([x_,y_[ave_ratio[i]["indexs"]]]) for (x_,y_) in zip(DB_features[0], DB_features[1])])
index.createIndex({"post":2},print_progress=False)
if ave_ratio[i]["target"] == 1:
search_ids, distances = index.knnQuery(np.concatenate([x[ave_ratio[i]["indexs"]],y]), k=system_parameters["sys"]["search_num"])
elif ave_ratio[i]["target"] == 2:
search_ids, distances = index.knnQuery(np.concatenate([x,y[ave_ratio[i]["indexs"]]]), k=system_parameters["sys"]["search_num"])
result.append([])
result[i].append(search_ids)
result[i].append(distances)
        else:  # custom search
if len(system_parameters["sys"]["use_feature"]) == 2:
for i, pair in enumerate(system_parameters["sys"]["custom_pair"]):
index = nmslib.init(method="hnsw", space="l2")
if ave_ratio[i]["target"] == 1:
index.addDataPointBatch([np.concatenate([x_[ave_ratio[i]["indexs"]],y_]) for (x_,y_) in zip(DB_features[0], DB_features[1])])
elif ave_ratio[i]["target"] == 2:
index.addDataPointBatch([np.concatenate([x_,y_[ave_ratio[i]["indexs"]]]) for (x_,y_) in zip(DB_features[0], DB_features[1])])
index.createIndex({"post":2},print_progress=False)
if ave_ratio[i]["target"] == 1:
search_ids, distances = index.knnQuery(np.concatenate([query_features[0][pair[0]][ave_ratio[i]["indexs"]],query_features[1][pair[1]]]), k=system_parameters["sys"]["search_num"])
elif ave_ratio[i]["target"] == 2:
search_ids, distances = index.knnQuery(np.concatenate([query_features[0][pair[0]],query_features[1][pair[1]][ave_ratio[i]["indexs"]]]), k=system_parameters["sys"]["search_num"])
result.append([])
result[i].append(search_ids)
result[i].append(distances)
print("-----------------------------------------------------------------------")
else:
print("Please set color and type in use_feature")
return result
def point_magnification(bigger,smaller):
alpha = 3
x = bigger/smaller
if x >= alpha:
return 0.8
else:
        # linear
        return -0.2/alpha*x + 1
        # quadratic_1
        # return -0.2/alpha/alpha*x*x + 1
        # quadratic_2
        # return -0.2/(alpha*alpha-2*alpha*alpha*alpha)*x*x + 0.4*alpha/(alpha*alpha-2*alpha*alpha*alpha)*x + 1
|
confluence_space_name = 'DEMO'
confluence_space_home_page_name = 'DEMO Home'
confluence_name_of_root_directory = 'Atomic Threat Coverage'
md_name_of_root_directory = 'Atomic_Threat_Coverage'
list_of_detection_rules_directories = ['../detectionrules']
list_of_triggering_directories = ['../atomics']
confluence_rest_api_url = 'https://atomicthreatcoverage.atlassian.net/wiki/rest/api/'
|
# https://wiki.xxiivv.com/site/varvara.html
# https://wiki.xxiivv.com/site/uxntal.html
# https://wiki.xxiivv.com/site/uxntal_cheatsheet.html
# https://wiki.xxiivv.com/site/uxntal_reference.html
# https://wiki.xxiivv.com/site/uxntal_stacking.html
# https://wiki.xxiivv.com/site/uxntal_macros.html
from rich.console import Console
from rich.traceback import install
import fileinput
import argparse
console = Console(markup=False)
python_print = print
print = console.print
install(show_locals=True)
indent_width = 4
cur_indent = 0
cmd_stack = []
ops = """
brk 0x00 a b c m[pc+1]
lit 0x00
inc 0x01
pop 0x02
dup 0x03
nip 0x04
swp 0x05
ovr 0x06
rot 0x07
equ 0x08
neq 0x09
gth 0x0a
lth 0x0b
jmp 0x0c
jcn 0x0d
jsr 0x0e
sth 0x0f
ldz 0x10
stz 0x11
ldr 0x12
str 0x13
lda 0x14
sta 0x15
dei 0x16
deo 0x17
add 0x18
sub 0x19
mul 0x1a
div 0x1b
and 0x1c
ora 0x1d
eor 0x1e
sft 0x1f
""".strip()
op_table = {}
reverse_op_table = {}
for line in ops.split('\n'):
op, code, *comment = line.split(' ', 2)
n = int(code, 16)
op_table[op] = n
reverse_op_table[n] = op
class UxnRom():
def __init__(self):
self.rom = bytearray()
self.pc = 0
self.scope = None
self.refs = []
self.labels = {}
self.debug = False
def __repr__(self):
return 'Rom: ' + ' '.join(f'{c:02x}' for c in self.rom[0x100:])
def write(self, token, note=''):
if note and self.debug:
print(f"{note:6s} {token}")
first_char = token[:1]
if first_char == '#':
n = int(token[1:], 16)
assert n >= 0
assert n <= 0xffff
if n > 0xff:
self.write_op('lit2')
self.write_short(n)
else:
self.write_op('lit')
self.write_byte(n)
elif first_char == '|':
n = int(token[1:], 16)
assert n < 0x10000
self.pc = n
elif first_char == '@':
label_name = token[1:]
self.make_label(label_name)
self.scope = label_name
elif first_char == '&': # sub-label define
assert self.scope != None
sub_name = token[1:]
self.make_label(self.sub_label(sub_name))
elif first_char == ';': # literal address absolute
self.make_reference(token, self.pc)
self.write_lit_short(0xffff)
elif first_char == ',': # literal address relative
self.make_reference(token, self.pc)
self.write_lit_byte(0xff)
elif first_char == '"':
for b in bytes(token[1:], 'ascii'):
self.write_byte(b)
elif token[:3].lower() in op_table:
self.write_op(token)
elif token == 'rpn':
pass
else:
n = int(token, 16)
if n > 0xff:
self.write_short(n)
else:
self.write_byte(n)
def sub_label(self, name):
label_name = f"{self.scope}/{name}"
return label_name
def make_label(self, label_name):
assert label_name not in self.labels
self.labels[label_name] = self.pc
def make_reference(self, label, addr):
rune = label[0]
if label[1] == '&':
ref_name = self.sub_label(label[2:])
else:
ref_name = label[1:]
self.refs.append([ref_name, rune, addr])
def write_byte(self, n):
assert n >= 0
assert n <= 0xff
delta = self.pc - len(self.rom) + 1
if delta > 0:
self.rom += bytes(delta)
self.rom[self.pc] = n
self.pc += 1
def write_signed_byte(self, n):
if n < 0:
u = 255 + n
elif n > 127:
assert False
else:
u = n
self.write_byte(u)
def write_short(self, n):
assert n >= 0
assert n <= 0xffff
low = n & 0x00ff
high = n >> 8
self.write_byte(high)
self.write_byte(low)
def write_lit_byte(self, n):
self.write_op('lit')
self.write_byte(n)
def write_lit_short(self, n):
self.write_op('lit2')
self.write_short(n)
def write_op(self, op):
lhs, rhs = op[:3], op[3:]
if lhs == 'lit': # force keep for lit
if 'k' not in rhs:
rhs += 'k'
code = op_table[lhs.lower()]
for c in rhs:
if c == 'k':
code = code | 0x80
elif c == 'r':
code = code | 0x40
elif c == '2':
code = code | 0x20
else:
raise SyntaxError(f"unknown mode: {c}")
self.write_byte(code)
def resolve(self):
# print(self.labels)
for v in self.refs:
label, rune, ref_addr = v
label_addr = self.labels[label]
# print(label, label_addr)
# print(rune, ref_addr)
if rune == '.':
assert False
elif rune == ',':
pc = self.pc
self.pc = ref_addr + 1
delta = label_addr - self.pc - 1
self.write_signed_byte(delta)
elif rune == ';':
self.pc = ref_addr + 1
self.write_short(label_addr)
elif rune == ':':
assert False
else:
assert False
def write_file(self, filename):
with open(filename, 'wb') as f:
f.write(self.rom[0x100:])
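# Tokeniser: splits the source text into tokens; runs of spaces and newlines are collapsed,
# '(' ')' ',' are single-character tokens, and a word immediately followed by '(' is reported
# as an 'ie/neoteric' marker followed by the word.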
class Tokeniser:
def __init__(self, data):
self.i = 0
self.queued_tokens = []
self.data = data
def push_token(self, token):
self.queued_tokens.append(token)
def peek_token(self):
if self.queued_tokens:
t = self.queued_tokens[-1]
return t
t = self.read_token()
self.queued_tokens.append(t)
return t
def read_token(self):
if self.queued_tokens:
t = self.queued_tokens.pop()
return t
start_pos = self.i
try:
c = self.data[self.i]
if c == ' ':
while self.data[self.i] in ' ':
self.i += 1
elif c == '\n':
while self.data[self.i] in '\n':
self.i += 1
elif c == '"':
self.i += 1
while self.data[self.i] not in '"':
self.i += 1
self.i += 1
elif c in '(),':
self.i += 1
else:
while self.data[self.i] not in ' \n(),':
self.i += 1
except IndexError:
pass
t = self.data[start_pos:self.i]
if t.startswith('\n'):
return '\n'
try:
c = self.data[self.i]
if c == '(':
self.queued_tokens.append(t)
t = 'ie/neoteric'
while self.data[self.i] in ' ':
self.i += 1
except IndexError:
pass
return t
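# IndentParser: wraps the Tokeniser and turns indentation changes into synthetic
# 'ie/indent', 'ie/dedent' and 'ie/newline' tokens (indent width is 4 spaces).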
class IndentParser:
def __init__(self, data):
self.tokens = Tokeniser(data)
self.indent_width = 4
self.new_indent = 0
self.cur_indent = 0
self.skip_blank_lines()
def skip_blank_lines(self):
while self.tokens.peek_token() == '\n':
_ = self.tokens.read_token()
def read_token(self):
t = self.tokens.read_token()
# print(f"t1 = {repr(t)}")
while t == '\n':
nt = self.tokens.peek_token()
if nt.startswith(' '):
space_token = self.tokens.read_token()
spaces = len(space_token)
assert not spaces % self.indent_width
if self.tokens.peek_token() == '\n':
pass
else:
self.new_indent = spaces // self.indent_width
else:
self.new_indent = 0
# print(f"new_indent = {self.new_indent}")
diff = self.new_indent - self.cur_indent
# print(f"diff = {diff}")
nt = self.tokens.peek_token()
# print(f"nt = {repr(nt)}")
# print(f"2 {self.cmd_stack}")
if nt == '\\':
assert False
elif diff > 1:
assert False
elif diff == 1:
t = 'ie/indent'
self.cur_indent += 1
elif diff == 0:
t = 'ie/newline'
# print(f"3t = {repr(t)}")
else:
self.cur_indent += diff
self.tokens.push_token("ie/newline")
for j in range(abs(diff)):
self.tokens.push_token("ie/dedent")
t = self.tokens.read_token()
if t == '':
ci = self.cur_indent
assert ci == 0
# print(f"t2 = {repr(t)}")
return t
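# ExpressionParser: expands neoteric calls like name(...) and infix groups like (a + b + c)
# into postfix (RPN) order for the assembler.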
class ExpressionParser:
def __init__(self, data):
self.ip = None
self.stack = []
self.special_forms = []
self.queued_tokens = []
self.ip = IndentParser(data)
def read_token(self):
if self.queued_tokens:
t = self.queued_tokens.pop(0)
return t
t = self.ip.read_token()
# print(f"h {t= }")
if t == '':
s = self.stack
assert not self.stack
return ''
if t == 'ie/neoteric':
name = self.read_token()
self.stack.append(name)
self.stack.append(t)
return self.read_token()
elif t == '(':
self.parse_infix()
prev = self.stack[-1] if self.stack else None
if prev == 'ie/neoteric':
_ = self.stack.pop()
name = self.stack.pop()
self.queued_tokens.append(name)
return self.read_token()
assert False
return t
def parse_infix(self):
stack = []
op = None
i = 0
while True:
t = self.ip.read_token()
if t == '(':
assert False
elif t == ')':
if op:
stack.append(op)
break
elif t == ',':
if op:
stack.append(op)
self.queued_tokens.extend(stack)
stack = []
i = -1
op = None
elif t == '':
assert False
elif i % 2 == 1:
if op:
assert t == op
else:
op = t
else:
stack.append(t)
i += 1
n = len(stack)
if n == 0:
pass
elif n == 1:
self.queued_tokens.append(stack[0])
else:
self.queued_tokens.extend(stack)
def assemble(rom, data):
global cur_indent
xp = ExpressionParser(data)
xp.special_forms.append('inline')
xp.special_forms.append('org')
xp.special_forms.append('label')
xp.special_forms.append('sub-label')
xp.special_forms.append('lit-addr')
xp.special_forms.append('rel-addr-sub')
while True:
t = xp.read_token()
if t == '':
break
print(t)
inline_words = {}
queue = []
def next_word():
if queue:
return queue.pop(0)
return xp.read_token()
def read_until(end_marker):
body = []
while True:
w = next_word()
if w == end_marker:
break
elif w == '':
break
else:
body.append(w)
return body
while True:
w = next_word()
# print(f"{w = }")
# print(f"{queue = }")
# print(f"{w = } {queue[:3]}")
if w == '':
# print("break")
            break
elif w in xp.special_forms:
end_marker = f'end-{w}'
body = read_until(end_marker)
if w == 'inline':
name, *body = body
inline_words[name] = body
elif w == 'org':
offset, *body = body
queue = body + queue
cmd = '|' + offset
rom.write(cmd, 'set pc')
elif w == 'label':
name, *body = body
queue = body + queue
cmd = f'@{name}'
rom.write(cmd, 'label')
elif w == 'sub-label':
name, *body = body
queue = body + queue
cmd = f'&{name}'
rom.write(cmd, 'sub-label')
elif w == 'lit-addr':
name, *body = body
queue = body + queue
cmd = f';{name}'
rom.write(cmd, 'label')
elif w == 'rel-addr-sub':
name, *body = body
queue = body + queue
cmd = f',&{name}'
rom.write(cmd, 'label')
else:
assert False
elif w in inline_words:
body = inline_words[w]
assert body
queue = body + queue
elif w[0] == '"':
s = w[1:-1]
for b in bytes(s, 'ascii'):
rom.write_byte(b)
rom.write_byte(0)
else:
rom.write(w, 'asm')
def disassemble(filename):
with open(filename, 'rb') as f:
rom = bytearray(f.read())
rom_iter = iter(rom)
i = 0
while True:
try:
b = next(rom_iter)
except StopIteration:
break
data = [b]
base_op_code = b & 0b00011111
base_op = reverse_op_table[base_op_code]
op = base_op
short_mode = False
if b & 0b10000000:
op += 'k'
if b & 0b01000000:
op += 'r'
if b & 0b00100000:
short_mode = True
op += '2'
if base_op == 'lit':
if short_mode:
sep = ' '
high = next(rom_iter)
low = next(rom_iter)
n = (high << 8) + low
data += [high, low]
op = f"#{n:04x}"
elif b & 0b10000000:
n = next(rom_iter)
data += [n]
op = f"#{n:02x}"
else:
op = 'brk'
s = ' '.join(f"{b:02x}" for b in data)
a = ' '.join(repr(chr(b)) for b in data)
print(f"{i:04x} | {s:8} | {a:20} | {op:5} |")
i += len(data)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="uxn tool")
parser.add_argument("--assemble")
parser.add_argument("--disassemble")
args = parser.parse_args()
if args.disassemble:
disassemble(args.disassemble)
elif args.assemble:
filename = args.assemble
with open(filename) as f:
data = f.read()
rom = UxnRom()
assemble(rom, data)
rom.resolve()
rom.write_file('out.rom')
print("done")
else:
assert False
|
# -*- coding: utf-8 -*-
"""Module for parsing ISA-Tab files."""
# Make all models and the ``*Reader`` classes visible within this module.
from .headers import * # noqa: F403, F401
from .models import * # noqa: F403, F401
from .parse_assay_study import ( # noqa: F401
AssayReader,
AssayRowReader,
StudyReader,
StudyRowReader,
)
from .parse_investigation import InvestigationReader # noqa: F401
from .validate_assay_study import AssayValidator, StudyValidator # noqa: F401
from .validate_investigation import InvestigationValidator # noqa: F401
from .write_assay_study import AssayWriter, StudyWriter # noqa: F401
from .write_investigation import InvestigationWriter # noqa: F401
|
import re
from icon_validator.rules.validator import KomandPluginValidator
from icon_validator.exceptions import ValidationException
class HelpValidator(KomandPluginValidator):
taskExist = False
HELP_HEADERS_LIST = [
"# Description",
"# Key Features",
"# Requirements",
"# Documentation",
"## Setup",
"## Technical Details",
"### Actions",
"### Triggers",
"### Custom Output Types",
"## Troubleshooting",
"# Version History",
"# Links",
"## References"
]
@staticmethod
def validate_help_exists(spec):
if "help" in spec:
raise ValidationException("Help section should exist in help.md and not in the plugin.spec.yaml file.")
@staticmethod
def validate_version_history(help_str):
if "- Initial plugin" not in help_str:
raise ValidationException("Initial plugin version line is missing: 1.0.0 - Initial plugin.")
if "Support web server mode" not in help_str and "1.0.0 - Initial plugin" not in help_str:
# Match legacy versioning which indicates this plugin came before web server mode existed
if "* 0." in help_str:
# Takes advantage of the fact that versioning used to start from 0.1.0 instead of 1.0.0
raise ValidationException(
"Initial plugin was released prior to schema V2 but versioning history."
"does not document the upgrade to web server mode: Support web server mode."
)
@staticmethod
def validate_same_actions_title(spec, help_):
if "actions" in spec:
HelpValidator.validate_same_actions_loop(spec["actions"], help_)
if "triggers" in spec:
HelpValidator.validate_same_actions_loop(spec["triggers"], help_)
if "tasks" in spec:
HelpValidator.validate_same_actions_loop(spec["tasks"], help_)
@staticmethod
def validate_same_actions_loop(section, help_str):
for i in section:
if "title" in section[i]:
if f"#### {section[i]['title']}" not in help_str:
raise ValidationException(f"Help section is missing title of: #### {section[i]['title']}")
@staticmethod
def remove_example_output(help_content):
example_outputs = re.findall(r"Example output:\n\n```\n.*?```\n\n", help_content, re.DOTALL)
for example_output in example_outputs:
help_content = help_content.replace(example_output, "")
return help_content
@staticmethod
def validate_title_spelling(spec, help_):
if "title" in spec:
title = spec["title"]
lower_title = title.lower()
help_ = HelpValidator.remove_example_output(help_)
for line in help_.split("\n"):
lower_line = line.lower()
if lower_title in lower_line:
if title not in line:
if lower_line[lower_line.find(title.lower()) - 1].isspace():
if line.startswith("$"):
pass
elif line.startswith(">>>"):
pass
else:
raise ValidationException(
"Help section contains non-matching title in line: {}".format(line))
@staticmethod
def validate_help_headers(help_str):
        # If a plugin without tasks has not been regenerated, its help.md will not have a Tasks section.
        # Only raise an exception if plugin.spec.yaml contains a task and help.md does not.
if HelpValidator.taskExist and "### Tasks" not in help_str:
raise ValidationException("Help section is missing header: ### Tasks")
help_headers_errors = []
for header in HelpValidator.HELP_HEADERS_LIST:
if header not in help_str:
help_headers_errors.append(f"Help section is missing header: {header}")
if help_headers_errors:
raise ValidationException("\n".join(help_headers_errors))
@staticmethod
def validate_duplicate_headings(help_raw: str):
header_errors = []
for header in HelpValidator.HELP_HEADERS_LIST:
normalize_header = header.strip(" #")
pattern = re.compile(f"#[ ]*{normalize_header}")
if len(pattern.findall(help_raw)) > 1:
header_errors.append(f"Please check {header} headings and remove duplicates.")
if header_errors:
joined_errors = "\n".join(header_errors)
raise ValidationException(f"More than one headings in type was found. \n{joined_errors}")
def validate(self, spec):
HelpValidator.validate_help_exists(spec.spec_dictionary())
HelpValidator.validate_help_headers(spec.raw_help())
if spec.spec_dictionary().get("tasks"):
HelpValidator.taskExist = True
HelpValidator.validate_version_history(spec.raw_help())
HelpValidator.validate_same_actions_title(spec.spec_dictionary(), spec.raw_help())
HelpValidator.validate_title_spelling(spec.spec_dictionary(), spec.raw_help())
HelpValidator.validate_duplicate_headings(spec.raw_help())
|
from bs4 import BeautifulSoup
import csv
import io
import pandas as pd
import re
from datetime import date
from datetime import datetime
import requests
from colorama import Back, Fore, Style
# This function performs the web scraping needed to extract data from the tarifaluzhora website
def scrapping (tarifa, day = str(date.today())):
# Web to scrap
url = 'https://tarifaluzhora.es/?tarifa=' + tarifa
page = requests.get(url)
soup = BeautifulSoup(page.text, "html.parser")
# Web scraping to price & description
price_ = soup.findAll("span", {"itemprop": "price"})
hours_ = soup.findAll("span", {"itemprop": "description"})
# Get the values of price & hours with a for loop
price_hour_ = [price.get_text() for price in price_]
schedule_ = [time.get_text() for time in hours_]
    # Create a dataframe (df) with two columns for now: price and schedule
    df = pd.DataFrame.from_dict({'precio':price_hour_,'horario':schedule_})
    # Add two more columns: 'hora' holds the first two digits of the schedule (the hour),
    # so we can operate on the hours if necessary.
    # 'tarifa' holds the chosen tariff.
df['hora'] = [int(x[:2]) for x in df['horario']]
df['tarifa'] = tarifa
df['minimo'] = df['precio'].min()
df['precio'] = [re.sub(r'/[k][W][h]','', str(x)) for x in df['precio']]
#df['precio'] = [re.sub(r'\€\/[k][W][h]','', str(x)) for x in df['precio']]
df['horario'] = [re.sub(r'[:]','', str(x)) for x in df['horario']]
#df['minimo'] = [re.sub(r'\€\/[k][W][h]','', str(x)) for x in df['minimo']]
return df
def main():
print("¿Sobre qué tarifa quieres saber el precio más económico?")
choice = input(Fore.CYAN + "Puedes elegir entre: coche_electrico, normal, discriminacion ")
#choice = input(Fore.WHITE + "¿De qué tarifa quieres saber el precio? ")
#if choice == "coche_electrico":
df = scrapping(choice)
df = df.filter(items = ['tarifa', 'precio','hora'])
df = df.groupby("precio").min().reset_index()
if df['hora'][0] <= 12:
print(Fore.GREEN + f"El precio más barato para la tarifa {choice} es de, {df.precio[0]} y la hora a la {df.hora[0]} am.")
print(Style.RESET_ALL)
else:
print(Fore.GREEN + f"El precio más barato para la tarifa {choice} es de, {df.precio[0]} y la hora a las {df.hora[0]} pm.")
print(Style.RESET_ALL)
if __name__ == "__main__":
    main()
|
"""This declares the protocol routing for Ghostwriter."""
# Django & Other 3rd Party Libraries
from channels.auth import AuthMiddlewareStack
from channels.routing import ProtocolTypeRouter, URLRouter
# Ghostwriter Libraries
import ghostwriter.home.routing
import ghostwriter.oplog.routing
application = ProtocolTypeRouter(
{
# http->django views is added by default
"websocket": AuthMiddlewareStack(
URLRouter(ghostwriter.home.routing.websocket_urlpatterns +
ghostwriter.oplog.routing.websocket_urlpatterns)
),
}
)
|
from moonleap.session import get_session
def _get_extended_scope_names(root_block):
scope_names = []
for block in root_block.get_blocks(include_children=True):
for scope_name in block.scope_names:
if scope_name not in scope_names:
scope_names.append(scope_name)
return scope_names
def add_meta_data_to_blocks(blocks):
scope_manager = get_session().scope_manager
for block in blocks:
block.set_scopes(
[
scope_manager.get_scope(scope_name)
for scope_name in _get_extended_scope_names(block)
]
)
for block in blocks:
child_blocks = block.get_blocks(include_children=True, include_self=False)
parent_blocks = block.get_blocks(include_parents=True, include_self=False)
block.competing_blocks = list(child_blocks)
for parent_block in parent_blocks:
sibling_blocks = [x for x in parent_block.child_blocks if x is not block]
block.competing_blocks += list(sibling_blocks)
|
#!/usr/bin/python
import sys
from lxml import html
card = ''; expectUserResponse = False
tree = html.fromstring(sys.stdin.read())
speech = html.tostring(tree.xpath('//speak')[0])
subtree = tree.xpath('//body/p')
try:
card = subtree[0].xpath('string()')
if subtree[1].xpath('string()') == "False":
expectUserResponse = True
except:
card = ''; expectUserResponse = False
if card in ("#Name", "#Email"):
response = {
"payload": {
"google": {
"expectUserResponse": True,
"systemIntent": {
"intent": "actions.intent.SIGN_IN",
"data": {
"@type": "type.googleapis.com/google.actions.v2.SignInValueSpec"
}
}
}
}
}
else:
response = {
"payload": {
"google": {
"expectUserResponse": expectUserResponse,
"richResponse": {
"items": [
{
"simpleResponse": {
"textToSpeech": speech,
"displayText": card
}
}
]
}
}
}
}
print response
|
# -*- coding: utf-8 -*-
"""
Function: Transform the four corners of the bounding box from one frame to another.
@author: Wenbo Zhang
"""
import cv2
import numpy as np
from skimage import transform as tf
def applyGeometricTransformation(startXs, startYs, newXs, newYs, bbox):
# (INPUT) startXs: N × F matrix
# (INPUT) startYs: N × F matrix
# (INPUT) newXs: N × F matrix
# (INPUT) newYs: N × F matrix
# (INPUT) bbox: F × 4 × 2 matrix
# (OUTPUT) Xs: N1 × F matrix
# (OUTPUT) Ys: N1 × F matrix
# (OUTPUT) newbbox: F × 4 × 2 matrix
# (PARAMETER) N: Number of features in an object
# (PARAMETER) F: Number of objects you would like to track
# Initialization
N, F = startXs.shape
count = 0
Xs = np.zeros([N,F], dtype=np.int)
Ys = np.zeros([N,F], dtype=np.int)
startXsTemp = np.zeros([N,F], dtype=np.int)
startYsTemp = np.zeros([N,F], dtype=np.int)
newbbox = np.zeros([F, 4, 2], dtype=np.int)
# Calculate matrix difference
diffx = newXs - startXs
diffy = newYs - startYs
matrixDistance = np.sqrt(diffx**2 + diffy**2)
correspondPointDistantThreshold = 4
    # Keep only feature points whose distances are below the threshold (the rest are dropped)
for j in range(F):
for i in range(N):
if matrixDistance[i][j] < correspondPointDistantThreshold:
Xs[count][j] = newXs[i][j]
Ys[count][j] = newYs[i][j]
startXsTemp[count][j] = startXs[i][j]
startYsTemp[count][j] = startYs[i][j]
count += 1
count = 0
# Resize output variables
maxCount = np.max(sum(matrixDistance < correspondPointDistantThreshold))
Xs = Xs[:maxCount][:]
Ys = Ys[:maxCount][:]
startXsTemp = startXsTemp[:maxCount][:]
startYsTemp = startYsTemp[:maxCount][:]
# Trim and resize
for k in range(F):
X = np.trim_zeros(Xs[:,k], trim='b')
Y = np.trim_zeros(Ys[:,k], trim='b')
startX = startXsTemp[:len(X),k]
startY = startYsTemp[:len(X),k]
# bounding box
src = np.vstack([startX, startY]).T
dst = np.vstack([X, Y]).T
x,y,w,h = cv2.boundingRect(dst)
offset = 8
Xbox = [x-offset, x+w+2*offset, x+w+2*offset, x-offset]
Ybox = [y-offset, y-offset, y+h+2*offset, y+h+2*offset]
newbbox[k,:,:] = np.vstack([Xbox, Ybox]).T
#tform = tf.estimate_transform('similarity', src, dst)
#box = tform(bbox[k,:,:])
#newbbox[k,:,:] = box
    return Xs, Ys, newbbox
|
import wx
import re
import os, os.path
import cPickle as pickle
# directory containing parsed opinions
OPINION_PATH = r"C:\Users\Daniel\Dropbox\Class_Files\CBH_301\Word_Cloud\supreme_court_opinions\test_output\test_opinions"
PICKLE_PATH = r"C:\Users\Daniel\Dropbox\Class_Files\CBH_301\Word_Cloud\supreme_court_opinions\test_output\test_pickled"
class WordCloudInitDialog(wx.MessageDialog):
'''
Daniel Klein
Computer-Based Honors Program
The University of Alabama
1.20.2014
'''
def __init__(self, parent, message,
caption, style=wx.OK):
self.opinion_list = []
# TODO: add functionality for packing opinions the first time
wx.MessageDialog.__init__(self, parent, message, caption, style=wx.OK)
# show info about loading of opinion files, then
# display a button to click when user wants to continue
'''self.panel = wx.Panel(self, -1)
self.main_box = wx.BoxSizer(wx.VERTICAL)
self.info = wx.StaticText(self.panel, -1, "Loading opinions?")
self.info.SetFont(wx.Font(8, wx.SWISS, wx.NORMAL, wx.BOLD))
self.info.SetSize(self.info.GetBestSize())
self.main_box.Add(self.info, flag = wx.ALIGN_CENTER)
# TODO: make "load" button unclickable after loading first time
load_opinions = wx.Button(self.panel, wx.ID_CLOSE, "Load")
load_opinions.Bind(wx.EVT_BUTTON, self.unpack_opinions)
self.main_box.Add(load_opinions, flag = wx.ALIGN_CENTER)
self.panel.SetSizer(self.main_box)
self.panel.Layout()'''
def unpack_opinions(self, event):
'''
Unpickle all of the Document files from PICKLE_PATH into
Document objects.
'''
print "Unpacking Document objects from serialized files..."
doc_regex = re.compile(r"\.Document$")
num_unpacked = 0
num_failed = 0
file_list = os.listdir(PICKLE_PATH)
for pickle_file in os.listdir(PICKLE_PATH):
'''
print "Unpacking Document object from {0}... "\
"({1} of {2})".format(pickle_file, num_unpacked+1,
len(file_list))
'''
self.info.SetLabel("Unpacking Document object from {0}... "\
"({1} of {2})".format(pickle_file, num_unpacked+1,
len(file_list)))
# if a file doesn't have a .Document extension, we ignore it
is_document_file = re.search(doc_regex, pickle_file)
if not is_document_file:
print ("{0} is not file containing a pickled Document,"
"so we can't unpack it!".format(pickle_file))
num_failed += 1
continue
# we attempt to un-pickle the file into a Document object
full_path = os.path.join(PICKLE_PATH, pickle_file)
with open(full_path, 'r') as doc_file:
try:
unpacked_doc = pickle.load(doc_file)
num_unpacked += 1
self.opinion_list.append(unpacked_doc)
except:
print "Unable to unpack Document contained in "\
"{0}!".format(pickle_file)
num_failed += 1
continue
done_string = "Unpacking complete.\n"\
"{0} Documents unpacked.\n"\
"{1} Documents failed to unpack.\n".format(num_unpacked,num_failed)
'''
print "Unpacking complete."
print "{0} Documents unpacked.".format(num_unpacked)
print "{0} Documents failed to unpack.".format(num_failed)
'''
self.info.SetLabel(done_string)
self.done = wx.Button(self.panel, wx.ID_OK, "Done")
self.done.Bind(wx.EVT_BUTTON, self.OnDone)
self.main_box.Add(self.done, flag = wx.ALIGN_CENTER)
return
def OnDone(self, event):
self.Destroy()
|
import numpy as np
import astropy.units as u
import astropy.constants as const
from .optic import Optic
class Cutoff_Filter(Optic):
def __init__(self, name, cutoff_freq, absorption, temperature,
spill, spill_temperature):
self.cutoff = cutoff_freq
self.absorption = self._init_value(absorption)
self.spill = spill
super(Cutoff_Filter, self).__init__(name,
self.absorption,
self.reflection,
temperature, spill, spill_temperature)
def __str__(self):
s = '{:10}\t&\t{:02}\t&\t'.format(self.name, self.temperature)
if np.isscalar(self.absorption):
s += 'Emissivity: {:04}'.format(np.round(self.absorption, 4))
else:
s += 'Emissivity: FUNC'
s += ' '
s += 'Freq Cutoff: {:02}'.format(self.cutoff)
s += '\t&\t'
s += '{:04}\t&\t{:02}'.format(self.spill, self.spill_temperature)
return s
def reflection(self, freqs):
val = np.zeros( np.shape(freqs) )
val[freqs >= self.cutoff] = 1.0-self.absorption-self.spill
return val
class Metal_Mesh_Filter(Optic):
'''
    Metal mesh filters are the only ones defined with a transmission
    filename, because that is the main measurement we have for them.
'''
def __init__(self, name, transmission, absorption, temperature,
spill, spill_temperature):
self.meas_trans = self._init_value(transmission)
self.absorption = self._init_value(absorption)
self.spill = self._init_value(spill)
if np.isscalar(self.absorption):
self.reflection = lambda freqs: self._check_values(1 -
self.meas_trans(freqs) -
self.absorption -
self.spill, True)
else:
self.reflection = lambda freqs: self._check_values( 1 -
self.meas_trans(freqs) -
self.absorption(freqs) -
self.spill, True)
super(Metal_Mesh_Filter, self).__init__(name,
self.absorption,
self.reflection,
temperature, spill, spill_temperature)
|
# Copyright Contributors to the Packit project.
# SPDX-License-Identifier: MIT
import logging
from typing import List, Optional, Set, Tuple
from copr.v3 import CoprRequestException
from ogr.abstract import GitProject
from ogr.parsing import parse_git_repo
from ogr.services.github import GithubProject
from packit.config import JobConfig, JobType
from packit.config.aliases import get_valid_build_targets
from packit.config.package_config import PackageConfig
from packit.exceptions import PackitCoprException, PackitCoprSettingsException
from packit_service import sentry_integration
from packit_service.celerizer import celery_app
from packit_service.config import Deployment, ServiceConfig
from packit_service.constants import MSG_RETRIGGER
from packit_service.models import AbstractTriggerDbType, CoprBuildModel
from packit_service.worker.events import EventData
from packit_service.service.urls import (
get_copr_build_info_url,
get_srpm_build_info_url,
)
from packit_service.worker.build.build_helper import BaseBuildJobHelper
from packit_service.worker.result import TaskResults
from packit_service.worker.reporting import BaseCommitStatus
logger = logging.getLogger(__name__)
class CoprBuildJobHelper(BaseBuildJobHelper):
job_type_build = JobType.copr_build
job_type_test = JobType.tests
status_name_build: str = "rpm-build"
status_name_test: str = "testing-farm"
def __init__(
self,
service_config: ServiceConfig,
package_config: PackageConfig,
project: GitProject,
metadata: EventData,
db_trigger: AbstractTriggerDbType,
job_config: JobConfig,
targets_override: Optional[Set[str]] = None,
):
super().__init__(
service_config=service_config,
package_config=package_config,
project=project,
metadata=metadata,
db_trigger=db_trigger,
job_config=job_config,
targets_override=targets_override,
)
self.msg_retrigger: str = MSG_RETRIGGER.format(
job="build",
command="copr-build" if self.job_build else "build",
place="pull request",
)
@property
def default_project_name(self) -> str:
"""
Project name for copr.
* use hostname prefix for non-github service
* replace slash in namespace with dash
* add `-stg` suffix for the stg app
"""
service_hostname = parse_git_repo(self.project.service.instance_url).hostname
service_prefix = (
"" if isinstance(self.project, GithubProject) else f"{service_hostname}-"
)
namespace = self.project.namespace.replace("/", "-")
stg = "-stg" if self.service_config.deployment == Deployment.stg else ""
# We want to share project between all releases.
# More details: https://github.com/packit/packit-service/issues/1044
identifier = "releases" if self.metadata.tag_name else self.metadata.identifier
return f"{service_prefix}{namespace}-{self.project.repo}-{identifier}{stg}"
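# Illustrative (hypothetical) examples of the scheme above:
#   github.com/packit/ogr built from a PR on prod  -> "packit-ogr-<identifier>"
#   gitlab.com/foo/bar/baz built from a tag on stg -> "gitlab.com-foo-bar-baz-releases-stg"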
@property
def job_project(self) -> Optional[str]:
"""
The job definition from the config file.
"""
if self.job_build and self.job_build.metadata.project:
return self.job_build.metadata.project
if self.job_tests and self.job_tests.metadata.project:
return self.job_tests.metadata.project
return self.default_project_name
@property
def job_owner(self) -> Optional[str]:
"""
Owner used for the copr build -- search the config or use the copr's config.
"""
if self.job_build and self.job_build.metadata.owner:
return self.job_build.metadata.owner
if self.job_tests and self.job_tests.metadata.owner:
return self.job_tests.metadata.owner
return self.api.copr_helper.copr_client.config.get("username")
@property
def preserve_project(self) -> Optional[bool]:
"""
Whether the project will be preserved or can be removed after 60 days.
"""
return self.job_build.metadata.preserve_project if self.job_build else None
@property
def list_on_homepage(self) -> Optional[bool]:
"""
Whether the project will be shown on the copr home page.
"""
return self.job_build.metadata.list_on_homepage if self.job_build else None
@property
def additional_repos(self) -> Optional[List[str]]:
"""
Additional repos that will be enabled for the copr build.
"""
return self.job_build.metadata.additional_repos if self.job_build else None
@property
def build_targets_all(self) -> Set[str]:
"""
Return all valid Copr build targets/chroots from config.
"""
return get_valid_build_targets(*self.configured_build_targets, default=None)
@property
def tests_targets_all(self) -> Set[str]:
"""
Return all valid test targets/chroots from config.
"""
return get_valid_build_targets(*self.configured_tests_targets, default=None)
@property
def available_chroots(self) -> Set[str]:
"""
Returns set of available COPR targets.
"""
return {
*filter(
lambda chroot: not chroot.startswith("_"),
self.api.copr_helper.get_copr_client()
.mock_chroot_proxy.get_list()
.keys(),
)
}
def get_built_packages(self, build_id: int, chroot: str) -> List:
return self.api.copr_helper.copr_client.build_chroot_proxy.get_built_packages(
build_id, chroot
).packages
def get_build(self, build_id: int):
return self.api.copr_helper.copr_client.build_proxy.get(build_id)
def run_copr_build(self) -> TaskResults:
self.report_status_to_all(
description="Building SRPM ...",
state=BaseCommitStatus.running,
# pagure requires "valid url"
url="",
)
if results := self.create_srpm_if_needed():
return results
if not self.srpm_model.success:
msg = "SRPM build failed, check the logs for details."
self.report_status_to_all(
state=BaseCommitStatus.failure,
description=msg,
url=get_srpm_build_info_url(self.srpm_model.id),
)
return TaskResults(success=False, details={"msg": msg})
try:
build_id, web_url = self.run_build()
except Exception as ex:
sentry_integration.send_to_sentry(ex)
# TODO: Where can we show more info about failure?
# TODO: Retry
self.report_status_to_all(
state=BaseCommitStatus.error,
description=f"Submit of the build failed: {ex}",
)
return TaskResults(
success=False,
details={"msg": "Submit of the Copr build failed.", "error": str(ex)},
)
unprocessed_chroots = []
for chroot in self.build_targets:
if chroot not in self.available_chroots:
self.report_status_to_all_for_chroot(
state=BaseCommitStatus.error,
description=f"Not supported target: {chroot}",
url=get_srpm_build_info_url(self.srpm_model.id),
chroot=chroot,
)
unprocessed_chroots.append(chroot)
continue
copr_build = CoprBuildModel.create(
build_id=str(build_id),
commit_sha=self.metadata.commit_sha,
project_name=self.job_project,
owner=self.job_owner,
web_url=web_url,
target=chroot,
status="pending",
run_model=self.run_model,
task_accepted_time=self.metadata.task_accepted_time,
)
url = get_copr_build_info_url(id_=copr_build.id)
self.report_status_to_all_for_chroot(
state=BaseCommitStatus.running,
description="Starting RPM build...",
url=url,
chroot=chroot,
)
if unprocessed_chroots:
unprocessed = "\n".join(sorted(unprocessed_chroots))
available = "\n".join(sorted(self.available_chroots))
self.status_reporter.comment(
body="There are build targets that are not supported by COPR.\n"
"<details>\n<summary>Unprocessed build targets</summary>\n\n"
f"```\n{unprocessed}\n```\n</details>\n"
"<details>\n<summary>Available build targets</summary>\n\n"
f"```\n{available}\n```\n</details>",
)
# release the hounds!
celery_app.send_task(
"task.babysit_copr_build",
args=(build_id,),
countdown=120, # do the first check in 120s
)
return TaskResults(success=True, details={})
def run_build(
self, target: Optional[str] = None
) -> Tuple[Optional[int], Optional[str]]:
"""
Trigger the build and return id and web_url
:param target: str, run for all if not set
:return: task_id, task_url
"""
owner = self.job_owner or self.api.copr_helper.configured_owner
if not owner:
raise PackitCoprException(
"Copr owner not set. Use Copr config file or `--owner` when calling packit CLI."
)
try:
overwrite_booleans = owner == "packit"
self.api.copr_helper.create_copr_project_if_not_exists(
project=self.job_project,
chroots=list(self.build_targets_all),
owner=owner,
description=None,
instructions=None,
list_on_homepage=self.list_on_homepage if overwrite_booleans else None,
preserve_project=self.preserve_project if overwrite_booleans else None,
additional_repos=self.additional_repos,
request_admin_if_needed=True,
)
except PackitCoprSettingsException as ex:
# notify user first, PR if exists, commit comment otherwise
table = (
"| field | old value | new value |\n"
"| ----- | --------- | --------- |\n"
)
for field, (old, new) in ex.fields_to_change.items():
table += f"| {field} | {old} | {new} |\n"
boolean_note = ""
if "unlisted_on_hp" in ex.fields_to_change:
boolean_note += (
"The `unlisted_on_hp` field is represented as `list_on_homepage`"
" in the packit config. "
"By default we create projects with `list_on_homepage: False`.\n"
)
if "delete_after_days" in ex.fields_to_change:
boolean_note += (
"The `delete_after_days` field is represented as `preserve_project`"
" in the packit config (`True` is `-1` and `False` is `60`). "
"By default we create projects with `preserve: True` "
"which means `delete_after_days=60`.\n"
)
permissions_url = self.api.copr_helper.get_copr_settings_url(
owner, self.job_project, section="permissions"
)
settings_url = self.api.copr_helper.get_copr_settings_url(
owner, self.job_project
)
msg = (
"Based on your Packit configuration the settings "
f"of the {owner}/{self.job_project} "
"Copr project would need to be updated as follows:\n"
"\n"
f"{table}"
"\n"
f"{boolean_note}"
"\n"
"Packit was unable to update the settings above as it is missing `admin` "
f"permissions on the {owner}/{self.job_project} Copr project.\n"
"\n"
"To fix this you can do one of the following:\n"
"\n"
f"- Grant Packit `admin` permissions on the {owner}/{self.job_project} "
f"Copr project on the [permissions page]({permissions_url}).\n"
"- Change the above Copr project settings manually "
f"on the [settings page]({settings_url}) "
"to match the Packit configuration.\n"
"- Update the Packit configuration to match the Copr project settings.\n"
"\n"
"Please retrigger the build, once the issue above is fixed.\n"
)
self.status_reporter.comment(body=msg)
raise ex
logger.debug(
f"owner={owner}, project={self.job_project}, path={self.srpm_path}"
)
try:
build = self.api.copr_helper.copr_client.build_proxy.create_from_file(
ownername=owner,
projectname=self.job_project,
path=self.srpm_path,
buildopts={
"chroots": list(self.build_targets),
},
)
except CoprRequestException as ex:
if "You don't have permissions to build in this copr." in str(
ex
) or "is not allowed to build in the copr" in str(ex):
self.api.copr_helper.copr_client.project_proxy.request_permissions(
ownername=owner,
projectname=self.job_project,
permissions={"builder": True},
)
# notify user, PR if exists, commit comment otherwise
permissions_url = self.api.copr_helper.get_copr_settings_url(
owner, self.job_project, section="permissions"
)
self.status_reporter.comment(
body="We have requested the `builder` permissions "
f"for the {owner}/{self.job_project} Copr project.\n"
"\n"
"Please confirm the request on the "
f"[{owner}/{self.job_project} Copr project permissions page]"
f"({permissions_url})"
" and retrigger the build.",
)
raise ex
return build.id, self.api.copr_helper.copr_web_build_url(build)
|
"""Introduce several additions to `pygame gui
<https://pygame-gui.readthedocs.io/en/latest/>`_.
A toggle button, which is the same as a
:class:`UIButton <pygame_gui.elements.UIButton>` with additional
settings. It stores a boolean value that remembers which state the button is in.
Event:
UI_TOGGLEBUTTON_TOGGLED
.................................................................................................
Fired when a user clicks on a Toggle Button.
- **'type'** : pygame.USEREVENT
- **'user_type'** : pygame_gui.UI_TOGGLEBUTTON_TOGGLED
- **'value'** : The current value of the button (True or False).
- **'ui_element'** : The :class:`UIToggleButton <.UIToggleButton>` that fired this event.
- **'ui_object_id'** : The most unique ID for the button that fired this event.
**Example usage**:
.. code-block:: python
:linenos:
for event in pygame.event.get():
if event.type == pygame.USEREVENT:
if event.user_type == pygame_gui.UI_TOGGLEBUTTON_TOGGLED:
if event.ui_element == toggle_button:
print('current value:', event.value)
"""
from typing import Any, Callable, Dict, List, Union
import pygame
from pygame_gui.core import ui_element
from pygame_gui.core.interfaces.container_interface import (
IContainerLikeInterface,
)
from pygame_gui.core.interfaces.manager_interface import IUIManagerInterface
from pygame_gui.elements.ui_button import UIButton
from pygame_gui.ui_manager import UIManager
UI_TOGGLEBUTTON_TOGGLED = "ui_button_toggled"
def set_button_color(button: UIButton, color: pygame.Color) -> None:
"""Set a new color to the button and display color change."""
button.colours["normal_bg"] = color
button.colours["hovered_bg"] = color
button.rebuild()
def get_new_close_button(UI_MANAGER: UIManager):
rect = pygame.Rect(0, 0, 50, 50)
rect.topright = (0, 0)
close_button = UIButton(
rect,
"X",
UI_MANAGER,
tool_tip_text="Return to Menu",
anchors={
"left": "right",
"right": "right",
"top": "top",
"bottom": "top",
},
object_id="#close_button",
starting_height=1000, # Ensure will show on top of the others
)
return close_button
class UIToggleButton(UIButton):
"""Togglable button.
A toggle button: much of its appearance, including the images to be
displayed, is set up via the theme file.
This button is designed to be toggled on or off.
The button element is reused throughout the UI as part of other
elements, as it happens to be a very flexible interactive element.
:param relative_rect: A rectangle describing the position (relative
to its container) and dimensions.
:param text: Text for the button.
:param manager: The UIManager that manages this element.
:param container: The container that this element is within. If set
to None will be the root window's container.
:param tool_tip_text: Optional tool tip text, can be formatted with
HTML. If supplied will appear on hover.
:param starting_height: The height in layers above its container
at which this element will be placed.
:param parent_element: The element this element 'belongs to' in the
theming hierarchy.
:param object_id: A custom defined ID for fine tuning of theming.
:param anchors: A dictionary describing what this element's
relative_rect is relative to.
:param allow_double_clicks: Enables double clicking on buttons which
will generate a unique event.
:param visible: Whether the element is visible by default.
Warning - container visibility may override this.
"""
toggled: bool
switched_event: bool = False
colors_parameters: List[str] = [
"normal_bg",
"hovered_bg",
"disabled_bg",
"selected_bg",
"active_bg",
"normal_text",
"hovered_text",
"disabled_text",
"selected_text",
"active_text",
"normal_border",
"hovered_border",
"disabled_border",
"selected_border",
"active_border",
]
def __init__(
self,
relative_rect: pygame.Rect,
text: str,
manager: IUIManagerInterface,
initial_state: bool = False,
container: Union[IContainerLikeInterface, None] = None,
tool_tip_text: Union[str, None] = None,
starting_height: int = 1,
parent_element: ui_element = None,
object_id: Union[ui_element.ObjectID, str, None] = None,
anchors: Dict[str, str] = None,
visible: int = 1,
):
self.toggled = initial_state
super().__init__(
relative_rect,
text,
manager,
container=container,
tool_tip_text=tool_tip_text,
starting_height=starting_height,
parent_element=parent_element,
object_id=object_id,
anchors=anchors,
allow_double_clicks=False, # Toggle does not need double clicks
visible=visible,
)
def process_event(self, event: pygame.event.Event) -> bool:
"""
Handles various interactions with the button.
:param event: The event to process.
:return: Return True if we want to consume this event so it is not passed on to the
rest of the UI.
"""
consumed_event = super().process_event(event)
if consumed_event and self.pressed_event:
# Toggle the button when it is pressed
self.toggled = not self.toggled
self.switched_event = True
# Send a toggle event
event_data = {
"user_type": UI_TOGGLEBUTTON_TOGGLED,
"ui_element": self,
"ui_object_id": self.most_specific_combined_id,
"value": self.toggled,
}
pygame.event.post(pygame.event.Event(pygame.USEREVENT, event_data))
self.rebuild_from_changed_theme_data()
return consumed_event
def update(self, time_delta: float):
super().update(time_delta)
if self.alive():
# clear the event for the new cycle
self.switched_event = False
def add_if_toggled(self, s: str):
return "toggled_" + s if self.toggled else s
def rebuild_from_changed_theme_data(self):
"""Rebuild the button if any theming parameters have changed.
Check whether any theming parameters have changed and, if so, trigger
a full rebuild of the button's drawable shape.
Each type of parameter has a different implementation, so we summarize
here how each one is handled for the toggled style:
- colors: directly in this method
- font: TODO
- misc: _check_misc_theme_data_changed
- images: TODO
"""
ui_element.UIElement.rebuild_from_changed_theme_data(self)
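# Note: the toggled variants of theming parameters are looked up by prefixing the
# parameter name with "toggled_" (see add_if_toggled), e.g. "toggled_normal_bg" is
# used for the background colour while the button is toggled on.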
has_any_changed = False
font = self.ui_theme.get_font(self.combined_element_ids)
if font != self.font:
self.font = font
has_any_changed = True
cols = {
el_name: self.ui_theme.get_colour_or_gradient(
# Workaround for the colors, to change toggled
self.add_if_toggled(el_name),
self.combined_element_ids,
)
for el_name in self.colors_parameters
}
if cols != self.colours:
self.colours = cols
has_any_changed = True
if self._set_any_images_from_theme():
has_any_changed = True
# misc
if self._check_misc_theme_data_changed(
attribute_name="shape",
default_value="rectangle",
casting_func=str,
allowed_values=["rectangle", "rounded_rectangle", "ellipse"],
):
has_any_changed = True
if self._check_shape_theming_changed(
defaults={
"border_width": 1,
"shadow_width": 2,
"shape_corner_radius": 2,
}
):
has_any_changed = True
if self._check_misc_theme_data_changed(
attribute_name="tool_tip_delay",
default_value=1.0,
casting_func=float,
):
has_any_changed = True
if self._check_text_alignment_theming():
has_any_changed = True
try:
state_transitions = self.ui_theme.get_misc_data(
"state_transitions", self.combined_element_ids
)
except LookupError:
self.state_transitions = {}
else:
if isinstance(state_transitions, dict):
for key in state_transitions:
states = key.split("_")
if len(states) == 2:
start_state = states[0]
target_state = states[1]
try:
duration = float(state_transitions[key])
except ValueError:
duration = 0.0
self.state_transitions[
(start_state, target_state)
] = duration
if has_any_changed:
self.rebuild()
def rebuild(self):
return super().rebuild()
def _check_misc_theme_data_changed(
self,
attribute_name: str,
default_value: Any,
casting_func: Callable[[Any], Any],
allowed_values: Union[List, None] = None,
) -> bool:
has_changed = False
attribute_value = default_value
try:
attribute_value = casting_func(
self.ui_theme.get_misc_data(
# Adds the toggled name
self.add_if_toggled(attribute_name),
self.combined_element_ids,
)
)
except (LookupError, ValueError):
attribute_value = default_value
finally:
if allowed_values and attribute_value not in allowed_values:
attribute_value = default_value
if attribute_value != getattr(self, attribute_name, default_value):
setattr(self, attribute_name, attribute_value)
has_changed = True
return has_changed
|
#
# Copyright (c) 2021 Blickfeld GmbH.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE.md file in the root directory of this source tree.
from __future__ import print_function
import argparse
from blickfeld_scanner import scanner
import numpy as np
from time import sleep
from blickfeld_scanner.protocol.config.advanced_pb2 import Advanced
def calibrate_accelerometer(args):
"""Calibrate the rotational offset of the Blickfeld Cube 1 Inertial Measurement Unit (IMU).
The upright pose is identified by the static acceleration reading [0, 0, -1]. This means
that the gravitational acceleration is measured along the negative direction of the device's Z-axis.
Place the Blickfeld Cube 1 on a level surface for calibrating the IMU.
Avoid any kind of movement of the Blickfeld Cube 1 while running the script.
If the Blickfeld Cube 1 already has a rotational offset configured, remove it first by running
this script with the '--remove' flag.
"""
ORIENTATION_UPRIGHT = [0, 0, -1]
ERROR_ALLOWED_NORM = 1e-2
# ensure a given vector is normalized to length 1
def _unit_vector(v: list) -> np.array:
return np.array(v) / np.linalg.norm(v)
# calculate the rotation matrix
def _calculate_rotation_matrix(acc_imu: list, acc_calib: list = ORIENTATION_UPRIGHT) -> np.array:
acc_imu = _unit_vector(acc_imu)
acc_calib = _unit_vector(acc_calib)
# see https://math.stackexchange.com/questions/180418/calculate-rotation-matrix-to-align-vector-a-to-vector-b-in-3d
imu_static_rotation_offset = np.eye(3)
if np.linalg.norm(np.cross(acc_calib, acc_imu)) < 1e-6:
imu_static_rotation_offset = -imu_static_rotation_offset
else:
axis = np.cross(acc_calib, acc_imu)
s = np.linalg.norm(axis)
c = np.dot(acc_calib, acc_imu)
axis_cross = np.zeros(9)
np.put(axis_cross, [1, 2, 3, 5, 6, 7], [-axis[2], axis[1], axis[2], -axis[0], -axis[1], axis[0]])
axis_cross = axis_cross.reshape(3, 3)
imu_static_rotation_offset = np.eye(3) + axis_cross + np.dot(axis_cross, axis_cross) * (1 - c) / (s ** 2)
return imu_static_rotation_offset
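# Sketch of the math above (Rodrigues' rotation formula): with a = acc_calib, b = acc_imu,
# axis = a x b, s = |axis| and c = a . b, the rotation aligning a with b is
#     R = I + [axis]_x + [axis]_x^2 * (1 - c) / s**2,
# where [axis]_x is the skew-symmetric cross-product matrix assembled via np.put above.
# The first branch handles the degenerate (anti)parallel case, where s ~ 0 and the
# formula would otherwise divide by zero.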
device = scanner(args.target)
print(f"connected to {args.target}")
cfg = device.get_advanced_config()
# clear imu_static_rotation_offset and exit
if args.remove:
del cfg.processing.imu_static_rotation_offset[:]
device.set_advanced_config(cfg, persist=args.persist)
print("static rotation offset removed")
exit(0)
# check for configured imu_static_rotation_offset
if cfg.HasField("processing") and len(cfg.processing.imu_static_rotation_offset) != 0:
print("imu_static_rotation_offset is already configured")
print("remove configuration by starting this script with '--remove'")
exit(0)
# measure the actual static acceleration by removing configured imu_static_rotation_offset
del cfg.processing.imu_static_rotation_offset[:]
device.set_advanced_config(cfg)
sleep(0.3)
# calculate and set imu_static_rotation_offset
imu_static_rotation_offset = _calculate_rotation_matrix(acc_imu=list(device.get_status().imu.static_state.acceleration))
cfg_new = Advanced()
cfg_new.MergeFrom(cfg)
cfg_new.processing.imu_static_rotation_offset[:] = imu_static_rotation_offset.flatten()
device.set_advanced_config(cfg_new)
sleep(0.3)
# check error after calibration
state = device.get_status()
acc_imu = _unit_vector(state.imu.static_state.acceleration)
print(f"offset after calibration: {np.linalg.norm(ORIENTATION_UPRIGHT - acc_imu)} [g]")
# rollback in case error is too large (e.g. device was moved during calibration)
if np.linalg.norm(ORIENTATION_UPRIGHT - acc_imu) > ERROR_ALLOWED_NORM:
print(f"error too large, maximum allowed is {ERROR_ALLOWED_NORM}")
print("rolling back changes")
del cfg.processing.imu_static_rotation_offset[:]
device.set_advanced_config(cfg)
exit(0)
device.set_advanced_config(cfg_new, persist=args.persist)
if __name__ == "__main__":
parser = argparse.ArgumentParser() # Command line argument parser
parser.add_argument("target", help="hostname or IP of scanner to connect to") # host name or IP address of the device
parser.add_argument("-p", "--persist", help="Persist configuration on device", action="store_true")
parser.add_argument("-r", "--remove", help="Remove static rotation offset. Enable persist to remove permanently.", action="store_true")
args = parser.parse_args()
calibrate_accelerometer(args)
|
################################################################################
# These computations are based on Bernd Schober's notes for chart 66 from
# August.
# Maglione, August 2019
################################################################################
import Zeta
from sage.all import QQ, Polyhedron, PolynomialRing, var
from SingularZeta.src.globalVars import _DEFAULT_p, _DEFAULT_t
p = var(_DEFAULT_p)
t = var(_DEFAULT_t)
# ==============================================================================
# Chart 66:
# ==============================================================================
# ------------------------------------------------------------------------------
# Case: 1
# ------------------------------------------------------------------------------
# b n1 n2 n3 n4 n5 n6
P = Polyhedron(ieqs=[( 0, 1, 0, 0, 0, 0, 0),
( 0, 0, 1, 0, 0, 0, 0),
( 0, 0, 0, 1, 0, 0, 0),
( 0, 0, 0, 0, 1, 0, 0),
( 0, 0, 0, 0, 0, 1, 0),
( 0, 0, 0, 0, 0, 0, 1),
( 2,-1, 2, 0, 2, 0, 0),
( 2,-1, 1, 1, 2, 0, 0),
( 0, 0,-1, 2, 0, 0, 0),
( 3,-1, 0, 2, 2, 0, 1)])
R = PolynomialRing(QQ,'x', 6)
sm_1 = Zeta.smurf.SMURF.from_polyhedron(P, R)
factor_1 = (1 - p**-1)**-3*(1 - p**-1)**6*p**-1*t**2*p**-3
zed_1 = sm_1.evaluate()(x0=p**2*t, x1=t, x2=p**-2*t, x3=p**-3*t, x4=t, x5=p**-1)
int_1 = factor_1*zed_1
# Double checked
# ------------------------------------------------------------------------------
# Case: 2
# ------------------------------------------------------------------------------
# b n1 n2 n3 n4 n5 n6
P = Polyhedron(ieqs=[( 0, 1, 0, 0, 0, 0, 0),
( 0, 0, 1, 0, 0, 0, 0),
( 0, 0, 0, 1, 0, 0, 0),
( 0, 0, 0, 0, 1, 0, 0),
( 0, 0, 0, 0, 0, 1, 0),
( 0, 0, 0, 0, 0, 0, 1),
( 1,-1, 2, 0, 1, 0, 0),
( 1,-1, 1, 1, 1, 0, 0),
( 0, 0,-1, 2, 0, 0, 0),
( 2,-1, 0, 2, 1, 0, 1)])
R = PolynomialRing(QQ,'x', 6)
sm_2 = Zeta.smurf.SMURF.from_polyhedron(P, R)
factor_2 = (1 - p**-1)**-3*(1 - p**-1)**6*t*p**-3
zed_2 = sm_2.evaluate()(x0=p**2*t, x1=t, x2=p**-2*t, x3=p**-2, x4=t, x5=p**-1)
int_2 = factor_2*zed_2
# Double checked
# ------------------------------------------------------------------------------
# Case: 3
# ------------------------------------------------------------------------------
# b n1 n2 n3 n4 n5 n6
P = Polyhedron(ieqs=[( 0, 1, 0, 0, 0, 0, 0),
( 0, 0, 1, 0, 0, 0, 0),
( 0, 0, 0, 1, 0, 0, 0),
( 0, 0, 0, 0, 1, 0, 0),
( 0, 0, 0, 0, 0, 1, 0),
( 0, 0, 0, 0, 0, 0, 1),
( 1,-1, 2, 0, 1, 0, 0),
( 0,-1, 1, 1, 0, 0, 0),
( 0, 0,-1, 2, 0, 0, 0),
( 1,-1, 0, 2, 0, 0, 1)])
R = PolynomialRing(QQ,'x', 6)
sm_3 = Zeta.smurf.SMURF.from_polyhedron(P, R)
factor_3 = (1 - p**-1)**-3*(1 - p**-1)**6*p*t*p**-3
zed_3 = sm_3.evaluate()(x0=p**2*t, x1=t, x2=p**-2*t, x3=p**-1, x4=t, x5=p**-1)
int_3 = factor_3*zed_3
# Double checked
# ------------------------------------------------------------------------------
# Case: 4
# ------------------------------------------------------------------------------
# b n1 n2 n3 n4 n5 n6
P = Polyhedron(ieqs=[( 0, 1, 0, 0, 0, 0, 0),
( 0, 0, 1, 0, 0, 0, 0),
( 0, 0, 0, 1, 0, 0, 0),
( 0, 0, 0, 0, 1, 0, 0),
( 0, 0, 0, 0, 0, 1, 0),
( 0, 0, 0, 0, 0, 0, 1),
( 0,-1, 2, 0, 0, 0, 0),
( 0,-1, 1, 1, 0, 0, 0),
( 0, 0,-1, 2, 0, 0, 0),
( 1,-1, 0, 2, 0, 0, 1)])
R = PolynomialRing(QQ,'x', 6)
sm_4 = Zeta.smurf.SMURF.from_polyhedron(P, R)
factor_4 = (1 - p**-1)**-3*(1 - p**-1)**6*(p - 3)*p*t*p**-3
zed_4 = sm_4.evaluate()(x0=p**2*t, x1=t, x2=p**-2*t, x3=p**-1, x4=t, x5=p**-1)
int_4 = factor_4*zed_4
# Double checked
int_66 = (int_1 + int_2 + int_3 + int_4).simplify().factor().simplify()
################################################################################
# No cone conditions
################################################################################
# ==============================================================================
# Chart 66:
# ==============================================================================
# ------------------------------------------------------------------------------
# Case: 1
# ------------------------------------------------------------------------------
# b n1 n2 n3 n4 n5 n6
P = Polyhedron(ieqs=[( 0, 1, 0, 0, 0, 0, 0),
( 0, 0, 1, 0, 0, 0, 0),
( 0, 0, 0, 1, 0, 0, 0),
( 0, 0, 0, 0, 1, 0, 0),
( 0, 0, 0, 0, 0, 1, 0),
( 0, 0, 0, 0, 0, 0, 1)])
R = PolynomialRing(QQ,'x', 6)
sm_1_cc = Zeta.smurf.SMURF.from_polyhedron(P, R)
zed_1_cc = sm_1_cc.evaluate()(x0=p**2*t, x1=t, x2=p**-2*t, x3=p**-3*t, x4=t, x5=p**-1)
int_1_cc = factor_1*zed_1_cc
# ------------------------------------------------------------------------------
# Case: 2
# ------------------------------------------------------------------------------
# b n1 n2 n3 n4 n5 n6
P = Polyhedron(ieqs=[( 0, 1, 0, 0, 0, 0, 0),
( 0, 0, 1, 0, 0, 0, 0),
( 0, 0, 0, 1, 0, 0, 0),
( 0, 0, 0, 0, 1, 0, 0),
( 0, 0, 0, 0, 0, 1, 0),
( 0, 0, 0, 0, 0, 0, 1)])
R = PolynomialRing(QQ,'x', 6)
sm_2_cc = Zeta.smurf.SMURF.from_polyhedron(P, R)
zed_2_cc = sm_2_cc.evaluate()(x0=p**2*t, x1=t, x2=p**-2*t, x3=p**-2, x4=t, x5=p**-1)
int_2_cc = factor_2*zed_2_cc
# ------------------------------------------------------------------------------
# Case: 3
# ------------------------------------------------------------------------------
# b n1 n2 n3 n4 n5 n6
P = Polyhedron(ieqs=[( 0, 1, 0, 0, 0, 0, 0),
( 0, 0, 1, 0, 0, 0, 0),
( 0, 0, 0, 1, 0, 0, 0),
( 0, 0, 0, 0, 1, 0, 0),
( 0, 0, 0, 0, 0, 1, 0),
( 0, 0, 0, 0, 0, 0, 1)])
R = PolynomialRing(QQ,'x', 6)
sm_3_cc = Zeta.smurf.SMURF.from_polyhedron(P, R)
zed_3_cc = sm_3_cc.evaluate()(x0=p**2*t, x1=t, x2=p**-2*t, x3=p**-1, x4=t, x5=p**-1)
int_3_cc = factor_3*zed_3_cc
# ------------------------------------------------------------------------------
# Case: 4
# ------------------------------------------------------------------------------
# b n1 n2 n3 n4 n5 n6
P = Polyhedron(ieqs=[( 0, 1, 0, 0, 0, 0, 0),
( 0, 0, 1, 0, 0, 0, 0),
( 0, 0, 0, 1, 0, 0, 0),
( 0, 0, 0, 0, 1, 0, 0),
( 0, 0, 0, 0, 0, 1, 0),
( 0, 0, 0, 0, 0, 0, 1)])
R = PolynomialRing(QQ,'x', 6)
sm_4_cc = Zeta.smurf.SMURF.from_polyhedron(P, R)
zed_4_cc = sm_4_cc.evaluate()(x0=p**2*t, x1=t, x2=p**-2*t, x3=p**-1, x4=t, x5=p**-1)
int_4_cc = factor_4*zed_4_cc
int_66_cc = (int_1_cc + int_2_cc + int_3_cc + int_4_cc).simplify().factor().simplify()
################################################################################
# No cone conditions and no integrand
################################################################################
# ==============================================================================
# Chart 66:
# ==============================================================================
# ------------------------------------------------------------------------------
# Case: 1
# ------------------------------------------------------------------------------
# b n1 n2 n3 n4 n5 n6
P = Polyhedron(ieqs=[( 0, 1, 0, 0, 0, 0, 0),
( 0, 0, 1, 0, 0, 0, 0),
( 0, 0, 0, 1, 0, 0, 0),
( 0, 0, 0, 0, 1, 0, 0),
( 0, 0, 0, 0, 0, 1, 0),
( 0, 0, 0, 0, 0, 0, 1)])
R = PolynomialRing(QQ,'x', 6)
sm_1_null = Zeta.smurf.SMURF.from_polyhedron(P, R)
factor_1_null = (1 - p**-1)**6*p**-3*p**-3
zed_1_null = sm_1_null.evaluate()(x0=p**-1, x1=p**-2, x2=p**-3, x3=p**-4, x4=p**-1, x5=p**-1)
int_1_null = factor_1_null*zed_1_null
# ------------------------------------------------------------------------------
# Case: 2
# ------------------------------------------------------------------------------
# b n1 n2 n3 n4 n5 n6
P = Polyhedron(ieqs=[( 0, 1, 0, 0, 0, 0, 0),
( 0, 0, 1, 0, 0, 0, 0),
( 0, 0, 0, 1, 0, 0, 0),
( 0, 0, 0, 0, 1, 0, 0),
( 0, 0, 0, 0, 0, 1, 0),
( 0, 0, 0, 0, 0, 0, 1)])
R = PolynomialRing(QQ,'x', 6)
sm_2_null = Zeta.smurf.SMURF.from_polyhedron(P, R)
factor_2_null = (1 - p**-1)**6*p**-1*p**-3
zed_2_null = sm_2_null.evaluate()(x0=p**-1, x1=p**-2, x2=p**-3, x3=p**-2, x4=p**-1, x5=p**-1)
int_2_null = factor_2_null*zed_2_null
# ------------------------------------------------------------------------------
# Case: 3
# ------------------------------------------------------------------------------
# b n1 n2 n3 n4 n5 n6
P = Polyhedron(ieqs=[( 0, 1, 0, 0, 0, 0, 0),
( 0, 0, 1, 0, 0, 0, 0),
( 0, 0, 0, 1, 0, 0, 0),
( 0, 0, 0, 0, 1, 0, 0),
( 0, 0, 0, 0, 0, 1, 0),
( 0, 0, 0, 0, 0, 0, 1)])
R = PolynomialRing(QQ,'x', 6)
sm_3_null = Zeta.smurf.SMURF.from_polyhedron(P, R)
factor_3_null = (1 - p**-1)**6*p**-3
zed_3_null = sm_3_null.evaluate()(x0=p**-1, x1=p**-2, x2=p**-3, x3=p**-1, x4=p**-1, x5=p**-1)
int_3_null = factor_3_null*zed_3_null
# ------------------------------------------------------------------------------
# Case: 4
# ------------------------------------------------------------------------------
# b n1 n2 n3 n4 n5 n6
P = Polyhedron(ieqs=[( 0, 1, 0, 0, 0, 0, 0),
( 0, 0, 1, 0, 0, 0, 0),
( 0, 0, 0, 1, 0, 0, 0),
( 0, 0, 0, 0, 1, 0, 0),
( 0, 0, 0, 0, 0, 1, 0),
( 0, 0, 0, 0, 0, 0, 1)])
R = PolynomialRing(QQ,'x', 6)
sm_4_null = Zeta.smurf.SMURF.from_polyhedron(P, R)
factor_4_null = (1 - p**-1)**6*(p - 3)*p**-3
zed_4_null = sm_4_null.evaluate()(x0=p**-1, x1=p**-2, x2=p**-3, x3=p**-1, x4=p**-1, x5=p**-1)
int_4_null = factor_4_null*zed_4_null
int_66_null = (int_1_null + int_2_null + int_3_null + int_4_null).simplify().factor().simplify()
|
import sys
import pathlib
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
import alpha_vantage
import plot_style
def show_frontier(symbol1, symbol2, interval='MONTHLY'):
returns1 = alpha_vantage.get_stock_returns_history(symbol1, interval)
returns2 = alpha_vantage.get_stock_returns_history(symbol2, interval)
if len(returns1) > len(returns2):
returns1 = returns1[-len(returns2):]
if len(returns2) > len(returns1):
returns2 = returns2[-len(returns1):]
mean_returns1 = np.mean(returns1)
variance1 = np.var(returns1)
standard_deviation1 = np.sqrt(variance1)
#print(f'Mean returns ({symbol1}) = {mean_returns1}')
#print(f'Variance ({symbol1}) = {variance1}')
#print(f'Standard Deviation ({symbol1}) = {standard_deviation1}')
mean_returns2 = np.mean(returns2)
variance2 = np.var(returns2)
standard_deviation2 = np.sqrt(variance2)
#print(f'Mean returns ({symbol2}) = {mean_returns2}')
#print(f'Variance ({symbol2}) = {variance2}')
#print(f'Standard Deviation ({symbol2}) = {standard_deviation2}')
correlation = np.corrcoef(returns1, returns2)[0][1]
#print(f'Correlation = {correlation}')
weights = []
for n in range(0, 101):
weights.append((1 - 0.01 * n, 0 + 0.01 * n))
returns = []
standard_deviations = []
portfolio_50_50_standard_deviation = None
portfolio_50_50_returns = None
plot_style.scatter()
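# Sweep the weight pairs and, for each (w1, w2), compute the classic two-asset
# portfolio statistics: expected return w1*mu1 + w2*mu2 and variance
# w1^2*s1^2 + w2^2*s2^2 + 2*w1*w2*s1*s2*rho; its square root is the risk plotted below.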
for w1, w2 in weights:
returns.append(w1 * mean_returns1 + w2 * mean_returns2)
variance = w1**2 * standard_deviation1**2 + w2**2 * standard_deviation2**2 + \
2 * w1 * w2 * standard_deviation1 * standard_deviation2 * correlation
standard_deviation = np.sqrt(variance)
standard_deviations.append(standard_deviation)
plt.scatter(standard_deviations[-1], returns[-1], color='#007bff')
if w1 == 0.5 and w2 == 0.5:
portfolio_50_50_standard_deviation = standard_deviations[-1]
portfolio_50_50_returns = returns[-1]
plt.scatter(portfolio_50_50_standard_deviation,
portfolio_50_50_returns, marker='x', color='red', alpha=1, s=320)
x_padding = np.average(standard_deviations) / 25
plt.xlim(min(standard_deviations) - x_padding,
max(standard_deviations) + x_padding)
y_padding = np.average(returns) / 25
plt.ylim(min(returns) - y_padding, max(returns) + y_padding)
plt.gca().set_xticklabels(['{:.2f}%'.format(x*100)
for x in plt.gca().get_xticks()])
plt.gca().set_yticklabels(['{:.2f}%'.format(y*100)
for y in plt.gca().get_yticks()])
plt.title(f'Efficient Frontier ({symbol1} and {symbol2})')
plt.xlabel('Risk')
plt.ylabel('Return')
pathlib.Path('img/frontier2').mkdir(parents=True, exist_ok=True)
plt.savefig(f'img/frontier2/{symbol1}-{symbol2}.png')  # save before show(), which may clear the figure
plt.show()
plt.close()
show_frontier(sys.argv[1], sys.argv[2])
|
import FWCore.ParameterSet.Config as cms
# -*-TCL-*-
#seqs/mods to make MuIsoDeposits
from RecoMuon.MuonIsolationProducers.muIsoDeposits_cff import *
# sequences suggested for reco (only isoDeposits are produced at this point)
muIsolation_muons = cms.Sequence(muIsoDeposits_muons)
muIsolation_ParamGlobalMuons = cms.Sequence(muIsoDeposits_ParamGlobalMuons)
muIsolation_ParamGlobalMuonsOld = cms.Sequence(muIsoDeposits_ParamGlobalMuonsOld)
#standard sequence
muIsolation = cms.Sequence(muIsolation_muons)
|
import discord
from discord.ext import commands
import calendar
import datetime
class Leader(commands.Cog):
def __init__(self, bot):
self.bot = bot
async def create_queue(self):
await self.bot.SQL.connect()
gymList = (await self.bot.SQL.fetch_all_list((await self.bot.SQL.query("SELECT user_fk FROM challengers WHERE active=1 and name=\"Gym Leader\" ORDER BY id ASC;")),"user_fk"))
eliteList = await (await self.bot.SQL.query("SELECT user_fk, users.friendCode FROM challengers INNER JOIN users ON users.id=challengers.user_fk WHERE active=1 AND name=\"Elite Four\" ORDER BY challengers.id ASC;")).fetchall()
self.bot.SQL.disconnect()
self.leaderQueue = {\
"gym" : {},\
"elite": {}}
for gym in gymList:
self.leaderQueue["gym"][gym] = []
for elite in eliteList:
self.leaderQueue["elite"][elite] = []
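# self.leaderQueue maps a leader type to {leader user id: [challenger user ids]},
# e.g. {"gym": {<leader id>: [<challenger id>, ...]}, "elite": {...}}; it lives only
# in memory and is rebuilt on demand by create_queue().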
@commands.group(pass_context=True)
async def leader(self,ctx):
"""Manage and List Frontier League Leaders"""
if ctx.invoked_subcommand is None:
await ctx.send("You need a subcommand for this to work! Please try again")
@leader.command(pass_context=True)
async def list(self,ctx,*,ltype : str = None):
"""List all leaders of given type. If no type given, lists all leaders"""
#leader list [ltype]
isError=False
if ltype.replace(" ","")[:3].lower() == "gym" if ltype is not None else True:
url1 = "https://pokemongo.gamepress.gg/sites/pokemongo/files/2018-02/Badge_GymLeader_GOLD_01.png"
await self.bot.SQL.connect()
userList = (await self.bot.SQL.fetch_all_list((await self.bot.SQL.query("SELECT user_fk FROM challengers WHERE active=1 and name=\"Gym Leader\" ORDER BY id ASC;")),"user_fk"))
if len(userList) != 0:
badgeCursor = (await self.bot.SQL.query("\
SELECT challengers.user_fk, badges.name, badges.description, users.friendCode, badges.thumbnail_path\
FROM challengers\
INNER JOIN badges ON badges.id=challengers.badge_fk\
INNER JOIN users ON challengers.user_fk=users.id\
WHERE challengers.active=1 and challengers.name='Gym Leader'\
ORDER BY challengers.id ASC;"))
result = await badgeCursor.fetchall()
self.bot.SQL.disconnect()
for row in result:
user = ctx.message.guild.get_member(row['user_fk'])
em = discord.Embed(name="Gym Leader", description="Gym Leader")
if row['thumbnail_path'] != None:
url1 = row['thumbnail_path']
em.set_thumbnail(url=url1)
if user != None:
em.add_field(name="Discord Username",value=user.mention,inline=True)
em.add_field(name="Friend Code",value=row['friendCode'],inline=True)
em.add_field(name="Badge Title",value=row['name'],inline=True)
em.add_field(name="Challenge Description",value=str(row['description']),inline=True)
await ctx.send(embed=em)
else:
self.bot.SQL.disconnect()
else:
isError=True
if ltype.replace(" ","")[:9].lower() == "elitefour" if ltype is not None else True:
url1 = "https://i.imgur.com/l48LJkw.png"
await self.bot.SQL.connect()
userList = await (await self.bot.SQL.query("SELECT user_fk, users.friendCode FROM challengers INNER JOIN users ON users.id=challengers.user_fk WHERE active=1 AND name=\"Elite Four\" ORDER BY challengers.id ASC;")).fetchall()
self.bot.SQL.disconnect()
for userDict in userList:
user = ctx.message.guild.get_member(userDict["user_fk"])
em = discord.Embed(name="Elite Four",description="Elite Four")
em.set_thumbnail(url=url1)
em.add_field(name="Discord Username",value=user.mention,inline=True)
em.add_field(name="Friend Code",value=userDict["friendCode"],inline=True)
await ctx.send(embed=em)
else:
isError=True
if isError:
await ctx.send("I'm not sure I got that. Please try again")
@leader.command(pass_context=True)
@commands.has_any_role('Admin','Mod','admin')
async def add(self,ctx,ltype : str,user : discord.Member,desc : str = None,badgeName : str = None,badgeImageUrl : str = None,challengeMonth : str = calendar.month_name[(datetime.datetime.today().month+1 if datetime.datetime.today().month < 12 else 1)],challengeYear : int = datetime.datetime.today().year):
"""Adds a leader to the Frontier League. This command is for admins only"""
print("Adding a leader")
#if ctx.message.guild.id != 488144913230462989:
# await ctx.send("ya can't trick me!")
# return
challengeMonthNum = list(calendar.month_name).index(challengeMonth)
await self.bot.SQL.connect()
if ltype.replace(" ","")[:3].lower() == "gym":
cursor = await self.bot.SQL.query("SELECT max(id) FROM badges")
await self.bot.SQL.query("\
REPLACE INTO badges\
SET description=\"{}\",\
name=\"{}\",\
thumbnail_path=\"{}\",\
start_available=\"{}-{}-01\",\
end_available=\"{}-{}-{}\";".format(\
desc,badgeName,badgeImageUrl,\
challengeYear,challengeMonthNum,\
challengeYear,challengeMonthNum,calendar.monthrange(challengeYear,challengeMonthNum)[1]))
badgeid = (await self.bot.SQL.fetch_all_list((await self.bot.SQL.query("\
SELECT id FROM badges\
WHERE\
name=\"{}\" and start_available=\"{}-{}-01\";".format(\
badgeName,challengeYear,challengeMonthNum))),"id"))[0]
await self.bot.SQL.query("\
REPLACE INTO challengers\
SET name=\"Gym Leader\",\
user_fk={},\
badge_fk={},\
active=1,\
description=\"{}, {}\";".format(\
user.id,badgeid,challengeMonth,challengeYear))
gymRole = discord.utils.get(ctx.message.guild.roles,name="Gym Leader")
await ctx.send("Testing")
await user.add_roles(gymRole)
await ctx.send("Gym Leader added:\nLeader: {}\nDescription: {}\nBadge Name: {}".format(user.mention,desc,badgeName))
elif ltype.replace(" ","")[:9].lower() == "elitefour":
##Adds an Elite Four Member
await self.bot.SQL.query("\
REPLACE INTO challengers\
SET user_fk={},\
name=\"Elite Four\",\
active=1,\
description=\"{}, {}\";".format(\
user.id,challengeMonth,challengeYear))
eliteRole = discord.utils.get(ctx.message.guild.roles,name="Elite Four")
await user.add_roles(eliteRole)
await ctx.send("Elite Four Added:\n{}".format(user.mention))
else:
await ctx.send("I'm not sure I got that. Please try again")
self.bot.SQL.disconnect()
@add.error
async def add_error(cog,ctx,error):
if isinstance(error, commands.CheckFailure):
await ctx.send("You do not have the permission to add leaders. Please contact an Admin")
elif isinstance(error.original, discord.Forbidden):
await ctx.send("The bot doesn't have permission to do that. Please contact an Admin")
else:
await ctx.send("Something went wrong. Please contact an admin")
print(error.original)
@leader.command(pass_context=True)
@commands.has_any_role('Admin','Mod','admin','Gym Leader')
async def remove(self,ctx,ltype : str,user : discord.Member):
"""Sets a leader as inactive in the Frontier League"""
if ltype.replace(" ","")[:3].lower() == "gym":
await self.bot.SQL.connect()
await self.bot.SQL.query("UPDATE challengers SET active=0 WHERE user_fk={} and name=\"Gym Leader\";".format(user.id))
self.bot.SQL.disconnect()
gymRole = discord.utils.get(ctx.message.guild.roles,name="Gym Leader")
await user.remove_roles(gymRole)
await ctx.send("Gym Leader removed: {}".format(user.mention))
elif ltype.replace(" ","")[:9].lower() == "elitefour":
await self.bot.SQL.connect()
await self.bot.SQL.query("UPDATE challengers SET active=0 WHERE user_fk={} and name=\"Elite Four\";".format(user.id))
self.bot.SQL.disconnect()
eliteRole = discord.utils.get(ctx.message.guild.roles,name="Elite Four")
await user.remove_roles(eliteRole)
await ctx.send("Elite Four Member removed: {}".format(user.mention))
else:
await ctx.send("I'm not sure I got that. Please try again")
@remove.error
async def remove_error(self,ctx,error):
if isinstance(error, commands.CheckFailure):
await ctx.send("You do not have the permission to remove leaders. Please contact an Admin")
elif isinstance(error.original, discord.Forbidden):
await ctx.send("The bot doesn't have permission to do that. Please contact an Admin")
else:
await ctx.send("Something went wrong. Please contact an admin")
print(error.original)
@leader.command(pass_context=True)
@commands.has_role('Frontier League Participant')
async def challenge(self,ctx,ltype : str,user : discord.Member, challenger : discord.Member = None):
"""Challenge a leader
leader challenge <ltype> <@user> [@challenger]
"""
if 'leaderQueue' not in vars(self):
await self.create_queue()
if challenger is None:
challenger = ctx.message.author
await self.bot.SQL.connect()
if ltype.replace(" ","")[:3].lower() == "gym":
gymLeader = (await self.bot.SQL.query("SELECT user_fk FROM challengers WHERE active=1 and name=\"Gym Leader\" and user_fk={} ORDER BY id ASC;".format(user.id))).fetchone()#[0]
await ctx.send(gymLeader)
if gymLeader is None:
await ctx.send("{} is not a Gym Leader".format(user.mention))
else:
if user.id not in self.leaderQueue["gym"]:
self.leaderQueue["gym"][user.id] = []
self.leaderQueue["gym"][user.id].append(challenger.id)
elif ltype.replace(" ","")[:9].lower() == "elitefour":
eliteFour = await (await self.bot.SQL.query("SELECT user_fk, users.friendCode FROM challengers INNER JOIN users ON users.id=challengers.user_fk WHERE active=1 AND name=\"Elite Four\" and challengers.user_fk={} ORDER BY challengers.id ASC;".format(user.id))).fetchall()
if eliteFour is None:
await ctx.send("{} is not in the Elite Four".format(user.mention))
else:
if user.id not in self.leaderQueue["elite"]:
self.leaderQueue["elite"][user.id] = []
self.leaderQueue["elite"][user.id].append(challenger.id)
@challenge.error
async def chal_error(self,ctx,error):
if isinstance(error, commands.CommandInvokeError):
await ctx.send("Something went wrong, please try again in a moment")
print(error)
await self.create_queue()
await ctx.command.invoke(ctx)
else:
await ctx.send("Exception not caught")
@leader.command(pass_context=True)
@commands.has_role('Frontier League Participant')
async def listQueue(self,ctx,leader : discord.Member):
if 'leaderQueue' not in vars(self):
await self.create_queue()
value = ""
if leader.id in self.leaderQueue["gym"]:
value += "Gym:\n"
for userId in self.leaderQueue["gym"][leader.id]:
user = ctx.message.guild.get_member(userId)
value += "{}\n".format(user.mention)
if leader.id in self.leaderQueue["elite"]:
value += "\nElite Four:\n"
for userId in self.leaderQueue["elite"][leader.id]:
user = ctx.message.guild.get_member(userId)
value += "{}\n".format(user.mention)
if value != "":
em = discord.Embed(name="Leader Queue",description="Leader Queue for {}".format(leader.mention))
em.add_field(name="Challengers", value=value)  # add_field requires a name
await ctx.send(embed=em)
else:
await ctx.send("{} has 0 challengers in their queue!".format(leader.mention))
@listQueue.error
async def lqueue_error(self,ctx,error):
if isinstance(error, commands.CommandInvokeError):
await ctx.send("Something went wrong, please try again in a moment")
print("{}\n\n\n\n\n\n".format(error))
await self.create_queue()
await ctx.command.invoke(ctx)
else:
await ctx.send("Exception not caught")
def setup(bot):
bot.add_cog(Leader(bot))
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pytorch_lightning.callbacks import Callback
class ExceptionCallback(Callback):
def on_exception(self, trainer, litmodel, exception):
print("saving checkpoints by Exception")
# TODO:save checkpoint
raise exception
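# Minimal usage sketch (names are illustrative; assumes a LightningModule `model`):
#   trainer = pytorch_lightning.Trainer(callbacks=[ExceptionCallback()])
#   trainer.fit(model)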
|
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input, Output, State
from label_label import label_top_disabled
from app import app
import dash
from HandlerWrapper import handler
from tools.serial.tools.list_ports import comports
import time
# from HandlerWrapper import mock_handler
layout = html.Div(children=[
html.Div(id='top-section',
children=html.Div(id='Logos',
children=label_top_disabled,
style={'height': '10vh',
'overflow': 'hidden'})
),
html.Hr(style={'margin-top': '1vh'}),
html.Div([html.Div(children=[
html.H5("Program Setting",
style={'margin-top': '7vh', 'margin-left': '2vw'}),
dcc.Tabs([
dcc.Tab(label='Basestation', children=[
html.Div(
children=[
html.Div(children=[html.H6("Channel"),
dcc.Dropdown(value='',
id='select-radio-channel',
placeholder='select channel',
options=[{'label':i, 'value':i} for i in range(0, 256, 32)]),
],
style={'flex': '50vw'}),
html.Div(children=[],
style={'flex': '50vw'})],
style={'display': 'flex'}
),
], id='type-base'),
dcc.Tab(label='Leaf', children=[
html.Div(
children=[
html.Div(children=[html.H6("Channel"),
dcc.Dropdown(value='',
id='select-radio-channel-leaf',
placeholder='select channel',
options=[{'label':i, 'value':i} for i in range(0, 256, 32)]),
html.H6("Sample Interval (min)"),
dcc.Dropdown(value='',
id="select-sample-interval",
placeholder="select sample interval",
options=[{'label':i, 'value':i} for i in iter([1, 3, 5, 10, 15, 20, 30, 60, 120, 180])]),
],
style={'flex': '50vw'}),
html.Div(children=[],
style={'flex': '50vw'})],
style={'display': 'flex'}
),
], id='type-leaf'),
dcc.Tab(label='Router', children=[
], id='type-router'),
], id='select-device-type-to-program'),
], id='program-setting-box',
style={'flex': '36vw', 'height': '89vh',
'border-right': '0.5px solid',
'border-top': '0.5px solid',
'margin-top': '0vh',
'margin-left': '0vh'}),
html.Div(children=[
html.H5("Select Port", style={'display': 'block',
'float': "left",
'margin-top': '30vh',
'margin-left': '10vw'}),
html.Div(
dcc.Dropdown(
id='port-dropdown-program',
style={'width': '40vw'},
placeholder='Select port...'
), style={'margin-left': '18vh'}),
html.Button(id='connect-button-program',
children="Connect",
n_clicks=0,
style={'color': 'white',
'margin-top': '15vh',
'margin-left': ' 41.5vw',
'background-color': '#2185D0',
'display': 'inline'}),
dcc.Link(html.Button(id='quit-button',
children="Cancel",
n_clicks=0,
style={'color': '#414141',
'margin-top': '15vh',
'margin-left': ' 1vw',
'display': 'inline',
'background-color': '#E0E1E2'}), href='/label_start'),
dcc.Interval(
id='interval-component-program',
interval=1 * 1000,
n_intervals=0),
dcc.Interval(
id='programming-pin-interval',
interval=1 * 1000,
n_intervals=0),
html.Div(id='program-status')
], id='select-port-box',
style={'flex': '64vw',
'border-top': '0.5px solid',
'height': '89vh'})
],
id='program-select-port-and-program-setting',
style={'display': 'flex',
'margin-top': '0vh',
'margin-left': '0vh'}),
])
# TODO: change this into a hidden element to avoid the usage of global variable
current_ports_program = {}
# TODO compile binaries and implement custom programming with different channel / sample interval
@app.callback([Output('connect-button-program', 'children'),
Output('connect-button-program', 'style'),
Output('connect-button-program', 'disabled'),
Output('port-dropdown-program', 'disabled'),
Output('program-status', 'children'),
Output('quit-button', 'disabled')],
[Input('connect-button-program', 'n_clicks'),
Input('programming-pin-interval', 'n_intervals')],
[State('select-device-type-to-program', 'value'),
State('port-dropdown-program', 'value')])
def program_device(n_clicks, n_intervals, device_type, port):
busy_style = {'color': 'white', 'margin-top': '15vh', 'margin-left': ' 41.5vw', 'background-color': '#13900B',
'display': 'inline'}
idle_style = {'color': 'white', 'margin-top': '15vh', 'margin-left': ' 41.5vw', 'background-color': '#2185D0',
'display': 'inline'}
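# Two trigger paths share this callback: the 1 s 'programming-pin-interval' polls the
# handler so the button reflects whether flashing is still in progress, while a click on
# the connect button starts programming for the selected device type and port.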
ctx = dash.callback_context
if ctx.triggered[0]['prop_id'].split('.')[0] == 'programming-pin-interval':
time.sleep(0.2)
if handler.programming:
return "Programming...", busy_style, True, True, "", True
else:
if handler.programmingStatus:
return "Program", idle_style, False, False, "Programming successful", False
else:
return "Program", idle_style, False, False, "", False
if n_clicks > 0:
if not port:
return "Program", idle_style, False, False, "No port selected", False
if device_type == 'tab-1':
print('program basestation')
handler.programBasestation(port)
return "Programming...", busy_style, True, True, "", True
elif device_type == "tab-2":
print('program leaf')
handler.programLeaf(port)
return "Programming...", busy_style, True, True, "", True
elif device_type == "tab-3":
print('program router')
handler.programRouter(port)
return "Programming...", busy_style, True, True, "", True
else:
return "Program", idle_style, False, False, "", False
# Select Port Dropdown
@app.callback(Output('port-dropdown-program', 'options'),
[Input('interval-component-program', 'n_intervals'),
Input('port-dropdown-program', 'value')])
def update_port(n_intervals, port_selected):
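# Polled once per second: list the currently attached serial ports, drop Bluetooth
# entries, and refresh the module-level cache backing the dropdown options.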
new_ports = {}
ports = sorted(comports())
for port, desc, hwid in ports:
if "Bluetooth" not in port:
new_ports[port] = port
if new_ports != current_ports_program:
current_ports_program.clear()
current_ports_program.update(new_ports)
return [{'label': k, 'value': k} for k in current_ports_program.keys()]
|
# Author: Laura Kulowski
'''
Create a r-theta grid, using Chebyshev points for r and Gauss-Legendre points for theta.
Define the Chebyshev and Gauss-Legendre weights for integration using quadrature rules
Define the Chebyshev differentiation matrix for taking derivatives on the r-grid.
Define the Legendre polynomials and their derivatives for spherical harmonic expansions
and taking derivatives on the theta-grid.
'''
import numpy as np
class Grid():
def __init__(self, nr, nt, r_inner, r_outer):
'''
: param nr: number of radial grid points (chebyshev)
: param nt: number of theta grid points (gauss-legendre)
: param r_inner: inner boundary of the radial grid (0 < r_inner < 1)
: param r_outer: outer boundary of the radial grid (0 < r_outer <= 1)
'''
self.nr = nr
self.nt = nt
self.r_inner = r_inner
self.r_outer = r_outer
self.leg_deg = self.nt
self.xc = None
self.xcs = None
self.x = None
self.w = None
self.wr = None
self.theta = None
def cgl_points(self):
'''
chebyshev-gauss-legendre (cgl) points in interval [-1, 1]
: return self.xc: chebyshev-gauss-legendre points
'''
js = np.linspace(0, self.nr, self.nr + 1)
self.xc = np.cos(js * np.pi / self.nr)
self.xc = self.xc[::-1]
return self.xc
def cheby_shift(self):
'''
chebyshev-gauss-legendre points shifted into interval [a, b]
: return self.xcs: shifted chebyshev-gauss-legendre points
'''
js = np.linspace(0, self.nr, self.nr + 1)
self.xcs = 0.5 * (self.r_inner + self.r_outer) + 0.5 * (self.r_outer - self.r_inner) * np.cos(js * np.pi / self.nr)
self.xcs = self.xcs[::-1]
return self.xcs
def gauss_leg_xw(self):
'''
gauss-legendre points and weights
: return [self.x, self.w]: [gauss-legendre points, gauss-legendre weights]
'''
[self.x, self.w] = np.polynomial.legendre.leggauss(self.nt)
return self.x, self.w
def cheby_wr(self):
'''
chebyshev weights for quadrature
: return self.wr: chebyshev quadrature weights
'''
w_r = np.pi/self.nr * np.ones(self.nr + 1)
w_r[0] = np.pi / (2. * self.nr)
w_r[-1] = np.pi / (2. * self.nr)
self.wr = w_r
return self.wr
def theta_lat_grids(self):
'''
theta grid points in radials and colatitude, useful when making plots
: return self.theta, lats_gl: theta points, colatitude points
'''
[xl, w] = self.gauss_leg_xw()
self.theta = np.arccos(xl)
theta_lat = np.pi/2 - self.theta
lats_gl = theta_lat * 180./np.pi
return self.theta, lats_gl
def T(self, n, x):
''' cheby polys, abs(x) <= 1 '''
return np.cos(n * np.arccos(x))
def cheby_diff_matrix(self):
'''
chebyshev differentiation matrix for taking derivatives on the r-grid
: return DN: chebyshev differentiation matrix
'''
self.rcs = self.cheby_shift()
DN = np.zeros([self.nr+1, self.nr+1])
# i = row, j = column
# off-diagonal terms
for i in range(self.nr + 1):
for j in range(self.nr + 1):
# off-diagonal entries
if i != j:
if i == 0 or i == self.nr:
ci = 2.
else:
ci = 1.
if j == 0 or j == self.nr:
cj = 2.
else:
cj = 1.
d = (ci / cj) * (-1.)**(i + j) / (self.rcs[i] - self.rcs[j])
DN[i, j] = d
# diagonal terms
for i in range(self.nr + 1):
DN[i, i] = -1. * sum(DN[i, :])
return DN
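# The diagonal above uses the "negative sum trick": every row of DN sums to zero, so a
# constant function is differentiated to exactly zero, which improves numerical accuracy.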
def leg_P(self):
'''
legendre polynomials up to degree self.leg_deg
: return P_matrix: matrix of legendre polynomials evaluated at chebyshev points (xl, or cos(theta));
: shape (maximum degree, nt), that is, the first row is the legendre polynomial of
: degree zero evaluated at each chebyshev point on the grid
'''
xl, w = self.gauss_leg_xw()
P_matrix = np.zeros([self.leg_deg + 1, np.size(xl)])
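# Built with the standard three-term (Bonnet) recurrence,
# n*P_n(x) = (2n - 1)*x*P_{n-1}(x) - (n - 1)*P_{n-2}(x), seeded by P_0 = 1 and P_1 = x.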
for ii in range(self.leg_deg + 1):
if ii == 0:
P_matrix[0, :] = np.ones(np.size(xl))
if ii == 1:
P_matrix[1, :] = xl
if ii > 1:
P_matrix[ii, :] = (2. * ii - 1.)/ii * xl * P_matrix[ii-1, :] - (ii - 1.)/ii * P_matrix[ii-2, :]
return P_matrix
def leg_P_deriv(self):
'''
legendre polynomial derivatives
: return dP_dz: legendre derivatives with respect to chebyshev points, xl [-1, 1], evaluated at
: each chebyshev point on the grid; shape (maximum degree, number of theta grid points)
: return dP_dtheta: legendre derivatives with respect to theta points [0, pi], evaluated at each
: chebyshev point on the grid; shape (maximum degree, nt)
'''
xl, w = self.gauss_leg_xw()
theta, lats_gl = self.theta_lat_grids()
P_matrix = self.leg_P()
dP_dz = np.zeros([self.leg_deg + 1, self.nt])
for ii in range(self.leg_deg + 1):
if ii == 0:
dP_dz[ii, :] = np.zeros(self.nt)
else:
dP_dz[ii, :] = ii/(xl**2 - 1.) * (xl*P_matrix[ii, :] - P_matrix[ii-1, :])
dP_dtheta = -np.sin(theta) * dP_dz
return dP_dz, dP_dtheta
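# Minimal usage sketch (hypothetical parameters): build a grid and check the spectral
# derivative of f(r) = r**2 against the analytic derivative 2*r.
#   grid = Grid(nr=16, nt=8, r_inner=0.35, r_outer=1.0)
#   r = grid.cheby_shift()
#   DN = grid.cheby_diff_matrix()
#   max_err = np.max(np.abs(DN @ r**2 - 2*r))   # expected to be near machine precision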
|
from celery.utils.log import get_task_logger
from applications.async_update.celery import app
from applications.feed.service import (
update_feeds, get_feeds_for_update
)
from applications.subscription.service import (
update_subscriptions,
get_subscriptions_for_update
)
logger = get_task_logger(__name__)
@app.task
def update_subscriptions_task():
feeds_for_update = get_feeds_for_update()
logger.info(f'Feeds for update: {feeds_for_update}')
update_feeds(feeds_for_update)
subs_for_update = get_subscriptions_for_update()
logger.info(f'Subscriptions for update: {subs_for_update}')
update_subscriptions(subs_for_update)
app.conf.beat_schedule.update({
'update_subscriptions': {
'task': 'applications.async_update.tasks.update_subscriptions_task',
'schedule': 60.0,
}
})
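# Note: the 60-second schedule above only fires when a beat scheduler runs alongside the
# workers, e.g. something like `celery -A applications.async_update.celery beat`
# (module path assumed from the import at the top of this file).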
|
# -*- coding: utf-8 -*-
from rest_framework.serializers import *
from Data.models import *
class IPaddressSerializer(ModelSerializer):
class Meta(object):
model = IPaddressModel
fields = '__all__'
class PortSerializer(Serializer):
id = IntegerField()
ipid = IntegerField()
port = IntegerField()
service = CharField(max_length=100)
info = DictField()
update_time = IntegerField()
class WebsiteSerializer(ModelSerializer):
class Meta(object):
model = WebsiteModel
fields = '__all__'
class WebsiteTechSerializer(ModelSerializer):
class Meta(object):
model = WebsiteTechModel
fields = '__all__'
class WebsiteWafSerializer(ModelSerializer):
class Meta(object):
model = WebsiteWafModel
fields = '__all__'
class WebsiteCDNSerializer(Serializer):
id = IntegerField()
wid = IntegerField()
hascdn = BooleanField()
title = CharField(max_length=500)
ipaddress = ListField(default=[])
update_time = IntegerField()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2020 Huawei Device Co., Ltd.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
try:
from queue import Queue
except ImportError:
from Queue import Queue
from collections import defaultdict
from prompt_toolkit.styles import style_from_dict
from prompt_toolkit.token import Token
from prompt_toolkit.mouse_events import MouseEventTypes
from hb.common.utils import read_json_file
def get_style(style_type):
if style_type == 'terminal':
return style_from_dict({
Token.Separator: '#75c951',
Token.QuestionMark: '#5F819D',
Token.Selected: '', # default
Token.Pointer: '#FF9D00 bold', # AWS orange
Token.Instruction: '', # default
Token.Answer: '#FF9D00 bold', # AWS orange
Token.Question: 'bold',
})
if style_type == 'answer':
return style_from_dict({
Token.Separator: '#75c951',
Token.QuestionMark: '#E91E63 bold',
Token.Selected: '#cc5454', # default
Token.Pointer: '#ed9164 bold',
Token.Instruction: '', # default
Token.Answer: '#f44336 bold',
Token.Question: '',
})
return None
def if_mousedown(handler):
def handle_if_mouse_down(cli, mouse_event):
if mouse_event.event_type == MouseEventTypes.MOUSE_DOWN:
return handler(cli, mouse_event)
else:
return NotImplemented
return handle_if_mouse_down
def get_deps(platform_json):
platform = read_json_file(platform_json)
subsystem_dict = {}
component_deps = defaultdict(list)
component_targets = {}
component_dirs = {}
for subsystem in platform['subsystems']:
subsystem_dict[subsystem['subsystem']] = []
for component in subsystem['components']:
cname = component['component']
subsystem_dict[subsystem['subsystem']].append(cname)
if 'components' in component['deps']:
deps = component['deps']['components']
if cname in deps:
deps.remove(cname)
else:
deps = []
component_deps[cname] = deps
component_targets[cname] = component['targets']
component_dirs[cname] = [
os.path.join(os.path.dirname(platform_json),
os.pardir, os.pardir, os.pardir, os.pardir, path)
for path in component['dirs']]
return subsystem_dict, component_deps, component_targets, component_dirs
def select_node(node, selected, nodes_from, deps):
queue = Queue()
queue.put(node)
nodes_from[node].append(node)
while not queue.empty():
now_node = queue.get()
if now_node not in selected:
selected.append(now_node)
for dep in deps.get(now_node, []):
if now_node != dep and dep not in selected:
queue.put(dep)
nodes_from[dep].append(node)
def deselect_node(node, selected, nodes_from, deps):
queue = Queue()
queue.put(node)
node_list = []
while not queue.empty():
now_node = queue.get()
for each_node in nodes_from[now_node]:
queue.put(each_node)
nodes_from[now_node].clear()
if now_node in selected:
selected.remove(now_node)
node_list.append(now_node)
    for n in node_list:
        queue.put(n)
while not queue.empty():
now_node = queue.get()
for dep in deps.get(now_node, []):
if dep not in selected:
continue
nodes_from[dep] = [n for n in nodes_from[dep] if n in selected]
if not len(nodes_from[dep]):
selected.remove(dep)
queue.put(dep)
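# Illustrative example (not from the original code): with
#   deps = {'a': ['b'], 'b': [], 'c': ['b']}
# select_node('a', selected, nodes_from, deps) marks 'a' and 'b' as selected and
# records in nodes_from that 'b' was pulled in by 'a'. A later
# deselect_node('a', ...) removes 'b' as well, because no other selected
# component still depends on it; if 'c' had also been selected, 'b' would stay.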
def get_deps_list(comp, deps):
queue = Queue()
    visited = {comp}
    deps_list = [comp]
    queue.put(comp)
    while not queue.empty():
        node = queue.get()
        for dep_comp in deps[node]:
if dep_comp in visited:
continue
deps_list.append(dep_comp)
queue.put(dep_comp)
visited.add(dep_comp)
return deps_list
def get_support_product(product_path):
product_dict = defaultdict(list)
for product in os.listdir(product_path):
product_json = os.path.join(product_path, product)
product_content = read_json_file(product_json)
board = product_content.get('board')
kernel = product_content.get('kernel')
platform = "{}_{}".format(board, kernel)
        # str.strip('.json') removes characters, not the suffix; use splitext instead
        product_dict[platform].append(os.path.splitext(product)[0])
return product_dict
def check_path(dep, path):
dep = dep[:-1] if dep.endswith("/") else dep
path = path[:-1] if path.endswith("/") else path
if len(dep) > len(path):
path_max = dep
path_min = path
else:
path_max = path
path_min = dep
if path_min in path_max:
path_sub = path_max.replace(path_min, "")
if path_sub == "":
return True
if path_sub.startswith('/') or path_sub.startswith(':'):
return True
return False
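# Illustrative examples (not from the original code): check_path matches a
# dependency label against a component directory prefix, e.g.
#   check_path('//foo/bar', '//foo/bar:target') -> True
#   check_path('//foo/bar', '//foo/barbaz')     -> False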
class Separator(object):
line = '-' * 15
def __init__(self, line=None):
if line:
self.line = f'\n{line}'
def __str__(self):
return self.line
|
import numpy as np
import time
from numba import jit, prange
import matplotlib.pyplot as plt
from ..util import f_SRM, h_exp_update, h_erlang_update
def ASA1(
time_end,
dt,
Lambda,
Gamma,
c=1,
Delta=1,
theta=0,
interaction=0,
lambda_kappa=20,
base_I=0,
I_ext_time=0,
I_ext=0,
a_cutoff=5,
use_LambdaGamma=True,
m_t0=0,
rho0=0,
h_t0=0,
kappa_type="exp",
):
""""""
if isinstance(Gamma, (float, int)):
Gamma = [Gamma]
if isinstance(Lambda, (float, int)):
Lambda = [Lambda]
Gamma = np.array(Gamma)
Lambda = np.array(Lambda)
if use_LambdaGamma:
Gamma = Gamma * Lambda
dim = Gamma.shape[0]
# Need dt = da
a_grid_size = int(a_cutoff / dt)
a_grid = np.linspace(0, a_cutoff, a_grid_size)
a_d_grid = np.vstack((a_grid,) * dim).T
# Shape must be in order: len, d, d
exp_La = np.exp(-Lambda * a_d_grid)
steps = int(time_end / dt)
dim = Gamma.shape[0]
# Init vectors
ts = np.linspace(0, time_end, steps)
A_t = np.zeros(steps)
rho_t = np.zeros((steps, a_grid_size))
m_t = np.zeros((steps, dim))
h_t = np.zeros(steps)
k_t = np.zeros(steps)
m_t[0] = m_t0
rho_t[0, 0] = 1 / dt
h_t[0] = h_t0
if isinstance(rho0, np.ndarray):
rho_t[0] = rho0
# interaction = J from our equations
J = interaction
da = dt
c = c * np.exp(-theta / Delta)
f_SRM_args = dict(c=c, Delta=Delta, theta=theta)
a_iplusone = np.exp(-Lambda * dt)
# a_iplusone = 1
h_args = dict(J=J, lambda_kappa=lambda_kappa, dt=dt)
# @jit(nopython=True, cache=True)
def optimized(rho_t, m_t, h_t):
for s in range(0, steps - 1):
x_fixed = I_ext + base_I if I_ext_time < dt * (s + 1) else base_I
num_age_steps = min(s, a_grid_size)
# A_t = rho_t[s, 0]
# if A_t < 1e-5:
# A_t = 1e-5
# print("Low activity at step", s, ":", A_t)
indices = s - np.arange(num_age_steps)
m0 = m_t0 * np.ones((a_grid_size - num_age_steps, dim))
m = np.concatenate((m_t[indices], m0), axis=0)
exp_m_t = exp_La * m
f = f_SRM(np.sum(exp_m_t, axis=1) + h_t[s], c=c, Delta=Delta, theta=theta)
# firing_prob = np.zeros(a_grid_size)
# for i in range(a_grid_size):
# firing_prob[i] = f[i] if i < 1 else 1
# firing_prob = np.clip(f * da, 0, 1)
firing_prob = 1 - np.exp(-f * da)
A_t[s] = np.sum(firing_prob * rho_t[s])
if A_t[s] < 1e-6:
A_t[s] = 1e-6
m_t[s + 1] = (
np.sum((a_iplusone * exp_m_t + Gamma).T * firing_prob * rho_t[s], axis=1) / A_t[s]
)
if kappa_type == "erlang":
h_t[s + 1], k_t[s + 1] = h_erlang_update(h_t[s], k_t[s], A_t[s], x_fixed, **h_args)
else:
h_t[s + 1] = h_exp_update(h_t[s], A_t[s], x_fixed, **h_args)
# h_t[s + 1] = h_t[s] + dt * lambda_kappa * (-h_t[s] + (A_t[s] * J + x_fixed))
# Mass loss
mass_transfer = rho_t[s] * firing_prob
# rho_t[s + 1] -= mass_transfer
            last_cell_mass = rho_t[s, -1]  # the last age bin necessarily spikes
            # Linear transport
            rho_t[s + 1, 1:] = rho_t[s, :-1] - mass_transfer[:-1]
            # Mass insertion
            rho_t[s + 1, 0] = np.sum(mass_transfer) + last_cell_mass
return rho_t, m_t, h_t
rho_t, m_t, h_t = optimized(rho_t, m_t, h_t)
A_t[-1] = rho_t[-1, 0]
mass_conservation = np.sum(rho_t * dt, axis=-1)
activity = rho_t[:, 0]
return ts, a_grid, rho_t, m_t, h_t, mass_conservation, A_t
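# Summary of the numerical scheme above (added for clarity): ages are
# discretized with da = dt; the per-bin firing probability over one step is
# 1 - exp(-f * da); the fired mass is reinserted at age 0; the surviving
# density is shifted by one age bin per time step
# (rho[s + 1, 1:] = rho[s, :-1] - fired mass); and the last age bin is assumed
# to fire with probability 1, so that total mass is conserved
# (checked by the returned mass_conservation array).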
|
#!/usr/bin/env python
# vim: noet sw=4 ts=4
import sys
import os
import argparse
import re
try:
	from version import Version
except ImportError:
	Version = '0.0.0rc0'
class KmRedact( object ):
def __init__( self ):
self._init_ip_hiding()
self._init_dns_hiding()
self._init_port_hiding()
self._init_email_hiding()
pass
def main( self ):
retval = 1
while True:
prog = os.path.splitext(
os.path.basename( sys.argv[ 0 ] )
)[ 0 ]
if prog == '__init__':
prog = 'km-redact'
p = argparse.ArgumentParser(
prog = prog,
				description = '''Obscure sensitive HIPAA-protected data in text.''',
epilog = '''Gosh, that was great!'''
)
p.add_argument(
'-D',
'--keep-dns',
dest = 'keep_dns',
action = 'store_true',
help = 'do not obscure DNS names',
)
p.add_argument(
'-I',
'--keep-ip',
dest = 'keep_ip',
action = 'store_true',
help = 'do not obscure IP addresses',
)
p.add_argument(
'-M',
'--keep-email',
dest = 'keep_email',
action = 'store_true',
help = 'do not obscure email addresses',
)
p.add_argument(
'-P',
'--port',
dest = 'keep_ports',
action = 'store_true',
help = 'do not obscure port numbers',
)
p.add_argument(
'-r',
'--ring',
dest = 'want_decoder_ring',
action = 'store_true',
help = 'show magic decoder ring',
)
p.add_argument(
'-s',
'--span',
dest = 'spans',
action = 'append',
metavar = 'l:r',
default = [],
				help = 'check character range (1-based)',
)
p.add_argument(
'-t',
'--token',
dest = 'fields',
metavar = 'N',
action = 'append',
default = [],
				help = 'check field (whitespace, 1-based)',
)
p.add_argument(
'--version',
action = 'version',
version = Version,
help = '{0} Version {1}'.format(
prog,
Version
)
)
default = []
p.add_argument(
'names',
nargs = '*',
metavar = 'FILE',
default = default,
help = 'files to process if not stdin',
)
self.opts = p.parse_args()
if len( self.opts.names ) == 0:
self.process()
else:
for name in self.opts.names:
with open( name ) as f:
self.process( f )
retval = 0
break
return retval
def process( self, f = sys.stdin ):
for line in f:
line = line.rstrip()
if not self.opts.keep_email:
line = self.hide_email( line )
if not self.opts.keep_ip:
line = self.hide_ip( line )
if not self.opts.keep_dns:
line = self.hide_dns( line )
if not self.opts.keep_ports:
line = self.hide_ports( line )
print line
pass
if self.opts.want_decoder_ring:
if not self.opts.keep_email:
self.dump_decoder( self.email_dict, 'Email Decoder' )
if not self.opts.keep_ip:
self.dump_decoder( self.ip_dict, 'IP Decoder' )
if not self.opts.keep_dns:
self.dump_decoder( self.dns_dict, 'DNS Decoder' )
if not self.opts.keep_ports:
self.dump_decoder( self.port_dict, 'PORT Decoder')
return
def dump_decoder( self, ring, title ):
print
print
banner = 'Decoder Ring: {0}'.format( title )
print banner
print '=' * len( banner )
print
for key in sorted( ring, key = lambda d: ring[d] ):
print '{0}\t{1}'.format(
ring[ key ],
key,
)
pass
return
def _init_email_hiding( self ):
self.email_dict = dict()
self.email_dict_count = 0
self.email_pattern = r'[0-9_a-zA-Z.]+@[0-9_a-zA-Z.]+'
return
def _hide_email( self, mo ):
email = mo.group( 0 )
if email not in self.email_dict:
self.email_dict_count += 1
replacement = '<EMAIL{0}>'.format(
self.email_dict_count
)
self.email_dict[ email ] = replacement
return self.email_dict[ email ]
def hide_email( self, line ):
# print 'hide_dns={0}'.format( line )
hidden_line = re.sub(
self.email_pattern,
self._hide_email,
line
)
return hidden_line
def _init_dns_hiding( self ):
self.dns_dict = dict()
self.dns_dict_count = 0
self.dns_pattern = r'[0-9a-zA-Z]+[.][0-9a-zA-Z.]+'
return
def _hide_dns( self, mo ):
host = mo.group( 0 )
# print 'host={0}'.format( host )
if host not in self.dns_dict:
self.dns_dict_count += 1
replacement = '<HOST{0}>'.format(
self.dns_dict_count
)
self.dns_dict[ host ] = replacement
return self.dns_dict[ host ]
def hide_dns( self, line ):
# print 'hide_dns={0}'.format( line )
hidden_line = re.sub(
self.dns_pattern,
self._hide_dns,
line
)
return hidden_line
def _init_port_hiding( self ):
self.port_dict = dict()
self.port_dict_count = 0
self.port_pattern = r':[0-9]+'
return
def _hide_port( self, mo ):
port = mo.group( 0 )
if port not in self.port_dict:
self.port_dict_count += 1
replacement = ':<PORT{0}>'.format( self.port_dict_count )
self.port_dict[ port ] = replacement
return self.port_dict[ port ]
def hide_ports( self, line ):
hidden_line = re.sub(
self.port_pattern,
self._hide_port,
line
)
return hidden_line
def _init_ip_hiding( self ):
self.ip_dict = dict()
self.ip_dict_count = 0
		# match full dotted quads (three-octet fragments still match, as before)
		self.ip_pattern = r'[0-9]{1,3}(?:[.][0-9]{1,3}){2,3}'
return
def _hide_ip( self, mo ):
ip = mo.group( 0 )
if ip not in self.ip_dict:
self.ip_dict_count += 1
replacement = '<IP{0}>'.format( self.ip_dict_count )
self.ip_dict[ ip ] = replacement
# print 'self.ip_dict={0}'.format( self.ip_dict )
return self.ip_dict[ ip ]
def hide_ip( self, line ):
hidden_line = re.sub(
self.ip_pattern,
self._hide_ip,
line
)
return hidden_line
if __name__ == '__main__':
exit( KmRedact().main() )
|
import numpy as np
class SpectScaler:
"""class that scales spectrograms that all have the
same number of frequency bins. Any input spectrogram
will be scaled by subtracting off the mean of each
frequency bin from the 'fit' set of spectrograms, and
then dividing by the standard deviation of each
frequency bin from the 'fit' set.
"""
def __init__(self):
pass
def fit(self, spects):
"""fit a SpectScaler.
takes a 3d array of spectrograms, aligns them all
horizontally, and then rotates to the right 90° so
that the columns are frequency bins. Then finds the
mean and standard deviation of each frequency bin,
which are used by `transform` method to
scale other spects
Parameters
----------
spects : 3-d numpy array
with dimensions (samples, frequency bins, time bins)
"""
if spects.ndim != 3:
raise ValueError("spects should be a 3-d array")
# concatenate all spects then rotate so
# Hz bins are columns, i.e., 'features'
one_long_spect_rotated = np.rot90(np.hstack(spects[:, :, :]))
        # per-frequency-bin statistics (columns of the rotated array are Hz bins)
        self.columnMeans = np.mean(one_long_spect_rotated, axis=0)
        self.columnStds = np.std(one_long_spect_rotated, axis=0)
    def _transform(self, spect):
        """Scale one spectrogram of shape (freq bins, time bins) with the fitted per-bin stats."""
        return (spect - self.columnMeans[:, np.newaxis]) / self.columnStds[:, np.newaxis]
def transform(self, spects):
"""transform spect"""
if any([not hasattr(self, attr) for attr in ["columnMeans", "columnStds"]]):
raise AttributeError(
"SpectScaler properties are set to None,"
"must call fit method first to set the"
"value of these properties before calling"
"transform"
)
if spects.ndim != 3:
raise ValueError("spects should be a 3-d array")
z_norm_spects = np.empty(spects.shape)
for i in range(spects.shape[0]):
z_norm_spects[i, :, :] = self._transform(spects[i, :, :])
return z_norm_spects
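# Minimal usage sketch (illustrative only; the shapes and random data below are
# stand-ins, not part of the original module):
if __name__ == "__main__":
    train = np.random.rand(10, 64, 100)  # (samples, frequency bins, time bins)
    test = np.random.rand(3, 64, 100)
    scaler = SpectScaler()
    scaler.fit(train)                # learn per-frequency-bin mean and std
    scaled = scaler.transform(test)  # z-score test with the fitted statistics
    print(scaled.shape)              # (3, 64, 100)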
|
import numpy as np
import cv2 as cv
import glob
def unsharp_mask(img, blur_size = (9,9), imgWeight = 1.5, gaussianWeight = -0.5):
gaussian = cv.GaussianBlur(img, (5,5), 0)
return cv.addWeighted(img, imgWeight, gaussian, gaussianWeight, 0)
for opt in ["capsule"]:
for name in glob.glob("./train/"+opt+"/*"):
#img=cv.imread(name,cv.IMREAD_UNCHANGED)
img=cv.imread(name,cv.IMREAD_GRAYSCALE)
print(img.shape)
top=193
bottom=192
left=5
right=5
res=img[0:215, 10:600]
result = cv.copyMakeBorder(res, top, bottom, left, right, cv.BORDER_REPLICATE)
print(result.shape)
img=result
img = cv.blur(img, (5, 5))
img = unsharp_mask(img)
img = unsharp_mask(img)
img = unsharp_mask(img)
img = cv.threshold(img, 0, 255, cv.THRESH_BINARY_INV + cv.THRESH_OTSU)[1]
img=cv.Canny(img,50,150)
horizontal_kernel = cv.getStructuringElement(cv.MORPH_RECT, (25,1))
        vertical_kernel = cv.getStructuringElement(cv.MORPH_RECT, (1, 25))
detected_lines = cv.morphologyEx(img, cv.MORPH_OPEN, horizontal_kernel, iterations=2)
cnts = cv.findContours(detected_lines, cv.RETR_EXTERNAL, cv.CHAIN_APPROX_SIMPLE)
cnts = cnts[0] if len(cnts) == 2 else cnts[1]
for c in cnts:
cv.drawContours(img, [c], -1, (0,0,0), 2)
detected_lines = cv.morphologyEx(img, cv.MORPH_OPEN, vertical_kernel, iterations=2)
cnts = cv.findContours(detected_lines, cv.RETR_EXTERNAL, cv.CHAIN_APPROX_SIMPLE)
cnts = cnts[0] if len(cnts) == 2 else cnts[1]
for c in cnts:
cv.drawContours(img, [c], -1, (0,0,0), 2)
result=img
cv.imwrite(name.replace("train","train1"), result)
|
#!/usr/bin/env python
"""Create a combined notebook. The first path is the assignment notebook.
Remaining paths are student notebooks.
"""
import argparse
import logging
import os
import sys
import nbformat
import nbformat.reader
import nbcollate as nbc
from minimalkeys import minimal_keys
from . import nbcollate
def safe_read(nbf):
"""A wrapper for nbformat.read, that prints a warning and returns None on
bad notebooks.
"""
try:
return nbformat.read(nbf, as_version=4)
except nbformat.reader.NotJSONError:
print('while reading', nbf)
def capitalize(s):
"""Upcase the first character in a string s."""
return s[:1].upper() + s[1:] if s else s
def map_if_uniq(fn, seq):
"""Return fn mapped across seq, if this doesn't conflate distinct items."""
out = list(map(fn, seq))
return out if len(set(out)) == len(set(seq)) else seq
def collate(master_nb_path, submission_paths, args):
"""Collate notebooks.
Arguments
---------
master_nb_path: str
The master notebook.
submission_paths: [str]
A list of notebook file pathnames.
"""
if args.verbose:
logging.basicConfig(format='%(message)s', level=logging.INFO)
submission_nbs = [safe_read(nbf) for nbf in submission_paths]
submission_nbs = [collated_nb for collated_nb in submission_nbs if collated_nb]
master_nb = safe_read(master_nb_path)
assert master_nb
labels = None
if args.label:
labels = minimal_keys(submission_paths, split=r'([\w-]+)')
labels = map_if_uniq(lambda s: s.replace('-', ' '), labels)
labels = map_if_uniq(capitalize, labels)
collated_nb = nbcollate(master_nb, submission_nbs, labels=labels)
if not args.label:
nbc.remove_duplicate_answers(collated_nb)
# nbc.sort_answers(collated_nb)
suffix = "-collated"
root, ext = os.path.splitext(args.notebook_files[0])
collated_nb_path = "{}{}{}".format(root, suffix, ext)
if args.out:
collated_nb_path = os.path.join(
args.out, os.path.split(collated_nb_path)[1])
if not args.force and os.path.exists(collated_nb_path):
        # FIXME race condition; instead open w/ os.O_CREAT | os.O_WRONLY
err = FileExistsError()
err.filename = collated_nb_path
raise err
if not args.dry_run:
with open(collated_nb_path, 'w') as f:
nbformat.write(collated_nb, f)
print('wrote', collated_nb_path)
def main(args=sys.argv[1:]):
"Create a collated notebook."
parser = argparse.ArgumentParser(description=__doc__)
nb_nargs = '*' if '--version' in args else '+'
parser.add_argument('-f', '--force', action='store_true',
help="Force overwrite existing file")
    parser.add_argument('-n', '--dry-run', action='store_true', help="Dry run")
parser.add_argument('-o', '--out', type=str, help="Output directory")
parser.add_argument('-v', '--verbose', action='store_true')
parser.add_argument('--label', action='store_true',
help="Label answers by notebook")
parser.add_argument('--version', action='store_true')
parser.add_argument('notebook_files', nargs=nb_nargs, metavar='NOTEBOOK_FILE')
args = parser.parse_args(args)
if args.version:
print('nbcollate version', nbc.__version__)
return
if not args.notebook_files:
parser.error('the following arguments are required: NOTEBOOK_FILE')
master_file, *submission_files = args.notebook_files
# Remove the master file from the answer files. This allows the CLI
# to be used in the pattern `nbcollate master.ipynb *.ipynb`.
if master_file in submission_files:
submission_files = [f for f in submission_files if f != master_file]
try:
collate(master_file, submission_files, args)
except FileExistsError:
sys.stderr.write("Output file already exists. "
"Repeat with --force to replace it.\n")
sys.exit(1)
|
while True:
    n = int(input('Which number would you like the times table for: '))
print('=+=' * 5)
if n < 0:
break
for c in range(1, 11):
print(f'{n} x {c:2} = {n*c:2}')
print('=+=' * 5)
print('TIMES TABLE PROGRAM ENDED. COME BACK ANY TIME.')
|
#-*- coding: UTF-8 -*-
import json
import datetime
from marshmallow_sqlalchemy import SQLAlchemyAutoSchema
from sqlalchemy import *
from sqlalchemy.types import *
from sqlalchemy.orm import *
from ..engine.db import Base
class City(Base):
__table_args__ = { 'schema': 'mente' }
__tablename__ = 'city_info'
properties_name = Column(String(30), primary_key=True)
value = Column(JSON)
def __init__(self, *args):
engine = args[0]
self.db = engine
Base.metadata.create_all(self.db.engine)
def __repr__(self):
return '<City %r>' % self.code
def __json__(self, o):
for key in self.__mapper__.columns.keys():
setattr(self, key, o[key])
def gets(self):
return self.db.session.query(City).all()
def add(self, c):
try:
self.db.session.add(c)
self.db.session.commit()
except:
self.db.session.rollback()
raise
def add_all(self, cs):
try:
self.db.session.add_all(cs)
self.db.session.commit()
except:
self.db.session.rollback()
raise
def delete(self, code):
try:
self.db.session.query(City).filter(City.code==code).delete()
self.db.session.commit()
except:
self.db.session.rollback()
raise
class OptionPatitions(Options):
@declared_attr
def id(cls):
return Column(Integer)
@declared_attr
def value(cls):
return Column(String(30))
@declared_attr
def order(cls):
return Column(Integer)
def get_option_citys(self):
return self.db.session.execute(text(getPatitions(cId, language)))
class CitySchema(SQLAlchemyAutoSchema):
class Meta:
model = City
load_instance = True
|
"""
Given:
an sqlite database containing stackoverflow data as constructed by stackoverflow_to_sqlite.py
Construct:
a text file with one error message per line
"""
import re
import json
import sqlite3
import argparse
import collections
GOLANG_PATTERN = re.compile(r'(([^:]*\.go)\:(\d+)([^\d][^ ]*)?\:\ )(.*)')
def extract_error_golang(s):
r = GOLANG_PATTERN.match(s)
if r is None:
return None
else:
return s
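# Illustrative examples (not from the original code) of lines the Go pattern
# accepts: "main.go:12:5: undefined: foo" or "pkg/util.go:3: missing return";
# lines without a "<file>.go:<line>...: " prefix return None.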
def extract_error_python(s):
if 'Error:' in s:
return s
else:
return None
def main():
parser = argparse.ArgumentParser()
parser.add_argument('stackoverflow_db')
parser.add_argument('output')
parser.add_argument('--language', type=str, choices=['python', 'golang'], required=True)
args = parser.parse_args()
# Initialize inverted index
post_ids_by_pattern_index = collections.defaultdict(list)
# Connect to DB
conn = sqlite3.connect(args.stackoverflow_db)
c = conn.cursor()
sql = """SELECT content, code_blocks.post_id
FROM code_blocks JOIN tags
ON code_blocks.post_id == tags.post_id
WHERE tag == ?"""
if args.language == "python":
extract_error = extract_error_python
language_tag = "python"
elif args.language == "golang":
extract_error = extract_error_golang
language_tag = "go"
else:
print("Uncorecognized language: %s" % args.language)
return
c.execute(sql, (language_tag,))
num_rows = 0
num_lines = 0
num_errors = 0
with open(args.output, 'w') as f:
while True:
batch = c.fetchmany(1000)
if len(batch) == 0:
break
num_rows += len(batch)
for content, post_id in batch:
post_id = int(post_id)
for line in content.split('\n'):
num_lines += 1
errmsg = extract_error(line)
if errmsg is not None:
f.write(errmsg.strip() + "\n")
num_errors += 1
print("\nQuery was:\n%s\n" % sql)
print("Query returned %d rows containing %d lines, of which %d were errors" %
(num_rows, num_lines, num_errors))
print("Wrote errors to %s" % args.output)
if __name__ == '__main__':
import cProfile, pstats
pr = cProfile.Profile()
pr.enable()
main()
pr.disable()
print('\n')
pstats.Stats(pr).sort_stats('tottime').print_stats(12)
|
import pandas as pd
# Type hinting
from typing import Tuple
def df_difference(a: pd.DataFrame, b: pd.DataFrame) -> Tuple[pd.DataFrame, pd.DataFrame, pd.DataFrame]:
"""Compute the difference of two DataFrames as sets.
Parameters:
a : First DataFrame
b : Second DataFrame
Returns:
Tuple containing
- ``left`` : Rows that appear only in ``a`` (i.e., left-only rows).
- ``right`` : Rows that appear only in ``b`` (i.e., right-only rows).
- ``both`` : Rows that appear both in ``a`` and ``b``.
"""
a = a.reset_index(drop=True)
b = b.reset_index(drop=True)
df = pd.merge(a, b, indicator=True, how='outer')
left = df.query('_merge=="left_only"').drop('_merge', axis=1)
right = df.query('_merge=="right_only"').drop('_merge', axis=1)
both = df.query('_merge=="both"').drop('_merge', axis=1)
return left, right, both
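# Illustrative example (not part of the original module; the column name "x" is
# made up for demonstration):
if __name__ == "__main__":
    a = pd.DataFrame({"x": [1, 2, 3]})
    b = pd.DataFrame({"x": [2, 3, 4]})
    left, right, both = df_difference(a, b)
    print(left)   # the row with x == 1 (only in a)
    print(right)  # the row with x == 4 (only in b)
    print(both)   # the rows with x in {2, 3}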
|
from fabric.api import *
from config import *
local('mysql -h %s -u %s -p%s %s' % (RDS_HOST, RDS_NAME, RDS_PASS, RDS_DB))
|
from typing import List
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from torchvision import transforms
class NNPolicy(nn.Module):
def __init__(self, input_dim, hidden_layers, output_dim, discrete):
super(NNPolicy, self).__init__()
layers = [nn.Linear(input_dim, hidden_layers[0])]
for i, l in enumerate(hidden_layers[:-1]):
layers.append(nn.Tanh())
layers.append(nn.Linear(hidden_layers[i], hidden_layers[i + 1]))
layers.append(nn.Tanh())
self.layers = nn.Sequential(*layers)
self.discrete = discrete
self.actor = nn.Linear(hidden_layers[-1], output_dim)
def forward(self, x):
x = self.layers(x)
actor = self.actor(x)
return actor
def act(self, x, sample=False):
actor = self.forward(x)
if self.discrete:
action = actor.argmax(-1, keepdims=True)
else:
action = actor
return action
    def reset(self):
        pass
class CNNPolicy(nn.Module):
def __init__(self, stack_states, input_dim, hidden_layers, output_dim, discrete):
super(CNNPolicy, self).__init__()
self.discrete = discrete
c, h, w = input_dim
self.convs = nn.ModuleList()
neurons = c*stack_states
for n in hidden_layers:
conv = nn.Conv2d(in_channels=neurons, out_channels=n, kernel_size=5,
stride=[2,2], padding=1)
neurons = n
self.convs.append(conv)
self.convs.append(nn.ReLU())
self.encoder = nn.Sequential(*self.convs)
out = self.encoder(torch.zeros(1, c*stack_states, h, w))
b, c, h, w = out.shape
self.actor = nn.Sequential(
nn.Linear(c*h*w, 32),
nn.ReLU(),
nn.Linear(32, 32),
nn.ReLU(),
nn.Linear(32, output_dim))
        self.stack_states = stack_states
        self.history = []  # frame stack used by act(); also cleared by reset()
def forward(self, x):
x = self.encoder(x)
x = torch.flatten(x, 1)
actor = self.actor(x)
return actor
def act(self, x, sample=False):
if len(self.history) == 0:
for i in range(self.stack_states):
self.history.append(x.unsqueeze(1))
self.history.insert(0, x.unsqueeze(1))
self.history.pop()
x = torch.cat(self.history, 1)
b, t, c, h, w = x.shape
x = x.reshape(b, c*t, h, w)
actor = self.forward(x)
if self.discrete:
action = actor.argmax(-1, keepdims=True)
else:
action = actor
return action
def reset(self):
self.history = []
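# Minimal usage sketch (illustrative; the dimensions below are made up):
if __name__ == "__main__":
    policy = NNPolicy(input_dim=4, hidden_layers=[32, 32], output_dim=3, discrete=True)
    obs = torch.randn(1, 4)  # one 4-dimensional observation
    logits = policy(obs)     # forward pass through the MLP
    print(logits.shape)      # torch.Size([1, 3])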
|
# Generated by Django 4.0.2 on 2022-02-21 17:26
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('reviewApp', '0008_artist_background_image'),
]
operations = [
migrations.AlterField(
model_name='artist',
name='background_image',
field=models.FileField(blank=True, null=True, upload_to='artist/bg_images/'),
),
migrations.AlterField(
model_name='artist',
name='image',
field=models.FileField(blank=True, null=True, upload_to='artist/images/'),
),
]
|
#********************************************************************************
#--------------------------------------------------------------------------------
#
# Significance Labs
# Brooklyn, NYC
#
# Author: Alexandra Berke (aberke)
# Written: Summer 2014
#
#
# /backstage/backstage.py
#
#
#--------------------------------------------------------------------------------
#*********************************************************************************
from flask import Blueprint, request, send_file
from app.lib.util import dumpJSON, respond500, respond200
from app.models import cleaner, list as List, room, task
from app.lib.basic_auth import requires_auth
bp = Blueprint('backstage', __name__, static_folder='static')
@bp.route('/')
@requires_auth
def view():
"""
TODO: Administration only
"""
return send_file('backstage/static/backstage.html')
@bp.route('/data/all', methods=['GET'])
def GET_data_all():
"""
Return all the cleaners with all their lists with all their rooms with all their tasks -- HUGE JOIN
"""
try:
all_cleaners = cleaner.find()
for c in all_cleaners:
c['lists'] = List.find(_cleaner=c['_id'])
for l in c['lists']:
l['rooms'] = room.find(_list=l['_id'], populate_tasks=True)
return dumpJSON(all_cleaners)
except Exception as e:
return respond500(e)
@bp.route('/cleaner/<id>', methods=['DELETE'])
@requires_auth
def DELETE_cleaner(id):
try:
cleaner.delete(id)
return respond200()
except Exception as e:
return respond500(e)
|
'''
Created on 9 Dec 2012
@author: kreczko
'''
from optparse import OptionParser
from rootpy.io import File
from array import array
from config.variable_binning import bin_edges
from tools.Unfolding import Unfolding
from tools.hist_utilities import hist_to_value_error_tuplelist
from tools.file_utilities import write_data_to_JSON, make_folder_if_not_exists
from tools.Timer import Timer
from time import clock, time
from tools.ROOT_utils import set_root_defaults
from config import XSectionConfig
def check_multiple_data_multiple_unfolding( input_file, method, channel ):
global nbins, use_N_toy, output_folder, offset_toy_mc, offset_toy_data, k_value
# same unfolding input, different data
get_folder = input_file.Get
pulls = []
add_pull = pulls.append
histograms = []
add_histograms = histograms.append
print 'Reading toy MC'
start1 = time()
mc_range = range( offset_toy_mc + 1, offset_toy_mc + use_N_toy + 1 )
data_range = range( offset_toy_data + 1, offset_toy_data + use_N_toy + 1 )
for nth_toy_mc in range( 1, 10000 + 1 ): # read all of them (easier)
if nth_toy_mc in mc_range or nth_toy_mc in data_range:
folder_mc = get_folder( channel + '/toy_%d' % nth_toy_mc )
add_histograms( get_histograms( folder_mc ) )
else:
add_histograms( ( 0, 0, 0 ) )
print 'Done reading toy MC in', time() - start1, 's'
for nth_toy_mc in range( offset_toy_mc + 1, offset_toy_mc + use_N_toy + 1 ):
print 'Doing MC no', nth_toy_mc
h_truth, h_measured, h_response = histograms[nth_toy_mc - 1]
unfolding_obj = Unfolding( h_truth, h_measured, h_response, method = method, k_value = k_value )
unfold, get_pull, reset = unfolding_obj.unfold, unfolding_obj.pull, unfolding_obj.Reset
for nth_toy_data in range( offset_toy_data + 1, offset_toy_data + use_N_toy + 1 ):
if nth_toy_data == nth_toy_mc:
continue
print 'Doing MC no, ' + str( nth_toy_mc ) + ', data no', nth_toy_data
h_data = histograms[nth_toy_data - 1][1]
unfold( h_data )
pull = get_pull()
diff = unfolding_obj.unfolded_data - unfolding_obj.truth
diff_tuple = hist_to_value_error_tuplelist( diff )
unfolded = unfolding_obj.unfolded_data
unfolded_tuple = hist_to_value_error_tuplelist( unfolded )
all_data = {'unfolded': unfolded_tuple,
'difference' : diff_tuple,
'pull': pull,
'nth_toy_mc': nth_toy_mc,
'nth_toy_data':nth_toy_data
}
add_pull( all_data )
reset()
save_pulls( pulls, test = 'multiple_data_multiple_unfolding', method = method, channel = channel )
def save_pulls( pulls, test, method, channel ):
global use_N_toy, offset_toy_mc, offset_toy_data
file_template = 'Pulls_%s_%s_%s_toy_MC_%d_to_%d_MC_%d_to_%d_data.txt'
output_file = output_folder + file_template % ( test, method, channel, offset_toy_mc + 1, use_N_toy + offset_toy_mc, offset_toy_data + 1, use_N_toy + offset_toy_data )
write_data_to_JSON( pulls, output_file )
print 'Pulls saved in file: ', output_file
def get_histograms( folder ):
h_truth = folder.truth.Clone()
h_measured = folder.measured.Clone()
h_response = folder.response.Clone()
return h_truth, h_measured, h_response
if __name__ == "__main__":
set_root_defaults( msg_ignore_level = 3001 )
parser = OptionParser()
parser.add_option( "-o", "--output",
dest = "output_folder", default = 'data/pull_data/',
help = "output folder for pull data files" )
parser.add_option( "-n", "--n_input_mc", type = 'int',
dest = "n_input_mc", default = 100,
help = "number of toy MC used for the tests" )
parser.add_option( "-k", "--k_value", type = 'int',
dest = "k_value", default = 3,
help = "k-value for SVD unfolding" )
parser.add_option( "-m", "--method", type = 'string',
dest = "method", default = 'RooUnfoldSvd',
help = "unfolding method" )
parser.add_option( "-f", "--file", type = 'string',
dest = "file", default = 'data/toy_mc/unfolding_toy_mc.root',
help = "file with toy MC" )
parser.add_option( "-v", "--variable", dest = "variable", default = 'MET',
help = "set the variable to analyse (MET, HT, ST, MT, WPT)" )
parser.add_option( "-s", "--centre-of-mass-energy", dest = "CoM", default = 8,
help = "set the centre of mass energy for analysis. Default = 8 [TeV]", type = int )
parser.add_option( "-c", "--channel", type = 'string',
dest = "channel", default = 'combined',
help = "channel to be analysed: electron|muon|combined" )
parser.add_option( "--offset_toy_mc", type = 'int',
dest = "offset_toy_mc", default = 0,
help = "offset of the toy MC used to response matrix" )
parser.add_option( "--offset_toy_data", type = 'int',
dest = "offset_toy_data", default = 0,
help = "offset of the toy MC used as data for unfolding" )
( options, args ) = parser.parse_args()
measurement_config = XSectionConfig(options.CoM)
centre_of_mass = options.CoM
variable = options.variable
make_folder_if_not_exists( options.output_folder )
# set the number of toy MC for error calculation
k_value = options.k_value
use_N_toy = options.n_input_mc
offset_toy_mc = options.offset_toy_mc
offset_toy_data = options.offset_toy_data
method = options.method
variable = options.variable
# define bins
bins = array( 'd', bin_edges[variable] )
nbins = len( bins ) - 1
output_folder = options.output_folder + '/' + str(centre_of_mass) + 'TeV/' + variable + '/%d_input_toy_mc/k_value_%d/' % ( use_N_toy, k_value )
make_folder_if_not_exists( output_folder )
print 'Producing unfolding pull data for %s variable, k-value %s. \nOutput folder: %s' % ( variable, k_value, output_folder )
input_file = File( options.file, 'read' )
timer = Timer()
if options.channel == 'electron':
check_multiple_data_multiple_unfolding( input_file, method, 'electron' )
elif options.channel == 'muon':
check_multiple_data_multiple_unfolding( input_file, method, 'muon' )
else:
check_multiple_data_multiple_unfolding( input_file, method, 'combined' )
end1, end2 = clock(), time()
print 'Runtime', timer.elapsed_time()
|
from PIL import Image
from predict import prediction
from preprocess_v3 import gen_images
### begin networking by xmcp
import getpass
import requests
import time
import io
import random
ELECTIVE_XH = input('Student ID: ')
ELECTIVE_PW = getpass.getpass('Password: ')
DELAY_S_MIN = 1.5
DELAY_S_DELTA = 1.5
adapter = requests.adapters.HTTPAdapter(pool_connections=3, pool_maxsize=3, pool_block=True, max_retries=3)
s = requests.Session()
s.mount('http://elective.pku.edu.cn', adapter)
s.mount('https://elective.pku.edu.cn', adapter)
def login():
print('login')
res = s.post(
'https://iaaa.pku.edu.cn/iaaa/oauthlogin.do',
data={
'appid': 'syllabus',
'userName': ELECTIVE_XH,
'password': ELECTIVE_PW,
'randCode': '',
'smsCode': '',
'otpCode': '',
'redirUrl': 'http://elective.pku.edu.cn:80/elective2008/ssoLogin.do'
},
)
res.raise_for_status()
json = res.json()
assert json['success'], json
token = json['token']
res = s.get(
'https://elective.pku.edu.cn/elective2008/ssoLogin.do',
params={
'rand': '%.10f'%random.random(),
'token': token,
},
)
res.raise_for_status()
def get_captcha():
res = s.get(
'https://elective.pku.edu.cn/elective2008/DrawServlet?Rand=114514',
headers={
'referer': 'https://elective.pku.edu.cn/elective2008/edu/pku/stu/elective/controller/supplement/SupplyCancel.do',
'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.190 Safari/537.36',
#'cookie': ELECTIVE_COOKIE,
},
timeout=(3,3),
)
res.raise_for_status()
rawim = res.content
if not rawim.startswith(b'GIF89a'):
print(res.text)
raise RuntimeError('bad captcha')
return rawim
def check_captcha(captcha):
res = s.post(
'https://elective.pku.edu.cn/elective2008/edu/pku/stu/elective/controller/supplement/validate.do',
headers={
'referer': 'https://elective.pku.edu.cn/elective2008/edu/pku/stu/elective/controller/supplement/SupplyCancel.do',
'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.190 Safari/537.36',
#'cookie': ELECTIVE_COOKIE,
},
data={
'xh': ELECTIVE_XH,
'validCode': captcha,
},
timeout=(3,3),
)
res.raise_for_status()
try:
json = res.json()
except Exception as e:
        if '异常刷新' in res.text:  # server reports "abnormal refresh": log in again and retry
login()
return check_captcha(captcha)
else:
print(res.text)
raise
if json['valid']!='2':
return False
else:
return True
### end networking
def step():
rawim = get_captcha()
im = Image.open(io.BytesIO(rawim))
ans = prediction(gen_images(im))
succ = check_captcha(ans)
serial = '%d-%d'%(1000*time.time(), random.random()*1000)
with open('bootstrap_img_%s/%s=%s.gif'%('succ' if succ else 'fail', ans, serial), 'wb') as f:
f.write(rawim)
return succ, ans
if __name__ == '__main__':
tot = 0
totsucc = 0
login()
while True:
tot += 1
succ, ans = step()
if succ:
totsucc += 1
print('pred: %s\tacc'%(ans), totsucc, '/', tot, '=', '%.3f'%(totsucc/tot))
time.sleep(DELAY_S_MIN + random.random()*DELAY_S_DELTA) |
"""
Base settings to build other settings files upon.
"""
import os
from django.urls import reverse_lazy
from decouple import config
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/
DEBUG = config('DEBUG', cast=bool)
# SECURITY WARNING: keep the secret key used in production secret!
# SECRET_KEY = '3up3aw-a1n73oq7#^q!gy189zp4p@l7knldwl6y#nq!8e!7t(p'
SECRET_KEY = config("SECRET_KEY")
ALLOWED_HOSTS = ['example.com', '*', '127.0.0.1']  # host names only; scheme prefixes are not valid here
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
# 'channels',
'widget_tweaks',
'avatar',
'social_django',
'rest_framework',
'accounts',
'feed',
'comments',
'hortihome',
'actions',
'notifications',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'whitenoise.middleware.WhiteNoiseMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'accounts.login_middleware.LoginRequiredMiddleware',
'debug_toolbar.middleware.DebugToolbarMiddleware',
'social_django.middleware.SocialAuthExceptionMiddleware',
]
ROOT_URLCONF = 'hortihub.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'social_django.context_processors.backends', # <--
'social_django.context_processors.login_redirect',
],
},
},
]
WSGI_APPLICATION = 'hortihub.wsgi.application'
# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# REDIS setup
REDIS_URL = config('REDIS_URL', default=('localhost', 6379))
# CHANNEL_LAYERS = {
# "default": {
# "BACKEND": "asgi_redis.RedisChannelLayer",
# "CONFIG": {
# "hosts": [REDIS_URL,],
# },
# "ROUTING": "notifications.routing.channel_routing",
# },
# }
# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/
STATIC_URL = '/static/'
# STATICFILES_DIRS = [
# os.path.join(BASE_DIR, 'static'),
# ]
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles') # Why?
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media/')
LOGIN_EXEMPT_URLS = (
r'^/accounts/login/$',
r'^accounts/logout/$',
r'^accounts/signup/$',
r'^oauth/',
)
LOGIN_URL = '/accounts/login/'
LOGIN_REDIRECT_URL = '/feed/userfeed/'
LOGOUT_REDIRECT_URL = '/'
ABSOLUTE_URL_OVERRIDES = {
'auth.user': lambda u: reverse_lazy('accounts:userdetail', args=[u.pk])
}
INTERNAL_IPS = ['127.0.0.1',]
AUTHENTICATION_BACKENDS = (
'social_core.backends.github.GithubOAuth2',
'social_core.backends.twitter.TwitterOAuth',
'social_core.backends.facebook.FacebookOAuth2',
'social_core.backends.linkedin.LinkedinOAuth2',
'social_core.backends.google.GoogleOAuth2',
'django.contrib.auth.backends.ModelBackend',
'accounts.authentication.EmailAuthBackend',
)
SOCIAL_AUTH_GITHUB_KEY = config("SOCIAL_AUTH_GITHUB_KEY")
SOCIAL_AUTH_GITHUB_SECRET = config("SOCIAL_AUTH_GITHUB_SECRET")
SOCIAL_AUTH_FACEBOOK_KEY = config("SOCIAL_AUTH_FACEBOOK_KEY")
SOCIAL_AUTH_FACEBOOK_SECRET = config("SOCIAL_AUTH_FACEBOOK_SECRET")
SOCIAL_AUTH_GOOGLE_OAUTH2_KEY = config("SOCIAL_AUTH_GOOGLE_OAUTH2_KEY")
SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET = config("SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET")
SOCIAL_AUTH_LINKEDIN_OAUTH2_KEY = config("SOCIAL_AUTH_LINKEDIN_OAUTH2_KEY")
SOCIAL_AUTH_LINKEDIN_OAUTH2_SECRET = config("SOCIAL_AUTH_LINKEDIN_OAUTH2_SECRET")
REDIS_HOST = 'localhost'
REDIS_PORT = 6379
REDIS_DB = 0
|
import corner
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import mod_temperature
import defaultparams.uconv as uconv
import defaultparams.cosmology as cosmo
import scipy
from mod_gasdensity import *
from mod_mass import *
'''
Plotting functions
'''
def seplog(n):
'''
    For a float of the form n = fac * 10**power, separates out "fac" and "power".
Used with the intent of making nice looking annotations on a plot.
'''
power = int(np.floor(np.log10(n)))
fac = n/(10.**power)
return [fac, power]
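# Example (added for clarity): seplog(3.2e13) returns [3.2, 13] (up to floating
# point), which the annotation code below uses to typeset values as 3.2 x 10^13.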
def plt_mcmc_freeparam(mcmc_results, samples, sampler, tspec_data,
clustermeta):
'''
Make a corner plot from the MCMC posterior distribution of
free-parameter values.
Args:
-----
mcmc_results (array):
samples (array): posterior MCMC distribution of free-param vals
tspec_data (astropy table): table containing profile information about
temperature
Results:
--------
fig1 (plot)
'''
matplotlib.rcParams['font.size'] = 9
matplotlib.rcParams['axes.labelsize'] = 12
if samples.shape[1] == 3:
xa = 0.7
elif samples.shape[1] == 2:
xa = 0.6
fig1 = corner.corner(samples,
labels=["$c$",
"$R_s$",
r"$\rho_{\star,0,\mathrm{Sersic}}$"])
chainshape = np.array(sampler.chain).shape
plt.annotate('Nwalkers, Nsteps = '
+ str(chainshape[0])
+ ', '+str(chainshape[1]),
(xa, 0.95), xycoords='figure fraction')
# plt.annotate('Nburnin = '+str(params.Nburnin),
# (xa,0.9),xycoords='figure fraction')
plt.annotate('$r_{\mathrm{ref}}$='
+ str(int(tspec_data['radius'][clustermeta['refindex']]))
+ ' kpc', (xa, 0.8), xycoords='figure fraction')
plt.annotate(r'$c = '+str(np.round(mcmc_results['c'][0], decimals=1))
+ '_{-'+str(np.round(mcmc_results['c'][2], decimals=2))
+ '}^{+'+str(np.round(mcmc_results['c'][1], decimals=2))
+ '}$', (xa, 0.75), xycoords='figure fraction')
plt.annotate(r'$R_{s} = '+str(np.round(mcmc_results['rs'][0], decimals=1))
+ '_{-'+str(np.round(mcmc_results['rs'][2], decimals=1))
+ '}^{+'+str(np.round(mcmc_results['rs'][1], decimals=1))
+ '}$ kpc', (xa, 0.7), xycoords='figure fraction')
ya = 0.7
if clustermeta['incl_mstar'] == 1:
ya = 0.65
plt.annotate(
r'$log(\rho_{\star,0,\mathrm{Sersic}} [M_{\odot} kpc^{-3}]) = '
+ str(np.round(mcmc_results['normsersic'][0], decimals=1))
+ '_{-'+str(np.round(mcmc_results['normsersic'][2], decimals=2))
+ '}^{+'+str(np.round(mcmc_results['normsersic'][1], decimals=2))
+ '}$', (xa, 0.65), xycoords='figure fraction')
# print properties of the sampler
try:
# check autocorrelation time
tacor = sampler.acor
plt.annotate(
r'$\tau_{\mathrm{acor}}(c)$='+str(int(np.round(tacor[0], 0))),
(xa, ya-0.1), xycoords='figure fraction')
plt.annotate(
r'$\tau_{\mathrm{acor}}(R_s)$='+str(int(np.round(tacor[1], 0))),
(xa, ya-0.15), xycoords='figure fraction')
if clustermeta['incl_mstar'] == 1:
plt.annotate(
r'$\tau_{\mathrm{acor}}(log(\rho_{\star,0,\mathrm{Sersic}}))$='
+ str(int(np.round(tacor[2], 0))),
(xa, ya-0.2), xycoords='figure fraction')
except:
pass
return fig1
###########################################################################
###########################################################################
###########################################################################
def plt_summary(ne_data, tspec_data, nemodel, mcmc_results, clustermeta):
'''
Make a summary plot containing the gas density profile, temperature
profile, and mass profile. Annotations for all relevant calculated
quantities.
Args:
-----
ne_data (astropy table): table containing profile information about
gas density
tspec_data (astropy table): table containing profile information about
temperature
nemodel (dictionary): info about ne profile fit including param values
and errors
mcmc_results (dictionary): values and errors of free-params of MCMC as
well as quantities calculated from the posterior MCMC distribution
Results:
--------
fig2 (plot):
subfig 1: plot of observed gas density profile and fitted gas
density profile
subfig 2: plot of observed temperature profile and model temperature
profile
subfig 3: mass profile of clustermeta - includes total and components
of DM, stars, gas
'''
fig3 = plt.figure(3, (9, 9))
plt.figure(3)
matplotlib.rcParams['font.size'] = 10
matplotlib.rcParams['axes.labelsize'] = 12
matplotlib.rcParams['legend.fontsize'] = 10
matplotlib.rcParams['mathtext.default'] = 'regular'
matplotlib.rcParams['mathtext.fontset'] = 'stixsans'
plt.suptitle(str(clustermeta['name']))
'''
gas density
'''
ax1 = fig3.add_subplot(2, 2, 1)
plt.loglog(ne_data['radius'], ne_data['ne'], 'o', color='#707070',
markersize=2)
plt.errorbar(ne_data['radius'], ne_data['ne'],
xerr=[ne_data['radius_lowerbound'],
ne_data['radius_upperbound']],
yerr=ne_data['ne_err'], linestyle='none', color='b')
plt.xlim(xmin=1)
ax1.set_xscale("log", nonposx='clip')
ax1.set_yscale("log", nonposy='clip')
plt.xlabel('r [kpc]')
plt.ylabel('$n_{e}$ [cm$^{-3}$]')
plt_densityprof(nemodel=nemodel, ne_data=ne_data, annotations=1)
'''
final kT profile with c, rs
'''
if clustermeta['incl_mstar'] == 1:
tfit_arr \
= mod_temperature.Tmodel_func(
ne_data=ne_data,
tspec_data=tspec_data,
nemodel=nemodel,
clustermeta=clustermeta,
c=mcmc_results['c'][0],
rs=mcmc_results['rs'][0],
normsersic=mcmc_results['normsersic'][0])
elif clustermeta['incl_mstar'] == 0:
tfit_arr \
= mod_temperature.Tmodel_func(
ne_data=ne_data,
tspec_data=tspec_data,
nemodel=nemodel,
clustermeta=clustermeta,
c=mcmc_results['c'][0],
rs=mcmc_results['rs'][0])
ax2 = fig3.add_subplot(2, 2, 2)
plt.semilogx(tspec_data['radius'], tspec_data['tspec'], 'bo')
plt.errorbar(tspec_data['radius'], tspec_data['tspec'],
xerr=[tspec_data['radius_lowerbound'],
tspec_data['radius_upperbound']],
yerr=[tspec_data['tspec_lowerbound'],
tspec_data['tspec_upperbound']],
linestyle='none', color='b')
plt.xlabel('r [kpc]')
plt.ylabel('kT [keV]')
plt.annotate('$r_{\mathrm{ref}}$='
+ str(int(tspec_data['radius'][clustermeta['refindex']]))
+ ' kpc', (0.05, 0.9), xycoords='axes fraction')
    xmin, xmax = plt.xlim()
    if xmin < 1:
        plt.xlim(xmin=1)
    ymin, ymax = plt.ylim()
    plt.ylim(np.floor(ymin), np.ceil(ymax))
plt.semilogx(tspec_data['radius'], np.array(tfit_arr), 'r-')
##########################################################################
'''
OVERDENSITY RADIUS: MASS PROFILE
'''
ax3 = fig3.add_subplot(2, 2, 3)
xplot = np.logspace(np.log10(1.), np.log10(900.), 100)
mass_nfw = nfw_mass_model(xplot,
mcmc_results['c'][0],
mcmc_results['rs'][0],
clustermeta['z']) # [Msun]
mass_tot = np.copy(mass_nfw)
if clustermeta['incl_mstar'] == 1:
mass_sersic = sersic_mass_model(xplot, mcmc_results['normsersic'][0],
clustermeta) # Msun
mass_tot += mass_sersic
if clustermeta['incl_mgas'] == 1:
mass_gas = gas_mass_model(xplot, nemodel) # [Msun]
mass_tot += mass_gas
plt.loglog(xplot, mass_tot, 'r-', label='M$_{\mathrm{tot}}$')
plt.loglog(xplot, mass_nfw, 'b-', label='M$_{\mathrm{DM}}$')
if clustermeta['incl_mstar'] == 1:
plt.loglog(xplot, mass_sersic, 'g-', label='M$_{\star}$')
if clustermeta['incl_mgas'] == 1:
plt.loglog(xplot, mass_gas, 'y-', label='M$_{\mathrm{gas}}$')
handles, labels = ax3.get_legend_handles_labels()
plt.legend(handles, labels, loc=2)
plt.xlim(xmin=2)
plt.ylim(ymin=6.*10**10., ymax=10**14.) # to match g07
plt.xlabel('r [kpc]')
plt.ylabel('mass [$M_{\odot}$]')
plt.annotate(r'$c_{'+str(int(cosmo.overdensity))+'} = '
+ str(np.round(mcmc_results['c'][0], 1))
+ '_{-'+str(np.round(mcmc_results['c'][2], 2))
+ '}^{+'+str(np.round(mcmc_results['c'][1], 2))+'}$',
(0.55, 0.45), xycoords='figure fraction')
plt.annotate(r'$R_{s} = '+str(np.round(mcmc_results['rs'][0], 1))
+ '_{-'+str(np.round(mcmc_results['rs'][2], 1))
+ '}^{+'+str(np.round(mcmc_results['rs'][1], 1))+'}$ kpc',
(0.55, 0.4), xycoords='figure fraction')
if clustermeta['incl_mstar'] == 1:
plt.annotate(
r'$log(\rho_{\star,0,\mathrm{Sersic}} [M_{\odot} kpc^{-3}]) = '
+ str(np.round(mcmc_results['normsersic'][0], 1))
+ '_{-'+str(np.round(mcmc_results['normsersic'][2], 2))
+ '}^{+'+str(np.round(mcmc_results['normsersic'][1], 2))
+ '}$',
(0.55, 0.35), xycoords='figure fraction')
plt.annotate(
r'$R_{eff}=$'+str(clustermeta['bcg_re'])+' kpc',
(0.8, 0.45), xycoords='figure fraction')
plt.annotate(
r'$n_{\mathrm{Sersic}}$='+str(clustermeta['bcg_sersic_n']),
(0.8, 0.4), xycoords='figure fraction')
plt.annotate(
'$R_{'+str(int(cosmo.overdensity))+'}='
+ str(int(np.round(mcmc_results['rdelta'][0], 0)))
+ '_{-'+str(int(np.round(mcmc_results['rdelta'][2], 0)))
+ '}^{+'+str(int(np.round(mcmc_results['rdelta'][1], 0)))
+ ' }$ kpc',
(0.55, 0.25), xycoords='figure fraction')
plt.annotate(
'$M_{'+str(int(cosmo.overdensity))+'}='
+ str(np.round(seplog(mcmc_results['mdelta'][0])[0], 2))
+ '_{-'+str(np.round(mcmc_results['mdelta'][2]
* 10**-seplog(mcmc_results['mdelta'][0])[1], 2))
+ '}^{+'+str(np.round(mcmc_results['mdelta'][1]
* 10**-seplog(mcmc_results['mdelta'][0])[1], 2))
+ '} \ 10^{'+str(seplog(mcmc_results['mdelta'][0])[1])
+ '} \ M_{\odot}$',
(0.55, 0.2), xycoords='figure fraction')
plt.annotate(
'$M_{DM}(R_{'+str(int(cosmo.overdensity))+'})='
+ str(np.round(seplog(mcmc_results['mdm'][0])[0], 2))
+ '_{-'+str(np.round(mcmc_results['mdm'][2]
* 10**-seplog(mcmc_results['mdm'][0])[1], 2))
+ '}^{+'+str(np.round(mcmc_results['mdm'][1]
* 10**-seplog(mcmc_results['mdm'][0])[1], 2))
+ '} \ 10^{'+str(seplog(mcmc_results['mdm'][0])[1])
+ '} \ M_{\odot}$',
(0.55, 0.15), xycoords='figure fraction')
if clustermeta['incl_mgas'] == 1:
plt.annotate(
'$M_{gas}(R_{'+str(int(cosmo.overdensity))+'})='
+ str(np.round(seplog(mcmc_results['mgas'][0])[0], 2))
+ '_{-'
+ str(np.round(mcmc_results['mgas'][2]
* 10**-seplog(mcmc_results['mgas'][0])[1], 2))
+ '}^{+'
+ str(np.round(mcmc_results['mgas'][1]
* 10**-seplog(mcmc_results['mgas'][0])[1], 2))
+ '} \ 10^{'+str(seplog(mcmc_results['mgas'][0])[1])
+ '} \ M_{\odot}$',
(0.55, 0.10), xycoords='figure fraction')
if clustermeta['incl_mstar'] == 1:
plt.annotate(
'$M_{\star}(R_{'+str(int(cosmo.overdensity))+'})='
+ str(np.round(seplog(mcmc_results['mstars'][0])[0], 2))
+ '_{-'
+ str(np.round(mcmc_results['mstars'][2]
* 10**-seplog(mcmc_results['mstars'][0])[1], 2))
+ '}^{+'
+ str(np.round(mcmc_results['mstars'][1]
* 10**-seplog(mcmc_results['mstars'][0])[1], 2))
+ '} \ 10^{'+str(seplog(mcmc_results['mstars'][0])[1])
+ '} \ M_{\odot}$',
(0.55, 0.05), xycoords='figure fraction')
return fig3, ax1, ax2
#############################################################################
#############################################################################
#############################################################################
def plt_densityprof(nemodel, ne_data, annotations=0):
'''
Helper function to plot the input gas density profile model.
Args:
-----
nemodel (dictionary): info about ne profile fit including
param values and errors
annotations: option to add ne model parameter values and errors to plot
Results:
--------
plt (plot): a plot with annotations of the best-fitting model of the
gas density profile.
'''
# add model to plot
rplot = np.linspace(1., max(ne_data['radius']), 1000)
if nemodel['type'] == 'double_beta':
plt.plot(rplot, doublebetamodel(nemodel['parvals'], rplot), 'r')
if annotations == 1:
plt.annotate(
r'$n_{e,0,1}='+str(np.round(nemodel['parvals'][0], 3))
+ '_{'+str(np.round(nemodel['parmins'][0], 3))
+ '}^{+'+str(np.round(nemodel['parmaxes'][0], 3))
+ '}$ cm$^{-3}$', (0.02, 0.4), xycoords='axes fraction')
plt.annotate(
'$r_{c,1}='+str(np.round(nemodel['parvals'][1], 2))
+ '_{'+str(np.round(nemodel['parmins'][1], decimals=2))
+ '}^{+'+str(np.round(nemodel['parmaxes'][1], decimals=2))
+ '}$ kpc', (0.02, 0.35), xycoords='axes fraction')
plt.annotate(
r'$\beta_1='+str(np.round(nemodel['parvals'][2], 2))
+ '_{'+str(np.round(nemodel['parmins'][2], decimals=2))
+ '}^{+'+str(np.round(nemodel['parmaxes'][2], decimals=2))
+ '}$', (0.02, 0.3), xycoords='axes fraction')
plt.annotate(
r'$n_{e,0,2}='+str(np.round(nemodel['parvals'][3], decimals=3))
+ '_{'+str(np.round(nemodel['parmins'][3], decimals=3))
+ '}^{+'+str(np.round(nemodel['parmaxes'][3], decimals=3))
+ '}$ cm$^{-3}$', (0.02, 0.25), xycoords='axes fraction')
plt.annotate(
'$r_{c,2}='+str(np.round(nemodel['parvals'][4], decimals=2))
+ '_{'+str(np.round(nemodel['parmins'][4], decimals=2))
+ '}^{+'+str(np.round(nemodel['parmaxes'][4], decimals=2))
+ '}$ kpc', (0.02, 0.2), xycoords='axes fraction')
plt.annotate(
r'$\beta_2='+str(np.round(nemodel['parvals'][5], decimals=2))
+ '_{'+str(np.round(nemodel['parmins'][5], decimals=2))
+ '}^{+'+str(np.round(nemodel['parmaxes'][5], decimals=2))
+ '}$', (0.02, 0.15), xycoords='axes fraction')
plt.annotate(
'$\chi^2_r$='+str(np.round(nemodel['rchisq'], decimals=2)),
(0.02, 0.05), xycoords='axes fraction')
if nemodel['type'] == 'double_beta_tied':
plt.plot(rplot, doublebetamodel_tied(nemodel['parvals'], rplot), 'r')
if annotations == 1:
plt.annotate(
r'$n_{e,0,1}='+str(np.round(nemodel['parvals'][0], 3))
+ '_{'+str(np.round(nemodel['parmins'][0], 3))
+ '}^{+'+str(np.round(nemodel['parmaxes'][0], 3))
+ '}$ cm$^{-3}$', (0.02, 0.4), xycoords='axes fraction')
plt.annotate(
'$r_{c,1}='+str(np.round(nemodel['parvals'][1], 2))
+ '_{'+str(np.round(nemodel['parmins'][1], 2))
+ '}^{+'+str(np.round(nemodel['parmaxes'][1], 2))
+ '}$ kpc', (0.02, 0.35), xycoords='axes fraction')
plt.annotate(
r'$\beta_1='+str(np.round(nemodel['parvals'][2], 2))
+ '_{'+str(np.round(nemodel['parmins'][2], 2))
+ '}^{+'+str(np.round(nemodel['parmaxes'][2], 2))
+ '}$', (0.02, 0.3), xycoords='axes fraction')
plt.annotate(
r'$n_{e,0,2}='+str(np.round(nemodel['parvals'][3], 3))
+ '_{'+str(np.round(nemodel['parmins'][3], 3))
+ '}^{+'+str(np.round(nemodel['parmaxes'][3], 3))
+ '}$ cm$^{-3}$', (0.02, 0.25), xycoords='axes fraction')
plt.annotate(
'$r_{c,2}='+str(np.round(nemodel['parvals'][4], 2))
+ '_{'+str(np.round(nemodel['parmins'][4], 2))
+ '}^{+'+str(np.round(nemodel['parmaxes'][4], 2))
+ '}$ kpc', (0.02, 0.2), xycoords='axes fraction')
plt.annotate(r'$\beta_2=\beta_1$',
(0.02, 0.15), xycoords='axes fraction')
plt.annotate(
'$\chi^2_r$='+str(np.round(nemodel['rchisq'], 2)),
(0.02, 0.05), xycoords='axes fraction')
if nemodel['type'] == 'single_beta':
plt.plot(rplot, betamodel(nemodel['parvals'], rplot), 'r')
if annotations == 1:
plt.annotate(
r'$n_{e,0}='+str(np.round(nemodel['parvals'][0], decimals=3))
+ '_{'+str(np.round(nemodel['parmins'][0], decimals=3))
+ '}^{+'+str(np.round(nemodel['parmaxes'][0], decimals=3))
+ '}$ cm$^{-3}$', (0.02, 0.25), xycoords='axes fraction')
plt.annotate(
'$r_{c}='+str(np.round(nemodel['parvals'][1], decimals=2))
+ '_{'+str(np.round(nemodel['parmins'][1], decimals=2))
+ '}^{+'+str(np.round(nemodel['parmaxes'][1], decimals=2))
+ '}$ kpc', (0.02, 0.2), xycoords='axes fraction')
plt.annotate(
r'$\beta='+str(np.round(nemodel['parvals'][2], decimals=2))
+ '_{'+str(np.round(nemodel['parmins'][2], decimals=2))
+ '}^{+'+str(np.round(nemodel['parmaxes'][2], decimals=2))
+ '}$', (0.02, 0.15), xycoords='axes fraction')
plt.annotate(
'$\chi^2_r$='+str(np.round(nemodel['rchisq'], decimals=2)),
(0.02, 0.05), xycoords='axes fraction')
if nemodel['type'] == 'cusped_beta':
plt.plot(rplot, cuspedbetamodel(nemodel['parvals'], rplot), 'r')
if annotations == 1:
plt.annotate(
r'$n_{e,0}='+str(np.round(nemodel['parvals'][0], decimals=3))
+ '_{'+str(np.round(nemodel['parmins'][0], decimals=3))
+ '}^{+'+str(np.round(nemodel['parmaxes'][0], decimals=3))
+ '}$ cm$^{-3}$', (0.02, 0.3), xycoords='axes fraction')
plt.annotate(
'$r_{c}='+str(np.round(nemodel['parvals'][1], decimals=2))
+ '_{'+str(np.round(nemodel['parmins'][1], decimals=2))
+ '}^{+'+str(np.round(nemodel['parmaxes'][1], decimals=2))
+ '}$ kpc', (0.02, 0.25), xycoords='axes fraction')
plt.annotate(
r'$\beta='+str(np.round(nemodel['parvals'][2], decimals=2))
+ '_{'+str(np.round(nemodel['parmins'][2], decimals=2))
+ '}^{+'+str(np.round(nemodel['parmaxes'][2], decimals=2))
+ '}$', (0.02, 0.2), xycoords='axes fraction')
plt.annotate(
r'$\epsilon='+str(np.round(nemodel['parvals'][3], decimals=2))
+ '_{'+str(np.round(nemodel['parmins'][3], decimals=2))
+ '}^{+'+str(np.round(nemodel['parmaxes'][3], decimals=2))
+ '}$', (0.02, 0.15), xycoords='axes fraction')
plt.annotate(
'$\chi^2_r$='+str(np.round(nemodel['rchisq'], decimals=2)),
(0.02, 0.05), xycoords='axes fraction')
return plt
###########################################################################
###########################################################################
###########################################################################
def plt_summary_nice(ne_data, tspec_data, nemodel, mcmc_results, clustermeta):
'''
Make a summary plot containing the gas density profile, temperature
profile, and mass profile. Annotations for all relevant calculated
quantities.
Nice version to go in paper.
Args:
-----
ne_data (astropy table): table containing profile information about
gas density
tspec_data (astropy table): table containing profile information about
temperature
nemodel (dictionary): info about ne profile fit including param values
and errors
mcmc_results (dictionary): values and errors of free-params of MCMC as
well as quantities calculated from the posterior MCMC distribution
Results:
--------
fig4 (plot):
subfig 1: plot of observed gas density profile and fitted gas density
profile
subfig 2: plot of observed temperature profile and model temperature
profile
    subfig 3: mass profile of the cluster - includes the total mass and the
         components from DM, stars, and gas
'''
fig4 = plt.figure(4, (12, 4))
plt.figure(4)
matplotlib.rcParams['font.size'] = 10
matplotlib.rcParams['axes.labelsize'] = 12
matplotlib.rcParams['legend.fontsize'] = 10
matplotlib.rcParams['mathtext.default'] = 'regular'
matplotlib.rcParams['mathtext.fontset'] = 'stixsans'
'''
gas density
'''
ax1 = fig4.add_subplot(1, 3, 1)
plt.loglog(ne_data['radius'], ne_data['ne'], 'o', color='#707070',
markersize=2)
plt.errorbar(ne_data['radius'], ne_data['ne'],
xerr=[ne_data['radius_lowerbound'],
ne_data['radius_upperbound']],
yerr=ne_data['ne_err'],
linestyle='none', color='#707070')
plt.xlim(xmin=1)
ax1.set_xscale("log", nonposx='clip')
ax1.set_yscale("log", nonposy='clip')
plt.xlabel('r [kpc]')
plt.ylabel('$n_{e}$ [cm$^{-3}$]')
plt_densityprof(nemodel=nemodel, ne_data=ne_data, annotations=0)
'''
final kT profile with c, rs
'''
if clustermeta['incl_mstar'] == 1:
tfit_arr \
= mod_temperature.Tmodel_func(
ne_data=ne_data,
tspec_data=tspec_data,
nemodel=nemodel,
clustermeta=clustermeta,
c=mcmc_results['c'][0],
rs=mcmc_results['rs'][0],
normsersic=mcmc_results['normsersic'][0])
elif clustermeta['incl_mstar'] == 0:
tfit_arr \
= mod_temperature.Tmodel_func(
ne_data=ne_data,
tspec_data=tspec_data,
nemodel=nemodel,
clustermeta=clustermeta,
c=mcmc_results['c'][0],
rs=mcmc_results['rs'][0])
ax2 = fig4.add_subplot(1, 3, 2)
plt.semilogx(tspec_data['radius'], tspec_data['tspec'], 'bo')
plt.errorbar(tspec_data['radius'], tspec_data['tspec'],
xerr=[tspec_data['radius_lowerbound'],
tspec_data['radius_upperbound']],
yerr=[tspec_data['tspec_lowerbound'],
tspec_data['tspec_upperbound']],
linestyle='none', color='b')
plt.xlabel('r [kpc]')
plt.ylabel('kT [keV]')
plt.ylim(0, 4)
plt.xlim(xmin=1)
plt.semilogx(tspec_data['radius'], np.array(tfit_arr), 'r-')
##########################################################################
'''
OVERDENSITY RADIUS: MASS PROFILE
'''
ax3 = fig4.add_subplot(1, 3, 3)
xplot = np.logspace(np.log10(1.), np.log10(900.), 100)
mass_nfw = nfw_mass_model(xplot,
mcmc_results['c'][0],
mcmc_results['rs'][0],
clustermeta['z']) # [Msun]
mass_tot = np.copy(mass_nfw)
if clustermeta['incl_mstar'] == 1:
mass_sersic = sersic_mass_model(xplot, mcmc_results['normsersic'][0],
clustermeta) # Msun
mass_tot += mass_sersic
if clustermeta['incl_mgas'] == 1:
mass_gas = gas_mass_model(xplot, nemodel) # [Msun]
mass_tot += mass_gas
    plt.loglog(xplot, mass_tot, 'r-', label=r'M$_{\mathrm{tot}}$')
    plt.loglog(xplot, mass_nfw, 'b-', label=r'M$_{\mathrm{DM}}$')
    if clustermeta['incl_mstar'] == 1:
        plt.loglog(xplot, mass_sersic, 'g-', label=r'M$_{\star}$')
    if clustermeta['incl_mgas'] == 1:
        plt.loglog(xplot, mass_gas, 'y-', label=r'M$_{\mathrm{gas}}$')
handles, labels = ax3.get_legend_handles_labels()
plt.legend(handles, labels, loc=2)
plt.xlim(xmin=2)
plt.ylim(ymin=6.*10**10., ymax=10**14.) # to match g07
plt.xlabel('r [kpc]')
    plt.ylabel(r'mass [$M_{\odot}$]')
return fig4, ax1
|
import os
max_connection_retires = int(os.environ.get("MF_SERVICE_CONNECTION_RETRIES", 3))
connection_retry_wait_time_seconds = int(os.environ.get("MF_SERVICE_CONNECTION_RETRY_WAITTIME_SECONDS", 1))
max_startup_retries = int(os.environ.get("MF_SERVICE_STARTUP_RETRIES", 5))
startup_retry_wait_time_seconds = int(os.environ.get("MF_SERVICE_STARTUP_WAITTIME_SECONDS", 1))
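# Illustrative sketch (assumption: the service client consumes these settings in
# a simple retry loop; the real consumer code lives elsewhere in the project):
#     for attempt in range(max_connection_retires):
#         try:
#             connect_to_service()  # hypothetical helper
#             break
#         except ConnectionError:
#             time.sleep(connection_retry_wait_time_seconds)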
|
# Generated by Django 3.0.7 on 2020-06-12 08:27
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('references', '0002_auto_20200612_1516'),
]
operations = [
migrations.RenameField(
model_name='references',
old_name='Description',
new_name='description',
),
migrations.AlterField(
model_name='references',
name='author',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='description', to=settings.AUTH_USER_MODEL),
),
]
|
# coding: utf-8
"""
Definition of CLI commands.
"""
import json
import logging
from os import path
from traceback import format_exc
from time import sleep
import click
from click.types import StringParamType
import docker
from gql import gql, Client
from gql.transport.requests import RequestsHTTPTransport
import yaml
_logger = logging.getLogger(__name__)
class AliasedGroup(click.Group):
"""A Click group with short subcommands.
Example
-------
>>> @click.command(cls=AliasedGroup)
>>> def long_name_command():
... pass
"""
def get_command(self, ctx, cmd_name):
rv = click.Group.get_command(self, ctx, cmd_name)
if rv is not None:
return rv
matches = [x for x in self.list_commands(ctx)
if x.startswith(cmd_name)]
if not matches:
return None
elif len(matches) == 1:
return click.Group.get_command(self, ctx, matches[0])
ctx.fail('Too many matches: %s' % ', '.join(sorted(matches)))
class StrLength(StringParamType):
"""A Click option type of string with length validation.
This is basically the same as `str`, except for additional
functionalities of length validation.
:param min: Minimum length
:param max: Maximum length
    :param clamp: Clamp the input if exceeded
"""
def __init__(self, min=None, max=None, clamp=False):
self.min = min
self.max = max
self.clamp = clamp
def convert(self, value, param, ctx):
rv = StringParamType.convert(self, value, param, ctx)
l = len(rv)
if self.clamp:
if self.min is not None and l < self.min:
return rv + ' ' * (self.min - l)
if self.max is not None and l > self.max:
return rv[:self.max]
if self.min is not None and l < self.min or \
self.max is not None and l > self.max:
if self.min is None:
self.fail(
'Length %d is longer than the maximum valid length %d.'
% (l, self.max), param, ctx)
elif self.max is None:
self.fail(
'Length %d is shorter than the minimum valid length %d.'
% (l, self.min), param, ctx)
else:
self.fail(
'Length %d is not in the valid range of %d to %d.'
% (l, self.min, self.max), param, ctx)
return rv
def __repr__(self):
return 'StrLength(%d, %d)' % (self.min, self.max)
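# Usage sketch (illustrative only; the option below is hypothetical and not part
# of this CLI): StrLength works like any other Click param type, e.g.
#     @click.option('-n', '--name', type=StrLength(min=3, max=16))
# With clamp=True, too-short input is right-padded with spaces to `min` and
# too-long input is truncated to `max` instead of failing validation.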
def load_config(ctx, self, value):
"""Load `ctx.default_map` from a file.
:param ctx: Click context
    :param self: Click parameter object (second argument of the callback; unused)
:param value: File name
:return dict: Loaded config
"""
if not path.exists(value):
return {}
with open(value) as f:
ctx.default_map = yaml.safe_load(f)
return ctx.default_map
def save_config(ctx, value):
"""Save `ctx.default_map` to a file.
:param ctx: Click context
:param value: File name
    :return dict: Saved config
"""
with open(value, 'w') as f:
yaml.dump(ctx.default_map, f)
return ctx.default_map
def query(ctx, q, **kwargs):
"""Submit a GraphQL query to a database.
:param ctx: Click context
    :param q: str: GraphQL query submitted to a database; one of
        q_solution_to_evaluate, q_start_evaluation, q_check_budget,
        q_finish_evaluation, or q_cancel_evaluation.
    :param kwargs: GraphQL variables
    :return r: Result returned by the query q; its shape depends on q. For
        example, when q=q_solution_to_evaluate, r describes a single solution
        that has not yet been evaluated by the objective functions.
"""
_logger.debug('query(%s, %s)', q, kwargs)
try:
r = ctx.obj['client'].execute(gql(q), variable_values=kwargs)
except Exception as e:
ctx.fail('Exception %s raised when executing query %s\n' % (e, q))
_logger.debug('-> %s', r)
return r
def wait_to_fetch(ctx, interval):
"""Check if an unevaluated solution exists in a database by calling query every "interval" seconds.
:param ctx: Click context
:param interval: int: Interval to access a database (second)
:return solution_id: ID of a solution that has not been evaluated.
"""
while True:
r = query(ctx, q_solution_to_evaluate) # Polling
if r['solutions']:
break # solution found
sleep(interval)
return r['solutions'][0]['id']
def check_budget(ctx, user_id, match_id):
r = query(ctx, q_check_budget, user_id=user_id, match_id=match_id)
p = r['progress'][0]
n_eval = p['submitted'] - p['evaluation_error'] - p['scoring_error']
if n_eval > p['budget']: # Budget exceeded.
raise Exception('Out of budget: %d / %d.' % (n_eval, p['budget']))
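# Note: check_budget() counts only successfully evaluated submissions
# (submitted minus evaluation and scoring errors) against the per-match budget,
# and raises an Exception once that count exceeds the budget.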
# Check if an unevaluated solution exists in a database.
q_solution_to_evaluate = """
query solution_to_evaluate {
solutions(
limit: 1
order_by: { id: asc }
where: { evaluation_started_at: { _is_null: true } }
) {
id
}
}
"""
# Update evaluation_started_at of a solution to be evaluated by objective functions to the current time now().
q_start_evaluation = """
mutation start_evaluation(
$id: Int!
) {
update_solutions(
where: {
id: { _eq: $id }
evaluation_started_at: { _is_null: true }
}
_set: {
evaluation_started_at: "now()"
}
) {
affected_rows
returning {
id
owner_id
match_id
match {
problem { image }
environments {
key
value
}
}
variable
}
}
}
"""
# Get information about the number of function evaluations so far. budget is the pre-defined maximum number of function evaluations for a given problem instance. submitted is the total number of submissions of solutions. evaluation_error is the number of errors that occurred during the evaluation process. scoring_error is the number of errors that occurred during the scoring process.
q_check_budget = """
query check_budget(
$user_id: String!
$match_id: Int!
) {
progress(
limit: 1
where: {
user_id: { _eq: $user_id }
match_id: { _eq: $match_id }
}
) {
budget
submitted
evaluating
evaluated
evaluation_error
scoring
scored
scoring_error
}
}
"""
# Update evaluation_finished_at to the current time now(). Objective values, constraint values, and information about errors are also updated.
q_finish_evaluation = """
mutation finish_evaluation(
$id: Int!
$objective: jsonb
$constraint: jsonb
$info: jsonb
$error: String
) {
update_solutions_by_pk(
pk_columns: { id: $id }
_set: {
objective: $objective
constraint: $constraint
info: $info
evaluation_error: $error
evaluation_finished_at: "now()"
}) {
id
updated_at
}
}
"""
# Update evaluation_started_at and evaluation_finished_at to null when an error occurs in the evaluation process. A solution with evaluation_started_at=null and evaluation_finished=null means that it has not been evaluated by objective functions.
q_cancel_evaluation = """
mutation cancel_evaluation(
$id: Int!
) {
update_solutions_by_pk(
pk_columns: { id: $id }
_set: {
objective: null
constraint: null
info: null
evaluation_started_at: null
evaluation_finished_at: null
}) {
id
updated_at
}
}
"""
@click.command(help='OptHub Evaluator.')
@click.option('-u', '--url', envvar='OPTHUB_URL', type=str,
default='https://opthub-api.herokuapp.com/v1/graphql',
help='URL to OptHub.')
@click.option('-a', '--apikey', envvar='OPTHUB_APIKEY',
type=StrLength(max=64), help='ApiKey.')
@click.option('-i', '--interval', envvar='OPTHUB_INTERVAL',
type=click.IntRange(min=1), default=2, help='Polling interval.')
@click.option('--verify/--no-verify', envvar='OPTHUB_VERIFY',
default=True, help='Verify SSL certificate.')
@click.option('-r', '--retries', envvar='OPTHUB_RETRIES',
type=click.IntRange(min=0), default=3,
help='Retries to establish HTTPS connection.')
@click.option('-t', '--timeout', envvar='OPTHUB_TIMEOUT',
type=click.IntRange(min=0), default=600,
help='Timeout to process a query.')
@click.option('--rm', envvar='OPTHUB_REMOVE',
is_flag=True,
help='Remove containers after exit.')
@click.option('-q', '--quiet', count=True, help='Be quieter.')
@click.option('-v', '--verbose', count=True, help='Be more verbose.')
@click.option('-c', '--config', envvar='OPTHUB_EVALUATOR_CONFIG',
type=click.Path(dir_okay=False), default='opthub-evaluator.yml',
is_eager=True, callback=load_config, help='Configuration file.')
@click.version_option()
@click.argument('command', envvar='OPTHUB_COMMAND',
type=str, nargs=-1)
@click.pass_context
def run(ctx, **kwargs):
"""The entrypoint of CLI.
:param ctx: Click context
:param kwargs: GraphQL variables
"""
verbosity = 10 * (kwargs['quiet'] - kwargs['verbose'])
log_level = logging.WARNING + verbosity
logging.basicConfig(level=log_level)
_logger.info('Log level is set to %d', log_level)
_logger.debug('run(%s)', kwargs)
transport = RequestsHTTPTransport(
url=kwargs['url'],
verify=kwargs['verify'],
retries=kwargs['retries'],
headers={'X-Hasura-Admin-Secret': kwargs['apikey']},
)
ctx.obj = {
'client': Client(
transport=transport,
fetch_schema_from_transport=True,
)
}
_logger.info('Connect to docker daemon...')
client = docker.from_env()
_logger.info('...Connected')
n_solution = 1
_logger.info('==================== Solution: %d ====================', n_solution)
while True:
try:
_logger.info('Find solution to evaluate...')
solution_id = wait_to_fetch(ctx, kwargs['interval'])
_logger.debug(solution_id)
_logger.info('...Found')
except Exception as e:
if type(e) is InterruptedError:
_logger.info(e)
_logger.info('Attempt graceful shutdown...')
_logger.info('No need to rollback')
                _logger.info('...Shut down')
ctx.exit(0)
else:
_logger.error(format_exc())
continue
try:
_logger.info('Try to lock solution to evaluate...')
r = query(ctx, q_start_evaluation, id=solution_id)
if r['update_solutions']['affected_rows'] == 0:
_logger.info('...Already locked')
continue
elif r['update_solutions']['affected_rows'] != 1:
_logger.error('Lock error: affected_rows must be 0 or 1, but %s', r)
solution = r['update_solutions']["returning"][0]
            _logger.info('...Lock acquired')
_logger.info('Check budget...')
check_budget(ctx, user_id=solution['owner_id'], match_id=solution['match_id'])
_logger.info('...OK')
_logger.info('Parse variable to evaluate...')
_logger.debug(solution['variable'])
x = json.dumps(solution['variable']) + '\n'
_logger.debug(x)
_logger.info('...Parsed')
_logger.info('Start container...')
_logger.debug(solution['match']['problem']['image'])
c = client.containers.run(
image=solution['match']['problem']['image'],
command=kwargs['command'],
environment={v['key']: v['value']
for v in solution['match']['environments']},
stdin_open=True,
detach=True,
)
_logger.info('...Started: %s', c.name)
_logger.info('Send variable...')
s = c.attach_socket(params={'stdin': 1, 'stream': 1, 'stdout': 1, 'stderr': 1})
s._sock.sendall(x.encode('utf-8'))
            _logger.info('...Sent')
_logger.info('Wait for Evaluation...')
c.wait(timeout=kwargs['timeout'])
_logger.info('...Evaluated')
            _logger.info('Receive stdout...')
stdout = c.logs(stdout=True, stderr=False).decode('utf-8')
_logger.debug(stdout)
            _logger.info('...Received')
if kwargs['rm']:
_logger.info('Remove container...')
c.remove()
_logger.info('...Removed')
_logger.info('Parse stdout...')
stdout = json.loads(stdout)
_logger.debug(stdout)
_logger.info('...Parsed')
_logger.info('Check budget...')
check_budget(ctx, user_id=solution['owner_id'], match_id=solution['match_id'])
_logger.info('...OK')
_logger.info('Push evaluation...')
query(ctx, q_finish_evaluation,
id=solution['id'],
objective=stdout.get('objective'),
constraint=stdout.get('constraint'),
info=stdout.get('info'),
error=stdout.get('error'))
_logger.info('...Pushed')
except Exception as e:
if type(e) is InterruptedError:
_logger.info(e)
_logger.info('Attempt graceful shutdown...')
_logger.info('Rollback evaluation...')
query(ctx, q_cancel_evaluation, id=solution['id'])
_logger.info('...Rolled back')
                _logger.info('...Shut down')
ctx.exit(0)
_logger.error(format_exc())
_logger.info('Finish evaluation...')
query(ctx, q_finish_evaluation,
id=solution['id'],
objective=None,
constraint=None,
info=None,
error=str(e))
_logger.info('...Finished')
continue
n_solution += 1
_logger.info('==================== Solution: %d ====================', n_solution)
|
import gym
import torch
from torch import nn
from torch import optim
from torch.nn import functional as F
from torch.utils.data import TensorDataset, DataLoader
import numpy as np
import copy
import babyai.utils as utils
from babyai.rl import DictList
from babyai.model import ACModel
import multiprocessing
import os
import json
import logging
from torch.autograd import Variable
logger = logging.getLogger(__name__)
class EvalLearner(nn.Module):
"""
Meta Learner
"""
def __init__(self, args):
"""
:param args:
"""
super(EvalLearner, self).__init__()
self.update_lr = args.update_lr
self.meta_lr = args.meta_lr
self.task_num = args.task_num
self.args = args
utils.seed(self.args.seed)
self.env = gym.make(self.args.env)
demos_path = utils.get_demos_path(args.demos, args.env, args.demos_origin, valid=False)
demos_path_valid = utils.get_demos_path(args.demos, args.env, args.demos_origin, valid=True)
logger.info('loading demos')
self.train_demos = utils.load_demos(demos_path)
logger.info('loaded demos')
# if args.episodes:
# if args.episodes > len(self.train_demos):
# raise ValueError("there are only {} train demos".format(len(self.train_demos)))
# self.train_demos = self.train_demos[:args.episodes]
self.val_demos = utils.load_demos(demos_path_valid)
# if args.val_episodes > len(self.val_demos):
# logger.info('Using all the available {} demos to evaluate valid. accuracy'.format(len(self.val_demos)))
self.val_demos = self.val_demos[:self.args.val_episodes]
observation_space = self.env.observation_space
action_space = self.env.action_space
print(args.model)
self.obss_preprocessor = utils.ObssPreprocessor(args.model, observation_space,
getattr(self.args, 'pretrained_model', None))
# Define actor-critic model
self.net = utils.load_model(args.model, raise_not_found=True)
# if self.net is None:
# if getattr(self.args, 'pretrained_model', None):
# self.net = utils.load_model(args.pretrained_model, raise_not_found=True)
# else:
# self.net = ACModel(self.obss_preprocessor.obs_space, action_space,
# args.image_dim, args.memory_dim, args.instr_dim,
# not self.args.no_instr, self.args.instr_arch,
# not self.args.no_mem, self.args.arch)
self.obss_preprocessor.vocab.save()
# utils.save_model(self.net, args.model)
self.fast_net = copy.deepcopy(self.net)
self.net.train()
self.fast_net.train()
if torch.cuda.is_available():
self.net.cuda()
self.fast_net.cuda()
self.optimizer = torch.optim.SGD(self.fast_net.parameters(), lr= self.args.update_lr)
# self.scheduler = torch.optim.lr_scheduler.StepLR(self.optimizer, step_size=100, gamma=0.9)
self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
self.meta_optim = optim.Adam(self.net.parameters(), lr=self.meta_lr)
def starting_indexes(self, num_frames):
if num_frames % self.args.recurrence == 0:
return np.arange(0, num_frames, self.args.recurrence)
else:
return np.arange(0, num_frames, self.args.recurrence)[:-1]
def forward_batch(self, batch, task, net = 'fast', is_training = True):
if net == 'fast':
acmodel = self.fast_net
else:
acmodel = self.net
batch = utils.demos.induce_grammar(batch, task)
batch = utils.demos.transform_demos(batch)
batch.sort(key=len, reverse=True)
# Constructing flat batch and indices pointing to start of each demonstration
flat_batch = []
inds = [0]
for demo in batch:
flat_batch += demo
inds.append(inds[-1] + len(demo))
flat_batch = np.array(flat_batch)
inds = inds[:-1]
num_frames = len(flat_batch)
mask = np.ones([len(flat_batch)], dtype=np.float64)
mask[inds] = 0
mask = torch.tensor(mask, device=self.device, dtype=torch.float).unsqueeze(1)
        # Observations, true actions, values, and done flags for each of the stored demonstrations
obss, action_true, done = flat_batch[:, 0], flat_batch[:, 1], flat_batch[:, 2]
action_true = torch.tensor([action for action in action_true], device=self.device, dtype=torch.long)
# Memory to be stored
memories = torch.zeros([len(flat_batch), acmodel.memory_size], device=self.device)
episode_ids = np.zeros(len(flat_batch))
memory = torch.zeros([len(batch), acmodel.memory_size], device=self.device)
preprocessed_first_obs = self.obss_preprocessor(obss[inds], device=self.device)
instr_embedding = acmodel._get_instr_embedding(preprocessed_first_obs.instr)
# Loop terminates when every observation in the flat_batch has been handled
while True:
# taking observations and done located at inds
obs = obss[inds]
done_step = done[inds]
preprocessed_obs = self.obss_preprocessor(obs, device=self.device)
with torch.no_grad():
# taking the memory till len(inds), as demos beyond that have already finished
new_memory = acmodel(
preprocessed_obs,
memory[:len(inds), :], instr_embedding[:len(inds)])['memory']
memories[inds, :] = memory[:len(inds), :]
memory[:len(inds), :] = new_memory
episode_ids[inds] = range(len(inds))
# Updating inds, by removing those indices corresponding to which the demonstrations have finished
inds = inds[:len(inds) - sum(done_step)]
if len(inds) == 0:
break
# Incrementing the remaining indices
inds = [index + 1 for index in inds]
# Here, actual backprop upto args.recurrence happens
final_loss = 0
final_entropy, final_policy_loss, final_value_loss = 0, 0, 0
indexes = self.starting_indexes(num_frames)
memory = memories[indexes]
accuracy = 0
total_frames = len(indexes) * self.args.recurrence
for _ in range(self.args.recurrence):
obs = obss[indexes]
preprocessed_obs = self.obss_preprocessor(obs, device=self.device)
action_step = action_true[indexes]
mask_step = mask[indexes]
model_results = acmodel(
preprocessed_obs, memory * mask_step,
instr_embedding[episode_ids[indexes]])
dist = model_results['dist']
memory = model_results['memory']
entropy = dist.entropy().mean()
policy_loss = -dist.log_prob(action_step).mean()
loss = policy_loss - self.args.entropy_coef * entropy
action_pred = dist.probs.max(1, keepdim=True)[1]
accuracy += float((action_pred == action_step.unsqueeze(1)).sum()) / total_frames
final_loss += loss
final_entropy += entropy
final_policy_loss += policy_loss
indexes += 1
final_loss /= self.args.recurrence
# if is_training:
# self.optimizer.zero_grad()
# final_loss.backward()
# self.optimizer.step()
log = {}
log["entropy"] = float(final_entropy / self.args.recurrence)
log["policy_loss"] = float(final_policy_loss / self.args.recurrence)
log["accuracy"] = float(accuracy)
return final_loss,log
def validate(self, demo):
val_task_num = self.args.task_num
losses = [] # losses_q[i], i is tasks idx
logs = []
val_logs = []
for i in range(99):
self.fast_net = copy.deepcopy(self.net)
self.fast_net.train()
self.fast_net.zero_grad()
            # optimize fast net for k instances of task i
for k in range(5):
loss_task, log = self.forward_batch(demo[k*10:10*k+10], 119-i, 'fast')
self.optimizer.zero_grad()
loss_task.backward()
self.optimizer.step()
# loss_task, log = self.forward_batch(demo, i, 'fast')
# losses.append(loss_task)
logs.append(log)
self.fast_net.eval()
loss_task, log = self.forward_batch(demo, i, 'fast')
val_logs.append(log)
return val_logs
|
# pylint:disable=unused-variable
# pylint:disable=unused-argument
# pylint:disable=redefined-outer-name
import json
import random
from typing import Any, Callable, Dict, Iterator, List
import httpx
import pytest
import sqlalchemy as sa
from _dask_helpers import DaskGatewayServer
from _pytest.monkeypatch import MonkeyPatch
from distributed.deploy.spec import SpecCluster
from faker import Faker
from httpx import URL
from models_library.clusters import (
CLUSTER_ADMIN_RIGHTS,
CLUSTER_MANAGER_RIGHTS,
CLUSTER_NO_RIGHTS,
CLUSTER_USER_RIGHTS,
Cluster,
ClusterAccessRights,
ClusterAuthentication,
SimpleAuthentication,
)
from pydantic import AnyHttpUrl, SecretStr, parse_obj_as
from settings_library.rabbit import RabbitSettings
from settings_library.utils_cli import create_json_encoder_wo_secrets
from simcore_postgres_database.models.clusters import ClusterType, clusters
from simcore_service_director_v2.models.schemas.clusters import (
ClusterCreate,
ClusterGet,
ClusterPatch,
ClusterPing,
)
from starlette import status
pytest_simcore_core_services_selection = ["postgres", "rabbit"]
pytest_simcore_ops_services_selection = ["adminer"]
@pytest.fixture()
def clusters_config(
mock_env: None,
postgres_db: sa.engine.Engine,
postgres_host_config: Dict[str, str],
rabbit_service: RabbitSettings,
monkeypatch: MonkeyPatch,
dask_spec_local_cluster: SpecCluster,
):
monkeypatch.setenv("DIRECTOR_V2_POSTGRES_ENABLED", "1")
monkeypatch.setenv("COMPUTATIONAL_BACKEND_DASK_CLIENT_ENABLED", "1")
monkeypatch.setenv("R_CLONE_S3_PROVIDER", "MINIO")
@pytest.fixture
def cluster_simple_authentication(faker: Faker) -> Callable[[], Dict[str, Any]]:
def creator() -> Dict[str, Any]:
simple_auth = {
"type": "simple",
"username": faker.user_name(),
"password": faker.password(),
}
assert SimpleAuthentication.parse_obj(simple_auth)
return simple_auth
return creator
@pytest.fixture
def clusters_cleaner(postgres_db: sa.engine.Engine) -> Iterator:
yield
with postgres_db.connect() as conn:
conn.execute(sa.delete(clusters))
async def test_list_clusters(
clusters_config: None,
registered_user: Callable[..., Dict],
cluster: Callable[..., Cluster],
async_client: httpx.AsyncClient,
):
user_1 = registered_user()
list_clusters_url = URL(f"/v2/clusters?user_id={user_1['id']}")
# there is no cluster at the moment, the list shall contain the default cluster
response = await async_client.get(list_clusters_url)
assert response.status_code == status.HTTP_200_OK
returned_clusters_list = parse_obj_as(List[ClusterGet], response.json())
assert (
len(returned_clusters_list) == 1
), f"no default cluster in {returned_clusters_list=}"
assert (
returned_clusters_list[0].id == 0
), "default cluster id is not the one expected"
# let's create some clusters
NUM_CLUSTERS = 111
for n in range(NUM_CLUSTERS):
cluster(user_1, name=f"pytest cluster{n:04}")
response = await async_client.get(list_clusters_url)
assert response.status_code == status.HTTP_200_OK
returned_clusters_list = parse_obj_as(List[ClusterGet], response.json())
assert (
len(returned_clusters_list) == NUM_CLUSTERS + 1
) # the default cluster comes on top of the NUM_CLUSTERS
assert (
returned_clusters_list[0].id == 0
), "the first cluster shall be the platform default cluster"
# now create a second user and check the clusters are not seen by it BUT the default one
user_2 = registered_user()
response = await async_client.get(f"/v2/clusters?user_id={user_2['id']}")
assert response.status_code == status.HTTP_200_OK
returned_clusters_list = parse_obj_as(List[ClusterGet], response.json())
assert (
len(returned_clusters_list) == 1
), f"no default cluster in {returned_clusters_list=}"
assert (
returned_clusters_list[0].id == 0
), "default cluster id is not the one expected"
# let's create a few more clusters owned by user_1 with specific rights
for rights, name in [
(CLUSTER_NO_RIGHTS, "no rights"),
(CLUSTER_USER_RIGHTS, "user rights"),
(CLUSTER_MANAGER_RIGHTS, "manager rights"),
(CLUSTER_ADMIN_RIGHTS, "admin rights"),
]:
cluster(
user_1, # cluster is owned by user_1
name=f"cluster with {name}",
access_rights={
user_1["primary_gid"]: CLUSTER_ADMIN_RIGHTS,
user_2["primary_gid"]: rights,
},
)
response = await async_client.get(f"/v2/clusters?user_id={user_2['id']}")
assert response.status_code == status.HTTP_200_OK
user_2_clusters = parse_obj_as(List[ClusterGet], response.json())
# we should find 3 clusters + the default cluster
assert len(user_2_clusters) == 3 + 1
for name in [
"cluster with user rights",
"cluster with manager rights",
"cluster with admin rights",
]:
clusters = list(
filter(
lambda cluster, name=name: cluster.name == name,
user_2_clusters,
),
)
assert len(clusters) == 1, f"missing cluster with {name=}"
async def test_get_cluster(
clusters_config: None,
registered_user: Callable[..., Dict],
cluster: Callable[..., Cluster],
async_client: httpx.AsyncClient,
):
user_1 = registered_user()
# try to get one that does not exist
response = await async_client.get(
f"/v2/clusters/15615165165165?user_id={user_1['id']}"
)
assert response.status_code == status.HTTP_404_NOT_FOUND
# let's create some clusters
a_bunch_of_clusters = [
cluster(user_1, name=f"pytest cluster{n:04}") for n in range(111)
]
the_cluster = random.choice(a_bunch_of_clusters)
    # fetch one of the clusters we just created
response = await async_client.get(
f"/v2/clusters/{the_cluster.id}?user_id={user_1['id']}"
)
assert response.status_code == status.HTTP_200_OK, f"received {response.text}"
returned_cluster = parse_obj_as(ClusterGet, response.json())
assert returned_cluster
assert the_cluster.dict(exclude={"authentication"}) == returned_cluster.dict(
exclude={"authentication"}
)
user_2 = registered_user()
# getting the same cluster for user 2 shall return 403
response = await async_client.get(
f"/v2/clusters/{the_cluster.id}?user_id={user_2['id']}"
)
assert (
response.status_code == status.HTTP_403_FORBIDDEN
), f"received {response.text}"
# let's create a few cluster for user 2 and share some with user 1
for rights, user_1_expected_access in [
(CLUSTER_NO_RIGHTS, False),
(CLUSTER_USER_RIGHTS, True),
(CLUSTER_MANAGER_RIGHTS, True),
(CLUSTER_ADMIN_RIGHTS, True),
]:
a_cluster = cluster(
user_2, # cluster is owned by user_2
access_rights={
user_2["primary_gid"]: CLUSTER_ADMIN_RIGHTS,
user_1["primary_gid"]: rights,
},
)
# now let's check that user_1 can access only the correct ones
response = await async_client.get(
f"/v2/clusters/{a_cluster.id}?user_id={user_1['id']}"
)
assert (
response.status_code == status.HTTP_200_OK
if user_1_expected_access
else status.HTTP_403_FORBIDDEN
), f"received {response.text}"
@pytest.mark.parametrize(
"cluster_sharing_rights, can_use",
[
pytest.param(CLUSTER_ADMIN_RIGHTS, True, id="SHARE_WITH_ADMIN_RIGHTS"),
pytest.param(CLUSTER_MANAGER_RIGHTS, True, id="SHARE_WITH_MANAGER_RIGHTS"),
pytest.param(CLUSTER_USER_RIGHTS, True, id="SHARE_WITH_USER_RIGHTS"),
pytest.param(CLUSTER_NO_RIGHTS, False, id="DENY_RIGHTS"),
],
)
async def test_get_another_cluster(
clusters_config: None,
registered_user: Callable[..., Dict],
cluster: Callable[..., Cluster],
async_client: httpx.AsyncClient,
cluster_sharing_rights: ClusterAccessRights,
can_use: bool,
):
user_1 = registered_user()
user_2 = registered_user()
# let's create some clusters
a_bunch_of_clusters = [
cluster(
user_1,
name=f"pytest cluster{n:04}",
access_rights={
user_1["primary_gid"]: CLUSTER_ADMIN_RIGHTS,
user_2["primary_gid"]: cluster_sharing_rights,
},
)
for n in range(111)
]
the_cluster = random.choice(a_bunch_of_clusters)
# try to get the cluster as user 2
response = await async_client.get(
f"/v2/clusters/{the_cluster.id}?user_id={user_2['id']}"
)
assert (
response.status_code == status.HTTP_200_OK
if can_use
else status.HTTP_403_FORBIDDEN
), f"received {response.text}"
@pytest.mark.parametrize("with_query", [True, False])
async def test_get_default_cluster(
clusters_config: None,
registered_user: Callable[..., Dict],
async_client: httpx.AsyncClient,
with_query: bool,
):
user_1 = registered_user()
get_cluster_url = URL("/v2/clusters/default")
if with_query:
get_cluster_url = URL(f"/v2/clusters/default?user_id={user_1['id']}")
response = await async_client.get(get_cluster_url)
assert response.status_code == status.HTTP_200_OK, f"received {response.text}"
returned_cluster = parse_obj_as(ClusterGet, response.json())
assert returned_cluster
assert returned_cluster.id == 0
assert returned_cluster.name == "Default cluster"
assert 1 in returned_cluster.access_rights # everyone group is always 1
assert returned_cluster.access_rights[1] == CLUSTER_USER_RIGHTS
async def test_create_cluster(
clusters_config: None,
registered_user: Callable[..., Dict],
cluster_simple_authentication: Callable,
async_client: httpx.AsyncClient,
faker: Faker,
postgres_db: sa.engine.Engine,
clusters_cleaner,
):
user_1 = registered_user()
create_cluster_url = URL(f"/v2/clusters?user_id={user_1['id']}")
cluster_data = ClusterCreate(
endpoint=faker.uri(),
authentication=cluster_simple_authentication(),
name=faker.name(),
type=random.choice(list(ClusterType)),
)
response = await async_client.post(
create_cluster_url,
json=json.loads(
cluster_data.json(
by_alias=True,
exclude_unset=True,
encoder=create_json_encoder_wo_secrets(ClusterCreate),
)
),
)
assert response.status_code == status.HTTP_201_CREATED, f"received: {response.text}"
created_cluster = parse_obj_as(ClusterGet, response.json())
assert created_cluster
assert cluster_data.dict(
exclude={"id", "owner", "access_rights", "authentication"}
) == created_cluster.dict(
exclude={"id", "owner", "access_rights", "authentication"}
)
assert created_cluster.id is not None
assert created_cluster.owner == user_1["primary_gid"]
assert created_cluster.access_rights == {
user_1["primary_gid"]: CLUSTER_ADMIN_RIGHTS
}
# let's check that DB is correctly setup, there is one entry
with postgres_db.connect() as conn:
cluster_entry = conn.execute(
sa.select([clusters]).where(clusters.c.name == cluster_data.name)
).one()
async def test_update_own_cluster(
clusters_config: None,
registered_user: Callable[..., Dict],
cluster: Callable[..., Cluster],
cluster_simple_authentication: Callable,
async_client: httpx.AsyncClient,
faker: Faker,
):
_PATCH_EXPORT = {"by_alias": True, "exclude_unset": True, "exclude_none": True}
user_1 = registered_user()
# try to modify one that does not exist
response = await async_client.patch(
f"/v2/clusters/15615165165165?user_id={user_1['id']}",
json=json.loads(
ClusterPatch().json(
**_PATCH_EXPORT, encoder=create_json_encoder_wo_secrets(ClusterPatch)
)
),
)
assert response.status_code == status.HTTP_404_NOT_FOUND
# let's create some clusters
a_bunch_of_clusters = [
cluster(user_1, name=f"pytest cluster{n:04}") for n in range(111)
]
the_cluster = random.choice(a_bunch_of_clusters)
# get the original one
response = await async_client.get(
f"/v2/clusters/{the_cluster.id}?user_id={user_1['id']}"
)
assert response.status_code == status.HTTP_200_OK, f"received {response.text}"
original_cluster = parse_obj_as(ClusterGet, response.json())
# now we modify nothing
response = await async_client.patch(
f"/v2/clusters/{the_cluster.id}?user_id={user_1['id']}",
json=json.loads(
ClusterPatch().json(
**_PATCH_EXPORT, encoder=create_json_encoder_wo_secrets(ClusterPatch)
)
),
)
assert response.status_code == status.HTTP_200_OK, f"received {response.text}"
returned_cluster = parse_obj_as(ClusterGet, response.json())
assert returned_cluster.dict() == original_cluster.dict()
# modify some simple things
expected_modified_cluster = original_cluster.copy()
for cluster_patch in [
ClusterPatch(name=faker.name()),
ClusterPatch(description=faker.text()),
ClusterPatch(type=ClusterType.ON_PREMISE),
ClusterPatch(thumbnail=faker.uri()),
ClusterPatch(endpoint=faker.uri()),
ClusterPatch(authentication=cluster_simple_authentication()),
]:
jsonable_cluster_patch = json.loads(
cluster_patch.json(
**_PATCH_EXPORT, encoder=create_json_encoder_wo_secrets(ClusterPatch)
)
)
print(f"--> patching cluster with {jsonable_cluster_patch}")
response = await async_client.patch(
f"/v2/clusters/{the_cluster.id}?user_id={user_1['id']}",
json=jsonable_cluster_patch,
)
assert response.status_code == status.HTTP_200_OK, f"received {response.text}"
returned_cluster = parse_obj_as(ClusterGet, response.json())
expected_modified_cluster = expected_modified_cluster.copy(
update=cluster_patch.dict(**_PATCH_EXPORT)
)
assert returned_cluster.dict(
exclude={"authentication": {"password"}}
) == expected_modified_cluster.dict(exclude={"authentication": {"password"}})
# we can change the access rights, the owner rights are always kept
user_2 = registered_user()
for rights in [
CLUSTER_ADMIN_RIGHTS,
CLUSTER_MANAGER_RIGHTS,
CLUSTER_USER_RIGHTS,
CLUSTER_NO_RIGHTS,
]:
cluster_patch = ClusterPatch(accessRights={user_2["primary_gid"]: rights})
response = await async_client.patch(
f"/v2/clusters/{the_cluster.id}?user_id={user_1['id']}",
json=cluster_patch.dict(**_PATCH_EXPORT),
)
assert response.status_code == status.HTTP_200_OK, f"received {response.text}"
returned_cluster = ClusterGet.parse_obj(response.json())
expected_modified_cluster.access_rights[user_2["primary_gid"]] = rights
assert returned_cluster.dict(
exclude={"authentication": {"password"}}
) == expected_modified_cluster.dict(exclude={"authentication": {"password"}})
# we can change the owner since we are admin
cluster_patch = ClusterPatch(owner=user_2["primary_gid"])
response = await async_client.patch(
f"/v2/clusters/{the_cluster.id}?user_id={user_1['id']}",
json=json.loads(
cluster_patch.json(
**_PATCH_EXPORT, encoder=create_json_encoder_wo_secrets(ClusterPatch)
)
),
)
assert response.status_code == status.HTTP_200_OK, f"received {response.text}"
returned_cluster = ClusterGet.parse_obj(response.json())
expected_modified_cluster.owner = user_2["primary_gid"]
expected_modified_cluster.access_rights[
user_2["primary_gid"]
] = CLUSTER_ADMIN_RIGHTS
assert returned_cluster.dict(
exclude={"authentication": {"password"}}
) == expected_modified_cluster.dict(exclude={"authentication": {"password"}})
# we should not be able to reduce the rights of the new owner
cluster_patch = ClusterPatch(
accessRights={user_2["primary_gid"]: CLUSTER_NO_RIGHTS}
)
response = await async_client.patch(
f"/v2/clusters/{the_cluster.id}?user_id={user_1['id']}",
json=json.loads(
cluster_patch.json(
**_PATCH_EXPORT, encoder=create_json_encoder_wo_secrets(ClusterPatch)
)
),
)
assert (
response.status_code == status.HTTP_403_FORBIDDEN
), f"received {response.text}"
async def test_update_default_cluster_fails(
clusters_config: None,
registered_user: Callable[..., Dict],
cluster: Callable[..., Cluster],
cluster_simple_authentication: Callable,
async_client: httpx.AsyncClient,
faker: Faker,
):
_PATCH_EXPORT = {"by_alias": True, "exclude_unset": True, "exclude_none": True}
user_1 = registered_user()
# try to modify one that does not exist
response = await async_client.patch(
f"/v2/clusters/default?user_id={user_1['id']}",
json=json.loads(
ClusterPatch().json(
**_PATCH_EXPORT, encoder=create_json_encoder_wo_secrets(ClusterPatch)
)
),
)
assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY
@pytest.mark.parametrize(
"cluster_sharing_rights, can_use, can_manage, can_administer",
[
pytest.param(
CLUSTER_ADMIN_RIGHTS, True, True, True, id="SHARE_WITH_ADMIN_RIGHTS"
),
pytest.param(
CLUSTER_MANAGER_RIGHTS, True, True, False, id="SHARE_WITH_MANAGER_RIGHTS"
),
pytest.param(
CLUSTER_USER_RIGHTS, True, False, False, id="SHARE_WITH_USER_RIGHTS"
),
pytest.param(CLUSTER_NO_RIGHTS, False, False, False, id="DENY_RIGHTS"),
],
)
async def test_update_another_cluster(
clusters_config: None,
registered_user: Callable[..., Dict],
cluster: Callable[..., Cluster],
cluster_simple_authentication: Callable,
async_client: httpx.AsyncClient,
faker: Faker,
cluster_sharing_rights: ClusterAccessRights,
can_use: bool,
can_manage: bool,
can_administer: bool,
):
"""user_1 is the owner and administrator, he/she gives some rights to user 2"""
_PATCH_EXPORT = {"by_alias": True, "exclude_unset": True, "exclude_none": True}
user_1 = registered_user()
user_2 = registered_user()
# let's create some clusters
a_bunch_of_clusters = [
cluster(
user_1,
name=f"pytest cluster{n:04}",
access_rights={
user_1["primary_gid"]: CLUSTER_ADMIN_RIGHTS,
user_2["primary_gid"]: cluster_sharing_rights,
},
)
for n in range(111)
]
the_cluster = random.choice(a_bunch_of_clusters)
# get the original one
response = await async_client.get(
f"/v2/clusters/{the_cluster.id}?user_id={user_1['id']}"
)
assert response.status_code == status.HTTP_200_OK, f"received {response.text}"
original_cluster = parse_obj_as(ClusterGet, response.json())
# let's try to modify stuff as we are user 2
for cluster_patch in [
ClusterPatch(name=faker.name()),
ClusterPatch(description=faker.text()),
ClusterPatch(type=ClusterType.ON_PREMISE),
ClusterPatch(thumbnail=faker.uri()),
ClusterPatch(endpoint=faker.uri()),
ClusterPatch(authentication=cluster_simple_authentication()),
]:
response = await async_client.patch(
f"/v2/clusters/{the_cluster.id}?user_id={user_2['id']}",
json=json.loads(
cluster_patch.json(
**_PATCH_EXPORT,
encoder=create_json_encoder_wo_secrets(ClusterPatch),
)
),
)
assert (
response.status_code == status.HTTP_200_OK
if can_manage
else status.HTTP_403_FORBIDDEN
), f"received {response.text}"
# let's try to add/remove someone (reserved to managers)
user_3 = registered_user()
for rights in [
CLUSTER_USER_RIGHTS, # add user
CLUSTER_NO_RIGHTS, # remove user
]:
# try to add user 3
cluster_patch = ClusterPatch(accessRights={user_3["primary_gid"]: rights})
response = await async_client.patch(
f"/v2/clusters/{the_cluster.id}?user_id={user_2['id']}",
json=json.loads(
cluster_patch.json(
**_PATCH_EXPORT,
encoder=create_json_encoder_wo_secrets(ClusterPatch),
)
),
)
assert (
response.status_code == status.HTTP_200_OK
if can_manage
else status.HTTP_403_FORBIDDEN
), f"received {response.text} while {'adding' if rights == CLUSTER_USER_RIGHTS else 'removing'} user"
# modify rights to admin/manager (reserved to administrators)
for rights in [
CLUSTER_ADMIN_RIGHTS,
CLUSTER_MANAGER_RIGHTS,
]:
cluster_patch = ClusterPatch(accessRights={user_3["primary_gid"]: rights})
response = await async_client.patch(
f"/v2/clusters/{the_cluster.id}?user_id={user_2['id']}",
json=json.loads(
cluster_patch.json(
**_PATCH_EXPORT,
encoder=create_json_encoder_wo_secrets(ClusterPatch),
)
),
)
assert (
response.status_code == status.HTTP_200_OK
if can_administer
else status.HTTP_403_FORBIDDEN
), f"received {response.text}"
async def test_delete_cluster(
clusters_config: None,
registered_user: Callable[..., Dict],
cluster: Callable[..., Cluster],
async_client: httpx.AsyncClient,
):
user_1 = registered_user()
# let's create some clusters
a_bunch_of_clusters = [
cluster(
user_1,
name=f"pytest cluster{n:04}",
access_rights={
user_1["primary_gid"]: CLUSTER_ADMIN_RIGHTS,
},
)
for n in range(111)
]
the_cluster = random.choice(a_bunch_of_clusters)
# let's delete that cluster
response = await async_client.delete(
f"/v2/clusters/{the_cluster.id}?user_id={user_1['id']}"
)
assert (
response.status_code == status.HTTP_204_NO_CONTENT
), f"received {response.text}"
# now check it is gone
response = await async_client.get(
f"/v2/clusters/{the_cluster.id}?user_id={user_1['id']}"
)
assert (
response.status_code == status.HTTP_404_NOT_FOUND
), f"received {response.text}"
@pytest.mark.parametrize(
"cluster_sharing_rights, can_administer",
[
pytest.param(CLUSTER_ADMIN_RIGHTS, True, id="SHARE_WITH_ADMIN_RIGHTS"),
pytest.param(CLUSTER_MANAGER_RIGHTS, False, id="SHARE_WITH_MANAGER_RIGHTS"),
pytest.param(CLUSTER_USER_RIGHTS, False, id="SHARE_WITH_USER_RIGHTS"),
pytest.param(CLUSTER_NO_RIGHTS, False, id="DENY_RIGHTS"),
],
)
async def test_delete_another_cluster(
clusters_config: None,
registered_user: Callable[..., Dict],
cluster: Callable[..., Cluster],
cluster_simple_authentication: Callable,
async_client: httpx.AsyncClient,
faker: Faker,
cluster_sharing_rights: ClusterAccessRights,
can_administer: bool,
):
user_1 = registered_user()
user_2 = registered_user()
# let's create some clusters
a_bunch_of_clusters = [
cluster(
user_1,
name=f"pytest cluster{n:04}",
access_rights={
user_1["primary_gid"]: CLUSTER_ADMIN_RIGHTS,
user_2["primary_gid"]: cluster_sharing_rights,
},
)
for n in range(111)
]
the_cluster = random.choice(a_bunch_of_clusters)
# let's delete that cluster as user_2
response = await async_client.delete(
f"/v2/clusters/{the_cluster.id}?user_id={user_2['id']}"
)
assert (
response.status_code == status.HTTP_204_NO_CONTENT
if can_administer
else status.HTTP_403_FORBIDDEN
), f"received {response.text}"
# now check it is gone or still around
response = await async_client.get(
f"/v2/clusters/{the_cluster.id}?user_id={user_1['id']}"
)
assert (
response.status_code == status.HTTP_404_NOT_FOUND
if can_administer
else status.HTTP_200_OK
), f"received {response.text}"
async def test_delete_default_cluster_fails(
clusters_config: None,
registered_user: Callable[..., Dict],
async_client: httpx.AsyncClient,
):
user_1 = registered_user()
response = await async_client.delete(f"/v2/clusters/default?user_id={user_1['id']}")
assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY
async def test_ping_invalid_cluster_raises_422(
clusters_config: None,
async_client: httpx.AsyncClient,
faker: Faker,
cluster_simple_authentication: Callable[[], Dict[str, Any]],
):
# calling with wrong data raises
response = await async_client.post("/v2/clusters:ping", json={})
with pytest.raises(httpx.HTTPStatusError):
response.raise_for_status()
# calling with correct data but non existing cluster also raises
some_fake_cluster = ClusterPing(
endpoint=faker.uri(),
authentication=parse_obj_as(
ClusterAuthentication, cluster_simple_authentication()
),
)
response = await async_client.post(
"/v2/clusters:ping",
json=json.loads(
some_fake_cluster.json(
by_alias=True, encoder=create_json_encoder_wo_secrets(ClusterPing)
)
),
)
with pytest.raises(httpx.HTTPStatusError):
response.raise_for_status()
async def test_ping_cluster(
clusters_config: None,
async_client: httpx.AsyncClient,
local_dask_gateway_server: DaskGatewayServer,
):
valid_cluster = ClusterPing(
endpoint=parse_obj_as(AnyHttpUrl, local_dask_gateway_server.address),
authentication=SimpleAuthentication(
username="pytest_user",
password=parse_obj_as(SecretStr, local_dask_gateway_server.password),
),
)
response = await async_client.post(
"/v2/clusters:ping",
json=json.loads(
valid_cluster.json(
by_alias=True,
encoder=create_json_encoder_wo_secrets(SimpleAuthentication),
)
),
)
response.raise_for_status()
assert response.status_code == status.HTTP_204_NO_CONTENT
async def test_ping_specific_cluster(
clusters_config: None,
registered_user: Callable[..., Dict],
cluster: Callable[..., Cluster],
async_client: httpx.AsyncClient,
local_dask_gateway_server: DaskGatewayServer,
):
user_1 = registered_user()
# try to ping one that does not exist
response = await async_client.get(
f"/v2/clusters/15615165165165:ping?user_id={user_1['id']}"
)
assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY
# let's create some clusters and ping one
a_bunch_of_clusters = [
cluster(
user_1,
name=f"pytest cluster{n:04}",
endpoint=local_dask_gateway_server.address,
authentication=SimpleAuthentication(
username="pytest_user",
password=parse_obj_as(SecretStr, local_dask_gateway_server.password),
),
)
for n in range(111)
]
the_cluster = random.choice(a_bunch_of_clusters)
response = await async_client.post(
f"/v2/clusters/{the_cluster.id}:ping?user_id={user_1['id']}",
)
response.raise_for_status()
assert response.status_code == status.HTTP_204_NO_CONTENT
async def test_ping_default_cluster(
clusters_config: None,
registered_user: Callable[..., Dict],
async_client: httpx.AsyncClient,
):
user_1 = registered_user()
# try to ping one that does not exist
response = await async_client.post(
f"/v2/clusters/default:ping?user_id={user_1['id']}"
)
assert response.status_code == status.HTTP_204_NO_CONTENT
|
"""
This part of the code is the DQN brain, which is the brain of the agent.
All decisions are made in here.
Using Tensorflow to build the neural network.
View more on my tutorial page: https://morvanzhou.github.io/tutorials/
Using:
Tensorflow: 1.0
gym: 0.7.3
"""
import numpy as np
import pandas as pd
import tensorflow as tf
np.random.seed(1)
tf.random.set_seed(1)
# Deep Q Network off-policy
class DeepQNetwork:
def __init__(
self,
n_actions,
n_features,
learning_rate=0.01,
reward_decay=0.9,
e_greedy=0.9,
replace_target_iter=300,
memory_size=500,
batch_size=32,
e_greedy_increment=None,
output_graph=False,
):
'''
        n_actions: 4, number of actions (up / down / left / right)
        n_features: 2, number of state features (x, y)
'''
print('n_actions:', n_actions)
print('n_features:', n_features)
print('learning_rate:', learning_rate)
print('reward_decay:', reward_decay)
print('e_greedy:', e_greedy)
self.n_actions = n_actions
self.n_features = n_features
self.lr = learning_rate
self.gamma = reward_decay
self.epsilon_max = e_greedy
self.replace_target_iter = replace_target_iter
self.memory_size = memory_size
self.batch_size = batch_size
self.epsilon_increment = e_greedy_increment
self.epsilon = 0 if e_greedy_increment is not None else self.epsilon_max
# total learning step
self.learn_step_counter = 0
# initialize zero memory [s, a, r, s_]
self.memory = np.zeros((self.memory_size, n_features * 2 + 2))
# consist of [target_net, evaluate_net]
self._build_net()
self.cost_his = []
def _build_net(self):
        '''Build the evaluation (prediction) network and the target network.'''
# ------------------ build evaluate_net ------------------
s = tf.keras.Input([None, self.n_features], name='s')
q_target = tf.keras.Input([None, self.n_actions], name='Q_target')
        # evaluation (prediction) network
x = tf.keras.layers.Dense(20, activation=tf.keras.activations.relu, name='l1')(s)
x = tf.keras.layers.Dense(self.n_actions, name='l2')(x)
self.eval_net = tf.keras.Model(inputs=s, outputs=x)
        # loss function
self.loss = tf.keras.losses.MeanSquaredError()
        # optimizer (RMSProp gradient descent)
self._train_op = tf.keras.optimizers.RMSprop(learning_rate=self.lr)
# ------------------ build target_net ------------------
s_ = tf.keras.Input([None, self.n_features], name='s_')
        # target network
x = tf.keras.layers.Dense(20, activation=tf.keras.activations.relu, name='l1')(s_)
x = tf.keras.layers.Dense(self.n_actions, name='l2')(x)
self.target_net = tf.keras.Model(inputs=s_, outputs=x)
def replace_target(self):
        '''Copy the evaluation network weights into the target network.'''
self.target_net.get_layer(name='l1').set_weights(self.eval_net.get_layer(name='l1').get_weights())
self.target_net.get_layer(name='l2').set_weights(self.eval_net.get_layer(name='l2').get_weights())
def store_transition(self, s, a, r, s_):
if not hasattr(self, 'memory_counter'):
self.memory_counter = 0
transition = np.hstack((s, [a, r], s_))
# replace the old memory with new memory
index = self.memory_counter % self.memory_size
self.memory[index, :] = transition
self.memory_counter += 1
def choose_action(self, observation):
        # add a batch dimension before feeding the observation into the network
observation = observation[np.newaxis, :]
if np.random.uniform() < self.epsilon:
# forward feed the observation and get q value for every actions
actions_value = self.eval_net(observation).numpy()
action = np.argmax(actions_value)
else:
action = np.random.randint(0, self.n_actions)
return action
def learn(self):
# check to replace target parameters
if self.learn_step_counter % self.replace_target_iter == 0:
self.replace_target()
print('\ntarget_params_replaced\n')
# sample batch memory from all memory
if self.memory_counter > self.memory_size:
sample_index = np.random.choice(self.memory_size, size=self.batch_size)
else:
sample_index = np.random.choice(self.memory_counter, size=self.batch_size)
batch_memory = self.memory[sample_index, :]
with tf.GradientTape() as tape:
q_next = self.target_net(batch_memory[:, -self.n_features:]).numpy()
q_eval = self.eval_net(batch_memory[:, :self.n_features])
# change q_target w.r.t q_eval's action
q_target = q_eval.numpy()
batch_index = np.arange(self.batch_size, dtype=np.int32)
eval_act_index = batch_memory[:, self.n_features].astype(int)
reward = batch_memory[:, self.n_features + 1]
q_target[batch_index, eval_act_index] = reward + self.gamma * np.max(q_next, axis=1)
"""
For example in this batch I have 2 samples and 3 actions:
q_eval =
[[1, 2, 3],
[4, 5, 6]]
q_target = q_eval =
[[1, 2, 3],
[4, 5, 6]]
Then change q_target with the real q_target value w.r.t the q_eval's action.
For example in:
sample 0, I took action 0, and the max q_target value is -1;
sample 1, I took action 2, and the max q_target value is -2:
q_target =
[[-1, 2, 3],
[4, 5, -2]]
So the (q_target - q_eval) becomes:
[[(-1)-(1), 0, 0],
[0, 0, (-2)-(6)]]
            We then backpropagate this error w.r.t the corresponding action to the network,
            leaving the other actions with error=0 because we didn't choose them.
"""
# train eval network
self.cost = self.loss(y_true=q_target,y_pred=q_eval)
# print('loss:', self.cost)
gradients = tape.gradient(
self.cost, self.eval_net.trainable_variables)
self._train_op.apply_gradients(
zip(gradients, self.eval_net.trainable_variables))
self.cost_his.append(self.cost)
# increasing epsilon
self.epsilon = self.epsilon + self.epsilon_increment if self.epsilon < self.epsilon_max else self.epsilon_max
self.learn_step_counter += 1
def plot_cost(self):
import matplotlib.pyplot as plt
plt.plot(np.arange(len(self.cost_his)), self.cost_his)
plt.ylabel('Cost')
plt.xlabel('training steps')
plt.show()
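# Usage sketch (illustrative; assumes a maze-style env with 2 state features and
# 4 discrete actions whose step() returns (s_, r, done), as in the tutorial this
# code follows):
#     dqn = DeepQNetwork(n_actions=4, n_features=2)
#     s = env.reset()
#     a = dqn.choose_action(s)
#     s_, r, done = env.step(a)
#     dqn.store_transition(s, a, r, s_)
#     if step % 5 == 0:
#         dqn.learn()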
|
# python Python.py
from math import sin, pi
def composite_simpsons(f, a, b, n):
step_size = (b - a) / n
integral = 0
for k in range(1, n + 1):
x_k0 = a + step_size * k
x_k1 = a + step_size * (k - 1)
step = step_size / 6 * (f(x_k0) + f(x_k1) + 4 * f((x_k0 + x_k1) / 2))
integral += step
return integral
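# Each subinterval [x_{k-1}, x_k] contributes (h/6)*(f(x_{k-1}) + 4*f(m) + f(x_k)),
# where m is the midpoint and h the step size; summing these terms over all n
# subintervals (as composite_simpsons does above) gives the composite Simpson
# approximation. For sin on [0, 2*pi] the exact integral is 0, so the printed
# value below should be ~0 up to floating-point error.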
integral_of_function = composite_simpsons(sin, 0, 2*pi, 100000)
print(f"{integral_of_function}")
|
def get(name):
return {}
def getMergedConf(name):
return {}
|
#!/usr/bin/env python3
import sys
def left(dx,dy):
if (dx,dy) == (1,0):
return (0,1)
elif (dx,dy) == (0,1):
return (-1,0)
elif (dx,dy) == (-1,0):
return (0,-1)
else:
return (1,0)
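# The walk below traces the standard square spiral one cell at a time: it turns
# left whenever stepping straight ahead would leave the current ring, except at
# the ring's bottom-right corner, where the ring grows and the walk continues
# straight into the next ring. The final print is the Manhattan distance from
# cell `num` back to the origin.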
def main(args):
# nubs = [s.strip() for s in sys.stdin]
x = 0
y = 0
n = 1
dx, dy = 1, 0
# last = 0
size = 0
# num = int(args[1])
num = 312051
# num = 12
while True:
#print(n, x, y, size)
if n == num: break
if max(abs(x+dx), abs(y+dy)) > size:
if x == size and y == -size:
size += 1
else:
dx, dy = left(dx, dy)
x += dx
y += dy
n += 1
print(abs(x) + abs(y))
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
import json
import os
import tempfile
import datetime
import config
import pyorc
from dateutil import parser
from kafka import KafkaProducer
from hdfs import HdfsError, InsecureClient
from repository.interface import BaseRepository
from repository.singleton import singleton
KST = datetime.timezone(datetime.timedelta(hours=9))
@singleton
class HDFS(BaseRepository):
def __init__(self, host: str, port, user: str):
super().__init__()
self.host = host
self.port = port
self.user = user
        self.producer = None
def connect(self):
self.conn = InsecureClient(f"http://{self.host}:{self.port}",
user=self.user)
if os.environ.get("KAFKA_BOOTSTRAP", None):
            self.producer = KafkaProducer(
                bootstrap_servers=os.environ.get("KAFKA_BOOTSTRAP",
                                                 "localhost:1234")
            )
else:
self.producer = None
def disconnect(self):
self.save_snapshot()
        if self.producer:
self.producer.close()
    def insert_rows(self, rows: list[tuple[datetime.datetime, str, str, str, str, str]]):
self.add_buff(rows)
self.flush()
def _last_datetime(self, category, date):
if self.conn.status(f"/krwordcloud/add-article/{date}")['length'] == 0:
return config.min_date
tfname = ''
with tempfile.NamedTemporaryFile("wb") as tf:
tfname = tf.name
with self.conn.read(f"/krwordcloud/add-article/{date}",
chunk_size=8096) as hf:
for chunk in hf:
tf.write(chunk)
with open(tfname, 'rb') as tf:
reader = pyorc.Reader(tf)
maximum = datetime.datetime \
.strptime(f"{date} GMT+0900", "%Y-%m-%d.orc GMT%z")
for row in reader:
if row[0] > maximum and row[1] == category:
maximum = row[0]
        # clean up the temporary ORC copy before returning
        os.unlink(tfname)
        if maximum < config.min_date:
            return config.min_date
        elif maximum > datetime.datetime.now().replace(tzinfo=KST):
            return datetime.datetime.now().replace(tzinfo=KST)
        else:
            return maximum
def make_entries(self):
entries = dict()
hdfs_entries = dict()
lookup_hdfs = []
self.load_snapshot()
for category in config.categories:
category_rows = list(
filter(lambda row: row[1] == category, self.buff))
if len(category_rows) > 0:
last = max(category_rows, key=lambda row: row[0])
entries[category] = last[0]
else:
lookup_hdfs.append(category)
try:
dates = self.conn.list("/krwordcloud/add-article/")
if len(dates) > 0:
for category in lookup_hdfs:
found = False
for last in reversed(dates):
try:
entries[category] = self._last_datetime(category,
last)
found = True
break
except Exception as e:
print(e)
continue
if found is False:
entries[category] = config.min_date
else:
hdfs_entries = dict.fromkeys(lookup_hdfs, config.min_date)
except HdfsError:
entries[category] = config.min_date
except Exception as e:
print(e)
return {k: v for k, v in sorted({**entries, **hdfs_entries}.items(),
key=lambda item: item[1])}
def save_snapshot(self):
print('save_snapshot')
with self.conn.write("/krwordcloud/snapshot.json", overwrite=True,
encoding="utf-8") as f:
data = list(map(lambda x: (x[0].isoformat(), x[1], x[2], x[3],
x[4], x[5]), self.buff))
json.dump(data, f, ensure_ascii=False)
def load_snapshot(self):
print('load_snapshot')
try:
with self.conn.read("/krwordcloud/snapshot.json",
encoding="utf-8") as f:
self.buff = list(map(
lambda x: (parser.parse(x[0]), x[1],
x[2], x[3], x[4], x[5]), json.load(f)))
except Exception:
self.buff = []
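    # flush() groups the buffered rows by calendar date: every fully elapsed
    # date is written out (to Kafka when a producer is configured, otherwise to
    # HDFS as an ORC file) and only the rows of the most recent date stay in the
    # in-memory buffer, which is then snapshotted.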
def flush(self):
dates = sorted(list(set(map(lambda row: row[0].date(), self.buff))))
if len(dates) > 1:
for d in dates[:-1]:
data = list(filter(lambda row: row[0].date() == d, self.buff))
if self.producer:
self._kafka_flush(d, data)
else:
self._hdfs_flush(d, data)
self.buff = list(filter(
lambda row: row[0].date() == dates[-1], self.buff))
self.save_snapshot()
def _kafka_flush(self, date, data):
self.producer.send(f"add-article-{date}", data)
def _hdfs_flush(self, date, data):
with self.conn.write(
f"/krwordcloud/add-article/{date}.orc",
overwrite=True
) as hf:
tfname = ''
with tempfile.NamedTemporaryFile(mode="wb+", delete=False) as tf:
tfname = tf.name
with pyorc.Writer(
tf,
schema="struct<field0:timestamp,field1:string," +
"field2:string,field3:string>",
) as of:
of.writerows(data)
with open(tfname, 'rb') as tf:
for line in tf:
hf.write(line)
os.unlink(tfname)
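# --- illustrative usage sketch (not part of the original module) -------------
# Assumed WebHDFS endpoint, user and row values; requires a reachable cluster
# and the `config`/`repository` packages on the import path.
if __name__ == "__main__":
    repo = HDFS(host="localhost", port=9870, user="hdfs")
    repo.connect()
    # each row is a 6-tuple: a KST timestamp, the category, and four string
    # fields (placeholder values below)
    repo.insert_rows([
        (datetime.datetime.now(tz=KST), "economy",
         "press", "title", "body", "url"),
    ])
    repo.disconnect()  # saves a snapshot and closes the Kafka producer, if any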
|
from .base_installer import FlaskExtInstaller
from ..config import TAB
class FlaskTalismanInstaller(FlaskExtInstaller):
package_name = "Flask-Talisman"
imports = ["from flask_talisman import Talisman"]
inits = ["talisman = Talisman()"]
attachments = [
'force_https = True if app.config.get("ENV") != "testing" else False',
"talisman.init_app(",
f"{TAB}app,",
f"{TAB}force_https=force_https",
")",
]
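# Illustrative rendering (not part of the original class): if the strings in
# `attachments` above are written verbatim into a generated app factory, and
# assuming TAB expands to four spaces, the emitted code would read roughly as:
#
#     force_https = True if app.config.get("ENV") != "testing" else False
#     talisman.init_app(
#         app,
#         force_https=force_https
#     )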
|
from pathlib import Path
from os import path
# __file__ = "./__init__.py"
THIS_DIR = Path(path.dirname(path.abspath(__file__)))
PROJECT_DIR = THIS_DIR.parent.parent
DATA_DIR = PROJECT_DIR / "data"
|
import json
AUDIT_FILENAME = "apic-pipeline-audit.json"
FILE_NAME = "print_audit.py"
INFO = "[INFO]["+ FILE_NAME +"] - "
WORKING_DIR_BASIC = "../WORKSPACE"
def orchestrate():
try:
with open(WORKING_DIR_BASIC + "/" + AUDIT_FILENAME,'r') as f:
data = f.read()
data_json = json.loads(data)
print(INFO + "AUDIT")
print(INFO + "-----")
print(json.dumps(data_json, indent=4, sort_keys=False))
except Exception as e:
raise Exception("[ERROR] - Exception in " + FILE_NAME + ": " + repr(e))
orchestrate()
|
# pyramid
from pyramid.view import view_config
from pyramid.renderers import render_to_response
from pyramid.httpexceptions import HTTPNotFound
from pyramid.httpexceptions import HTTPSeeOther
# stdlib
# pypi
from six.moves.urllib.parse import quote_plus
# localapp
from ..lib import formhandling
from ..lib.docs import docify
from ..lib.docs import formatted_get_docs
from ..lib.forms import Form_AcmeAccount_new__auth
from ..lib.forms import Form_AcmeAccount_new__file
from ..lib.forms import Form_AcmeAccount_mark
from ..lib.forms import Form_AcmeAccount_edit
from ..lib.forms import Form_AcmeAccount_deactivate_authorizations
from ..lib.forms import Form_AcmeAccount_deactivate
from ..lib.forms import Form_AcmeAccount_key_change
from ..lib.form_utils import AcmeAccountUploadParser
from ..lib.handler import Handler, items_per_page
from ..lib.handler import json_pagination
from ...lib import cert_utils
from ...lib import db as lib_db
from ...lib import errors
from ...lib import utils
from ...model import utils as model_utils
# ==============================================================================
class View_List(Handler):
@view_config(route_name="admin:acme_accounts", renderer="/admin/acme_accounts.mako")
@view_config(
route_name="admin:acme_accounts_paginated",
renderer="/admin/acme_accounts.mako",
)
@view_config(route_name="admin:acme_accounts|json", renderer="json")
@view_config(route_name="admin:acme_accounts_paginated|json", renderer="json")
@docify(
{
"endpoint": "/acme-accounts.json",
"section": "acme-account",
"about": """list AcmeAccount(s)""",
"POST": None,
"GET": True,
"example": "curl {ADMIN_PREFIX}/acme-accounts.json",
}
)
@docify(
{
"endpoint": "/acme-accounts/{PAGE}.json",
"section": "acme-account",
"example": "curl {ADMIN_PREFIX}/acme-accounts/1.json",
"variant_of": "/acme-accounts.json",
}
)
def list(self):
items_count = lib_db.get.get__AcmeAccount__count(self.request.api_context)
url_template = (
"%s/acme-accounts/{0}"
% self.request.registry.settings["app_settings"]["admin_prefix"]
)
if self.request.wants_json:
url_template = "%s.json" % url_template
(pager, offset) = self._paginate(items_count, url_template=url_template)
items_paged = lib_db.get.get__AcmeAccount__paginated(
self.request.api_context, limit=items_per_page, offset=offset
)
if self.request.wants_json:
_accounts = {k.id: k.as_json for k in items_paged}
return {
"AcmeAccounts": _accounts,
"pagination": json_pagination(items_count, pager),
}
return {
"project": "peter_sslers",
"AcmeAccounts_count": items_count,
"AcmeAccounts": items_paged,
"pager": pager,
}
class View_New(Handler):
@view_config(route_name="admin:acme_account:upload")
@view_config(route_name="admin:acme_account:upload|json", renderer="json")
@docify(
{
"endpoint": "/acme-account/upload.json",
"section": "acme-account",
"about": """upload an AcmeAccount and AcmeAccountKey""",
"POST": True,
"GET": None,
"examples": [
"curl --form '[email protected]' --form 'acme_account_provider_id=1' {ADMIN_PREFIX}/acme-account/upload.json",
"curl --form '[email protected]' 'account_key_file_le_pkey=@private_key.json' '[email protected]' {ADMIN_PREFIX}/acme-account/upload.json",
],
"form_fields": {
"account_key_file_pem": "Group A",
"acme_account_provider_id": "Group A",
"account_key_file_le_meta": "Group B",
"account_key_file_le_pkey": "Group B",
"account_key_file_le_reg": "Group B",
"account__contact": "the contact's email address for the ACME Server",
"account__private_key_cycle": "how should the PrivateKey be cycled for this account?",
},
"notes": [
"You must submit ALL items from Group A or Group B",
],
"valid_options": {
"acme_account_provider_id": "{RENDER_ON_REQUEST}",
"account__private_key_cycle": model_utils.PrivateKeyCycle._options_AcmeAccount_private_key_cycle,
},
}
)
def upload(self):
if self.request.method == "POST":
return self._upload__submit()
return self._upload__print()
def _upload__print(self):
self._load_AcmeAccountProviders()
if self.request.wants_json:
return formatted_get_docs(self, "/acme-account/upload.json")
# quick setup, we need a bunch of options for dropdowns...
return render_to_response(
"/admin/acme_account-upload.mako",
{"AcmeAccountProviders": self.dbAcmeAccountProviders},
self.request,
)
def _upload__submit(self):
try:
(result, formStash) = formhandling.form_validate(
self.request, schema=Form_AcmeAccount_new__file, validate_get=False
)
if not result:
raise formhandling.FormInvalid()
parser = AcmeAccountUploadParser(formStash)
parser.require_upload(require_contact=None, require_technology=False)
# this will have `contact` and `private_key_cycle`
key_create_args = parser.getcreate_args
acme_account_provider_id = key_create_args.get("acme_account_provider_id")
if acme_account_provider_id:
self._load_AcmeAccountProviders()
_acme_account_provider_ids__all = [
i.id for i in self.dbAcmeAccountProviders
]
if acme_account_provider_id not in _acme_account_provider_ids__all:
# `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)`
formStash.fatal_field(
field="acme_account_provider_id",
message="Invalid provider submitted.",
)
key_create_args["event_type"] = "AcmeAccount__insert"
key_create_args[
"acme_account_key_source_id"
] = model_utils.AcmeAccountKeySource.from_string("imported")
try:
(dbAcmeAccount, _is_created,) = lib_db.getcreate.getcreate__AcmeAccount(
self.request.api_context, **key_create_args
)
except errors.ConflictingObject as exc:
# ConflictingObject: args[0] = tuple(conflicting_object, error_message_string)
# `formStash.fatal_form()` will raise `FormFieldInvalid(FormInvalid)`
formStash.fatal_form(message=exc.args[0][1])
if self.request.wants_json:
return {
"result": "success",
"AcmeAccount": dbAcmeAccount.as_json,
"is_created": True if _is_created else False,
"is_existing": False if _is_created else True,
}
return HTTPSeeOther(
"%s/acme-account/%s?result=success&operation=upload%s"
% (
self.request.admin_url,
dbAcmeAccount.id,
("&is_created=1" if _is_created else "&is_existing=1"),
)
)
except formhandling.FormInvalid as exc:
if self.request.wants_json:
return {"result": "error", "form_errors": formStash.errors}
return formhandling.form_reprint(self.request, self._upload__print)
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@view_config(route_name="admin:acme_account:new")
@view_config(route_name="admin:acme_account:new|json", renderer="json")
@docify(
{
"endpoint": "/acme-account/new.json",
"section": "acme-account",
"about": """Create a new AcmeAccount""",
"POST": True,
"GET": None,
"instructions": [
"""curl --form '[email protected]' --form 'acme_account_provider_id=1' {ADMIN_PREFIX}/acme-account/new.json""",
],
"form_fields": {
"acme_account_provider_id": "which provider",
"account__contact": "the contact's email address for the ACME Server",
"account__private_key_cycle": "how should the PrivateKey be cycled for this account?",
"account__private_key_technology": "what is the key technology preference for this account?",
},
"valid_options": {
"acme_account_provider_id": "{RENDER_ON_REQUEST}",
"account__private_key_cycle": model_utils.PrivateKeyCycle._options_AcmeAccount_private_key_cycle,
"account__private_key_technology": model_utils.KeyTechnology._options_AcmeAccount_private_key_technology,
},
}
)
def new(self):
if self.request.method == "POST":
return self._new__submit()
return self._new__print()
def _new__print(self):
self._load_AcmeAccountProviders()
if self.request.wants_json:
return formatted_get_docs(self, "/acme-account/new.json")
# quick setup, we need a bunch of options for dropdowns...
return render_to_response(
"/admin/acme_account-new.mako",
{"AcmeAccountProviders": self.dbAcmeAccountProviders},
self.request,
)
def _new__submit(self):
try:
(result, formStash) = formhandling.form_validate(
self.request, schema=Form_AcmeAccount_new__auth, validate_get=False
)
if not result:
raise formhandling.FormInvalid()
self._load_AcmeAccountProviders()
_acme_account_provider_ids__all = [
i.id for i in self.dbAcmeAccountProviders
]
_acme_account_provider_ids__enabled = [
i.id for i in self.dbAcmeAccountProviders if i.is_enabled
]
acme_account_provider_id = formStash.results["acme_account_provider_id"]
if acme_account_provider_id not in _acme_account_provider_ids__all:
# `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)`
formStash.fatal_field(
field="acme_account_provider_id",
message="Invalid provider submitted.",
)
if acme_account_provider_id not in _acme_account_provider_ids__enabled:
# `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)`
formStash.fatal_field(
field="acme_account_provider_id",
message="This provider is no longer enabled.",
)
parser = AcmeAccountUploadParser(formStash)
parser.require_new(require_contact=True)
# this will have `contact` and `private_key_cycle`
key_create_args = parser.getcreate_args
key_pem = cert_utils.new_account_key() # rsa_bits=None
key_create_args["key_pem"] = key_pem
key_create_args["event_type"] = "AcmeAccount__create"
key_create_args[
"acme_account_key_source_id"
] = model_utils.AcmeAccountKeySource.from_string("generated")
dbAcmeAccount = None
_dbAcmeAccount = None
try:
(
_dbAcmeAccount,
_is_created,
) = lib_db.getcreate.getcreate__AcmeAccount(
self.request.api_context, **key_create_args
)
# result is either: `new-account` or `existing-account`
# failing will raise an exception
authenticatedUser = lib_db.actions_acme.do__AcmeAccount_AcmeV2_register(
self.request.api_context, _dbAcmeAccount
)
dbAcmeAccount = _dbAcmeAccount
except errors.ConflictingObject as exc:
# this happens via `getcreate__AcmeAccount`
# * args[0] = tuple(conflicting_object, error_message_string)
_dbAcmeAccountDuplicate = exc.args[0][0]
# `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)`
formStash.fatal_field(
field="account__contact",
message=exc.args[0][1],
)
except errors.AcmeDuplicateAccount as exc:
# this happens via `do__AcmeAccount_AcmeV2_register`
# args[0] MUST be the duplicate AcmeAccount
_dbAcmeAccountDuplicate = exc.args[0]
# the 'Duplicate' account was the earlier account and therefore
# it is our merge Target
lib_db.update.update_AcmeAccount_from_new_duplicate(
self.request.api_context, _dbAcmeAccountDuplicate, _dbAcmeAccount
)
dbAcmeAccount = _dbAcmeAccountDuplicate
if self.request.wants_json:
return {
"result": "success",
"AcmeAccount": dbAcmeAccount.as_json,
"is_created": True if _is_created else False,
"is_existing": False if _is_created else True,
}
return HTTPSeeOther(
"%s/acme-account/%s?result=success&operation=new%s"
% (
self.request.admin_url,
dbAcmeAccount.id,
("&is_created=1" if _is_created else "&is_existing=1"),
)
)
except errors.AcmeServerError as exc:
if self.request.wants_json:
return {"result": "error", "form_errors": formStash.errors}
formStash.register_error_main_exception(exc)
return formhandling.form_reprint(self.request, self._new__print)
except formhandling.FormInvalid as exc:
if self.request.wants_json:
return {"result": "error", "form_errors": formStash.errors}
return formhandling.form_reprint(self.request, self._new__print)
class View_Focus(Handler):
dbAcmeAccount = None
def _focus(self):
if self.dbAcmeAccount is None:
dbAcmeAccount = lib_db.get.get__AcmeAccount__by_id(
self.request.api_context,
self.request.matchdict["id"],
)
if not dbAcmeAccount:
raise HTTPNotFound("the key was not found")
self.dbAcmeAccount = dbAcmeAccount
self._focus_url = "%s/acme-account/%s" % (
self.request.admin_url,
self.dbAcmeAccount.id,
)
return self.dbAcmeAccount
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@view_config(
route_name="admin:acme_account:focus",
renderer="/admin/acme_account-focus.mako",
)
@view_config(route_name="admin:acme_account:focus|json", renderer="json")
@docify(
{
"endpoint": "/acme-account/{ID}.json",
"section": "acme-account",
"about": """AcmeAccount record""",
"POST": None,
"GET": True,
"example": "curl {ADMIN_PREFIX}/acme-account/1.json",
}
)
def focus(self):
dbAcmeAccount = self._focus()
if self.request.wants_json:
_prefix = "%s" % self._focus_url
return {
"AcmeAccount": dbAcmeAccount.as_json,
"raw": {
"pem.txt": "%s/key.pem.txt" % _prefix,
"pem": "%s/key.pem" % _prefix,
"der": "%s/key.key" % _prefix,
},
}
return {"project": "peter_sslers", "AcmeAccount": dbAcmeAccount}
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@view_config(route_name="admin:acme_account:focus:raw", renderer="string")
@docify(
{
"endpoint": "/acme-account/{ID}/key.pem",
"section": "acme-account",
"about": """AcmeAccount focus. Active key as PEM""",
"POST": None,
"GET": True,
"example": "curl {ADMIN_PREFIX}/acme-account/1/key.pem",
}
)
@docify(
{
"endpoint": "/acme-account/{ID}/key.pem.txt",
"section": "acme-account",
"about": """AcmeAccount focus. Active key as PEM.txt""",
"POST": None,
"GET": True,
"example": "curl {ADMIN_PREFIX}/acme-account/1/key.pem.txt",
}
)
@docify(
{
"endpoint": "/acme-account/{ID}/key.key",
"section": "acme-account",
"about": """AcmeAccount focus. Active key as pkcs8 (DER)""",
"POST": None,
"GET": True,
"example": "curl {ADMIN_PREFIX}/acme-account/1/key.key",
}
)
def focus_raw(self):
dbAcmeAccount = self._focus()
if self.request.matchdict["format"] == "pem":
self.request.response.content_type = "application/x-pem-file"
return dbAcmeAccount.acme_account_key.key_pem
elif self.request.matchdict["format"] == "pem.txt":
return dbAcmeAccount.acme_account_key.key_pem
elif self.request.matchdict["format"] == "key":
self.request.response.content_type = "application/pkcs8"
as_der = cert_utils.convert_pem_to_der(
pem_data=dbAcmeAccount.acme_account_key.key_pem
)
return as_der
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@view_config(route_name="admin:acme_account:focus:parse|json", renderer="json")
@docify(
{
"endpoint": "/acme-account/{ID}/parse.json",
"section": "acme-account",
"about": """AcmeAccount focus. Active key, parsed""",
"POST": None,
"GET": True,
"example": "curl {ADMIN_PREFIX}/acme-account/1/parse.json",
}
)
def focus_parse_json(self):
dbAcmeAccount = self._focus()
return {
"AcmeAccount": dbAcmeAccount.as_json,
}
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@view_config(
route_name="admin:acme_account:focus:acme_authorizations",
renderer="/admin/acme_account-focus-acme_authorizations.mako",
)
@view_config(
route_name="admin:acme_account:focus:acme_authorizations_paginated",
renderer="/admin/acme_account-focus-acme_authorizations.mako",
)
@view_config(
route_name="admin:acme_account:focus:acme_authorizations|json",
renderer="json",
)
@view_config(
route_name="admin:acme_account:focus:acme_authorizations_paginated|json",
renderer="json",
)
@docify(
{
"endpoint": "/acme-account/{ID}/acme-authorizations.json",
"section": "acme-account",
"about": """AcmeAccount: Focus. list AcmeAuthorizations(s)""",
"POST": None,
"GET": True,
"example": "curl {ADMIN_PREFIX}/acme-account/1/acme-authorizations.json",
}
)
@docify(
{
"endpoint": "/acme-account/{ID}/acme-authorizations/{PAGE}.json",
"section": "acme-account",
"example": "curl {ADMIN_PREFIX}/acme-account/1/acme-authorizations/1.json",
"variant_of": "/acme-account/{ID}/acme-authorizations.json",
}
)
def related__AcmeAuthorizations(self):
dbAcmeAccount = self._focus()
url_status = self.request.params.get("status")
if url_status not in ("active", "active-expired"):
url_status = ""
if url_status == "active":
sidenav_option = "active"
elif url_status == "active-expired":
sidenav_option = "active-expired"
else:
sidenav_option = "all"
active_only = True if url_status == "active" else False
expired_only = True if url_status == "active-expired" else False
items_count = lib_db.get.get__AcmeAuthorization__by_AcmeAccountId__count(
self.request.api_context,
dbAcmeAccount.id,
active_only=active_only,
expired_only=expired_only,
)
url_template = "%s/acme-authorizations/{0}" % self._focus_url
if self.request.wants_json:
url_template = "%s.json" % url_template
if url_status:
url_template = "%s?status=%s" % (url_template, url_status)
(pager, offset) = self._paginate(items_count, url_template=url_template)
items_paged = lib_db.get.get__AcmeAuthorization__by_AcmeAccountId__paginated(
self.request.api_context,
dbAcmeAccount.id,
active_only=active_only,
expired_only=expired_only,
limit=items_per_page,
offset=offset,
)
if self.request.wants_json:
_authorizations = [k.as_json for k in items_paged]
return {
"AcmeAuthorizations": _authorizations,
"pagination": json_pagination(items_count, pager),
}
return {
"project": "peter_sslers",
"AcmeAccount": dbAcmeAccount,
"AcmeAuthorizations_count": items_count,
"AcmeAuthorizations": items_paged,
"pager": pager,
}
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@view_config(
route_name="admin:acme_account:focus:acme_account_keys",
renderer="/admin/acme_account-focus-acme_account_keys.mako",
)
@view_config(
route_name="admin:acme_account:focus:acme_account_keys_paginated",
renderer="/admin/acme_account-focus-acme_account_keys.mako",
)
@view_config(
route_name="admin:acme_account:focus:acme_account_keys|json",
renderer="json",
)
@view_config(
route_name="admin:acme_account:focus:acme_account_keys_paginated|json",
renderer="json",
)
@docify(
{
"endpoint": "/acme-account/{ID}/acme-account-keys.json",
"section": "acme-account",
"about": """AcmeAccount: Focus. list AcmeAccountKeys(s)""",
"POST": None,
"GET": True,
"example": "curl {ADMIN_PREFIX}/acme-account/1/acme-account-keys.json",
}
)
@docify(
{
"endpoint": "/acme-account/{ID}/acme-account-keys/{PAGE}.json",
"section": "acme-account",
"example": "curl {ADMIN_PREFIX}/acme-account/1/acme-account-keys/1.json",
"variant_of": "/acme-account/{ID}/acme-account-keys.json",
}
)
def related__AcmeAccountKeys(self):
dbAcmeAccount = self._focus()
items_count = lib_db.get.get__AcmeAccountKey__by_AcmeAccountId__count(
self.request.api_context,
dbAcmeAccount.id,
)
url_template = "%s/acme-account-keys/{0}" % self._focus_url
if self.request.wants_json:
url_template = "%s.json" % url_template
(pager, offset) = self._paginate(items_count, url_template=url_template)
items_paged = lib_db.get.get__AcmeAccountKey__by_AcmeAccountId__paginated(
self.request.api_context,
dbAcmeAccount.id,
limit=items_per_page,
offset=offset,
)
if self.request.wants_json:
_acme_account_keys = [k.as_json for k in items_paged]
return {
"AcmeAccountKeys": _acme_account_keys,
"pagination": json_pagination(items_count, pager),
}
return {
"project": "peter_sslers",
"AcmeAccount": dbAcmeAccount,
"AcmeAccountKeys_count": items_count,
"AcmeAccountKeys": items_paged,
"pager": pager,
}
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@view_config(
route_name="admin:acme_account:focus:acme_orders",
renderer="/admin/acme_account-focus-acme_orders.mako",
)
@view_config(
route_name="admin:acme_account:focus:acme_orders_paginated",
renderer="/admin/acme_account-focus-acme_orders.mako",
)
@docify(
{
"endpoint": "/acme-account/{ID}/acme-orders.json",
"section": "acme-account",
"about": """AcmeAccount: Focus. list AcmeOrder(s)""",
"POST": None,
"GET": True,
"example": "curl {ADMIN_PREFIX}/acme-account/1/acme-orders.json",
}
)
@docify(
{
"endpoint": "/acme-account/{ID}/acme-orders/{PAGE}.json",
"section": "acme-account",
"example": "curl {ADMIN_PREFIX}/acme-account/1/acme-orders/1.json",
"variant_of": "/acme-account/{ID}/acme-orders.json",
}
)
def related__AcmeOrders(self):
dbAcmeAccount = self._focus()
items_count = lib_db.get.get__AcmeOrder__by_AcmeAccountId__count(
self.request.api_context, dbAcmeAccount.id
)
url_template = "%s/acme-orders/{0}" % self._focus_url
(pager, offset) = self._paginate(items_count, url_template=url_template)
items_paged = lib_db.get.get__AcmeOrder__by_AcmeAccountId__paginated(
self.request.api_context,
dbAcmeAccount.id,
limit=items_per_page,
offset=offset,
)
return {
"project": "peter_sslers",
"AcmeAccount": dbAcmeAccount,
"AcmeOrders_count": items_count,
"AcmeOrders": items_paged,
"pager": pager,
}
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@view_config(
route_name="admin:acme_account:focus:private_keys",
renderer="/admin/acme_account-focus-private_keys.mako",
)
@view_config(
route_name="admin:acme_account:focus:private_keys_paginated",
renderer="/admin/acme_account-focus-private_keys.mako",
)
@docify(
{
"endpoint": "/acme-account/{ID}/private-keys.json",
"section": "acme-account",
"about": """AcmeAccount: Focus. list PrivateKeys(s)""",
"POST": None,
"GET": True,
"example": "curl {ADMIN_PREFIX}/acme-account/1/private-keys.json",
}
)
@docify(
{
"endpoint": "/acme-account/{ID}/private-keys/{PAGE}.json",
"section": "acme-account",
"example": "curl {ADMIN_PREFIX}/acme-account/1/private-keys/1.json",
"variant_of": "/acme-account/{ID}/private-keys.json",
}
)
def related__PrivateKeys(self):
dbAcmeAccount = self._focus()
items_count = lib_db.get.get__PrivateKey__by_AcmeAccountIdOwner__count(
self.request.api_context, dbAcmeAccount.id
)
url_template = "%s/private-keys/{0}" % self._focus_url
(pager, offset) = self._paginate(items_count, url_template=url_template)
items_paged = lib_db.get.get__PrivateKey__by_AcmeAccountIdOwner__paginated(
self.request.api_context,
dbAcmeAccount.id,
limit=items_per_page,
offset=offset,
)
return {
"project": "peter_sslers",
"AcmeAccount": dbAcmeAccount,
"PrivateKeys_count": items_count,
"PrivateKeys": items_paged,
"pager": pager,
}
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@view_config(
route_name="admin:acme_account:focus:certificate_signeds",
renderer="/admin/acme_account-focus-certificate_signeds.mako",
)
@view_config(
route_name="admin:acme_account:focus:certificate_signeds_paginated",
renderer="/admin/acme_account-focus-certificate_signeds.mako",
)
def related__CertificateSigneds(self):
dbAcmeAccount = self._focus()
items_count = lib_db.get.get__CertificateSigned__by_AcmeAccountId__count(
self.request.api_context, dbAcmeAccount.id
)
url_template = "%s/certificate-signeds/{0}" % self._focus_url
(pager, offset) = self._paginate(items_count, url_template=url_template)
items_paged = lib_db.get.get__CertificateSigned__by_AcmeAccountId__paginated(
self.request.api_context,
dbAcmeAccount.id,
limit=items_per_page,
offset=offset,
)
return {
"project": "peter_sslers",
"AcmeAccount": dbAcmeAccount,
"CertificateSigneds_count": items_count,
"CertificateSigneds": items_paged,
"pager": pager,
}
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@view_config(
route_name="admin:acme_account:focus:queue_certificates",
renderer="/admin/acme_account-focus-queue_certificates.mako",
)
@view_config(
route_name="admin:acme_account:focus:queue_certificates_paginated",
renderer="/admin/acme_account-focus-queue_certificates.mako",
)
def related__QueueCertificates(self):
dbAcmeAccount = self._focus()
items_count = lib_db.get.get__QueueCertificate__by_AcmeAccountId__count(
self.request.api_context, dbAcmeAccount.id
)
url_template = "%s/queue-certificates/{0}" % self._focus_url
(pager, offset) = self._paginate(items_count, url_template=url_template)
items_paged = lib_db.get.get__QueueCertificate__by_AcmeAccountId__paginated(
self.request.api_context,
dbAcmeAccount.id,
limit=items_per_page,
offset=offset,
)
return {
"project": "peter_sslers",
"AcmeAccount": dbAcmeAccount,
"QueueCertificates_count": items_count,
"QueueCertificates": items_paged,
"pager": pager,
}
class View_Focus_Manipulate(View_Focus):
@view_config(route_name="admin:acme_account:focus:edit")
@view_config(route_name="admin:acme_account:focus:edit|json", renderer="json")
@docify(
{
"endpoint": "/acme-account/{ID}/edit.json",
"section": "acme-account",
"about": """AcmeAccount: Edit""",
"POST": True,
"GET": None,
"example": "curl {ADMIN_PREFIX}/acme-account/1/edit.json",
"instructions": [
"""curl --form 'account__private_key_cycle=certificate'"""
""" --form 'account__private_key_technology=rsa'"""
""" {ADMIN_PREFIX}/acme-account/{ID}/edit.json""",
],
"form_fields": {
"account__private_key_cycle": "option for cycling the PrivateKey on renewals",
"account__private_key_technology": "what is the key technology preference for this account?",
},
"valid_options": {
"account__private_key_cycle": model_utils.PrivateKeyCycle._options_AcmeAccount_private_key_cycle,
"account__private_key_technology": model_utils.KeyTechnology._options_AcmeAccount_private_key_technology,
},
}
)
def focus_edit(self):
dbAcmeAccount = self._focus()
if self.request.method == "POST":
return self._focus_edit__submit()
return self._focus_edit__print()
def _focus_edit__print(self):
if self.request.wants_json:
return formatted_get_docs(self, "/acme-account/{ID}/edit.json")
return render_to_response(
"/admin/acme_account-focus-edit.mako",
{"AcmeAccount": self.dbAcmeAccount},
self.request,
)
def _focus_edit__submit(self):
try:
(result, formStash) = formhandling.form_validate(
self.request, schema=Form_AcmeAccount_edit, validate_get=False
)
if not result:
raise formhandling.FormInvalid()
event_type = model_utils.OperationsEventType.from_string(
"AcmeAccount__edit"
)
event_payload_dict = utils.new_event_payload_dict()
event_payload_dict["acme_account_id"] = self.dbAcmeAccount.id
event_payload_dict["action"] = "edit"
event_payload_dict["edit"] = {
"old": {},
"new": {},
}
private_key_cycle = formStash.results["account__private_key_cycle"]
if private_key_cycle != self.dbAcmeAccount.private_key_cycle:
try:
event_payload_dict["edit"]["old"][
"private_key_cycle"
] = self.dbAcmeAccount.private_key_cycle
event_payload_dict["edit"]["new"][
"private_key_cycle"
] = private_key_cycle
event_status = lib_db.update.update_AcmeAccount__private_key_cycle(
self.request.api_context,
self.dbAcmeAccount,
private_key_cycle,
)
except errors.InvalidTransition as exc:
# `formStash.fatal_form(` will raise a `FormInvalid()`
formStash.fatal_form(message=exc.args[0])
private_key_technology = formStash.results[
"account__private_key_technology"
]
if private_key_technology != self.dbAcmeAccount.private_key_technology:
try:
event_payload_dict["edit"]["old"][
"private_key_technology"
] = self.dbAcmeAccount.private_key_technology
event_payload_dict["edit"]["new"][
"private_key_technology"
] = private_key_technology
event_status = (
lib_db.update.update_AcmeAccount__private_key_technology(
self.request.api_context,
self.dbAcmeAccount,
private_key_technology,
)
)
except errors.InvalidTransition as exc:
# `formStash.fatal_form(` will raise a `FormInvalid()`
formStash.fatal_form(message=exc.args[0])
# bookkeeping
dbOperationsEvent = lib_db.logger.log__OperationsEvent(
self.request.api_context, event_type, event_payload_dict
)
lib_db.logger._log_object_event(
self.request.api_context,
dbOperationsEvent=dbOperationsEvent,
event_status_id=model_utils.OperationsObjectEventStatus.from_string(
event_status
),
dbAcmeAccount=self.dbAcmeAccount,
)
if self.request.wants_json:
return {
"result": "success",
"AcmeAccount": self.dbAcmeAccount.as_json,
}
url_success = "%s?result=success&operation=edit" % (self._focus_url,)
return HTTPSeeOther(url_success)
except formhandling.FormInvalid as exc:
if self.request.wants_json:
return {"result": "error", "form_errors": formStash.errors}
return formhandling.form_reprint(self.request, self._focus_edit__print)
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def _handle_potentially_deactivated(self, exc):
if exc.args[0] == 403:
if isinstance(exc.args[1], dict):
info = exc.args[1]
                # pebble and boulder use the same strings
if info.get("type") == "urn:ietf:params:acme:error:unauthorized":
if (
info.get("detail")
== "An account with the provided public key exists but is deactivated"
):
if not self.dbAcmeAccount.timestamp_deactivated:
lib_db.update.update_AcmeAccount__set_deactivated(
self.request.api_context, self.dbAcmeAccount
)
self.request.api_context.dbSession.flush(
objects=[self.dbAcmeAccount]
)
return True
return False
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@view_config(
route_name="admin:acme_account:focus:acme_server:authenticate",
renderer=None,
)
@view_config(
route_name="admin:acme_account:focus:acme_server:authenticate|json",
renderer="json",
)
@docify(
{
"endpoint": "/acme-account/{ID}/acme-server/authenticate.json",
"section": "acme-account",
"about": """AcmeAccount: Focus. ACME Server - Authenticate""",
"summary": """Authenticate the key against the provider's new-reg endpoint""",
"POST": True,
"GET": None,
"instructions": [
"""curl -X POST {ADMIN_PREFIX}/acme-account/{ID}/acme-server/authenticate.json""",
],
}
)
def focus__acme_server_authenticate(self):
"""
this just hits the api, hoping we authenticate correctly.
"""
dbAcmeAccount = self._focus()
if not dbAcmeAccount.is_can_authenticate:
error_message = "This AcmeAccount can not Authenticate"
if self.request.wants_json:
return {
"error": error_message,
}
url_error = (
"%s?result=error&error=%s&operation=acme-server--authenticate"
% (
self._focus_url,
error_message.replace(" ", "+"),
)
)
return HTTPSeeOther(url_error)
if self.request.method == "POST":
return self._focus__authenticate__submit()
return self._focus__authenticate__print()
def _focus__authenticate__print(self):
dbAcmeAccount = self._focus()
if self.request.wants_json:
return formatted_get_docs(
self, "/acme-account/{ID}/acme-server/authenticate.json"
)
url_post_required = (
"%s?result=error&error=post+required&operation=acme-server--authenticate"
% (self._focus_url,)
)
return HTTPSeeOther(url_post_required)
def _focus__authenticate__submit(self):
dbAcmeAccount = self._focus()
# result is either: `new-account` or `existing-account`
# failing will raise an exception
try:
authenticatedUser = lib_db.actions_acme.do__AcmeAccount_AcmeV2_authenticate(
self.request.api_context, dbAcmeAccount
)
except errors.AcmeServerError as exc:
if not self._handle_potentially_deactivated(exc):
raise
if self.request.wants_json:
return {"AcmeAccount": dbAcmeAccount.as_json}
return HTTPSeeOther(
"%s?result=success&operation=acme-server--authenticate&is_authenticated=%s"
% (self._focus_url, True)
)
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@view_config(
route_name="admin:acme_account:focus:acme_server:check",
renderer=None,
)
@view_config(
route_name="admin:acme_account:focus:acme_server:check|json",
renderer="json",
)
@docify(
{
"endpoint": "/acme-account/{ID}/acme-server/check.json",
"section": "acme-account",
"about": """AcmeAccount: Focus. ACME Server - Check""",
"summary": """Check the key against the provider's new-reg endpoint""",
"POST": True,
"GET": None,
"instructions": [
"""curl -X POST {ADMIN_PREFIX}/acme-account/{ID}/acme-server/check.json""",
],
}
)
def focus__acme_server_check(self):
"""
this just hits the api, hoping we check correctly.
"""
dbAcmeAccount = self._focus()
if not dbAcmeAccount.is_can_authenticate:
error_message = "This AcmeAccount can not Check"
if self.request.wants_json:
return {
"error": error_message,
}
url_error = "%s?result=error&error=%s&operation=acme-server--check" % (
self._focus_url,
error_message.replace(" ", "+"),
)
return HTTPSeeOther(url_error)
if self.request.method == "POST":
return self._focus__check__submit()
return self._focus__check__print()
def _focus__check__print(self):
dbAcmeAccount = self._focus()
if self.request.wants_json:
return formatted_get_docs(self, "/acme-account/{ID}/acme-server/check.json")
url_post_required = (
"%s?result=error&error=post+required&operation=acme-server--check"
% (self._focus_url,)
)
return HTTPSeeOther(url_post_required)
def _focus__check__submit(self):
dbAcmeAccount = self._focus()
# result is either: `existing-account` or ERROR
# failing will raise an exception
# passing in `onlyReturnExisting` will log the "check"
_result = None
_message = None
try:
checkedUser = lib_db.actions_acme.do__AcmeAccount_AcmeV2_authenticate(
self.request.api_context, dbAcmeAccount, onlyReturnExisting=True
)
_result = "success"
except errors.AcmeServerError as exc:
            # only catch this if `onlyReturnExisting` and there is a DNE error
if (exc.args[0] == 400) and (
exc.args[1]["type"] == "urn:ietf:params:acme:error:accountDoesNotExist"
):
_result = "error"
if "detail" in exc.args[1]:
_message = exc.args[1]["detail"]
else:
raise
if self.request.wants_json:
return {
"AcmeAccount": dbAcmeAccount.as_json,
"is_checked": True,
"result": _result,
"message": _message,
}
_message = quote_plus(_message) if _message else ""
return HTTPSeeOther(
"%s?result=success&operation=acme-server--check&is_checked=%s&result=%s&message=%s"
% (self._focus_url, True, _result, _message)
)
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@view_config(route_name="admin:acme_account:focus:mark", renderer=None)
@view_config(route_name="admin:acme_account:focus:mark|json", renderer="json")
@docify(
{
"endpoint": "/acme-account/{ID}/mark.json",
"section": "acme-account",
"about": """AcmeAccount: Focus. Mark""",
"POST": True,
"GET": None,
"example": "curl --form 'action=active' {ADMIN_PREFIX}/acme-account/1/mark.json",
"form_fields": {"action": "the intended action"},
"valid_options": {"action": ["global_default", "active", "inactive"]},
}
)
def focus_mark(self):
dbAcmeAccount = self._focus()
if self.request.method == "POST":
return self._focus_mark__submit()
return self._focus_mark__print()
def _focus_mark__print(self):
dbAcmeAccount = self._focus()
if self.request.wants_json:
return formatted_get_docs(self, "/acme-account/{ID}/mark.json")
url_post_required = "%s?result=error&error=post+required&operation=mark" % (
self._focus_url
)
return HTTPSeeOther(url_post_required)
def _focus_mark__submit(self):
dbAcmeAccount = self._focus()
action = self.request.params.get("action")
try:
(result, formStash) = formhandling.form_validate(
self.request,
schema=Form_AcmeAccount_mark,
validate_get=False,
# validate_post=False
)
if not result:
raise formhandling.FormInvalid()
action = formStash.results["action"]
event_type = model_utils.OperationsEventType.from_string(
"AcmeAccount__mark"
)
event_payload_dict = utils.new_event_payload_dict()
event_payload_dict["acme_account_id"] = dbAcmeAccount.id
event_payload_dict["action"] = formStash.results["action"]
event_status = False
event_alt = None
try:
if action == "active":
event_status = lib_db.update.update_AcmeAccount__set_active(
self.request.api_context, dbAcmeAccount
)
elif action == "inactive":
event_status = lib_db.update.update_AcmeAccount__unset_active(
self.request.api_context, dbAcmeAccount
)
elif action == "global_default":
(
event_status,
alt_info,
) = lib_db.update.update_AcmeAccount__set_global_default(
self.request.api_context, dbAcmeAccount
)
if alt_info:
for (k, v) in alt_info["event_payload_dict"].items():
event_payload_dict[k] = v
event_alt = alt_info["event_alt"]
else:
raise errors.InvalidTransition("Invalid option")
except errors.InvalidTransition as exc:
# `formStash.fatal_form(` will raise a `FormInvalid()`
formStash.fatal_form(message=exc.args[0])
self.request.api_context.dbSession.flush(objects=[dbAcmeAccount])
# bookkeeping
dbOperationsEvent = lib_db.logger.log__OperationsEvent(
self.request.api_context, event_type, event_payload_dict
)
lib_db.logger._log_object_event(
self.request.api_context,
dbOperationsEvent=dbOperationsEvent,
event_status_id=model_utils.OperationsObjectEventStatus.from_string(
event_status
),
dbAcmeAccount=dbAcmeAccount,
)
if event_alt:
lib_db.logger._log_object_event(
self.request.api_context,
dbOperationsEvent=dbOperationsEvent,
event_status_id=model_utils.OperationsObjectEventStatus.from_string(
event_alt[0]
),
dbAcmeAccount=event_alt[1],
)
if self.request.wants_json:
return {"result": "success", "AcmeAccount": dbAcmeAccount.as_json}
url_success = "%s?result=success&operation=mark&action=%s" % (
self._focus_url,
action,
)
return HTTPSeeOther(url_success)
except formhandling.FormInvalid as exc:
if self.request.wants_json:
return {"result": "error", "form_errors": formStash.errors}
url_failure = "%s?result=error&error=%s&operation=mark&action=%s" % (
self._focus_url,
errors.formstash_to_querystring(formStash),
action,
)
raise HTTPSeeOther(url_failure)
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@view_config(
route_name="admin:acme_account:focus:acme_server:deactivate_pending_authorizations",
renderer=None,
)
@view_config(
route_name="admin:acme_account:focus:acme_server:deactivate_pending_authorizations|json",
renderer="json",
)
@docify(
{
"endpoint": "/acme-account/{ID}/acme-server/deactivate-pending-authorizations.json",
"section": "acme-account",
"about": """AcmeAccount: Focus. ACME Server - Deactivate Pending Authorizations""",
"summary": """deactivate pending authorizations on the acme server, must supply the authorization_ids""",
"POST": True,
"GET": None,
"instructions": [
"""curl --form 'acme_authorization_id=1' --form 'acme_authorization_id=2' {ADMIN_PREFIX}/acme-account/1/acme-server/deactivate-pending-authorizations.json""",
],
"form_fields": {
"authorization_id": "the pending authorization id to delete ",
},
}
)
def focus__acme_server_deactivate_pending_authorizations(self):
"""
this just hits the api, hoping we authenticate correctly.
"""
dbAcmeAccount = self._focus()
if not dbAcmeAccount.is_can_authenticate:
error_message = "This AcmeAccount can not Authenticate"
if self.request.wants_json:
return {
"error": error_message,
}
url_error = "%s?result=error&error=%s&operation=acme-server--deactivate-pending-authorizations" % (
self._focus_url,
error_message.replace(" ", "+"),
)
return HTTPSeeOther(url_error)
if self.request.method == "POST":
return self._focus__acme_server_deactivate_pending_authorizations__submit()
return self._focus__acme_server_deactivate_pending_authorizations__print()
def _focus__acme_server_deactivate_pending_authorizations__print(self):
dbAcmeAccount = self._focus()
if self.request.wants_json:
return formatted_get_docs(
self,
"/acme-account/{ID}/acme-server/deactivate-pending-authorizations.json",
)
url_post_required = (
"%s/acme-authorizations?status=active&result=error&error=post+required&operation=acme-server--deactivate-pending-authorizations"
% (self._focus_url,)
)
return HTTPSeeOther(url_post_required)
def _focus__acme_server_deactivate_pending_authorizations__submit(self):
dbAcmeAccount = self._focus()
try:
(result, formStash) = formhandling.form_validate(
self.request,
schema=Form_AcmeAccount_deactivate_authorizations,
validate_get=False,
)
if not result:
raise formhandling.FormInvalid()
if not formStash.results["acme_authorization_id"]:
# `formStash.fatal_form()` will raise `FormInvalid()`
formStash.fatal_form(
"You must supply at least one `acme_authorization_id` to deactivate."
)
results = lib_db.actions_acme.do__AcmeV2_AcmeAccount__acme_server_deactivate_authorizations(
self.request.api_context,
dbAcmeAccount=dbAcmeAccount,
acme_authorization_ids=formStash.results["acme_authorization_id"],
)
if self.request.wants_json:
return {
"result": "success",
"results": results,
"AcmeAccount": dbAcmeAccount.as_json,
}
return HTTPSeeOther(
"%s/acme-authorizations?status=active&result=success&operation=acme-server--deactivate-pending-authorizations"
% (self._focus_url,)
)
except formhandling.FormInvalid as exc:
if self.request.wants_json:
return {"result": "error", "form_errors": formStash.errors}
return HTTPSeeOther(
"%s/acme-authorizations?status=active&result=error&error=%s&operation=acme-server--deactivate-pending-authorizations"
% (
self._focus_url,
errors.formstash_to_querystring(formStash),
)
)
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@view_config(
route_name="admin:acme_account:focus:acme_server:deactivate",
renderer=None,
)
@view_config(
route_name="admin:acme_account:focus:acme_server:deactivate|json",
renderer="json",
)
@docify(
{
"endpoint": "/acme-account/{ID}/acme-server/deactivate.json",
"section": "acme-account",
"about": """AcmeAccount: Focus. ACME Server - Deactivate""",
"POST": True,
"GET": None,
"instructions": [
"""curl -X POST {ADMIN_PREFIX}/acme-account/{ID}/acme-server/authenticate.json""",
],
"form_fields": {
"key_pem": "the active key as md5(PEM) or PEM",
},
"instructions": [
"""curl -X POST {ADMIN_PREFIX}/acme-server/deactivate.json""",
],
}
)
def focus__acme_server_deactivate(self):
"""
this just hits the api, hoping we authenticate correctly.
"""
dbAcmeAccount = self._focus()
if not dbAcmeAccount.is_can_deactivate:
error_message = "This AcmeAccount can not be deactivated"
if self.request.wants_json:
return {
"error": error_message,
}
url_error = "%s?result=error&error=%s&operation=acme-server--deactivate" % (
self._focus_url,
error_message.replace(" ", "+"),
)
return HTTPSeeOther(url_error)
if self.request.method == "POST":
return self._focus__acme_server_deactivate__submit()
return self._focus__acme_server_deactivate__print()
def _focus__acme_server_deactivate__print(self):
dbAcmeAccount = self._focus()
if self.request.wants_json:
return formatted_get_docs(
self, "/acme-account/{ID}/acme-server/deactivate.json"
)
return render_to_response(
"/admin/acme_account-focus-deactivate.mako",
{"AcmeAccount": dbAcmeAccount},
self.request,
)
def _focus__acme_server_deactivate__submit(self):
dbAcmeAccount = self._focus()
try:
(result, formStash) = formhandling.form_validate(
self.request,
schema=Form_AcmeAccount_deactivate,
validate_get=False,
)
if not result:
raise formhandling.FormInvalid()
# `key_pem` can match the full or md5
_key_pem = formStash.results["key_pem"]
if _key_pem != dbAcmeAccount.acme_account_key.key_pem_md5:
_key_pem = cert_utils.cleanup_pem_text(_key_pem)
if _key_pem != dbAcmeAccount.acme_account_key.key_pem:
formStash.fatal_field(
field="key_pem",
message="This does not match the active account key",
)
try:
results = lib_db.actions_acme.do__AcmeV2_AcmeAccount__deactivate(
self.request.api_context,
dbAcmeAccount=dbAcmeAccount,
transaction_commit=True,
)
except errors.AcmeServerError as exc:
if self._handle_potentially_deactivated(exc):
formStash.fatal_form(message=str(exc.args[1]))
raise
if self.request.wants_json:
return {
"result": "success",
"AcmeAccount": dbAcmeAccount.as_json,
}
return HTTPSeeOther(
"%s?result=success&operation=acme-server--deactivate"
% (self._focus_url,)
)
except formhandling.FormInvalid as exc:
if self.request.wants_json:
return {"result": "error", "form_errors": formStash.errors}
return formhandling.form_reprint(
self.request, self._focus__acme_server_deactivate__print
)
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@view_config(
route_name="admin:acme_account:focus:acme_server:key_change",
renderer=None,
)
@view_config(
route_name="admin:acme_account:focus:acme_server:key_change|json",
renderer="json",
)
@docify(
{
"endpoint": "/acme-account/{ID}/acme-server/key-change.json",
"section": "acme-account",
"about": """AcmeAccount: Focus. ACME Server - KeyChange""",
"POST": True,
"GET": None,
"instructions": [
"""curl -X POST {ADMIN_PREFIX}/acme-account/{ID}/acme-server/key-change.json""",
],
"form_fields": {
"key_pem_existing": "the active key as md5(PEM) or PEM",
},
"instructions": [
"""curl -X POST {ADMIN_PREFIX}/acme-server/key-change.json""",
],
}
)
def focus__acme_server_key_change(self):
"""
this just hits the api, hoping we authenticate correctly.
"""
dbAcmeAccount = self._focus()
if self.request.method == "POST":
return self._focus__acme_server_key_change__submit()
if not dbAcmeAccount.is_can_key_change:
error_message = "This AcmeAccount can not be key changed"
if self.request.wants_json:
return {
"error": error_message,
}
url_error = "%s?result=error&error=%s&operation=acme-server--key-change" % (
self._focus_url,
error_message.replace(" ", "+"),
)
return HTTPSeeOther(url_error)
return self._focus__acme_server_key_change__print()
def _focus__acme_server_key_change__print(self):
dbAcmeAccount = self._focus()
if self.request.wants_json:
return formatted_get_docs(
self, "/acme-account/{ID}/acme-server/key-change.json"
)
return render_to_response(
"/admin/acme_account-focus-key_change.mako",
{"AcmeAccount": dbAcmeAccount},
self.request,
)
def _focus__acme_server_key_change__submit(self):
dbAcmeAccount = self._focus()
try:
(result, formStash) = formhandling.form_validate(
self.request,
schema=Form_AcmeAccount_key_change,
validate_get=False,
)
if not result:
raise formhandling.FormInvalid()
# `key_pem` can match the full or md5
_key_pem_old = formStash.results["key_pem_existing"]
if _key_pem_old != dbAcmeAccount.acme_account_key.key_pem_md5:
_key_pem_old = cert_utils.cleanup_pem_text(_key_pem_old)
if _key_pem_old != dbAcmeAccount.acme_account_key.key_pem:
formStash.fatal_field(
field="key_pem_existing",
message="This does not match the active account key",
)
try:
results = lib_db.actions_acme.do__AcmeV2_AcmeAccount__key_change(
self.request.api_context,
dbAcmeAccount=dbAcmeAccount,
key_pem_new=None,
transaction_commit=True,
)
except errors.ConflictingObject as exc:
# args[0] = tuple(conflicting_object, error_message_string)
formStash.fatal_form(message=str(exc.args[0][1]))
if self.request.wants_json:
return {
"result": "success",
"AcmeAccount": dbAcmeAccount.as_json,
}
return HTTPSeeOther(
"%s?&result=success&operation=acme-server--key-change"
% (self._focus_url,)
)
except formhandling.FormInvalid as exc:
if self.request.wants_json:
return {"result": "error", "form_errors": formStash.errors}
return formhandling.form_reprint(
self.request, self._focus__acme_server_key_change__print
)
|
# Copyright (c) 2013 Rackspace Hosting, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""pools: JSON schema for zaqar-queues pools resources."""
# NOTE(cpp-cabrera): options can be anything. These will be unique to
# each storage driver, so we don't perform any further validation at
# the transport layer.
patch_options = {
'type': 'object', 'properties': {
'options': {
'type': 'object'
}
}
}
# NOTE(cpp-cabrera): a string valid for use in a URI
# TODO(cpp-cabrera): perhaps validate this further using jsonschema's
# uri validator as per rfc3987
patch_uri = {
'type': 'object', 'properties': {
'uri': {
'type': 'string'
},
'additionalProperties': False
}
}
patch_group = {
'type': 'object', 'properties': {
'uri': {
'type': 'string'
},
'additionalProperties': False
}
}
patch_weight = {
'type': 'object', 'properties': {
'weight': {
'type': 'integer', 'minimum': 0, 'maximum': 2**32 - 1
},
'additionalProperties': False
}
}
create = {
'type': 'object', 'properties': {
'weight': patch_weight['properties']['weight'],
'group': patch_group['properties']['uri'],
'uri': patch_uri['properties']['uri'],
'options': patch_options['properties']['options']
},
# NOTE(cpp-cabrera): options need not be present. Storage drivers
# must provide reasonable defaults.
'required': ['uri', 'weight'],
'additionalProperties': False
}
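# --- illustrative usage sketch (not part of the original module) -------------
# A transport layer could enforce the `create` schema above with the
# `jsonschema` package; the sample pool document below is an assumption for
# demonstration only.
if __name__ == '__main__':
    import jsonschema

    sample_pool = {
        'uri': 'mongodb://127.0.0.1:27017',
        'weight': 100,
        'options': {'ssl': False},
    }
    # Raises jsonschema.exceptions.ValidationError when required keys are
    # missing or unknown top-level keys are present.
    jsonschema.validate(sample_pool, create)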
|
#!/usr/bin/env casarun
""".. _Archive_Pipeline-api:
**Archive_Pipeline** --- Produces standard JAO ADMIT pipeline products for spectral line plus continuum images.
===========================================================
Example Usage:
admit_recipe Archive_Pipeline Spectral-Cube Continuum-Image
or
admit.recipe("Archive_Pipeline","Spectral-Cube","Continuum-Image")
If primary beam files given:
admit_recipe Archive_Pipeline Spectral-Cube Continuum-Image specpb="Spectral-Primary-Beam" contpb="Continuum-Primary-Beam"
or
admit.recipe("Archive_Pipeline","Spectral-Cube","Continuum-Image", specpb="Spectral-Primary-Beam", contpb="Continuum-Primary-Beam")
This ADMIT script makes standard ADMIT pipeline products for a local dataset. The flow is:
#. Ingest the cube and optional continuum image into ADMIT doing primary beam correction if PB file(s) supplied. This will create CASA images if inputs are FITS.
#. Calculate statistics on cube for later use
#. Make a zeroth moment map over all emission in the cube.
#. Make a position-velocity (PV) slice oriented on the moment emission from the previous step.
#. Find segments with emission or absorption and try to ID the line(s)
#. Cut out cubes for each line found; cube name is line name
#. Calculate moment 0,1,2 maps for each line cube
#. Make a spectrum at the peak in each moment map
#. Make a PV slice through the peak in each moment map
#. Compute statistics on continuum map
#. Search for sources in the continuum map down to a given cutoff.
#. Make a spectrum at each source found from in the previous step.
Parameters
----------
param1 : spectral image cube
Your CASA or FITS spectral line image cube. If the cube is not primary beam
corrected, then do not supply a primary beam for it. Default cubes from *clean*
are not primary beam corrected: The noise does not rise up at the edge of the field
param2 : continuum image, optional
Your CASA or FITS continuum image. This image should have one channel (NAXIS3=1).
If the image is not primary beam corrected, then do not supply the primary beam for it.
Optional Keywords
-----------------
- *specpb* Spectral primary beam image
The CASA or FITS primary beam image for the spectral line cube. Cubes from
the ALMA archive are often primary beam corrected. In these images,
the noise rises out from the center of the imaged field. In this
case, you need to input both the image file and the primary beam
cube. Both are available to you from the archive.
- *contpb* Continuum primary beam image
The CASA or FITS primary beam image for the continuum image.
- *numsigma* in LineID_AT: typically use 6.0 to 8.0 for 4000 channels;
4.0 if you only have a few hundred channels
3.0 if you want to dig deep but then expect to get fake lines too.
- *minchan* in LineID_AT: minimum width of line in channels to assume when searching for lines.
- *pad* in Linecube_AT: this controls how many "extra" channels are added to either end of the line sub-cube to be cut from the input cube. It should generally be comparable to your line width
- *cutoff* in Moment_AT: number of sigma for cut levels in making moment maps: one value for each requested moment map. Must be a Python list: [1.0, 2.0,3.0] for example for moment 0, 1 and 2 maps
- *width* in PVSlice_AT: width in channels orthogonal to the slice length to sum.
"""
#
# Required imports
#
import os, sys
import ast
import admit
# Give a descriptive name to required optional keyless arguments
# to be used in help string.
REQARGS = ["Spectral-Cube"]
# Give a descriptive name to optional keyless arguments to be used
# in help string.
OPTARGS = ["Continuum-Image"]
# Keywords recognized by this program and their default values.
# Non-matching keywords given on command line will be ignored.
#KEYS = {"minchan" :4, "numsigma": 5.0, "cutoff":[1.5,3.0,3.0], "width":5, "pad":50 , "specpb":None, "contpb":None}
KEYS = {"minchan" :4, "numsigma": 5.0, "cutoff":[2.0], "width":1, "pad":5 , "specpb":None, "contpb":None}
# Brief description of accepted keywords
KEYDESC = {
"specpb" : "Primary beam file to correct spectral cube. Default:None", # required
"contpb" : "Primary beam file to correct continuum image. Default:None", # optional
"numsigma": "number of sigma cutoff for LineID_AT. Default:%s" % str(KEYS['numsigma']),
"minchan" : "minimum channel width of line when searching for lines. Default:%s" % str(KEYS['minchan']),
"pad" : "number of extra channels added to either end of LineCubes. Default: %s" % str(KEYS['pad']),
"cutoff" : "list giving number of sigma for cut levels for output moment maps. Default:%s" % str(KEYS['cutoff']),
"width" : "width in channels of position-velocity slice in PVSlice_AT. Default:%s" % str(KEYS['width']),
}
# put the functionality in a method so that it is not executed when
# sphinx imports it to make the documentation. The method name starts
# with _ so that the method is not listed in the sphinx-generated documentation
def _run(argv):
# Verify arguments are good
if ( not admit.recipeutils._processargs(argv,REQARGS,OPTARGS,KEYS,KEYDESC,__doc__)): return
cubefile = argv[1]
contfile = None
if len(argv) == 3:
contfile = argv[2]
projdir = os.path.splitext(argv[1])[0] + '.admit'
loglevel = 10 # INFO = 15 should be user default
# convert key values from string
try:
KEYS["minchan"] = int(KEYS["minchan"])
KEYS["numsigma"] = float(KEYS["numsigma"])
KEYS["pad"] = int(KEYS["pad"])
KEYS["width"] = int(KEYS["width"])
KEYS["cutoff"] = ast.literal_eval(str(KEYS["cutoff"]))
    except Exception as e:
print("Exception converting keyword value to number:",e)
return
#========================================================================
# Master project. Beginning for ADMIT Commands
#
p = admit.Project(projdir,commit=False,loglevel=loglevel)
# list object for Tasks so we don't have to individually name them
Tasks = []
#
# Set-up all ADMIT Flow tasks for execution including their aliases and connections
# The aliases allow you to refer to a task's input by the alias name of the (previous)
# task providing that input.
#
# Add spectral line processing to flow
if KEYS["specpb"] == None:
Tasks.append(p.addtask(admit.Ingest_AT(file=cubefile, alias='incube')))
else:
Tasks.append(p.addtask(admit.Ingest_AT(file=cubefile, alias='incube', pb=KEYS["specpb"])))
Tasks.append(p.addtask(admit.CubeStats_AT (alias='instats'), ['incube']))
Tasks.append(p.addtask(admit.CubeSum_AT (alias='insum', sigma=1, numsigma=3.0), ['incube', 'instats']))
Tasks.append(p.addtask(admit.CubeSpectrum_AT (alias='spec1'), ['incube', 'insum']))
Tasks.append(p.addtask(admit.PVSlice_AT ( width=KEYS["width"]), ['incube', 'insum']))
Tasks.append(p.addtask(admit.LineID_AT (alias='lines', csub=[0,0], minchan=KEYS["minchan"], numsigma=KEYS["numsigma"]), ['instats','spec1']))
Tasks.append(p.addtask(admit.LineCube_AT (alias='cutcubes', pad=KEYS["pad"]), ['incube', 'lines']))
Tasks.append(p.addtask(admit.Moment_AT (alias='linemom', mom0clip=2.0, numsigma=KEYS["cutoff"], moments=[0, 1, 2]), ['cutcubes', 'instats']))
Tasks.append(p.addtask(admit.CubeSpectrum_AT (alias='linespec'), ['cutcubes', 'linemom']))
# While 'linemom' produces 3 moment image BDPs, the default input is taken
# here, which is the first BDP which is the zeroth moment. This relies on
# Moment_AT's default behavior of putting the zeroth moment in the
# BDP index 0.
Tasks.append(p.addtask(admit.PVSlice_AT ( width=KEYS["width"]), ['cutcubes', 'linemom']))
# If given, add continuum map processing to flow
if contfile != None:
if KEYS["contpb"] == None:
Tasks.append(p.addtask(admit.Ingest_AT (alias='incont', file=contfile)))
else:
Tasks.append(p.addtask(admit.Ingest_AT (alias='incont', file=contfile, pb=KEYS["contpb"])))
Tasks.append(p.addtask(admit.CubeStats_AT (alias='contstats'), ['incont']))
Tasks.append(p.addtask(admit.SFind2D_AT (alias='contsfind'), ['incont','contstats']))
# Only add this CubeSpectrum_at to flow if SFind2D found at least one source.
# This can only be known by running up the flow to now.
p.run()
if p['contsfind'][0] != None and len(p['contsfind'][0]) > 0:
Tasks.append(p.addtask(admit.CubeSpectrum_AT (alias='contspec'), ['cutcubes','contsfind']))
#
# Execute ADMIT flow
#
p.run()
if __name__ == "__main__":
# Command line processing to pick-up file name and define
# ADMIT directory that you will be creating
argv = admit.utils.casa_argv(sys.argv)
# now do the work
_run(argv)
|
'''
BMC HMM Router
'''
from Products.ZenUtils.Ext import DirectRouter, DirectResponse
from Products import Zuul
class bmcRouter(DirectRouter):
'''
BMC Router
'''
def _getFacade(self):
'''
getfacade
'''
        # The facade name in the next line - BMCAdapter - must match the
        # name field in an adapter stanza in configure.zcml
        return Zuul.getFacade('BMCAdapter', self.context)
    # The method name - routerbs - and its parameters must match the last
    # part of the call to Zenoss.remote.bmcRouter.routerbs in the javascript
    # file myFooterMenu.js. The parameters will be populated by the items
    # defined in that js file.
    # Note that the router method has three parameters - deviceip,
    # bootsequence and cfgboottype - that are passed as the "opts" parameters
    # from myFooterMenu.js. The values of these fields are provided by the
    # form input.
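    # For illustration only (the device address and option values below are
    # hypothetical): the matching ExtDirect call from the browser side would
    # look roughly like
    #   Zenoss.remote.bmcRouter.routerbs(
    #       {deviceip: '10.0.0.5', bootsequence: 'HDD', cfgboottype: 'Once'},
    #       function(response) { /* handle DirectResponse */ });
    # where the keys of the opts object must match this method's parameter names.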
def routerbs(self, deviceip, bootsequence, cfgboottype):
'''
routerBS
'''
facade = self._getFacade()
# The object that is being operated on is in self.context
devobject = self.context
success, message = facade.bootsequence(
devobject, deviceip, bootsequence, cfgboottype)
if success:
return DirectResponse.succeed(message)
return DirectResponse.fail(message)
def routerfpc(self, deviceip, frupowercontrol):
'''
routerFPC
'''
facade = self._getFacade()
devobject = self.context
frunum = 1
success, message = facade.frupowerctrl(devobject, deviceip,
frunum, frupowercontrol)
if success:
return DirectResponse.succeed(message)
return DirectResponse.fail(message)
class hmmRouter(DirectRouter):
'''
HMM Router
'''
def _getFacade(self):
'''
getfacade
'''
return Zuul.getFacade('HMMAdapter', self.context)
def routerbbo(self, deviceip, hmmbladenum,
hmmbiosbootoption, hmmbotype):
'''
routerBBO
'''
facade = self._getFacade()
# The object that is being operated on is in self.context
devobject = self.context
success, message = facade.biosbootoption(devobject, deviceip, hmmbladenum,
hmmbiosbootoption,
hmmbotype)
if success:
return DirectResponse.succeed(message)
return DirectResponse.fail(message)
def routerfrucontrol(self, deviceip, hmmbladenum, hmmfrucontrol):
'''
routerFruControl
'''
hmmallblade = False
facade = self._getFacade()
devobject = self.context
hmmfrunum = 1
success, message = facade.frucontrol(devobject, deviceip, hmmbladenum,
hmmfrunum, hmmfrucontrol,
hmmallblade)
if success:
return DirectResponse.succeed(message)
return DirectResponse.fail(message)
|
import tkinter
from tkinter import *
import tkinter.font as font
gui = Tk(className='Python Examples - Button') # initialises the main window
gui.geometry("500x200") #sets the dimensions
gui.title("CODE::D") #title of window
myFont = font.Font(family='Helvetica', size=50, weight='bold') #define font
##################################################################
########FUNCTION TO SCAN BARCODE, PLACE CODE INSIDE###############
##################################################################
def activate_event(): #called when button is pressed
btn.destroy()
loading_message = tkinter.Label(gui, text = "Scanning...", fg = "green", bg = "white")
loading_message['font'] = myFont
loading_message.pack(ipady = 50, ipadx = 70, expand = True)
####scan():
# mat = get_material() #PLACEHOLDER
# loading_message.destroy()
# material_message = tkinter.Label(gui, text = mat, fg = "black", bg = "yellow")
# material_message['font'] = myFont
# material_message.pack(ipady = 120, ipadx = 100, expand = True)
#################################################################
#################################################################
btn = Button(gui, text='SCAN', bg='black', fg='red', command = activate_event) # create button
btn['font'] = myFont # apply font to the button label
btn.pack(ipady = 50, ipadx = 70, expand = True) # add button to gui window
gui.mainloop()
|
class MultiSegmentGrid(Element,IDisposable):
"""
This element acts as a multi-segmented Grid. The individual grids associated to
the MultiSegmentGrid behave as a single unit and all share the same text. They inherit
their type (GridType) from the MultiSegmentGrid.
"""
@staticmethod
def AreGridsInSameMultiSegmentGrid(grid1,grid2):
"""
AreGridsInSameMultiSegmentGrid(grid1: Grid,grid2: Grid) -> bool
Determine whether two Grids are members of the same GridChain.
grid1: A Grid.
grid2: A Grid.
Returns: Returns true if both of the specified Grids are associated to the same
MultiSegmentGrid,
i.e. getMultiSegementGridId returns the same valid
element id for both Grids.
"""
pass
@staticmethod
def Create(document,typeId,curveLoop,sketchPlaneId):
"""
Create(document: Document,typeId: ElementId,curveLoop: CurveLoop,sketchPlaneId: ElementId) -> ElementId
Create a MultiSegmentGrid element from the specified curve loop.
document: The document in which to create the MultiSegmentGrid.
typeId: Element id of a GridType element.
curveLoop: An open curve loop consisting of lines and arcs.
sketchPlaneId: Element id of a SketchPlane for the curves elements that will be created from
the curveLoop.
Returns: The element id of the new MultiSegmentGrid element.
"""
pass
def Dispose(self):
""" Dispose(self: Element,A_0: bool) """
pass
def getBoundingBox(self,*args):
""" getBoundingBox(self: Element,view: View) -> BoundingBoxXYZ """
pass
def GetGridIds(self):
"""
GetGridIds(self: MultiSegmentGrid) -> ICollection[ElementId]
Get the element ids of the Grids that make up this MultiSegmentGrid.
Returns: Element ids of Grids that make up this MultiSegmentGrid.
"""
pass
@staticmethod
def GetMultiSegementGridId(grid):
"""
GetMultiSegementGridId(grid: Grid) -> ElementId
Retrieve the element id of the MultiSegmentGrid of which the specified Grid is
a member.
grid: A Grid.
Returns: The element id of the associated GridChain. If the Grid is not associated to a
GridChain,
this will return invalidElementId.
"""
pass
@staticmethod
def IsValidCurveLoop(curveLoop):
"""
IsValidCurveLoop(curveLoop: CurveLoop) -> bool
Identifies whether the specified curve loop is valid for creation of a
MultiSegmentGrid.
curveLoop: The curve loop.
Returns: True if the curve loop is an open curve loop consisting of lines and arcs,and
false otherwise.
"""
pass
@staticmethod
def IsValidSketchPlaneId(document,elemId):
"""
IsValidSketchPlaneId(document: Document,elemId: ElementId) -> bool
Identifies whether provided element id corresponds to a SketchPlane that is
valid for GridChain creation.
document: The document.
elemId: Element id.
Returns: True if elemId is the element id of a horizontal SketchPlane.
"""
pass
def ReleaseUnmanagedResources(self,*args):
""" ReleaseUnmanagedResources(self: Element,disposing: bool) """
pass
def setElementType(self,*args):
""" setElementType(self: Element,type: ElementType,incompatibleExceptionMessage: str) """
pass
def __enter__(self,*args):
""" __enter__(self: IDisposable) -> object """
pass
def __exit__(self,*args):
""" __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) """
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
Text=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""name shared by grids in this MultiSegmentGrid
Get: Text(self: MultiSegmentGrid) -> str
Set: Text(self: MultiSegmentGrid)=value
"""
|
from flask import Flask,render_template,request
app = Flask(__name__)
@app.route('/')
def hello():
return render_template('student.html')
@app.route('/<name>')
def hello_name(name):
return "Hello {}!".format(name)
@app.route('/about/')
def about_page():
return render_template('about.html')
@app.route('/layout/')
def layout_page():
return render_template('layout.html')
@app.route('/hello/<int:score>')
def hello_score(score):
    return render_template('hello.html', marks=score)
@app.route('/result', methods=['POST', 'GET'])
def result():
    if request.method == 'POST':
        form_data = request.form
        return render_template("result.html", result=form_data)
    # On a plain GET, fall back to the input form instead of returning None
    return render_template('student.html')
if __name__ == '__main__':
app.run(debug=True)
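# A quick sanity check with Flask's built-in test client (illustrative only;
# the form field names are assumptions and the templates referenced above must
# exist in ./templates):
#   with app.test_client() as client:
#       resp = client.post('/result', data={'name': 'Alice', 'marks': '90'})
#       assert resp.status_code == 200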
|
"""Test the example calculator CLI binary."""
from subprocess import run
from typing import List
import pytest
from hypothesis import given
from hypothesis import strategies
from mylittleci.lib import simplemath
def test_calculator_binary(capfd: pytest.CaptureFixture[str]) -> None:
"""Add numbers together using binary and assert the output to stdout."""
cmd = ["calculator", "--sum", "666", "999"]
proc = run(cmd, check=False)
assert proc.returncode == 0
captured = capfd.readouterr()
# print(captured) # debug
    assert captured.out.rstrip() == "1665"
assert captured.err == ""
@pytest.mark.parametrize(
"cmd, expected_exit_code",
[
(["calculator", "--help"], 0),
(["calculator", "--sum", "1", "2", "3"], 0),
(["calculator"], 2),
(["calculator", "--blah"], 2),
],
)
def test_calculator_binary_exit_code(
    capfd: pytest.CaptureFixture[str], expected_exit_code: int, cmd: List[str]
) -> None:
"""Add numbers together using binary and assert the output to stdout."""
proc = run(cmd, check=False)
assert proc.returncode == expected_exit_code
@given(integers=strategies.lists(strategies.integers()))
def test_calculator_api(integers: List[int]) -> None:
"""Add numbers together using the API."""
result = simplemath.calculate_sum(integers=integers)
assert result == sum(integers)
def test_calculator_api_type_error() -> None:
"""Add numbers together using the API."""
with pytest.raises(simplemath.SimpleMathException) as excinfo:
simplemath.calculate_sum("invalid input")
assert "You need to provide a list of integers." in str(excinfo)
|
from django.shortcuts import render
from validator.models import Settings
def home(request):
return render(request, 'validator/index.html', {'news_text': Settings.load().news})
def alpha(request):
return render(request, 'validator/alpha.html')
def terms(request):
return render(request, 'validator/terms.html')
|
import re
import os
import shutil
import time
from datetime import datetime, timedelta
from gppylib.db import dbconn
from test.behave_utils.utils import check_schema_exists, check_table_exists, drop_table_if_exists
from behave import given, when, then
CREATE_MULTI_PARTITION_TABLE_SQL = """
CREATE TABLE %s.%s (trans_id int, date date, amount decimal(9,2), region text)
WITH (appendonly=true, orientation=column)
DISTRIBUTED BY (trans_id)
PARTITION BY RANGE (date)
SUBPARTITION BY LIST (region)
SUBPARTITION TEMPLATE
( SUBPARTITION usa VALUES ('usa'),
SUBPARTITION asia VALUES ('asia'),
SUBPARTITION europe VALUES ('europe'),
DEFAULT SUBPARTITION other_regions)
(START (date '2011-01-01') INCLUSIVE
END (date '2012-01-01') EXCLUSIVE
EVERY (INTERVAL '5 month'),
DEFAULT PARTITION outlying_dates)
"""
CREATE_PARTITION_TABLE_SQL = """
CREATE TABLE %s.%s (id int, date date) WITH (appendonly=true, orientation=column)
DISTRIBUTED BY (id)
PARTITION BY RANGE (date)
( START (date '2008-01-01') INCLUSIVE
END (date '2008-01-04') EXCLUSIVE
EVERY (INTERVAL '1 day'),
DEFAULT PARTITION default_dates);
"""
@given('there is a regular "{storage_type}" table "{tablename}" with column name list "{col_name_list}" and column type list "{col_type_list}" in schema "{schemaname}"')
def impl(context, storage_type, tablename, col_name_list, col_type_list, schemaname):
schemaname_no_quote = schemaname
if '"' in schemaname:
schemaname_no_quote = schemaname[1:-1]
if not check_schema_exists(context, schemaname_no_quote, context.dbname):
raise Exception("Schema %s does not exist in database %s" % (schemaname_no_quote, context.dbname))
drop_table_if_exists(context, '.'.join([schemaname, tablename]), context.dbname)
create_table_with_column_list(context.conn, storage_type, schemaname, tablename, col_name_list, col_type_list)
check_table_exists(context, context.dbname, '.'.join([schemaname, tablename]), table_type=storage_type)
@given('there is a hard coded ao partition table "{tablename}" with 4 child partitions in schema "{schemaname}"')
def impl(context, tablename, schemaname):
if not check_schema_exists(context, schemaname, context.dbname):
raise Exception("Schema %s does not exist in database %s" % (schemaname, context.dbname))
drop_table_if_exists(context, '.'.join([schemaname, tablename]), context.dbname)
dbconn.execSQL(context.conn, CREATE_PARTITION_TABLE_SQL % (schemaname, tablename))
context.conn.commit()
check_table_exists(context, context.dbname, '.'.join([schemaname, tablename]), table_type='ao')
@given('there is a hard coded multi-level ao partition table "{tablename}" with 4 mid-level and 16 leaf-level partitions in schema "{schemaname}"')
def impl(context, tablename, schemaname):
if not check_schema_exists(context, schemaname, context.dbname):
raise Exception("Schema %s does not exist in database %s" % (schemaname, context.dbname))
drop_table_if_exists(context, '.'.join([schemaname, tablename]), context.dbname)
dbconn.execSQL(context.conn, CREATE_MULTI_PARTITION_TABLE_SQL % (schemaname, tablename))
context.conn.commit()
check_table_exists(context, context.dbname, '.'.join([schemaname, tablename]), table_type='ao')
@given('no state files exist for database "{dbname}"')
def impl(context, dbname):
analyze_dir = get_analyze_dir(dbname)
if os.path.exists(analyze_dir):
shutil.rmtree(analyze_dir)
@then('"{number}" analyze directories exist for database "{dbname}"')
def impl(context, number, dbname):
dirs_found = get_list_of_analyze_dirs(dbname)
if str(number) != str(len(dirs_found)):
raise Exception("number of directories expected, %s, didn't match number found: %s" % (
str(number), str(len(dirs_found))))
@given('a view "{view_name}" exists on table "{table_name}" in schema "{schema_name}"')
def impl(context, view_name, table_name, schema_name):
create_view_on_table_in_schema(context.conn, schema_name, table_name, view_name)
@given('a view "{view_name}" exists on table "{table_name}"')
def impl(context, view_name, table_name):
create_view_on_table(context.conn, view_name, table_name)
@given('"{qualified_table}" appears in the latest state files')
@then('"{qualified_table}" should appear in the latest state files')
def impl(context, qualified_table):
found, filename = table_found_in_state_file(context.dbname, qualified_table)
if not found:
if filename == '':
assert False, "no state files found for database %s" % context.dbname
else:
assert False, "table %s not found in state file %s" % (qualified_table, os.path.basename(filename))
@then('"{qualified_table}" should not appear in the latest state files')
def impl(context, qualified_table):
found, filename = table_found_in_state_file(context.dbname, qualified_table)
if found:
assert False, "table %s found in state file %s" % (qualified_table, os.path.basename(filename))
@given('"{expected_result}" should appear in the latest ao_state file in database "{dbname}"')
@then('"{expected_result}" should appear in the latest ao_state file in database "{dbname}"')
def impl(context, expected_result, dbname):
latest_file = get_latest_aostate_file(dbname)
with open(latest_file, 'r') as f:
for line in f:
if expected_result in line:
return True
raise Exception("couldn't find %s in %s" % (expected_result, latest_file))
@given('columns "{col_name_list}" of table "{qualified_table}" appear in the latest column state file')
@then('columns "{col_name_list}" of table "{qualified_table}" should appear in the latest column state file')
def impl(context, col_name_list, qualified_table):
found, column, filename = column_found_in_state_file(context.dbname, qualified_table, col_name_list)
if not found:
if filename == '':
assert False, "no column state file found for database %s" % context.dbname
else:
assert False, "column(s) %s of table %s not found in state file %s" % (
column, qualified_table, os.path.basename(filename))
@given('column "{col_name}" of table "{qualified_table}" does not appear in the latest column state file')
@then('column "{col_name}" of table "{qualified_table}" should not appear in the latest column state file')
def impl(context, col_name, qualified_table):
found, column, filename = column_found_in_state_file(context.dbname, qualified_table, col_name)
if found:
if filename == '':
assert False, "no column state file found for database %s" % context.dbname
else:
assert False, "unexpected column %s of table %s found in state file %s" % (
column, qualified_table, os.path.basename(filename))
@given('"{qualified_table}" appears in the latest report file')
@then('"{qualified_table}" should appear in the latest report file')
def impl(context, qualified_table):
found, filename = table_found_in_report_file(context.dbname, qualified_table)
if not found:
assert False, "table %s not found in report file %s" % (qualified_table, os.path.basename(filename))
@then('output should contain either "{output1}" or "{output2}"')
def impl(context, output1, output2):
pat1 = re.compile(output1)
pat2 = re.compile(output2)
if not pat1.search(context.stdout_message) and not pat2.search(context.stdout_message):
err_str = "Expected stdout string '%s' or '%s', but found:\n'%s'" % (output1, output2, context.stdout_message)
raise Exception(err_str)
@then('output should not contain "{output1}"')
def impl(context, output1):
pat1 = re.compile(output1)
if pat1.search(context.stdout_message):
err_str = "Unexpected stdout string '%s', found:\n'%s'" % (output1, context.stdout_message)
raise Exception(err_str)
@then('output should contain both "{output1}" and "{output2}"')
def impl(context, output1, output2):
pat1 = re.compile(output1)
pat2 = re.compile(output2)
if not pat1.search(context.stdout_message) or not pat2.search(context.stdout_message):
err_str = "Expected stdout string '%s' and '%s', but found:\n'%s'" % (output1, output2, context.stdout_message)
raise Exception(err_str)
@given('table "{qualified_table}" does not appear in the latest state files')
def impl(context, qualified_table):
found, filename = table_found_in_state_file(context.dbname, qualified_table)
if found:
delete_table_from_state_files(context.dbname, qualified_table)
@given('{num_rows} rows are inserted into table "{tablename}" in schema "{schemaname}" with column type list "{column_type_list}"')
@then('{num_rows} rows are inserted into table "{tablename}" in schema "{schemaname}" with column type list "{column_type_list}"')
@when('{num_rows} rows are inserted into table "{tablename}" in schema "{schemaname}" with column type list "{column_type_list}"')
def impl(context, num_rows, tablename, schemaname, column_type_list):
insert_data_into_table(context.conn, schemaname, tablename, column_type_list, num_rows)
@given('some data is inserted into table "{tablename}" in schema "{schemaname}" with column type list "{column_type_list}"')
@when('some data is inserted into table "{tablename}" in schema "{schemaname}" with column type list "{column_type_list}"')
def impl(context, tablename, schemaname, column_type_list):
insert_data_into_table(context.conn, schemaname, tablename, column_type_list)
@given('some ddl is performed on table "{tablename}" in schema "{schemaname}"')
def impl(context, tablename, schemaname):
perform_ddl_on_table(context.conn, schemaname, tablename)
@given('the user starts a transaction and runs "{query}" on "{dbname}"')
@when('the user starts a transaction and runs "{query}" on "{dbname}"')
def impl(context, query, dbname):
if 'long_lived_conn' not in context:
create_long_lived_conn(context, dbname)
dbconn.execSQL(context.long_lived_conn, 'BEGIN; %s' % query)
@given('the user rollsback the transaction')
@when('the user rollsback the transaction')
def impl(context):
dbconn.execSQL(context.long_lived_conn, 'ROLLBACK;')
@then('the latest state file should have a mod count of {mod_count} for table "{table}" in "{schema}" schema for database "{dbname}"')
def impl(context, mod_count, table, schema, dbname):
mod_count_in_state_file = get_mod_count_in_state_file(dbname, schema, table)
if mod_count_in_state_file != mod_count:
raise Exception(
"mod_count %s does not match mod_count %s in state file for %s.%s" %
(mod_count, mod_count_in_state_file, schema, table))
@then('root stats are populated for partition table "{tablename}" for database "{dbname}"')
def impl(context, tablename, dbname):
with dbconn.connect(dbconn.DbURL(dbname=dbname), unsetSearchPath=False) as conn:
query = "select count(*) from pg_statistic where starelid='%s'::regclass;" % tablename
num_tuples = dbconn.execSQLForSingleton(conn, query)
if num_tuples == 0:
raise Exception("Expected partition table %s to contain root statistics" % tablename)
@given('the state files for "{dbname}" are artificially aged by {num_days} days')
@when('the state files for "{dbname}" are artificially aged by {num_days} days')
def impl(context, dbname, num_days):
analyze_dir = get_analyze_dir(dbname)
folders = get_list_of_analyze_dirs(dbname)
for f in folders:
time_of_analyze = datetime.strptime(os.path.basename(f), '%Y%m%d%H%M%S')
aged_time_of_analyze = time_of_analyze - timedelta(days=int(num_days))
new_folder_name = os.path.join(analyze_dir, aged_time_of_analyze.strftime('%Y%m%d%H%M%S'))
shutil.move(f, new_folder_name)
@then('there should be {num_dirs} state directories for database "{dbname}"')
@then('there should be {num_dirs} state directory for database "{dbname}"')
def impl(context, num_dirs, dbname):
folders = get_list_of_analyze_dirs(dbname)
if len(folders) != int(num_dirs):
raise Exception("Found %d state directories, expected %s" % (len(folders), num_dirs))
@given('the user waits {num_secs} seconds')
@when('the user waits {num_secs} seconds')
@given('the user waits {num_secs} second')
@when('the user waits {num_secs} second')
def impl(context, num_secs):
time.sleep(int(num_secs))
def get_mod_count_in_state_file(dbname, schema, table):
file = get_latest_aostate_file(dbname)
comma_name = ','.join([schema, table])
with open(file) as fd:
for line in fd:
if comma_name in line:
return line.split(',')[2].strip()
return -1
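# For reference, the ao_state parsing above assumes comma-separated lines of
# the form (values illustrative):
#   public,my_table,42
# i.e. schema name, table name and modification count.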
def create_long_lived_conn(context, dbname):
context.long_lived_conn = dbconn.connect(dbconn.DbURL(dbname=dbname), unsetSearchPath=False)
def table_found_in_state_file(dbname, qualified_table):
comma_name = ','.join(qualified_table.split('.'))
files = get_latest_analyze_state_files(dbname)
if len(files) == 0:
return False, ""
state_file = ""
for state_file in files:
found = False
with open(state_file) as fd:
for line in fd:
if comma_name in line:
found = True
continue
if not found:
return False, state_file
return True, state_file
def table_found_in_report_file(dbname, qualified_table):
report_file = get_latest_analyze_report_file(dbname)
with open(report_file) as fd:
for line in fd:
if qualified_table == line.strip('\n'):
return True, report_file
return False, report_file
def column_found_in_state_file(dbname, qualified_table, col_name_list):
comma_name = ','.join(qualified_table.split('.'))
files = get_latest_analyze_state_files(dbname)
if len(files) == 0:
return False, "", ""
for state_file in files:
if "col_state_file" not in state_file:
continue
with open(state_file) as fd:
for line in fd:
line = line.strip('\n')
if comma_name in line:
for column in col_name_list.split(','):
if column not in line.split(',')[2:]:
return False, column, state_file
return True, "", state_file
return False, col_name_list, state_file
def delete_table_from_state_files(dbname, qualified_table):
comma_name = ','.join(qualified_table.split('.'))
files = get_latest_analyze_state_files(dbname)
for filename in files:
lines = []
with open(filename) as fd:
for line in fd:
lines.append(line.strip('\n'))
        with open(filename, "w") as out_fd:
            for line in lines:
                if comma_name not in line:
                    out_fd.write(line + '\n')
def get_list_of_analyze_dirs(dbname):
analyze_dir = get_analyze_dir(dbname)
if not os.path.exists(analyze_dir):
return []
ordered_list = [os.path.join(analyze_dir, x) for x in sorted(os.listdir(analyze_dir), reverse=True)]
    return [d for d in ordered_list if os.path.isdir(d)]
def get_latest_analyze_dir(dbname):
analyze_dir = get_analyze_dir(dbname)
folders = get_list_of_analyze_dirs(dbname)
if len(folders) == 0:
return []
return os.path.join(analyze_dir, folders[0])
def get_analyze_dir(dbname):
master_data_dir = os.environ.get('MASTER_DATA_DIRECTORY')
analyze_dir = os.path.join(master_data_dir, 'db_analyze', dbname)
return analyze_dir
def get_latest_aostate_file(dbname):
for path in get_latest_analyze_state_files(dbname):
if 'ao_state' in path:
return path
return None
def get_latest_analyze_state_files(dbname):
"""
return the latest state files (absolute paths)
"""
state_files_dir = get_latest_analyze_dir(dbname)
if not state_files_dir:
return []
files = os.listdir(state_files_dir)
if len(files) != 4:
raise Exception("Missing or unexpected state files in folder %s" % state_files_dir)
ret = []
for f in files:
if 'report' not in f:
ret.append(os.path.join(state_files_dir, f))
return ret
def get_latest_analyze_report_file(dbname):
"""
return the latest report file (absolute path)
"""
report_file_dir = get_latest_analyze_dir(dbname)
if not report_file_dir:
return []
files = os.listdir(report_file_dir)
for f in files:
if 'report' in f:
return os.path.join(report_file_dir, f)
raise Exception("Missing report file in folder %s" % report_file_dir)
def create_table_with_column_list(conn, storage_type, schemaname, tablename, col_name_list, col_type_list):
col_name_list = col_name_list.strip().split(',')
col_type_list = col_type_list.strip().split(',')
col_list = ' (' + ','.join(['%s %s' % (x, y) for x, y in zip(col_name_list, col_type_list)]) + ') '
if storage_type.lower() == 'heap':
storage_str = ''
elif storage_type.lower() == 'ao':
storage_str = " with (appendonly=true) "
elif storage_type.lower() == 'co':
storage_str = " with (appendonly=true, orientation=column) "
else:
raise Exception("Invalid storage type")
query = 'CREATE TABLE %s.%s %s %s DISTRIBUTED RANDOMLY' % (schemaname, tablename, col_list, storage_str)
dbconn.execSQL(conn, query)
conn.commit()
def insert_data_into_table(conn, schemaname, tablename, col_type_list, num_rows="100"):
col_type_list = col_type_list.strip().split(',')
col_str = ','.join(["(random()*i)::%s" % x for x in col_type_list])
query = "INSERT INTO " + schemaname + '.' + tablename + " SELECT " + col_str + " FROM generate_series(1," + num_rows + ") i"
dbconn.execSQL(conn, query)
conn.commit()
def perform_ddl_on_table(conn, schemaname, tablename):
query = "ALTER TABLE " + schemaname + '.' + tablename + " ADD COLUMN tempcol int default 0"
dbconn.execSQL(conn, query)
query = "ALTER TABLE " + schemaname + '.' + tablename + " DROP COLUMN tempcol"
dbconn.execSQL(conn, query)
conn.commit()
def create_view_on_table_in_schema(conn, schemaname, tablename, viewname):
query = "CREATE OR REPLACE VIEW " + schemaname + "." + viewname + \
" AS SELECT * FROM " + schemaname + "." + tablename
dbconn.execSQL(conn, query)
conn.commit()
def create_view_on_table(conn, viewname, tablename):
query = "CREATE OR REPLACE VIEW " + viewname + \
" AS SELECT * FROM " + tablename
dbconn.execSQL(conn, query)
conn.commit()
|
n, m = map(int, input().split())
a = set(int(i) for i in input().split())
b = set(int(i) for i in input().split())
a -= b
r = str(len(a))
a = sorted(a)
if a:
    r += '\n'
    for i in a:
        r += str(i) + ' '
print(r)
|
import os
from yaml import dump
from cfg import Opts
def test_default_args():
Opts.reset()
Opts.add_int('a', 1, 'a value')
opt = Opts()
assert opt.a == 1
opt.b = 2
assert opt['b'] == 2
data = opt.dumps()
assert data['b'] == 2
data1 = opt.dumps()
opt.loads(data, update=False)
data2 = opt.dumps()
for k in data1.keys():
assert data1[k] == data2[k]
# =================
try:
dumped_yaml_file = 'dumped_yaml_file.yaml'
opt.dump(dumped_yaml_file)
opt._cfg = {}
assert not opt._cfg
opt.load(dumped_yaml_file)
for k in data1.keys():
assert data1[k] == opt[k]
except Exception as e:
raise e
finally:
remove_yaml_file(dumped_yaml_file)
# ======================================
yaml_file = 'yaml_test.yaml'
def create_yaml_file(data, file=None):
with open(file or yaml_file, 'w') as f:
for k, v in data.items():
if not isinstance(v, list):
f.write(f'{k}: {v}\n')
else:
f.write(f'{k}:\n')
for item in v:
f.write(f' - {item}\n')
def remove_yaml_file(file=None):
if os.path.exists(file or yaml_file):
os.remove(file or yaml_file)
def test_opts_load_yaml():
Opts.reset()
try:
data = {'name': 'linux'}
create_yaml_file(data)
opt = Opts(yaml_file)
assert opt['name'] == data['name']
except Exception as e:
raise e
#======================
finally:
remove_yaml_file()
def test_yaml_to_arg():
Opts.reset()
try:
data = {'name': 'linux'}
create_yaml_file(data)
opt = Opts(yaml_file, _parse_data='--name unix'.split(' '))
assert opt['name'] == 'unix'
except Exception as e:
raise e
#======================
finally:
remove_yaml_file()
def test_arg_list():
Opts.reset()
lst = [1, 2, 3]
Opts.add_list_int('a', lst, 'a list')
opt = Opts()
for item in lst:
assert item in opt.a
for item in opt.a:
assert item in lst
# ============================
Opts.reset()
lst = [1, 2, 3]
lst2 = [4, 5, 6]
Opts.add_list_int('a', lst, 'a list')
parse_data = f"--a {' '.join(str(i) for i in lst2)}".split(' ')
opt = Opts(_parse_data=parse_data)
for item in lst2:
assert item in opt.a
for item in opt.a:
assert item in lst2
|
from torch import nn
from torch import zeros
from hw_asr.base import BaseModel
class SimpleRnnModel(BaseModel):
def __init__(self, n_feats, n_class, fc_hidden=512, n_layers=2, dropout=0.25, *args, **kwargs):
super().__init__(n_feats, n_class, *args, **kwargs)
self.n_layers = n_layers
self.fc_hidden = fc_hidden
self.rnn = nn.LSTM(
n_feats, fc_hidden, num_layers=n_layers,
batch_first=True, dropout=dropout, bidirectional=True
)
self.fc = nn.Linear(2 * fc_hidden, n_class)
def forward(self, spectrogram, *args, **kwargs):
output, _ = self.rnn(spectrogram)
output = output.view(spectrogram.size(0), -1, 2 * self.fc_hidden)
return {"logits": self.fc(output)}
def init_hidden(self, batch_size):
hidden = zeros(self.n_layers, batch_size, self.fc_hidden)
return hidden
def transform_input_lengths(self, input_lengths):
return input_lengths # we don't reduce time dimension here
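    # Illustrative usage (shapes assumed, not taken from the training code):
    # a batch-first spectrogram of shape (batch, time, n_feats) yields logits
    # of shape (batch, time, n_class).
    #   model = SimpleRnnModel(n_feats=128, n_class=28)
    #   out = model(spectrogram=zeros(4, 100, 128))
    #   out["logits"].shape  # -> (4, 100, 28)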
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import unittest
import json
from subprocess import Popen, PIPE
class TestArion(unittest.TestCase):
ARION_PATH = '../../build/arion'
# Images for general purpose testing (leave off file:// for testing)
IMAGE_1_PATH = '../../examples/images/image-1.jpg'
IMAGE_2_PATH = '../../examples/images/image-2.jpg'
IMAGE_3_PATH = '../../examples/images/image-3.jpg'
# Images for JPG orientation tests (include file:// for testing)
# Images from https://github.com/recurser/exif-orientation-examples
# Copyright (c) 2010 Dave Perrett.
LANDSCAPE_1_PATH = 'file://../images/Landscape_1.jpg'
LANDSCAPE_2_PATH = 'file://../images/Landscape_2.jpg'
LANDSCAPE_3_PATH = 'file://../images/Landscape_3.jpg'
LANDSCAPE_4_PATH = 'file://../images/Landscape_4.jpg'
LANDSCAPE_5_PATH = 'file://../images/Landscape_5.jpg'
LANDSCAPE_6_PATH = 'file://../images/Landscape_6.jpg'
LANDSCAPE_7_PATH = 'file://../images/Landscape_7.jpg'
LANDSCAPE_8_PATH = 'file://../images/Landscape_8.jpg'
OUTPUT_IMAGE_PATH = 'output/'
# -------------------------------------------------------------------------------
# Helper function for calling Arion
# -------------------------------------------------------------------------------
def call_arion(self, input_url, operations, *additional_root_params):
input_dict = {'input_url': input_url,
'correct_rotation': True,
'operations': operations}
if (additional_root_params):
input_dict = self.merge_two_dicts(input_dict, additional_root_params[0])
input_string = json.dumps(input_dict, separators=(',', ':'))
p = Popen([self.ARION_PATH, "--input", input_string], stdout=PIPE)
cmd_output = p.communicate()
output = json.loads(cmd_output[0])
# DEBUG
# print cmd_output[0]
return output
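    # For reference, the JSON document handed to `arion --input` looks like
    # (paths and operation illustrative):
    #   {"input_url":"file://image.jpg","correct_rotation":true,
    #    "operations":[{"type":"read_meta","params":{"info":true}}]}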
# -------------------------------------------------------------------------------
# Helper function for reading data back about an image
# -------------------------------------------------------------------------------
def read_image(self, input_url):
operation = {
'type': 'read_meta',
'params': {
'info': True
}
}
return self.call_arion(input_url, [operation])
# -------------------------------------------------------------------------------
# Helper function for copying an image
# -------------------------------------------------------------------------------
def copy_image(self, input_url, output_url):
operation = {
'type': 'copy',
'params': {
'output_url': output_url
}
}
return self.call_arion(input_url, [operation])
# -------------------------------------------------------------------------------
# Helper function for checking for successful output
# -------------------------------------------------------------------------------
def verifySuccess(self, output, expected_width=-1, expected_height=-1):
self.assertTrue(output['result'])
self.assertEqual(output['failed_operations'], 0)
self.assertEqual(output['total_operations'], 1)
if expected_width >= 0:
self.assertEqual(output['width'], expected_width)
if expected_height >= 0:
self.assertEqual(output['height'], expected_height)
# -------------------------------------------------------------------------------
# Helper function for checking for failed output
# -------------------------------------------------------------------------------
def verifyFailure(self, output):
self.assertFalse(output['result'])
self.assertEqual(output['failed_operations'], 1)
self.assertEqual(output['total_operations'], 1)
# -------------------------------------------------------------------------------
# Helper function for creating output url
# -------------------------------------------------------------------------------
def outputUrlHelper(self, filename):
return self.OUTPUT_IMAGE_PATH + filename
# -------------------------------------------------------------------------------
# Helper function for testing fill operation
# -------------------------------------------------------------------------------
def imageResizeHelper(self, srcPath, outputPrefix, options):
outputFilename = outputPrefix + \
str(options['width']) + 'x' + str(options['height']) + \
'_' + str(options['type']) + '.jpg'
outputUrl = self.outputUrlHelper(outputFilename)
resize_operation = {
'type': 'resize',
'params':
{
'width': options['width'],
'height': options['height'],
'type': options['type'],
'gravity': options['gravity'],
'output_url': outputUrl
}
}
operations = [resize_operation];
output = self.call_arion(srcPath, operations)
self.verifySuccess(output);
# -----------------------------
# Now read back image data
# -----------------------------
output = self.read_image(outputUrl)
self.verifySuccess(output, options['width'], options['height']);
# -------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def testImageFormats(self):
# -----------------------------------------
# JPG
# -----------------------------------------
input_url = '../images/small_input.jpg'
watermark_url = '../images/watermark.png'
output_url = self.outputUrlHelper('test_format_jpg.jpg')
resize_operation = {
'type': 'resize',
'params':
{
'width': 100,
'height': 400,
'type': 'width',
'quality': 92,
'watermark_url': watermark_url,
'watermark_type': 'adaptive',
'watermark_min': 0.3,
'watermark_max': 1.0,
'output_url': output_url
}
}
operations = [resize_operation];
output = self.call_arion(input_url, operations)
self.verifySuccess(output);
# -----------------------------------------
# PNG
# -----------------------------------------
input_url = '../images/small_input.png'
watermark_url = '../images/watermark.png'
output_url = self.outputUrlHelper('test_format_png.jpg')
resize_operation = {
'type': 'resize',
'params':
{
'width': 100,
'height': 400,
'type': 'width',
'quality': 92,
'watermark_url': watermark_url,
'watermark_type': 'adaptive',
'watermark_min': 0.3,
'watermark_max': 1.0,
'output_url': output_url
}
}
operations = [resize_operation];
output = self.call_arion(input_url, operations)
self.verifySuccess(output);
# -----------------------------------------
# TIFF
# -----------------------------------------
input_url = '../images/small_input.tif'
watermark_url = '../images/watermark.png'
output_url = self.outputUrlHelper('test_format_tif.jpg')
resize_operation = {
'type': 'resize',
'params':
{
'width': 100,
'height': 400,
'type': 'width',
'quality': 92,
'watermark_url': watermark_url,
'watermark_type': 'adaptive',
'watermark_min': 0.3,
'watermark_max': 1.0,
'output_url': output_url
}
}
operations = [resize_operation];
output = self.call_arion(input_url, operations)
self.verifySuccess(output);
# -------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def testWatermark(self):
# -----------------------------------------
# Standard 1:1
# -----------------------------------------
input_url = '../images/watermark_test_input.jpg'
watermark_url = '../images/watermark.png'
output_url = self.outputUrlHelper('test_watermark_1_standard.jpg')
resize_operation = {
'type': 'resize',
'params':
{
'width': 400,
'height': 400,
'type': 'fill',
'quality': 92,
'watermark_url': watermark_url,
'watermark_type': 'standard',
'watermark_amount': 0.1,
'output_url': output_url
}
}
operations = [resize_operation];
output = self.call_arion(input_url, operations)
self.verifySuccess(output);
# -----------------------------------------
# Adaptive 1:1
# -----------------------------------------
input_url = '../images/watermark_test_input.jpg'
watermark_url = '../images/watermark.png'
output_url = self.outputUrlHelper('test_watermark_2_adaptive.jpg')
resize_operation = {
'type': 'resize',
'params':
{
'width': 400,
'height': 400,
'type': 'fill',
'quality': 92,
'watermark_url': watermark_url,
'watermark_type': 'adaptive',
'watermark_min': 0.1,
'watermark_max': 0.5,
'output_url': output_url
}
}
operations = [resize_operation];
output = self.call_arion(input_url, operations)
self.verifySuccess(output);
# -----------------------------------------
# Output size is smaller than watermark
# -----------------------------------------
watermark_url = '../images/watermark2.png'
output_url = self.outputUrlHelper('test_watermark_2_photo.jpg')
resize_operation = {
'type': 'resize',
'params':
{
'width': 200,
'height': 200,
'type': 'fill',
'quality': 92,
'watermark_url': watermark_url,
'watermark_type': 'adaptive',
'watermark_min': 0.1,
'watermark_max': 0.5,
'output_url': output_url
}
}
operations = [resize_operation];
output = self.call_arion(self.IMAGE_1_PATH, operations)
self.verifySuccess(output);
# -----------------------------------------
# Output size is larger than watermark
# -----------------------------------------
watermark_url = '../images/watermark2.png'
output_url = self.outputUrlHelper('test_watermark_3_photo.jpg')
resize_operation = {
'type': 'resize',
'params':
{
'width': 1000,
'height': 1000,
'type': 'fill',
'quality': 92,
'watermark_url': watermark_url,
'watermark_type': 'adaptive',
'watermark_min': 0.1,
'watermark_max': 0.5,
'output_url': output_url
}
}
operations = [resize_operation];
output = self.call_arion(self.IMAGE_1_PATH, operations)
self.verifySuccess(output);
# -----------------------------------------
# Output width is larger than watermark,
# but height is smaller
# -----------------------------------------
output_url = self.outputUrlHelper('test_watermark_4_photo.jpg')
resize_operation = {
'type': 'resize',
'params':
{
'width': 1000,
'height': 200,
'type': 'fill',
'quality': 92,
'watermark_url': watermark_url,
'watermark_type': 'adaptive',
'watermark_min': 0.1,
'watermark_max': 0.5,
'output_url': output_url
}
}
operations = [resize_operation];
output = self.call_arion(self.IMAGE_1_PATH, operations)
self.verifySuccess(output);
# -----------------------------------------
# Output height is larger than watermark,
# but width is smaller
# -----------------------------------------
output_url = self.outputUrlHelper('test_watermark_5_photo.jpg')
resize_operation = {
'type': 'resize',
'params':
{
'width': 200,
'height': 1000,
'type': 'fill',
'quality': 92,
            'watermark_url': watermark_url,
'watermark_type': 'adaptive',
'watermark_min': 0.1,
'watermark_max': 0.5,
'output_url': output_url
}
}
operations = [resize_operation];
output = self.call_arion(self.IMAGE_1_PATH, operations)
self.verifySuccess(output);
# -------------------------------------------------------------------------------
# Here we have a tall source image and we are always cropping a tall portion at
# the center of the image
# -------------------------------------------------------------------------------
def test100x200TallCenter(self):
srcPath = "file://../images/100x200_tall_center.png"
outputPrefix = "100x200_tall_center_to_"
# Just a crop, take the center
opts = {
'type': 'fill',
'gravity': 'center',
'width': 50,
'height': 200,
}
self.imageResizeHelper(srcPath, outputPrefix, opts)
opts = {
'type': 'fill',
'gravity': 'north',
'width': 25,
'height': 100,
}
self.imageResizeHelper(srcPath, outputPrefix, opts)
opts = {
'type': 'fill',
'gravity': 'south',
'width': 100,
'height': 400,
}
self.imageResizeHelper(srcPath, outputPrefix, opts)
# -------------------------------------------------------------------------------
# Here we have a tall source image and we are always cropping a tall portion at
# the left of the image
# -------------------------------------------------------------------------------
def test100x200TallLeft(self):
srcPath = "file://../images/100x200_tall_left.png"
outputPrefix = "100x200_tall_left_to_"
# Just a crop, take the left
opts = {
'type': 'fill',
'gravity': 'west',
'width': 50,
'height': 200,
}
self.imageResizeHelper(srcPath, outputPrefix, opts)
# Shrink, take the left
opts = {
'type': 'fill',
'gravity': 'northwest',
'width': 25,
'height': 100,
}
self.imageResizeHelper(srcPath, outputPrefix, opts)
# Enlarge, take the left
opts = {
'type': 'fill',
'gravity': 'southwest',
'width': 100,
'height': 400,
}
self.imageResizeHelper(srcPath, outputPrefix, opts)
# -------------------------------------------------------------------------------
# Here we have a tall source image and we are always cropping a tall portion
# at the right of the image
# -------------------------------------------------------------------------------
def test100x200TallRight(self):
srcPath = "file://../images/100x200_tall_right.png"
outputPrefix = "100x200_tall_right_to_"
# Just a crop, take the right
opts = {
'type': 'fill',
'gravity': 'east',
'width': 50,
'height': 200,
}
self.imageResizeHelper(srcPath, outputPrefix, opts)
# Shrink, take the right
opts = {
'type': 'fill',
'gravity': 'northeast',
'width': 25,
'height': 100,
}
self.imageResizeHelper(srcPath, outputPrefix, opts)
# Enlarge, take the right
opts = {
'type': 'fill',
'gravity': 'southeast',
'width': 100,
'height': 400,
}
self.imageResizeHelper(srcPath, outputPrefix, opts)
# -------------------------------------------------------------------------------
# Here we have a tall source image and we are always cropping a wide portion
# at the bottom of the image
# -------------------------------------------------------------------------------
def test100x200WideBottom(self):
srcPath = "file://../images/100x200_wide_bottom.png"
outputPrefix = "100x200_wide_bottom_to_"
# Just a crop, take the bottom
opts = {
'type': 'fill',
'gravity': 'south',
'width': 100,
'height': 50,
}
self.imageResizeHelper(srcPath, outputPrefix, opts)
# Shrink, take the bottom
opts = {
'type': 'fill',
'gravity': 'southeast',
'width': 50,
'height': 25,
}
self.imageResizeHelper(srcPath, outputPrefix, opts)
# Enlarge, take the bottom
opts = {
'type': 'fill',
'gravity': 'southwest',
'width': 200,
'height': 100,
}
self.imageResizeHelper(srcPath, outputPrefix, opts)
# -------------------------------------------------------------------------------
# Here we have a tall source image and we are always cropping a wide portion
# at the bottom of the image
# -------------------------------------------------------------------------------
def test100x200WideCenter(self):
srcPath = "file://../images/100x200_wide_center.png"
outputPrefix = "100x200_wide_center_to_"
# Just a crop, take the bottom
opts = {
'type': 'fill',
'gravity': 'center',
'width': 100,
'height': 50,
}
self.imageResizeHelper(srcPath, outputPrefix, opts)
# Shrink, take the bottom
opts = {
'type': 'fill',
'gravity': 'east',
'width': 50,
'height': 25,
}
self.imageResizeHelper(srcPath, outputPrefix, opts)
# Enlarge, take the bottom
opts = {
'type': 'fill',
'gravity': 'west',
'width': 200,
'height': 100,
}
self.imageResizeHelper(srcPath, outputPrefix, opts)
# -------------------------------------------------------------------------------
# Here we have a tall source image and we are always cropping a wide portion
# at the top of the image
# -------------------------------------------------------------------------------
def test100x200WideTop(self):
srcPath = "file://../images/100x200_wide_top.png"
outputPrefix = "100x200_wide_top_to_"
# Just a crop, take the top
opts = {
'type': 'fill',
'gravity': 'north',
'width': 100,
'height': 50,
}
self.imageResizeHelper(srcPath, outputPrefix, opts)
# Shrink, take the top
opts = {
'type': 'fill',
'gravity': 'northeast',
'width': 50,
'height': 25,
}
self.imageResizeHelper(srcPath, outputPrefix, opts)
# Enlarge, take the top
opts = {
'type': 'fill',
'gravity': 'northwest',
'width': 200,
'height': 100,
}
self.imageResizeHelper(srcPath, outputPrefix, opts)
# -------------------------------------------------------------------------------
# Here we have a wide source image and we are always cropping a tall portion at
# the center of the image
# -------------------------------------------------------------------------------
def test200x100TallCenter(self):
srcPath = "file://../images/200x100_tall_center.png"
outputPrefix = "200x100_tall_center_to_"
# Just a crop, take the center
opts = {
'type': 'fill',
'gravity': 'center',
'width': 50,
'height': 100,
}
self.imageResizeHelper(srcPath, outputPrefix, opts)
# Shrink, take the center
opts = {
'type': 'fill',
'gravity': 'north',
'width': 25,
'height': 50,
}
self.imageResizeHelper(srcPath, outputPrefix, opts)
# Enlarge, take the center
opts = {
'type': 'fill',
'gravity': 'south',
'width': 100,
'height': 200,
}
self.imageResizeHelper(srcPath, outputPrefix, opts)
# -------------------------------------------------------------------------------
# Here we have a tall source image and we are always cropping a tall portion at
# the left of the image
# -------------------------------------------------------------------------------
def test200x100TallLeft(self):
srcPath = "file://../images/200x100_tall_left.png"
outputPrefix = "200x100_tall_left_to_"
# Just a crop, take the left
opts = {
'type': 'fill',
'gravity': 'west',
'width': 50,
'height': 100,
}
self.imageResizeHelper(srcPath, outputPrefix, opts)
# Shrink, take the left
opts = {
'type': 'fill',
'gravity': 'northwest',
'width': 25,
'height': 50,
}
self.imageResizeHelper(srcPath, outputPrefix, opts)
# Enlarge, take the left
opts = {
'type': 'fill',
'gravity': 'southwest',
'width': 100,
'height': 200,
}
self.imageResizeHelper(srcPath, outputPrefix, opts)
# -------------------------------------------------------------------------------
# Here we have a tall source image and we are always cropping a tall portion at
# the right of the image
# -------------------------------------------------------------------------------
def test200x100TallRight(self):
srcPath = "file://../images/200x100_tall_right.png"
outputPrefix = "200x100_tall_right_to_"
# Just a crop, take the right
opts = {
'type': 'fill',
'gravity': 'east',
'width': 50,
'height': 100,
}
self.imageResizeHelper(srcPath, outputPrefix, opts)
# Shrink, take the right
opts = {
'type': 'fill',
'gravity': 'northeast',
'width': 25,
'height': 50,
}
self.imageResizeHelper(srcPath, outputPrefix, opts)
# Enlarge, take the right
opts = {
'type': 'fill',
'gravity': 'southeast',
'width': 100,
'height': 200,
}
self.imageResizeHelper(srcPath, outputPrefix, opts)
# -------------------------------------------------------------------------------
# Here we have a tall source image and we are always cropping a wide portion at
# the bottom of the image
# -------------------------------------------------------------------------------
def test200x100WideBottom(self):
srcPath = "file://../images/200x100_wide_bottom.png"
outputPrefix = "200x100_wide_bottom_to_"
# Just a crop, take the bottom
opts = {
'type': 'fill',
'gravity': 'south',
'width': 200,
'height': 50,
}
self.imageResizeHelper(srcPath, outputPrefix, opts)
# Shrink, take the bottom
opts = {
'type': 'fill',
'gravity': 'southeast',
'width': 100,
'height': 25,
}
self.imageResizeHelper(srcPath, outputPrefix, opts)
# Enlarge, take the bottom
opts = {
'type': 'fill',
'gravity': 'southwest',
'width': 400,
'height': 100,
}
self.imageResizeHelper(srcPath, outputPrefix, opts)
# -------------------------------------------------------------------------------
# Here we have a tall source image and we are always cropping a wide portion at
# the bottom of the image
# -------------------------------------------------------------------------------
def test200x100WideCenter(self):
srcPath = "file://../images/200x100_wide_center.png"
outputPrefix = "200x100_wide_center_to_"
# Just a crop, take the bottom
opts = {
'type': 'fill',
'gravity': 'center',
'width': 200,
'height': 50,
}
self.imageResizeHelper(srcPath, outputPrefix, opts)
# Shrink, take the bottom
opts = {
'type': 'fill',
'gravity': 'east',
'width': 100,
'height': 25,
}
self.imageResizeHelper(srcPath, outputPrefix, opts)
# Enlarge, take the bottom
opts = {
'type': 'fill',
'gravity': 'west',
'width': 400,
'height': 100,
}
self.imageResizeHelper(srcPath, outputPrefix, opts)
# -------------------------------------------------------------------------------
# Here we have a wide source image and we are always cropping a wide portion at
# the top of the image
# -------------------------------------------------------------------------------
def test200x100WideTop(self):
srcPath = "file://../images/200x100_wide_top.png"
outputPrefix = "200x100_wide_top_to_"
# Just a crop, take the top
opts = {
'type': 'fill',
'gravity': 'north',
'width': 200,
'height': 50,
}
self.imageResizeHelper(srcPath, outputPrefix, opts)
# Shrink, take the top
opts = {
'type': 'fill',
'gravity': 'northeast',
'width': 100,
'height': 25,
}
self.imageResizeHelper(srcPath, outputPrefix, opts)
# Enlarge, take the top
opts = {
'type': 'fill',
'gravity': 'northwest',
'width': 400,
'height': 100,
}
self.imageResizeHelper(srcPath, outputPrefix, opts)
# -------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def test_basic_jpg_resize(self):
# -----------------------------
# Resize image
# -----------------------------
output_url = self.outputUrlHelper('test_basic_jpg_resize.jpg')
# Use low JPG quality to make sure parameter is working
resize_operation = {
'type': 'resize',
'params':
{
'width': 200,
'height': 1000,
'type': 'width',
'quality': 50,
'output_url': output_url
}
}
operations = [resize_operation];
output = self.call_arion(self.IMAGE_1_PATH, operations)
self.verifySuccess(output, 1296, 864);
# -----------------------------
# Now read back image data
# -----------------------------
output = self.read_image(output_url)
self.verifySuccess(output, 200, 133);
info = output['info'][0]
self.assertTrue(info['result'])
self.assertEqual(info['type'], 'read_meta')
# By default meta data gets stripped
self.assertFalse(info['model_released'])
self.assertFalse(info['property_released'])
self.assertEqual(info['special_instructions'], '')
self.assertEqual(info['subject'], [])
self.assertEqual(info['copyright'], '')
self.assertEqual(info['city'], '')
self.assertEqual(info['province_state'], '')
self.assertEqual(info['country_name'], '')
self.assertEqual(info['country_code'], '')
self.assertEqual(info['caption'], '')
self.assertEqual(info['keywords'], [])
# -------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def test_resize_shrink_width_limit(self):
output_url = self.outputUrlHelper('test_resize_shrink_width_limit.jpg')
operation = {
'type': 'resize',
'params':
{
'width': 200,
'height': 120,
'type': 'width',
'quality': 92,
'output_url': output_url
}
}
output = self.call_arion(self.IMAGE_1_PATH, [operation])
self.verifySuccess(output)
# -----------------------------
# Now read back image data
# -----------------------------
output = self.read_image(output_url)
self.verifySuccess(output, 180, 120)
# -------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def test_resize_shrink_height(self):
output_url = self.outputUrlHelper('test_resize_shrink_height.jpg')
operation = {
'type': 'resize',
'params':
{
'width': 1000,
'height': 200,
'type': 'height',
'quality': 92,
'output_url': output_url
}
}
output = self.call_arion(self.IMAGE_1_PATH, [operation])
self.verifySuccess(output);
# -----------------------------
# Now read back image data
# -----------------------------
output = self.read_image(output_url)
self.verifySuccess(output, 300, 200)
# -------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def test_resize_shrink_height_limit(self):
output_url = self.outputUrlHelper('test_resize_shrink_height_limit.jpg')
operation = {
'type': 'resize',
'params':
{
'width': 200,
'height': 200,
'type': 'height',
'quality': 92,
'output_url': output_url
}
}
output = self.call_arion(self.IMAGE_1_PATH, [operation])
self.verifySuccess(output);
# -----------------------------
# Now read back image data
# -----------------------------
output = self.read_image(output_url)
self.verifySuccess(output, 200, 133);
# -------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def test_resize_shrink_square(self):
output_url = self.outputUrlHelper('test_resize_shrink_square.jpg')
# Height should not matter here...
resize_operation = {
'type': 'resize',
'params':
{
'width': 200,
'height': 2000,
'type': 'square',
'quality': 92,
'output_url': output_url
}
}
operations = [resize_operation];
output = self.call_arion(self.IMAGE_1_PATH, operations)
self.verifySuccess(output);
# -----------------------------
# Now read back image data
# -----------------------------
output = self.read_image(output_url)
self.verifySuccess(output, 200, 200);
# -------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def test_resize_fill(self):
output_url = self.outputUrlHelper('test_resize_fill.jpg')
resize_operation = {
'type': 'resize',
'params':
{
'width': 200,
'height': 400,
'type': 'fill',
'quality': 92,
'output_url': output_url
}
}
operations = [resize_operation];
output = self.call_arion(self.IMAGE_1_PATH, operations)
self.verifySuccess(output);
# -----------------------------
# Now read back image data
# -----------------------------
output = self.read_image(output_url)
# self.verifySuccess(output, 200, 200);
# -------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def test_basic_read_meta(self):
# -----------------------------
# Read image meta
# -----------------------------
output = self.read_image(self.IMAGE_1_PATH)
self.verifySuccess(output, 1296, 864)
info = output['info'][0]
self.assertTrue(info['result'])
self.assertEqual(info['type'], 'read_meta')
# By default meta data gets stripped
self.assertFalse(info['model_released'])
self.assertFalse(info['property_released'])
self.assertEqual(info['special_instructions'], 'Not Released (NR)')
self.assertEqual(info['subject'], [])
self.assertEqual(info['copyright'], 'Paul Filitchkin')
self.assertEqual(info['city'], 'Bol')
# TODO
# self.assertEqual(info['province_state'], "Splitsko-dalmatinska županija")
self.assertEqual(info['country_name'], 'Croatia')
self.assertEqual(info['country_code'], 'HR')
self.assertEqual(info['caption'], 'Windy road during sunset on Brac Island in Croatia - "Republic of Croatia"')
keywords = info['keywords']
self.assertTrue("Adriatic Sea" in keywords)
self.assertTrue("Balkans" in keywords)
self.assertTrue("Croatia" in keywords)
self.assertTrue("Europe" in keywords)
self.assertTrue("island" in keywords)
self.assertTrue("outdoors" in keywords)
self.assertTrue("road" in keywords)
self.assertTrue("roadtrip" in keywords)
self.assertTrue("sea" in keywords)
self.assertTrue("sunset" in keywords)
# -------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
  def test_jpg_orientation(self):
output = self.copy_image(self.LANDSCAPE_1_PATH, self.outputUrlHelper('Landscape_1.jpg'))
self.verifySuccess(output);
output = self.copy_image(self.LANDSCAPE_2_PATH, self.outputUrlHelper('Landscape_2.jpg'))
self.verifySuccess(output);
output = self.copy_image(self.LANDSCAPE_3_PATH, self.outputUrlHelper('Landscape_3.jpg'))
self.verifySuccess(output);
output = self.copy_image(self.LANDSCAPE_4_PATH, self.outputUrlHelper('Landscape_4.jpg'))
self.verifySuccess(output);
output = self.copy_image(self.LANDSCAPE_5_PATH, self.outputUrlHelper('Landscape_5.jpg'))
self.verifySuccess(output);
output = self.copy_image(self.LANDSCAPE_6_PATH, self.outputUrlHelper('Landscape_6.jpg'))
self.verifySuccess(output);
output = self.copy_image(self.LANDSCAPE_7_PATH, self.outputUrlHelper('Landscape_7.jpg'))
self.verifySuccess(output);
output = self.copy_image(self.LANDSCAPE_8_PATH, self.outputUrlHelper('Landscape_8.jpg'))
self.verifySuccess(output);
# -------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def test_invalid_operation(self):
read_meta_operation = {
'type': 'invalid',
'params': {
'value': 'bogus'
}
}
operations = [read_meta_operation];
output = self.call_arion(self.IMAGE_1_PATH, operations)
self.assertFalse(output['result'])
# -------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def test_invalid_json(self):
# Missing ending brace, but otherwise valid
input_string = "{\"input_url\":\"file://../../examples/images/image-1.jpg\",\"correct_rotation\":true,\"operations\":[{\"type\":\"read_meta\",\"params\":{\"info\":true}}]"
p = Popen([self.ARION_PATH, "--input", input_string], stdout=PIPE)
cmd_output = p.communicate()
output = json.loads(cmd_output[0])
self.assertFalse(output['result'])
# -------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def testNoParams(self):
# Missing params
operation = {
'type': 'read_meta'
}
output = self.call_arion(self.IMAGE_1_PATH, [operation])
self.assertFalse(output['result'])
# -------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def test_md5(self):
operation = {
'type': 'fingerprint',
'params':
{
'type': 'md5'
}
}
operations = [operation];
output = self.call_arion(self.IMAGE_1_PATH, operations)
self.assertEqual(output['info'][0]['md5'], 'a0c5cee72d1a59a6d0f3f6e76b73cecc') # new libJpeg
# self.assertEqual(output['info'][0]['md5'], 'c8d342a627da420e77c2e90a10f75689')
# -------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def testInvalidCopyParams(self):
# No output_url
operation = {
'type': 'copy',
'params': {
}
}
self.verifyFailure(self.call_arion(self.IMAGE_1_PATH, [operation]))
# Empty output_url
operation = {
'type': 'copy',
'params': {
'output_url': ''
}
}
self.verifyFailure(self.call_arion(self.IMAGE_1_PATH, [operation]))
# Missing valid output_url
operation = {
'type': 'copy',
'params': {
'output_url': ''
}
}
self.verifyFailure(self.call_arion(self.IMAGE_1_PATH, [operation]))
# -------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def testInvalidResizeParams(self):
# Resize operation missing type
operation = {
'type': 'resize',
'params':
{
'width': 200,
'height': 400,
'output_url': 'output.jpg'
}
}
self.verifyFailure(self.call_arion(self.IMAGE_1_PATH, [operation]))
# Resize operation missing width
operation = {
'type': 'resize',
'params':
{
'type': 'width',
'height': 400,
'output_url': 'output.jpg'
}
}
self.verifyFailure(self.call_arion(self.IMAGE_1_PATH, [operation]))
# Resize operation missing height
operation = {
'type': 'resize',
'params':
{
'type': 'width',
'width': 200,
'output_url': 'output.jpg'
}
}
self.verifyFailure(self.call_arion(self.IMAGE_1_PATH, [operation]))
    # Desired size too big
operation = {
'type': 'resize',
'params':
{
'type': 'width',
'width': 10000,
'height': 10001,
'output_url': 'output.jpg'
}
}
self.verifyFailure(self.call_arion(self.IMAGE_1_PATH, [operation]))
# -------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
def test_allow_skip_decode_image(self):
# Missing params
operation = {
'type': 'read_meta',
'params': {
'info': True
}
}
additional_params = {
'allow_skip_decode_image': True
}
output = self.call_arion(self.IMAGE_1_PATH, [operation], additional_params)
self.assertFalse('height' in output)
self.assertFalse('width' in output)
# -------------------------------------------------------------------------------
# Helper to merge dicts
# -------------------------------------------------------------------------------
@staticmethod
def merge_two_dicts(x, y):
z = x.copy() # start with x's keys and values
z.update(y) # modifies z with y's keys and values & returns None
return z
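    # Illustrative note on the merge semantics: dict.update() copies y's items
    # into z, so on duplicate keys the value from y wins, e.g.
    #   merge_two_dicts({'quality': 92}, {'quality': 80}) -> {'quality': 80}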
# -------------------------------------------------------------------------------
# Called only once
# -------------------------------------------------------------------------------
@classmethod
def setUpClass(cls):
if not os.path.exists(cls.OUTPUT_IMAGE_PATH):
os.makedirs(cls.OUTPUT_IMAGE_PATH)
# Remove any existing output files
for the_file in os.listdir(cls.OUTPUT_IMAGE_PATH):
file_path = os.path.join(cls.OUTPUT_IMAGE_PATH, the_file)
try:
if os.path.isfile(file_path):
os.unlink(file_path)
except Exception as e:
print(e)
# -------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/env python
#
# Copyright (c) 2015 Intel Corporation.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of works must retain the original copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the original copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Intel Corporation nor the names of its contributors
# may be used to endorse or promote products derived from this work without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors:
# Li, Hao<[email protected]>
import unittest
import os
import sys
import commands
import comm
import shutil
import glob
import xml.etree.ElementTree as ET
from TestApp import *
app_name = "Sample"
package_name = "org.crosswalkproject." + app_name.lower()
active_name = app_name + "Activity"
sample_src = comm.sample_src_pref + "extensions-android/"
testapp = None
comm.setUp()
def init(xmlpath):
channel = os.environ.get('CHANNEL')
if not channel:
print (" get channel error\n")
sys.exit(1)
if not comm.xwalk_version:
print (" get crosswalk version error\n")
sys.exit(1)
tree = ET.parse(xmlpath)
for elem in tree.iter(tag='property'):
xwalk_version_name = elem.attrib.get('name')
if xwalk_version_name == 'crosswalk-version':
crosswalk_version = comm.xwalk_version
if "64" in comm.ARCH:
crosswalk_version = comm.xwalk_version + "-64bit"
#elem.set(str(elem.attrib.items()[1][0]),'15.44.375.0')
elem.set(str(elem.attrib.items()[1][0]), crosswalk_version)
for node in tree.iter(tag='get'):
#src_val = https://download.01.org/crosswalk/releases/crosswalk/android/canary/18.46.452.0/crosswalk-18.46.452.0-64bit.zip
src_val = "https://download.01.org/crosswalk/releases/crosswalk/android/%s/%s/crosswalk-%s.zip" \
% (channel, comm.xwalk_version, crosswalk_version)
print node.attrib.items()[1][0]
node.set(str(node.attrib.items()[1][0]), src_val)
print src_val
tree.write(xmlpath, "utf-8", "xml")
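# Summary of init(): it rewrites the sample's build.xml so that the
# 'crosswalk-version' property matches comm.xwalk_version (with a "-64bit"
# suffix on 64-bit ARCH) and so that the <get> node's URL points at the
# download.01.org crosswalk zip for the CHANNEL environment variable.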
def check_appname():
global app_name
#xwalk_version = '8.38.208.0'
if int(comm.xwalk_version.split('.')[0]) < 9:
app_name = 'xwalk_echo_app'
else:
app_name = 'Sample'
class ExtensionsAndroid(unittest.TestCase):
def test_1_pack(self):
check_appname()
xmlpath = sample_src + '/xwalk-echo-extension-src/build.xml'
init(xmlpath)
cmd = "%s/build.sh -v %s -a %s -m %s" % (sample_src, comm.xwalk_version, comm.ARCH, comm.MODE)
os.chdir(comm.build_app_dest)
print "Generate APK %s ----------------> START" % app_name
packstatus = commands.getstatusoutput(cmd)
self.assertEquals(0, packstatus[0])
self.assertIn("build successful", packstatus[1].lower())
print "\nGenerate APK %s ----------------> OK\n" % app_name
apk_build_flag = False
apks = glob.glob(os.path.join(sample_src, "*.apk"))
if len(apks) > 0:
print apks
apk_build_flag = True
for apk in apks:
shutil.move(apk, comm.build_app_dest)
else:
print 'Not found apk'
self.assertTrue(apk_build_flag)
def test_2_install(self):
apk_file = commands.getstatusoutput("ls %s| grep %s" % (comm.build_app_dest, app_name.lower()))[1]
if apk_file.endswith(".apk"):
global testapp
testapp = TestApp(comm.device, comm.build_app_dest + apk_file, package_name, active_name)
if testapp.isInstalled():
testapp.uninstall()
self.assertTrue(testapp.install())
else:
print("-->> No packed %s apk in %s" % (app_name, comm.build_app_dest))
self.assertTrue(False)
def test_3_launch(self):
if testapp is not None:
self.assertTrue(testapp.launch())
else:
print("-->> Fail to pack %s apk" % app_name)
self.assertTrue(False)
def test_4_switch(self):
if testapp is not None:
self.assertTrue(testapp.switch())
else:
print("-->> Fail to pack %s apk" % app_name)
self.assertTrue(False)
def test_5_stop(self):
if testapp is not None:
self.assertTrue(testapp.stop())
else:
print("-->> Fail to pack %s apk" % app_name)
self.assertTrue(False)
def test_6_uninstall(self):
if testapp is not None:
self.assertTrue(testapp.uninstall())
else:
print("-->> Fail to pack %s apk" % app_name)
self.assertTrue(False)
def test_7_uninstall_when_app_running(self):
if testapp is not None:
if not testapp.isInstalled():
testapp.install()
if not testapp.isRunning():
testapp.launch()
self.assertTrue(testapp.uninstall())
else:
print("-->> Fail to pack %s apk" % app_name)
self.assertTrue(False)
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/env python
# Copyright 2015-present Samsung Electronics Co., Ltd. and other contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This file converts src/js/*.js to a C-array in src/iotjs_js.[h|c] file.
# And this file also generates magic string list in src/iotjs_string_ext.inl.h
# file to reduce JerryScript heap usage.
import os
import re
import subprocess
import struct
import string
from common_py.system.filesystem import FileSystem as fs
from common_py import path
def regroup(l, n):
return [l[i:i+n] for i in range(0, len(l), n)]
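# Illustrative example: regroup slices a sequence into fixed-size chunks,
# e.g. regroup("abcdefgh", 3) -> ['abc', 'def', 'gh'].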
def remove_comments(code):
pattern = r'(\".*?\"|\'.*?\')|(/\*.*?\*/|//[^\r\n]*$)'
regex = re.compile(pattern, re.MULTILINE | re.DOTALL)
def _replacer(match):
if match.group(2) is not None:
return ""
else:
return match.group(1)
return regex.sub(_replacer, code)
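# Note: in the regex above, group 1 matches quoted string literals (kept
# verbatim by _replacer) while group 2 matches /* ... */ and // ... comments
# (dropped), so comment markers that appear inside strings survive.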
def remove_whitespaces(code):
return re.sub('\n+', '\n', re.sub('\n +', '\n', code))
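# Note: this strips leading spaces after newlines and collapses runs of blank
# lines, e.g. "a\n   b\n\n\nc" -> "a\nb\nc".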
def force_str(string):
if not isinstance(string, str):
return string.decode('utf-8')
else:
return string
def parse_literals(code):
JERRY_SNAPSHOT_VERSION = 51
JERRY_SNAPSHOT_MAGIC = 0x5952524A
literals = set()
# header format:
# uint32_t magic
# uint32_t version
# uint32_t global opts
# uint32_t literal table offset
header = struct.unpack('I' * 4, code[0:4 * 4])
if header[0] != JERRY_SNAPSHOT_MAGIC:
print('Incorrect snapshot format! Magic number is incorrect')
exit(1)
print(header)
if header[1] != JERRY_SNAPSHOT_VERSION:
print ('Please check jerry snapshot version (Last confirmed: %d)'
% JERRY_SNAPSHOT_VERSION)
exit(1)
code_ptr = header[3] + 4
while code_ptr < len(code):
length = struct.unpack('H', code[code_ptr : code_ptr + 2])[0]
code_ptr = code_ptr + 2
if length == 0:
continue
if length < 32:
item = struct.unpack('%ds' % length,
code[code_ptr : code_ptr + length])
literals.add(force_str(item[0]))
code_ptr = code_ptr + length + (length % 2)
return literals
LICENSE = '''
/* Copyright 2015-present Samsung Electronics Co., Ltd. and other contributors
*
* Licensed under the Apache License, Version 2.0 (the \"License\");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an \"AS IS\" BASIS
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* This file is generated by tools/js2c.py
* Do not modify this.
*/
'''
HEADER1 = '''#ifndef IOTJS_JS_H
#define IOTJS_JS_H
'''
FOOTER1 = '''
#endif
'''
HEADER2 = '''#include <stdio.h>
#include <stdint.h>
#include "iotjs_js.h"
'''
EMPTY_LINE = '\n'
MAGIC_STRINGS_HEADER = '#define JERRY_MAGIC_STRING_ITEMS \\\n'
MODULE_SNAPSHOT_VARIABLES_H = '''
extern const char module_{NAME}[];
extern const uint32_t module_{NAME}_idx;
'''
MODULE_SNAPSHOT_VARIABLES_C = '''
#define MODULE_{NAME}_IDX ({IDX})
const char module_{NAME}[] = "{NAME}";
const uint32_t module_{NAME}_idx = MODULE_{NAME}_IDX;
'''
NATIVE_SNAPSHOT_STRUCT_H = '''
typedef struct {
const char* name;
const uint32_t idx;
} iotjs_js_module_t;
extern const iotjs_js_module_t js_modules[];
'''
MODULE_VARIABLES_H = '''
extern const char {NAME}_n[];
extern const uint8_t {NAME}_s[];
extern const size_t {NAME}_l;
'''
MODULE_VARIABLES_C = '''
#define SIZE_{NAME_UPPER} {SIZE}
const size_t {NAME}_l = SIZE_{NAME_UPPER};
const char {NAME}_n[] = "{NAME}";
const uint8_t {NAME}_s[] = {{
{CODE}
}};
'''
NATIVE_STRUCT_H = '''
typedef struct {
const char* name;
const void* code;
const size_t length;
} iotjs_js_module_t;
extern const iotjs_js_module_t js_modules[];
'''
NATIVE_STRUCT_C = '''
const iotjs_js_module_t js_modules[] = {{
{MODULES}
}};
'''
def hex_format(ch):
if isinstance(ch, str):
ch = ord(ch)
return "0x{:02x}".format(ch)
def format_code(code, indent):
lines = []
# convert all characters to hex format
converted_code = map(hex_format, code)
# 10 hex number per line
for line in regroup(", ".join(converted_code), 10 * 6):
lines.append((' ' * indent) + line.strip())
return "\n".join(lines)
def merge_snapshots(snapshot_infos, snapshot_tool):
output_path = fs.join(path.SRC_ROOT, 'js','merged.modules')
cmd = [snapshot_tool, "merge", "-o", output_path]
cmd.extend([item['path'] for item in snapshot_infos])
ret = subprocess.call(cmd)
if ret != 0:
msg = "Failed to merge %s: - %d" % (snapshot_infos, ret)
print("%s%s%s" % ("\033[1;31m", msg, "\033[0m"))
exit(1)
for item in snapshot_infos:
fs.remove(item['path'])
with open(output_path, 'rb') as snapshot:
code = snapshot.read()
fs.remove(output_path)
return code
def get_snapshot_contents(js_path, snapshot_tool):
""" Convert the given module with the snapshot generator
and return the resulting bytes.
"""
wrapped_path = js_path + ".wrapped"
snapshot_path = js_path + ".snapshot"
module_name = os.path.splitext(os.path.basename(js_path))[0]
with open(wrapped_path, 'w') as fwrapped, open(js_path, "r") as fmodule:
if module_name != "iotjs":
fwrapped.write("(function(exports, require, module, native) {\n")
fwrapped.write(fmodule.read())
if module_name != "iotjs":
fwrapped.write("});\n")
ret = subprocess.call([snapshot_tool,
"generate",
"--context", "eval",
"-o", snapshot_path,
wrapped_path])
fs.remove(wrapped_path)
if ret != 0:
msg = "Failed to dump %s: - %d" % (js_path, ret)
print("%s%s%s" % ("\033[1;31m", msg, "\033[0m"))
fs.remove(snapshot_path)
exit(1)
return snapshot_path
def get_js_contents(js_path, is_debug_mode=False):
""" Read the contents of the given js module. """
with open(js_path, "r") as f:
code = f.read()
# minimize code when in release mode
if not is_debug_mode:
code = remove_comments(code)
code = remove_whitespaces(code)
return code
def js2c(buildtype, js_modules, snapshot_tool=None, verbose=False):
is_debug_mode = (buildtype == "debug")
    no_snapshot = (snapshot_tool is None)
magic_string_set = set()
str_const_regex = re.compile('^#define IOTJS_MAGIC_STRING_\w+\s+"(\w+)"$')
with open(fs.join(path.SRC_ROOT, 'iotjs_magic_strings.h'), 'r') as fin_h:
for line in fin_h:
result = str_const_regex.search(line)
if result:
magic_string_set.add(result.group(1))
# generate the code for the modules
with open(fs.join(path.SRC_ROOT, 'iotjs_js.h'), 'w') as fout_h, \
open(fs.join(path.SRC_ROOT, 'iotjs_js.c'), 'w') as fout_c:
fout_h.write(LICENSE)
fout_h.write(HEADER1)
fout_c.write(LICENSE)
fout_c.write(HEADER2)
snapshot_infos = []
js_module_names = []
for idx, module in enumerate(sorted(js_modules)):
[name, js_path] = module.split('=', 1)
js_module_names.append(name)
if verbose:
print('Processing module: %s' % name)
if no_snapshot:
code = get_js_contents(js_path, is_debug_mode)
code_string = format_code(code, 1)
fout_h.write(MODULE_VARIABLES_H.format(NAME=name))
fout_c.write(MODULE_VARIABLES_C.format(NAME=name,
NAME_UPPER=name.upper(),
SIZE=len(code),
CODE=code_string))
else:
code_path = get_snapshot_contents(js_path, snapshot_tool)
info = {'name': name, 'path': code_path, 'idx': idx}
snapshot_infos.append(info)
fout_h.write(MODULE_SNAPSHOT_VARIABLES_H.format(NAME=name))
fout_c.write(MODULE_SNAPSHOT_VARIABLES_C.format(NAME=name,
IDX=idx))
if no_snapshot:
modules_struct = [
' {{ {0}_n, {0}_s, SIZE_{1} }},'.format(name, name.upper())
for name in sorted(js_module_names)
]
modules_struct.append(' { NULL, NULL, 0 }')
else:
code = merge_snapshots(snapshot_infos, snapshot_tool)
code_string = format_code(code, 1)
magic_string_set |= parse_literals(code)
name = 'iotjs_js_modules'
fout_h.write(MODULE_VARIABLES_H.format(NAME=name))
fout_c.write(MODULE_VARIABLES_C.format(NAME=name,
NAME_UPPER=name.upper(),
SIZE=len(code),
CODE=code_string))
modules_struct = [
' {{ module_{0}, MODULE_{0}_IDX }},'.format(info['name'])
for info in snapshot_infos
]
modules_struct.append(' { NULL, 0 }')
if no_snapshot:
native_struct_h = NATIVE_STRUCT_H
else:
native_struct_h = NATIVE_SNAPSHOT_STRUCT_H
fout_h.write(native_struct_h)
fout_h.write(FOOTER1)
fout_c.write(NATIVE_STRUCT_C.format(MODULES="\n".join(modules_struct)))
fout_c.write(EMPTY_LINE)
# Write out the external magic strings
magic_str_path = fs.join(path.SRC_ROOT, 'iotjs_string_ext.inl.h')
with open(magic_str_path, 'w') as fout_magic_str:
fout_magic_str.write(LICENSE)
fout_magic_str.write(MAGIC_STRINGS_HEADER)
sorted_strings = sorted(magic_string_set, key=lambda x: (len(x), x))
for idx, magic_string in enumerate(sorted_strings):
magic_text = repr(magic_string)[1:-1]
            magic_text = magic_text.replace("\"", "\\\"")
fout_magic_str.write(' MAGICSTR_EX_DEF(MAGIC_STR_%d, "%s") \\\n'
% (idx, magic_text))
# an empty line is required to avoid compile warning
fout_magic_str.write(EMPTY_LINE)
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--buildtype',
choices=['debug', 'release'], default='debug',
help='Specify the build type: %(choices)s (default: %(default)s)')
parser.add_argument('--modules', required=True,
help='List of JS files to process. Format: '
'<module_name1>=<js_file1>,<module_name2>=<js_file2>,...')
parser.add_argument('--snapshot-tool', default=None,
help='Executable to use for generating snapshots and merging them '
'(ex.: the JerryScript snapshot tool). '
'If not specified the JS files will be directly processed.')
    parser.add_argument('-v', '--verbose', action='store_true', default=False,
                        help='Enable verbose output.')
options = parser.parse_args()
if not options.snapshot_tool:
print('Converting JS modules to C arrays (no snapshot)')
else:
print('Using "%s" as snapshot tool' % options.snapshot_tool)
modules = options.modules.replace(',', ' ').split()
js2c(options.buildtype, modules, options.snapshot_tool, options.verbose)
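# Illustrative invocation (module names, JS paths, and the snapshot tool path
# below are examples only, not taken from the build system):
#   python tools/js2c.py --buildtype=release \
#       --modules console=src/js/console.js,buffer=src/js/buffer.js \
#       --snapshot-tool=./deps/jerry/bin/jerry-snapshot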
|
from dango import dcog, Cog
from .cmds import SubModule # noqa pylint: disable=unused-import
@dcog()
class InModule(Cog):
def __init__(self, config):
pass
|
from matplotlib.pyplot import axes
import pandas as pd
from fbprophet import Prophet
from pandas.core.frame import DataFrame
raw_data = pd.read_csv('./rawdata/student.csv', encoding='CP949')
raw_data = raw_data.fillna(0)
data = pd.DataFrame(raw_data.sum())  # international student data
print(data.head(10))
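# A minimal sketch (commented out) of how this summed series might feed into
# Prophet, assuming the index holds date-like labels; the column names and the
# monthly horizon below are illustrative, not taken from the original data.
# df = data.reset_index()
# df.columns = ['ds', 'y']
# df['ds'] = pd.to_datetime(df['ds'])
# m = Prophet()
# m.fit(df)
# future = m.make_future_dataframe(periods=12, freq='M')
# forecast = m.predict(future)
# print(forecast[['ds', 'yhat']].tail())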
all_data = pd.read_csv('./data/all_preprocess.csv')
all_data = all_data.drop(columns=list(all_data.columns)[0])
print(all_data) |
from Layer import *
class Cursor(Render):
def __init__(self, img, buttons):
self.img = pygame.image.load(img).convert_alpha()
self.location = buttons[0].location - Vector2(50, -30)
self.buttons = buttons
self.sounds = Sound()
self.point_to = 0
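    # point_to indexes into self.buttons; Menu builds four buttons
    # (Start, Level, Manual, Exit), so move() below wraps the index around 0..3.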
def move(self, direction):
self.sounds.play('menu')
if direction == 'down':
self.point_to += 1
if self.point_to == 4:
self.point_to = 0
elif direction == 'up':
self.point_to -= 1
if self.point_to == -1:
self.point_to = 3
else:
if self.point_to == 1:
self.buttons[1].change(direction)
self.change()
def change(self):
if self.point_to == 1:
self.buttons[1].arrow_on = True
else:
self.buttons[1].arrow_on = False
self.location = self.buttons[self.point_to].location - Vector2(50, -30)
class Menu:
def __init__(self, level):
self.buttons = [Button('pictures/buttons.png', (400, 100), 'Start'),
SelectButton('pictures/buttons.png', (400, 225),self.find_level(level), 'Level'),
Button('pictures/buttons.png', (400, 350), 'Manual'),
Button('pictures/buttons.png', (400, 475), 'Exit')]
self.cursor = Cursor('pictures/arrow.png', self.buttons)
self.blink = 60
self.manual = False
self.exit = False
def run(self):
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
self.exit = True
return False
if event.type == pygame.KEYDOWN:
if not self.manual:
if event.key == pygame.K_SPACE:
if self.cursor.point_to == 0:
return False
if self.cursor.point_to == 2:
self.manual = True
if self.cursor.point_to == 3:
pygame.quit()
self.exit = True
return False
if event.key == pygame.K_DOWN:
self.cursor.move('down')
if event.key == pygame.K_UP:
self.cursor.move('up')
if event.key == pygame.K_LEFT:
self.cursor.move('left')
if event.key == pygame.K_RIGHT:
self.cursor.move('right')
if event.key == pygame.K_ESCAPE:
self.manual = False
return True
def level(self):
if self.buttons[1].level == 'Easy':
return 150
if self.buttons[1].level == 'Medium':
return 100
return 75
def find_level(self, level):
if level == 150:
return 'Easy'
if level == 100:
return 'Medium'
return 'Hard'
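    # level() and find_level() are inverse mappings between the difficulty
    # label and its numeric value: Easy=150, Medium=100, Hard=75. How that
    # number is used (e.g. as a speed or interval) is defined outside this
    # file, so it is not assumed here.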
def render(self, surface):
        text = create_text('Move with the arrow keys and press Space to select', 'shrikhand', 30, white)
if self.blink == 60:
self.blink = 0
elif 30 <= self.blink < 60:
self.blink += 1
elif self.blink < 30:
surface.blit(text, (250, 600))
self.blink += 1
for b in self.buttons:
b.render(surface)
self.cursor.render(surface)
if self.manual:
Manual().render(surface)
class Manual:
def __init__(self):
        self.line1 = create_text("Goal: Reach 270 electoral votes before you run out of electoral votes or lives."
                                 , 'shrikhand', 20, black)
        self.line2 = create_text('How to win: Move with the arrow keys (up and down) to collect electoral votes (the blue circle)'
                                 , 'shrikhand', 20, black)
        self.line3 = create_text('and press the Space key to shoot the mail ballot at the enemy and stop him from getting into the White House.'
                                 , 'shrikhand', 20, black)
def render(self, surface):
pygame.draw.rect(surface, white, pygame.Rect(90, 200, 1250, 200))
surface.blit(self.line1, (100, 210))
surface.blit(self.line2, (100, 280))
surface.blit(self.line3, (100, 310))
        exit = create_text('To go back to the menu, press the Escape key'
                           , 'shrikhand', 20, red)
surface.blit(exit, (110, 370)) |
from torch import nn
class DPNet(nn.Module):
def __init__(
self, image_channels: int = 1, common_channels: int = 9, final_channel: int = 3
):
super().__init__()
self.conv1 = self._build_conv1(
in_channels=image_channels, out_channels=common_channels, kernel_size=3
)
self.res1 = self._build_residual_block(
in_channels=common_channels, out_channels=common_channels, kernel_size=3
)
self.res2 = self._build_residual_block(
in_channels=common_channels, out_channels=common_channels, kernel_size=3
)
self.res3 = self._build_residual_block(
in_channels=common_channels, out_channels=common_channels, kernel_size=3
)
self.res4 = self._build_residual_block(
in_channels=common_channels, out_channels=common_channels, kernel_size=3
)
self.res5 = self._build_residual_block(
in_channels=common_channels, out_channels=common_channels, kernel_size=3
)
self.conv2 = self._build_conv2(
in_channels=common_channels, out_channels=common_channels, kernel_size=3
)
self.upsample = self._build_upsample_block(
in_channels=common_channels,
upscale_factor=4,
image_channels=image_channels,
kernel_size=3,
final_channel=final_channel,
)
self.conv3 = self._build_conv3(
in_channels=final_channel, out_channels=1, kernel_size=1
)
self.out = nn.Tanh()
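        # Summary of the forward pass (see forward() below): conv1 extracts
        # features, five residual blocks refine them with additive skip
        # connections, conv2 plus the long skip from conv1's output feeds a
        # PixelShuffle-based 4x upsampling block, and a final 1x1 conv with
        # tanh (clamped to [-1, 1]) produces the single-channel output.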
def _build_conv1(self, in_channels: int, out_channels: int, kernel_size: int):
return nn.Sequential(
nn.Conv2d(
in_channels=in_channels,
out_channels=out_channels,
kernel_size=kernel_size,
padding="same",
),
nn.PReLU(),
)
def _build_conv2(self, in_channels, out_channels, kernel_size):
return nn.Sequential(
nn.Conv2d(
in_channels=in_channels,
out_channels=out_channels,
kernel_size=kernel_size,
padding="same",
),
nn.BatchNorm2d(out_channels),
)
def _build_conv3(self, in_channels, out_channels, kernel_size):
return nn.Sequential(
nn.Conv2d(
in_channels=in_channels,
out_channels=out_channels,
kernel_size=kernel_size,
padding="valid",
),
)
def _build_residual_block(self, in_channels, out_channels, kernel_size):
return nn.Sequential(
nn.Conv2d(
in_channels=in_channels,
out_channels=out_channels,
kernel_size=kernel_size,
padding="same",
),
nn.BatchNorm2d(out_channels),
nn.PReLU(),
nn.Conv2d(
in_channels=out_channels,
out_channels=out_channels,
kernel_size=kernel_size,
padding="same",
),
nn.BatchNorm2d(out_channels),
)
def _build_upsample_block(
self, in_channels, upscale_factor, image_channels, kernel_size, final_channel
):
return nn.Sequential(
nn.Conv2d(
in_channels=in_channels,
out_channels=upscale_factor ** 2 * image_channels * final_channel,
kernel_size=kernel_size,
padding="same",
),
nn.PixelShuffle(upscale_factor=upscale_factor),
nn.BatchNorm2d(image_channels * final_channel),
nn.PReLU(),
)
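    # Channel arithmetic for the block above: the conv produces
    # upscale_factor**2 * image_channels * final_channel feature maps, and
    # nn.PixelShuffle(upscale_factor) rearranges them into
    # image_channels * final_channel maps at upscale_factor times the spatial
    # resolution, which is why BatchNorm2d is sized that way. conv3 expects
    # final_channel inputs, so the defaults only line up when
    # image_channels == 1.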
def forward(self, x):
f0 = self.conv1(x)
x = self.res1(f0)
f1 = f0 + x
x = self.res2(f1)
f2 = f1 + x
x = self.res3(f2)
f3 = f2 + x
x = self.res4(f3)
f4 = f3 + x
x = self.res5(f4)
x = self.conv2(x)
x = f0 + x
x = self.upsample(x)
x = self.conv3(x)
x = self.out(x)
x = x.clamp(-1, 1)
return x
if __name__ == "__main__":
model = DPNet()
import torch
x = torch.randn((4, 1, 12, 12))
y = model(x)
print(y.shape)
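    # With the 4x pixel-shuffle upsampling and the "same"/1x1 convolutions
    # above, this should print torch.Size([4, 1, 48, 48]).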
|