max_stars_repo_path (stringlengths 3-269) | max_stars_repo_name (stringlengths 4-119) | max_stars_count (int64 0-191k) | id (stringlengths 1-7) | content (stringlengths 6-1.05M) | score (float64 0.23-5.13) | int_score (int64 0-5) |
---|---|---|---|---|---|---|
test/test_cursor_binding.py | rhlahuja/snowflake-connector-python | 0 | 4200 | <gh_stars>0
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2012-2018 Snowflake Computing Inc. All rights reserved.
#
import pytest
from snowflake.connector.errors import (ProgrammingError)
def test_binding_security(conn_cnx, db_parameters):
"""
SQL Injection Tests
"""
try:
with conn_cnx() as cnx:
cnx.cursor().execute(
"CREATE OR REPLACE TABLE {name} "
"(aa INT, bb STRING)".format(
name=db_parameters['name']))
cnx.cursor().execute(
"INSERT INTO {name} VALUES(%s, %s)".format(
name=db_parameters['name']),
(1, 'test1'))
cnx.cursor().execute(
"INSERT INTO {name} VALUES(%(aa)s, %(bb)s)".format(
name=db_parameters['name']),
{'aa': 2, 'bb': 'test2'})
for rec in cnx.cursor().execute(
"SELECT * FROM {name} ORDER BY 1 DESC".format(
name=db_parameters['name'])):
break
assert rec[0] == 2, 'First column'
assert rec[1] == 'test2', 'Second column'
for rec in cnx.cursor().execute(
"SELECT * FROM {name} WHERE aa=%s".format(
name=db_parameters['name']), (1,)):
break
assert rec[0] == 1, 'First column'
assert rec[1] == 'test1', 'Second column'
# SQL injection safe test
# Good Example
with pytest.raises(ProgrammingError):
cnx.cursor().execute(
"SELECT * FROM {name} WHERE aa=%s".format(
name=db_parameters['name']),
("1 or aa>0",))
with pytest.raises(ProgrammingError):
cnx.cursor().execute(
"SELECT * FROM {name} WHERE aa=%(aa)s".format(
name=db_parameters['name']),
{"aa": "1 or aa>0"})
# Bad Example in application. DON'T DO THIS
c = cnx.cursor()
c.execute("SELECT * FROM {name} WHERE aa=%s".format(
name=db_parameters['name']) % ("1 or aa>0",))
rec = c.fetchall()
assert len(rec) == 2, "string interpolation does not raise an error, unlike the bound queries above."
finally:
with conn_cnx() as cnx:
cnx.cursor().execute(
"drop table if exists {name}".format(
name=db_parameters['name']))
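# Illustrative aside (not part of the original test suite): the contrast the
# test above exercises, shown on a generic DB-API cursor. 'cur' and the table
# name 't' are hypothetical placeholders.
def _binding_vs_interpolation(cur):
    # Safe: "1 or aa>0" travels as a bound value, so it cannot alter the query
    # structure; Snowflake rejects it as an invalid INT value (ProgrammingError).
    cur.execute("SELECT * FROM t WHERE aa=%s", ("1 or aa>0",))
    # Unsafe: plain %-interpolation pastes the text into the SQL string itself,
    # silently turning the predicate into "aa=1 or aa>0".
    cur.execute("SELECT * FROM t WHERE aa=%s" % ("1 or aa>0",))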
def test_binding_list(conn_cnx, db_parameters):
"""
SQL binding list type for IN
"""
try:
with conn_cnx() as cnx:
cnx.cursor().execute(
"CREATE OR REPLACE TABLE {name} "
"(aa INT, bb STRING)".format(
name=db_parameters['name']))
cnx.cursor().execute(
"INSERT INTO {name} VALUES(%s, %s)".format(
name=db_parameters['name']),
(1, 'test1'))
cnx.cursor().execute(
"INSERT INTO {name} VALUES(%(aa)s, %(bb)s)".format(
name=db_parameters['name']),
{'aa': 2, 'bb': 'test2'})
cnx.cursor().execute(
"INSERT INTO {name} VALUES(3, 'test3')".format(
name=db_parameters['name']))
for rec in cnx.cursor().execute("""
SELECT * FROM {name} WHERE aa IN (%s) ORDER BY 1 DESC
""".format(name=db_parameters['name']), ([1, 3],)):
break
assert rec[0] == 3, 'First column'
assert rec[1] == 'test3', 'Second column'
for rec in cnx.cursor().execute(
"SELECT * FROM {name} WHERE aa=%s".format(
name=db_parameters['name']), (1,)):
break
assert rec[0] == 1, 'First column'
assert rec[1] == 'test1', 'Second column'
rec = cnx.cursor().execute("""
SELECT * FROM {name} WHERE aa IN (%s) ORDER BY 1 DESC
""".format(name=db_parameters['name']), ((1,),))
finally:
with conn_cnx() as cnx:
cnx.cursor().execute(
"drop table if exists {name}".format(
name=db_parameters['name']))
def test_unsupported_binding(conn_cnx, db_parameters):
"""
Unsupported data binding
"""
try:
with conn_cnx() as cnx:
cnx.cursor().execute(
"CREATE OR REPLACE TABLE {name} "
"(aa INT, bb STRING)".format(
name=db_parameters['name']))
cnx.cursor().execute(
"INSERT INTO {name} VALUES(%s, %s)".format(
name=db_parameters['name']),
(1, 'test1'))
sql = 'select count(*) from {name} where aa=%s'.format(
name=db_parameters['name'])
with cnx.cursor() as cur:
rec = cur.execute(sql, (1,)).fetchone()
assert rec[0] is not None, 'no value is returned'
# dict
with pytest.raises(ProgrammingError):
cnx.cursor().execute(sql, ({'value': 1},))
finally:
with conn_cnx() as cnx:
cnx.cursor().execute(
"drop table if exists {name}".format(
name=db_parameters['name']))
| 2.828125 | 3 |
taln2016/icsisumm-primary-sys34_v1/nltk/nltk-0.9.2/nltk/model/__init__.py | hectormartinez/rougexstem | 0 | 4201 | # Natural Language Toolkit: Language Models
#
# Copyright (C) 2001-2008 University of Pennsylvania
# Author: <NAME> <<EMAIL>>
# URL: <http://nltk.sf.net>
# For license information, see LICENSE.TXT
class ModelI(object):
"""
A processing interface for assigning a probability to the next word.
"""
def __init__(self):
'''Create a new language model.'''
raise NotImplementedError()
def train(self, text):
'''Train the model on the text.'''
raise NotImplementedError()
def probability(self, word, context):
'''Evaluate the probability of this word in this context.'''
raise NotImplementedError()
def choose_random_word(self, context):
'''Randomly select a word that is likely to appear in this context.'''
raise NotImplementedError()
def entropy(self, text):
'''Evaluate the total entropy of a message with respect to the model.
This is the sum of the log probability of each word in the message.'''
raise NotImplementedError()
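# A minimal sketch (not part of the original NLTK source): a toy
# maximum-likelihood unigram model implementing the ModelI interface above.
# The class name and its internals are illustrative assumptions only.
from collections import Counter
from math import log

class ToyUnigramModel(ModelI):
    def __init__(self):
        self._counts = Counter()
        self._total = 0

    def train(self, text):
        # 'text' is assumed to be an iterable of word tokens
        self._counts.update(text)
        self._total = sum(self._counts.values())

    def probability(self, word, context=None):
        # a unigram model ignores the context
        return self._counts[word] / float(self._total) if self._total else 0.0

    def entropy(self, text):
        # sum of the log probability of each word, as the docstring above describes
        return sum(log(self.probability(w)) for w in text if self.probability(w) > 0)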
| 3.15625 | 3 |
flask-graphene-sqlalchemy/models.py | JovaniPink/flask-apps | 0 | 4202 | import os
from graphene_sqlalchemy import SQLAlchemyObjectType
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base
POSTGRES_CONNECTION_STRING = (
os.environ.get("POSTGRES_CONNECTION_STRING")
or "postgres://postgres:password@localhost:6432/postgres"
)
engine = create_engine(POSTGRES_CONNECTION_STRING, convert_unicode=True)
db_session = scoped_session(
sessionmaker(autocommit=False, autoflush=False, bind=engine)
)
Base = declarative_base()
Base.query = db_session.query_property()
class UserModel(Base):
__tablename__ = "users"
id = Column(Integer, primary_key=True)
name = Column(String)
balance = Column(Integer)
class MinAmountModel(Base):
__tablename__ = "min_amount"
amount = Column(Integer, primary_key=True)
class User(SQLAlchemyObjectType):
class Meta:
model = UserModel
class MinAmount(SQLAlchemyObjectType):
class Meta:
model = MinAmountModel
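# A minimal sketch (assumption: not part of the original file) of how the
# SQLAlchemy-backed types above are usually exposed through a graphene schema.
# The query field names below are illustrative, not taken from the project.
import graphene

class Query(graphene.ObjectType):
    users = graphene.List(User)
    min_amounts = graphene.List(MinAmount)

    def resolve_users(self, info):
        return UserModel.query.all()

    def resolve_min_amounts(self, info):
        return MinAmountModel.query.all()

schema = graphene.Schema(query=Query)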
| 2.546875 | 3 |
curlypiv/synthetics/microsig.py | sean-mackenzie/curlypiv | 0 | 4203 | # microsig
"""
Author: <NAME>
More detail about the MicroSIG can be found at:
Website:
https://gitlab.com/defocustracking/microsig-python
Publication:
Rossi M, Synthetic image generator for defocusing and astigmatic PIV/PTV, Meas. Sci. Technol., 31, 017003 (2020)
DOI:10.1088/1361-6501/ab42bb.
"""
import numpy as np
import imageio
import tkinter as tk
import os
from os import listdir
from os.path import isfile, basename, join, isdir
import sys
import glob
# import time as tm
from tkinter import filedialog
# ----- code adapted by <NAME> ------
# 2.0 define class
class CurlypivMicrosigCollection(object):
def __init__(self, testSetup, synCol, use_gui=False,
use_internal_setting=False, setting_file=None,
use_internal_data=False, data_files=None,
to_internal_sequence=False, destination_folder=None,
output_dtype='np.uint16'):
if not isinstance(testSetup, object):
raise ValueError("{} must be a CurlypivTestSetup class object".format(testSetup))
if not isinstance(synCol, object):
raise ValueError("{} must be a CurlypivSyntheticCollection class object".format(synCol))
valid_output_dtype = ['np.uint16', 'np.uint8']
if output_dtype not in valid_output_dtype:
raise ValueError("{} must be one of {}".format(output_dtype, valid_output_dtype))
self.testSetup = testSetup
self.synCol = synCol
self.use_gui = use_gui
self.output_dtype = output_dtype
if self.use_gui:
run()
else:
if use_internal_setting:
self.setting_file = self.synCol.microsigSetup
else:
if not isinstance(setting_file, str):
raise ValueError("{} must be a filepath to microsig settings text file".format(setting_file))
self.setting_file = os.path.abspath(setting_file)
if use_internal_data:
raise ValueError("script to use internal data still in development")
else:
if not isinstance(data_files, str):
raise ValueError("{} must be a filepath to particle location text files".format(data_files))
all_files = glob.glob(data_files + '/*.txt')
save_files = []
for ff in [f for f in all_files if f.endswith('.txt')]:
save_files.append(ff)
save_files.sort()
self.data_files = save_files
if to_internal_sequence:
raise ValueError("script to use internal data still in development")
else:
if not isinstance(destination_folder, str):
raise ValueError("{} must be a filepath to write output images".format(destination_folder))
self.destination_folder = os.path.abspath(destination_folder)
self.generate()
def generate(self):
# %%
mic = {}
f = open(self.setting_file)
for x in f:
words = x.split()
mic[words[0]] = float(words[2])
mic['pixel_dim_x'] = int(mic['pixel_dim_x'])
mic['pixel_dim_y'] = int(mic['pixel_dim_y'])
mic['n_rays'] = int(mic['n_rays'])
# %%
ii = 0;
ii_tot = len(self.data_files)
for data in self.data_files:
ii = ii + 1
print('creating image {0} of {1} ...'.format(ii, ii_tot))
P = np.genfromtxt(data)
if len(P.shape) == 1:
P = np.array([P])
head, tail = os.path.split(data)
I = take_image(mic, P)
if self.output_dtype == 'np.uint16':
imageio.imwrite(os.path.join(self.destination_folder, (tail[:-3] + 'tif')),
np.uint16(I))
elif self.output_dtype == 'np.uint8':
imageio.imwrite(os.path.join(self.destination_folder, (tail[:-3] + 'tif')),
np.uint8(I))
print('done!')
# %%
def sorter(f):
sorting = int(f[:-4])
return sorting
def run():
# %%
root = tk.Tk()
root.attributes('-topmost', True)
root.withdraw()
setting_file = filedialog.askopenfilenames(
title="Select settings file", parent=root,
filetypes=(("txt files", "*.txt"), ("all files", "*.*")))
if not setting_file:
sys.exit('input file not valid')
data_files = filedialog.askopenfilenames(
title="Select data file(s)", parent=root,
filetypes=(("txt files", "*.txt"), ("all files", "*.*")))
if not data_files:
sys.exit('input file not valid')
destination_folder = filedialog.askdirectory(
title="Select destination file", parent=root)
if not destination_folder:
sys.exit('destination folder not valid')
# %%
mic = {}
f = open(setting_file[0])
for x in f:
words = x.split()
mic[words[0]] = float(words[2])
mic['pixel_dim_x'] = int(mic['pixel_dim_x'])
mic['pixel_dim_y'] = int(mic['pixel_dim_y'])
mic['n_rays'] = int(mic['n_rays'])
# %%
ii = 0;
ii_tot = len(data_files)
for data in data_files:
ii = ii + 1
print('creating image {0} of {1} ...'.format(ii, ii_tot))
P = np.genfromtxt(data)
if len(P.shape) == 1:
P = np.array([P])
head, tail = os.path.split(data)
I = take_image(mic, P)
# write the image out, mirroring what generate() above does; uint16 output is
# assumed here because run() does not expose an output dtype option
imageio.imwrite(os.path.join(destination_folder, (tail[:-3] + 'tif')),
np.uint16(I))
print('done!')
# %%
def take_image(mic, P):
# NOTE: x and xp here represent light fields and should not be confused
# with particle image coordinates, which are represented by P
I = np.zeros((mic['pixel_dim_y'], mic['pixel_dim_x']));
dp_s = np.unique(P[:, 3])
if P.shape[1] == 5 or P.shape[1] == 8:
k_id = P[:, -1]
else:
k_id = np.ones(P.shape[0])
if P.shape[1] <= 5 and dp_s.size == 1:
n_points = int(np.round(mic['points_per_pixel'] * 2 * np.pi *
(dp_s * mic['magnification'] / mic['pixel_size']) ** 2))
xp = create_particle(dp_s, n_points, mic['n_rays'])
for ii in range(0, P.shape[0]):
Id = image_spherical(mic, xp, P[ii, 0:3])
I = I + Id * k_id[ii]
elif P.shape[1] <= 5 and dp_s.size != 1:
for ii in range(0, P.shape[0]):
n_points = int(np.round(mic['points_per_pixel'] * 2 * np.pi *
(P[ii, 3] * mic['magnification'] / mic['pixel_size']) ** 2))
xp = create_particle(P[ii, 3], n_points, mic['n_rays'])
Id = image_spherical(mic, xp, P[ii, 0:3])
I = I + Id * k_id[ii]
elif P.shape[1] >= 7:
for ii in range(0, P.shape[0]):
n_points = int(np.round(mic['points_per_pixel'] * 2 * np.pi *
(P[ii, 3] * mic['magnification'] / mic['pixel_size']) ** 2))
ecc = P[ii, 4]
if ecc > 1:
# area elipsoid/area sphere
fact = 1 / 2 * (1 + ecc / np.sqrt(1 - 1 / ecc ** 2)
* np.arcsin(np.sqrt(1 - 1 / ecc ** 2)))
n_points = int(np.round(fact * n_points))
elif ecc < 1:
# area elipsoid/area sphere
fact = 1 / 2 * (1 + ecc ** 2 / np.sqrt(1 - ecc ** 2)
* np.arctan(np.sqrt(1 - ecc ** 2)))
n_points = int(np.round(fact * n_points))
xp = create_ellipsoid(P[ii, 3:7], n_points, mic['n_rays'])
Id = image_spherical(mic, xp, P[ii, 0:3]);
I = I + Id * k_id[ii]
I = I * mic['gain']
if mic['background_mean'] != 0:
I = I + mic['background_mean']
if mic['background_noise'] != 0:
Irand = np.random.normal(0, mic['background_noise'],
(mic['pixel_dim_y'], mic['pixel_dim_x']))
I = I + np.round(Irand)
# I = np.round(I+random('norm',0,mic.background_noise,...
# mic.pixel_dim_y,mic.pixel_dim_x));
return I
# %%
def image_spherical(mic, xp, P1):
# take image of a particle with a spherical lens
# NOTE: x and xp here represent light fields and should not be confused
# with particle image coordinates, which are represented by P1
lens_radius = (np.tan(np.arcsin(mic['numerical_aperture']))
* (1 + 1 / mic['magnification']) * mic['focal_length'])
# distance lens-ccd
dCCD = -mic['focal_length'] * (mic['magnification'] + 1);
# distance particle-lens
dPART = P1[2] + mic['focal_length'] * (1 / mic['magnification'] + 1);
# linear transformation from the object plane to the lens plane
T2 = np.array([[1, 0, dPART, 0],
[0, 1, 0, dPART],
[0, 0, 1, 0],
[0, 0, 0, 1]])
# light field right before the lens
x = np.linalg.inv(T2) @ xp
# remove rays outside of the lens aperture
ind = x[0, :] ** 2 + x[1, :] ** 2 <= lens_radius ** 2
x = x[:, ind]
# transformation of the light field with spherical lens
a = x[0, :];
b = x[1, :]
c = x[2, :];
d = x[3, :]
# radius of curvature of the lens
rk = mic['focal_length'] * (mic['ri_lens'] / mic['ri_medium'] - 1) * 2
dum = a * 0
# refraction medium-lens
# ray-vector befor lens
Vr = np.vstack((1 + dum, c, d))
Vr = (Vr / np.tile(np.sqrt(sum(Vr ** 2)), (3, 1)))
# normal-vector to the lens surface
Vl = np.vstack((rk + dum, a, b))
Vl = (Vl / np.tile(np.sqrt(sum(Vl ** 2)), (3, 1)))
# tangent-vector to the lens surface
Vrot = np.cross(Vr, Vl, axisa=0, axisb=0)
Vrot = np.cross(Vrot, Vl, axisa=1, axisb=0).transpose()
Vrot = Vrot / np.tile(np.sqrt(sum(Vrot ** 2)), (3, 1))
# angle after snell-law correction
vx = np.sum(Vr * Vl, axis=0) # dot product!
vy = np.sum(Vr * Vrot, axis=0) # dot product!
th11 = np.arcsin(mic['ri_medium'] / mic['ri_lens'] *
np.sin(np.arctan(vy / vx)))
# new ray-vector inside the lens
Vr11 = (Vl * np.tile(np.cos(th11), (3, 1)) +
Vrot * np.tile(np.sin(th11), (3, 1)))
Vr = Vr11 / np.tile(Vr11[0, :], (3, 1))
# refraction lens-medium
# normal-vector to the lens surface
Vl2 = np.vstack((Vl[0, :], -Vl[1:, :]))
# tangent-vector to the lens surface
Vrot = np.cross(Vr, Vl2, axisa=0, axisb=0)
Vrot = np.cross(Vrot, Vl2, axisa=1, axisb=0).transpose()
Vrot = Vrot / np.tile(np.sqrt(sum(Vrot ** 2)), (3, 1))
# angle after snell-law correction
vx = np.sum(Vr * Vl2, axis=0) # dot product!
vy = np.sum(Vr * Vrot, axis=0) # dot product!
th11 = np.arcsin(mic['ri_lens'] / mic['ri_medium'] *
np.sin(np.arctan(vy / vx)))
# new ray-vector outside the lens
Vr11 = (Vl2 * np.tile(np.cos(th11), (3, 1)) +
Vrot * np.tile(np.sin(th11), (3, 1)))
Vr = Vr11 / np.tile(Vr11[0, :], (3, 1))
# light field after the spherical lens
x[2, :] = Vr[1, :]
x[3, :] = Vr[2, :]
if mic['cyl_focal_length'] == 0:
# linear transformation from the lens plane to the ccd plane
T1 = np.array([[1, 0, -dCCD, 0],
[0, 1, 0, -dCCD],
[0, 0, 1, 0],
[0, 0, 0, 1]])
# light field at the ccd plane
xs = np.linalg.inv(T1) @ x
else:
# # linear transformation from the lens plane to the cyl_lens plane
T1c = np.array([[1, 0, -dCCD * 1 / 3, 0],
[0, 1, 0, -dCCD * 1 / 3],
[0, 0, 1, 0],
[0, 0, 0, 1]])
# # light field at the cylindrical lens plane
xc = np.linalg.inv(T1c) @ x
# # light field after the cylindrical lens plane
Tc = np.array([[1, 0, 0, 0],
[0, 1, 0, 0],
[-1 / mic['cyl_focal_length'], 0, 1, 0],
[0, 0, 0, 1]])
xc_a = np.linalg.inv(Tc) @ xc
# # light field at the ccd plane
T1 = np.array([[1, 0, -dCCD * 2 / 3, 0],
[0, 1, 0, -dCCD * 2 / 3],
[0, 0, 1, 0],
[0, 0, 0, 1]]);
# # light field at the ccd plane
xs = np.linalg.inv(T1) @ xc_a
# transform the position in pixel units
X = np.round(xs[0, :] / mic['pixel_size'] + P1[0])
Y = np.round(xs[1, :] / mic['pixel_size'] + P1[1])
# remove rays outside the CCD
ind = np.all([X > 0, X <= mic['pixel_dim_x'], Y > 0, Y <= mic['pixel_dim_y'],
X.imag == 0, Y.imag == 0], axis=0)
# count number of rays in each pixel
countXY = np.sort(Y[ind] + (X[ind] - 1) * mic['pixel_dim_y'])
indi, ia = np.unique(countXY, return_index=True)
nCounts = np.hstack((ia[1:], countXY.size + 1)) - ia
# prepare image
I = np.zeros((mic['pixel_dim_y'], mic['pixel_dim_x']))
Ifr = I.flatten('F')
Ifr[indi.astype(int) - 1] = nCounts
I = Ifr.reshape(mic['pixel_dim_y'], mic['pixel_dim_x'], order='F')
return I
# %%
def create_particle(D, Ns, Nr):
R = D / 2
V = spiral_sphere(Ns)
V[0:2, V[0, :] > 0] = -V[0:2, V[0, :] > 0]
x = R * V[0, :]
y = R * V[1, :]
z = R * V[2, :]
V0 = spiral_sphere(Nr + 2)
V0 = V0[:, 1:-1]
u = np.tile(x, (Nr, 1))
v = np.tile(y, (Nr, 1))
s = u * 0
t = u * 0
phs = np.random.uniform(-np.pi, np.pi, z.size)
cs = np.cos(phs)
sn = np.sin(phs)
for k in range(0, Ns):
Rot = np.array([[cs[k], -sn[k], 0],
[sn[k], cs[k], 0], [0, 0, 1]])
Vr = Rot @ V0
Vr[0, :] = -abs(Vr[0, :])
s[:, k] = Vr[1, :] / Vr[0, :]
t[:, k] = Vr[2, :] / Vr[0, :]
u[:, k] = y[k] - s[:, k] * x[k]
v[:, k] = z[k] - t[:, k] * x[k]
xp = np.vstack((u.flatten('F'), v.flatten('F'),
s.flatten('F'), t.flatten('F')))
return xp
# %%
def create_ellipsoid(Deab, Ns, Nr):
D = Deab[0];
ecc = Deab[1]
alpha = Deab[2];
beta = Deab[3]
R = D / 2
V = spiral_sphere(Ns)
V = R * V
V[2, :] = V[2, :] * ecc
R_beta = np.array([[np.cos(beta), 0, np.sin(beta)],
[0, 1, 0],
[-np.sin(beta), 0, np.cos(beta)]])
R_alpha = np.array([[np.cos(alpha), -np.sin(alpha), 0],
[np.sin(alpha), np.cos(alpha), 0],
[0, 0, 1]])
Vf = R_alpha @ (R_beta @ V)
ii1 = (Vf[1, :] == np.min(Vf[1, :])).nonzero()[0][0]
ii2 = (Vf[1, :] == np.max(Vf[1, :])).nonzero()[0][0]
ii3 = (Vf[2, :] == np.min(Vf[2, :])).nonzero()[0][0]
ii4 = (Vf[2, :] == np.max(Vf[2, :])).nonzero()[0][0]
Vdum = Vf[:, [ii1, ii2, ii3, ii4]]
A = np.c_[Vdum[1, :], Vdum[2, :], np.ones(Vdum.shape[1])]
C, _, _, _ = np.linalg.lstsq(A, Vdum[0, :], rcond=None)
V1dum = C[0] * Vf[1, :] + C[1] * Vf[2, :] + C[2]
ind = (Vf[0, :] - V1dum) < 0
x = Vf[0, ind]
y = Vf[1, ind]
z = Vf[2, ind]
Ns = z.size
V0 = spiral_sphere(Nr + 2)
V0 = V0[:, 1:-1]
u = np.tile(x, (Nr, 1))
v = np.tile(y, (Nr, 1))
s = u * 0
t = u * 0
phs = np.random.uniform(-np.pi, np.pi, z.size)
cs = np.cos(phs)
sn = np.sin(phs)
for k in range(0, Ns):
Rot = np.array([[cs[k], -sn[k], 0],
[sn[k], cs[k], 0], [0, 0, 1]])
Vr = Rot @ V0
Vr[0, :] = -abs(Vr[0, :])
s[:, k] = Vr[1, :] / Vr[0, :]
t[:, k] = Vr[2, :] / Vr[0, :]
u[:, k] = y[k] - s[:, k] * x[k]
v[:, k] = z[k] - t[:, k] * x[k]
xp = np.vstack((u.flatten('F'), v.flatten('F'),
s.flatten('F'), t.flatten('F')))
return xp
# %%
def spiral_sphere(N):
gr = (1 + np.sqrt(5)) / 2 # golden ratio
ga = 2 * np.pi * (1 - 1 / gr) # golden angle
ind_p = np.arange(0, N) # particle (i.e., point sample) index
lat = np.arccos(1 - 2 * ind_p / (
N - 1)) # latitude is defined so that particle index is proportional to surface area between 0 and lat
lon = ind_p * ga # position particles at even intervals along longitude
# Convert from spherical to Cartesian co-ordinates
x = np.sin(lat) * np.cos(lon)
y = np.sin(lat) * np.sin(lon)
z = np.cos(lat)
V = np.vstack((x, y, z))
return V
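# Illustrative helper (not part of the original script): sanity-check that the
# golden-angle construction above returns N unit vectors, i.e. every column of
# V lies on the unit sphere.
def _check_spiral_sphere(n=100):
    V = spiral_sphere(n)
    assert V.shape == (3, n)
    assert np.allclose(np.sum(V ** 2, axis=0), 1.0)
    return V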
# %%
if __name__ == '__main__':
run()
| 2.328125 | 2 |
planning/scenario_planning/lane_driving/motion_planning/obstacle_avoidance_planner/scripts/trajectory_visualizer.py | kmiya/AutowareArchitectureProposal.iv | 0 | 4204 | <gh_stars>0
# Copyright 2020 Tier IV, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# !/usr/bin/env python
# -*- coding: utf-8 -*-
# TODO(<NAME>): write ros2 visualizer
# import rospy
# from autoware_planning_msgs.msg import Trajectory
# from autoware_planning_msgs.msg import TrajectoryPoint
# import matplotlib.pyplot as plt
# import numpy as np
# import tf
# from geometry_msgs.msg import Vector3
# def quaternion_to_euler(quaternion):
# """Convert Quaternion to Euler Angles
# quaternion: geometry_msgs/Quaternion
# euler: geometry_msgs/Vector3
# """
# e = tf.transformations.euler_from_quaternion(
# (quaternion.x, quaternion.y, quaternion.z, quaternion.w))
# return Vector3(x=e[0], y=e[1], z=e[2])
# class TrajectoryVisualizer():
# def __init__(self):
# self.in_trajectory = Trajectory()
# self.debug_trajectory = Trajectory()
# self.debug_fixed_trajectory = Trajectory()
# self.plot_done1 = True
# self.plot_done2 = True
# self.plot_done3 = True
# self.length = 50
# self.substatus1 = rospy.Subscriber(
# "/planning/scenario_planning/lane_driving/motion_planning/obstacle_avoidance_planner/trajectory",
# Trajectory, self.CallBackTraj, queue_size=1, tcp_nodelay=True)
# rospy.Timer(rospy.Duration(0.3), self.timerCallback)
# def CallBackTraj(self, cmd):
# if (self.plot_done1):
# self.in_trajectory = cmd
# self.plot_done1 = False
# def CallBackDebugTraj(self, cmd):
# if (self.plot_done2):
# self.debug_trajectory = cmd
# self.plot_done2 = False
# def CallBackDebugFixedTraj(self, cmd):
# if (self.plot_done3):
# self.debug_fixed_trajectory = cmd
# self.plot_done3 = False
# def timerCallback(self, event):
# self.plotTrajectory()
# self.plot_done1 = True
# self.plot_done2 = True
# self.plot_done3 = True
# def CalcArcLength(self, traj):
# s_arr = []
# ds = 0.0
# s_sum = 0.0
# if len(traj.points) > 0:
# s_arr.append(s_sum)
# for i in range(1, len(traj.points)):
# p0 = traj.points[i-1]
# p1 = traj.points[i]
# dx = p1.pose.position.x - p0.pose.position.x
# dy = p1.pose.position.y - p0.pose.position.y
# ds = np.sqrt(dx**2 + dy**2)
# s_sum += ds
# if(s_sum > self.length):
# break
# s_arr.append(s_sum)
# return s_arr
# def CalcX(self, traj):
# v_list = []
# for p in traj.points:
# v_list.append(p.pose.position.x)
# return v_list
# def CalcY(self, traj):
# v_list = []
# for p in traj.points:
# v_list.append(p.pose.position.y)
# return v_list
# def CalcYaw(self, traj, s_arr):
# v_list = []
# for p in traj.points:
# v_list.append(quaternion_to_euler(p.pose.orientation).z)
# return v_list[0: len(s_arr)]
# def plotTrajectory(self):
# plt.clf()
# ax3 = plt.subplot(1, 1, 1)
# x = self.CalcArcLength(self.in_trajectory)
# y = self.CalcYaw(self.in_trajectory, x)
# if len(x) == len(y):
# ax3.plot(x, y, label="final", marker="*")
# ax3.set_xlabel("arclength [m]")
# ax3.set_ylabel("yaw")
# plt.pause(0.01)
# def main():
# rospy.init_node("trajectory_visualizer")
# TrajectoryVisualizer()
# rospy.spin()
# if __name__ == "__main__":
# main()
| 2.375 | 2 |
main/forms.py | agokhale11/test2 | 0 | 4205 | from django.contrib.auth.forms import AuthenticationForm, UserCreationForm
from django.contrib.auth.models import User
from django import forms
class UploadFileForm(forms.Form):
title = forms.CharField(max_length=50)
file = forms.FileField()
# Override the default widgets with Bootstrap's 'form-control' class; without this the forms don't pick up Bootstrap CSS styling
class LoginForm(AuthenticationForm):
username = forms.CharField(label="Username", max_length=16,
widget=forms.TextInput(attrs={'class': 'form-control', 'name': 'username'}))
password = forms.CharField(label="Password", max_length=16,
widget=forms.PasswordInput(attrs={'class': 'form-control', 'name': 'password'}))
class SignUpForm(UserCreationForm):
full_name = forms.CharField(label="Full Name", max_length=50,
widget=forms.TextInput(attrs={'class': 'form-control', 'name': 'full_name'}))
email = forms.EmailField(label="Email", max_length=50, widget=forms.EmailInput(attrs={'class': 'form-control', 'name': 'email'}))
class Meta:
model = User
fields = ("email", "full_name", "username", "<PASSWORD>", "<PASSWORD>")
def save(self, commit=True):
user = super(SignUpForm, self).save(commit=False)
user.full_name = self.cleaned_data["full_name"]
user.email = self.cleaned_data["email"]
if commit:
user.save()
return user
class EmailSignupForm(UserCreationForm):
full_name = forms.CharField(label="Full Name", max_length=50,
widget=forms.TextInput(attrs={'class': 'form-control', 'name': 'full_name'}))
class Meta:
model = User
fields = ("full_name", "username", "<PASSWORD>", "<PASSWORD>")
def save(self, commit=True):
user = super(EmailSignupForm, self).save(commit=False)
user.full_name = self.cleaned_data["full_name"]
if commit:
user.save()
return user
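# A minimal sketch (assumption: not part of the original app) of how SignUpForm
# might be wired into a view; the 'signup.html' template and 'login' URL name
# are hypothetical.
from django.shortcuts import render, redirect

def signup_view(request):
    if request.method == "POST":
        form = SignUpForm(request.POST)
        if form.is_valid():
            form.save()
            return redirect("login")
    else:
        form = SignUpForm()
    return render(request, "signup.html", {"form": form})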
class ChangePasswordForm(forms.Form):
security_code = forms.CharField(label="Security Code", max_length=50,
widget=forms.TextInput(attrs={'class': 'form-control', 'name': 'security_code'}))
password1 = forms.CharField(label="New Password", max_length=16,
widget=forms.PasswordInput(attrs={'class': 'form-control', 'name': 'password1'}))
password2 = forms.CharField(label="Re-enter New Password", max_length=16,
widget=forms.PasswordInput(attrs={'class': 'form-control', 'name': 'password2'}))
class Meta:
fields = ("security_code", "password1", "password2") | 2.546875 | 3 |
pandas 9 - Statistics Information on data sets.py | PythonProgramming/Pandas-Basics-with-2.7 | 10 | 4206 | import pandas as pd
from pandas import DataFrame
df = pd.read_csv('sp500_ohlc.csv', index_col = 'Date', parse_dates=True)
df['H-L'] = df.High - df.Low
# Gives us count (rows), mean (avg), std (standard deviation for the entire
# set), the minimum, the maximum, and the 25%/50%/75% percentiles in between.
print( df.describe())
x = input('enter to cont')
# gives us correlation data. Remember the 3d chart we plotted?
# now you can see whether H-L (the daily range) and Volume are correlated
# with each other and with price swings. Correlations for your correlations
print( df.corr())
x = input('enter to cont')
# covariance... now plenty of people know what correlation is, but what in the
# heck is covariance?
# Let's define the two.
# covariance is a measure of how two variables change together, expressed in the units of the data.
# correlation is covariance rescaled by the standard deviations of the two variables,
# so it always falls between -1 and 1 and measures the strength of the relationship.
print( df.cov())
x = input('enter to cont')
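# Illustrative aside (not in the original tutorial): correlation is just
# covariance rescaled by the standard deviations, so the two tables above are
# directly related. Column names match the DataFrame built earlier.
cov_hl_vol = df['H-L'].cov(df['Volume'])
corr_check = cov_hl_vol / (df['H-L'].std() * df['Volume'].std())
print( corr_check)  # matches df.corr().loc['H-L', 'Volume']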
print( df[['Volume','H-L']].corr())
x = input('enter to cont')
# see how it makes a table?
# so now, we can actually perform a service that some people actually pay for
# I once had a short freelance gig doing this
# so a popular form of analysis, especially within forex, is to compare correlations between
# the currencies. The idea here is that you pair one currency with another.
#
import datetime
import pandas.io.data
C = pd.io.data.get_data_yahoo('C',
start=datetime.datetime(2011, 10, 1),
end=datetime.datetime(2014, 1, 1))
AAPL = pd.io.data.get_data_yahoo('AAPL',
start=datetime.datetime(2011, 10, 1),
end=datetime.datetime(2014, 1, 1))
MSFT = pd.io.data.get_data_yahoo('MSFT',
start=datetime.datetime(2011, 10, 1),
end=datetime.datetime(2014, 1, 1))
TSLA = pd.io.data.get_data_yahoo('TSLA',
start=datetime.datetime(2011, 10, 1),
end=datetime.datetime(2014, 1, 1))
print( C.head())
x = input('enter to cont')
del C['Open']
# , 'high', 'low', 'close', 'volume'
del C['High']
del C['Low']
del C['Close']
del C['Volume']
corComp = C
corComp.rename(columns={'Adj Close': 'C'}, inplace=True)
corComp['AAPL'] = AAPL['Adj Close']
corComp['MSFT'] = MSFT['Adj Close']
corComp['TSLA'] = TSLA['Adj Close']
print( corComp.head())
x = input('enter to cont')
print( corComp.corr())
x = input('enter to cont')
C = pd.io.data.get_data_yahoo('C',
start=datetime.datetime(2011, 10, 1),
end=datetime.datetime(2014, 1, 1))
AAPL = pd.io.data.get_data_yahoo('AAPL',
start=datetime.datetime(2011, 10, 1),
end=datetime.datetime(2014, 1, 1))
MSFT = pd.io.data.get_data_yahoo('MSFT',
start=datetime.datetime(2011, 10, 1),
end=datetime.datetime(2014, 1, 1))
TSLA = pd.io.data.get_data_yahoo('TSLA',
start=datetime.datetime(2011, 10, 1),
end=datetime.datetime(2014, 1, 1))
BAC = pd.io.data.get_data_yahoo('BAC',
start=datetime.datetime(2011, 10, 1),
end=datetime.datetime(2014, 1, 1))
BBRY = pd.io.data.get_data_yahoo('BBRY',
start=datetime.datetime(2011, 10, 1),
end=datetime.datetime(2014, 1, 1))
CMG = pd.io.data.get_data_yahoo('CMG',
start=datetime.datetime(2011, 10, 1),
end=datetime.datetime(2014, 1, 1))
EBAY = pd.io.data.get_data_yahoo('EBAY',
start=datetime.datetime(2011, 10, 1),
end=datetime.datetime(2014, 1, 1))
JPM = pd.io.data.get_data_yahoo('JPM',
start=datetime.datetime(2011, 10, 1),
end=datetime.datetime(2014, 1, 1))
SBUX = pd.io.data.get_data_yahoo('SBUX',
start=datetime.datetime(2011, 10, 1),
end=datetime.datetime(2014, 1, 1))
TGT = pd.io.data.get_data_yahoo('TGT',
start=datetime.datetime(2011, 10, 1),
end=datetime.datetime(2014, 1, 1))
WFC = pd.io.data.get_data_yahoo('WFC',
start=datetime.datetime(2011, 10, 1),
end=datetime.datetime(2014, 1, 1))
x = input('enter to cont')
print( C.head())
del C['Open']
# , 'high', 'low', 'close', 'volume'
del C['High']
del C['Low']
del C['Close']
del C['Volume']
corComp = C
corComp.rename(columns={'Adj Close': 'C'}, inplace=True)
corComp['BAC'] = BAC['Adj Close']
corComp['MSFT'] = MSFT['Adj Close']
corComp['TSLA'] = TSLA['Adj Close']
corComp['AAPL'] = AAPL['Adj Close']
corComp['BBRY'] = BBRY['Adj Close']
corComp['CMG'] = CMG['Adj Close']
corComp['EBAY'] = EBAY['Adj Close']
corComp['JPM'] = JPM['Adj Close']
corComp['SBUX'] = SBUX['Adj Close']
corComp['TGT'] = TGT['Adj Close']
corComp['WFC'] = WFC['Adj Close']
print( corComp.head())
x = input('enter to cont')
print( corComp.corr())
x = input('enter to cont')
fancy = corComp.corr()
fancy.to_csv('bigmoney.csv')
| 4.125 | 4 |
working/tkinter_widget/test.py | songdaegeun/school-zone-enforcement-system | 0 | 4207 | import cv2
import numpy as np
import threading
def test():
while 1:
img1=cv2.imread('captured car1.jpg')
print("{}".format(img1.shape))
print("{}".format(img1))
cv2.imshow('asd',img1)
cv2.waitKey(1)
t1 = threading.Thread(target=test)
t1.start()
| 3.015625 | 3 |
ceilometer/compute/virt/hyperv/utilsv2.py | aristanetworks/ceilometer | 2 | 4208 | # Copyright 2013 Cloudbase Solutions Srl
#
# Author: <NAME> <<EMAIL>>
# <NAME> <<EMAIL>>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Utility class for VM related operations.
Based on the "root/virtualization/v2" namespace available starting with
Hyper-V Server / Windows Server 2012.
"""
import sys
if sys.platform == 'win32':
import wmi
from oslo.config import cfg
from ceilometer.compute.virt import inspector
from ceilometer.openstack.common.gettextutils import _
from ceilometer.openstack.common import log as logging
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
class HyperVException(inspector.InspectorException):
pass
class UtilsV2(object):
_VIRTUAL_SYSTEM_TYPE_REALIZED = 'Microsoft:Hyper-V:System:Realized'
_PROC_SETTING = 'Msvm_ProcessorSettingData'
_SYNTH_ETH_PORT = 'Msvm_SyntheticEthernetPortSettingData'
_ETH_PORT_ALLOC = 'Msvm_EthernetPortAllocationSettingData'
_PORT_ACL_SET_DATA = 'Msvm_EthernetSwitchPortAclSettingData'
_STORAGE_ALLOC = 'Msvm_StorageAllocationSettingData'
_VS_SETTING_DATA = 'Msvm_VirtualSystemSettingData'
_METRICS_ME = 'Msvm_MetricForME'
_BASE_METRICS_VALUE = 'Msvm_BaseMetricValue'
_CPU_METRIC_NAME = 'Aggregated Average CPU Utilization'
_NET_IN_METRIC_NAME = 'Filtered Incoming Network Traffic'
_NET_OUT_METRIC_NAME = 'Filtered Outgoing Network Traffic'
# Disk metrics are supported from Hyper-V 2012 R2
_DISK_RD_METRIC_NAME = 'Disk Data Read'
_DISK_WR_METRIC_NAME = 'Disk Data Written'
def __init__(self, host='.'):
if sys.platform == 'win32':
self._init_hyperv_wmi_conn(host)
self._init_cimv2_wmi_conn(host)
self._host_cpu_info = None
def _init_hyperv_wmi_conn(self, host):
self._conn = wmi.WMI(moniker='//%s/root/virtualization/v2' % host)
def _init_cimv2_wmi_conn(self, host):
self._conn_cimv2 = wmi.WMI(moniker='//%s/root/cimv2' % host)
def get_host_cpu_info(self):
if not self._host_cpu_info:
host_cpus = self._conn_cimv2.Win32_Processor()
self._host_cpu_info = (host_cpus[0].MaxClockSpeed, len(host_cpus))
return self._host_cpu_info
def get_all_vms(self):
vms = [(v.ElementName, v.Name) for v in
self._conn.Msvm_ComputerSystem(['ElementName', 'Name'],
Caption="Virtual Machine")]
return vms
def get_cpu_metrics(self, vm_name):
vm = self._lookup_vm(vm_name)
cpu_sd = self._get_vm_resources(vm, self._PROC_SETTING)[0]
cpu_metrics_def = self._get_metric_def(self._CPU_METRIC_NAME)
cpu_metric_aggr = self._get_metrics(vm, cpu_metrics_def)
cpu_used = 0
if cpu_metric_aggr:
cpu_used = long(cpu_metric_aggr[0].MetricValue)
return (cpu_used,
int(cpu_sd.VirtualQuantity),
long(vm.OnTimeInMilliseconds))
def get_vnic_metrics(self, vm_name):
vm = self._lookup_vm(vm_name)
ports = self._get_vm_resources(vm, self._ETH_PORT_ALLOC)
vnics = self._get_vm_resources(vm, self._SYNTH_ETH_PORT)
metric_def_in = self._get_metric_def(self._NET_IN_METRIC_NAME)
metric_def_out = self._get_metric_def(self._NET_OUT_METRIC_NAME)
for port in ports:
vnic = [v for v in vnics if port.Parent == v.path_()][0]
metric_value_instances = self._get_metric_value_instances(
port.associators(wmi_result_class=self._PORT_ACL_SET_DATA),
self._BASE_METRICS_VALUE)
metric_values = self._sum_metric_values_by_defs(
metric_value_instances, [metric_def_in, metric_def_out])
yield {
'rx_mb': metric_values[0],
'tx_mb': metric_values[1],
'element_name': vnic.ElementName,
'address': vnic.Address
}
def get_disk_metrics(self, vm_name):
vm = self._lookup_vm(vm_name)
metric_def_r = self._get_metric_def(self._DISK_RD_METRIC_NAME)
metric_def_w = self._get_metric_def(self._DISK_WR_METRIC_NAME)
disks = self._get_vm_resources(vm, self._STORAGE_ALLOC)
for disk in disks:
metric_values = self._get_metric_values(
disk, [metric_def_r, metric_def_w])
# This is e.g. the VHD file location
if disk.HostResource:
host_resource = disk.HostResource[0]
yield {
# Values are in megabytes
'read_mb': metric_values[0],
'write_mb': metric_values[1],
'instance_id': disk.InstanceID,
'host_resource': host_resource
}
def _sum_metric_values(self, metrics):
tot_metric_val = 0
for metric in metrics:
tot_metric_val += long(metric.MetricValue)
return tot_metric_val
def _sum_metric_values_by_defs(self, element_metrics, metric_defs):
metric_values = []
for metric_def in metric_defs:
if metric_def:
metrics = self._filter_metrics(element_metrics, metric_def)
metric_values.append(self._sum_metric_values(metrics))
else:
# In case the metric is not defined on this host
metric_values.append(0)
return metric_values
def _get_metric_value_instances(self, elements, result_class):
instances = []
for el in elements:
associators = el.associators(wmi_result_class=result_class)
if associators:
instances.append(associators[0])
return instances
def _get_metric_values(self, element, metric_defs):
element_metrics = element.associators(
wmi_association_class=self._METRICS_ME)
return self._sum_metric_values_by_defs(element_metrics, metric_defs)
def _lookup_vm(self, vm_name):
vms = self._conn.Msvm_ComputerSystem(ElementName=vm_name)
n = len(vms)
if n == 0:
raise inspector.InstanceNotFoundException(
_('VM %s not found on Hyper-V') % vm_name)
elif n > 1:
raise HyperVException(_('Duplicate VM name found: %s') % vm_name)
else:
return vms[0]
def _get_metrics(self, element, metric_def):
return self._filter_metrics(
element.associators(
wmi_association_class=self._METRICS_ME), metric_def)
def _filter_metrics(self, all_metrics, metric_def):
return [v for v in all_metrics if
v.MetricDefinitionId == metric_def.Id]
def _get_metric_def(self, metric_def):
metric = self._conn.CIM_BaseMetricDefinition(ElementName=metric_def)
if metric:
return metric[0]
def _get_vm_setting_data(self, vm):
vm_settings = vm.associators(
wmi_result_class=self._VS_SETTING_DATA)
# Avoid snapshots
return [s for s in vm_settings if
s.VirtualSystemType == self._VIRTUAL_SYSTEM_TYPE_REALIZED][0]
def _get_vm_resources(self, vm, resource_class):
setting_data = self._get_vm_setting_data(vm)
return setting_data.associators(wmi_result_class=resource_class)
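# Illustrative sketch (not part of the original module; requires Windows with
# the Hyper-V WMI namespace): how the helpers above are typically driven.
def _print_vm_cpu_metrics(host='.'):
    utils = UtilsV2(host)
    for element_name, name in utils.get_all_vms():
        cpu_used, cpu_count, uptime_ms = utils.get_cpu_metrics(element_name)
        print('%s: %s vCPU, aggregated CPU metric %s, uptime %s ms'
              % (element_name, cpu_count, cpu_used, uptime_ms))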
| 1.664063 | 2 |
src/cli.py | cajones314/avocd2019 | 0 | 4209 | # system
from io import IOBase, StringIO
import os
# 3rd party
import click
# internal
from days import DayFactory
# import logging
# logger = logging.getLogger(__name__)
# logger.setLevel(logging.DEBUG)
# ch = logging.StreamHandler()
# logger.addHandler(ch)
@click.group(invoke_without_command=True)
@click.option('-d', '--day', required=True, type=click.IntRange(1, 31), metavar="<1..31>", help="Day you want to select.")
@click.option('-p', '--puzzle', required=True, type=click.IntRange(1, 2), metavar="<1|2>", help="Puzzle you want to run.")
@click.option('-i', '--input', required=True, type=click.Path(exists=True), help="Path to puzzle data.")
def cli(day: int, puzzle: int, input: str):
filename = os.path.join(input, f"{day:02}_puzzle_{puzzle}.txt")
if os.path.exists(filename):
input_stream = open(filename, "r")
else:
input_stream = StringIO('')
avocd = DayFactory(day, input_stream)
try:
print(avocd.run(puzzle))
except NotImplementedError:
print(f"Puzzle {puzzle} for day {day} not implemented.")
if __name__ == "__main__":
# pylint: disable=no-value-for-parameter
cli()
| 2.625 | 3 |
option_c.py | wrosecrans/colormap | 231 | 4210 |
from matplotlib.colors import LinearSegmentedColormap
from numpy import nan, inf
# Used to reconstruct the colormap in viscm
parameters = {'xp': [-5.4895292543686764, 14.790571669586654, 82.5546687431056, 29.15531114139253, -4.1316769886951761, -13.002076438907238],
'yp': [-35.948168839230306, -42.273376159885785, -28.845467523197698, 52.03426124197, 36.832712600868973, 40.792291220556734],
'min_JK': 16.8314150305,
'max_JK': 95}
cm_data = [[ 5.03832136e-02, 2.98028976e-02, 5.27974883e-01],
[ 6.35363639e-02, 2.84259729e-02, 5.33123681e-01],
[ 7.53531234e-02, 2.72063728e-02, 5.38007001e-01],
[ 8.62217979e-02, 2.61253206e-02, 5.42657691e-01],
[ 9.63786097e-02, 2.51650976e-02, 5.47103487e-01],
[ 1.05979704e-01, 2.43092436e-02, 5.51367851e-01],
[ 1.15123641e-01, 2.35562500e-02, 5.55467728e-01],
[ 1.23902903e-01, 2.28781011e-02, 5.59423480e-01],
[ 1.32380720e-01, 2.22583774e-02, 5.63250116e-01],
[ 1.40603076e-01, 2.16866674e-02, 5.66959485e-01],
[ 1.48606527e-01, 2.11535876e-02, 5.70561711e-01],
[ 1.56420649e-01, 2.06507174e-02, 5.74065446e-01],
[ 1.64069722e-01, 2.01705326e-02, 5.77478074e-01],
[ 1.71573925e-01, 1.97063415e-02, 5.80805890e-01],
[ 1.78950212e-01, 1.92522243e-02, 5.84054243e-01],
[ 1.86212958e-01, 1.88029767e-02, 5.87227661e-01],
[ 1.93374449e-01, 1.83540593e-02, 5.90329954e-01],
[ 2.00445260e-01, 1.79015512e-02, 5.93364304e-01],
[ 2.07434551e-01, 1.74421086e-02, 5.96333341e-01],
[ 2.14350298e-01, 1.69729276e-02, 5.99239207e-01],
[ 2.21196750e-01, 1.64970484e-02, 6.02083323e-01],
[ 2.27982971e-01, 1.60071509e-02, 6.04867403e-01],
[ 2.34714537e-01, 1.55015065e-02, 6.07592438e-01],
[ 2.41396253e-01, 1.49791041e-02, 6.10259089e-01],
[ 2.48032377e-01, 1.44393586e-02, 6.12867743e-01],
[ 2.54626690e-01, 1.38820918e-02, 6.15418537e-01],
[ 2.61182562e-01, 1.33075156e-02, 6.17911385e-01],
[ 2.67702993e-01, 1.27162163e-02, 6.20345997e-01],
[ 2.74190665e-01, 1.21091423e-02, 6.22721903e-01],
[ 2.80647969e-01, 1.14875915e-02, 6.25038468e-01],
[ 2.87076059e-01, 1.08554862e-02, 6.27294975e-01],
[ 2.93477695e-01, 1.02128849e-02, 6.29490490e-01],
[ 2.99855122e-01, 9.56079551e-03, 6.31623923e-01],
[ 3.06209825e-01, 8.90185346e-03, 6.33694102e-01],
[ 3.12543124e-01, 8.23900704e-03, 6.35699759e-01],
[ 3.18856183e-01, 7.57551051e-03, 6.37639537e-01],
[ 3.25150025e-01, 6.91491734e-03, 6.39512001e-01],
[ 3.31425547e-01, 6.26107379e-03, 6.41315649e-01],
[ 3.37683446e-01, 5.61830889e-03, 6.43048936e-01],
[ 3.43924591e-01, 4.99053080e-03, 6.44710195e-01],
[ 3.50149699e-01, 4.38202557e-03, 6.46297711e-01],
[ 3.56359209e-01, 3.79781761e-03, 6.47809772e-01],
[ 3.62553473e-01, 3.24319591e-03, 6.49244641e-01],
[ 3.68732762e-01, 2.72370721e-03, 6.50600561e-01],
[ 3.74897270e-01, 2.24514897e-03, 6.51875762e-01],
[ 3.81047116e-01, 1.81356205e-03, 6.53068467e-01],
[ 3.87182639e-01, 1.43446923e-03, 6.54176761e-01],
[ 3.93304010e-01, 1.11388259e-03, 6.55198755e-01],
[ 3.99410821e-01, 8.59420809e-04, 6.56132835e-01],
[ 4.05502914e-01, 6.78091517e-04, 6.56977276e-01],
[ 4.11580082e-01, 5.77101735e-04, 6.57730380e-01],
[ 4.17642063e-01, 5.63847476e-04, 6.58390492e-01],
[ 4.23688549e-01, 6.45902780e-04, 6.58956004e-01],
[ 4.29719186e-01, 8.31008207e-04, 6.59425363e-01],
[ 4.35733575e-01, 1.12705875e-03, 6.59797077e-01],
[ 4.41732123e-01, 1.53984779e-03, 6.60069009e-01],
[ 4.47713600e-01, 2.07954744e-03, 6.60240367e-01],
[ 4.53677394e-01, 2.75470302e-03, 6.60309966e-01],
[ 4.59622938e-01, 3.57374415e-03, 6.60276655e-01],
[ 4.65549631e-01, 4.54518084e-03, 6.60139383e-01],
[ 4.71456847e-01, 5.67758762e-03, 6.59897210e-01],
[ 4.77343929e-01, 6.97958743e-03, 6.59549311e-01],
[ 4.83210198e-01, 8.45983494e-03, 6.59094989e-01],
[ 4.89054951e-01, 1.01269996e-02, 6.58533677e-01],
[ 4.94877466e-01, 1.19897486e-02, 6.57864946e-01],
[ 5.00677687e-01, 1.40550640e-02, 6.57087561e-01],
[ 5.06454143e-01, 1.63333443e-02, 6.56202294e-01],
[ 5.12206035e-01, 1.88332232e-02, 6.55209222e-01],
[ 5.17932580e-01, 2.15631918e-02, 6.54108545e-01],
[ 5.23632990e-01, 2.45316468e-02, 6.52900629e-01],
[ 5.29306474e-01, 2.77468735e-02, 6.51586010e-01],
[ 5.34952244e-01, 3.12170300e-02, 6.50165396e-01],
[ 5.40569510e-01, 3.49501310e-02, 6.48639668e-01],
[ 5.46157494e-01, 3.89540334e-02, 6.47009884e-01],
[ 5.51715423e-01, 4.31364795e-02, 6.45277275e-01],
[ 5.57242538e-01, 4.73307585e-02, 6.43443250e-01],
[ 5.62738096e-01, 5.15448092e-02, 6.41509389e-01],
[ 5.68201372e-01, 5.57776706e-02, 6.39477440e-01],
[ 5.73631859e-01, 6.00281369e-02, 6.37348841e-01],
[ 5.79028682e-01, 6.42955547e-02, 6.35126108e-01],
[ 5.84391137e-01, 6.85790261e-02, 6.32811608e-01],
[ 5.89718606e-01, 7.28775875e-02, 6.30407727e-01],
[ 5.95010505e-01, 7.71902878e-02, 6.27916992e-01],
[ 6.00266283e-01, 8.15161895e-02, 6.25342058e-01],
[ 6.05485428e-01, 8.58543713e-02, 6.22685703e-01],
[ 6.10667469e-01, 9.02039303e-02, 6.19950811e-01],
[ 6.15811974e-01, 9.45639838e-02, 6.17140367e-01],
[ 6.20918555e-01, 9.89336721e-02, 6.14257440e-01],
[ 6.25986869e-01, 1.03312160e-01, 6.11305174e-01],
[ 6.31016615e-01, 1.07698641e-01, 6.08286774e-01],
[ 6.36007543e-01, 1.12092335e-01, 6.05205491e-01],
[ 6.40959444e-01, 1.16492495e-01, 6.02064611e-01],
[ 6.45872158e-01, 1.20898405e-01, 5.98867442e-01],
[ 6.50745571e-01, 1.25309384e-01, 5.95617300e-01],
[ 6.55579615e-01, 1.29724785e-01, 5.92317494e-01],
[ 6.60374266e-01, 1.34143997e-01, 5.88971318e-01],
[ 6.65129493e-01, 1.38566428e-01, 5.85582301e-01],
[ 6.69845385e-01, 1.42991540e-01, 5.82153572e-01],
[ 6.74522060e-01, 1.47418835e-01, 5.78688247e-01],
[ 6.79159664e-01, 1.51847851e-01, 5.75189431e-01],
[ 6.83758384e-01, 1.56278163e-01, 5.71660158e-01],
[ 6.88318440e-01, 1.60709387e-01, 5.68103380e-01],
[ 6.92840088e-01, 1.65141174e-01, 5.64521958e-01],
[ 6.97323615e-01, 1.69573215e-01, 5.60918659e-01],
[ 7.01769334e-01, 1.74005236e-01, 5.57296144e-01],
[ 7.06177590e-01, 1.78437000e-01, 5.53656970e-01],
[ 7.10548747e-01, 1.82868306e-01, 5.50003579e-01],
[ 7.14883195e-01, 1.87298986e-01, 5.46338299e-01],
[ 7.19181339e-01, 1.91728906e-01, 5.42663338e-01],
[ 7.23443604e-01, 1.96157962e-01, 5.38980786e-01],
[ 7.27670428e-01, 2.00586086e-01, 5.35292612e-01],
[ 7.31862231e-01, 2.05013174e-01, 5.31600995e-01],
[ 7.36019424e-01, 2.09439071e-01, 5.27908434e-01],
[ 7.40142557e-01, 2.13863965e-01, 5.24215533e-01],
[ 7.44232102e-01, 2.18287899e-01, 5.20523766e-01],
[ 7.48288533e-01, 2.22710942e-01, 5.16834495e-01],
[ 7.52312321e-01, 2.27133187e-01, 5.13148963e-01],
[ 7.56303937e-01, 2.31554749e-01, 5.09468305e-01],
[ 7.60263849e-01, 2.35975765e-01, 5.05793543e-01],
[ 7.64192516e-01, 2.40396394e-01, 5.02125599e-01],
[ 7.68090391e-01, 2.44816813e-01, 4.98465290e-01],
[ 7.71957916e-01, 2.49237220e-01, 4.94813338e-01],
[ 7.75795522e-01, 2.53657797e-01, 4.91170517e-01],
[ 7.79603614e-01, 2.58078397e-01, 4.87539124e-01],
[ 7.83382636e-01, 2.62499662e-01, 4.83917732e-01],
[ 7.87132978e-01, 2.66921859e-01, 4.80306702e-01],
[ 7.90855015e-01, 2.71345267e-01, 4.76706319e-01],
[ 7.94549101e-01, 2.75770179e-01, 4.73116798e-01],
[ 7.98215577e-01, 2.80196901e-01, 4.69538286e-01],
[ 8.01854758e-01, 2.84625750e-01, 4.65970871e-01],
[ 8.05466945e-01, 2.89057057e-01, 4.62414580e-01],
[ 8.09052419e-01, 2.93491117e-01, 4.58869577e-01],
[ 8.12611506e-01, 2.97927865e-01, 4.55337565e-01],
[ 8.16144382e-01, 3.02368130e-01, 4.51816385e-01],
[ 8.19651255e-01, 3.06812282e-01, 4.48305861e-01],
[ 8.23132309e-01, 3.11260703e-01, 4.44805781e-01],
[ 8.26587706e-01, 3.15713782e-01, 4.41315901e-01],
[ 8.30017584e-01, 3.20171913e-01, 4.37835947e-01],
[ 8.33422053e-01, 3.24635499e-01, 4.34365616e-01],
[ 8.36801237e-01, 3.29104836e-01, 4.30905052e-01],
[ 8.40155276e-01, 3.33580106e-01, 4.27454836e-01],
[ 8.43484103e-01, 3.38062109e-01, 4.24013059e-01],
[ 8.46787726e-01, 3.42551272e-01, 4.20579333e-01],
[ 8.50066132e-01, 3.47048028e-01, 4.17153264e-01],
[ 8.53319279e-01, 3.51552815e-01, 4.13734445e-01],
[ 8.56547103e-01, 3.56066072e-01, 4.10322469e-01],
[ 8.59749520e-01, 3.60588229e-01, 4.06916975e-01],
[ 8.62926559e-01, 3.65119408e-01, 4.03518809e-01],
[ 8.66077920e-01, 3.69660446e-01, 4.00126027e-01],
[ 8.69203436e-01, 3.74211795e-01, 3.96738211e-01],
[ 8.72302917e-01, 3.78773910e-01, 3.93354947e-01],
[ 8.75376149e-01, 3.83347243e-01, 3.89975832e-01],
[ 8.78422895e-01, 3.87932249e-01, 3.86600468e-01],
[ 8.81442916e-01, 3.92529339e-01, 3.83228622e-01],
[ 8.84435982e-01, 3.97138877e-01, 3.79860246e-01],
[ 8.87401682e-01, 4.01761511e-01, 3.76494232e-01],
[ 8.90339687e-01, 4.06397694e-01, 3.73130228e-01],
[ 8.93249647e-01, 4.11047871e-01, 3.69767893e-01],
[ 8.96131191e-01, 4.15712489e-01, 3.66406907e-01],
[ 8.98983931e-01, 4.20391986e-01, 3.63046965e-01],
[ 9.01807455e-01, 4.25086807e-01, 3.59687758e-01],
[ 9.04601295e-01, 4.29797442e-01, 3.56328796e-01],
[ 9.07364995e-01, 4.34524335e-01, 3.52969777e-01],
[ 9.10098088e-01, 4.39267908e-01, 3.49610469e-01],
[ 9.12800095e-01, 4.44028574e-01, 3.46250656e-01],
[ 9.15470518e-01, 4.48806744e-01, 3.42890148e-01],
[ 9.18108848e-01, 4.53602818e-01, 3.39528771e-01],
[ 9.20714383e-01, 4.58417420e-01, 3.36165582e-01],
[ 9.23286660e-01, 4.63250828e-01, 3.32800827e-01],
[ 9.25825146e-01, 4.68103387e-01, 3.29434512e-01],
[ 9.28329275e-01, 4.72975465e-01, 3.26066550e-01],
[ 9.30798469e-01, 4.77867420e-01, 3.22696876e-01],
[ 9.33232140e-01, 4.82779603e-01, 3.19325444e-01],
[ 9.35629684e-01, 4.87712357e-01, 3.15952211e-01],
[ 9.37990034e-01, 4.92666544e-01, 3.12575440e-01],
[ 9.40312939e-01, 4.97642038e-01, 3.09196628e-01],
[ 9.42597771e-01, 5.02639147e-01, 3.05815824e-01],
[ 9.44843893e-01, 5.07658169e-01, 3.02433101e-01],
[ 9.47050662e-01, 5.12699390e-01, 2.99048555e-01],
[ 9.49217427e-01, 5.17763087e-01, 2.95662308e-01],
[ 9.51343530e-01, 5.22849522e-01, 2.92274506e-01],
[ 9.53427725e-01, 5.27959550e-01, 2.88883445e-01],
[ 9.55469640e-01, 5.33093083e-01, 2.85490391e-01],
[ 9.57468770e-01, 5.38250172e-01, 2.82096149e-01],
[ 9.59424430e-01, 5.43431038e-01, 2.78700990e-01],
[ 9.61335930e-01, 5.48635890e-01, 2.75305214e-01],
[ 9.63202573e-01, 5.53864931e-01, 2.71909159e-01],
[ 9.65023656e-01, 5.59118349e-01, 2.68513200e-01],
[ 9.66798470e-01, 5.64396327e-01, 2.65117752e-01],
[ 9.68525639e-01, 5.69699633e-01, 2.61721488e-01],
[ 9.70204593e-01, 5.75028270e-01, 2.58325424e-01],
[ 9.71835007e-01, 5.80382015e-01, 2.54931256e-01],
[ 9.73416145e-01, 5.85761012e-01, 2.51539615e-01],
[ 9.74947262e-01, 5.91165394e-01, 2.48151200e-01],
[ 9.76427606e-01, 5.96595287e-01, 2.44766775e-01],
[ 9.77856416e-01, 6.02050811e-01, 2.41387186e-01],
[ 9.79232922e-01, 6.07532077e-01, 2.38013359e-01],
[ 9.80556344e-01, 6.13039190e-01, 2.34646316e-01],
[ 9.81825890e-01, 6.18572250e-01, 2.31287178e-01],
[ 9.83040742e-01, 6.24131362e-01, 2.27937141e-01],
[ 9.84198924e-01, 6.29717516e-01, 2.24595006e-01],
[ 9.85300760e-01, 6.35329876e-01, 2.21264889e-01],
[ 9.86345421e-01, 6.40968508e-01, 2.17948456e-01],
[ 9.87332067e-01, 6.46633475e-01, 2.14647532e-01],
[ 9.88259846e-01, 6.52324832e-01, 2.11364122e-01],
[ 9.89127893e-01, 6.58042630e-01, 2.08100426e-01],
[ 9.89935328e-01, 6.63786914e-01, 2.04858855e-01],
[ 9.90681261e-01, 6.69557720e-01, 2.01642049e-01],
[ 9.91364787e-01, 6.75355082e-01, 1.98452900e-01],
[ 9.91984990e-01, 6.81179025e-01, 1.95294567e-01],
[ 9.92540939e-01, 6.87029567e-01, 1.92170500e-01],
[ 9.93031693e-01, 6.92906719e-01, 1.89084459e-01],
[ 9.93456302e-01, 6.98810484e-01, 1.86040537e-01],
[ 9.93813802e-01, 7.04740854e-01, 1.83043180e-01],
[ 9.94103226e-01, 7.10697814e-01, 1.80097207e-01],
[ 9.94323596e-01, 7.16681336e-01, 1.77207826e-01],
[ 9.94473934e-01, 7.22691379e-01, 1.74380656e-01],
[ 9.94553260e-01, 7.28727890e-01, 1.71621733e-01],
[ 9.94560594e-01, 7.34790799e-01, 1.68937522e-01],
[ 9.94494964e-01, 7.40880020e-01, 1.66334918e-01],
[ 9.94355411e-01, 7.46995448e-01, 1.63821243e-01],
[ 9.94140989e-01, 7.53136955e-01, 1.61404226e-01],
[ 9.93850778e-01, 7.59304390e-01, 1.59091984e-01],
[ 9.93482190e-01, 7.65498551e-01, 1.56890625e-01],
[ 9.93033251e-01, 7.71719833e-01, 1.54807583e-01],
[ 9.92505214e-01, 7.77966775e-01, 1.52854862e-01],
[ 9.91897270e-01, 7.84239120e-01, 1.51041581e-01],
[ 9.91208680e-01, 7.90536569e-01, 1.49376885e-01],
[ 9.90438793e-01, 7.96858775e-01, 1.47869810e-01],
[ 9.89587065e-01, 8.03205337e-01, 1.46529128e-01],
[ 9.88647741e-01, 8.09578605e-01, 1.45357284e-01],
[ 9.87620557e-01, 8.15977942e-01, 1.44362644e-01],
[ 9.86509366e-01, 8.22400620e-01, 1.43556679e-01],
[ 9.85314198e-01, 8.28845980e-01, 1.42945116e-01],
[ 9.84031139e-01, 8.35315360e-01, 1.42528388e-01],
[ 9.82652820e-01, 8.41811730e-01, 1.42302653e-01],
[ 9.81190389e-01, 8.48328902e-01, 1.42278607e-01],
[ 9.79643637e-01, 8.54866468e-01, 1.42453425e-01],
[ 9.77994918e-01, 8.61432314e-01, 1.42808191e-01],
[ 9.76264977e-01, 8.68015998e-01, 1.43350944e-01],
[ 9.74443038e-01, 8.74622194e-01, 1.44061156e-01],
[ 9.72530009e-01, 8.81250063e-01, 1.44922913e-01],
[ 9.70532932e-01, 8.87896125e-01, 1.45918663e-01],
[ 9.68443477e-01, 8.94563989e-01, 1.47014438e-01],
[ 9.66271225e-01, 9.01249365e-01, 1.48179639e-01],
[ 9.64021057e-01, 9.07950379e-01, 1.49370428e-01],
[ 9.61681481e-01, 9.14672479e-01, 1.50520343e-01],
[ 9.59275646e-01, 9.21406537e-01, 1.51566019e-01],
[ 9.56808068e-01, 9.28152065e-01, 1.52409489e-01],
[ 9.54286813e-01, 9.34907730e-01, 1.52921158e-01],
[ 9.51726083e-01, 9.41670605e-01, 1.52925363e-01],
[ 9.49150533e-01, 9.48434900e-01, 1.52177604e-01],
[ 9.46602270e-01, 9.55189860e-01, 1.50327944e-01],
[ 9.44151742e-01, 9.61916487e-01, 1.46860789e-01],
[ 9.41896120e-01, 9.68589814e-01, 1.40955606e-01],
[ 9.40015097e-01, 9.75158357e-01, 1.31325517e-01]]
test_cm = LinearSegmentedColormap.from_list(__file__, cm_data)
if __name__ == "__main__":
import matplotlib.pyplot as plt
import numpy as np
try:
from viscm import viscm
viscm(test_cm)
except ImportError:
print("viscm not found, falling back on simple display")
plt.imshow(np.linspace(0, 100, 256)[None, :], aspect='auto',
cmap=test_cm)
plt.show()
| 1.882813 | 2 |
RPI/yolov5/algorithm/planner/algorithms/hybrid_astar/draw/draw.py | Aditya239233/MDP | 4 | 4211 | <reponame>Aditya239233/MDP
import matplotlib.pyplot as plt
import numpy as np
import math
from algorithm.planner.utils.car_utils import Car_C
PI = np.pi
class Arrow:
def __init__(self, x, y, theta, L, c):
angle = np.deg2rad(30)
d = 0.3 * L
w = 2
x_start = x
y_start = y
x_end = x + L * np.cos(theta)
y_end = y + L * np.sin(theta)
theta_hat_L = theta + PI - angle
theta_hat_R = theta + PI + angle
x_hat_start = x_end
x_hat_end_L = x_hat_start + d * np.cos(theta_hat_L)
x_hat_end_R = x_hat_start + d * np.cos(theta_hat_R)
y_hat_start = y_end
y_hat_end_L = y_hat_start + d * np.sin(theta_hat_L)
y_hat_end_R = y_hat_start + d * np.sin(theta_hat_R)
plt.plot([x_start, x_end], [y_start, y_end], color=c, linewidth=w)
plt.plot([x_hat_start, x_hat_end_L],
[y_hat_start, y_hat_end_L], color=c, linewidth=w)
plt.plot([x_hat_start, x_hat_end_R],
[y_hat_start, y_hat_end_R], color=c, linewidth=w)
class Car:
def __init__(self, x, y, yaw, w, L):
theta_B = PI + yaw
xB = x + L / 4 * np.cos(theta_B)
yB = y + L / 4 * np.sin(theta_B)
theta_BL = theta_B + PI / 2
theta_BR = theta_B - PI / 2
x_BL = xB + w / 2 * np.cos(theta_BL) # Bottom-Left vertex
y_BL = yB + w / 2 * np.sin(theta_BL)
x_BR = xB + w / 2 * np.cos(theta_BR) # Bottom-Right vertex
y_BR = yB + w / 2 * np.sin(theta_BR)
x_FL = x_BL + L * np.cos(yaw) # Front-Left vertex
y_FL = y_BL + L * np.sin(yaw)
x_FR = x_BR + L * np.cos(yaw) # Front-Right vertex
y_FR = y_BR + L * np.sin(yaw)
plt.plot([x_BL, x_BR, x_FR, x_FL, x_BL],
[y_BL, y_BR, y_FR, y_FL, y_BL],
linewidth=1, color='black')
Arrow(x, y, yaw, L / 2, 'black')
def draw_car(x, y, yaw, steer, color='black', extended_car=True):
if extended_car:
car = np.array([[-Car_C.RB, -Car_C.RB, Car_C.RF, Car_C.RF, -Car_C.RB, Car_C.ACTUAL_RF, Car_C.ACTUAL_RF, -Car_C.ACTUAL_RB, -Car_C.ACTUAL_RB],
[Car_C.W / 2, -Car_C.W / 2, -Car_C.W / 2, Car_C.W / 2, Car_C.W / 2, Car_C.W/2, -Car_C.W/2, -Car_C.W/2, Car_C.W/2]])
else:
car = np.array([[-Car_C.RB, -Car_C.RB, Car_C.RF, Car_C.RF, -Car_C.RB],
[Car_C.W / 2, -Car_C.W / 2, -Car_C.W / 2, Car_C.W / 2, Car_C.W / 2]])
wheel = np.array([[-Car_C.TR, -Car_C.TR, Car_C.TR, Car_C.TR, -Car_C.TR],
[Car_C.TW / 4, -Car_C.TW / 4, -Car_C.TW / 4, Car_C.TW / 4, Car_C.TW / 4]])
rlWheel = wheel.copy()
rrWheel = wheel.copy()
frWheel = wheel.copy()
flWheel = wheel.copy()
Rot1 = np.array([[math.cos(yaw), -math.sin(yaw)],
[math.sin(yaw), math.cos(yaw)]])
Rot2 = np.array([[math.cos(steer), math.sin(steer)],
[-math.sin(steer), math.cos(steer)]])
frWheel = np.dot(Rot2, frWheel)
flWheel = np.dot(Rot2, flWheel)
frWheel += np.array([[Car_C.WB], [-Car_C.WD / 2]])
flWheel += np.array([[Car_C.WB], [Car_C.WD / 2]])
rrWheel[1, :] -= Car_C.WD / 2
rlWheel[1, :] += Car_C.WD / 2
frWheel = np.dot(Rot1, frWheel)
flWheel = np.dot(Rot1, flWheel)
rrWheel = np.dot(Rot1, rrWheel)
rlWheel = np.dot(Rot1, rlWheel)
car = np.dot(Rot1, car)
frWheel += np.array([[x], [y]])
flWheel += np.array([[x], [y]])
rrWheel += np.array([[x], [y]])
rlWheel += np.array([[x], [y]])
car += np.array([[x], [y]])
plt.plot(car[0, :], car[1, :], color)
plt.plot(frWheel[0, :], frWheel[1, :], color)
plt.plot(rrWheel[0, :], rrWheel[1, :], color)
plt.plot(flWheel[0, :], flWheel[1, :], color)
plt.plot(rlWheel[0, :], rlWheel[1, :], color)
Arrow(x, y, yaw, Car_C.WB * 0.8, color)
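# Illustrative sketch (not in the original module): plotting one heading arrow
# and one simple car outline with the helpers above; dimensions are arbitrary
# assumptions.
def _demo_plot():
    plt.figure()
    Arrow(0.0, 0.0, np.deg2rad(45.0), 2.0, 'blue')  # arrow of length 2 at 45 degrees
    Car(0.0, 0.0, np.deg2rad(45.0), 2.0, 4.0)       # width 2, length 4 outline
    plt.axis('equal')
    plt.show()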
| 2.8125 | 3 |
models/database_models/comment_model.py | RuiCoreSci/Flask-Restful | 7 | 4212 | from sqlalchemy import Integer, Text, DateTime, func, Boolean, text
from models.database_models import Base, Column
class Comment(Base):
__tablename__ = "comment"
id = Column(Integer, primary_key=True)
user_id = Column(Integer, nullable=False, comment="ID of the commenting user")
post_id = Column(Integer, nullable=False, comment="ID of the post being commented on")
content = Column(Text, nullable=False, comment="the user's comment text")
create_time = Column(DateTime, server_default=func.now(), comment="creation time")
update_time = Column(DateTime, server_default=func.now(), onupdate=func.now(), comment="last update time")
deleted = Column(Boolean, default=False, server_default=text('0'), nullable=False, comment="whether this record has been deleted")
| 2.75 | 3 |
aws_deploy/ecs/helper.py | jmsantorum/aws-deploy | 0 | 4213 | <reponame>jmsantorum/aws-deploy
import json
import re
from datetime import datetime
from json.decoder import JSONDecodeError
import click
from boto3.session import Session
from boto3_type_annotations.ecs import Client
from botocore.exceptions import ClientError, NoCredentialsError
from dateutil.tz.tz import tzlocal
from dictdiffer import diff
JSON_LIST_REGEX = re.compile(r'^\[.*\]$')
LAUNCH_TYPE_EC2 = 'EC2'
LAUNCH_TYPE_FARGATE = 'FARGATE'
def read_env_file(container_name, file):
env_vars = []
try:
with open(file) as f:
for line in f:
if line.startswith('#') or not line.strip() or '=' not in line:
continue
key, value = line.strip().split('=', 1)
env_vars.append((container_name, key, value))
except Exception as e:
raise EcsTaskDefinitionCommandError(str(e))
return tuple(env_vars)
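# Illustrative sketch (assumption: not part of the original module) of what
# read_env_file returns for a dotenv-style file; the container name 'web' and
# the temporary file are hypothetical.
def _demo_read_env_file():
    import tempfile
    with tempfile.NamedTemporaryFile('w', suffix='.env', delete=False) as f:
        f.write('# comments and blank lines are skipped\n')
        f.write('DB_HOST=localhost\n')
        f.write('DB_PORT=5432\n')
        path = f.name
    # -> (('web', 'DB_HOST', 'localhost'), ('web', 'DB_PORT', '5432'))
    return read_env_file('web', path)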
class EcsClient(object):
def __init__(self, aws_access_key_id=None, aws_secret_access_key=None, aws_session_token=None, region_name=None,
profile_name=None):
session = Session(
aws_access_key_id=aws_access_key_id,
aws_secret_access_key=aws_secret_access_key,
aws_session_token=aws_session_token,
region_name=region_name,
profile_name=profile_name
)
self.boto: Client = session.client('ecs')
self.events = session.client('events')
def describe_services(self, cluster_name, service_name):
return self.boto.describe_services(
cluster=cluster_name,
services=[service_name]
)
def describe_task_definition(self, task_definition_arn):
try:
return self.boto.describe_task_definition(
taskDefinition=task_definition_arn,
include=[
'TAGS',
]
)
except ClientError:
raise UnknownTaskDefinitionError(
u'Unknown task definition arn: %s' % task_definition_arn
)
def list_tasks(self, cluster_name, service_name):
return self.boto.list_tasks(
cluster=cluster_name,
serviceName=service_name
)
def describe_tasks(self, cluster_name, task_arns):
return self.boto.describe_tasks(cluster=cluster_name, tasks=task_arns)
def register_task_definition(self, family, containers, volumes, role_arn,
execution_role_arn, tags, additional_properties):
if tags:
additional_properties['tags'] = tags
return self.boto.register_task_definition(
family=family,
containerDefinitions=containers,
volumes=volumes,
taskRoleArn=role_arn,
executionRoleArn=execution_role_arn,
**additional_properties
)
def deregister_task_definition(self, task_definition_arn):
return self.boto.deregister_task_definition(
taskDefinition=task_definition_arn
)
def update_service(self, cluster, service, desired_count, task_definition):
if desired_count is None:
return self.boto.update_service(
cluster=cluster,
service=service,
taskDefinition=task_definition
)
return self.boto.update_service(
cluster=cluster,
service=service,
desiredCount=desired_count,
taskDefinition=task_definition
)
def run_task(self, cluster, task_definition, count, started_by, overrides,
launchtype='EC2', subnets=(), security_groups=(),
public_ip=False, platform_version=None):
if launchtype == LAUNCH_TYPE_FARGATE:
if not subnets or not security_groups:
msg = 'At least one subnet (--subnet) and one security ' \
'group (--securitygroup) definition are required ' \
'for launch type FARGATE'
raise TaskPlacementError(msg)
network_configuration = {
"awsvpcConfiguration": {
"subnets": subnets,
"securityGroups": security_groups,
"assignPublicIp": "ENABLED" if public_ip else "DISABLED"
}
}
if platform_version is None:
platform_version = 'LATEST'
return self.boto.run_task(
cluster=cluster,
taskDefinition=task_definition,
count=count,
startedBy=started_by,
overrides=overrides,
launchType=launchtype,
networkConfiguration=network_configuration,
platformVersion=platform_version,
)
return self.boto.run_task(
cluster=cluster,
taskDefinition=task_definition,
count=count,
startedBy=started_by,
overrides=overrides
)
def update_rule(self, cluster, rule, task_definition):
target = self.events.list_targets_by_rule(Rule=rule)['Targets'][0]
target['Arn'] = task_definition.arn.partition('task-definition')[0] + 'cluster/' + cluster
target['EcsParameters']['TaskDefinitionArn'] = task_definition.arn
self.events.put_targets(Rule=rule, Targets=[target])
return target['Id']
class EcsService(dict):
def __init__(self, cluster, service_definition=None, **kwargs):
self._cluster = cluster
super(EcsService, self).__init__(service_definition, **kwargs)
def set_task_definition(self, task_definition):
self[u'taskDefinition'] = task_definition.arn
@property
def cluster(self):
return self._cluster
@property
def name(self):
return self.get(u'serviceName')
@property
def task_definition(self):
return self.get(u'taskDefinition')
@property
def desired_count(self):
return self.get(u'desiredCount')
@property
def deployment_created_at(self):
for deployment in self.get(u'deployments'):
if deployment.get(u'status') == u'PRIMARY':
return deployment.get(u'createdAt')
return datetime.now()
@property
def deployment_updated_at(self):
for deployment in self.get(u'deployments'):
if deployment.get(u'status') == u'PRIMARY':
return deployment.get(u'updatedAt')
return datetime.now()
@property
def errors(self):
return self.get_warnings(
since=self.deployment_updated_at
)
@property
def older_errors(self):
return self.get_warnings(
since=self.deployment_created_at,
until=self.deployment_updated_at
)
def get_warnings(self, since=None, until=None):
since = since or self.deployment_created_at
until = until or datetime.now(tz=tzlocal())
errors = {}
for event in self.get(u'events'):
if u'unable' not in event[u'message']:
continue
if since < event[u'createdAt'] < until:
errors[event[u'createdAt']] = event[u'message']
return errors
class EcsTaskDefinition(object):
def __init__(self, containerDefinitions, volumes, family, revision, status, taskDefinitionArn,
requiresAttributes=None, taskRoleArn=None, executionRoleArn=None, compatibilities=None, tags=None,
**kwargs):
self.containers = containerDefinitions
self.volumes = volumes
self.family = family
self.revision = revision
self.status = status
self.arn = taskDefinitionArn
self.requires_attributes = requiresAttributes or {}
self.role_arn = taskRoleArn or ''
self.execution_role_arn = executionRoleArn or ''
self.tags = tags
self.additional_properties = kwargs
self._diff = []
# the compatibilities parameter is returned from the ECS API, when
# describing a task, but may not be included, when registering a new
# task definition. Just storing it for now.
self.compatibilities = compatibilities
@property
def container_names(self):
for container in self.containers:
yield container['name']
@property
def images(self):
for container in self.containers:
yield container['name'], container['image']
@property
def family_revision(self):
return f'{self.family}:{self.revision}'
@property
def updated(self) -> bool:
return self._diff != []
@property
def diff(self):
return self._diff
def show_diff(self, show_diff: bool = False):
if show_diff:
click.secho('Task definition modified:')
for d in self._diff:
click.secho(f' {str(d)}', fg='blue')
click.secho('')
def diff_raw(self, task_b):
containers_a = {c['name']: c for c in self.containers}
containers_b = {c['name']: c for c in task_b.containers}
requirements_a = sorted([r['name'] for r in self.requires_attributes])
requirements_b = sorted([r['name'] for r in task_b.requires_attributes])
for container in containers_a:
containers_a[container]['environment'] = {e['name']: e['value'] for e in
containers_a[container].get('environment', {})}
for container in containers_b:
containers_b[container]['environment'] = {e['name']: e['value'] for e in
containers_b[container].get('environment', {})}
for container in containers_a:
containers_a[container]['secrets'] = {e['name']: e['valueFrom'] for e in
containers_a[container].get('secrets', {})}
for container in containers_b:
containers_b[container]['secrets'] = {e['name']: e['valueFrom'] for e in
containers_b[container].get('secrets', {})}
composite_a = {
'containers': containers_a,
'volumes': self.volumes,
'requires_attributes': requirements_a,
'role_arn': self.role_arn,
'execution_role_arn': self.execution_role_arn,
'compatibilities': self.compatibilities,
'additional_properties': self.additional_properties,
}
composite_b = {
'containers': containers_b,
'volumes': task_b.volumes,
'requires_attributes': requirements_b,
'role_arn': task_b.role_arn,
'execution_role_arn': task_b.execution_role_arn,
'compatibilities': task_b.compatibilities,
'additional_properties': task_b.additional_properties,
}
return list(diff(composite_a, composite_b))
def get_overrides(self):
override = dict()
overrides = []
for diff in self.diff:
if override.get('name') != diff.container:
override = dict(name=diff.container)
overrides.append(override)
if diff.field == 'command':
override['command'] = self.get_overrides_command(diff.value)
elif diff.field == 'environment':
override['environment'] = self.get_overrides_env(diff.value)
elif diff.field == 'secrets':
override['secrets'] = self.get_overrides_secrets(diff.value)
return overrides
@staticmethod
def parse_command(command):
if re.match(JSON_LIST_REGEX, command):
try:
return json.loads(command)
except JSONDecodeError as e:
raise EcsTaskDefinitionCommandError(
f"command should be valid JSON list. Got following command: {command} resulting in error: {str(e)}"
)
return command.split()
@staticmethod
def get_overrides_command(command):
return EcsTaskDefinition.parse_command(command)
@staticmethod
def get_overrides_env(env):
return [{"name": e, "value": env[e]} for e in env]
@staticmethod
def get_overrides_secrets(secrets):
return [{"name": s, "valueFrom": secrets[s]} for s in secrets]
def get_tag(self, key):
for tag in self.tags:
if tag['key'] == key:
return tag['value']
return None
def set_tag(self, key: str, value: str):
if key and value:
done = False
for tag in self.tags:
if tag['key'] == key:
if tag['value'] != value:
diff = EcsTaskDefinitionDiff(
container=None,
field=f"tags['{key}']",
value=value,
old_value=tag['value']
)
self._diff.append(diff)
tag['value'] = value
done = True
break
if not done:
diff = EcsTaskDefinitionDiff(container=None, field=f"tags['{key}']", value=value, old_value=None)
self._diff.append(diff)
self.tags.append({'key': key, 'value': value})
def set_images(self, tag=None, **images):
self.validate_container_options(**images)
for container in self.containers:
if container['name'] in images:
new_image = images[container['name']]
diff = EcsTaskDefinitionDiff(
container=container['name'],
field='image',
value=new_image,
old_value=container['image']
)
self._diff.append(diff)
container['image'] = new_image
elif tag:
image_definition = container['image'].rsplit(':', 1)
new_image = f'{image_definition[0]}:{tag.strip()}'
# check if tag changes
if new_image != container['image']:
diff = EcsTaskDefinitionDiff(
container=container['name'],
field='image',
value=new_image,
old_value=container['image']
)
self._diff.append(diff)
container['image'] = new_image
def set_commands(self, **commands):
self.validate_container_options(**commands)
for container in self.containers:
if container['name'] in commands:
new_command = commands[container['name']]
diff = EcsTaskDefinitionDiff(
container=container['name'],
field='command',
value=new_command,
old_value=container.get('command')
)
self._diff.append(diff)
container['command'] = self.parse_command(new_command)
def set_environment(self, environment_list, exclusive=False, env_file=((None, None),)):
environment = {}
if None not in env_file[0]:
for env in env_file:
line = read_env_file(env[0], env[1])
environment_list = line + environment_list
for env in environment_list:
environment.setdefault(env[0], {})
environment[env[0]][env[1]] = env[2]
self.validate_container_options(**environment)
for container in self.containers:
if container['name'] in environment:
self.apply_container_environment(
container=container,
new_environment=environment[container['name']],
exclusive=exclusive,
)
elif exclusive is True:
self.apply_container_environment(
container=container,
new_environment={},
exclusive=exclusive,
)
def apply_container_environment(self, container, new_environment, exclusive=False):
environment = container.get('environment', {})
old_environment = {env['name']: env['value'] for env in environment}
if exclusive is True:
merged = new_environment
else:
merged = old_environment.copy()
merged.update(new_environment)
if old_environment == merged:
return
diff = EcsTaskDefinitionDiff(
container=container['name'],
field='environment',
value=merged,
old_value=old_environment
)
self._diff.append(diff)
container['environment'] = [
{"name": e, "value": merged[e]} for e in merged
]
def set_secrets(self, secrets_list, exclusive=False):
secrets = {}
for secret in secrets_list:
secrets.setdefault(secret[0], {})
secrets[secret[0]][secret[1]] = secret[2]
self.validate_container_options(**secrets)
for container in self.containers:
if container['name'] in secrets:
self.apply_container_secrets(
container=container,
new_secrets=secrets[container['name']],
exclusive=exclusive,
)
elif exclusive is True:
self.apply_container_secrets(
container=container,
new_secrets={},
exclusive=exclusive,
)
def apply_container_secrets(self, container, new_secrets, exclusive=False):
secrets = container.get('secrets', {})
old_secrets = {secret['name']: secret['valueFrom'] for secret in secrets}
if exclusive is True:
merged = new_secrets
else:
merged = old_secrets.copy()
merged.update(new_secrets)
if old_secrets == merged:
return
diff = EcsTaskDefinitionDiff(
container=container['name'],
field='secrets',
value=merged,
old_value=old_secrets
)
self._diff.append(diff)
container['secrets'] = [
{"name": s, "valueFrom": merged[s]} for s in merged
]
def validate_container_options(self, **container_options):
for container_name in container_options:
if container_name not in self.container_names:
raise UnknownContainerError(f'Unknown container: {container_name}')
def set_role_arn(self, role_arn):
if role_arn:
diff = EcsTaskDefinitionDiff(
container=None,
field='role_arn',
value=role_arn,
old_value=self.role_arn
)
self.role_arn = role_arn
self._diff.append(diff)
def set_execution_role_arn(self, execution_role_arn):
if execution_role_arn:
diff = EcsTaskDefinitionDiff(
container=None,
field='execution_role_arn',
value=execution_role_arn,
old_value=self.execution_role_arn
)
self.execution_role_arn = execution_role_arn
self._diff.append(diff)
class EcsTaskDefinitionDiff(object):
def __init__(self, container, field, value, old_value):
self.container = container
self.field = field
self.value = value
self.old_value = old_value
def __repr__(self):
if self.field == 'environment':
return '\n'.join(self._get_environment_diffs(
self.container,
self.value,
self.old_value,
))
elif self.field == 'secrets':
return '\n'.join(self._get_secrets_diffs(
self.container,
self.value,
self.old_value,
))
elif self.container:
return f'Changed {self.field} of container "{self.container}" to: "{self.value}" (was: "{self.old_value}")'
else:
return f'Changed {self.field} to: "{self.value}" (was: "{self.old_value}")'
@staticmethod
def _get_environment_diffs(container, env, old_env):
diffs = []
for name, value in env.items():
old_value = old_env.get(name)
if value != old_value or value and not old_value:
message = f'Changed environment "{name}" of container "{container}" to: "{value}"'
diffs.append(message)
for old_name in old_env.keys():
if old_name not in env.keys():
message = f'Removed environment "{old_name}" of container "{container}"'
diffs.append(message)
return diffs
@staticmethod
def _get_secrets_diffs(container, secrets, old_secrets):
diffs = []
for name, value in secrets.items():
old_value = old_secrets.get(name)
if value != old_value or not old_value:
message = f'Changed secret "{name}" of container "{container}" to: "{value}"'
diffs.append(message)
for old_name in old_secrets.keys():
if old_name not in secrets.keys():
message = f'Removed secret "{old_name}" of container "{container}"'
diffs.append(message)
return diffs
class EcsAction(object):
def __init__(self, client: EcsClient, cluster_name: str, service_name: str):
self._client = client
self._cluster_name = cluster_name
self._service_name = service_name
try:
if service_name:
self._service = self.get_service()
except IndexError:
raise EcsConnectionError(
u'An error occurred when calling the DescribeServices '
u'operation: Service not found.'
)
except ClientError as e:
raise EcsConnectionError(str(e))
except NoCredentialsError:
raise EcsConnectionError(
u'Unable to locate credentials. Configure credentials '
u'by running "aws configure".'
)
def get_service(self):
services_definition = self._client.describe_services(
cluster_name=self._cluster_name,
service_name=self._service_name
)
return EcsService(
cluster=self._cluster_name,
service_definition=services_definition[u'services'][0]
)
def get_current_task_definition(self, service):
return self.get_task_definition(service.task_definition)
def get_task_definition(self, task_definition):
task_definition_payload = self._client.describe_task_definition(
task_definition_arn=task_definition
)
task_definition = EcsTaskDefinition(
tags=task_definition_payload.get('tags', None),
**task_definition_payload[u'taskDefinition']
)
return task_definition
def update_task_definition(self, task_definition):
response = self._client.register_task_definition(
family=task_definition.family,
containers=task_definition.containers,
volumes=task_definition.volumes,
role_arn=task_definition.role_arn,
execution_role_arn=task_definition.execution_role_arn,
tags=task_definition.tags,
additional_properties=task_definition.additional_properties
)
new_task_definition = EcsTaskDefinition(**response[u'taskDefinition'])
return new_task_definition
def deregister_task_definition(self, task_definition):
self._client.deregister_task_definition(task_definition.arn)
def update_service(self, service, desired_count=None):
response = self._client.update_service(
cluster=service.cluster,
service=service.name,
desired_count=desired_count,
task_definition=service.task_definition
)
return EcsService(self._cluster_name, response[u'service'])
def is_deployed(self, service):
if len(service[u'deployments']) != 1:
return False
running_tasks = self._client.list_tasks(
cluster_name=service.cluster,
service_name=service.name
)
if not running_tasks[u'taskArns']:
return service.desired_count == 0
running_count = self.get_running_tasks_count(
service=service,
task_arns=running_tasks[u'taskArns']
)
return service.desired_count == running_count
def get_running_tasks_count(self, service, task_arns):
running_count = 0
tasks_details = self._client.describe_tasks(
cluster_name=self._cluster_name,
task_arns=task_arns
)
for task in tasks_details[u'tasks']:
arn = task[u'taskDefinitionArn']
status = task[u'lastStatus']
if arn == service.task_definition and status == u'RUNNING':
running_count += 1
return running_count
@property
def client(self):
return self._client
@property
def service(self):
return self._service
@property
def cluster_name(self):
return self._cluster_name
@property
def service_name(self):
return self._service_name
class DeployAction(EcsAction):
def deploy(self, task_definition):
try:
self._service.set_task_definition(task_definition)
return self.update_service(self._service)
except ClientError as e:
raise EcsError(str(e))
class ScaleAction(EcsAction):
def scale(self, desired_count):
try:
return self.update_service(self._service, desired_count)
except ClientError as e:
raise EcsError(str(e))
class RunAction(EcsAction):
def __init__(self, client, cluster_name):
super(RunAction, self).__init__(client, cluster_name, None)
self._client = client
self._cluster_name = cluster_name
self.started_tasks = []
def run(self, task_definition, count, started_by, launchtype, subnets,
security_groups, public_ip, platform_version):
try:
result = self._client.run_task(
cluster=self._cluster_name,
task_definition=task_definition.family_revision,
count=count,
started_by=started_by,
overrides=dict(containerOverrides=task_definition.get_overrides()),
launchtype=launchtype,
subnets=subnets,
security_groups=security_groups,
public_ip=public_ip,
platform_version=platform_version,
)
self.started_tasks = result['tasks']
return True
except ClientError as e:
raise EcsError(str(e))
class UpdateAction(EcsAction):
def __init__(self, client):
super(UpdateAction, self).__init__(client, None, None)
class DiffAction(EcsAction):
def __init__(self, client):
super(DiffAction, self).__init__(client, None, None)
class EcsError(Exception):
pass
class EcsConnectionError(EcsError):
pass
class UnknownContainerError(EcsError):
pass
class TaskPlacementError(EcsError):
pass
class UnknownTaskDefinitionError(EcsError):
pass
class EcsTaskDefinitionCommandError(EcsError):
pass
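# A minimal deployment sketch (hypothetical cluster/service names and image tag;
# assumes AWS credentials are available to boto3): bump every container image to
# a new tag and roll the service.
if __name__ == '__main__':
    client = EcsClient(region_name='us-east-1')
    deployment = DeployAction(client, cluster_name='my-cluster', service_name='my-service')
    task_definition = deployment.get_current_task_definition(deployment.service)
    task_definition.set_images(tag='1.2.3')
    new_task_definition = deployment.update_task_definition(task_definition)
    deployment.deploy(new_task_definition)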
| 2.046875 | 2 |
sbm.py | emmaling27/networks-research | 0 | 4214 | <reponame>emmaling27/networks-research
import networkx as nx
from scipy.special import comb
import attr
@attr.s
class Count(object):
"""Count class with monochromatic and bichromatic counts"""
n = attr.ib()
monochromatic = attr.ib(default=0)
bichromatic = attr.ib(default=0)
def count_edge(self, u, v):
if (u < self.n / 2) != (v < self.n / 2):
self.bichromatic += 1
else:
self.monochromatic += 1
class SBM():
"""SBM class with predicted numbers of wedges and local bridges and actual counts"""
def __init__(self, n, p, q, seed=0):
self.n = n
self.p = p
self.q = q
self.g = nx.generators.community.stochastic_block_model(
[int(self.n / 2), int(self.n / 2)],
[[p, q], [q, p]],
seed=seed)
def is_bichromatic(self, u, v):
return (u < self.n / 2) != (v < self.n / 2)
def get_bichromatic_fraction(self):
bichromatic = 0
for (x, y) in self.g.edges():
if self.is_bichromatic(x, y):
bichromatic += 1
return bichromatic / len(self.g.edges())
def is_local_bridge(self, u, v):
return not set(self.g.neighbors(u)).intersection(set(self.g.neighbors(v)))
def count_local_bridges(self):
monochromatic, bichromatic = 0, 0
for (u, v) in self.g.edges():
if self.is_local_bridge(u, v):
if self.is_bichromatic(u, v):
bichromatic += 1
else:
monochromatic += 1
return monochromatic, bichromatic
def _count_possible_edges(self, local_bridge):
count = Count(self.n)
for u in range(self.n):
for v in range(u+1, self.n):
if not self.g.has_edge(u, v) and \
(self.is_local_bridge(u, v) == local_bridge):
count.count_edge(u, v)
return count
def count_possible_local_bridges(self):
return self._count_possible_edges(local_bridge=True)
def count_possible_closures(self):
return self._count_possible_edges(local_bridge=False)
def count_wedges(self):
count = Count(self.n)
for v in self.g.nodes():
sorted_neighbors = sorted(self.g.neighbors(v))
for i in range(len(sorted_neighbors)):
for j in range(i + 1, len(sorted_neighbors)):
if not self.g.has_edge(sorted_neighbors[i], sorted_neighbors[j]):
count.count_edge(sorted_neighbors[i], sorted_neighbors[j])
return count
def predicted_wedges(self):
return Count(
self.n,
monochromatic=3 * 2 * comb(self.n/2, 3) * self.p**2 * (1-self.p) \
+ self.n * comb(self.n/2, 2) * self.q**2 * (1-self.p),
bichromatic=2 * self.n * comb(self.n/2, 2) * self.p * self.q * (1-self.q)
)
def predicted_local_bridges(self):
return Count(
self.n,
monochromatic=2 * (1-self.p) * comb(self.n/2, 2) * (1-self.p**2)**(self.n/2-2) * (1-self.q**2)**(self.n/2),
bichromatic=(1-self.q) * (self.n/2) ** 2 * (1-self.p*self.q)**(self.n-2)
)
def predicted_possible_closures(self):
return Count(
self.n,
monochromatic=2 * (1-self.p) * comb(self.n/2, 2) * (1 - (1-self.p**2)**(self.n/2-2) * (1-self.q**2)**(self.n/2)),
bichromatic=(1-self.q) * (self.n/2) ** 2 * (1 - (1-self.p*self.q)**(self.n-2))
)
def predicted_possible_edges(self):
return Count(
self.n,
monochromatic=2 * (1-self.p) * comb(self.n/2, 2),
bichromatic=(1-self.q) * (self.n/2) ** 2
) | 2.828125 | 3 |
src/data/graph/ops/anagram_transform_op.py | PhilHarnish/forge | 2 | 4215 | from typing import Callable, Collection, Iterable, List, Union
from data.anagram import anagram_iter
from data.graph import _op_mixin, bloom_mask, bloom_node, bloom_node_reducer
Transformer = Callable[['bloom_node.BloomNode'], 'bloom_node.BloomNode']
_SPACE_MASK = bloom_mask.for_alpha(' ')
def merge_fn(
host: 'bloom_node.BloomNode',
sources: List['bloom_node.BloomNode'],
extra: list,
whitelist: Collection = None,
blacklist: Collection = None,
**kwargs) -> None:
del kwargs
assert len(sources) == 1
exit_node = sources[0]
assert len(extra) == 1
state = _normalize_state(exit_node, extra[0])
children = list(state)
# TODO: Need a cleaner way to inject and rerun these nodes.
if len(children) == 1:
host.op = _op_mixin.Op(_op_mixin.OP_IDENTITY, children)
else:
host.op = _op_mixin.Op(_op_mixin.OP_ADD, children)
# HACK: This duplicates BloomNode._expand, essentially.
for key, reduced in bloom_node_reducer.reduce(
host, whitelist=whitelist, blacklist=blacklist):
host.link(key, reduced)
class _AnagramTransformIndex(object):
"""Singleton object used during anagram traversal."""
def __init__(
self,
exit_node: 'bloom_node.BloomNode',
root: anagram_iter.AnagramIter) -> None:
self._exit_node = exit_node
reference = bloom_node.BloomNode()
reference.distance(0)
reference.weight(1, True)
reference_choice_paths = {}
for choice, _ in root.available():
reference_choice_paths[choice] = choice(reference)
self._reference_choice_paths = reference_choice_paths
self._child_cache = {}
def iter(
self,
anagrams: anagram_iter.AnagramIter,
) -> Iterable['bloom_node.BloomNode']:
for child_choice, child_anagrams in anagrams.items():
key = (child_choice, child_anagrams)
if key not in self._child_cache:
self._child_cache[key] = self._make_child(child_choice, child_anagrams)
yield self._child_cache[key]
def _make_child(
self,
choice: Transformer,
anagrams: anagram_iter.AnagramIter) -> 'bloom_node.BloomNode':
children = list(anagrams.available())
if not children:
return choice(self._exit_node)
elif len(children) == 1:
child_choice, child_duplicates = children[0]
node = self._exit_node
while child_duplicates:
node = child_choice(node)
child_duplicates -= 1
return choice(node)
# Compute requirements from exits.
node = self._exit_node // _AnagramState(self, anagrams)
node.provide_mask = self._exit_node.provide_mask
node.require_mask = self._exit_node.require_mask
node.lengths_mask = self._exit_node.lengths_mask
node.annotate({'anagrams': anagrams})
node.max_weight = self._exit_node.max_weight
nodes_with_spaces = []
for child_choice, child_duplicates in children:
path = self._reference_choice_paths[child_choice]
if path.require_mask and path.require_mask & _SPACE_MASK:
nodes_with_spaces.append(path)
node.provide_mask |= path.provide_mask
node.require_mask |= path.require_mask
node.lengths_mask = bloom_mask.lengths_product(
node.lengths_mask, path.lengths_mask, duplicates=child_duplicates)
if nodes_with_spaces:
# Distance and provide masks should be correct. Reset required values.
# Any route to any of the spaces is now okay but 1+ must be taken.
node.require_mask = bloom_mask.REQUIRE_NOTHING
for node_with_spaces in nodes_with_spaces:
# Only require what all node_with_spaces require.
node.require_mask &= node_with_spaces.require_mask
return choice(node)
class _AnagramState(object):
def __init__(
self,
index: _AnagramTransformIndex,
anagrams: anagram_iter.AnagramIter):
self._index = index
self._anagrams = anagrams
def __iter__(self) -> Iterable['bloom_node.BloomNode']:
yield from self._index.iter(self._anagrams)
def __repr__(self) -> str:
return '_AnagramState(%s)' % self._anagrams
__str__ = __repr__
def _normalize_state(
exit_node: 'bloom_node.BloomNode',
index: Union[Iterable, anagram_iter.AnagramIter]) -> _AnagramState:
if isinstance(index, _AnagramState):
return index
# `index` is an iterable list of ???, one-by-one these will be taken as a
# route to the `exit_node`.
initial_anagrams = anagram_iter.from_choices(index)
index = _AnagramTransformIndex(exit_node, initial_anagrams)
return _AnagramState(index, initial_anagrams)
| 2.171875 | 2 |
gogapi/api.py | tikki/pygogapi | 23 | 4216 | <gh_stars>10-100
import json
import re
import logging
import html.parser
import zlib
import requests
from gogapi import urls
from gogapi.base import NotAuthorizedError, logger
from gogapi.product import Product, Series
from gogapi.search import SearchResult
DEBUG_JSON = False
GOGDATA_RE = re.compile(r"gogData\.?(.*?) = (.+);")
CLIENT_VERSION = "1.2.17.9" # Just for their statistics
USER_AGENT = "GOGGalaxyClient/{} pygogapi/0.1".format(CLIENT_VERSION)
REQUEST_RETRIES = 3
PRODUCT_EXPANDABLE = [
"downloads", "expanded_dlcs", "description", "screenshots", "videos",
"related_products", "changelog"
]
USER_EXPANDABLE = ["friendStatus", "wishlistStatus", "blockedStatus"]
LOCALE_CODES = ["de-DE", "en-US", "fr-FR", "pt-BR", "pl-PL", "ru-RU", "zh-Hans"]
CURRENCY_CODES = [
"USD", "EUR", "GBP", "AUD", "RUB", "PLN", "CAD", "CHF", "NOK", "SEK", "DKK"
]
def find_scripts(site):
parser = ScriptParser()
parser.feed(site)
return parser.scripts
class ScriptParser(html.parser.HTMLParser):
def __init__(self):
super().__init__()
self.last_tag = None
self.scripts = []
def handle_starttag(self, tag, attrs):
self.last_tag = tag
def handle_data(self, data):
if self.last_tag == "script":
self.scripts.append(data)
class GogApi:
def __init__(self, token=None):
self.token = token
self.locale = (None, None, None) # TODO: replace tuple
self.session = requests.Session()
self.session.headers["User-Agent"] = USER_AGENT
self.force_authorize = False
# Helpers
def request(self, method, url, authorized=True, allow_redirects=False,
**kwargs):
"""
Wrapper around requests.request that also handles authorization,
retries and logging
"""
if authorized or self.force_authorize:
if self.token is None:
raise NotAuthorizedError()
if self.token.expired():
self.token.refresh()
self.session.headers["Authorization"] = \
"Bearer " + self.token.access_token
else:
self.session.headers.pop("Authorization", None)
# Retries
retries = REQUEST_RETRIES
while retries > 0:
resp = self.session.request(
method, url, allow_redirects=allow_redirects, **kwargs)
if resp.status_code < 400:
return resp
elif 400 <= resp.status_code < 500:
break
else:
retries -= 1
resp.raise_for_status()
def get(self, *args, **kwargs):
"""
Wrapper around requests.get
"""
return self.request("GET", *args, **kwargs)
def post(self, *args, **kwargs):
"""
Wrapper around requests.post
"""
return self.request("POST", *args, **kwargs)
def request_json(self, *args, compressed=False, **kwargs):
"""
Wrapper around GogApi.request that automatically parses the
JSON response. Also does zlib decompression because GOG decided
to reinvent the wheel instead of using HTTP gzip encoding for
their content system V2.
"""
resp = self.request(*args, **kwargs)
if not compressed:
if DEBUG_JSON:
print(resp.text)
return resp.json()
else:
json_comp = resp.content
json_text = zlib.decompress(json_comp, 15).decode("utf-8")
if DEBUG_JSON:
print(json_text)
return json.loads(json_text)
def get_json(self, *args, **kwargs):
"""
Wrapper around GogApi.get with JSON parsing
"""
return self.request_json("GET", *args, **kwargs)
def get_gogdata(self, url, *args, **kwargs):
"""
Downloads a page and returns the embedded JavaScript gogData
variable.
"""
resp = self.get(url, *args, **kwargs)
gogdata = {}
for script in find_scripts(resp.text):
            matches = GOGDATA_RE.finditer(script)
for match in matches:
subkey = match.group(1)
value = match.group(2)
value_parsed = json.loads(value)
if subkey:
data = {subkey: value_parsed}
else:
data = value_parsed
gogdata.update(data)
return gogdata
def set_locale(self, country, currency, locale):
"""
country: ISO 3166 Alpha-2
currency: ISO 4217
locale: ISO 639 + ISO 3166 like language[_territory]
"""
if len(country) != 2:
            raise AttributeError("Invalid country code {}".format(country))
        elif currency not in CURRENCY_CODES:
            raise AttributeError("Invalid currency code {}".format(currency))
        elif locale not in LOCALE_CODES:
            raise AttributeError("Invalid locale code {}".format(locale))
self.locale = (country, currency, locale)
self.session.cookies["gog_lc"] = "_".join(self.locale)
# Web APIs
def web_game_gogdata(self, slug):
return self.get_gogdata(urls.web("game", slug), authorized=False)
def web_games_gogdata(self):
return self.get_gogdata(urls.web("account.games"))
def web_movies_gogdata(self):
return self.get_gogdata(urls.web("account.movies"))
def web_wishlist_gogdata(self):
return self.get_gogdata(urls.web("account.wishlist"))
def web_friends_gogdata(self):
return self.get_gogdata(urls.web("account.friends"))
def web_chat_gogdata(self):
return self.get_gogdata(urls.web("account.chat"))
def web_wallet_gogdata(self):
return self.get_gogdata(urls.web("wallet"))
def web_orders_gogdata(self):
return self.get_gogdata(urls.web("settings.orders"))
def web_account_gamedetails(self, game_id):
return self.get_json(urls.web("account.gamedetails", game_id))
def web_account_search(self, **query):
"""
Allowed query keys:
category: Genre
feature: Feature
hiddenFlag: Show hidden games
language: Language
mediaType: Game or movie
page: Page number
search: Search string
sortBy: Sort order
system: OS
tags: Tags
totalPages: Total Pages
"""
return self.get_json(urls.web("account.get_filtered"), params=query)
def web_search(self, **query):
"""
Allowed query keys:
category: Genre
devpub: Developer or Published
feature: Features
language: Language
mediaType: Game or movie
page: Page number
price: Price range
release: Release timeframe
search: Search string
sort: Sort order
system: OS
limit: Max results
"""
return self.get_json(
urls.web("search.filtering"), params=query, authorized=False)
def web_user_data(self):
return self.get_json(urls.web("user.data"))
def web_user_games(self):
return self.get_json(urls.web("user.games"))
def web_user_wishlist(self):
return self.get_json(urls.web("user.wishlist"))
def web_user_wishlist_add(self, game_id):
"""Returns new wishlist"""
return self.get_json(urls.web("user.wishlist.add", game_id))
def web_user_wishlist_remove(self, game_id):
"""Returns new wishlist"""
return self.get_json(urls.web("user.wishlist.remove", game_id))
def web_user_ratings(self):
return self.get_json(urls.web("user.ratings"))
def web_user_review_votes(self):
return self.get_json(urls.web("user.review_votes"))
def web_user_change_currency(self, currency):
return self.get_json(urls.web("user.change_currency", currency))
def web_user_change_language(self, lang):
return self.get_json(urls.web("user.change_language", lang))
def web_user_set_redirect_url(self, url):
"""Set redirect url after login. Only know valid url: checkout"""
return self.get(urls.web("user.set_redirect_url", params={"url": url}))
def web_user_review_guidelines(self):
return self.get_json(urls.web("user.review_guidelines"))
def web_user_public_info(self, user_id, expand=None):
if not expand:
params = None
elif expand == True:
params = {"expand": ",".join(USER_EXPANDABLE)}
else:
params = {"expand": ",".join(expand)}
return self.get_json(
urls.web("user.public.info", user_id, params=params))
def web_user_public_block(self, user_id):
return self.get_json(urls.web("user.public.block", user_id))
def web_user_public_unblock(self, user_id):
return self.get_json(urls.web("user.public.unblock", user_id))
def web_friends_remove(self, user_id):
return self.get_json(urls.web("friends.remove", user_id))
def web_friends_invite(self, user_id):
return self.get_json(urls.web("friends.invite", user_id))
def web_friends_accept(self, user_id):
return self.get_json(urls.web("friends.accept", user_id))
def web_friends_decline(self, user_id):
return self.get_json(urls.web("friends.decline", user_id))
def web_cart_get(self):
return self.get_json(urls.web("cart.get"))
def web_cart_add(self, game_id):
return self.get_json(urls.web("cart.add", game_id))
def web_cart_add_series(self, series_id):
return self.get_json(urls.web("cart.add_series", series_id))
def web_cart_remove(self, game_id):
return self.get_json(urls.web("cart.remove", game_id))
def web_reviews_search(self, game_id):
return self.get_json(urls.web("reviews.search", game_id))
def web_reviews_vote(self, game_id):
return self.get_json(urls.web("reviews.vote", game_id))
def web_reviews_report(self, game_id):
return self.get_json(urls.web("reviews.report", game_id))
def web_reviews_rate(self, game_id):
return self.get_json(urls.web("reviews.rate", game_id))
def web_reviews_add(self, game_id):
return self.get_json(urls.web("reviews.add", game_id))
def web_order_change_currency(self, order_id, currency):
return self.get_json(
urls.web("order.change_currency", order_id, currency))
def web_order_add(self, order_id, game_id):
return self.get_json(urls.web("order.add", order_id, game_id))
def web_order_remove(self, order_id, game_id):
return self.get_json(urls.web("order.remove", order_id, game_id))
def web_order_enable_store_credit(self, order_id):
return self.get_json(urls.web("order.enable_store_credit", order_id))
def web_order_disable_store_credit(self, order_id):
return self.get_json(urls.web("order.disable_store_credit", order_id))
def web_order_set_as_gift(self, order_id):
return self.get_json(urls.web("order.set_as_gift", order_id))
def web_order_set_as_not_gift(self, order_id):
return self.get_json(urls.web("order.set_as_non_gift", order_id))
def web_order_process_order(self, order_id):
return self.get_json(urls.web("order.process_order", order_id))
def web_order_payment_status(self, order_id):
return self.get_json(urls.web("order.payment_status", order_id))
def web_order_check_status(self, order_id):
return self.get_json(urls.web("order.check_status", order_id))
def web_checkout(self, order_id=None):
if order_id is None:
return self.get_json(urls.web("checkout"))
else:
return self.get_json(urls.web("checkout_id", order_id))
def web_checkout_manual(self, order_id):
return self.get_json(urls.web("checkout_manual", order_id))
# Galaxy APIs
def galaxy_file(self, game_id, dl_url):
dl_url = dl_url.lstrip("/")
return self.get_json(urls.galaxy("file", game_id, dl_url))
def galaxy_user(self, user_id=None):
if user_id is None:
user_id = self.token.user_id
return self.get_json(urls.galaxy("user", user_id))
def galaxy_friends(self, user_id=None):
if user_id is None:
user_id = self.token.user_id
return self.get_json(urls.galaxy("friends", user_id))
def galaxy_invitations(self, user_id=None):
if user_id is None:
user_id = self.token.user_id
return self.get_json(urls.galaxy("invitations", user_id))
def galaxy_status(self, user_id=None):
if user_id is None:
user_id = self.token.user_id
reqdata = {"version": CLIENT_VERSION}
self.post(urls.galaxy("status", user_id), data=reqdata)
def galaxy_statuses(self, user_ids):
user_ids_str = ",".join(user_ids)
params = {"user_id": user_ids_str}
#self.request("OPTIONS", urls.galaxy("statuses"), params=params)
return self.get_json(urls.galaxy("statuses"), params=params)
def galaxy_achievements(self, game_id, user_id=None):
if user_id is None:
user_id = self.token.user_id
return self.get_json(urls.galaxy("achievements", game_id, user_id))
def galaxy_sessions(self, game_id, user_id=None):
if user_id is None:
user_id = self.token.user_id
return self.get_json(urls.galaxy("sessions", game_id, user_id))
def galaxy_friends_achievements(self, game_id, user_id=None):
if user_id is None:
user_id = self.token.user_id
return self.get_json(
urls.galaxy("friends.achievements", game_id, user_id))
def galaxy_friends_sessions(self, game_id, user_id=None):
if user_id is None:
user_id = self.token.user_id
return self.get_json(urls.galaxy("friends.sessions", game_id, user_id))
def galaxy_product(self, game_id, expand=None):
if not expand:
params = {}
elif expand is True:
params = {"expand": ",".join(PRODUCT_EXPANDABLE)}
else:
params = {"expand": ",".join(expand)}
if self.locale[2]:
params["locale"] = self.locale[2]
return self.get_json(
urls.galaxy("product", game_id), params=params,
authorized=False)
def galaxy_products(self, game_ids, expand=None):
if not expand:
params = {}
elif expand is True:
params = {"expand": ",".join(PRODUCT_EXPANDABLE)}
else:
params = {"expand": ",".join(expand)}
if self.locale[2]:
params["locale"] = self.locale[2]
ids_string = ",".join(str(game_id) for game_id in game_ids)
params["ids"] = ids_string
return self.get_json(
urls.galaxy("products"), params=params, authorized=False)
def galaxy_secure_link(self, game_id, path, generation):
return self.get_json(
urls.galaxy("cs.securelink", game_id),
params={"path": path, "generation": generation})
def galaxy_builds(self, game_id, system):
return self.get_json(
urls.galaxy("cs.builds", game_id, system), authorized=False)
def galaxy_cs_meta(self, meta_id):
return self.get_json(
urls.galaxy("cs.meta", meta_id[0:2], meta_id[2:4], meta_id),
compressed=True,
authorized=False)
    def galaxy_client_config(self):
return self.get_json(urls.galaxy("client-config"), authorized=False)
def product(self, product_id, slug=None):
return Product(self, product_id, slug)
def search(self, **query):
search_data = self.web_search(**query)
return SearchResult(self, query, search_data)
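# A minimal usage sketch (unauthenticated; the search term and the 'products'/
# 'title' fields are assumptions about the shape of GOG's JSON response):
if __name__ == '__main__':
    api = GogApi()
    api.set_locale('US', 'USD', 'en-US')
    results = api.web_search(search='witcher', limit=5)
    for product in results.get('products', []):
        print(product.get('title'))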
| 2.21875 | 2 |
setup.py | gibsonMatt/stacks-pairwise | 0 | 4217 | import pathlib
import os
from setuptools import setup
# The directory containing this file
HERE = pathlib.Path(__file__).parent
# The text of the README file
README = (HERE / "README.md").read_text()
# specify requirements of your package here
REQUIREMENTS = ['biopython', 'numpy', 'pandas']
setup(name='stacksPairwise',
version='0.0.0',
      description='Calculate pairwise divergence (pairwise pi) from Stacks `samples.fa` output file',
long_description=README,
long_description_content_type="text/markdown",
url='https://github.com/gibsonmatt/stacks-pairwise',
author='<NAME>',
author_email='<EMAIL>',
license='MIT',
packages=['stacksPairwise'],
install_requires=REQUIREMENTS,
entry_points={
"console_scripts": [
"stacksPairwise=stacksPairwise.__main__:main"
]
},
keywords='genetics genotyping sequencing Stacks'
)
| 1.5 | 2 |
csv_experiment.py | komax/spanningtree-crossingnumber | 2 | 4218 | <gh_stars>1-10
#! /usr/bin/env python
import os
import sys
args = sys.argv[1:]
os.system('python -O -m spanningtree.csv_experiment_statistics ' +
' '.join(args))
| 1.609375 | 2 |
projects/tutorials/object_nav_ithor_dagger_then_ppo_one_object.py | klemenkotar/dcrl | 18 | 4219 | <reponame>klemenkotar/dcrl<filename>projects/tutorials/object_nav_ithor_dagger_then_ppo_one_object.py<gh_stars>10-100
import torch
import torch.optim as optim
from torch.optim.lr_scheduler import LambdaLR
from allenact.algorithms.onpolicy_sync.losses import PPO
from allenact.algorithms.onpolicy_sync.losses.imitation import Imitation
from allenact.algorithms.onpolicy_sync.losses.ppo import PPOConfig
from allenact.utils.experiment_utils import (
Builder,
PipelineStage,
TrainingPipeline,
LinearDecay,
)
from projects.tutorials.object_nav_ithor_ppo_one_object import (
ObjectNavThorPPOExperimentConfig,
)
class ObjectNavThorDaggerThenPPOExperimentConfig(ObjectNavThorPPOExperimentConfig):
"""A simple object navigation experiment in THOR.
Training with DAgger and then PPO.
"""
@classmethod
def tag(cls):
return "ObjectNavThorDaggerThenPPO"
@classmethod
def training_pipeline(cls, **kwargs):
        dagger_steps = int(1e4)
ppo_steps = int(1e6)
lr = 2.5e-4
num_mini_batch = 2 if not torch.cuda.is_available() else 6
update_repeats = 4
num_steps = 128
metric_accumulate_interval = cls.MAX_STEPS * 10 # Log every 10 max length tasks
save_interval = 10000
gamma = 0.99
use_gae = True
gae_lambda = 1.0
max_grad_norm = 0.5
return TrainingPipeline(
save_interval=save_interval,
metric_accumulate_interval=metric_accumulate_interval,
optimizer_builder=Builder(optim.Adam, dict(lr=lr)),
num_mini_batch=num_mini_batch,
update_repeats=update_repeats,
max_grad_norm=max_grad_norm,
num_steps=num_steps,
named_losses={
"ppo_loss": PPO(clip_decay=LinearDecay(ppo_steps), **PPOConfig),
"imitation_loss": Imitation(), # We add an imitation loss.
},
gamma=gamma,
use_gae=use_gae,
gae_lambda=gae_lambda,
advance_scene_rollout_period=cls.ADVANCE_SCENE_ROLLOUT_PERIOD,
pipeline_stages=[
PipelineStage(
loss_names=["imitation_loss"],
teacher_forcing=LinearDecay(
                        startp=1.0, endp=0.0, steps=dagger_steps,
),
                    max_stage_steps=dagger_steps,
),
PipelineStage(loss_names=["ppo_loss"], max_stage_steps=ppo_steps,),
],
lr_scheduler_builder=Builder(
LambdaLR, {"lr_lambda": LinearDecay(steps=ppo_steps)}
),
)
| 1.984375 | 2 |
BioCAT/src/Calculating_scores.py | DanilKrivonos/BioCAT-nrp-BIOsynthesis-Caluster-Analyzing-Tool | 4 | 4220 | from numpy import array
from pickle import load
from pandas import read_csv
import os
from BioCAT.src.Combinatorics import multi_thread_shuffling, multi_thread_calculating_scores, make_combine, get_score, get_max_aminochain, skipper
# Importing random forest model
modelpath = os.path.dirname(os.path.abspath(__file__)) + '/RFC.dump'
Rf = load(open(modelpath, 'rb'))
# The function generates lists of shuffled matrices
def make_shuffle_matrix(matrix, cpu, iterat):
"""
    The function generates lists of shuffled matrices (shuffled by module and by substrate).
Parameters
----------
matrix : pandas DataFrame
PSSM profile.
cpu : int
        Number of threads used.
iterat : int
Number of iterations of shuffling.
Returns
-------
    module_shuffling_matrix : list
        List of matrices, shuffled by module.
    substrate_shuffling_matrix : list
        List of matrices, shuffled by substrate.
"""
module_shuffling_matrix = multi_thread_shuffling(matrix, ShufflingType='module', iterations=iterat, threads=cpu)
substrate_shuffling_matrix = multi_thread_shuffling(matrix, ShufflingType='substrate', iterations=iterat, threads=cpu)
return module_shuffling_matrix, substrate_shuffling_matrix
# The function finds the sequence with the maximum possible value resulting from alignment
def get_MaxSeq(matrix, variant_seq):
"""
    The function finds the substrate sequence with the maximum possible alignment score for the PSSM profile.
Parameters
----------
matrix : pandas DataFrame
PSSM profile.
variant_seq : list
Variant of core peptide chain.
Returns
-------
    MaxSeq_full : list
        Substrate sequence with the maximum possible alignment score.
    MaxSeq_nan : list
        The same sequence with 'nan' in the positions where the query variant has 'nan'.
"""
MaxSeq = []
subs = matrix.keys()[1: ]
    # Find the sequence which has the maximum alignment score
for idx in matrix.index:
MAX_value = max(list(matrix.iloc[idx][1:]))
for key in subs:
if matrix[key][idx] == MAX_value:
                MaxSeq.append(key)  # If two monomers have the same value, take the first one
break
# Making two variants of MaxSeq
MaxSeq_full = MaxSeq.copy()
MaxSeq_nan = MaxSeq.copy()
for max_sub_idx in range(len(MaxSeq)):
if variant_seq[max_sub_idx] == 'nan':
MaxSeq_nan[max_sub_idx] = 'nan' # Adding nan to MaxSeq
return MaxSeq_full, MaxSeq_nan
# The function gives information about a cluster
def get_cluster_info(table, BGC_ID, target_file):
"""
    The function returns meta information about a cluster.
Parameters
----------
table : pandas DataFrame
        Table with meta information about NRPS clusters.
BGC_ID : str
PSSM cluster ID.
    target_file : str
        Name of the PSSM profile file.
Returns
-------
Name : str
Cluster ID.
Coord_cluster : str
Coordinate of cluster.
strand : str
Strand of cluster.
"""
for ind in table[table['ID'].str.contains(BGC_ID)].index:
Name = table[table['ID'].str.contains(target_file.split('.')[0].split('_A_')[1])]['Name'][ind]
Coord_cluster = table['Coordinates of cluster'][ind]
strand = table['Gen strand'][ind]
break
return Name, Coord_cluster, strand
# Calculate scores
def calculate_scores(variant_seq, matrix, substrate_shuffling_matrix, module_shuffling_matrix, cpu, iterat):
"""
Calculating scores.
Parameters
----------
variant_seq : list
Variant of core peptide chain.
matrix : pandas DataFrame
PSSM profile.
    substrate_shuffling_matrix : list
        List of matrices, shuffled by substrate.
    module_shuffling_matrix : list
        List of matrices, shuffled by module.
cpu : int
Number of threads used.
iterat : int
Number of iterations of shuffling.
Returns
-------
Sln_score : float
Mln_score : float
Slt_score : float
Mlt_score : float
Sdn_score : float
Mdn_score : float
Sdt_score : float
Mdt_score : float
        Scores calculated against matrices shuffled in different ways.
M - module shuffling S - substrate shuffling
l - logarithmic transformation of score d - raw score
n - MaxSeq with nan replacement t - MaxSeq without nan replacement
Relative_score : float
Relative score (Probability of target class)
Binary : float
Binary score of cluster matching.
"""
    # Finding the sequence with the maximum possible value resulting from alignment
MaxSeq_full, MaxSeq_nan = get_MaxSeq(matrix, variant_seq)
# Calculating shuffled scores
Sln_shuffled_score = array(multi_thread_calculating_scores(MaxSeq_nan, substrate_shuffling_matrix, type_value='log', iterations=iterat, threads=cpu))
Mln_shuffled_score = array(multi_thread_calculating_scores(MaxSeq_nan, module_shuffling_matrix, type_value='log', iterations=iterat, threads=cpu))
Slt_shuffled_score = array(multi_thread_calculating_scores(MaxSeq_full, substrate_shuffling_matrix, type_value='log', iterations=iterat, threads=cpu))
Mlt_shuffled_score = array(multi_thread_calculating_scores(MaxSeq_full, module_shuffling_matrix, type_value='log', iterations=iterat, threads=cpu))
Sdn_shuffled_score = array(multi_thread_calculating_scores(MaxSeq_nan, substrate_shuffling_matrix, type_value=None, iterations=iterat, threads=cpu))
Mdn_shuffled_score = array(multi_thread_calculating_scores(MaxSeq_nan, module_shuffling_matrix, type_value=None, iterations=iterat, threads=cpu))
Sdt_shuffled_score = array(multi_thread_calculating_scores(MaxSeq_full, substrate_shuffling_matrix, type_value=None, iterations=iterat, threads=cpu))
Mdt_shuffled_score = array(multi_thread_calculating_scores(MaxSeq_full, module_shuffling_matrix, type_value=None, iterations=iterat, threads=cpu))
# Calculating scores for target sequence
log_target_score = get_score(variant_seq, matrix, type_value='log')
non_log_target_score = get_score(variant_seq, matrix, type_value=None)
# Calculating features scores
Sln_score = len(Sln_shuffled_score[Sln_shuffled_score < log_target_score])/len(Sln_shuffled_score)
Mln_score = len(Mln_shuffled_score[Mln_shuffled_score < log_target_score])/len(Mln_shuffled_score)
Slt_score = len(Slt_shuffled_score[Slt_shuffled_score < log_target_score])/len(Slt_shuffled_score)
Mlt_score = len(Mlt_shuffled_score[Mlt_shuffled_score < log_target_score])/len(Mlt_shuffled_score)
Sdn_score = len(Sdn_shuffled_score[Sdn_shuffled_score < non_log_target_score])/len(Sdn_shuffled_score)
Mdn_score = len(Mdn_shuffled_score[Mdn_shuffled_score < non_log_target_score])/len(Mdn_shuffled_score)
Sdt_score = len(Sdt_shuffled_score[Sdt_shuffled_score < non_log_target_score])/len(Sdt_shuffled_score)
Mdt_score = len(Mdt_shuffled_score[Mdt_shuffled_score < non_log_target_score])/len(Mdt_shuffled_score)
# Calculating Relative score
Relative_score = round(Rf.predict_proba([[Sln_score, Mln_score,
Sdn_score, Mdn_score,
Sdt_score, Mdt_score,
Slt_score, Mlt_score
]])[0][1], 3)
Binary = Rf.predict([[Sln_score, Mln_score,
Sdn_score, Mdn_score,
Sdt_score, Mdt_score,
Slt_score, Mlt_score
]])[0]
return Sln_score, Mln_score, Slt_score, Mlt_score, Sdn_score, Mdn_score, Sdt_score, Mdt_score, Relative_score, Binary
def give_results(tsv_out, folder, files, table, ID, PeptideSeq, skip, cpu, iterat):
"""
    The function calculates scores for every PSSM profile and records the results.
Parameters
----------
tsv_out : dict
Empty dictionary for adding results.
folder : str
Path to PSSMs.
files : list
List of PSSMs.
table : pandas DataFrame
        Table with meta information about NRPS clusters.
ID : str
Name of substance.
PeptideSeq : dict
Core peptide chains for different biosynthesis types (e.g. A, B, or C).
    skip : int
        Number of presumptive skips.
cpu : int
Number of threads used.
iterat : int
Number of iterations of shuffling.
Returns
-------
tsv_out : dict
        Dictionary filled with the results.
"""
for target_file in files:
try:
BGC_ID = target_file.split('.')[0].split('_A_')[1]
except:
continue
if '_A_' not in target_file:
continue
Name, Coord_cluster, strand = get_cluster_info(table, BGC_ID, target_file) # Getting information about cluster
BGC = read_csv(folder + target_file, sep='\t')
# Skipping mode
if skip == 0:
BGC = [BGC]
else:
            BGC = skipper(BGC, skip)
for matrix in BGC:
# Check quality of matrix
if len(matrix) == 1:
continue
check = 0
values = matrix.drop(matrix.columns[0], axis=1).values
for i in values:
if all(i) == 0:
check += 1
            if check == len(values):  # If this condition is True, the matrix consists only of unrecognized monomers
continue
# Generating shuffling matrix
module_shuffling_matrix, substrate_shuffling_matrix = make_shuffle_matrix(matrix, cpu, iterat)
for BS_type in PeptideSeq:# For every biosynthesis profile pathways
if PeptideSeq[BS_type] == None: # If in sequence only nan monomers
continue
if len(PeptideSeq[BS_type]) == 0: # If have not the variant
continue
# Check correctness of PeptideSeq
                length_max = get_max_aminochain(PeptideSeq[BS_type])
EPs = make_combine(PeptideSeq[BS_type], length_max, matrix, delta=3)
                if EPs is None:  # If the sequence length can't be scaled to the cluster size
continue
for variant_seq in EPs:
Sln_score, Mln_score, Slt_score, Mlt_score, Sdn_score, Mdn_score, Sdt_score, Mdt_score, Relative_score, Binary = calculate_scores(variant_seq, matrix, substrate_shuffling_matrix, module_shuffling_matrix, cpu, iterat)
                    # Recording results in the dictionary
tsv_out['Chromosome ID'].append(Name)
tsv_out['Coordinates of cluster'].append(Coord_cluster)
tsv_out['Strand'].append(strand)
tsv_out['Substance'].append(ID)
tsv_out['BGC ID'].append(BGC_ID)
tsv_out['Putative linearized NRP sequence'].append('--'.join(variant_seq))
tsv_out['Biosynthesis profile'].append('Type {}'.format(BS_type))
                    tsv_out['Sln score'].append(Sln_score)  # shuffling substrates in matrix with log score and nan in maximally possible sequence
                    tsv_out['Mln score'].append(Mln_score)  # shuffling modules matrix with log score and nan in maximally possible sequence
                    tsv_out['Sdn score'].append(Sdn_score)  # shuffling substrates matrix without log score and nan in maximally possible sequence
                    tsv_out['Mdn score'].append(Mdn_score)  # shuffling modules matrix without log score and nan in maximally possible sequence
                    tsv_out['Sdt score'].append(Sdt_score)  # shuffling substrates matrix without log score in maximally possible sequence
                    tsv_out['Mdt score'].append(Mdt_score)  # shuffling modules matrix without log score in maximally possible sequence
                    tsv_out['Slt score'].append(Slt_score)  # shuffling substrates matrix with log score in maximally possible sequence
                    tsv_out['Mlt score'].append(Mlt_score)  # shuffling modules matrix with log score in maximally possible sequence
tsv_out['Relative score'].append(Relative_score) #Final score
tsv_out['Binary'].append(Binary) #Binary value
return tsv_out
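# A minimal calling sketch (hypothetical file names and substance; the keys of
# tsv_out mirror exactly what give_results appends to, and the PeptideSeq values
# are illustrative monomer chains):
if __name__ == '__main__':
    tsv_out = {key: [] for key in (
        'Chromosome ID', 'Coordinates of cluster', 'Strand', 'Substance',
        'BGC ID', 'Putative linearized NRP sequence', 'Biosynthesis profile',
        'Sln score', 'Mln score', 'Sdn score', 'Mdn score',
        'Sdt score', 'Mdt score', 'Slt score', 'Mlt score',
        'Relative score', 'Binary')}
    results = give_results(
        tsv_out, folder='pssm_profiles/', files=['pssm_A_BGC0000001.tsv'],
        table=read_csv('clusters_meta.tsv', sep='\t'), ID='surfactin',
        PeptideSeq={'A': [['leu', 'glu', 'leu']]}, skip=0, cpu=4, iterat=100)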
| 2.75 | 3 |
deal/linter/_extractors/returns.py | m4ta1l/deal | 1 | 4221 | <reponame>m4ta1l/deal
# built-in
from typing import Optional
# app
from .common import TOKENS, Extractor, Token, traverse
from .value import UNKNOWN, get_value
get_returns = Extractor()
inner_extractor = Extractor()
def has_returns(body: list) -> bool:
for expr in traverse(body=body):
if isinstance(expr, TOKENS.RETURN + TOKENS.YIELD):
return True
return False
@get_returns.register(*TOKENS.RETURN)
def handle_return(expr) -> Optional[Token]:
value = get_value(expr=expr.value)
if value is UNKNOWN:
return None
return Token(value=value, line=expr.lineno, col=expr.value.col_offset)
@get_returns.register(*TOKENS.YIELD)
def handle_yield(expr) -> Optional[Token]:
value = get_value(expr=expr.value)
if value is UNKNOWN:
return None
return Token(value=value, line=expr.lineno, col=expr.value.col_offset)
| 2.140625 | 2 |
qubiter/device_specific/chip_couplings_ibm.py | yourball/qubiter | 3 | 4222 | <reponame>yourball/qubiter
def aaa():
# trick sphinx to build link in doc
pass
# retired
ibmqx2_c_to_tars =\
{
0: [1, 2],
1: [2],
2: [],
3: [2, 4],
4: [2]
} # 6 edges
# retired
ibmqx4_c_to_tars =\
{
0: [],
1: [0],
2: [0, 1, 4],
3: [2, 4],
4: []
} # 6 edges
# retired
ibmq16Rus_c_to_tars = \
{
0: [],
1: [0, 2],
2: [3],
3: [4, 14],
4: [],
5: [4],
6: [5, 7, 11],
7: [10],
8: [7],
9: [8, 10],
10: [],
11: [10],
12: [5, 11, 13],
13: [4, 14],
14: [],
15: [0, 2, 14]
} # 22 edges
ibm20AustinTokyo_c_to_tars = \
{
0: [1, 5],
1: [0, 2, 6, 7],
2: [1, 3, 6, 7],
3: [2, 4, 8, 9],
4: [3, 8, 9],
5: [0, 6, 10, 11],
6: [1, 2, 5, 7, 10, 11],
7: [1, 2, 6, 8, 12, 13],
8: [3, 4, 7, 9, 12, 13],
9: [3, 4, 8, 14],
10: [5, 6, 11, 15],
11: [5, 6, 10, 12, 16, 17],
12: [7, 8, 11, 13, 16, 17],
13: [7, 8, 12, 14, 18, 19],
14: [9, 13, 18, 19],
15: [10, 16],
16: [11, 12, 15, 17],
17: [11, 12, 16, 18],
18: [13, 14, 17, 19],
19: [13, 14, 18]
} # 86 edges
ibmq5YorktownTenerife_c_to_tars = \
{
0: [1, 2],
1: [0, 2],
2: [0, 1, 3, 4],
3: [2, 4],
4: [2, 3]
} # 12 edges
ibmq14Melb_c_to_tars = \
{
0: [1],
1: [0, 2, 13],
2: [1, 3, 12],
3: [2, 4, 11],
4: [3, 5, 10],
5: [4, 6, 9],
6: [5, 8],
7: [8],
8: [6, 7, 9],
9: [5, 8, 10],
10: [4, 9, 11],
11: [3, 10, 12],
12: [2, 11, 13],
13: [1, 12]
} # 36 edges
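# Illustrative sanity check: the "# N edges" comments above count directed
# control -> target pairs, which can be reproduced with:
def count_edges(c_to_tars):
    """Return the number of directed couplings in a c_to_tars dict."""
    return sum(len(targets) for targets in c_to_tars.values())
# e.g. count_edges(ibmq14Melb_c_to_tars) == 36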
| 1.703125 | 2 |
Template.py | rainshen49/citadel-trading-comp | 2 | 4223 | import signal
import requests
import time
from math import floor
shutdown = False
MAIN_TAKER = 0.0065
MAIN_MAKER = 0.002
ALT_TAKER = 0.005
ALT_MAKER = 0.0035
TAKER = (MAIN_TAKER + ALT_TAKER)*2
MAKER = MAIN_MAKER + ALT_MAKER
TAKEMAIN = MAIN_TAKER - ALT_MAKER
TAKEALT = ALT_TAKER - MAIN_MAKER
BUFFER = 0.01
NaN = float('nan')
class ApiException(Exception):
pass
class Book(object):
def __init__(self, sym, json):
global NaN
self.sym = sym
self.json = json
# could be cached
self.bids = self.json['bids']
self.asks = self.json['asks']
self.ask_price = 1
self.asks_quantity_left = 0
self.bid_price = 1
self.bids_quantity_left = 0
if self.bids:
self.bid_price = self.bids[0]['price']
if self.asks:
self.ask_price = self.asks[0]['price']
def bids_room(self):
if self.bids:
quantity = sum([b['quantity']
for b in self.bids if b['price'] == self.bid_price])
filled = sum([b['quantity_filled']
for b in self.bids if b['price'] == self.bid_price])
return quantity - filled
else:
return 0
def asks_room(self):
if self.asks:
quantity = sum([b['quantity']
for b in self.asks if b['price'] == self.ask_price])
filled = sum([b['quantity_filled']
for b in self.asks if b['price'] == self.ask_price])
return quantity - filled
else:
return 0
class Limits(dict):
def __init__(self, json):
self.update(json)
self.gross_limit = int(json['gross_limit'])
self.net_limit = int(json['net_limit'])
self.gross = int(json['gross'])
self.net = int(json['net'])
class OHLC(dict):
def __init__(self, sym, json):
self.sym = sym
self.update(json)
self.tick = json['tick']
self.open = json['open']
self.high = json['high']
self.low = json['low']
self.close = json['close']
class Shock(dict):
def __init__(self, news, currtick):
self.ticker = news['ticker']
self.elapsed = currtick - news['tick']
headline = news['headline']
try:
self.amount = float(headline[-6:].replace('$', ''))
except:
self.amount = 0
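# Illustrative note (added): Shock assumes the dollar amount sits in the last
# six characters of the headline. For example, a news dict such as
#   {'ticker': 'WMT', 'tick': 10, 'headline': 'WMT hit by a shock of $0.75'}
# observed at currtick=11 gives elapsed == 1 and amount == 0.75; headlines
# without a trailing dollar amount fall back to amount = 0.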
class Session(object):
def __init__(self, url, key):
self.url = url
self.key = key
self.tick = -1
def __enter__(self):
self.session = requests.Session()
self.session.headers.update({'X-API-Key': self.key})
return self
def __exit__(self, type, value, traceback):
self.session.close()
def get_tick(self):
while True:
resp = self.session.get(self.url + '/v1/case', params=None)
if not resp.ok:
raise ApiException('could not get tick: ' + str(resp))
json = resp.json()
if json['status'] == 'STOPPED' or shutdown:
return False
if json['tick'] != self.tick:
self.tick = json['tick']
print('.', self.tick)
return True
# this timer is unnecessary, network latency should be enough
time.sleep(0.1)
def get_book(self, sym):
resp = self.session.get(
self.url + '/v1/securities/book', params={'ticker': sym})
if not resp.ok:
raise ApiException('could not get book: ' + str(resp))
return Book(sym, resp.json())
def send_order(self, sym, side, price, size):
resp = self.session.post(self.url + '/v1/orders', params={
'ticker': sym, 'type': 'LIMIT', 'action': side, 'quantity': size, 'price': price})
if resp.ok:
print('sent order', side, sym, size, '@', price)
else:
print('failed to send order', side, sym,
size, '@', price, ':', resp.text)
def getLimit(self):
resp = self.session.get(self.url+'/v1/limits')
if not resp.ok:
raise ApiException('could not get limit: '+str(resp))
return Limits(resp.json()[0])
def getSecurities(self, sym=None):
if sym is None:
resp = self.session.get(self.url+'/v1/securities')
else:
resp = self.session.get(
self.url+'/v1/securities', params={'ticker': sym})
if not resp.ok:
raise ApiException('could not get position: '+str(resp))
json = resp.json()
return {sec['ticker']: {k: sec[k] for k in [
"position",
"vwap",
"nlv",
"last",
"bid",
"bid_size",
"ask",
"ask_size",
"unrealized",
"realized"
]} for sec in json}
def get_OHLC(self, sym, ticks=50):
resp = self.session.get(
self.url + '/v1/securities/history', params={'ticker': sym,'limit':ticks})
if not resp.ok:
raise ApiException('could not get OHLC: ' + str(resp))
return [OHLC(sym, ohlc) for ohlc in resp.json()]
def buy(self, sym, price, size):
self.send_order(sym, 'BUY', price, size)
def sell(self, sym, price, size):
self.send_order(sym, 'SELL', price, size)
def send_market(self, sym, side, size):
resp = self.session.post(self.url + '/v1/orders', params={
'ticker': sym, 'type': 'MARKET', 'action': side, 'quantity': size})
if resp.ok:
json = resp.json()
print('market order', side, sym, size, '@', json['vwap'])
return json['vwap']
else:
print('failed to send order', side, sym,
size, '@Market:', resp.text)
return 0
def buyM(self, sym, size):
return self.send_market(sym, 'BUY', size)
def sellM(self, sym, size):
return self.send_market(sym, 'SELL', size)
def getNews(self):
resp = self.session.get(self.url + '/v1/news', params={'limit': 10})
if not resp.ok:
raise ApiException('failed to get news', resp.text)
else:
json = resp.json()
# only care about recent news
return [Shock(news, self.tick) for news in json if news['tick'] > self.tick-4]
def getTrader(self):
resp = self.session.get(self.url + '/v1/trader')
if not resp.ok:
raise ApiException('failed to get trader info', resp.text)
else:
json = resp.json()
return json
def main():
# price does change in every tick
# check position
    # plain arbitrage
# index arbitrage
# shock handling
# wave riding
# pairTickers = [('WMT-M', 'WMT-A'), ('CAT-M', 'CAT-A'), ('MMM-M', 'MMM-A')]
with Session('http://localhost:9998', 'VHK3DEDE') as session:
while session.get_tick():
try:
shock_runner(session)
exchange_arbitrage(session, "WMT-M", "WMT-A")
exchange_arbitrage(session, "CAT-M", "CAT-A")
exchange_arbitrage(session, "MMM-M", "MMM-A")
index_arbitrage(session, ['WMT', 'MMM', 'CAT'])
except Exception as ex:
print("error", str(ex))
# trader = session.getTrader()
# print(trader['nlv'])
# TODO: position cleaner: try to reduce gross position loss-free
# TODO: implement range runner for the last x ticks
def avg(arr):
return sum(arr)/float(len(arr))
def window_trend(left,right):
leftavg = avg(left)
rightavg = avg(right)
if rightavg > leftavg:
return 1
elif rightavg < leftavg:
return -1
else:
return 0
def splitarr(arr):
n = len(arr)
left = arr[:n//2]
right = arr[n//2:]
return left,right
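# Illustrative note (added): wwindow_trend scores the direction of a price
# series at two resolutions. It compares the average of the newer half against
# the older half for the full window and for each half-window, summing the
# three +1/-1/0 votes into a value between -3 (falling) and +3 (rising).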
def wwindow_trend(prices):
left, right = splitarr(prices)
trend = window_trend(left,right)
lleft, lright = splitarr(left)
rleft, rright = splitarr(right)
trendl = window_trend(lleft,lright)
trendr = window_trend(rleft,rright)
return trend + trendl + trendr
def trend_runner(session, ticker):
if session.tick<20:
return
# short term trend
prices = session.get_OHLC(ticker, 20)
highs = [price.high for price in prices]
lows = [price.low for price in prices]
highTrend = wwindow_trend(highs)
lowTrend = wwindow_trend(lows)
if highTrend+lowTrend < -4:
# volatile, but no trend
session.buyM(ticker,1000)
if highTrend+lowTrend > 4:
session.sellM(ticker,1000)
print(ticker,"short hightrend",highTrend,"lowtrend",lowTrend)
if session.tick<100:
return
prices = session.get_OHLC(ticker, 100)
highs = [price.high for price in prices]
lows = [price.low for price in prices]
highTrend = wwindow_trend(highs)
lowTrend = wwindow_trend(lows)
# grown too much
if highTrend+lowTrend < -4:
# volatile, but no trend
session.sellM(ticker,1000)
# dropped too much
if highTrend+lowTrend > 4:
session.buyM(ticker,1000)
print(ticker,"long hightrend",highTrend,"lowtrend",lowTrend)
def shock_runner(session):
shocks = session.getNews()
quantity = 50000
for shock in sorted(shocks, key=lambda s: s.elapsed):
Mticker = shock.ticker+"-M"
Aticker = shock.ticker+"-A"
if shock.elapsed < 2:
if shock.amount > MAIN_TAKER + BUFFER*2:
session.buyM(Mticker, quantity)
session.buyM(Aticker, quantity)
elif - shock.amount > MAIN_TAKER + BUFFER*2:
session.sellM(Mticker, quantity)
session.sellM(Aticker, quantity)
print('shock', shock.ticker, shock.amount)
if shock.elapsed == 2:
if shock.amount > MAIN_TAKER + BUFFER*2:
session.sellM(Mticker, quantity)
session.sellM(Aticker, quantity)
elif - shock.amount > MAIN_TAKER + BUFFER*2:
session.buyM(Mticker, quantity)
session.buyM(Aticker, quantity)
print('post shock', shock.ticker, shock.amount)
TAKER4 = MAIN_TAKER * 5
def index_arbitrage(session, tickers):
secs = session.getSecurities()
ETF = secs['ETF']
etfBid = ETF['bid']
etfAsk = ETF['ask']
bestBids = {}
bestBidsQ = {}
bestAsks = {}
bestAsksQ = {}
for ticker in tickers:
tickerM = ticker+"-M"
tickerA = ticker+"-A"
Mticker = secs[tickerM]
Aticker = secs[tickerA]
Mbid = Mticker['bid']
Abid = Aticker['bid']
Mask = Mticker['ask']
Aask = Aticker['ask']
if Mbid >= Abid:
bestBids[tickerM] = Mbid
bestBidsQ[tickerM] = Mticker['bid_size']
else:
bestBids[tickerA] = Abid
bestBidsQ[tickerA] = Aticker['bid_size']
if Mask <= Aask:
bestAsks[tickerM] = Mask
bestAsksQ[tickerM] = Mticker['ask_size']
else:
bestAsks[tickerA] = Aask
bestAsksQ[tickerA] = Aticker['ask_size']
compositBid = sum(bestBids.values())
compositBidQ = min(bestBidsQ.values())
compositAsk = sum(bestAsks.values())
compositAskQ = min(bestAsksQ.values())
boughtprice = 0
soldprice = 0
if etfBid - compositAsk > TAKER4+BUFFER:
quantity = ETF['bid_size'] if ETF['bid_size'] < compositAskQ else compositAskQ
if quantity == 0:
return
quantity = min([quantity, 50000])
soldprice = session.sellM('ETF', quantity)
for ticker in bestAsks:
boughtprice += session.buyM(ticker, quantity)
print('Plan ETF', etfBid, 'Stocks', compositAsk)
print('Actual ETF', soldprice, 'Stocks', boughtprice)
elif compositBid - etfAsk > TAKER4+BUFFER:
quantity = ETF['ask_size'] if ETF['ask_size'] < compositBidQ else compositBidQ
if quantity == 0:
return
quantity = min([quantity, 50000])
for ticker in bestBids:
soldprice += session.sellM(ticker, quantity)
boughtprice = session.buyM('ETF', quantity)
print('Plan Stocks', compositBid, 'ETF', etfAsk)
print('Actual Stocks', soldprice, 'ETF', boughtprice)
# TODO: send limit orders and use market to cover unfilled ones after
def exchange_arbitrage(session, mticker, aticker):
global NaN
mbook = session.get_book(mticker)
masks_room = mbook.asks_room()
mbids_room = mbook.bids_room()
abook = session.get_book(aticker)
aasks_room = abook.asks_room()
abids_room = abook.bids_room()
# a lot of room, make market orders
if mbook.bid_price - abook.ask_price > TAKER+BUFFER*2:
quantity = aasks_room if aasks_room < mbids_room else mbids_room
quantity = min([quantity, 50000])
session.sellM(mbook.sym, quantity)
session.buyM(abook.sym, quantity)
elif abook.bid_price - mbook.ask_price > TAKER+BUFFER*2:
        quantity = abids_room if abids_room < masks_room else masks_room  # sell into A's bids, buy from M's asks
quantity = min([quantity, 50000])
session.sellM(abook.sym, quantity)
session.buyM(mbook.sym, quantity)
# only a little room, make limit orders
if mbook.bid_price - abook.ask_price > BUFFER:
quantity = aasks_room if aasks_room < mbids_room else mbids_room
quantity = min([quantity, 50000])
session.sell(mbook.sym, mbook.bid_price, quantity)
session.buy(abook.sym, abook.ask_price, quantity)
elif abook.bid_price - mbook.ask_price > BUFFER:
        quantity = abids_room if abids_room < masks_room else masks_room  # sell into A's bids, buy from M's asks
quantity = min([quantity, 50000])
session.sell(abook.sym, abook.bid_price, quantity)
session.buy(mbook.sym, mbook.ask_price, quantity)
def sigint(signum, frame):
global shutdown
signal.signal(signal.SIGINT, signal.SIG_DFL)
shutdown = True
if __name__ == '__main__':
signal.signal(signal.SIGINT, sigint)
main()
| 2.71875 | 3 |
examples/basic/wire_feedthrough.py | souviksaha97/spydrnet-physical | 0 | 4224 | """
=============================================
Generating feedthrough from a single instance
=============================================
This example demonstrates how to generate a feedthrough wire connection for
a given scalar or vector wire.
**Initial Design**
.. hdl-diagram:: ../../../examples/basic/_initial_design.v
:type: netlistsvg
:align: center
:module: top
**Output1** ``wire0`` feedthrough from ``inst_2_0``
.. hdl-diagram:: ../../../examples/basic/_output_wire.v
:type: netlistsvg
:align: center
:module: top
**Output2** ``bus_in`` feedthrough from ``inst_1_0``
.. hdl-diagram:: ../../../examples/basic/_output_bus.v
:type: netlistsvg
:align: center
:module: top
"""
from os import path
import spydrnet as sdn
import spydrnet_physical as sdnphy
netlist = sdnphy.load_netlist_by_name('basic_hierarchy')
top = netlist.top_instance.reference
cable0 = next(top.get_cables("wire0"))
inst2 = next(top.get_instances("inst_2_0"))
sdn.compose(netlist, '_initial_design.v', skip_constraints=True)
top.create_feedthrough(inst2, cable0)
top.create_unconn_wires()
sdn.compose(netlist, '_output_wire.v', skip_constraints=True)
netlist = sdnphy.load_netlist_by_name('basic_hierarchy')
top = netlist.top_instance.reference
bus_in = next(top.get_cables("bus_in"))
inst1 = next(top.get_instances("inst_1_0"))
cables = top.create_feedthrough(inst1, bus_in)
top.create_unconn_wires()
sdn.compose(netlist, '_output_bus.v', skip_constraints=True)
| 2.53125 | 3 |
workflows/workflow.py | sunnyfloyd/panderyx | 0 | 4225 | from __future__ import annotations
from typing import Optional, Union
from tools import tools
from exceptions import workflow_exceptions
class Workflow:
"""A class to represent a workflow.
Workflow class provides set of methods to manage state of the workflow.
It allows for tool insertions, removals and modifications.
When workflow is run data flow is built and each tool linked to the workflow
instance is executed in determined order. Tool outputs are then consolidated
in a JSON format.
"""
TOOL_CHOICES = {
"generic": tools.GenericTool,
"large_generic": tools.LargeGenericTool,
"input": tools.InputTool,
}
def __init__(self) -> None:
"""Initializes Workflow class with root tool.
Workflow class is initialized with root tool with tool ID `0`. `_root`
points to root tool directly.
"""
self._root = tools.RootTool(id=0)
self._tools = {0: self._root}
self._used_ids = {0}
def insert_tool(
self,
tool_choice: str,
input_ids: Optional[Union[list[int], int]] = None,
output_ids: Optional[Union[list[int], int]] = None,
coordinates: Optional[tuple[int, int]] = None,
) -> tools.Tool:
"""Inserts a new tool to the current workflow.
Args:
tool_choice (str): determines what tool is created (based on the
available choices defined within the Workflow class).
            input_ids (list[int], int): starting input or inputs for the tool
identified by their IDs. Defaults to None.
output_ids (list[int], int): starting output or outputs for the tool
identified by their IDs. Defaults to None.
coordinates (tuple[int, int]): coordinates for the tool on canvas.
Defaults to None.
Raises:
workflow_exceptions.ToolNotAvailable: indicates that provided string
does not refer to an available tool from the Workflow class.
Returns:
tools.Tool: instance of a Tool's class.
"""
try:
tool_class = self.TOOL_CHOICES[tool_choice]
except KeyError:
raise workflow_exceptions.ToolNotAvailable
next_id = self._get_next_tool_id()
tool = tool_class(id=next_id)
self._tools[next_id] = tool
self._add_tool_id(next_id)
if input_ids is not None:
self.add_tool_input(tool_id=tool.id, input_ids=input_ids)
if output_ids is not None:
output_ids = self._clean_tool_ids(output_ids)
for output_id in output_ids:
self.add_tool_input(tool_id=output_id, input_ids=tool.id)
if coordinates is not None:
self.set_tool_coordinates(tool_id=tool.id, coordinates=coordinates)
return tool
def remove_tool(self, tool_ids: Union[list[int], int]) -> None:
"""Removes existing tool from the current workflow.
Removes the tool from the workflow and updates inputs and outputs of the
linked tool instances.
Args:
tool_ids (list[int], int): tool ID or IDs that ought to be removed.
Raises:
workflow_exceptions.RootCannotBeDeleted: indicates that selected
tool for removal is a root which cannot be deleted.
"""
tool_ids = self._clean_tool_ids(tool_ids)
for tool_id in tool_ids:
tool = self._get_tool_by_id(tool_id)
if tool.is_root:
raise workflow_exceptions.RootCannotBeDeleted
# remove tool from linked tools' inputs
tool_outputs = tool.outputs
for output_id in tool_outputs:
self.remove_tool_input(tool_id=output_id, input_ids=tool.id)
# remove tool from linked tools' outputs
tool_inputs = tool.inputs
for input_id in tool_inputs:
self.remove_tool_input(tool_id=tool.id, input_ids=input_id)
del self._tools[tool_id]
def add_tool_input(
self, tool_id: int, input_ids: Union[list[int], int]
) -> tools.Tool:
"""Adds new input(s) for the tool existing in the current workflow.
Args:
tool_id (int): tool ID to which input(s) should be added.
            input_ids (list[int], int): input(s) to be added to the tool
identified by their IDs.
Returns:
tools.Tool: instance of a Tool's class.
"""
tool = self._get_tool_by_id(tool_id)
input_ids = self._clean_tool_ids(input_ids)
for input_id in input_ids:
tool.add_input(input_id)
self._tools[input_id].add_output(tool_id)
return tool
def remove_tool_input(
self, tool_id: int, input_ids: Union[list[int], int]
) -> tools.Tool:
"""Removes input(s) from the tool existing in the current workflow.
Args:
tool_id (int): tool ID from which input(s) should be removed.
            input_ids (list[int], int): input(s) to be removed from the tool
identified by their IDs.
Returns:
tools.Tool: instance of a Tool's class.
"""
tool = self._get_tool_by_id(tool_id)
input_ids = self._clean_tool_ids(input_ids)
for input_id in input_ids:
tool.remove_input(input_id)
self._tools[input_id].remove_output(tool_id)
return tool
def set_tool_config(self, tool_id: int, data: dict) -> tools.Tool:
"""Sets tool's config to passed data dict.
Args:
tool_id (int): tool ID for which config should be set.
data (dict): dict of parameters for given tool.
Returns:
tools.Tool: instance of a Tool's class.
"""
tool = self._get_tool_by_id(tool_id)
tool.config = data
return tool
def set_tool_coordinates(
self, tool_id: int, coordinates: Optional[tuple[int, int]] = None
) -> tools.Tool:
"""Sets (x, y) coordinates for the tool existing in the current workflow.
If no coordinates are passed to this method, default coordinates will be
calculated using `_get_default_coordinates()` internal method.
Args:
tool_id (int): tool ID for which coordinates are to be set.
coordinates (tuple[int, int]): tuple of (x, y) coordinates.
Defaults to None.
Returns:
tools.Tool: instance of a Tool's class.
"""
# I need to decide where to put a check if coordinates will fit a canvas
tool = self._get_tool_by_id(tool_id)
coordinates = (
coordinates if coordinates is not None else self._get_default_coordinates()
)
tool.coordinates = coordinates
return tool
def _get_default_coordinates(self) -> tuple[int, int]:
# might require more sophisticated logic in the future
return (0, 0)
def _get_tool_by_id(self, tool_id: int) -> tools.Tool:
"""Returns an instance of a Tool class selected by its ID.
Args:
tool_id (int): tool ID.
Raises:
workflow_exceptions.ToolDoesNotExist: indicates that for provided ID
there is no tool in this workflow.
Returns:
tools.Tool: instance of a Tool's class.
"""
try:
tool = self._tools[tool_id]
except KeyError:
raise workflow_exceptions.ToolDoesNotExist
return tool
def _clean_tool_ids(self, tool_ids: Union[list[int], int]) -> list[int]:
"""Returns a validated list of tool ID(s).
Checks whether passed tool ID(s) exist in the current workflow
and returns the list of tool IDs. If at least one of the provided tool
IDs is not found, it raises an exception.
Args:
tool_ids (list[int], int): tool ID(s) to be cleaned.
Raises:
workflow_exceptions.ToolDoesNotExist: indicates that at least one of
the provided tool IDs is not present in the current workflow.
Returns:
list[int]: list of checked tool IDs.
"""
cleaned_tool_ids = (
list(set(tool_ids)) if isinstance(tool_ids, list) else [tool_ids]
)
if any(tool_id not in self._tools for tool_id in cleaned_tool_ids):
raise workflow_exceptions.ToolDoesNotExist
return cleaned_tool_ids
def _add_tool_id(self, tool_id: int) -> None:
"""Adds an ID to the used ID pool.
Args:
tool_id (int): ID to be added to the used ID pool.
"""
self._used_ids.add(tool_id)
def _get_next_tool_id(self) -> int:
"""Returns a next available ID to be used for a tool instance.
Returns:
int: next available tool ID.
"""
return max(self._used_ids) + 1
def _build_flow(self) -> None:
NotImplementedError
def __len__(self) -> int:
return len(self._tools) - 1
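# --- Illustrative usage sketch (added; assumes the ``tools`` module provides the
# InputTool/GenericTool classes referenced in TOOL_CHOICES and that they implement
# the add_input/add_output/config interface used by Workflow above; the config
# dict below is a placeholder) ---
if __name__ == "__main__":
    workflow = Workflow()
    source = workflow.insert_tool("input", coordinates=(0, 0))
    sink = workflow.insert_tool("generic", input_ids=source.id)
    workflow.set_tool_config(sink.id, {"param": "value"})
    print(len(workflow))  # 2 tools; the hidden root is not counted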
| 3.078125 | 3 |
team_fundraising/text.py | namtel-hp/fundraising-website | 5 | 4226 |
class Donation_text:
# Shown as a message across the top of the page on return from a donation
# used in views.py:new_donation()
thank_you = (
"Thank you for your donation. "
"You may need to refresh this page to see the donation."
)
confirmation_email_subject = (
'Thank you for donating to the Triple Crown for Heart! '
)
# Start of the email sent confirming the paypal payment has gone through
# used in paypal.py:process_paypal()
confirmation_email_opening = (
'Thank you for your donation of '
)
# Closing of the email sent confirming the paypal payment has gone through
# used in paypal.py:process_paypal()
confirmation_email_closing = (
'.\n\nFor all donations over $20, you will receive a tax receipt for '
'the 2019 tax year.'
'\nYour PayPal receipt should arrive in a separate email.\n'
)
notification_email_subject = (
"You got a donation!"
)
notification_email_opening = (
"Great news! You've just received a donation of "
)
notification_email_closing = (
"\n\nAwesome work! They would probably appreciate "
"a quick thank you email.\n\n"
"-- Triple Crown for Heart\n"
)
class Fundraiser_text:
# Subject of the email sent on signup
signup_email_subject = (
"Welcome to fundraising for the Triple Crown for Heart!"
)
# Start of the email sent when someone signs up
# used in views.py:signup()
signup_email_opening = (
"Thanks for signing up to fundraise with us!\n"
"Your fundraising page can be found at:\n"
)
# Closing of the email sent when someone signs up
# used in views.py:signup()
signup_email_closing = (
'\n\nYou can change your information by using the "Login" link at the '
'top of that page.'
'\n\nThe easiest way to start fundraising is to post the above link '
'on social media or write a short email to your friends telling them '
'about your ride.'
'\nDon\'t forget to include the link to your page!\n'
)
# Message show at the top of the fundraiser page after signing up
# used in views.py:signup()
signup_return_message = (
"Thank you for signing up. Sharing your fundraiser page on social "
"media or over email is the best way to get donations."
)
signup_wrong_password_existing_user = (
"The username already exists, but the password entered is incorrect. "
"If you were already a fundraiser for a previous campaign, please "
"enter your previous password or use "
"<a href='/team_fundraising/accounts/password_reset/'>"
"Forgot your password</a>. If this is your first campaign, "
"please choose a different username."
)
| 2.765625 | 3 |
tests/wagtail_live/test_apps.py | wagtail/wagtail-live | 22 | 4227 | from django.apps import apps
from django.test import override_settings
from wagtail_live.signals import live_page_update
def test_live_page_update_signal_receivers():
assert len(live_page_update.receivers) == 0
@override_settings(
WAGTAIL_LIVE_PUBLISHER="tests.testapp.publishers.DummyWebsocketPublisher"
)
def test_live_page_update_signal_receivers_websocket():
app_config = apps.get_app_config("wagtail_live")
app_config.ready()
try:
# Receiver should be connected, no IndexError
receiver = live_page_update.receivers[0]
finally:
live_page_update.disconnect(receiver)
| 1.710938 | 2 |
PLM/options.py | vtta2008/pipelineTool | 7 | 4228 | # -*- coding: utf-8 -*-
"""
Script Name:
Author: <NAME>/Jimmy - 3D artist.
Description:
"""
# -------------------------------------------------------------------------------------------------------------
""" Import """
import os
from PySide2.QtWidgets import (QFrame, QStyle, QAbstractItemView, QSizePolicy, QLineEdit, QPlainTextEdit,
QGraphicsItem, QGraphicsView, QGraphicsScene, QRubberBand, QCalendarWidget, )
from PySide2.QtCore import QEvent, QSettings, QSize, Qt, QDateTime
from PySide2.QtGui import QColor, QPainter, QFont, QTextCursor
SingleSelection = QCalendarWidget.SingleSelection
NoSelection = QCalendarWidget.NoSelection
SingleLetterDay = QCalendarWidget.SingleLetterDayNames
ShortDay = QCalendarWidget.ShortDayNames
LongDay = QCalendarWidget.LongDayNames
NoHoriHeader = QCalendarWidget.NoHorizontalHeader
NoVertHeader = QCalendarWidget.NoVerticalHeader
IsoWeekNum = QCalendarWidget.ISOWeekNumbers
SelectMode = QCalendarWidget.SelectionMode
HoriHeaderFm = QCalendarWidget.HorizontalHeaderFormat
VertHeaderFm = QCalendarWidget.VerticalHeaderFormat
DayOfWeek = Qt.DayOfWeek
Sunday = Qt.Sunday
Monday = Qt.Monday
Tuesday = Qt.Tuesday
Wednesday = Qt.Wednesday
Thursday = Qt.Thursday
Friday = Qt.Friday
Saturday = Qt.Saturday
ICONSIZE = 32
ICONBUFFER = -1
BTNTAGSIZE = QSize(87, 20)
TAGBTNSIZE = QSize(87-1, 20-1)
BTNICONSIZE = QSize(ICONSIZE, ICONSIZE)
ICONBTNSIZE = QSize(ICONSIZE+ICONBUFFER, ICONSIZE+ICONBUFFER)
DAMG_LOGO_COLOR = QColor(0, 114, 188, 255)
# Basic color
GlobalColor = Qt.GlobalColor
WHITE = QColor(Qt.white)
LIGHTGRAY = QColor(Qt.lightGray)
GRAY = QColor(Qt.gray)
DARKGRAY = QColor(Qt.darkGray)
BLACK = QColor(Qt.black)
RED = QColor(Qt.red)
GREEN = QColor(Qt.green)
BLUE = QColor(Qt.blue)
DARKRED = QColor(Qt.darkRed)
DARKGREEN = QColor(Qt.darkGreen)
DARKBLUE = QColor(Qt.darkBlue)
CYAN = QColor(Qt.cyan)
MAGENTA = QColor(Qt.magenta)
YELLOW = QColor(Qt.yellow)
DARKCYAN = QColor(Qt.darkCyan)
DARKMAGENTA = QColor(Qt.darkMagenta)
DARKYELLOW = QColor(Qt.darkYellow)
# Dark Palette color
Color_BACKGROUND_LIGHT = QColor('#505F69')
COLOR_BACKGROUND_NORMAL = QColor('#32414B')
COLOR_BACKGROUND_DARK = QColor('#19232D')
COLOR_FOREGROUND_LIGHT = QColor('#F0F0F0')
COLOR_FOREGROUND_NORMAL = QColor('#AAAAAA')
COLOR_FOREGROUND_DARK = QColor('#787878')
COLOR_SELECTION_LIGHT = QColor('#148CD2')
COLOR_SELECTION_NORMAL = QColor('#1464A0')
COLOR_SELECTION_DARK = QColor('#14506E')
# Nice color
blush = QColor(246, 202, 203, 255)
petal = QColor(247, 170, 189, 255)
petunia = QColor(231, 62, 151, 255)
deep_pink = QColor(229, 2, 120, 255)
melon = QColor(241, 118, 110, 255)
pomegranate = QColor(178, 27, 32, 255)
poppy_red = QColor(236, 51, 39, 255)
orange_red = QColor(240, 101, 53, 255)
olive = QColor(174, 188, 43, 255)
spring = QColor(227, 229, 121, 255)
yellow = QColor(255, 240, 29, 255)
mango = QColor(254, 209, 26, 255)
cantaloupe = QColor(250, 176, 98, 255)
tangelo = QColor(247, 151, 47, 255)
burnt_orange = QColor(236, 137, 36, 255)
bright_orange = QColor(242, 124, 53, 255)
moss = QColor(176, 186, 39, 255)
sage = QColor(212, 219, 145, 255)
apple = QColor(178, 215, 140, 255)
grass = QColor(111, 178, 68, 255)
forest = QColor(69, 149, 62, 255)
peacock = QColor(21, 140, 167, 255)
teal = QColor(24, 157, 193, 255)
aqua = QColor(153, 214, 218, 255)
violet = QColor(55, 52, 144, 255)
deep_blue = QColor(15, 86, 163, 255)
hydrangea = QColor(150, 191, 229, 255)
sky = QColor(139, 210, 244, 255)
dusk = QColor(16, 102, 162, 255)
midnight = QColor(14, 90, 131, 255)
seaside = QColor(87, 154, 188, 255)
poolside = QColor(137, 203, 225, 255)
eggplant = QColor(86, 5, 79, 255)
lilac = QColor(222, 192, 219, 255)
chocolate = QColor(87, 43, 3, 255)
blackout = QColor(19, 17, 15, 255)
stone = QColor(125, 127, 130, 255)
gravel = QColor(181, 182, 185, 255)
pebble = QColor(217, 212, 206, 255)
sand = QColor(185, 172, 151, 255)
ignoreARM = Qt.IgnoreAspectRatio
scrollAsNeed = Qt.ScrollBarAsNeeded
scrollOff = Qt.ScrollBarAlwaysOff
scrollOn = Qt.ScrollBarAlwaysOn
SiPoMin = QSizePolicy.Minimum # Size policy
SiPoMax = QSizePolicy.Maximum
SiPoExp = QSizePolicy.Expanding
SiPoPre = QSizePolicy.Preferred
SiPoIgn = QSizePolicy.Ignored
frameStyle = QFrame.Sunken | QFrame.Panel
center = Qt.AlignCenter # Alignment
right = Qt.AlignRight
left = Qt.AlignLeft
top = Qt.AlignTop
bottom = Qt.AlignBottom
hori = Qt.Horizontal
vert = Qt.Vertical
dockL = Qt.LeftDockWidgetArea # Docking area
dockR = Qt.RightDockWidgetArea
dockT = Qt.TopDockWidgetArea
dockB = Qt.BottomDockWidgetArea
dockAll = Qt.AllDockWidgetAreas
datetTimeStamp = QDateTime.currentDateTime().toString("hh:mm - dd MMMM yy") # datestamp
PRS = dict(password = QLineEdit.Password, center = center , left = left , right = right,
spmax = SiPoMax , sppre = SiPoPre, spexp = SiPoExp, spign = SiPoIgn,
expanding = QSizePolicy.Expanding, spmin = SiPoMin,)
# -------------------------------------------------------------------------------------------------------------
""" Event """
NO_WRAP = QPlainTextEdit.NoWrap
NO_FRAME = QPlainTextEdit.NoFrame
ELIDE_RIGHT = Qt.ElideRight
ELIDE_NONE = Qt.ElideNone
# -------------------------------------------------------------------------------------------------------------
""" Window state """
StateNormal = Qt.WindowNoState
StateMax = Qt.WindowMaximized
StateMin = Qt.WindowMinimized
State_Selected = QStyle.State_Selected
# -------------------------------------------------------------------------------------------------------------
""" Nodegraph setting variables """
ASPEC_RATIO = Qt.KeepAspectRatio
SMOOTH_TRANS = Qt.SmoothTransformation
SCROLLBAROFF = Qt.ScrollBarAlwaysOff # Scrollbar
SCROLLBARON = Qt.ScrollBarAlwaysOn
SCROLLBARNEED = Qt.ScrollBarAsNeeded
WORD_WRAP = Qt.TextWordWrap
INTERSECT_ITEM_SHAPE = Qt.IntersectsItemShape
CONTAIN_ITEM_SHAPE = Qt.ContainsItemShape
MATCH_EXACTLY = Qt.MatchExactly
DRAG_ONLY = QAbstractItemView.DragOnly
# -------------------------------------------------------------------------------------------------------------
""" UI flags """
ITEMENABLE = Qt.ItemIsEnabled
ITEMMOVEABLE = QGraphicsItem.ItemIsMovable
ITEMSENDGEOCHANGE = QGraphicsItem.ItemSendsGeometryChanges
ITEMSCALECHANGE = QGraphicsItem.ItemScaleChange
ITEMPOSCHANGE = QGraphicsItem.ItemPositionChange
DEVICECACHE = QGraphicsItem.DeviceCoordinateCache
SELECTABLE = QGraphicsItem.ItemIsSelectable
MOVEABLE = QGraphicsItem.ItemIsMovable
FOCUSABLE = QGraphicsItem.ItemIsFocusable
PANEL = QGraphicsItem.ItemIsPanel
NOINDEX = QGraphicsScene.NoIndex # Scene
RUBBER_DRAG = QGraphicsView.RubberBandDrag # Viewer
RUBBER_REC = QRubberBand.Rectangle
POS_CHANGE = QGraphicsItem.ItemPositionChange
NODRAG = QGraphicsView.NoDrag
NOFRAME = QGraphicsView.NoFrame
ANCHOR_NO = QGraphicsView.NoAnchor
ANCHOR_UNDERMICE = QGraphicsView.AnchorUnderMouse
ANCHOR_CENTER = QGraphicsView.AnchorViewCenter
CACHE_BG = QGraphicsView.CacheBackground
UPDATE_VIEWRECT = QGraphicsView.BoundingRectViewportUpdate
UPDATE_FULLVIEW = QGraphicsView.FullViewportUpdate
UPDATE_SMARTVIEW = QGraphicsView.SmartViewportUpdate
UPDATE_BOUNDINGVIEW = QGraphicsView.BoundingRectViewportUpdate
UPDATE_MINIMALVIEW = QGraphicsView.MinimalViewportUpdate
STAY_ON_TOP = Qt.WindowStaysOnTopHint
STRONG_FOCUS = Qt.StrongFocus
SPLASHSCREEN = Qt.SplashScreen
FRAMELESS = Qt.FramelessWindowHint
CUSTOMIZE = Qt.CustomizeWindowHint
CLOSEBTN = Qt.WindowCloseButtonHint
MINIMIZEBTN = Qt.WindowMinimizeButtonHint
AUTO_COLOR = Qt.AutoColor
# -------------------------------------------------------------------------------------------------------------
""" Drawing """
ANTIALIAS = QPainter.Antialiasing # Painter
ANTIALIAS_TEXT = QPainter.TextAntialiasing
ANTIALIAS_HIGH_QUALITY = QPainter.HighQualityAntialiasing
SMOOTH_PIXMAP_TRANSFORM = QPainter.SmoothPixmapTransform
NON_COSMETIC_PEN = QPainter.NonCosmeticDefaultPen
NO_BRUSH = Qt.NoBrush # Brush
NO_PEN = Qt.NoPen # Pen
ROUND_CAP = Qt.RoundCap
ROUND_JOIN = Qt.RoundJoin
PATTERN_SOLID = Qt.SolidPattern # Pattern
LINE_SOLID = Qt.SolidLine # Line
LINE_DASH = Qt.DashLine
LINE_DOT = Qt.DotLine
LINE_DASH_DOT = Qt.DashDotDotLine
TRANSPARENT = Qt.transparent
TRANSPARENT_MODE = Qt.TransparentMode
# -------------------------------------------------------------------------------------------------------------
""" Meta Object """
QUEUEDCONNECTION = Qt.QueuedConnection
# -------------------------------------------------------------------------------------------------------------
""" Keyboard and cursor """
TEXT_BOLD = QFont.Bold
TEXT_NORMAL = QFont.Normal
MONO_SPACE = QFont.Monospace
TEXT_MENEOMIC = Qt.TextShowMnemonic
KEY_PRESS = QEvent.KeyPress
KEY_RELEASE = QEvent.KeyRelease
KEY_ALT = Qt.Key_Alt
KEY_DEL = Qt.Key_Delete
KEY_TAB = Qt.Key_Tab
KEY_SHIFT = Qt.Key_Shift
KEY_CTRL = Qt.Key_Control
KEY_BACKSPACE = Qt.Key_Backspace
KEY_ENTER = Qt.Key_Enter
KEY_RETURN = Qt.Key_Return
KEY_F = Qt.Key_F
KEY_S = Qt.Key_S
ALT_MODIFIER = Qt.AltModifier
CTRL_MODIFIER = Qt.ControlModifier
SHIFT_MODIFIER = Qt.ShiftModifier
NO_MODIFIER = Qt.NoModifier
CLOSE_HAND_CUSOR = Qt.ClosedHandCursor
SIZEF_CURSOR = Qt.SizeFDiagCursor
windows = os.name == 'nt'
DMK = Qt.AltModifier if windows else CTRL_MODIFIER
MOUSE_LEFT = Qt.LeftButton
MOUSE_RIGHT = Qt.RightButton
MOUSE_MIDDLE = Qt.MiddleButton
NO_BUTTON = Qt.NoButton
ARROW_NONE = Qt.NoArrow # Cursor
CURSOR_ARROW = Qt.ArrowCursor
CURSOR_SIZEALL = Qt.SizeAllCursor
MOVE_OPERATION = QTextCursor.MoveOperation
MOVE_ANCHOR = QTextCursor.MoveMode.MoveAnchor
KEEP_ANCHOR = QTextCursor.MoveMode.KeepAnchor
ACTION_MOVE = Qt.MoveAction # Action
ignoreARM = Qt.IgnoreAspectRatio
# -------------------------------------------------------------------------------------------------------------
""" Set number """
RELATIVE_SIZE = Qt.RelativeSize # Size
INI = QSettings.IniFormat
NATIVE = QSettings.NativeFormat
INVALID = QSettings.InvalidFormat
SYS_SCOPE = QSettings.SystemScope
USER_SCOPE = QSettings.UserScope
# -------------------------------------------------------------------------------------------------------------
# Created by <NAME> on 5/6/2020 - 3:13 AM
# © 2017 - 2020 DAMGteam. All rights reserved | 1.640625 | 2 |
Crawling/ssafyCrawling.py | Nyapy/FMTG | 0 | 4229 | from selenium import webdriver
from selenium.webdriver.chrome.options import Options
import sys
import time
import urllib.request
import os
sys.stdin = open('idpwd.txt')
site = input()
id = input()
pwd = input()
# Absolute path of the web driver that Selenium will use
chromedriver = 'C:\Webdriver\chromedriver.exe'
# Hook the previously installed chromedriver up to Selenium's webdriver.
driver = webdriver.Chrome(chromedriver)
# Crawl a specific page with the driver.
driver.get(site)
driver.find_element_by_name('userId').send_keys(id)
driver.find_element_by_name('userPwd').send_keys(pwd)
driver.find_element_by_class_name('form-btn').click()
driver.set_window_size(1600, 800)
driver.find_element_by_xpath("//a[@href='/edu/lectureroom/openlearning/openLearningList.do']/span").click()
# driver.find_element_by_id('searchContNm').send_keys('aps')
#
# driver.find_element_by_xpath("//button[@onclick='fnSearch();']").click()
driver.find_elements_by_xpath("//*[contains(text(), '5기_B반_Java(1)')]")[0].click()
driver.find_element_by_xpath("//span[@class='file-name']").click()
driver.switch_to.window(driver.window_handles[1])
print(driver.find_elements_by_xpath("//button[@title='다음 페이지']")[0].get_attribute('disabled'))
# driver.find_elements_by_xpath("//button[@title='마지막 페이지']")[0].click()
# print(driver.find_elements_by_xpath("//button[@title='다음 페이지']")[0].get_attribute('disabled'))
# Fetching the url + practicing the find function
# pre = driver.current_url
# find = pre.find('/index.html')
# url = pre[:find]
# src = driver.find_element_by_class_name("background").get_attribute('src')
# print(src)
## Turning to the next page
# for i in driver.find_elements_by_xpath("//button[@title='다음 페이지']"):
# print(i)
cnt = 1
# url = driver.find_elements_by_class_name("background")[-1].get_attribute('src')
# print(url)
# urllib.request.urlretrieve(url, '123.jpg')
# os.system("curl " + url + " > test.jpg")
time.sleep(2)
driver.get_screenshot_as_file("hi.png")
# for i in driver.find_elements_by_class_name("background"):
# time.sleep(2)
# print(i.get_attribute('style'))
# i.screenshot(str(cnt)+'.png')
# cnt += 1
while 1:
time.sleep(0.4)
driver.save_screenshot('APS/C/'+str(cnt)+'.png')
# print(driver.find_element_by_class_name("background").get_attribute('src'))
# driver.find_element_by_class_name("background").screenshot(str(cnt)+'.png')
driver.find_elements_by_xpath("//button[@title='다음 페이지']")[0].click()
cnt += 1
if driver.find_elements_by_xpath("//button[@title='다음 페이지']")[0].get_attribute('disabled') == 'disabled':
break
| 3.171875 | 3 |
100days/day95/StringIO_demo.py | chainren/python-learn | 0 | 4230 | <reponame>chainren/python-learn<gh_stars>0
from io import StringIO
# Define a StringIO object, write to it and read back its in-memory contents
f = StringIO()
f.write('Python-100')
str = f.getvalue()  # read back what was written
print('String written to memory: %s' % str)
f.write('\n')  # append more content
f.write('Stick with it for 100 days')
f.close()  # close the buffer
f1 = StringIO('Python-100' + '\n' + 'Stick with it for 100 days')
# read the contents
print(f1.read())
f1.close()
# Hypothetical crawler data output function outputData()
def outputData():
    dataOne = 'I am crawler data #1\n'
    dataTwo = 'I am crawler data #2\n'
    dataThree = 'I am crawler data #3'
    data = dataOne + dataTwo + dataThree
    return data
# dataStr holds the crawler data string
dataStr = outputData()
# 1. Write the content returned by outputData() into memory
dataIO = StringIO(dataStr)
# 1.1 Print the data the StringIO object holds in memory
print('1.1 Data written to memory:\n%s' % dataIO.getvalue())
# 1.2 Print the written data line by line, approach one
print('1.2 Printing the written data line by line, approach one:')
for data in dataIO.readlines():
    print(data.strip('\n'))  # strip the trailing newline from each line
# 1.3 Print the written data line by line, approach two
# Because of the previous step, the file pointer now sits at the end of the data; move it back to the start
print('Due to the previous output, the file pointer is now at position: %d' % dataIO.tell())
# Move the file pointer back to the start for the demonstration below
dataIO.seek(0)
print('1.3 Printing the written data line by line, approach two:')
for data in dataIO:
    print(data.strip('\n')) | 3.234375 | 3 |
tests/test_cli.py | Nate1729/FinPack | 1 | 4231 | """Contains tests for finpack/core/cli.py
"""
__copyright__ = "Copyright (C) 2021 <NAME>"
import os
import unittest
from importlib import metadata
from docopt import docopt
from finpack.core import cli
class TestCli(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.DATA_DIR = "temp"
os.mkdir(cls.DATA_DIR)
@classmethod
def tearDownClass(cls):
os.rmdir(cls.DATA_DIR)
def test_version_option(self):
argv = ["--version"]
args = docopt(cli.__doc__, argv=argv)
self.assertTrue(args["--version"])
def test_init_no_options(self):
argv = ["init"]
args = docopt(cli.__doc__, argv=argv)
self.assertTrue(args["init"])
def test_init_with_filepath_option(self):
argv = ["init", "--filepath=temp/data.csv"]
args = docopt(cli.__doc__, argv=argv)
self.assertTrue(args["init"])
self.assertEqual(args["--filepath"], "temp/data.csv")
def test_init_with_sample_dataset_option(self):
argv = ["init", "--sample-dataset"]
args = docopt(cli.__doc__, argv=argv)
self.assertTrue(args["init"])
self.assertTrue(args["--sample-dataset"])
def test_init_with_overwrite_option(self):
argv = ["init", "--overwrite"]
args = docopt(cli.__doc__, argv=argv)
self.assertTrue(args["init"])
self.assertTrue(args["--overwrite"])
def test_balsheet_no_option(self):
argv = ["balsheet"]
args = docopt(cli.__doc__, argv=argv)
self.assertTrue(args["balsheet"])
def test_balsheet_with_filepath_option(self):
argv = ["balsheet", "--filepath=temp/data2.csv"]
args = docopt(cli.__doc__, argv=argv)
self.assertTrue(args["balsheet"])
self.assertEqual(args["--filepath"], "temp/data2.csv")
def test_balsheet_with_levels_default(self):
argv = ["balsheet"]
args = docopt(cli.__doc__, argv=argv)
self.assertTrue(args["balsheet"])
self.assertEqual(args["--levels"], "3")
def test_balsheet_with_levels_option(self):
argv = ["balsheet", "--levels=2"]
args = docopt(cli.__doc__, argv=argv)
self.assertTrue(args["balsheet"])
self.assertEqual(args["--levels"], "2")
def test_balsheet_with_date_default(self):
argv = ["balsheet"]
args = docopt(cli.__doc__, argv=argv)
self.assertTrue(args["balsheet"])
self.assertEqual(args["--date"], "today")
def test_balsheet_with_date_option(self):
argv = ["balsheet", "--date=2021-12-01"]
args = docopt(cli.__doc__, argv=argv)
self.assertTrue(args["balsheet"])
self.assertEqual(args["--date"], "2021-12-01")
| 2.421875 | 2 |
python/Patterns/inheritance/main.py | zinderud/ysa | 0 | 4232 | class Yaratik(object):
def move_left(self):
print('Moving left...')
def move_right(self):
        print('Moving right...')
class Ejderha(Yaratik):
def Ates_puskurtme(self):
        print('I spewed fire!')
class Zombie(Yaratik):
def Isirmak(self):
        print('I just bit you!')
enemy = Yaratik()
enemy.move_left()
# ejderha also includes all functions from parent class (yaratik)
ejderha = Ejderha()
ejderha.move_left()
ejderha.Ates_puskurtme()
# Zombie is called the (child class), inherits from Yaratik (parent class)
zombie = Zombie()
zombie.move_right()
zombie.Isirmak()
| 3.546875 | 4 |
clustering/graph_utils.py | perathambkk/ml-techniques | 0 | 4233 | """
Author: <NAME>
"""
import numpy as np
import pandas as pd
from sklearn.neighbors import NearestNeighbors
def affinity_graph(X):
'''
This function returns a numpy array.
'''
ni, nd = X.shape
A = np.zeros((ni, ni))
for i in range(ni):
for j in range(i+1, ni):
dist = ((X[i] - X[j])**2).sum() # compute L2 distance
A[i][j] = dist
A[j][i] = dist # by symmetry
return A
def knn_graph(X, knn=4):
'''
This function returns a numpy array.
'''
ni, nd = X.shape
nbrs = NearestNeighbors(n_neighbors=(knn+1), algorithm='ball_tree').fit(X)
distances, indices = nbrs.kneighbors(X)
A = np.zeros((ni, ni))
    for dist, ind in zip(distances, indices):
        i0 = ind[0]
        for i in range(1, knn + 1):
            d = dist[i]
            j = ind[i]  # index of the i-th nearest neighbour (not the loop counter)
            A[i0, j] = d
            A[j, i0] = d  # by symmetry
return A
def sparse_affinity_graph(X):
'''
TODO: This function returns a numpy sparse matrix.
'''
ni, nd = X.shape
A = np.zeros((ni, ni))
for i in range(ni):
for j in range(i+1, ni):
dist = ((X[i] - X[j])**2).sum() # compute L2 distance
A[i][j] = dist
A[j][i] = dist # by symmetry
return A
def laplacian_graph(X, mode='affinity', knn=3, eta=0.01, sigma=2.5):
'''
The unnormalized graph Laplacian, L = D − W.
'''
if mode == 'affinity':
W = affinity_graph(X)
W[abs(W) > eta] = 0
elif mode == 'nearestneighbor':
W = knn_graph(X, knn=knn)
elif mode == 'gaussian':
W = affinity_graph(X)
bandwidth = 2.0*(sigma**2)
W = np.exp(W) / bandwidth
else:
pass
D = np.diag(W.sum(axis=1))
L = D - W
return L
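# --- Illustrative usage sketch (added) ---
# Build a k-nearest-neighbour graph Laplacian for a small random point cloud
# and check two basic properties: the matrix is symmetric and every row sums
# to zero (since L = D - W with D the diagonal of row sums of W).
if __name__ == "__main__":
    rng = np.random.RandomState(0)
    X = rng.rand(10, 2)
    L = laplacian_graph(X, mode='nearestneighbor', knn=3)
    print("symmetric:", np.allclose(L, L.T))
    print("zero row sums:", np.allclose(L.sum(axis=1), 0.0))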
| 3.375 | 3 |
recipe_engine/internal/commands/__init__.py | Acidburn0zzz/luci | 1 | 4234 | <gh_stars>1-10
# Copyright 2019 The LUCI Authors. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0
# that can be found in the LICENSE file.
"""This package houses all subcommands for the recipe engine.
See implementation_details.md for the expectations of the modules in this
directory.
"""
import argparse
import errno
import logging
import os
import pkgutil
import sys
if sys.version_info >= (3, 5):  # we're running python >= 3.5
OS_WALK = os.walk
else:
# From vpython
from scandir import walk as OS_WALK
# pylint: disable=wrong-import-position
from .. import simple_cfg
from ..recipe_deps import RecipeDeps
from ..recipe_module_importer import RecipeModuleImporter
LOG = logging.getLogger(__name__)
# This incantation finds all loadable submodules of ourself. The
# `prefix=__name__` bit is so that these modules get loaded with the correct
# import names, i.e.
#
# recipe_engine.internal.commands.<submodule>
#
# If omitted, then these submodules can get double loaded as both:
#
# <submodule> AND
# recipe_engine.internal.commands.<submodule>
#
# Which can both interfere with the global python module namespace, and lead to
# strange errors when doing type assertions (since all data in these modules
# will be loaded under two different names; classes will fail isinstance checks
# even though they are "the same").
_COMMANDS = [
loader.find_module(module_name).load_module(module_name)
for (loader, module_name, _) in pkgutil.walk_packages(
__path__, prefix=__name__+'.')
if '.' not in module_name[len(__name__)+1:]
]
# Order all commands by an optional __cmd_priority__ field, and then by module
# name.
_COMMANDS.sort(
key=lambda mod: (
not hasattr(mod, '__cmd_priority__'), # modules defining priority first
getattr(mod, '__cmd_priority__', None), # actual priority
mod.__name__ # name
))
# Now actually set these commands on ourself so that 'mock' works correctly.
#
# This is needed to allow some tests (though it may be worth adjusting these
# tests later to not need this. Just delete this function and see which tests
# fail to find the dependencies on this behavior).
def _patch_our_attrs():
self = sys.modules[__name__]
self.__all__ = [mod.__name__[len(__name__)+1:] for mod in _COMMANDS]
for modname, mod in zip(self.__all__, _COMMANDS):
setattr(self, modname, mod)
_patch_our_attrs()
def _check_recipes_cfg_consistency(recipe_deps):
"""Checks all recipe.cfg files for the loaded recipe_deps and logs
inconsistent dependencies.
Args:
recipe_deps (RecipeDeps) - The loaded+fetched recipe deps
for the current run.
"""
actual = recipe_deps.main_repo.simple_cfg.deps
# For every repo we loaded
for repo_name in actual:
required_deps = recipe_deps.repos[repo_name].simple_cfg.deps
for req_repo_name, req_spec in required_deps.iteritems():
# If this depends on something we didn't load, log an error.
if req_repo_name not in actual:
LOG.error(
'%r depends on %r, but your recipes.cfg is missing an '
'entry for this.', repo_name, req_repo_name)
continue
actual_spec = actual[req_repo_name]
if req_spec.revision == actual_spec.revision:
# They match, it's all good.
continue
LOG.warn(
'recipes.cfg depends on %r @ %s, but %r depends on version %s.',
req_repo_name, actual_spec.revision, repo_name, req_spec.revision)
def _cleanup_pyc(recipe_deps):
"""Removes any .pyc files from the recipes/recipe_module directories.
Args:
* recipe_deps (RecipeDeps) - The loaded recipe dependencies.
"""
for repo in recipe_deps.repos.itervalues():
for to_walk in (repo.recipes_dir, repo.modules_dir):
for root, _dirs, files in OS_WALK(to_walk):
for fname in files:
if not fname.endswith('.pyc'):
continue
try:
to_clean = os.path.join(root, fname)
LOG.info('cleaning %r', to_clean)
os.unlink(to_clean)
except OSError as ex:
# If multiple things are cleaning pyc's at the same time this can
# race. Fortunately we only care that SOMETHING deleted the pyc :)
if ex.errno != errno.ENOENT:
raise
def _common_post_process(args):
# TODO(iannucci): We should always do logging.basicConfig() (probably with
# logging.WARNING), even if no verbose is passed. However we need to be
# careful as this could cause issues with spurious/unexpected output.
# Once the recipe engine is on native build.proto, this should be safe to
# do.
if args.verbose > 0:
logging.basicConfig()
logging.getLogger().setLevel(logging.INFO)
if args.verbose > 1:
logging.getLogger().setLevel(logging.DEBUG)
else:
# Prevent spurious "No handlers could be found for ..." stderr messages.
# Once we always set a basicConfig (per TODO above), this can go away as
# well.
logging.root.manager.emittedNoHandlerWarning = True
if args.pid_file:
try:
with open(args.pid_file, 'w') as pid_file:
pid_file.write('%d\n' % os.getpid())
except Exception:
logging.exception("unable to write pidfile")
args.recipe_deps = RecipeDeps.create(
args.main_repo_path,
args.repo_override,
args.proto_override,
)
_check_recipes_cfg_consistency(args.recipe_deps)
# Allows:
# import RECIPE_MODULES.repo_name.module_name.submodule
sys.meta_path = [RecipeModuleImporter(args.recipe_deps)] + sys.meta_path
_cleanup_pyc(args.recipe_deps)
# Remove flags that subcommands shouldn't use; everything from this point on
# should ONLY use args.recipe_deps.
del args.main_repo_path
del args.verbose
del args.repo_override
def _add_common_args(parser):
class _RepoOverrideAction(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
tokens = values.split('=', 2)
if len(tokens) != 2:
raise ValueError('Override must have the form: repo=path')
repo_name, path = tokens
override_dict = getattr(namespace, self.dest)
if repo_name in override_dict:
raise ValueError('An override is already defined for [%s] (%s)' % (
repo_name, override_dict[repo_name]))
path = os.path.abspath(os.path.expanduser(path))
if not os.path.isdir(path):
raise ValueError('Override path [%s] is not a directory' % (path,))
override_dict[repo_name] = path
def _package_to_main_repo(value):
try:
value = os.path.abspath(value)
except Exception as ex: # pylint: disable=broad-except
parser.error(
'--package %r could not be converted to absolute path: %r' % (
value, ex,))
recipes_cfg_rel = simple_cfg.RECIPES_CFG_LOCATION_REL
if not value.endswith(recipes_cfg_rel):
parser.error('--package must end with %r.' % (recipes_cfg_rel,))
# We know the arg ends with 'infra/config/recipes.cfg', so chop those
# elements off the path to get the path to the recipe repo root.
for _ in simple_cfg.RECIPES_CFG_LOCATION_TOKS:
value = os.path.dirname(value)
return value
# TODO(iannucci): change --package to --repo-path and avoid having recipes.py
# pass the path to the recipes.cfg. This is preferable because the location of
# recipes.cfg MUST be discovered for recipe dependencies; the RepoSpec
# protobuf doesn't specify where the recipes.cfg is in the dependency repos
# (nor can it, even if it was dynamic; this would be a nightmare to maintain,
# and the autoroller would need to discover it automatically ANYWAY. If we
# allow it to be relocatable, the engine needs to be able to discover it, in
# which case the minimal information is still 'repo root').
parser.add_argument(
'--package',
dest='main_repo_path', type=_package_to_main_repo, required=True,
help='Path to recipes.cfg of the recipe repo to operate on.')
parser.add_argument(
'--verbose', '-v', action='count',
      help='Increase logging verbosity')
parser.add_argument('-O', '--repo-override', metavar='ID=PATH',
action=_RepoOverrideAction, default={},
help='Override a repo repository path with a local one.')
parser.add_argument('--pid-file', metavar='PATH',
help=(
'Absolute path to a file where the engine should write its pid. '
'Path must be absolute and not exist.'))
def _proto_override_abspath(value):
try:
value = os.path.abspath(value)
except Exception as ex: # pylint: disable=broad-except
parser.error(
'--proto-override %r could not be converted to absolute path: %r' % (
value, ex,))
return value
# Override the location of the folder containing the `PB` module. This should
# only be used for recipe bundles, so we don't bother giving it a shortform
# option, and suppress the option's help to avoid confusing users.
parser.add_argument(
'--proto-override', type=_proto_override_abspath, help=argparse.SUPPRESS)
parser.set_defaults(
postprocess_func=lambda error, args: None,
)
def parse_and_run():
"""Parses the command line and runs the chosen subcommand.
Returns the command's return value (either int or None, suitable as input to
`os._exit`).
"""
parser = argparse.ArgumentParser(
description='Interact with the recipe system.')
_add_common_args(parser)
subp = parser.add_subparsers(dest='command')
for module in _COMMANDS:
description = module.__doc__
helplines = []
for line in description.splitlines():
line = line.strip()
if not line:
break
helplines.append(line)
module.add_arguments(subp.add_parser(
module.__name__.split('.')[-1], # use module's short name
formatter_class=argparse.RawDescriptionHelpFormatter,
help=' '.join(helplines),
description=description,
))
args = parser.parse_args()
_common_post_process(args)
args.postprocess_func(parser.error, args)
return args.func(args)
| 1.976563 | 2 |
openfl/pipelines/stc_pipeline.py | sarthakpati/openfl | 0 | 4235 | <filename>openfl/pipelines/stc_pipeline.py
# Copyright (C) 2020-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
"""STCPipelinemodule."""
import numpy as np
import gzip as gz
from .pipeline import TransformationPipeline, Transformer
class SparsityTransformer(Transformer):
"""A transformer class to sparsify input data."""
def __init__(self, p=0.01):
"""Initialize.
Args:
p (float): sparsity ratio (Default=0.01)
"""
self.lossy = True
self.p = p
def forward(self, data, **kwargs):
"""Sparsify data and pass over only non-sparsified elements by reducing the array size.
Args:
data: an numpy array from the model tensor_dict
Returns:
condensed_data: an numpy array being sparsified.
metadata: dictionary to store a list of meta information.
"""
metadata = {'int_list': list(data.shape)}
# sparsification
data = data.astype(np.float32)
flatten_data = data.flatten()
n_elements = flatten_data.shape[0]
k_op = int(np.ceil(n_elements * self.p))
topk, topk_indices = self._topk_func(flatten_data, k_op)
#
condensed_data = topk
sparse_data = np.zeros(flatten_data.shape)
sparse_data[topk_indices] = topk
nonzero_element_bool_indices = sparse_data != 0.0
metadata['bool_list'] = list(nonzero_element_bool_indices)
return condensed_data, metadata
# return sparse_data, metadata
def backward(self, data, metadata, **kwargs):
"""Recover data array with the right shape and numerical type.
Args:
data: an numpy array with non-zero values.
metadata: dictionary to contain information for recovering back to original data array.
Returns:
recovered_data: an numpy array with original shape.
"""
data = data.astype(np.float32)
data_shape = metadata['int_list']
nonzero_element_bool_indices = list(metadata['bool_list'])
recovered_data = np.zeros(data_shape).reshape(-1).astype(np.float32)
recovered_data[nonzero_element_bool_indices] = data
recovered_data = recovered_data.reshape(data_shape)
return recovered_data
@staticmethod
def _topk_func(x, k):
"""Select top k values.
Args:
x: an numpy array to be sorted out for top-k components.
k: k most maximum values.
Returns:
topk_mag: components with top-k values.
indices: indices of the top-k components.
"""
# quick sort as default on magnitude
idx = np.argsort(np.abs(x))
# sorted order, the right most is the largest magnitude
length = x.shape[0]
start_idx = length - k
# get the top k magnitude
topk_mag = np.asarray(x[idx[start_idx:]])
indices = np.asarray(idx[start_idx:])
if min(topk_mag) - 0 < 10e-8: # avoid zeros
topk_mag = topk_mag + 10e-8
return topk_mag, indices
class TernaryTransformer(Transformer):
"""A transformer class to ternerize input data."""
def __init__(self):
"""Initialize."""
self.lossy = True
def forward(self, data, **kwargs):
"""Ternerize data into positive mean value, negative mean value and zero value.
Args:
data: an flattened numpy array
Returns:
            int_data: a numpy array that has been ternarized.
metadata: dictionary to store a list of meta information.
"""
# ternarization, data is sparse and flattened
mean_topk = np.mean(np.abs(data))
out_ = np.where(data > 0.0, mean_topk, 0.0)
out = np.where(data < 0.0, -mean_topk, out_)
int_array, int2float_map = self._float_to_int(out)
metadata = {'int_to_float': int2float_map}
return int_array, metadata
def backward(self, data, metadata, **kwargs):
"""Recover data array back to the original numerical type.
Args:
data: an numpy array with non-zero values.
Returns:
metadata: dictionary to contain information for recovering back to original data array.
data (return): an numpy array with original numerical type.
"""
# TODO
import copy
data = copy.deepcopy(data)
int2float_map = metadata['int_to_float']
for key in int2float_map:
indices = data == key
data[indices] = int2float_map[key]
return data
@staticmethod
def _float_to_int(np_array):
"""Create look-up table for conversion between floating and integer types.
Args:
np_array:
Returns:
int_array:
int_to_float_map:
"""
flatten_array = np_array.reshape(-1)
unique_value_array = np.unique(flatten_array)
int_array = np.zeros(flatten_array.shape, dtype=np.int)
int_to_float_map = {}
float_to_int_map = {}
# create table
for idx, u_value in enumerate(unique_value_array):
int_to_float_map.update({idx: u_value})
float_to_int_map.update({u_value: idx})
# assign to the integer array
indices = np.where(flatten_array == u_value)
int_array[indices] = idx
int_array = int_array.reshape(np_array.shape)
return int_array, int_to_float_map
class GZIPTransformer(Transformer):
"""A transformer class to losslessly compress data."""
def __init__(self):
"""Initialize."""
self.lossy = False
def forward(self, data, **kwargs):
"""Compress data into numpy of float32.
Args:
data: an numpy array with non-zero values
Returns:
compressed_bytes :
metadata: dictionary to contain information for recovering back to original data array
"""
bytes_ = data.astype(np.float32).tobytes()
compressed_bytes = gz.compress(bytes_)
metadata = {}
return compressed_bytes, metadata
def backward(self, data, metadata, **kwargs):
"""Decompress data into numpy of float32.
Args:
data: an numpy array with non-zero values
metadata: dictionary to contain information for recovering back to original data array
Returns:
data:
"""
decompressed_bytes_ = gz.decompress(data)
data = np.frombuffer(decompressed_bytes_, dtype=np.float32)
return data
class STCPipeline(TransformationPipeline):
"""A pipeline class to compress data lossly using sparsity and ternerization methods."""
def __init__(self, p_sparsity=0.01, n_clusters=6, **kwargs):
"""Initialize a pipeline of transformers.
Args:
p_sparsity (float): Sparsity factor (Default=0.01)
n_cluster (int): Number of K-Means clusters (Default=6)
Returns:
Data compression transformer pipeline object
"""
# instantiate each transformer
self.p = p_sparsity
transformers = [SparsityTransformer(self.p), TernaryTransformer(), GZIPTransformer()]
super(STCPipeline, self).__init__(transformers=transformers, **kwargs)
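# --- Illustrative round-trip sketch (added) ---
# Push a small tensor through the three transformers by hand and undo them in
# reverse order. This mirrors what the surrounding TransformationPipeline is
# expected to do; the compression is lossy, but shape and dtype survive.
if __name__ == "__main__":
    original = np.random.randn(4, 4).astype(np.float32)
    sparsifier = SparsityTransformer(p=0.25)
    ternarizer = TernaryTransformer()
    zipper = GZIPTransformer()
    data, sparse_meta = sparsifier.forward(original)
    data, ternary_meta = ternarizer.forward(data)
    data, _ = zipper.forward(data)
    data = zipper.backward(data, metadata={})
    data = ternarizer.backward(data, ternary_meta)
    restored = sparsifier.backward(data, sparse_meta)
    print(restored.shape == original.shape, restored.dtype)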
| 2.328125 | 2 |
tests/component/test_grid_mixin.py | csdms/pymt | 38 | 4236 | import numpy as np
import pytest
from pytest import approx
from pymt.component.grid import GridMixIn
class Port:
def __init__(self, name, uses=None, provides=None):
self._name = name
self._uses = uses or []
self._provides = provides or []
def get_component_name(self):
return self._name
def get_input_item_count(self):
return len(self._uses)
def get_input_item_list(self):
return self._uses
def get_output_item_count(self):
return len(self._provides)
def get_output_item_list(self):
return self._provides
def test_exchange_items():
class Component(GridMixIn):
def __init__(self):
self._port = Port("test", uses=["invar"], provides=["outvar"])
super().__init__()
c = Component()
assert c.input_items == ["invar"]
assert c.output_items == ["outvar"]
def test_no_exchange_items():
class Component(GridMixIn):
def __init__(self):
self._port = Port("test")
super().__init__()
c = Component()
assert c.input_items == []
assert c.output_items == []
def test_raster_1d():
class RasterPort(Port):
def get_grid_shape(self, grid_id):
return (3,)
def get_grid_spacing(self, grid_id):
return (2.0,)
def get_grid_origin(self, grid_id):
return (3.0,)
class Component(GridMixIn):
def __init__(self):
self._port = RasterPort("test", uses=["invar"])
super().__init__()
c = Component()
assert c.get_x("invar") == approx(np.array([3.0, 5.0, 7.0]))
def test_raster_2d():
class RasterPort(Port):
def get_grid_shape(self, grid_id):
return (2, 3)
def get_grid_spacing(self, grid_id):
return (2.0, 1.0)
def get_grid_origin(self, grid_id):
return (0.0, 0.0)
class Component(GridMixIn):
def __init__(self):
self._port = RasterPort("test-2d", uses=["invar"], provides=["outvar"])
super().__init__()
c = Component()
assert c.name == "test-2d"
assert c.get_grid_type(0) == "RASTER"
assert c.get_x(0) == approx(np.array([[0.0, 1.0, 2.0], [0.0, 1.0, 2.0]]))
assert c.get_y(0) == approx(np.array([[0.0, 0.0, 0.0], [2.0, 2.0, 2.0]]))
assert np.all(c.get_connectivity(0) == np.array([0, 1, 4, 3, 1, 2, 5, 4]))
assert np.all(c.get_offset(0) == np.array([4, 8]))
def test_raster_3d():
class RasterPort(Port):
def get_grid_shape(self, grid_id):
return (2, 2, 3)
def get_grid_spacing(self, grid_id):
return (1.0, 2.0, 1.0)
def get_grid_origin(self, grid_id):
return (0.0, 0.0, 0.0)
class Component(GridMixIn):
def __init__(self):
self._port = RasterPort("test-3d", uses=["invar"])
super().__init__()
c = Component()
assert c.get_x(0) == approx(
np.array(
[[[0.0, 1.0, 2.0], [0.0, 1.0, 2.0]], [[0.0, 1.0, 2.0], [0.0, 1.0, 2.0]]]
)
)
assert c.get_y(0) == approx(
np.array(
[[[0.0, 0.0, 0.0], [2.0, 2.0, 2.0]], [[0.0, 0.0, 0.0], [2.0, 2.0, 2.0]]]
)
)
assert c.get_z(0) == approx(
np.array(
[[[0.0, 0.0, 0.0], [0.0, 0.0, 0.0]], [[1.0, 1.0, 1.0], [1.0, 1.0, 1.0]]]
)
)
def test_rectilinear():
class RectilinearPort(Port):
def get_grid_shape(self, grid_id):
return (2, 3)
def get_grid_x(self, grid_id):
return (0.0, 3.0, 4)
def get_grid_y(self, grid_id):
return (2.0, 7.0)
class Component(GridMixIn):
def __init__(self):
self._port = RectilinearPort("test", uses=["invar"])
super().__init__()
c = Component()
assert c.get_grid_type(0) == "RECTILINEAR"
assert c.get_x(0) == approx(np.array([[0.0, 3.0, 4.0], [0.0, 3.0, 4.0]]))
assert c.get_y(0) == approx(np.array([[2.0, 2.0, 2.0], [7.0, 7.0, 7.0]]))
def test_structured():
class StructuredPort(Port):
def get_grid_shape(self, grid_id):
return (2, 3)
def get_grid_x(self, grid_id):
return np.array([0.0, 1.0, 2.0, 0.0, 1.0, 2.0])
def get_grid_y(self, grid_id):
return np.array([0.0, 1.0, 2.0, 1.0, 2.0, 3.0])
class Component(GridMixIn):
def __init__(self):
self._port = StructuredPort("test", uses=["invar"])
super().__init__()
c = Component()
assert c.get_grid_type(0) == "STRUCTURED"
assert c.get_x(0) == approx(np.array([0.0, 1.0, 2.0, 0.0, 1.0, 2.0]))
assert c.get_y(0) == approx(np.array([0.0, 1.0, 2.0, 1.0, 2.0, 3.0]))
def test_unstructured():
class UnstructuredPort(Port):
def get_grid_x(self, grid_id):
return np.array([0.0, 1.0, 0.0, 1.0, 2.0])
def get_grid_y(self, grid_id):
return np.array([0.0, 0.0, 1.0, 1.0, 0.0])
def get_grid_connectivity(self, grid_id):
return np.array([0, 1, 3, 2, 4, 3, 1])
def get_grid_offset(self, grid_id):
return np.array([4, 7])
class Component(GridMixIn):
def __init__(self):
self._port = UnstructuredPort("test", uses=["invar"])
super().__init__()
c = Component()
assert c.get_grid_type(0) == "UNSTRUCTURED"
assert c.get_x(0) == approx(np.array([0.0, 1.0, 0.0, 1.0, 2.0]))
assert c.get_y(0) == approx(np.array([0.0, 0.0, 1.0, 1.0, 0.0]))
def test_get_grid_shape_is_none():
class UnstructuredPort(Port):
def get_grid_shape(self, grid_id):
return None
def get_grid_x(self, grid_id):
return np.array([0.0, 1.0, 2.0])
class Component(GridMixIn):
def __init__(self):
self._port = UnstructuredPort("test", uses=["invar"])
super().__init__()
c = Component()
assert c.get_grid_type(0) == "UNSTRUCTURED"
def test_get_grid_shape_raises():
class UnstructuredPort(Port):
def get_grid_shape(self, grid_id):
raise NotImplementedError("get_grid_shape")
def get_grid_x(self, grid_id):
return np.array([0.0, 1.0, 2.0])
class Component(GridMixIn):
def __init__(self):
self._port = UnstructuredPort("test", uses=["invar"])
super().__init__()
c = Component()
assert c.get_grid_type(0) == "UNSTRUCTURED"
def test_structured_1d():
class RectilinearPort(Port):
def get_grid_shape(self, grid_id):
return (2, 3)
def get_grid_x(self, grid_id):
return np.array([0.0, 1.0, 2.0])
def get_grid_y(self, grid_id):
raise NotImplementedError("get_grid_y")
def get_grid_z(self, grid_id):
raise NotImplementedError("get_grid_z")
class Component(GridMixIn):
def __init__(self):
self._port = RectilinearPort("test", uses=["invar"])
super().__init__()
c = Component()
assert c.get_grid_type(0) == "RECTILINEAR"
with pytest.raises(IndexError):
c.get_z(0)
| 2.125 | 2 |
scripts/compare.py | SnoozeTime/nes | 1 | 4237 | import sys
def load_log_sp(filename):
    """Parse a CPU trace log into (program counter, stack pointer) pairs.
    Each line is expected to start with a 4-character program counter and to
    contain an 'SP:' field further along.
    """
    data = []
with open(filename) as f:
for line in f.readlines():
tokens = line.split(" ")
spidx = line.find("SP:")
endidx = line.find(' ', spidx)
data.append((line[0:4], line[spidx+3:endidx]))
return data
if __name__ == "__main__":
mylog = sys.argv[1]
correctlog = sys.argv[2]
mylog_sp = load_log_sp(mylog)
correctlog_sp = load_log_sp(correctlog)
for (i, ((nb1, sp1), (nb2, sp2))) in enumerate(zip(mylog_sp, correctlog_sp)):
print('{} {} - {} vs {}'.format(
nb1, nb2, sp1, sp2))
if sp1.lower() != sp2.lower() or int(nb1.lower(),16) != int(nb2.lower(), 16):
break
| 2.796875 | 3 |
tercer_modelo.py | nahuelalmeira/deepLearning | 0 | 4238 | """Exercise 1
Usage:
$ CUDA_VISIBLE_DEVICES=2 python practico_1_train_petfinder.py --dataset_dir ../ --epochs 30 --dropout 0.1 0.1 --hidden_layer_sizes 200 100
To know which GPU to use, you can check it with the command
$ nvidia-smi
"""
import argparse
import os
import mlflow
import pickle
import numpy as np
import pandas as pd
import tensorflow as tf
from sklearn.model_selection import train_test_split
from tensorflow.keras import layers, models
import warnings
warnings.filterwarnings("ignore")
from auxiliary import process_features, load_dataset, build_columns, log_dir_name
TARGET_COL = 'AdoptionSpeed'
def read_args():
parser = argparse.ArgumentParser(
description='Training a MLP on the petfinder dataset')
# Here you have some examples of classifier parameters. You can add
# more arguments or change these if you need to.
parser.add_argument('--experiment_name', type=str, default='Base model',
help='Name of the experiment, used in mlflow.')
parser.add_argument('--dataset_dir', default='../petfinder_dataset', type=str,
help='Directory with the training and test files.')
parser.add_argument('--hidden_layer_sizes', nargs='+', default=[100], type=int,
help='Number of hidden units of each hidden layer.')
parser.add_argument('--epochs', default=50, type=int,
help='Number of epochs to train.')
parser.add_argument('--dropout', nargs='+', default=[0.5], type=float,
help='Dropout ratio for every layer.')
parser.add_argument('--batch_size', type=int, default=32,
help='Number of instances in each batch.')
parser.add_argument('--learning_rate', default=1e-3, type=float,
help='Learning rate.')
args = parser.parse_args()
assert len(args.hidden_layer_sizes) == len(args.dropout)
return args
def print_args(args):
print('-------------------------------------------')
print('PARAMS ------------------------------------')
print('-------------------------------------------')
print('--experiment_name ', args.experiment_name)
print('--dataset_dir ', args.dataset_dir)
print('--epochs ', args.epochs)
print('--hidden_layer_sizes', args.hidden_layer_sizes)
print('--dropout ', args.dropout)
print('--batch_size ', args.batch_size)
print('--learning_rate ', args.learning_rate)
print('-------------------------------------------')
def main():
args = read_args()
print_args(args)
experiment_name = args.experiment_name
batch_size = args.batch_size
learning_rate = args.learning_rate
hidden_layer_sizes = args.hidden_layer_sizes
dropout = args.dropout
epochs = args.epochs
### Output directory
dir_name = log_dir_name(args)
print()
print(dir_name)
print()
output_dir = os.path.join('experiments', experiment_name, dir_name)
if not os.path.exists(output_dir):
os.makedirs(output_dir)
dataset, dev_dataset, test_dataset = load_dataset(args.dataset_dir)
nlabels = dataset[TARGET_COL].unique().shape[0]
columns = [
'Gender', 'Color1', 'Vaccinated', 'Dewormed',
'Breed1',
'Age', 'Fee', 'Quantity']
one_hot_columns, embedded_columns, numeric_columns = build_columns(dataset, columns)
# TODO (optional) put these three types of columns in the same dictionary with "column types"
X_train, y_train = process_features(dataset, one_hot_columns, numeric_columns, embedded_columns)
direct_features_input_shape = (X_train['direct_features'].shape[1],)
X_dev, y_dev = process_features(dev_dataset, one_hot_columns, numeric_columns, embedded_columns)
###########################################################################################################
### TODO: Shuffle train dataset - Done
###########################################################################################################
shuffle_len = X_train['direct_features'].shape[0]
train_ds = tf.data.Dataset.from_tensor_slices((X_train, y_train)).shuffle(shuffle_len).batch(batch_size)
###########################################################################################################
dev_ds = tf.data.Dataset.from_tensor_slices((X_dev, y_dev)).batch(batch_size)
test_ds = tf.data.Dataset.from_tensor_slices(process_features(
test_dataset, one_hot_columns, numeric_columns, embedded_columns, test=True)[0]).batch(batch_size)
###########################################################################################################
### TODO: Build the Keras model - Done
###########################################################################################################
tf.keras.backend.clear_session()
# Add one input and one embedding for each embedded column
embedding_layers = []
inputs = []
for embedded_col, max_value in embedded_columns.items():
input_layer = layers.Input(shape=(1,), name=embedded_col)
inputs.append(input_layer)
# Define the embedding layer
embedding_size = int(max_value / 4)
embedding_layers.append(
tf.squeeze(layers.Embedding(input_dim=max_value, output_dim=embedding_size)(input_layer), axis=-2))
print('Adding embedding of size {} for layer {}'.format(embedding_size, embedded_col))
# Add the direct features already calculated
direct_features_input = layers.Input(shape=direct_features_input_shape, name='direct_features')
inputs.append(direct_features_input)
# Concatenate everything together
features = layers.concatenate(embedding_layers + [direct_features_input])
denses = []
dense1 = layers.Dense(hidden_layer_sizes[0], activation='relu')(features)
denses.append(dense1)
if len(hidden_layer_sizes) > 1:
for hidden_layer_size in hidden_layer_sizes[1:]:
dense = layers.Dense(hidden_layer_size, activation='relu')(denses[-1])
denses.append(dense)
    # Connect the output to the last hidden layer so the extra hidden layers are actually used.
    output_layer = layers.Dense(nlabels, activation='softmax')(denses[-1])
model = models.Model(inputs=inputs, outputs=output_layer)
###########################################################################################################
###########################################################################################################
### TODO: Fit the model - Done
###########################################################################################################
mlflow.set_experiment(experiment_name)
optimizer = tf.keras.optimizers.Adam(learning_rate=learning_rate)
model.compile(loss='categorical_crossentropy', optimizer=optimizer,
metrics=['accuracy'])
logdir = "logs/scalars/" + dir_name
tensorboard_callback = tf.keras.callbacks.TensorBoard(log_dir=logdir)
with mlflow.start_run(nested=True):
# Log model hiperparameters first
mlflow.log_param('hidden_layer_size', hidden_layer_sizes)
mlflow.log_param('dropout', dropout)
mlflow.log_param('embedded_columns', embedded_columns)
mlflow.log_param('one_hot_columns', one_hot_columns)
mlflow.log_param('numeric_columns', numeric_columns) # Not using these yet
mlflow.log_param('epochs', epochs)
mlflow.log_param('batch_size', batch_size)
mlflow.log_param('learning_rate', learning_rate)
# Train
history = model.fit(train_ds, epochs=epochs,
validation_data=dev_ds,
callbacks=[tensorboard_callback])
#######################################################################################################
### TODO: analyze history to see if model converges/overfits
#######################################################################################################
output_csv = os.path.join(output_dir, 'history.pickle')
with open(output_csv, 'bw') as f:
pickle.dump(history.history, f)
#######################################################################################################
#######################################################################################################
### TODO: Evaluate the model, calculating the metrics. - Done
#######################################################################################################
loss, accuracy = model.evaluate(dev_ds)
print("*** Dev loss: {} - accuracy: {}".format(loss, accuracy))
mlflow.log_metric('loss', loss)
mlflow.log_metric('accuracy', accuracy)
predictions = model.predict(test_ds)
#######################################################################################################
#######################################################################################################
### TODO: Convert predictions to classes - Done
#######################################################################################################
prediction_classes = np.argmax(predictions, axis=1)
#######################################################################################################
#######################################################################################################
### TODO: Save the results for submission - Done
#######################################################################################################
output_csv = os.path.join(output_dir, 'submit.csv')
submissions = pd.DataFrame(prediction_classes, columns=[TARGET_COL], index=test_dataset.PID)
submissions.to_csv(output_csv)
#######################################################################################################
###########################################################################################################
print('All operations completed')
if __name__ == '__main__':
main()
| 2.828125 | 3 |
catpy/applications/export.py | catmaid/catpy | 5 | 4239 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from pkg_resources import parse_version
from warnings import warn
from copy import deepcopy
import networkx as nx
from networkx.readwrite import json_graph
from catpy.applications.base import CatmaidClientApplication
NX_VERSION_INFO = parse_version(nx.__version__)._key[1]
err_msg = (
"Tried to treat the edge's source/target fields as indices into the list of nodes, but failed. "
"See issue #26 [1]. "
"Has CATMAID upgraded to networkx 2.x? [2]\n\n"
"[1]: https://github.com/catmaid/catpy/issues/26\n"
"[2]: https://github.com/catmaid/CATMAID/blob/master/django/requirements.txt"
)
def convert_nodelink_data(jso):
"""NetworkX serialises graphs differently in v1.x and v2.x.
This converts v1-style data (as emitted by CATMAID) to v2-style data.
See issue #26 https://github.com/catmaid/catpy/issues/26
Parameters
----------
jso : dict
Returns
-------
dict
"""
if NX_VERSION_INFO < (2, 0):
warn(
"You are converting networkx v1-style JSON (emitted by CATMAID) to v2-style JSON,"
" but you are using networkx v1"
)
out = deepcopy(jso)
for edge in out["links"]:
for label in ["source", "target"]:
try:
edge[label] = out["nodes"][edge[label]]["id"]
except (KeyError, IndexError):
raise RuntimeError(err_msg)
return out
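# Hedged usage sketch: the node IDs below are made up, but show how v1-style
# "links" indices become real node IDs after conversion.
#
#     v1 = {"directed": True,
#           "nodes": [{"id": 10}, {"id": 20}],
#           "links": [{"source": 0, "target": 1}]}
#     v2 = convert_nodelink_data(v1)
#     assert v2["links"][0] == {"source": 10, "target": 20}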
class ExportWidget(CatmaidClientApplication):
def get_swc(self, skeleton_id, linearize_ids=False):
"""
Get a single skeleton in SWC format.
Parameters
----------
skeleton_id : int or str
linearize_ids : bool
Returns
-------
str
"""
return self.get(
(self.project_id, "skeleton", skeleton_id, "swc"),
{"linearize_ids": "true" if linearize_ids else "false"},
)
def get_connector_archive(self, *args, **kwargs):
"""Not implemented: requires an async job"""
raise NotImplementedError("Requires an async job")
def get_treenode_archive(self, *args, **kwargs):
"""Not implemented: requires an async job"""
raise NotImplementedError("Requires an async job")
def get_networkx_dict(self, *skeleton_ids):
"""
Get the data for a networkx graph of the given skeletons in node-link format.
In networkx 1.x, as used by CATMAID and therefore returned by this method,
"source" and "target" in the dicts in "links" refer to nodes by their indices in the "nodes" array.
See ``convert_nodelink_data`` function to convert into networkx 2.x-compatible format.
https://networkx.readthedocs.io/en/networkx-1.11/reference/generated/networkx.readwrite.json_graph.node_link_data.html
Parameters
----------
skeleton_ids : array-like of (int or str)
Returns
-------
dict
"""
return self.post(
(self.project_id, "graphexport", "json"),
data={"skeleton_list": list(skeleton_ids)},
)
def get_networkx(self, *skeleton_ids):
"""
Get a networkx MultiDiGraph of the given skeletons.
Parameters
----------
skeleton_ids : array-like of (int or str)
Returns
-------
networkx.MultiDiGraph
"""
data = self.get_networkx_dict(*skeleton_ids)
if NX_VERSION_INFO >= (2, 0):
data = convert_nodelink_data(data)
return json_graph.node_link_graph(data, directed=True)
def get_neuroml(self, skeleton_ids, skeleton_inputs=tuple()):
"""
Get NeuroML v1.8.1 (level 3, NetworkML) for the given skeletons, possibly with their input synapses
constrained to another set of skeletons.
N.B. If len(skeleton_ids) > 1, skeleton_inputs will be ignored and only synapses within the first skeleton
set will be used in the model.
Parameters
----------
skeleton_ids : array-like
Skeletons whose NeuroML to return
skeleton_inputs : array-like, optional
If specified, only input synapses from these skeletons will be added to the NeuroML
Returns
-------
str
NeuroML output string
"""
data = {"skids": list(skeleton_ids)}
if skeleton_inputs:
if len(skeleton_ids) > 1:
warn(
"More than one skeleton ID was selected: ignoring skeleton input constraints"
)
else:
data["inputs"] = list(skeleton_inputs)
return self.post((self.project_id, "neuroml", "neuroml_level3_v181"), data=data)
def get_treenode_and_connector_geometry(self, *skeleton_ids):
"""
Get the treenode and connector information for the given skeletons. The returned dictionary will be of the form
{
"skeletons": {
skeleton_id1: {
"treenodes": {
treenode_id1: {
"location": [x, y, z],
"parent_id": id_of_parent_treenode
},
treenode_id2: ...
},
"connectors": {
connector_id1: {
"location": [x, y, z],
"presynaptic_to": [list, of, treenode, ids],
"postsynaptic_to": [list, of, treenode, ids]
},
connector_id2: ...
}
},
skeleton_id2: ...
}
}
Parameters
----------
skeleton_ids : array-like of (int or str)
Returns
-------
dict
"""
# todo: factor API call into MorphologyFetcher
skeletons = dict()
warnings = set()
relation_names = {0: "presnaptic_to", 1: "postsynaptic_to"}
for skeleton_id in skeleton_ids:
data = self.get(
"{}/{}/1/0/compact-skeleton".format(self.project_id, skeleton_id)
)
skeleton = {"treenodes": dict(), "connectors": dict()}
for treenode in data[0]:
skeleton["treenodes"][int(treenode[0])] = {
"location": treenode[3:6],
"parent_id": None if treenode[1] is None else int(treenode[1]),
}
for connector in data[1]:
# NOT the database relation ID
# {pre: 0, post: 1, gj: 2}
relation_number = connector[2]
                if relation_number not in relation_names:
                    warnings.add(
                        "unknown relation ID {} on connector {}".format(
                            relation_number, connector[1]
                        )
                    )
                    continue
conn_id = int(connector[1])
if conn_id not in skeleton["connectors"]:
skeleton["connectors"][conn_id] = {
rn: [] for rn in relation_names.values()
}
skeleton["connectors"][conn_id]["location"] = connector[3:6]
skeleton["connectors"][conn_id][relation_names[relation_number]].append(
connector[0]
)
skeletons[int(skeleton_id)] = skeleton
        if warnings:
            warn(
                "Skeleton representations contained some unknown treenode->connector relation IDs:\n\t"
                + "\n\t".join(sorted(warnings))
            )
return {"skeletons": skeletons}
| 2.25 | 2 |
packages/watchmen-data-kernel/src/watchmen_data_kernel/meta/external_writer_service.py | Indexical-Metrics-Measure-Advisory/watchmen | 0 | 4240 | from typing import Optional
from watchmen_auth import PrincipalService
from watchmen_data_kernel.cache import CacheService
from watchmen_data_kernel.common import DataKernelException
from watchmen_data_kernel.external_writer import find_external_writer_create, register_external_writer_creator
from watchmen_meta.common import ask_meta_storage, ask_snowflake_generator
from watchmen_meta.system import ExternalWriterService as ExternalWriterStorageService
from watchmen_model.common import ExternalWriterId
from watchmen_model.system import ExternalWriter
def register_external_writer(external_writer: ExternalWriter) -> None:
create = find_external_writer_create(external_writer.type)
if create is None:
raise DataKernelException(f'Creator not found for external writer[{external_writer.dict()}].')
register_external_writer_creator(external_writer.writerCode, create())
class ExternalWriterService:
def __init__(self, principal_service: PrincipalService):
self.principalService = principal_service
def find_by_id(self, writer_id: ExternalWriterId) -> Optional[ExternalWriter]:
external_writer = CacheService.external_writer().get(writer_id)
if external_writer is not None:
if external_writer.tenantId != self.principalService.get_tenant_id():
raise DataKernelException(
f'External writer[id={writer_id}] not belongs to '
f'current tenant[id={self.principalService.get_tenant_id()}].')
register_external_writer(external_writer)
return external_writer
storage_service = ExternalWriterStorageService(
ask_meta_storage(), ask_snowflake_generator(), self.principalService)
storage_service.begin_transaction()
try:
# noinspection PyTypeChecker
external_writer: ExternalWriter = storage_service.find_by_id(writer_id)
if external_writer is None:
return None
CacheService.external_writer().put(external_writer)
register_external_writer(external_writer)
return external_writer
finally:
storage_service.close_transaction()
| 1.875 | 2 |
udemy-python/mediaponderada.py | AlbertoAlfredo/exercicios-cursos | 1 | 4241 | <reponame>AlbertoAlfredo/exercicios-cursos
nota1 = float(input('Enter the first grade: '))
peso1 = float(input('Enter the weight of the first grade: '))
nota2 = float(input('Enter the second grade: '))
peso2 = float(input('Enter the weight of the second grade: '))
# Weighted average: multiply each grade by its weight and divide by the total weight.
media = (nota1 * peso1 + nota2 * peso2) / (peso1 + peso2)
print('The weighted average of the two grades is:', media)
| 3.6875 | 4 |
scrywarden/module.py | chasebrewsky/scrywarden | 1 | 4242 | <reponame>chasebrewsky/scrywarden
from importlib import import_module
from typing import Any
def import_string(path: str) -> Any:
"""Imports a dotted path name and returns the class/attribute.
Parameters
----------
path: str
Dotted module path to retrieve.
Returns
-------
Class/attribute at the given import path.
Raises
------
ImportError
If the path does not exist.
"""
try:
module_path, class_name = path.rsplit('.', 1)
except ValueError as error:
raise ImportError(
f"{path} does not look like a module path",
) from error
module = import_module(module_path)
try:
return getattr(module, class_name)
except AttributeError as error:
raise ImportError(
f"Module '{module_path}' does not define a '{class_name}' "
"attribute/class",
) from error
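# Hedged usage sketch: "collections.OrderedDict" is only an illustrative dotted
# path; scrywarden itself does not depend on it.
if __name__ == "__main__":
    ordered_dict = import_string("collections.OrderedDict")
    assert ordered_dict.__name__ == "OrderedDict"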
| 2.65625 | 3 |
release/scripts/modules/bl_i18n_utils/utils_spell_check.py | dvgd/blender | 0 | 4243 | # ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
import enchant
import os
import pickle
import re
class SpellChecker:
"""
A basic spell checker.
"""
# These must be all lower case for comparisons
uimsgs = {
# OK words
"adaptively", "adaptivity",
"aren", # aren't
"betweens", # yuck! in-betweens!
"boolean", "booleans",
"chamfer",
"couldn", # couldn't
"decrement",
"derivate",
"deterministically",
"doesn", # doesn't
"duplications",
"effector",
"equi", # equi-angular, etc.
"fader",
"globbing",
"hasn", # hasn't
"hetero",
"hoc", # ad-hoc
"incompressible",
"indices",
"instantiation",
"iridas",
"isn", # isn't
"iterable",
"kyrgyz",
"latin",
"merchantability",
"mplayer",
"ons", # add-ons
"pong", # ping pong
"scalable",
"shadeless",
"shouldn", # shouldn't
"smoothen",
"spacings",
"teleport", "teleporting",
"vertices",
"wasn", # wasn't
# Merged words
"antialiasing", "antialias",
"arcsine", "arccosine", "arctangent",
"autoclip",
"autocomplete",
"autoexec",
"autoexecution",
"autogenerated",
"autolock",
"automasking",
"autoname",
"autopack",
"autosave",
"autoscale",
"autosmooth",
"autosplit",
"backface", "backfacing",
"backimage",
"backscattered",
"bandnoise",
"bindcode",
"bitdepth",
"bitflag", "bitflags",
"bitrate",
"blackbody",
"blendfile",
"blendin",
"bonesize",
"boundbox",
"boxpack",
"buffersize",
"builtin", "builtins",
"bytecode",
"chunksize",
"customdata",
"dataset", "datasets",
"de",
"deadzone",
"deconstruct",
"defocus",
"denoise", "denoised", "denoising", "denoiser",
"deselect", "deselecting", "deselection",
"despill", "despilling",
"dirtree",
"editcurve",
"editmesh",
"filebrowser",
"filelist",
"filename", "filenames",
"filepath", "filepaths",
"forcefield", "forcefields",
"fulldome", "fulldomes",
"fullscreen",
"gridline",
"hardlight",
"hemi",
"hostname",
"inbetween",
"inscatter", "inscattering",
"libdata",
"lightprobe", "lightprobes",
"lightless",
"lineset",
"linestyle", "linestyles",
"localview",
"lookup", "lookups",
"mathutils",
"micropolygon",
"midlevel",
"midground",
"mixdown",
"multi",
"multifractal",
"multiframe",
"multilayer",
"multipaint",
"multires", "multiresolution",
"multisampling",
"multiscatter",
"multitexture",
"multithreaded",
"multiuser",
"multiview",
"namespace",
"nodetree", "nodetrees",
"keyconfig",
"offscreen",
"online",
"playhead",
"popup", "popups",
"pre",
"precache", "precaching",
"precalculate",
"precomputing",
"prefetch",
"premultiply", "premultiplied",
"prepass",
"prepend",
"preprocess", "preprocessing",
"preseek",
"promillage",
"pushdown",
"raytree",
"readonly",
"realtime",
"reinject", "reinjected",
"rekey",
"remesh",
"reprojection", "reproject", "reprojecting",
"resize",
"restpose",
"retarget", "retargets", "retargeting", "retargeted",
"retiming",
"rigidbody",
"ringnoise",
"rolloff",
"runtime",
"scanline",
"screenshot", "screenshots",
"seekability",
"selfcollision",
"shadowbuffer", "shadowbuffers",
"singletexture",
"spellcheck", "spellchecking",
"startup",
"stateful",
"starfield",
"studiolight",
"subflare", "subflares",
"subframe", "subframes",
"subclass", "subclasses", "subclassing",
"subdirectory", "subdirectories", "subdir", "subdirs",
"subitem",
"submode",
"submodule", "submodules",
"subpath",
"subsize",
"substep", "substeps",
"targetless",
"textbox", "textboxes",
"tilemode",
"timestamp", "timestamps",
"timestep", "timesteps",
"todo",
"tradeoff",
"un",
"unassociate", "unassociated",
"unbake",
"unclosed",
"uncomment",
"unculled",
"undeformed",
"undistort", "undistorted", "undistortion",
"ungroup", "ungrouped",
"unhide",
"unindent",
"unkeyed",
"unlink", "unlinked",
"unmute",
"unphysical",
"unpremultiply",
"unprojected",
"unprotect",
"unreacted",
"unreferenced",
"unregister",
"unselect", "unselected", "unselectable",
"unsets",
"unshadowed",
"unspill",
"unstitchable", "unstitch",
"unsubdivided", "unsubdivide",
"untrusted",
"vectorscope",
"whitespace", "whitespaces",
"worldspace",
"workflow",
"workspace", "workspaces",
# Neologisms, slangs
"affectable",
"animatable",
"automagic", "automagically",
"blobby",
"blockiness", "blocky",
"collider", "colliders",
"deformer", "deformers",
"determinator",
"editability",
"effectors",
"expander",
"instancer",
"keyer",
"lacunarity",
"linkable",
"numerics",
"occluder", "occluders",
"overridable",
"passepartout",
"perspectively",
"pixelate",
"pointiness",
"polycount",
"polygonization", "polygonalization", # yuck!
"scalings",
"selectable", "selectability",
"shaper",
"smoothen", "smoothening",
"spherize", "spherized",
"stitchable",
"symmetrize",
"trackability",
"transmissivity",
"rasterized", "rasterization", "rasterizer",
"renderer", "renderers", "renderable", "renderability",
# Really bad!!!
"convertor",
"fullscr",
# Abbreviations
"aero",
"amb",
"anim",
"aov",
"app",
"bbox", "bboxes",
"bksp", # Backspace
"bool",
"calc",
"cfl",
"config", "configs",
"const",
"coord", "coords",
"degr",
"diff",
"dof",
"dupli", "duplis",
"eg",
"esc",
"expr",
"fac",
"fra",
"fract",
"frs",
"grless",
"http",
"init",
"irr", # Irradiance
"kbit", "kb",
"lang", "langs",
"lclick", "rclick",
"lensdist",
"loc", "rot", "pos",
"lorem",
"luma",
"mbs", # mouse button 'select'.
"mem",
"multicam",
"num",
"ok",
"orco",
"ortho",
"pano",
"persp",
"pref", "prefs",
"prev",
"param",
"premul",
"quad", "quads",
"quat", "quats",
"recalc", "recalcs",
"refl",
"sce",
"sel",
"spec",
"struct", "structs",
"subdiv",
"sys",
"tex",
"texcoord",
"tmr", # timer
"tri", "tris",
"udim", "udims",
"upres", # Upresolution
"usd",
"uv", "uvs", "uvw", "uw", "uvmap",
"ve",
"vec",
"vel", # velocity!
"vert", "verts",
"vis",
"vram",
"xor",
"xyz", "xzy", "yxz", "yzx", "zxy", "zyx",
"xy", "xz", "yx", "yz", "zx", "zy",
# General computer/science terms
"affine",
"albedo",
"anamorphic",
"anisotropic", "anisotropy",
"bitangent",
"boid", "boids",
"ceil",
"compressibility",
"curvilinear",
"equiangular",
"equisolid",
"euler", "eulers",
"fribidi",
"gettext",
"hashable",
"hotspot",
"interocular",
"intrinsics",
"irradiance",
"isosurface",
"jitter", "jittering", "jittered",
"keymap", "keymaps",
"lambertian",
"laplacian",
"metadata",
"msgfmt",
"nand", "xnor",
"normals",
"numpad",
"octahedral",
"octree",
"omnidirectional",
"opengl",
"openmp",
"parametrization",
"photoreceptor",
"poly",
"polyline", "polylines",
"probabilistically",
"pulldown", "pulldowns",
"quantized",
"quartic",
"quaternion", "quaternions",
"quintic",
"samplerate",
"sawtooth",
"scrollback",
"scrollbar",
"scroller",
"searchable",
"spacebar",
"subtractive",
"superellipse",
"tooltip", "tooltips",
"trackpad",
"tuple",
"unicode",
"viewport", "viewports",
"viscoelastic",
"vorticity",
"waveform", "waveforms",
"wildcard", "wildcards",
"wintab", # Some Windows tablet API
# General computer graphics terms
"anaglyph",
"bezier", "beziers",
"bicubic",
"bilinear",
"bindpose",
"binormal",
"blackpoint", "whitepoint",
"blinn",
"bokeh",
"catadioptric",
"centroid",
"chroma",
"chrominance",
"clearcoat",
"codec", "codecs",
"collada",
"compositing",
"crossfade",
"cubemap", "cubemaps",
"cuda",
"deinterlace",
"dropoff",
"duotone",
"dv",
"eigenvectors",
"emissive",
"equirectangular",
"fisheye",
"framerate",
"gimbal",
"grayscale",
"icosphere",
"inpaint",
"kerning",
"lightmap",
"linearlight",
"lossless", "lossy",
"luminance",
"mantaflow",
"matcap",
"midtones",
"mipmap", "mipmaps", "mip",
"ngon", "ngons",
"ntsc",
"nurb", "nurbs",
"perlin",
"phong",
"pinlight",
"qi",
"radiosity",
"raycasting",
"raytrace", "raytracing", "raytraced",
"refractions",
"remesher", "remeshing", "remesh",
"renderfarm",
"scanfill",
"shader", "shaders",
"shadowmap", "shadowmaps",
"softlight",
"specular", "specularity",
"spillmap",
"sobel",
"stereoscopy",
"texel",
"timecode",
"tonemap",
"toon",
"transmissive",
"vividlight",
"volumetrics",
"voronoi",
"voxel", "voxels",
"vsync",
"wireframe",
"zmask",
"ztransp",
# Blender terms
"audaspace",
"azone", # action zone
"backwire",
"bbone",
"bendy", # bones
"bmesh",
"breakdowner",
"bspline",
"bweight",
"colorband",
"datablock", "datablocks",
"despeckle",
"depsgraph",
"dopesheet",
"dupliface", "duplifaces",
"dupliframe", "dupliframes",
"dupliobject", "dupliob",
"dupligroup",
"duplivert",
"dyntopo",
"editbone",
"editmode",
"eevee",
"fcurve", "fcurves",
"fedge", "fedges",
"filmic",
"fluidsim",
"freestyle",
"enum", "enums",
"gizmogroup",
"gons", # N-Gons
"gpencil",
"idcol",
"keyframe", "keyframes", "keyframing", "keyframed",
"lookdev",
"luminocity",
"mathvis",
"metaball", "metaballs", "mball",
"metaelement", "metaelements",
"metastrip", "metastrips",
"movieclip",
"mpoly",
"mtex",
"nabla",
"navmesh",
"outliner",
"overscan",
"paintmap", "paintmaps",
"polygroup", "polygroups",
"poselib",
"pushpull",
"pyconstraint", "pyconstraints",
"qe", # keys...
"shaderfx", "shaderfxs",
"shapekey", "shapekeys",
"shrinkfatten",
"shrinkwrap",
"softbody",
"stucci",
"subdiv",
"subtype",
"sunsky",
"tessface", "tessfaces",
"texface",
"timeline", "timelines",
"tosphere",
"uilist",
"userpref",
"vcol", "vcols",
"vgroup", "vgroups",
"vinterlace",
"vse",
"wasd", "wasdqe", # keys...
"wetmap", "wetmaps",
"wpaint",
"uvwarp",
# UOC (Ugly Operator Categories)
"cachefile",
"paintcurve",
"ptcache",
"dpaint",
# Algorithm/library names
"ashikhmin", # Ashikhmin-Shirley
"arsloe", # Texel-Marsen-Arsloe
"beckmann",
"blackman", # Blackman-Harris
"blosc",
"burley", # Christensen-Burley
"catmull",
"catrom",
"chebychev",
"courant",
"cryptomatte", "crypto",
"embree",
"hosek",
"kutta",
"lennard",
"marsen", # Texel-Marsen-Arsloe
"mikktspace",
"minkowski",
"minnaert",
"moskowitz", # Pierson-Moskowitz
"musgrave",
"nayar",
"netravali",
"nishita",
"ogawa",
"oren",
"peucker", # Ramer-Douglas-Peucker
"pierson", # Pierson-Moskowitz
"preetham",
"prewitt",
"ramer", # Ramer-Douglas-Peucker
"runge",
"sobol",
"verlet",
"wilkie",
"worley",
# Acronyms
"aa", "msaa",
"ao",
"api",
"asc", "cdl",
"ascii",
"atrac",
"avx",
"bsdf",
"bssrdf",
"bw",
"ccd",
"cmd",
"cmos",
"cpus",
"ctrl",
"cw", "ccw",
"dev",
"djv",
"dpi",
"dvar",
"dx",
"eo",
"fh",
"fk",
"fov",
"fft",
"futura",
"fx",
"gfx",
"ggx",
"gl",
"glsl",
"gpl",
"gpu", "gpus",
"hc",
"hdc",
"hdr", "hdri", "hdris",
"hh", "mm", "ss", "ff", # hh:mm:ss:ff timecode
"hsv", "hsva", "hsl",
"id",
"ies",
"ior",
"itu",
"jonswap",
"lhs",
"lmb", "mmb", "rmb",
"kb",
"mocap",
"msgid", "msgids",
"mux",
"ndof",
"ppc",
"precisa",
"px",
"qmc",
"rdp",
"rgb", "rgba",
"rhs",
"rv",
"sdl",
"sl",
"smpte",
"ssao",
"ssr",
"svn",
"tma",
"ui",
"unix",
"vbo", "vbos",
"vr",
"wxyz",
"xr",
"ycc", "ycca",
"yrgb",
"yuv", "yuva",
# Blender acronyms
"bli",
"bpy",
"bvh",
"dbvt",
"dop", # BLI K-Dop BVH
"ik",
"nla",
"py",
"qbvh",
"rna",
"rvo",
"simd",
"sph",
"svbvh",
# Files types/formats
"avi",
"attrac",
"autocad",
"autodesk",
"bmp",
"btx",
"cineon",
"dpx",
"dwaa",
"dwab",
"dxf",
"eps",
"exr",
"fbx",
"fbxnode",
"ffmpeg",
"flac",
"gltf",
"gzip",
"ico",
"jpg", "jpeg", "jpegs",
"json",
"matroska",
"mdd",
"mkv",
"mpeg", "mjpeg",
"mtl",
"ogg",
"openjpeg",
"osl",
"oso",
"piz",
"png", "pngs",
"po",
"quicktime",
"rle",
"sgi",
"stl",
"svg",
"targa", "tga",
"tiff",
"theora",
"vorbis",
"vp9",
"wav",
"webm",
"xiph",
"xml",
"xna",
"xvid",
}
_valid_before = "(?<=[\\s*'\"`])|(?<=[a-zA-Z][/-])|(?<=^)"
_valid_after = "(?=[\\s'\"`.!?,;:])|(?=[/-]\\s*[a-zA-Z])|(?=$)"
_valid_words = "(?:{})(?:(?:[A-Z]+[a-z]*)|[A-Z]*|[a-z]*)(?:{})".format(_valid_before, _valid_after)
_split_words = re.compile(_valid_words).findall
@classmethod
def split_words(cls, text):
return [w for w in cls._split_words(text) if w]
def __init__(self, settings, lang="en_US"):
self.settings = settings
self.dict_spelling = enchant.Dict(lang)
self.cache = set(self.uimsgs)
cache = self.settings.SPELL_CACHE
if cache and os.path.exists(cache):
with open(cache, 'rb') as f:
self.cache |= set(pickle.load(f))
def __del__(self):
cache = self.settings.SPELL_CACHE
if cache and os.path.exists(cache):
with open(cache, 'wb') as f:
pickle.dump(self.cache, f)
def check(self, txt):
ret = []
if txt in self.cache:
return ret
for w in self.split_words(txt):
w_lower = w.lower()
if w_lower in self.cache:
continue
if not self.dict_spelling.check(w):
ret.append((w, self.dict_spelling.suggest(w)))
else:
self.cache.add(w_lower)
if not ret:
self.cache.add(txt)
return ret
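# Hedged usage sketch: `_Settings` is a stand-in for the real bl_i18n_utils
# settings object, which only needs to expose a SPELL_CACHE path (or None).
#
#     class _Settings:
#         SPELL_CACHE = None
#
#     checker = SpellChecker(_Settings())
#     for word, suggestions in checker.check("Renderr the viewport"):
#         print(word, suggestions)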
| 2.171875 | 2 |
naslib/predictors/mlp.py | gmeyerlee/NASLib | 0 | 4244 | <gh_stars>0
import numpy as np
import os
import json
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torch.utils.data import DataLoader, TensorDataset
from naslib.utils.utils import AverageMeterGroup
from naslib.predictors.utils.encodings import encode
from naslib.predictors import Predictor
# NOTE: faster on CPU
device = torch.device("cpu")
print("device:", device)
def accuracy_mse(prediction, target, scale=100.0):
prediction = prediction.detach() * scale
target = (target) * scale
return F.mse_loss(prediction, target)
class FeedforwardNet(nn.Module):
def __init__(
self,
input_dims: int = 5,
num_layers: int = 3,
layer_width: list = [10, 10, 10],
output_dims: int = 1,
activation="relu",
):
super(FeedforwardNet, self).__init__()
assert (
len(layer_width) == num_layers
), "number of widths should be \
equal to the number of layers"
self.activation = eval("F." + activation)
all_units = [input_dims] + layer_width
self.layers = nn.ModuleList(
[nn.Linear(all_units[i], all_units[i + 1]) for i in range(num_layers)]
)
self.out = nn.Linear(all_units[-1], 1)
# make the init similar to the tf.keras version
for l in self.layers:
torch.nn.init.xavier_uniform_(l.weight)
torch.nn.init.zeros_(l.bias)
torch.nn.init.xavier_uniform_(self.out.weight)
torch.nn.init.zeros_(self.out.bias)
def forward(self, x):
for layer in self.layers:
x = self.activation(layer(x))
return self.out(x)
def basis_funcs(self, x):
for layer in self.layers:
x = self.activation(layer(x))
return x
class MLPPredictor(Predictor):
def __init__(
self,
encoding_type="adjacency_one_hot",
ss_type="nasbench201",
hpo_wrapper=False,
hparams_from_file=False
):
self.encoding_type = encoding_type
self.ss_type = ss_type
self.hpo_wrapper = hpo_wrapper
self.default_hyperparams = {
"num_layers": 20,
"layer_width": 20,
"batch_size": 32,
"lr": 0.001,
"regularization": 0.2,
}
self.hyperparams = None
self.hparams_from_file = hparams_from_file
def get_model(self, **kwargs):
predictor = FeedforwardNet(**kwargs)
return predictor
def fit(self, xtrain, ytrain, train_info=None, epochs=500, loss="mae", verbose=0):
if self.hparams_from_file and self.hparams_from_file not in ['False', 'None'] \
and os.path.exists(self.hparams_from_file):
self.hyperparams = json.load(open(self.hparams_from_file, 'rb'))['mlp']
print('loaded hyperparams from', self.hparams_from_file)
elif self.hyperparams is None:
self.hyperparams = self.default_hyperparams.copy()
num_layers = self.hyperparams["num_layers"]
layer_width = self.hyperparams["layer_width"]
batch_size = self.hyperparams["batch_size"]
lr = self.hyperparams["lr"]
regularization = self.hyperparams["regularization"]
self.mean = np.mean(ytrain)
self.std = np.std(ytrain)
if self.encoding_type is not None:
_xtrain = np.array(
[
encode(arch, encoding_type=self.encoding_type, ss_type=self.ss_type)
for arch in xtrain
]
)
else:
_xtrain = xtrain
_ytrain = np.array(ytrain)
X_tensor = torch.FloatTensor(_xtrain).to(device)
y_tensor = torch.FloatTensor(_ytrain).to(device)
train_data = TensorDataset(X_tensor, y_tensor)
data_loader = DataLoader(
train_data,
batch_size=batch_size,
shuffle=True,
drop_last=False,
pin_memory=False,
)
self.model = self.get_model(
input_dims=_xtrain.shape[1],
num_layers=num_layers,
layer_width=num_layers * [layer_width],
)
self.model.to(device)
optimizer = optim.Adam(self.model.parameters(), lr=lr, betas=(0.9, 0.99))
if loss == "mse":
criterion = nn.MSELoss().to(device)
elif loss == "mae":
criterion = nn.L1Loss().to(device)
self.model.train()
for e in range(epochs):
meters = AverageMeterGroup()
for b, batch in enumerate(data_loader):
optimizer.zero_grad()
input = batch[0].to(device)
target = batch[1].to(device)
prediction = self.model(input).view(-1)
loss_fn = criterion(prediction, target)
# add L1 regularization
params = torch.cat(
[
x[1].view(-1)
for x in self.model.named_parameters()
if x[0] == "out.weight"
]
)
loss_fn += regularization * torch.norm(params, 1)
loss_fn.backward()
optimizer.step()
mse = accuracy_mse(prediction, target)
meters.update(
{"loss": loss_fn.item(), "mse": mse.item()}, n=target.size(0)
)
if verbose and e % 100 == 0:
print("Epoch {}, {}, {}".format(e, meters["loss"], meters["mse"]))
train_pred = np.squeeze(self.query(xtrain))
train_error = np.mean(abs(train_pred - ytrain))
return train_error
def query(self, xtest, info=None, eval_batch_size=None):
if self.encoding_type is not None:
xtest = np.array(
[
encode(arch, encoding_type=self.encoding_type, ss_type=self.ss_type)
for arch in xtest
]
)
X_tensor = torch.FloatTensor(xtest).to(device)
test_data = TensorDataset(X_tensor)
eval_batch_size = len(xtest) if eval_batch_size is None else eval_batch_size
test_data_loader = DataLoader(
test_data, batch_size=eval_batch_size, pin_memory=False
)
self.model.eval()
pred = []
with torch.no_grad():
for _, batch in enumerate(test_data_loader):
prediction = self.model(batch[0].to(device)).view(-1)
pred.append(prediction.cpu().numpy())
pred = np.concatenate(pred)
return np.squeeze(pred)
def set_random_hyperparams(self):
if self.hyperparams is None:
params = self.default_hyperparams.copy()
else:
params = {
"num_layers": int(np.random.choice(range(5, 25))),
"layer_width": int(np.random.choice(range(5, 25))),
"batch_size": 32,
"lr": np.random.choice([0.1, 0.01, 0.005, 0.001, 0.0001]),
"regularization": 0.2,
}
self.hyperparams = params
return params
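# --- Hedged usage sketch (editor addition) ----------------------------------
# Fits the predictor on random feature vectors instead of encoded NAS
# architectures (encoding_type=None bypasses the encoder), so this only
# illustrates the fit()/query() contract, not a real search-space benchmark.
if __name__ == "__main__":
    rng = np.random.RandomState(0)
    xtrain, ytrain = rng.rand(32, 16), rng.rand(32)
    predictor = MLPPredictor(encoding_type=None)
    train_error = predictor.fit(xtrain, ytrain, epochs=10)
    print("train MAE:", train_error)
    print("predictions:", predictor.query(rng.rand(4, 16)))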
| 2.390625 | 2 |
pythonforandroid/recipes/libx264/__init__.py | Joreshic/python-for-android | 1 | 4245 | from pythonforandroid.toolchain import Recipe, shprint, current_directory, ArchARM
from os.path import exists, join, realpath
from os import uname
import glob
import sh
class LibX264Recipe(Recipe):
version = 'x264-snapshot-20170608-2245-stable' # using mirror url since can't use ftp
url = 'http://mirror.yandex.ru/mirrors/ftp.videolan.org/x264/snapshots/{version}.tar.bz2'
md5sum = 'adf3b87f759b5cc9f100f8cf99276f77'
def should_build(self, arch):
build_dir = self.get_build_dir(arch.arch)
return not exists(join(build_dir, 'lib', 'libx264.a'))
def build_arch(self, arch):
with current_directory(self.get_build_dir(arch.arch)):
env = self.get_recipe_env(arch)
configure = sh.Command('./configure')
shprint(configure,
'--cross-prefix=arm-linux-androideabi-',
'--host=arm-linux',
'--disable-asm',
'--disable-cli',
'--enable-pic',
'--disable-shared',
'--enable-static',
'--prefix={}'.format(realpath('.')),
_env=env)
shprint(sh.make, '-j4', _env=env)
shprint(sh.make, 'install', _env=env)
recipe = LibX264Recipe()
| 1.976563 | 2 |
Win/reg.py | QGB/QPSU | 6 | 4246 | #coding=utf-8
try:
if __name__.startswith('qgb.Win'):
from .. import py
else:
import py
except Exception as ei:
raise ei
raise EnvironmentError(__name__)
if py.is2():
import _winreg as winreg
from _winreg import *
else:
import winreg
from winreg import *
def get(skey,name,root=HKEY_CURRENT_USER,returnType=True):
''' from qgb.Win import reg
reg.get(r'Software\Microsoft\Windows\CurrentVersion\Internet Settings','ProxyEnable')
reg.get(r'HKLM\SYSTEM\CurrentControlSet\Services\LanmanServer\Parameters\Size' )
There are seven predefined root keys, traditionally named according to their constant handles defined in the Win32 API
skey不能包含 name,否则 FileNotFoundError: [WinError 2] 系统找不到指定的文件。
'''
r = OpenKey(root,skey)
r = QueryValueEx(r,name)
if returnType:return r[0],'{} : {}'.format(REG_TYPE[r[1]],r[1])
else :return r[0]
def set(skey,name,value,root=HKEY_CURRENT_USER,type='auto,or REG_TYPE int',returnType=True):
r = OpenKey(root,skey,0,KEY_SET_VALUE)
if not py.isint(type):
if py.isint(value):type=4
if py.istr(value):type=1
if py.isbyte(value):type=3 #TODO test,and add more rule
	SetValueEx(r,name,0,type,value)  # write the requested value name, not a hard-coded one
if get(skey,name,root=root,returnType=False)==value:
		return 'reg.set [{}] {}={} success!'.format(skey[-55:],name,value)
else:
return 'reg.set [{}] {}={} Failed !'.format(skey,name,value)
REG_TYPE={ 0 : 'REG_NONE',
1 : 'REG_SZ',
2 : 'REG_EXPAND_SZ',
3 : 'REG_BINARY',
4 : 'REG_DWORD',
5 : 'REG_DWORD_BIG_ENDIAN',
6 : 'REG_LINK',
7 : 'REG_MULTI_SZ',
8 : 'REG_RESOURCE_LIST',
9 : 'REG_FULL_RESOURCE_DESCRIPTOR',
10: 'REG_RESOURCE_REQUIREMENTS_LIST',
11: 'REG_QWORD'}
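# Hedged usage sketch (Windows only; the key and value name are just examples):
#
#     from qgb.Win import reg
#     key = r'Software\Microsoft\Windows\CurrentVersion\Internet Settings'
#     value, reg_type = reg.get(key, 'ProxyEnable')
#     print(reg.set(key, 'ProxyEnable', 1))  # writes a REG_DWORD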
| 2.203125 | 2 |
tests/test_handler.py | CJSoldier/webssh | 13 | 4247 | <filename>tests/test_handler.py
import unittest
import paramiko
from tornado.httputil import HTTPServerRequest
from tests.utils import read_file, make_tests_data_path
from webssh.handler import MixinHandler, IndexHandler, InvalidValueError
class TestMixinHandler(unittest.TestCase):
def test_get_real_client_addr(self):
handler = MixinHandler()
handler.request = HTTPServerRequest(uri='/')
self.assertIsNone(handler.get_real_client_addr())
ip = '127.0.0.1'
handler.request.headers.add('X-Real-Ip', ip)
self.assertEqual(handler.get_real_client_addr(), False)
handler.request.headers.add('X-Real-Port', '12345x')
self.assertEqual(handler.get_real_client_addr(), False)
handler.request.headers.update({'X-Real-Port': '12345'})
self.assertEqual(handler.get_real_client_addr(), (ip, 12345))
handler.request.headers.update({'X-Real-ip': None})
self.assertEqual(handler.get_real_client_addr(), False)
handler.request.headers.update({'X-Real-Port': '12345x'})
self.assertEqual(handler.get_real_client_addr(), False)
class TestIndexHandler(unittest.TestCase):
def test_get_specific_pkey_with_plain_key(self):
fname = 'test_rsa.key'
cls = paramiko.RSAKey
key = read_file(make_tests_data_path(fname))
pkey = IndexHandler.get_specific_pkey(cls, key, None)
self.assertIsInstance(pkey, cls)
        pkey = IndexHandler.get_specific_pkey(cls, key, 'ignored')
self.assertIsInstance(pkey, cls)
pkey = IndexHandler.get_specific_pkey(cls, 'x'+key, None)
self.assertIsNone(pkey)
def test_get_specific_pkey_with_encrypted_key(self):
fname = 'test_rsa_password.key'
cls = paramiko.RSAKey
password = '<PASSWORD>'
key = read_file(make_tests_data_path(fname))
pkey = IndexHandler.get_specific_pkey(cls, key, password)
self.assertIsInstance(pkey, cls)
pkey = IndexHandler.get_specific_pkey(cls, 'x'+key, None)
self.assertIsNone(pkey)
with self.assertRaises(paramiko.PasswordRequiredException):
pkey = IndexHandler.get_specific_pkey(cls, key, None)
def test_get_pkey_obj_with_plain_key(self):
fname = 'test_ed25519.key'
cls = paramiko.Ed25519Key
key = read_file(make_tests_data_path(fname))
pkey = IndexHandler.get_pkey_obj(key, None, fname)
self.assertIsInstance(pkey, cls)
        pkey = IndexHandler.get_pkey_obj(key, 'ignored', fname)
self.assertIsInstance(pkey, cls)
with self.assertRaises(InvalidValueError) as exc:
pkey = IndexHandler.get_pkey_obj('x'+key, None, fname)
        self.assertIn('Invalid private key', str(exc.exception))
def test_get_pkey_obj_with_encrypted_key(self):
fname = 'test_ed25519_password.key'
password = '<PASSWORD>'
cls = paramiko.Ed25519Key
key = read_file(make_tests_data_path(fname))
pkey = IndexHandler.get_pkey_obj(key, password, fname)
self.assertIsInstance(pkey, cls)
with self.assertRaises(InvalidValueError) as exc:
pkey = IndexHandler.get_pkey_obj(key, 'wrongpass', fname)
        self.assertIn('Wrong password', str(exc.exception))
with self.assertRaises(InvalidValueError) as exc:
pkey = IndexHandler.get_pkey_obj('x'+key, password, fname)
        self.assertIn('Invalid private key', str(exc.exception))
with self.assertRaises(paramiko.PasswordRequiredException):
pkey = IndexHandler.get_pkey_obj(key, '', fname)
| 2.359375 | 2 |
apps/notifications/tests/test_views.py | SCiO-systems/qcat | 0 | 4248 | <gh_stars>0
import logging
from unittest import mock
from unittest.mock import call
from django.conf import settings
from django.contrib.auth import get_user_model
from django.core.signing import Signer
from django.urls import reverse
from django.http import Http404
from django.test import RequestFactory
from braces.views import LoginRequiredMixin
from django.test import override_settings
from model_mommy import mommy
from apps.notifications.models import Log, StatusUpdate, MemberUpdate, ReadLog, \
ActionContextQuerySet
from apps.notifications.views import LogListView, LogCountView, ReadLogUpdateView, \
LogQuestionnairesListView, LogInformationUpdateCreateView, \
LogSubscriptionPreferencesView, SignedLogSubscriptionPreferencesView
from apps.qcat.tests import TestCase
class LogListViewTest(TestCase):
def setUp(self):
self.view = LogListView()
self.url_path = reverse('notification_partial_list')
self.request = RequestFactory().get(self.url_path)
self.user = {}
self.request.user = self.user
self.view_instance = self.setup_view(
view=self.view, request=self.request
)
member_add_log = mommy.make(
_model=Log,
id=8,
action=settings.NOTIFICATIONS_ADD_MEMBER
)
self.change_log = mommy.make(
_model=Log,
id=42,
action=settings.NOTIFICATIONS_CHANGE_STATUS
)
mommy.make(_model=StatusUpdate, log=self.change_log)
mommy.make(_model=MemberUpdate, log=member_add_log)
def get_view_with_get_querystring(self, param):
request = RequestFactory().get(
'{url}?{param}'.format(url=self.url_path, param=param)
)
request.user = self.user
return self.setup_view(view=self.view, request=request)
def test_force_login(self):
self.assertIsInstance(self.view_instance, LoginRequiredMixin)
def test_queryset_method(self):
self.assertEqual(
self.view_instance.queryset_method,
'user_log_list'
)
def test_queryset_method_pending(self):
self.assertEqual(
self.get_view_with_get_querystring('is_pending').queryset_method,
'user_pending_list'
)
def test_get_paginate_by(self):
self.assertEqual(
self.view_instance.get_paginate_by(None),
settings.NOTIFICATIONS_LIST_PAGINATE_BY
)
def test_get_paginate_by_teaser(self):
self.assertEqual(
self.get_view_with_get_querystring('is_teaser').get_paginate_by(None),
settings.NOTIFICATIONS_TEASER_PAGINATE_BY
)
@mock.patch('apps.notifications.views.Log.actions.user_log_list')
def test_get_queryset(self, mock_actions):
self.view_instance.get_queryset()
mock_actions.assert_called_once_with(user={})
@mock.patch('apps.notifications.views.Log.actions.user_pending_list')
def test_get_queryset_pending(self, mock_actions):
self.get_view_with_get_querystring('is_pending').get_queryset()
mock_actions.assert_called_once_with(user={})
@mock.patch.object(LogListView, 'add_user_aware_data')
def test_get_context_data_logs(self, mock_add_user_aware_data):
self.view_instance.object_list = 'foo'
self.view_instance.get_context_data()
mock_add_user_aware_data.assert_called_once_with('foo')
def _test_add_user_aware_data(self):
# for faster tests, mock all the elements. elements are created here
# as this makes the tests more readable.
pth = 'apps.notifications.views.Log.actions'
with mock.patch('{}.read_id_list'.format(pth)) as read_id_list:
read_id_list.return_value = [42]
with mock.patch('{}.user_pending_list'.format(pth)) as pending:
pending.values_list.return_value = [8, 42]
logs = Log.objects.all()
return list(self.view_instance.add_user_aware_data(logs))
def test_add_user_aware_data_keys(self):
data_keys = self._test_add_user_aware_data()[0].keys()
for key in ['id', 'created', 'text', 'is_read', 'is_todo', 'edit_url']:
self.assertTrue(key in data_keys)
def test_add_user_aware_data_is_read(self):
data = self._test_add_user_aware_data()
# logs are ordered by creation date - 42 is the newer one
self.assertTrue(data[0]['is_read'])
def test_add_user_aware_data_is_not_read(self):
data = self._test_add_user_aware_data()
self.assertFalse(data[1]['is_read'])
#def test_add_user_aware_data_is_todo(self):
# data = self._test_add_user_aware_data()
# self.assertTrue(data[1]['is_todo'])
def test_add_user_aware_data_is_not_todo(self):
data = self._test_add_user_aware_data()
self.assertFalse(data[0]['is_todo'])
@override_settings(NOTIFICATIONS_ACTIONS={'foo': 'bar', 'result': '42'})
def test_statuses_in_context(self):
self.view_instance.object_list = []
context = self.view_instance.get_context_data()
self.assertDictEqual(
context['statuses'],
{'foo': 'bar', 'result': '42'}
)
@mock.patch('apps.notifications.views.Log.actions.user_log_list')
def test_status_filter_queryset(self, mock_user_log_list):
mock_user_log_list.return_value = []
self.assertEqual(
[], self.view_instance.get_queryset()
)
@mock.patch('apps.notifications.views.Log.actions.user_log_list')
def test_status_filter_queryset_for_status(self, mock_user_log_list):
mock_user_log_list.return_value = Log.objects.filter()
view = self.view
view.get_statuses = mock.MagicMock(return_value=[3])
view_instance = self.setup_view(
view=view, request=self.request
)
self.assertQuerysetEqual(
view_instance.get_queryset(),
[self.change_log.id],
transform=lambda item: item.id
)
def test_get_status_invalid(self):
request = RequestFactory().get('{}?statuses=foo'.format(self.url_path))
view = self.setup_view(self.view, request)
self.assertEqual(view.get_statuses(), [])
@override_settings(NOTIFICATIONS_ACTIONS={'2': 'bar'})
def test_get_status_invalid_config(self):
request = RequestFactory().get('{}?statuses=1'.format(self.url_path))
view = self.setup_view(self.view, request)
self.assertEqual(view.get_statuses(), [])
def test_get_status_valid(self):
request = RequestFactory().get('{}?statuses=1,2,3'.format(self.url_path))
view = self.setup_view(self.view, request)
self.assertEqual(view.get_statuses(), [1, 2, 3])
class ReadLogUpdateViewTest(TestCase):
def setUp(self):
self.view = ReadLogUpdateView()
self.request = RequestFactory().post(
reverse('notification_read'),
data={'user': 123, 'log': 'log', 'checked': 'true'}
)
self.user = mock.MagicMock(id=123)
self.request.user = self.user
self.view_instance = self.setup_view(view=self.view, request=self.request)
def test_validate_data_all_keys(self):
self.assertFalse(
self.view_instance.validate_data()
)
def test_validate_data_id_type(self):
self.assertFalse(
self.view_instance.validate_data(checked='1', log='1', user='foo')
)
def test_validate_data_invalid_user(self):
self.assertFalse(
self.view_instance.validate_data(checked='456', log='1', user='456')
)
def test_validate_data_valid(self):
self.assertTrue(
self.view_instance.validate_data(checked='1', log='1', user='123')
)
@mock.patch('apps.notifications.views.ReadLog.objects.update_or_create')
def test_post_valid_checked(self, mock_get_or_create):
self.view_instance.post(request=self.request)
mock_get_or_create.assert_called_once_with(
user_id='123', log_id='log', defaults={'is_read': True}
)
@mock.patch('apps.notifications.views.ReadLog.objects.update_or_create')
def test_post_valid_unchecked(self, mock_get_or_create):
request = RequestFactory().post(
reverse('notification_read'),
data={'user': 123, 'log': 'log', 'checked': 'false'}
)
self.view_instance.post(request=request)
mock_get_or_create.assert_called_once_with(
user_id='123', log_id='log', defaults={'is_read': False}
)
@mock.patch.object(ReadLogUpdateView, 'validate_data')
def test_post_invalid(self, mock_validate_data):
logging.disable(logging.CRITICAL)
mock_validate_data.return_value = False
with self.assertRaises(Http404):
self.view_instance.post(request=self.request)
class LogCountViewTest(TestCase):
def setUp(self):
super().setUp()
self.request = RequestFactory().get(reverse('notification_new_count'))
self.request.user = mommy.make(_model=get_user_model())
self.view = self.setup_view(view=LogCountView(), request=self.request)
mommy.make(
_model=Log,
catalyst=self.request.user,
action=settings.NOTIFICATIONS_CHANGE_STATUS,
_quantity=4
)
mommy.make(
_model=Log,
catalyst=self.request.user,
action=settings.NOTIFICATIONS_EDIT_CONTENT,
_quantity=2
)
@mock.patch('apps.notifications.views.Log.actions.only_unread_logs')
def test_get_unread_only(self, mock_only_unread_logs):
self.view.get(request=self.request)
mock_only_unread_logs.assert_called_once_with(
user=self.request.user
)
def test_log_count(self):
response = self.view.get(request=self.request)
self.assertEqual(response.content, b'4')
def test_log_count_one_read(self):
mommy.make(
_model=ReadLog,
log=Log.objects.filter(action=settings.NOTIFICATIONS_CHANGE_STATUS).first(),
user=self.request.user,
is_read=True
)
response = self.view.get(request=self.request)
self.assertEqual(response.content, b'3')
class LogQuestionnairesListViewTest(TestCase):
def setUp(self):
super().setUp()
self.request = RequestFactory().get(reverse('notification_questionnaire_logs'))
self.request.user = 'foo'
self.view = self.setup_view(view=LogQuestionnairesListView(), request=self.request)
@mock.patch.object(ActionContextQuerySet, 'user_log_list')
def test_get_questionnaire_logs(self, mock_user_log_list):
self.view.get_questionnaire_logs('foo')
mock_user_log_list.assert_called_once_with(user='foo')
@mock.patch.object(LogQuestionnairesListView, 'get_questionnaire_logs')
def test_get(self, mock_get_questionnaire_logs):
mock_get_questionnaire_logs.return_value = ['foo_1', 'foo_2', 'bar_3']
response = self.view.get(self.request)
self.assertEqual(
response.content, b'{"questionnaires": ["bar_3", "foo_1", "foo_2"]}'
)
class LogInformationUpdateCreateViewTest(TestCase):
def setUp(self):
super().setUp()
self.url = reverse('notification_inform_compiler')
self.view = LogInformationUpdateCreateView()
self.request = RequestFactory().get(self.url)
self.request.user = 'foo'
self.view = self.setup_view(view=self.view, request=self.request)
def test_get_compiler_query(self):
questionnaire = mock.MagicMock()
self.view.get_compiler(questionnaire)
self.assertEqual(
questionnaire.method_calls[0],
call.questionnairemembership_set.get(role='compiler')
)
def test_get_compiler(self):
sentinel = mock.sentinel
questionnaire = mock.MagicMock()
questionnaire.questionnairemembership_set.get.return_value = sentinel
self.assertEqual(
self.view.get_compiler(questionnaire),
sentinel.user
)
@mock.patch('apps.notifications.views.query_questionnaire')
def test_get_questionnaire(self, mock_query_questionnaire):
one_questionnaire = mock.MagicMock()
one_questionnaire.first = lambda : 'foo'
mock_query_questionnaire.return_value = one_questionnaire
self.assertEqual(
self.view.get_questionnaire('foo'), 'foo'
)
@mock.patch('apps.notifications.views.query_questionnaire')
def test_get_questionnaire_raises(self, mock_query_questionnaire):
not_exists = mock.MagicMock()
not_exists.exists = lambda : False
mock_query_questionnaire.return_value = not_exists
with self.assertRaises(Http404):
self.view.get_questionnaire('foo')
@mock.patch('apps.notifications.views.query_questionnaire')
def test_get_questionnaire_calls_filter(self, mock_query_questionnaire):
self.view.get_questionnaire('foo')
mock_query_questionnaire.assert_called_once_with(
identifier='foo', request=self.request
)
@override_settings(NOTIFICATIONS_FINISH_EDITING='setting')
@mock.patch.object(LogInformationUpdateCreateView, 'get_questionnaire')
@mock.patch.object(LogInformationUpdateCreateView, 'get_compiler')
def test_post(self, mock_get_compiler, mock_get_questionnaire):
compiler = mock.MagicMock()
mock_get_questionnaire.return_value = mock.sentinel.questionnaire
mock_get_compiler.return_value = compiler
request = RequestFactory().post(self.url, data={
'identifier': 'foo',
'message': 'bar'
})
with mock.patch('apps.notifications.views.InformationLog') as mock_create:
self.setup_view(view=self.view, request=self.request).post(request)
mock_create.assert_called_once_with(
action='setting',
questionnaire=mock.sentinel.questionnaire,
receiver=compiler,
sender='foo'
)
class LogSubscriptionPreferencesMixinTest(TestCase):
def setUp(self):
self.url = reverse('notification_preferences')
self.view = LogSubscriptionPreferencesView()
self.request = RequestFactory().get(self.url)
self.user = mommy.make(_model=get_user_model())
self.obj = self.user.mailpreferences
self.request.user = self.user
self.request._messages = mock.MagicMock()
self.view = self.setup_view(view=self.view, request=self.request)
self.view.object = self.obj
def test_get_initial(self):
self.obj.wanted_actions = 'some,thing,yay'
self.assertEqual(
['some', 'thing', 'yay'],
self.view.get_initial()['wanted_actions']
)
def test_get_form_valid_changed_language(self):
self.view.object = mock.MagicMock()
self.view.object.has_changed_language = False
form = mock.MagicMock()
form.changed_data = ['language']
self.view.form_valid(form)
self.assertTrue(self.view.object.has_changed_language)
def test_get_form_valid_message(self):
self.view.form_valid(mock.MagicMock())
self.assertTrue(self.request._messages.method_calls)
class SignedLogSubscriptionPreferencesViewTest(TestCase):
def setUp(self):
self.user = mommy.make(_model=get_user_model())
self.obj = self.user.mailpreferences
self.view = SignedLogSubscriptionPreferencesView()
self.request = RequestFactory().get(str(self.obj.get_signed_url()))
self.request._messages = mock.MagicMock()
self.view = self.setup_view(view=self.view, request=self.request)
self.view.object = self.obj
def test_get_success_url_signed(self):
mock_user = mock.MagicMock(return_value=self.user)
mock_user.is_authenticated = False
mock_user.id = self.user.id
self.request.user = mock_user
self.assertEqual(
self.view.get_success_url(),
self.obj.get_signed_url()
)
def test_get_success_url_user(self):
self.request.user = self.user
self.assertEqual(
self.view.get_success_url(),
reverse('notification_preferences')
)
def test_get_object_user(self):
self.request.user = self.user
self.assertEqual(
self.view.get_object(),
self.obj
)
def test_get_signed_object(self):
mock_user = mock.MagicMock(return_value=self.user)
mock_user.is_authenticated = False
        mock_user.id = self.user.id
self.request.user = mock_user
self.view.kwargs['token'] = mock.MagicMock()
with mock.patch.object(Signer, 'unsign') as mock_unsign:
mock_unsign.return_value = self.obj.id
self.assertEqual(
self.view.get_object(), self.obj
)
mock_unsign.assert_called_with(self.view.kwargs['token'])
def test_get_signed_object_404(self):
mock_user = mock.MagicMock(return_value=self.user)
mock_user.is_authenticated = False
mock_user.id = self.user.id
self.request.user = mock_user
self.view.kwargs['token'] = mock.MagicMock()
with self.assertRaises(Http404):
self.view.get_object()
| 2.140625 | 2 |
examples/resources.py | willvousden/clint | 1,230 | 4249 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import sys
import os
sys.path.insert(0, os.path.abspath('..'))
from clint import resources
resources.init('kennethreitz', 'clint')
lorem = 'Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.'
print('%s created.' % resources.user.path)
resources.user.write('lorem.txt', lorem)
print('lorem.txt created')
assert resources.user.read('lorem.txt') == lorem
print('lorem.txt has correct contents')
resources.user.delete('lorem.txt')
print('lorem.txt deleted')
assert resources.user.read('lorem.txt') is None
print('lorem.txt deletion confirmed')
| 2.21875 | 2 |
photos/urls.py | charlesmugambi/Instagram | 0 | 4250 | from django.conf.urls import url
from django.conf import settings
from django.conf.urls.static import static
from . import views
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^image/$', views.add_image, name='upload_image'),
url(r'^profile/$', views.profile_info, name='profile'),
url(r'^update/$', views.profile_update, name='update'),
url(r'^comment/(?P<image_id>\d+)', views.comment, name='comment'),
url(r'^search/', views.search_results, name = 'search_results'),
url(r'^follow/(?P<user_id>\d+)', views.follow, name = 'follow'),
url(r'^unfollow/(?P<user_id>\d+)', views.unfollow, name='unfollow'),
url(r'^likes/(\d+)/$', views.like_images,name='likes')
]
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| 1.90625 | 2 |
bread.py | vgfang/breadbot | 0 | 4251 | <reponame>vgfang/breadbot
import random
import math
from fractions import Fraction
from datetime import datetime
from jinja2 import Template
# empty class for passing to template engine
class Recipe:
def __init__(self):
return
# returns flour percent using flour type
def get_special_flour_percent(flourType: str, breadFlourPercent:int) -> int:
if flourType == 'Hard Red Whole Wheat' or flourType == 'Hard White Wheat':
percentages = [0,25,30,35,40,45,50]
percentages = list(filter(lambda x: 100-breadFlourPercent >= x, percentages))
return random.choice(percentages)
elif flourType == 'Rye' and breadFlourPercent >= 75:
percentages = [0,10,15,20]
percentages = list(filter(lambda x: 100-breadFlourPercent >= x, percentages))
return random.choice(percentages)
else:
        percentages = [0,10,15,20,25,30]
percentages = list(filter(lambda x: 100-breadFlourPercent >= x, percentages))
return random.choice(percentages)
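# Illustrative behaviour of get_special_flour_percent (follows from the filters above):
#   get_special_flour_percent('Rye', 75)              -> one of [0, 10, 15, 20]
#   get_special_flour_percent('Hard White Wheat', 50) -> one of [0, 25, 30, 35, 40, 45, 50]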
# returns multiplied spoon units from teaspoon fraction input, 3 tsp = 1 tbsp
def spoon_mult(tsp: Fraction(), multiplier: float) -> str:
tsp *= Fraction(multiplier)
spoonString = ""
if tsp >= 3: # use tablespoons
tablespoons = int(tsp // 3)
remainder = (tsp % 3) / 3
if tablespoons != 0:
spoonString += f"{tablespoons} "
if remainder.numerator != 0:
spoonString += f"{remainder.numerator}/{remainder.denominator} "
return f"{spoonString}tbsp"
else:
teaspoons = int(tsp // 1)
remainder = tsp % 1
if teaspoons != 0:
spoonString += f"{teaspoons} "
if remainder.numerator != 0:
spoonString += f"{remainder.numerator}/{remainder.denominator} "
return f"{spoonString}tsp"
# returns amount given the type of flavoring(spices)
def get_flavor_amount(flavor: str, flourAmount: int) -> str:
colorsDict = {}
scale = 4 # floors to the 500g/scale for clean fractional multiplication
multiplier = math.floor(flourAmount/500*scale) / scale
# flavors in category
red = ('Cardamom', 'Nutmeg','Hazelnut','Almond','Lemon Extract','Peppermint')
blue = ('Cinnamon', 'Allspice')
green = ('Vanilla', 'Instant Coffee')
purple = ('Orange Zest', 'Lime Zest', 'Lemon Zest', 'Ginger')
orange = ('Lavender', 'Hojicha', 'Matcha', 'Earl Grey', 'Oolong')
# default possible teaspoon values list for flour = 500, 3 tsp = 1 tbsp
redAmt = list(map(Fraction, [1/4, 1/2]))
blueAmt = list(map(Fraction, [1/4, 1/2, 1]))
greenAmt = list(map(Fraction, [1/2, 1, 3/2]))
purpleAmt = list(map(Fraction, [2, 3, 9/2]))
orangeAmt = list(map(Fraction, [9]))
# random tablespoons
colorsDict[red] = list(map(lambda x: spoon_mult(x, multiplier), redAmt))
colorsDict[blue] = list(map(lambda x: spoon_mult(x, multiplier), blueAmt))
colorsDict[green] = list(map(lambda x: spoon_mult(x, multiplier), greenAmt))
colorsDict[purple] = list(map(lambda x: spoon_mult(x, multiplier), purpleAmt))
colorsDict[orange] = list(map(lambda x: spoon_mult(x, multiplier), orangeAmt))
for color in colorsDict.keys():
if flavor in color:
return random.choice(colorsDict[color])
# print("Error in Flavor Input: " + flavor)
return "get_flavor_amount wrong input"
# returns list of spices using number of spices
def get_spices(spicesNum: int) -> [str]:
spicesList = ['Cinnamon', 'Allspice', 'Cardamom', 'Nutmeg']
if spicesNum > len(spicesList):
print("WARNING: spicesNum exceeds spices of num")
return spicesList
if spicesNum == 1:
return random.sample(['Cinnamon', 'Cardamom'], 1)
return random.sample(spicesList, spicesNum)
# check if extract is nut
def is_nut(extract: str) -> bool:
nuts = ['Hazelnut','Almond']
return extract in nuts
# checks if extract1 and extract2 are both allowed based on zest/extract same flavor
def zest_extract_same_flavor(extract1: str, extract2: str) -> bool:
if extract1 == extract2:
return False
e1 = extract1.split(" ") # may need to change if new types are added
e2 = extract2.split(" ")
if len(e1) != 2 or len(e2) != 2:
return False
if e1[0]==e2[0] and 'Zest' in [e1[1],e2[1]] and 'Extract' in [e1[1],e2[1]]:
return True
return False
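# Example outputs (derived from the rules above):
#   zest_extract_same_flavor('Lemon Zest', 'Lemon Extract') -> True  (same fruit, zest + extract)
#   zest_extract_same_flavor('Lemon Zest', 'Lime Zest')     -> False (different fruits)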
# return list of extracts using number of extracts
def get_extracts(extractsNum: int) -> [str]:
if extractsNum == 0:
return []
allowedExtracts = ['Vanilla', 'Hazelnut', 'Almond', 'Lemon Extract', 'Peppermint',
'Orange Zest', 'Lime Zest', 'Lemon Zest', 'Ginger']
# if more than one, vanilla must be included
currentExtracts = ['Vanilla']
allowedExtracts.remove('Vanilla')
extractsLeft = extractsNum-1
while extractsLeft > 0:
if len(allowedExtracts) <= 0:
print("Incorrecnt number of extracts")
return "Incorrecnt number of extracts"
newExtract = random.choice(allowedExtracts)
# one nut at a time
if True in map(is_nut, currentExtracts) and is_nut(newExtract):
allowedExtracts.remove(newExtract)
continue # skips decrement, try again
        # no zest + extract combination of the same flavor
        conflict = False
        for currentExtract in currentExtracts:
            if zest_extract_same_flavor(currentExtract, newExtract):
                allowedExtracts.remove(newExtract)
                conflict = True
                break
        if conflict:
            continue  # skips decrement, try again
# passed restraints, remove it from allowed
currentExtracts.append(newExtract)
if newExtract in allowedExtracts:
allowedExtracts.remove(newExtract)
extractsLeft -= 1
return currentExtracts
# return percentage of enrichment
def get_enrichment_percent(enrichment: str) -> int:
if enrichment == 'Cream Cheese':
return 10
return 5
# return liquid percent from liquid tpye
def get_liquid_percent(liquidType: str) -> int:
if liquidType in ['Heavy Cream', 'Coconut Milk']:
return 13
elif liquidType in ['Cow Milk']:
return 63
# print("Error in liquidType input.")
return -1
# return fruit puree fruit choice(s), omitted fruit chance weighting for now
def get_fruit_purees() -> [str]:
fruitPureesNum = random.randint(1,2)
fruitPureesChoices = ['Banana','Apple','Cherry','Strawberry','Fig','Mango']
return random.sample(fruitPureesChoices, fruitPureesNum)
# return fruit puree percent from 0-2 fruitPurees using random generation
def get_fruit_purees_percent(fruitPurees) -> [float]:
totalFruitPureePercent = random.choice([25,30,35,40,45,50])
fruitPureeNum = len(fruitPurees)
if fruitPureeNum == 1:
return [totalFruitPureePercent]
elif fruitPureeNum == 2:
firstPercent = random.randint(0,totalFruitPureePercent)
return [firstPercent, totalFruitPureePercent - firstPercent]
return [0]
# returns rounded gram conversion from percent, used in template
def to_g(flourMl, percent) -> int:
return round(flourMl * percent/100)
# takes filename and writes an html recipe file
def generate_recipe(breadname: str, filename: str, flourGramInput: int) -> str:
# ALL NUMBERICAL VALUES REPRESENT PERCENTAGES
r = Recipe()
r.breadname = breadname
r.totalFlourGrams = flourGramInput
r.totalLiquidPercent = 63
r.preferment = random.choice(['Poolish', 'None'])
r.breadFlourPercent = random.choice([75, 50])
# FLOUR STYLE
r.breadShape = random.choice(['Pullman', 'Regular'])
# FLOUR TYPES
r.specialFlour = random.choice([
'Einkorn',
'Khorasan',
'Spelt',
'Emmer',
'Semolina (Durum)',
'Hard Red Whole Wheat',
'Regular Whole Wheat',
'Hard White Wheat',
'Rye'
])
r.specialFlourPercent = get_special_flour_percent(r.specialFlour, r.breadFlourPercent)
r.whiteFlourPercent = 100 - r.breadFlourPercent - r.specialFlourPercent
# SPICES/FLAVORING
spicesNum = random.randint(0,4)
r.spices = get_spices(spicesNum)
extractsNum = random.randint(0,3)
r.extracts = get_extracts(extractsNum)
teaList = ['Lavender', 'Hojicha', 'Matcha', 'Earl Grey', 'Oolong', 'Instant Coffee']
r.tea = random.choice(teaList)
# illegal with fruit purees and all extracts but ginger, almond, and hazelnut
# BASIC INGREDIENTS
r.sugar = random.choice(['Brown Sugar','White Sugar','Honey','Molasses'])
r.sugarPercent = random.choice([5,10,15])
r.salt = 'Table Salt'
r.saltPercent = random.choice([1,1.5,2])
r.yeast = random.choice(['Instant Yeast','Active Yeast'])
r.yeastPercent = 0.62
# ENRICHMENTS – All 5% , only one chosen
enrichmentList = ['Olive Oil','Butter','Cream Cheese','Coconut oil']
if r.tea == 'Instant Coffee':
enrichmentList.remove('Olive Oil')
r.enrichment = random.choice(enrichmentList)
r.enrichmentPercent = get_enrichment_percent(r.enrichment)
if r.enrichment == 'Cream Cheese':
r.totalLiquidPercent -= 5
# LIQUIDS
# cap total liquid at 60% when these sugars are used
if r.sugar in ['Honey', 'Molasses']:
r.totalLiquidPercent = 60
# cow milk only if there is no preferemnt
viableLiquids = ['Heavy Cream', 'Coconut Milk', 'Cow Milk']
if r.preferment != 'None':
viableLiquids.remove('Cow Milk')
r.liquid = random.choice(viableLiquids)
r.liquidPercent = get_liquid_percent(r.liquid)
## LIQUIDS - FRUIT PUREE
r.fruitPurees = []
r.fruitPureesPercent = []
if r.preferment != 'Poolish':
# 50 percent chance to include
# sugar reduction by 5 percent
r.sugarPercent -= 5
r.fruitPurees = get_fruit_purees()
r.fruitPureesPercent = get_fruit_purees_percent(r.fruitPurees)
# account for cow milk
r.liquidPercent = min(r.liquidPercent, r.totalLiquidPercent - sum(r.fruitPureesPercent))
r.waterPercent = max(0, r.totalLiquidPercent - sum(r.fruitPureesPercent) - r.liquidPercent)
# BICOLOR ROLL
r.isBicolorRoll = False
if len(r.fruitPureesPercent) > 0 or r.tea in ['Lavender', 'Hojicha', 'Matcha', 'Earl Grey', 'Oolong']:
r.isBicolorRoll = random.choice([True,False])
# COCOA POWDER
r.cocoaPowderPercent = 0
cocoaPowderAllowedExtracts = ['Ginger', 'Almond', 'Hazelnut']
    if r.fruitPurees == [] and any(x not in cocoaPowderAllowedExtracts for x in r.extracts): # allowed
if random.randint(0,2) == 0:
r.tea = '' # removes tea
r.cocoaPowderPercent = round(random.choice([5,10])/100 * r.whiteFlourPercent,1)
r.whiteFlourPercent = round(r.whiteFlourPercent - r.cocoaPowderPercent,1)
# WRITE FORMAT
time = datetime.now()
r.datetime = time.strftime('%A, %b %d %Y')
templateFile = open("./template.html")
templateString = templateFile.read()
    ## Conversion to grams for percentages
r.totalLiquidGrams = to_g(r.totalFlourGrams, r.totalLiquidPercent)
r.breadFlourGrams = to_g(r.totalFlourGrams, r.breadFlourPercent)
r.specialFlourGrams = to_g(r.totalFlourGrams, r.specialFlourPercent)
r.whiteFlourGrams = to_g(r.totalFlourGrams, r.whiteFlourPercent)
r.sugarGrams = to_g(r.totalFlourGrams, r.sugarPercent)
r.saltGrams = to_g(r.totalFlourGrams, r.saltPercent)
r.yeastGrams = to_g(r.totalFlourGrams, r.yeastPercent)
r.spicesAmt = list(map(lambda x: get_flavor_amount(x, r.totalFlourGrams), r.spices))
r.extractsAmt = list(map(lambda x: get_flavor_amount(x, r.totalFlourGrams), r.extracts))
r.teaAmt = get_flavor_amount(r.tea, r.totalFlourGrams)
r.enrichmentGrams = to_g(r.totalFlourGrams, r.enrichmentPercent)
r.waterGrams = to_g(r.totalFlourGrams, r.waterPercent)
r.liquidGrams = to_g(r.totalFlourGrams, r.liquidPercent)
r.fruitPureesGrams = list(map(lambda x: to_g(r.totalFlourGrams,x), r.fruitPureesPercent))
r.cocoaPowderGrams = round(r.cocoaPowderPercent/100 * r.totalFlourGrams)
template = Template(templateString)
htmlString = template.render(r = r)
outfile = open(f'{filename}', 'w')
outfile.write(htmlString)
outfile.close()
templateFile.close()
return htmlString | 3.1875 | 3 |
posthog/api/test/test_organization_domain.py | msnitish/posthog | 0 | 4252 | import datetime
from unittest.mock import patch
import dns.resolver
import dns.rrset
import pytest
import pytz
from django.utils import timezone
from freezegun import freeze_time
from rest_framework import status
from posthog.models import Organization, OrganizationDomain, OrganizationMembership, Team
from posthog.test.base import APIBaseTest, BaseTest
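# Minimal stand-ins for the result of dns.resolver.resolve: they expose only the
# `.response.answer` attribute, which is all the mocked resolver calls below rely on.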
class FakeAnswer(object):
def __init__(self, answer):
self.answer = answer
class FakeDNSResponse(object):
def __init__(self, answer):
self.response = FakeAnswer(answer)
class TestOrganizationDomains(BaseTest):
def test_continuous_verification_task(self):
"""
Tests the task that re-verifies domains to ensure ownership is maintained.
"""
pass
class TestOrganizationDomainsAPI(APIBaseTest):
domain: OrganizationDomain = None # type: ignore
another_domain: OrganizationDomain = None # type: ignore
another_org: Organization = None # type: ignore
@classmethod
def setUpTestData(cls):
super().setUpTestData()
cls.domain = OrganizationDomain.objects.create(organization=cls.organization, domain="myposthog.com")
cls.another_org = Organization.objects.create(name="Another Org")
Team.objects.create(organization=cls.another_org)
cls.another_domain = OrganizationDomain.objects.create(organization=cls.another_org, domain="org.posthog.net")
# List & retrieve domains
def test_can_list_and_retrieve_domains(self):
response = self.client.get("/api/organizations/@current/domains")
self.assertEqual(response.status_code, status.HTTP_200_OK)
response_data = response.json()
self.assertEqual(response_data["count"], 1)
item = response_data["results"][0]
self.assertEqual(item["domain"], "myposthog.com")
self.assertEqual(item["verified_at"], None)
self.assertEqual(item["is_verified"], False)
self.assertEqual(item["jit_provisioning_enabled"], False)
self.assertEqual(item["sso_enforcement"], "")
self.assertRegex(item["verification_challenge"], r"[0-9A-Za-z_-]{32}")
retrieve_response = self.client.get(f"/api/organizations/{self.organization.id}/domains/{self.domain.id}")
self.assertEqual(retrieve_response.status_code, status.HTTP_200_OK)
self.assertEqual(retrieve_response.json(), response_data["results"][0])
def test_cannot_list_or_retrieve_domains_for_other_org(self):
self.organization_membership.level = OrganizationMembership.Level.ADMIN
self.organization_membership.save()
response = self.client.get(f"/api/organizations/@current/domains/{self.another_domain.id}")
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
self.assertEqual(response.json(), self.not_found_response())
response = self.client.get(f"/api/organizations/{self.another_org.id}/domains/{self.another_domain.id}")
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.assertEqual(response.json(), self.permission_denied_response())
# Create domains
def test_create_domain(self):
self.organization_membership.level = OrganizationMembership.Level.ADMIN
self.organization_membership.save()
with self.settings(MULTI_TENANCY=True):
response = self.client.post(
"/api/organizations/@current/domains/",
{
"domain": "the.posthog.com",
"verified_at": "2022-01-01T14:25:25.000Z", # ignore me
"verification_challenge": "123", # ignore me
"jit_provisioning_enabled": True, # ignore me
"sso_enforcement": "saml", # ignore me
},
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
response_data = response.json()
self.assertEqual(response_data["domain"], "the.posthog.com")
self.assertEqual(response_data["verified_at"], None)
self.assertEqual(response_data["jit_provisioning_enabled"], False)
self.assertRegex(response_data["verification_challenge"], r"[0-9A-Za-z_-]{32}")
instance = OrganizationDomain.objects.get(id=response_data["id"])
self.assertEqual(instance.domain, "the.posthog.com")
self.assertEqual(instance.verified_at, None)
self.assertEqual(instance.last_verification_retry, None)
self.assertEqual(instance.sso_enforcement, "")
@pytest.mark.skip_on_multitenancy
def test_creating_domain_on_self_hosted_is_automatically_verified(self):
self.organization_membership.level = OrganizationMembership.Level.ADMIN
self.organization_membership.save()
with freeze_time("2021-08-08T20:20:08Z"):
response = self.client.post(
"/api/organizations/@current/domains/",
{
"domain": "the.posthog.com",
"verified_at": "2022-01-01T14:25:25.000Z", # ignore me
"verification_challenge": "123", # ignore me
"jit_provisioning_enabled": True, # ignore me
"sso_enforcement": "saml", # ignore me
},
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
response_data = response.json()
self.assertEqual(response_data["domain"], "the.posthog.com")
self.assertEqual(
response_data["verified_at"], "2021-08-08T20:20:08Z",
)
self.assertEqual(response_data["jit_provisioning_enabled"], False)
self.assertRegex(response_data["verification_challenge"], r"[0-9A-Za-z_-]{32}")
instance = OrganizationDomain.objects.get(id=response_data["id"])
self.assertEqual(instance.domain, "the.posthog.com")
self.assertEqual(
instance.verified_at, datetime.datetime(2021, 8, 8, 20, 20, 8, tzinfo=pytz.UTC),
)
self.assertEqual(instance.last_verification_retry, None)
self.assertEqual(instance.sso_enforcement, "")
def test_cannot_create_duplicate_domain(self):
OrganizationDomain.objects.create(domain="i-registered-first.com", organization=self.another_org)
count = OrganizationDomain.objects.count()
self.organization_membership.level = OrganizationMembership.Level.ADMIN
self.organization_membership.save()
response = self.client.post("/api/organizations/@current/domains/", {"domain": "i-registered-first.com"},)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(
response.json(),
{
"type": "validation_error",
"code": "unique",
"detail": "domain with this domain already exists.",
"attr": "domain",
},
)
self.assertEqual(OrganizationDomain.objects.count(), count)
def test_cannot_create_invalid_domain(self):
count = OrganizationDomain.objects.count()
self.organization_membership.level = OrganizationMembership.Level.ADMIN
self.organization_membership.save()
invalid_domains = ["<EMAIL>", "🦔🦔🦔.com", "one.two.c", "--alpha.com", "javascript: alert(1)"]
for _domain in invalid_domains:
response = self.client.post("/api/organizations/@current/domains/", {"domain": _domain,},)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(
response.json(),
{
"type": "validation_error",
"code": "invalid_input",
"detail": "Please enter a valid domain or subdomain name.",
"attr": "domain",
},
)
self.assertEqual(OrganizationDomain.objects.count(), count)
@patch("posthog.models.organization_domain.dns.resolver.resolve")
def test_can_request_verification_for_unverified_domains(self, mock_dns_query):
self.organization_membership.level = OrganizationMembership.Level.ADMIN
self.organization_membership.save()
mock_dns_query.return_value = FakeDNSResponse(
[
dns.rrset.from_text(
"_posthog-challenge.myposthog.com.", 3600, "IN", "TXT", self.domain.verification_challenge,
)
],
)
with freeze_time("2021-08-08T20:20:08Z"):
response = self.client.post(f"/api/organizations/@current/domains/{self.domain.id}/verify")
self.assertEqual(response.status_code, status.HTTP_200_OK)
response_data = response.json()
self.domain.refresh_from_db()
self.assertEqual(response_data["domain"], "myposthog.com")
self.assertEqual(
response_data["verified_at"], self.domain.verified_at.strftime("%Y-%m-%dT%H:%M:%SZ"),
)
self.assertEqual(response_data["is_verified"], True)
self.assertEqual(
self.domain.verified_at, datetime.datetime(2021, 8, 8, 20, 20, 8, tzinfo=pytz.UTC),
)
self.assertEqual(self.domain.is_verified, True)
@patch("posthog.models.organization_domain.dns.resolver.resolve")
def test_domain_is_not_verified_with_missing_challenge(self, mock_dns_query):
self.organization_membership.level = OrganizationMembership.Level.ADMIN
self.organization_membership.save()
mock_dns_query.side_effect = dns.resolver.NoAnswer()
with freeze_time("2021-10-10T10:10:10Z"):
with self.settings(MULTI_TENANCY=True):
response = self.client.post(f"/api/organizations/@current/domains/{self.domain.id}/verify")
self.assertEqual(response.status_code, status.HTTP_200_OK)
response_data = response.json()
self.domain.refresh_from_db()
self.assertEqual(response_data["domain"], "myposthog.com")
self.assertEqual(response_data["verified_at"], None)
self.assertEqual(self.domain.verified_at, None)
self.assertEqual(
self.domain.last_verification_retry, datetime.datetime(2021, 10, 10, 10, 10, 10, tzinfo=pytz.UTC),
)
@patch("posthog.models.organization_domain.dns.resolver.resolve")
def test_domain_is_not_verified_with_incorrect_challenge(self, mock_dns_query):
self.organization_membership.level = OrganizationMembership.Level.ADMIN
self.organization_membership.save()
mock_dns_query.return_value = FakeDNSResponse(
[dns.rrset.from_text("_posthog-challenge.myposthog.com.", 3600, "IN", "TXT", "incorrect_challenge",)],
)
with freeze_time("2021-10-10T10:10:10Z"):
with self.settings(MULTI_TENANCY=True):
response = self.client.post(f"/api/organizations/@current/domains/{self.domain.id}/verify")
self.assertEqual(response.status_code, status.HTTP_200_OK)
response_data = response.json()
self.domain.refresh_from_db()
self.assertEqual(response_data["domain"], "myposthog.com")
self.assertEqual(response_data["verified_at"], None)
self.assertEqual(self.domain.verified_at, None)
self.assertEqual(
self.domain.last_verification_retry, datetime.datetime(2021, 10, 10, 10, 10, 10, tzinfo=pytz.UTC),
)
def test_cannot_request_verification_for_verified_domains(self):
self.organization_membership.level = OrganizationMembership.Level.ADMIN
self.organization_membership.save()
self.domain.verified_at = timezone.now()
self.domain.save()
response = self.client.post(f"/api/organizations/@current/domains/{self.domain.id}/verify")
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(
response.json(),
{
"type": "validation_error",
"code": "already_verified",
"detail": "This domain has already been verified.",
"attr": None,
},
)
def test_only_admin_can_create_verified_domains(self):
count = OrganizationDomain.objects.count()
response = self.client.post("/api/organizations/@current/domains/", {"domain": "evil.posthog.com"})
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.assertEqual(
response.json(), self.permission_denied_response("Your organization access level is insufficient."),
)
self.assertEqual(OrganizationDomain.objects.count(), count)
def test_only_admin_can_request_verification(self):
response = self.client.post(f"/api/organizations/@current/domains/{self.domain.id}/verify")
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.assertEqual(
response.json(), self.permission_denied_response("Your organization access level is insufficient."),
)
self.domain.refresh_from_db()
self.assertEqual(self.domain.verified_at, None)
# Update domains
def test_can_update_jit_provisioning_and_sso_enforcement(self):
self.organization_membership.level = OrganizationMembership.Level.ADMIN
self.organization_membership.save()
self.domain.verified_at = timezone.now()
self.domain.save()
response = self.client.patch(
f"/api/organizations/@current/domains/{self.domain.id}/",
{"sso_enforcement": "google-oauth2", "jit_provisioning_enabled": True},
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.json()["sso_enforcement"], "google-oauth2")
self.assertEqual(response.json()["jit_provisioning_enabled"], True)
self.domain.refresh_from_db()
self.assertEqual(self.domain.sso_enforcement, "google-oauth2")
self.assertEqual(self.domain.jit_provisioning_enabled, True)
def test_cannot_enforce_sso_or_enable_jit_provisioning_on_unverified_domain(self):
self.organization_membership.level = OrganizationMembership.Level.ADMIN
self.organization_membership.save()
# SSO Enforcement
response = self.client.patch(
f"/api/organizations/@current/domains/{self.domain.id}/", {"sso_enforcement": "google-oauth2"},
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(
response.json(),
{
"type": "validation_error",
"code": "verification_required",
"detail": "This attribute cannot be updated until the domain is verified.",
"attr": "sso_enforcement",
},
)
self.domain.refresh_from_db()
self.assertEqual(self.domain.sso_enforcement, "")
# JIT Provisioning
response = self.client.patch(
f"/api/organizations/@current/domains/{self.domain.id}/", {"jit_provisioning_enabled": True},
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(
response.json(),
{
"type": "validation_error",
"code": "verification_required",
"detail": "This attribute cannot be updated until the domain is verified.",
"attr": "jit_provisioning_enabled",
},
)
self.domain.refresh_from_db()
self.assertEqual(self.domain.jit_provisioning_enabled, False)
def test_only_allowed_parameters_can_be_updated(self):
self.organization_membership.level = OrganizationMembership.Level.ADMIN
self.organization_membership.save()
response = self.client.patch(
f"/api/organizations/@current/domains/{self.domain.id}/",
{"verified_at": "2020-01-01T12:12:12Z", "verification_challenge": "123"},
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.json()["verified_at"], None)
self.assertRegex(response.json()["verification_challenge"], r"[0-9A-Za-z_-]{32}")
def test_only_admin_can_update_domain(self):
self.domain.verified_at = timezone.now()
self.domain.save()
response = self.client.patch(
f"/api/organizations/{self.organization.id}/domains/{self.domain.id}/",
{"sso_enforcement": "google-oauth2", "jit_provisioning_enabled": True},
)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.assertEqual(
response.json(), self.permission_denied_response("Your organization access level is insufficient."),
)
self.domain.refresh_from_db()
self.assertEqual(self.domain.jit_provisioning_enabled, False)
self.assertEqual(self.domain.sso_enforcement, "")
def test_cannot_update_domain_for_another_org(self):
self.organization_membership.level = OrganizationMembership.Level.ADMIN
self.organization_membership.save()
self.another_domain.verified_at = timezone.now()
self.another_domain.save()
response = self.client.patch(
f"/api/organizations/{self.another_org.id}/domains/{self.another_domain.id}/",
{"sso_enforcement": "google-oauth2", "jit_provisioning_enabled": True},
)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.assertEqual(response.json(), self.permission_denied_response())
self.another_domain.refresh_from_db()
self.assertEqual(self.another_domain.jit_provisioning_enabled, False)
self.assertEqual(self.another_domain.sso_enforcement, "")
# Delete domains
def test_admin_can_delete_domain(self):
self.organization_membership.level = OrganizationMembership.Level.ADMIN
self.organization_membership.save()
response = self.client.delete(f"/api/organizations/@current/domains/{self.domain.id}")
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
self.assertEqual(response.content, b"")
self.assertFalse(OrganizationDomain.objects.filter(id=self.domain.id).exists())
def test_only_admin_can_delete_domain(self):
response = self.client.delete(f"/api/organizations/@current/domains/{self.domain.id}")
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.assertEqual(
response.json(), self.permission_denied_response("Your organization access level is insufficient."),
)
self.domain.refresh_from_db()
def test_cannot_delete_domain_for_another_org(self):
self.organization_membership.level = OrganizationMembership.Level.ADMIN
self.organization_membership.save()
response = self.client.delete(f"/api/organizations/{self.another_org.id}/domains/{self.another_domain.id}")
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.assertEqual(response.json(), self.permission_denied_response())
self.another_domain.refresh_from_db()
| 2.28125 | 2 |
tutorial/test input.py | nataliapryakhina/FA_group3 | 0 | 4253 | import numpy as np
import tensorflow as tf
from tensorflow import keras
import matplotlib.pyplot as plt
from os import listdir
from tensorflow.keras.callbacks import ModelCheckpoint
dataDir = "./data/trainSmallFA/"
files = listdir(dataDir)
files.sort()
totalLength = len(files)
inputs = np.empty((len(files), 3, 64, 64))
targets = np.empty((len(files), 3, 64, 64))
for i, file in enumerate(files):
npfile = np.load(dataDir + file)
d = npfile['a']
inputs[i] = d[0:3] # inx, iny, mask
targets[i] = d[3:6] # p, velx, vely
# print("inputs shape = ", inputs.shape)
print(np.shape(targets[:, 1, :, :].flatten()))
maxvel = np.amax(np.sqrt(targets[:, 1, :, :]* targets[:, 1, :, :]
+ targets[:, 2, :, :]* targets[:, 2, :, :]))
print(maxvel)
targets[:, 1:3, :, :] /= maxvel
targets[:, 0, :, :] /= np.amax(targets[:, 0, :, :])
for input in inputs:
plt.figure(num=None, figsize=(20, 10), dpi=80, facecolor='w', edgecolor='k')
# predicted data
plt.subplot(331)
plt.title('x vel')
plt.imshow(input[0, :, :], cmap='jet') # vmin=-100,vmax=100, cmap='jet')
plt.colorbar()
plt.subplot(332)
plt.title('y vel')
plt.imshow(input[1, :, :], cmap='jet')
plt.colorbar()
plt.show() | 2.328125 | 2 |
pepper/responder/brain.py | cltl/pepper | 29 | 4254 | <filename>pepper/responder/brain.py<gh_stars>10-100
from pepper.framework import *
from pepper import logger
from pepper.language import Utterance
from pepper.language.generation.thoughts_phrasing import phrase_thoughts
from pepper.language.generation.reply import reply_to_question
from .responder import Responder, ResponderType
from pepper.language import UtteranceType
from pepper.knowledge import sentences, animations
from random import choice
import re
from typing import Optional, Union, Tuple, Callable
class BrainResponder(Responder):
def __init__(self):
self._log = logger.getChild(self.__class__.__name__)
@property
def type(self):
return ResponderType.Brain
@property
def requirements(self):
return [TextToSpeechComponent, BrainComponent]
def respond(self, utterance, app):
# type: (Utterance, Union[TextToSpeechComponent, BrainComponent]) -> Optional[Tuple[float, Callable]]
try:
utterance.analyze()
self._log.debug("TRIPLE: {}".format(utterance.triple))
if utterance.triple is not None:
brain_response_statement = []
brain_response_question = []
if utterance.type == UtteranceType.QUESTION:
brain_response_question = app.brain.query_brain(utterance)
reply = reply_to_question(brain_response_question)
self._log.info("REPLY to question: {}".format(reply))
else:
brain_response_statement = app.brain.update(utterance, reason_types=True) # Searches for types in dbpedia
reply = phrase_thoughts(brain_response_statement, True, True, True)
self._log.info("REPLY to statement: {}".format(reply))
if (isinstance(reply, str) or isinstance(reply, unicode)) and reply != "":
# Return Score and Response
# Make sure to not execute the response here, but just to return the response function
return 1.0, lambda: app.say(re.sub(r"[\s+_]", " ", reply))
elif brain_response_statement:
# Thank Human for the Data!
return 1.0, lambda: app.say("{} {}".format(choice([choice(sentences.THANK), choice(sentences.HAPPY)]),
choice(sentences.PARSED_KNOWLEDGE)), animations.HAPPY)
elif brain_response_question:
# Apologize to human for not knowing
return 1.0, lambda: app.say("{} {}".format(choice(sentences.SORRY),
choice(sentences.NO_ANSWER)), animations.ASHAMED)
except Exception as e:
self._log.error(e)
| 2.5 | 2 |
fedora_college/modules/content/views.py | fedora-infra/fedora-college | 2 | 4255 | <filename>fedora_college/modules/content/views.py
# -*- coding: utf-8 -*-
import re
from unicodedata import normalize
from flask import Blueprint, render_template, current_app
from flask import redirect, url_for, g, abort
from sqlalchemy import desc
from fedora_college.core.database import db
from fedora_college.modules.content.forms import * # noqa
from fedora_college.core.models import * # noqa
from fedora_college.fedmsgshim import publish
from flask_fas_openid import fas_login_required
bundle = Blueprint('content', __name__, template_folder='templates')
from fedora_college.modules.content.media import * # noqa
_punct_re = re.compile(r'[\t !"#$%&\'()*\-/<=>?@\[\\\]^_`{|},.]+')
# Verify if user is authenticated
def authenticated():
return hasattr(g, 'fas_user') and g.fas_user
# generate url slug
def slugify(text, delim=u'-'):
"""Generates an slightly worse ASCII-only slug."""
result = []
for word in _punct_re.split(text.lower()):
word = normalize('NFKD', word).encode('ascii', 'ignore')
if word:
result.append(word)
return unicode(delim.join(result))
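# For example, slugify(u'Hello, World!') yields u'hello-world' (illustrative; follows
# from the _punct_re split and the NFKD/ASCII fold above).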
# attach tags to a content entry
def attach_tags(tags, content):
rem = TagsMap.query.filter_by(content_id=content.content_id).all()
for r in rem:
db.session.delete(r)
db.session.commit()
for tag in tags:
tag_db = Tags.query.filter_by(tag_text=tag).first()
if tag_db is None:
tag_db = Tags(tag)
db.session.add(tag_db)
db.session.commit()
Map = TagsMap(tag_db.tag_id, content.content_id)
db.session.add(Map)
db.session.commit()
# delete content
@bundle.route('/content/delete/<posturl>', methods=['GET', 'POST'])
@bundle.route('/content/delete/<posturl>/', methods=['GET', 'POST'])
@fas_login_required
def delete_content(posturl=None):
if posturl is not None:
db.session.rollback()
content = Content.query.filter_by(slug=posturl).first_or_404()
rem = TagsMap.query.filter_by(
content_id=content.content_id).all()
'''delete mapped tags'''
for r in rem:
db.session.delete(r)
comments = Comments.query.filter_by(
content_id=content.content_id).all()
        '''delete comments with foreign keys'''
for r in comments:
db.session.delete(r)
db.session.delete(content)
db.session.commit()
return redirect(url_for('profile.user',
nickname=g.fas_user['username']))
abort(404)
# add / edit more content
@bundle.route('/content/add/', methods=['GET', 'POST'])
@bundle.route('/content/add', methods=['GET', 'POST'])
@bundle.route('/content/edit/<posturl>/', methods=['GET', 'POST'])
@bundle.route('/content/edit/<posturl>', methods=['GET', 'POST'])
@fas_login_required
def addcontent(posturl=None):
if authenticated():
form = CreateContent()
form_action = url_for('content.addcontent')
media = Media.query.order_by(desc(Media.timestamp)).limit(10).all()
if posturl is not None:
content = Content.query.filter_by(slug=posturl).first_or_404()
form = CreateContent(obj=content)
if form.validate_on_submit():
form.populate_obj(content)
tags = str(form.tags.data).split(',')
attach_tags(tags, content)
content.rehtml()
db.session.commit()
'''Publish the message'''
msg = content.getdata()
msg['title'] = content.title
msg['link'] = current_app.config[
'EXTERNAL_URL'] + content.slug
publish(
topic=current_app.config['CONTENT_EDIT_TOPIC'],
msg=msg
)
if content.type_content == "blog":
print url_for('content.blog', slug=posturl)
return redirect(url_for('content.blog', slug=posturl))
return redirect(url_for('home.content', slug=posturl))
else:
if form.validate_on_submit():
url_name = slugify(form.title.data)
content = Content(form.title.data,
url_name,
form.description.data,
form.active.data,
form.tags.data,
g.fas_user['username'],
form.type_content.data
)
tags = str(form.tags.data).split(',')
try:
db.session.add(content)
db.session.commit()
attach_tags(tags, content)
'''Publish the message'''
msg = content.getdata()
msg['title'] = content.title
msg['link'] = current_app.config[
'EXTERNAL_URL'] + url_name
publish(
topic=current_app.config['CONTENT_CREATE_TOPIC'],
msg=msg
)
if content.type_content == "blog":
                        return redirect(url_for('content.blog', slug=url_name))
return redirect(url_for('home.content', slug=url_name))
# Duplicate entry
except Exception as e:
                    db.session.rollback()
                    return str(e)
tags = Tags.query.all()
return render_template('content/edit_content.html', form=form,
form_action=form_action, title="Create Content",
media=media[0:5], tags=tags)
abort(404)
# View Blog post
@bundle.route('/blog', methods=['GET', 'POST'])
@bundle.route('/blog/', methods=['GET', 'POST'])
@bundle.route('/blog/<slug>/', methods=['GET', 'POST'])
@bundle.route('/blog/<slug>', methods=['GET', 'POST'])
@bundle.route('/blog/page/<id>', methods=['GET', 'POST'])
@bundle.route('/blog/page/<id>', methods=['GET', 'POST'])
def blog(slug=None, id=0):
id = int(id)
screen = Content.query. \
filter_by(
type_content="lecture",
active=True
).limit(10).all()
if slug is not None:
try:
posts = Content.query. \
filter_by(slug=slug).all()
except:
posts = "No such posts in database."
else:
try:
posts = Content.query. \
filter_by(type_content="blog").all()
if id > 0:
posts = posts[id - 1:id + 5]
else:
posts = posts[0:5]
except:
posts = []
return render_template('blog/index.html',
title='Blog',
content=posts,
screen=screen,
id=id,
slug=slug
)
| 2.0625 | 2 |
tests/components/airthings/test_config_flow.py | MrDelik/core | 30,023 | 4256 | <reponame>MrDelik/core<filename>tests/components/airthings/test_config_flow.py
"""Test the Airthings config flow."""
from unittest.mock import patch
import airthings
from homeassistant import config_entries
from homeassistant.components.airthings.const import CONF_ID, CONF_SECRET, DOMAIN
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import RESULT_TYPE_CREATE_ENTRY, RESULT_TYPE_FORM
from tests.common import MockConfigEntry
TEST_DATA = {
CONF_ID: "client_id",
CONF_SECRET: "secret",
}
async def test_form(hass: HomeAssistant) -> None:
"""Test we get the form."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == RESULT_TYPE_FORM
assert result["errors"] is None
with patch("airthings.get_token", return_value="test_token",), patch(
"homeassistant.components.airthings.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
TEST_DATA,
)
await hass.async_block_till_done()
assert result2["type"] == RESULT_TYPE_CREATE_ENTRY
assert result2["title"] == "Airthings"
assert result2["data"] == TEST_DATA
assert len(mock_setup_entry.mock_calls) == 1
async def test_form_invalid_auth(hass: HomeAssistant) -> None:
"""Test we handle invalid auth."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"airthings.get_token",
side_effect=airthings.AirthingsAuthError,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
TEST_DATA,
)
assert result2["type"] == RESULT_TYPE_FORM
assert result2["errors"] == {"base": "invalid_auth"}
async def test_form_cannot_connect(hass: HomeAssistant) -> None:
"""Test we handle cannot connect error."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"airthings.get_token",
side_effect=airthings.AirthingsConnectionError,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
TEST_DATA,
)
assert result2["type"] == RESULT_TYPE_FORM
assert result2["errors"] == {"base": "cannot_connect"}
async def test_form_unknown_error(hass: HomeAssistant) -> None:
"""Test we handle unknown error."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"airthings.get_token",
side_effect=Exception,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
TEST_DATA,
)
assert result2["type"] == RESULT_TYPE_FORM
assert result2["errors"] == {"base": "unknown"}
async def test_flow_entry_already_exists(hass: HomeAssistant) -> None:
"""Test user input for config_entry that already exists."""
first_entry = MockConfigEntry(
domain="airthings",
data=TEST_DATA,
unique_id=TEST_DATA[CONF_ID],
)
first_entry.add_to_hass(hass)
with patch("airthings.get_token", return_value="token"):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}, data=TEST_DATA
)
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
| 2.34375 | 2 |
utils/utils.py | scomup/StereoNet-ActiveStereoNet | 0 | 4257 | <reponame>scomup/StereoNet-ActiveStereoNet
# ------------------------------------------------------------------------------
# Copyright (c) NKU
# Licensed under the MIT License.
# Written by <NAME> (<EMAIL>)
# ------------------------------------------------------------------------------
import os
import torch
import torch.nn.functional as F
#import cv2 as cv
import numpy as np
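# GERF_loss below is a Charbonnier-style robust error, sqrt((GT - pred)^2 + 4) / 2 - 1,
# averaged over pixels with valid (positive) ground-truth disparity.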
def GERF_loss(GT, pred, args):
# mask = (GT < args.maxdisp) & (GT >= 0)
mask = GT > 0
mask.detach_()
# print(mask.size(), GT.size(), pred.size())
count = len(torch.nonzero(mask))
# print(count)
if count == 0:
count = 1
return torch.sum(torch.sqrt(torch.pow(GT[mask] - pred[mask], 2) + 4) /2 - 1) / count
def smooth_L1_loss(GT, pred, args):
mask = GT < args.maxdisp
mask.detach_()
# loss = F.smooth_l1_loss(pred[mask], GT[mask], size_average=True)
loss = (pred[mask] - GT[mask]).abs().mean()
return loss
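# Note: with the F.smooth_l1_loss call commented out above, smooth_L1_loss effectively
# returns the mean absolute error over pixels with GT below args.maxdisp.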
if __name__ == '__main__':
pass
# import matplotlib.pyplot as plt
# image = cv.imread('/media/lxy/sdd1/ActiveStereoNet/StereoNet_pytorch/results/forvideo/iter-122.jpg')
#im_gray = cv.imread('/media/lxy/sdd1/ActiveStereoNet/StereoNet_pytorch/results/forvideo/iter-133.jpg', cv.IMREAD_GRAYSCALE)
# print(im_gray.shape)
#im_color = cv.applyColorMap(im_gray*2, cv.COLORMAP_JET)
# cv.imshow('test', im_color)
# cv.waitKey(0)
#cv.imwrite('test.png',im_color)
# print(image.shape)
# plt.figure('Image')
# sc =plt.imshow(image)
# sc.set_cmap('hsv')
# plt.colorbar()
# plt.axis('off')
# plt.show()
# print('end')
# image[:,:,0].save('/media/lxy/sdd1/ActiveStereoNet/StereoNet_pytorch/results/pretrained_StereoNet_single/it1er-151.jpg')
| 2.15625 | 2 |
worker/main.py | Devalent/facial-recognition-service | 0 | 4258 | <reponame>Devalent/facial-recognition-service
from aiohttp import web
import base64
import io
import face_recognition
async def encode(request):
request_data = await request.json()
# Read base64 encoded image
url = request_data['image'].split(',')[1]
image = io.BytesIO(base64.b64decode(url))
# Load image data
np_array = face_recognition.load_image_file(image)
# Find face locations
locations = face_recognition.face_locations(np_array)
# Create face encodings
encodings = face_recognition.face_encodings(np_array, locations)
results = []
for i in range(len(locations)):
top, right, bottom, left = locations[i]
result = {
'x': left,
'y': top,
'width': right - left,
'height': bottom - top,
'encodings': encodings[i].tolist()
}
results.append(result)
return web.json_response(results)
def main():
app = web.Application()
app.router.add_post('/encode', encode)
web.run_app(app, host='0.0.0.0', port='3000')
main()
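# Example request (illustrative; assumes the service is running locally on port 3000):
#   curl -X POST http://localhost:3000/encode \
#        -H 'Content-Type: application/json' \
#        -d '{"image": "data:image/jpeg;base64,<BASE64 IMAGE DATA>"}'
# The response is a JSON list with one {x, y, width, height, encodings} object per detected face.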
| 2.71875 | 3 |
rblod/setup.py | TiKeil/Two-scale-RBLOD | 0 | 4259 | # ~~~
# This file is part of the paper:
#
# " An Online Efficient Two-Scale Reduced Basis Approach
# for the Localized Orthogonal Decomposition "
#
# https://github.com/TiKeil/Two-scale-RBLOD.git
#
# Copyright 2019-2021 all developers. All rights reserved.
# License: Licensed as BSD 2-Clause License (http://opensource.org/licenses/BSD-2-Clause)
# Authors:
# <NAME>
# <NAME>
# ~~~
from setuptools import setup
setup(name='rblod',
version='2021.1',
description='Pymor support for RBLOD',
author='<NAME>',
author_email='<EMAIL>',
license='MIT',
packages=['rblod'])
| 1.382813 | 1 |
bin/euclid_fine_plot_job_array.py | ndeporzio/cosmicfish | 0 | 4260 | import os
import shutil
import numpy as np
import pandas as pd
import seaborn as sns
import cosmicfish as cf
import matplotlib.pyplot as plt
import dill
# Instruct pyplot to use seaborn
sns.set()
# Set project, data, CLASS directories
projectdir = os.environ['STORAGE_DIR']
datastore = os.environ['DATASTORE_DIR']
classpath = os.environ['CLASS_DIR']
fidx = int(os.environ['FORECAST_INDEX'])
# Generate output paths
fp_resultsdir = projectdir
cf.makedirectory(fp_resultsdir)
# Specify resolution of numerical integrals
derivative_step = 0.008 # How much to vary parameter to calculate numerical derivative
g_derivative_step = 0.1
mu_integral_step = 0.05 # For calculating numerical integral wrt mu between -1 and 1
# Linda Fiducial Cosmology
fp_fid = {
"A_s" : 2.2321e-9,
"n_s" : 0.967,
"omega_b" : 0.02226,
"omega_cdm" : 0.1127,
"tau_reio" : 0.0598,
"h" : 0.701,
"T_cmb" : 2.726, # Units [K]
"N_ncdm" : 4.,
"deg_ncdm" : 1.0,
"T_ncdm" : (0.79/2.726), # Units [T_cmb].
"m_ncdm" : 0.01, # Units [eV]
"b0" : 1.0,
"beta0" : 1.7,
"beta1" : 1.0,
"alphak2" : 1.0,
"sigma_fog_0" : 250000, #Units [m s^-2]
"N_eff" : 0.0064, #We allow relativistic neutrinos in addition to our DM relic
"relic_vary" : "N_ncdm", # Fix T_ncdm or m_ncdm
"m_nu" : 0.02
}
# EUCLID values
z_table = np.array([0.65, 0.75, 0.85, 0.95, 1.05, 1.15, 1.25, 1.35, 1.45, 1.55, 1.65, 1.75, 1.85, 1.95])
dNdz = np.array([2434.280, 4364.812, 4728.559, 4825.798, 4728.797, 4507.625, 4269.851, 3720.657, 3104.309,
2308.975, 1514.831, 1474.707, 893.716, 497.613])
skycover = 0.3636
# Run Fisher Forecast
full_masses = np.geomspace(0.01, 10., 21)
full_temps = np.array([0.79, 0.91, 0.94, 1.08])
mass_index=(fidx % 21)
temp_index=(fidx // 21)
masses = np.array([full_masses[mass_index]])
temps = np.array([full_temps[temp_index]])
omegacdm_set = np.array([
fp_fid['omega_cdm']
- ((masses/cf.NEUTRINO_SCALE_FACTOR)* np.power(tval / 1.95, 3.))
for tidx, tval in enumerate(temps)])
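# omega_cdm is reduced by the relic's contribution (set by its mass and scaling with
# (T/1.95 K)^3), so the combined cold dark matter + relic density stays at its fiducial
# value for every point on the (mass, temperature) grid.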
fp_fiducialset = [[
dict(fp_fid, **{
'm_ncdm' : masses[midx],
'omega_cdm' : omegacdm_set[tidx, midx],
'T_ncdm' : temps[tidx]/2.726})
for midx, mval in enumerate(masses)]
for tidx, tval in enumerate(temps)]
fp_forecastset = [[cf.forecast(
classpath,
datastore,
'2relic',
fidval,
z_table,
"EUCLID",
dNdz,
fsky=skycover,
dstep=derivative_step,
gstep=g_derivative_step,
RSD=True,
FOG=True,
AP=True,
COV=True)
for fididx, fidval in enumerate(fidrowvals)]
for fidrowidx, fidrowvals in enumerate(fp_fiducialset)]
#dill.load_session('')
for frowidx, frowval in enumerate(fp_forecastset):
for fidx, fcst in enumerate(frowval):
if type(fcst.fisher)==type(None):
fcst.gen_pm()
fcst.gen_fisher(
fisher_order=[
'omega_b',
'omega_cdm',
'n_s',
'A_s',
'tau_reio',
'h',
'N_ncdm',
'M_ncdm',
'sigma_fog',
'beta0',
'beta1',
'alpha_k2'],
mu_step=mu_integral_step,
skipgen=False)
print("Relic Forecast ", fidx, " complete...")
dill.dump_session(os.path.join(fp_resultsdir, 'fp_'+str(temp_index)+'_'+str(mass_index)+'.db'))
else:
print('Fisher matrix already generated!')
| 2.296875 | 2 |
project4/test/test_arm.py | XDZhelheim/CS205_C_CPP_Lab | 3 | 4261 | <reponame>XDZhelheim/CS205_C_CPP_Lab
import os
if __name__ == "__main__":
dims = ["32", "64", "128", "256", "512", "1024", "2048"]
for dim in dims:
os.system(
f"perf stat -e r11 -x, -r 10 ../matmul.out ../data/mat-A-{dim}.txt ../data/mat-B-{dim}.txt ./out/out-{dim}.txt 2>>res_arm.csv"
)
print(f"Finished {dim}")
print("Finished.") | 2.0625 | 2 |
src/oci/apm_traces/models/query_result_row_type_summary.py | Manny27nyc/oci-python-sdk | 249 | 4262 | <reponame>Manny27nyc/oci-python-sdk<filename>src/oci/apm_traces/models/query_result_row_type_summary.py
# coding: utf-8
# Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class QueryResultRowTypeSummary(object):
"""
A summary of the datatype, unit and related metadata of an individual row element of a query result row that is returned.
"""
def __init__(self, **kwargs):
"""
Initializes a new QueryResultRowTypeSummary object with values from keyword arguments.
The following keyword arguments are supported (corresponding to the getters/setters of this class):
:param data_type:
The value to assign to the data_type property of this QueryResultRowTypeSummary.
:type data_type: str
:param unit:
The value to assign to the unit property of this QueryResultRowTypeSummary.
:type unit: str
:param display_name:
The value to assign to the display_name property of this QueryResultRowTypeSummary.
:type display_name: str
:param expression:
The value to assign to the expression property of this QueryResultRowTypeSummary.
:type expression: str
:param query_result_row_type_summaries:
The value to assign to the query_result_row_type_summaries property of this QueryResultRowTypeSummary.
:type query_result_row_type_summaries: list[oci.apm_traces.models.QueryResultRowTypeSummary]
"""
self.swagger_types = {
'data_type': 'str',
'unit': 'str',
'display_name': 'str',
'expression': 'str',
'query_result_row_type_summaries': 'list[QueryResultRowTypeSummary]'
}
self.attribute_map = {
'data_type': 'dataType',
'unit': 'unit',
'display_name': 'displayName',
'expression': 'expression',
'query_result_row_type_summaries': 'queryResultRowTypeSummaries'
}
self._data_type = None
self._unit = None
self._display_name = None
self._expression = None
self._query_result_row_type_summaries = None
@property
def data_type(self):
"""
Gets the data_type of this QueryResultRowTypeSummary.
Datatype of the query result row element.
:return: The data_type of this QueryResultRowTypeSummary.
:rtype: str
"""
return self._data_type
@data_type.setter
def data_type(self, data_type):
"""
Sets the data_type of this QueryResultRowTypeSummary.
Datatype of the query result row element.
:param data_type: The data_type of this QueryResultRowTypeSummary.
:type: str
"""
self._data_type = data_type
@property
def unit(self):
"""
Gets the unit of this QueryResultRowTypeSummary.
Granular unit in which the query result row element's data is represented.
:return: The unit of this QueryResultRowTypeSummary.
:rtype: str
"""
return self._unit
@unit.setter
def unit(self, unit):
"""
Sets the unit of this QueryResultRowTypeSummary.
Granular unit in which the query result row element's data is represented.
:param unit: The unit of this QueryResultRowTypeSummary.
:type: str
"""
self._unit = unit
@property
def display_name(self):
"""
Gets the display_name of this QueryResultRowTypeSummary.
Alias name if an alias is used for the query result row element or an assigned display name from the query language
in some default cases.
:return: The display_name of this QueryResultRowTypeSummary.
:rtype: str
"""
return self._display_name
@display_name.setter
def display_name(self, display_name):
"""
Sets the display_name of this QueryResultRowTypeSummary.
Alias name if an alias is used for the query result row element or an assigned display name from the query language
in some default cases.
:param display_name: The display_name of this QueryResultRowTypeSummary.
:type: str
"""
self._display_name = display_name
@property
def expression(self):
"""
Gets the expression of this QueryResultRowTypeSummary.
Actual show expression in the user typed query that produced this column.
:return: The expression of this QueryResultRowTypeSummary.
:rtype: str
"""
return self._expression
@expression.setter
def expression(self, expression):
"""
Sets the expression of this QueryResultRowTypeSummary.
Actual show expression in the user typed query that produced this column.
:param expression: The expression of this QueryResultRowTypeSummary.
:type: str
"""
self._expression = expression
@property
def query_result_row_type_summaries(self):
"""
Gets the query_result_row_type_summaries of this QueryResultRowTypeSummary.
A query result row type summary object that represents a nested table structure.
:return: The query_result_row_type_summaries of this QueryResultRowTypeSummary.
:rtype: list[oci.apm_traces.models.QueryResultRowTypeSummary]
"""
return self._query_result_row_type_summaries
@query_result_row_type_summaries.setter
def query_result_row_type_summaries(self, query_result_row_type_summaries):
"""
Sets the query_result_row_type_summaries of this QueryResultRowTypeSummary.
A query result row type summary object that represents a nested table structure.
:param query_result_row_type_summaries: The query_result_row_type_summaries of this QueryResultRowTypeSummary.
:type: list[oci.apm_traces.models.QueryResultRowTypeSummary]
"""
self._query_result_row_type_summaries = query_result_row_type_summaries
def __repr__(self):
return formatted_flat_dict(self)
def __eq__(self, other):
if other is None:
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
| 2.15625 | 2 |
jaxformer/hf/sample.py | salesforce/CodeGen | 105 | 4263 | <gh_stars>100-1000
# Copyright (c) 2022, salesforce.com, inc.
# All rights reserved.
# SPDX-License-Identifier: BSD-3-Clause
# For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause
import os
import re
import time
import random
import argparse
import torch
from transformers import GPT2TokenizerFast
from jaxformer.hf.codegen.modeling_codegen import CodeGenForCausalLM
########################################################################
# util
class print_time:
def __init__(self, desc):
self.desc = desc
def __enter__(self):
print(self.desc)
self.t = time.time()
def __exit__(self, type, value, traceback):
print(f'{self.desc} took {time.time()-self.t:.02f}s')
def set_env():
os.environ['TOKENIZERS_PARALLELISM'] = 'false'
def set_seed(seed, deterministic=True):
random.seed(seed)
os.environ['PYTHONHASHSEED'] = str(seed)
torch.manual_seed(seed)
if torch.cuda.is_available():
torch.cuda.manual_seed(seed)
torch.backends.cudnn.deterministic = deterministic
torch.backends.cudnn.benchmark = not deterministic
# torch.use_deterministic_algorithms(deterministic)
def cast(model, fp16=True):
if fp16:
model.half()
return model
########################################################################
# model
def create_model(ckpt, fp16=True):
if fp16:
return CodeGenForCausalLM.from_pretrained(ckpt, revision='float16', torch_dtype=torch.float16, low_cpu_mem_usage=True)
else:
return CodeGenForCausalLM.from_pretrained(ckpt)
def create_tokenizer():
t = GPT2TokenizerFast.from_pretrained('gpt2')
t.max_model_input_sizes['gpt2'] = 1e20
return t
def include_whitespace(t, n_min=2, n_max=20, as_special_tokens=False):
t.add_tokens([' ' * n for n in reversed(range(n_min, n_max))], special_tokens=as_special_tokens)
return t
def include_tabs(t, n_min=2, n_max=20, as_special_tokens=False):
t.add_tokens(['\t' * n for n in reversed(range(n_min, n_max))], special_tokens=as_special_tokens)
return t
def create_custom_gpt2_tokenizer():
t = create_tokenizer()
t = include_whitespace(t=t, n_min=2, n_max=32, as_special_tokens=False)
t = include_tabs(t=t, n_min=2, n_max=10, as_special_tokens=False)
return t
########################################################################
# sample
def sample(
device,
model,
tokenizer,
context,
pad_token_id,
num_return_sequences=1,
temp=0.2,
top_p=0.95,
max_length_sample=128,
max_length=2048
):
input_ids = tokenizer(
context,
truncation=True,
padding=True,
max_length=max_length,
return_tensors='pt',
).input_ids
input_ids_len = input_ids.shape[1]
assert input_ids_len < max_length
with torch.no_grad():
input_ids = input_ids.to(device)
tokens = model.generate(
input_ids,
do_sample=True,
num_return_sequences=num_return_sequences,
temperature=temp,
max_length=input_ids_len + max_length_sample,
top_p=top_p,
pad_token_id=pad_token_id,
use_cache=True,
)
text = tokenizer.batch_decode(tokens[:, input_ids_len:, ...])
return text
def truncate(completion):
def find_re(string, pattern, start_pos):
m = pattern.search(string, start_pos)
return m.start() if m else -1
terminals = [
re.compile(r, re.MULTILINE)
for r in
[
'^#',
re.escape('<|endoftext|>'),
"^'''",
'^"""',
'\n\n\n'
]
]
prints = list(re.finditer('^print', completion, re.MULTILINE))
if len(prints) > 1:
completion = completion[:prints[1].start()]
defs = list(re.finditer('^def', completion, re.MULTILINE))
if len(defs) > 1:
completion = completion[:defs[1].start()]
start_pos = 0
terminals_pos = [pos for pos in [find_re(completion, terminal, start_pos) for terminal in terminals] if pos != -1]
if len(terminals_pos) > 0:
return completion[:min(terminals_pos)]
else:
return completion
def test_truncate():
assert truncate('\nif len_a > len_b:\n result = a\nelse:\n result = b\n\n\n\n#') == '\nif len_a > len_b:\n result = a\nelse:\n result = b'
########################################################################
# main
def main():
# (0) constants
models_nl = ['codegen-350M-nl', 'codegen-2B-nl', 'codegen-6B-nl', 'codegen-16B-nl']
models_pl = ['codegen-350M-multi', 'codegen-2B-multi', 'codegen-6B-multi', 'codegen-16B-multi', 'codegen-350M-mono', 'codegen-2B-mono', 'codegen-6B-mono', 'codegen-16B-mono']
models = models_nl + models_pl
# (1) params
parser = argparse.ArgumentParser()
parser.add_argument('--model', type=str, choices=models, default='codegen-350M-mono')
parser.add_argument('--device', type=str, default='cuda:0')
parser.add_argument('--rng-seed', type=int, default=42)
parser.add_argument('--rng-deterministic', type=bool, default=True)
parser.add_argument('--p', type=float, default=0.95)
parser.add_argument('--t', type=float, default=0.2)
parser.add_argument('--max-length', type=int, default=128)
parser.add_argument('--batch-size', type=int, default=1)
parser.add_argument('--no-fp16', action="store_false")
parser.add_argument('--pad', type=int, default=50256)
parser.add_argument('--context', type=str, default='def helloworld():')
args = parser.parse_args()
# (2) preamble
set_env()
set_seed(args.rng_seed, deterministic=args.rng_deterministic)
device = torch.device(args.device)
if device.type == "cpu":
args.no_fp16 = False
if args.model.startswith("codegen-16B"):
args.no_fp16 = True
ckpt = f'./checkpoints/{args.model}'
# (3) load
with print_time('loading parameters'):
model = create_model(ckpt=ckpt, fp16=args.no_fp16).to(device)
with print_time('loading tokenizer'):
if args.model in models_pl:
tokenizer = create_custom_gpt2_tokenizer()
else:
tokenizer = create_tokenizer()
tokenizer.padding_side = 'left'
tokenizer.pad_token = args.pad
# (4) sample
with print_time('sampling'):
completion = sample(device=device, model=model, tokenizer=tokenizer, context=args.context, pad_token_id=args.pad, num_return_sequences=args.batch_size, temp=args.t, top_p=args.p, max_length_sample=args.max_length)[0]
truncation = truncate(completion)
print('=' * 100)
print(completion)
print('=' * 100)
print(args.context+truncation)
print('=' * 100)
if __name__ == '__main__':
test_truncate()
main()
print('done.')
| 2 | 2 |
tests/services/test_rover_runner_service.py | dev-11/mars-rover-challenge | 0 | 4264 | <reponame>dev-11/mars-rover-challenge
import unittest
from services import RoverRunnerService
from tests.test_environment.marses import small_mars_with_one_rover_empty_commands
from tests.test_environment import mocks as m
from data_objects import Rover
class TestRoverRunnerService(unittest.TestCase):
def test_rover_runner_moves_rover_forward(self):
grid = small_mars_with_one_rover_empty_commands.grid
rover = small_mars_with_one_rover_empty_commands.rover_setups[0].rover
tss = m.get_mocked_turn_command_selector_turn_left_from_north_command_only()
mss = m.get_mocked_move_command_selector_north_command_only()
rrs = RoverRunnerService(grid, rover, mss, tss)
final_pos = rrs.run(['M'])
self.assertEqual(Rover(0, 1, 'N'), final_pos)
def test_rover_runner_turns_rover_left(self):
grid = small_mars_with_one_rover_empty_commands.grid
rover = small_mars_with_one_rover_empty_commands.rover_setups[0].rover
tss = m.get_mocked_turn_command_selector_turn_left_from_north_command_only()
mss = m.get_mocked_move_command_selector_north_command_only()
rrs = RoverRunnerService(grid, rover, mss, tss)
final_pos = rrs.run(['L'])
self.assertEqual(Rover(0, 0, 'W'), final_pos)
def test_rover_runner_turns_rover_right(self):
grid = small_mars_with_one_rover_empty_commands.grid
rover = small_mars_with_one_rover_empty_commands.rover_setups[0].rover
tss = m.get_mocked_turn_command_selector_turn_right_from_north_command_only()
mss = m.get_mocked_move_command_selector_north_command_only()
rrs = RoverRunnerService(grid, rover, mss, tss)
final_pos = rrs.run(['R'])
self.assertEqual(Rover(0, 0, 'E'), final_pos)
def test_rover_runner_goes_off_gird_east(self):
grid = small_mars_with_one_rover_empty_commands.grid
rover = Rover(1, 1, "E")
tss = m.get_mocked_turn_command_selector_turn_right_from_north_command_only()
mss = m.get_mocked_move_command_selector_east_command_only()
rrs = RoverRunnerService(grid, rover, mss, tss)
self.assertRaises(ValueError, rrs.run, ['M'])
def test_rover_runner_goes_off_gird_north(self):
grid = small_mars_with_one_rover_empty_commands.grid
rover = Rover(1, 1, "N")
tss = m.get_mocked_turn_command_selector_turn_right_from_north_command_only()
mss = m.get_mocked_move_command_selector_north_command_only()
rrs = RoverRunnerService(grid, rover, mss, tss)
self.assertRaises(ValueError, rrs.run, ['M'])
def test_rover_runner_goes_off_gird_west(self):
grid = small_mars_with_one_rover_empty_commands.grid
rover = Rover(0, 1, "W")
tss = m.get_mocked_turn_command_selector_turn_right_from_north_command_only()
mss = m.get_mocked_move_command_selector_west_command_only()
rrs = RoverRunnerService(grid, rover, mss, tss)
self.assertRaises(ValueError, rrs.run, ['M'])
def test_rover_runner_goes_off_gird_south(self):
grid = small_mars_with_one_rover_empty_commands.grid
rover = Rover(0, 0, "S")
tss = m.get_mocked_turn_command_selector_turn_right_from_north_command_only()
mss = m.get_mocked_move_command_selector_south_command_only()
rrs = RoverRunnerService(grid, rover, mss, tss)
self.assertRaises(ValueError, rrs.run, ['M'])
def test_rover_runner_does_nothing_empty_command(self):
grid = small_mars_with_one_rover_empty_commands.grid
rover = small_mars_with_one_rover_empty_commands.rover_setups[0].rover
tss = m.get_mocked_turn_command_selector_turn_left_from_north_command_only()
mss = m.get_mocked_move_command_selector_north_command_only()
rrs = RoverRunnerService(grid, rover, mss, tss)
final_pos = rrs.run([])
self.assertEqual(rover, final_pos)
def test_rover_runner_raises_error_for_None_command(self):
grid = small_mars_with_one_rover_empty_commands.grid
rover = small_mars_with_one_rover_empty_commands.rover_setups[0].rover
tss = m.get_mocked_turn_command_selector_turn_left_from_north_command_only()
mss = m.get_mocked_move_command_selector_north_command_only()
rrs = RoverRunnerService(grid, rover, mss, tss)
self.assertRaises(TypeError, rrs.run, None)
| 2.609375 | 3 |
retrain_with_rotnet.py | ericdaat/self-label | 440 | 4265 | import argparse
import warnings
warnings.simplefilter("ignore", UserWarning)
import files
from tensorboardX import SummaryWriter
import os
import numpy as np
import time
import torch
import torch.optim
import torch.nn as nn
import torch.utils.data
import torchvision
import torchvision.transforms as tfs
from data import DataSet,return_model_loader
from util import weight_init, write_conv, setup_runtime, AverageMeter, MovingAverage
def RotationDataLoader(image_dir, is_validation=False,
batch_size=256, crop_size=224, num_workers=4,shuffle=True):
normalize = tfs.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
transforms = tfs.Compose([
tfs.RandomResizedCrop(crop_size),
tfs.RandomGrayscale(p=0.2),
tfs.ColorJitter(0.4, 0.4, 0.4, 0.4),
tfs.RandomHorizontalFlip(),
tfs.Lambda(lambda img: torch.stack([normalize(tfs.ToTensor()(
tfs.functional.rotate(img, angle))) for angle in [0, 90, 180, 270]]
))
])
if is_validation:
dataset = DataSet(torchvision.datasets.ImageFolder(image_dir + '/val', transforms))
else:
dataset = DataSet(torchvision.datasets.ImageFolder(image_dir + '/train', transforms))
loader = torch.utils.data.DataLoader(
dataset,
batch_size=batch_size,
shuffle=shuffle,
num_workers=num_workers,
pin_memory=True,
drop_last=False
)
return loader
class Optimizer:
def __init__(self):
self.num_epochs = 30
self.lr = 0.05
self.lr_schedule = lambda epoch: (self.lr * (0.1 ** (epoch//args.lrdrop)))*(epoch<80) + (epoch>=80)*self.lr*(0.1**3)
self.momentum = 0.9
self.weight_decay = 10**(-5)
self.resume = True
self.checkpoint_dir = None
self.writer = None
self.K = args.ncl
self.dev = torch.device("cuda" if torch.cuda.is_available() else "cpu")
self.val_loader = RotationDataLoader(args.imagenet_path, is_validation=True,
batch_size=args.batch_size, num_workers=args.workers,shuffle=True)
def optimize_epoch(self, model, optimizer, loader, epoch, validation=False):
print(f"Starting epoch {epoch}, validation: {validation} " + "="*30)
loss_value = AverageMeter()
rotacc_value = AverageMeter()
# house keeping
if not validation:
model.train()
lr = self.lr_schedule(epoch)
for pg in optimizer.param_groups:
pg['lr'] = lr
else:
model.eval()
XE = torch.nn.CrossEntropyLoss().to(self.dev)
l_dl = 0 # len(loader)
now = time.time()
batch_time = MovingAverage(intertia=0.9)
for iter, (data, label, selected) in enumerate(loader):
now = time.time()
if not validation:
niter = epoch * len(loader.dataset) + iter*args.batch_size
data = data.to(self.dev)
mass = data.size(0)
where = np.arange(mass,dtype=int) * 4
data = data.view(mass * 4, 3, data.size(3), data.size(4))
rotlabel = torch.tensor(range(4)).view(-1, 1).repeat(mass, 1).view(-1).to(self.dev)
#################### train CNN ###########################################
if not validation:
final = model(data)
if args.onlyrot:
loss = torch.Tensor([0]).to(self.dev)
else:
if args.hc == 1:
loss = XE(final[0][where], self.L[selected])
else:
loss = torch.mean(torch.stack([XE(final[k][where], self.L[k, selected]) for k in range(args.hc)]))
rotloss = XE(final[-1], rotlabel)
pred = torch.argmax(final[-1], 1)
total_loss = loss + rotloss
optimizer.zero_grad()
total_loss.backward()
optimizer.step()
correct = (pred == rotlabel).to(torch.float)
rotacc = correct.sum() / float(mass)
else:
final = model(data)
pred = torch.argmax(final[-1], 1)
correct = (pred == rotlabel.cuda()).to(torch.float)
rotacc = correct.sum() / float(mass)
total_loss = torch.Tensor([0])
loss = torch.Tensor([0])
rotloss = torch.Tensor([0])
rotacc_value.update(rotacc.item(), mass)
loss_value.update(total_loss.item(), mass)
batch_time.update(time.time() - now)
now = time.time()
print(
f"Loss: {loss_value.avg:03.3f}, RotAcc: {rotacc_value.avg:03.3f} | {epoch: 3}/{iter:05}/{l_dl:05} Freq: {mass / batch_time.avg:04.1f}Hz:",
end='\r', flush=True)
# every few iter logging
if (iter % args.logiter == 0):
if not validation:
print(niter, " Loss: {0:.3f}".format(loss.item()), flush=True)
with torch.no_grad():
if not args.onlyrot:
pred = torch.argmax(final[0][where], dim=1)
pseudoloss = XE(final[0][where], pred)
if not args.onlyrot:
self.writer.add_scalar('Pseudoloss', pseudoloss.item(), niter)
self.writer.add_scalar('lr', self.lr_schedule(epoch), niter)
self.writer.add_scalar('Loss', loss.item(), niter)
self.writer.add_scalar('RotLoss', rotloss.item(), niter)
self.writer.add_scalar('RotAcc', rotacc.item(), niter)
if iter > 0:
self.writer.add_scalar('Freq(Hz)', mass/(time.time() - now), niter)
# end of epoch logging
if self.writer and (epoch % self.log_interval == 0):
write_conv(self.writer, model, epoch)
if validation:
print('val Rot-Acc: ', rotacc_value.avg)
self.writer.add_scalar('val Rot-Acc', rotacc_value.avg, epoch)
files.save_checkpoint_all(self.checkpoint_dir, model, args.arch,
optimizer, self.L, epoch,lowest=False)
return {'loss': loss_value.avg}
def optimize(self, model, train_loader):
"""Perform full optimization."""
first_epoch = 0
model = model.to(self.dev)
self.optimize_times = [0]
optimizer = torch.optim.SGD(filter(lambda p: p.requires_grad, model.parameters()),
weight_decay=self.weight_decay,
momentum=self.momentum,
lr=self.lr)
if self.checkpoint_dir is not None and self.resume:
self.L, first_epoch = files.load_checkpoint_all(self.checkpoint_dir, model=None, opt=None)
print('loaded from: ', self.checkpoint_dir,flush=True)
print('first five entries of L: ', self.L[:5], flush=True)
print('found first epoch to be', first_epoch, flush=True)
first_epoch = 0
self.optimize_times = [0]
self.L = self.L.cuda()
print("model.headcount ", model.headcount, flush=True)
#####################################################################################
# Perform optmization ###############################################################
lowest_loss = 1e9
epoch = first_epoch
while epoch < (self.num_epochs+1):
if not args.val_only:
m = self.optimize_epoch(model, optimizer, train_loader, epoch, validation=False)
if m['loss'] < lowest_loss:
lowest_loss = m['loss']
files.save_checkpoint_all(self.checkpoint_dir, model, args.arch,
optimizer, self.L, epoch, lowest=True)
else:
print('='*30 +' doing only validation ' + "="*30)
epoch = self.num_epochs
m = self.optimize_epoch(model, optimizer, self.val_loader, epoch, validation=True)
epoch += 1
print(f"Model optimization completed. Saving final model to {os.path.join(self.checkpoint_dir, 'model_final.pth.tar')}")
torch.save(model, os.path.join(self.checkpoint_dir, 'model_final.pth.tar'))
return model
def get_parser():
parser = argparse.ArgumentParser(description='Retrain with given labels combined with RotNet loss')
# optimizer
parser.add_argument('--epochs', default=90, type=int, metavar='N', help='number of epochs')
parser.add_argument('--batch-size', default=64, type=int, metavar='BS', help='batch size')
parser.add_argument('--lr', default=0.05, type=float, metavar='FLOAT', help='initial learning rate')
parser.add_argument('--lrdrop', default=30, type=int, metavar='INT', help='multiply LR by 0.1 every')
# architecture
parser.add_argument('--arch', default='alexnet', type=str, help='alexnet or resnet')
parser.add_argument('--archspec', default='big', type=str, help='big or small for alexnet ')
parser.add_argument('--ncl', default=1000, type=int, metavar='INT', help='number of clusters')
parser.add_argument('--hc', default=1, type=int, metavar='INT', help='number of heads')
parser.add_argument('--init', default=False, action='store_true', help='initialization of network to PyTorch 0.4')
# what we do in this code
parser.add_argument('--val-only', default=False, action='store_true', help='if we run only validation set')
parser.add_argument('--onlyrot', default=False, action='store_true', help='if train only RotNet')
# housekeeping
parser.add_argument('--data', default="Imagenet", type=str)
parser.add_argument('--device', default="0", type=str, metavar='N', help='GPU device')
parser.add_argument('--exp', default='./rot-retrain', metavar='DIR', help='path to result dirs')
parser.add_argument('--workers', default=6, type=int, metavar='N', help='number workers (default: 6)')
parser.add_argument('--imagenet-path', default='/home/ubuntu/data/imagenet', type=str, help='')
parser.add_argument('--comment', default='rot-retrain', type=str, help='comment for tensorboardX')
parser.add_argument('--log-interval', default=1, type=int, metavar='INT', help='save stuff every x epochs')
parser.add_argument('--logiter', default=200, type=int, metavar='INT', help='log every x-th batch')
return parser
if __name__ == "__main__":
args = get_parser().parse_args()
name = "%s" % args.comment.replace('/', '_')
try:
args.device = [int(item) for item in args.device.split(',')]
except AttributeError:
args.device = [int(args.device)]
setup_runtime(seed=42, cuda_dev_id=args.device)
print(args, flush=True)
print()
print(name,flush=True)
writer = SummaryWriter('./runs/%s/%s'%(args.data,name))
writer.add_text('args', " \n".join(['%s %s' % (arg, getattr(args, arg)) for arg in vars(args)]))
# Setup model and train_loader
print('Commencing!', flush=True)
model, train_loader = return_model_loader(args)
train_loader = RotationDataLoader(args.imagenet_path, is_validation=False,
crop_size=224, batch_size=args.batch_size, num_workers=args.workers,
shuffle=True)
# add additional head to the network for RotNet loss.
if args.arch == 'alexnet':
if args.hc == 1:
model.__setattr__("top_layer0", nn.Linear(4096, args.ncl))
model.top_layer = None
model.headcount = args.hc+1
model.__setattr__("top_layer%s" % args.hc, nn.Linear(4096, 4))
else:
if args.hc == 1:
model.__setattr__("top_layer0", nn.Linear(2048*int(args.archspec), args.ncl))
model.top_layer = None
model.headcount = args.hc+1
model.__setattr__("top_layer%s" % args.hc, nn.Linear(2048*int(args.archspec), 4))
if args.init:
for mod in model.modules():
mod.apply(weight_init)
# Setup optimizer
o = Optimizer()
o.writer = writer
o.lr = args.lr
o.num_epochs = args.epochs
o.resume = True
o.log_interval = args.log_interval
o.checkpoint_dir = os.path.join(args.exp, 'checkpoints')
# Optimize
o.optimize(model, train_loader)
| 2.109375 | 2 |
tests/vie.py | Jinwithyoo/han | 0 | 4266 | # -*- coding: utf-8 -*-
from tests import HangulizeTestCase
from hangulize.langs.vie import Vietnamese
class VietnameseTestCase(HangulizeTestCase):
""" http://korean.go.kr/09_new/dic/rule/rule_foreign_0218.jsp """
lang = Vietnamese()
def test_1st(self):
"""제1항
nh는 이어지는 모음과 합쳐서 한 음절로 적는다. 어말이나 자음 앞에서는
받침 ‘ㄴ' 으로 적되, 그 앞의 모음이 a인 경우에는 a와 합쳐 ‘아인'으로
적는다.
"""
self.assert_examples({
# u'Nha Trang': u'냐짱',
# u'<NAME>': u'호찌민',
# u'Thanh Hoa': u'타인호아',
# u'Đông Khanh': u'동카인',
})
def test_2nd(self):
"""제2항
qu는 이어지는 모음이 a일 경우에는 합쳐서 ‘꽈'로 적는다.
"""
self.assert_examples({
'Quang': '꽝',
# u'hat quan ho': u'핫꽌호',
'Quôc': '꾸옥',
'Quyên': '꾸옌',
})
def test_3rd(self):
"""제3항
y는 뒤따르는 모음과 합쳐서 한 음절로 적는다.
"""
self.assert_examples({
'yên': '옌',
'Nguyên': '응우옌',
})
def test_4th(self):
"""제4항
어중의 l이 모음 앞에 올 때에는 ‘ㄹㄹ'로 적는다.
다만, 인명의 성과 이름은 별개의 단어로 보아 이 규칙을 적용하지 않는다.
"""
self.assert_examples({
# u'klông put': u'끌롱쁫',
'Pleiku': '쁠래이꾸',
# u'Ha Long': u'할롱',
# u'My Lay': u'밀라이',
}) | 2.59375 | 3 |
tests/test_functions/test_trig.py | jackromo/mathLibPy | 1 | 4267 | from mathlibpy.functions import *
import unittest
class SinTester(unittest.TestCase):
def setUp(self):
self.sin = Sin()
def test_call(self):
self.assertEqual(self.sin(0), 0)
def test_eq(self):
self.assertEqual(self.sin, Sin())
def test_get_derivative_call(self):
self.assertEqual(self.sin.get_derivative()(0), 1)
class CosTester(unittest.TestCase):
def setUp(self):
self.cos = Cos()
def test_call(self):
self.assertEqual(self.cos(0), 1)
def test_eq(self):
self.assertEqual(self.cos, Cos())
def test_get_derivative_call(self):
self.assertEqual(self.cos.get_derivative()(math.pi/2), -1)
class TanTester(unittest.TestCase):
def setUp(self):
self.tan = Tan()
def test_call(self):
self.assertEqual(self.tan(0), 0)
def test_eq(self):
self.assertEqual(self.tan, Tan())
def test_get_derivative(self):
self.assertEqual(self.tan.get_derivative()(0), 1)
if __name__ == "__main__":
unittest.main()
| 3.40625 | 3 |
src/smallestLetter/target.py | rajitbanerjee/leetcode | 0 | 4268 | class Solution:
def nextGreatestLetter(self, letters: list, target: str) -> str:
if target < letters[0] or target >= letters[-1]:
return letters[0]
left, right = 0, len(letters) - 1
while left < right:
mid = left + (right - left) // 2
if letters[mid] > target:
right = mid
else:
left = mid + 1
return letters[right]
if __name__ == '__main__':
letters = ["c", "f", "j"]
target = "a"
print(f"Input: letters = {letters}, target = {target}")
print(f"Output: {Solution().nextGreatestLetter(letters, target)}")
| 3.546875 | 4 |
anti_cpdaily/command.py | hyx0329/nonebot_plugin_anti_cpdaily | 2 | 4269 | <reponame>hyx0329/nonebot_plugin_anti_cpdaily
import nonebot
from nonebot import on_command
from nonebot.rule import to_me
from nonebot.typing import T_State
from nonebot.adapters import Bot, Event
from nonebot.log import logger
from .config import global_config
from .schedule import anti_cpdaily_check_routine
cpdaily = on_command('cpdaily')
scheduler = nonebot.require("nonebot_plugin_apscheduler").scheduler
async def one_shot_routine():
scheduler.remove_job('anti_cpdaily_oneshot')
await anti_cpdaily_check_routine()
@cpdaily.handle()
async def handle_command(bot: Bot, event: Event, state: T_State):
""" Manually activate the routine in 1 min
"""
if event.get_user_id() in bot.config.superusers:
logger.debug('manually activate the cpdaily routine')
# await anti_cpdaily_check_routine()
scheduler.add_job(one_shot_routine, trigger='interval', minutes=1, id='anti_cpdaily_oneshot', replace_existing=True)
logger.debug('manual process end')
await cpdaily.finish('启动今日校园打卡程序ing')
| 2.015625 | 2 |
matplotlib/gallery_python/pyplots/dollar_ticks.py | gottaegbert/penter | 13 | 4270 | <reponame>gottaegbert/penter
"""
============
Dollar Ticks
============
Use a `~.ticker.FormatStrFormatter` to prepend dollar signs on y axis labels.
"""
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.ticker as ticker
# Fixing random state for reproducibility
np.random.seed(19680801)
fig, ax = plt.subplots()
ax.plot(100*np.random.rand(20))
formatter = ticker.FormatStrFormatter('$%1.2f')
ax.yaxis.set_major_formatter(formatter)
for tick in ax.yaxis.get_major_ticks():
tick.label1.set_visible(False)
tick.label2.set_visible(True)
tick.label2.set_color('green')
plt.show()
#############################################################################
#
# ------------
#
# References
# """"""""""
#
# The use of the following functions, methods, classes and modules is shown
# in this example:
import matplotlib
matplotlib.ticker
matplotlib.ticker.FormatStrFormatter
matplotlib.axis.Axis.set_major_formatter
matplotlib.axis.Axis.get_major_ticks
matplotlib.axis.Tick
| 3.21875 | 3 |
Chibrary/utils.py | chiro2001/chibrary | 0 | 4271 | <reponame>chiro2001/chibrary
import json
import re
from flask import request, abort, jsonify
from Chibrary import config
from Chibrary.config import logger
from Chibrary.exceptions import *
from functools import wraps
from urllib import parse
from Chibrary.server import db
def parse_url_query(url: str) -> dict:
if not url.lower().startswith('http://') \
and not url.lower().startswith('https://'):
return {}
query = url[url.rindex('/') + 1:]
if '?' not in query:
return {}
query = query[query.index('?') + 1:]
lines = query.split('&')
result = {}
for line in lines:
if line.count('=') != 1:
continue
key, val = line.split('=')
        # note the type conversion handling here
if val == 'undefined':
val = None
else:
try:
val = int(val)
except ValueError:
try:
val = float(val)
except ValueError:
pass
if val is not None:
if type(val) is str:
result[key] = parse.unquote(val)
else:
result[key] = val
return result
def form_url_query(url: str, data: dict):
# if not url.lower().startswith('http://') \
# and not url.lower().startswith('https://'):
# logger.warning('Provided wrong url %s !' % url)
# return url
# if len(data) == 0:
# return url
# query = '?'
# for key in data:
# # 特事特办(?)
# if type(data[key]) is str and '/' in data[key]:
# query = query + parse.urlencode({key: data[key]}) + '&'
# else:
# query = query + key + '=' + parse.quote(str(data[key])) + '&'
# query = query[:-1]
# return url + query
    # work around the '+' vs '%20' quoting pitfall of urlencode
return url + '?' + parse.urlencode(data).replace('+', '%20')
def remove_ids_dfs(data: dict):
if '_id' in data:
del data['_id']
for key in data:
if type(data[key]) is dict:
data[key] = remove_ids_dfs(data[key])
return data
"""
Response format:
{
code: ...,
message: ...,
data: ...,
}
"""
def make_result(code: int, message=None, data=None):
result = {
'code': code,
}
    # pick the message based on the code
if message is None:
try:
result['message'] = config.code[str(code)]
except ValueError:
logger.warning('Error code %s not found!' % code)
result['message'] = config.code['0']
else:
result['message'] = message
if data is not None:
        # make sure all _id elements are removed
data = remove_ids_dfs(data)
result['data'] = data
return result
def make_error_result(error):
return make_result(1, message=str(error))
def dump(data):
return json.dumps(data)
def check_args(args: dict, requirements: list):
for r in requirements:
if r not in args:
return False
return True
def format_file_size(size_by_bytes: int) -> str:
units = ['B', 'KB', 'MB', 'GB', 'TB']
    # the final value should fall between 1 and 999
index = 0
unit = units[index]
while size_by_bytes > 1000:
index = index + 1
unit = units[index]
size_by_bytes = size_by_bytes / 1000
        if index == len(units) - 1:
break
if size_by_bytes > 20:
return "%.0f%s" % (size_by_bytes, unit)
return "%.2f%s" % (size_by_bytes, unit)
# the client supplies the token via an "Authorization: {token}" header
def login_check(f):
@wraps(f)
def decorated(*args, **kwargs):
headers = dict(request.headers)
if 'Authorization' not in headers:
return make_result(3) # login error
token = headers['Authorization']
if db.token_find_by_token(token) is None:
return make_result(3) # login error
return f(*args, **kwargs)
return decorated
# the client supplies the token via an "Authorization: {token}" header
def admin_check(f):
@wraps(f)
def decorated(*args, **kwargs):
headers = dict(request.headers)
if 'Authorization' not in headers:
return make_result(3) # login error
token = headers['Authorization']
token_data = db.token_find_by_token(token)
if token_data is None:
return make_result(3) # login error
        # a user level of 10 or above grants admin privileges
user = db.user_find(username=token_data['username'])
if user is None:
            return make_result(3) # login error; should not happen in practice
if user['info']['level'] < 10:
return make_result(10) # No permission
return f(*args, **kwargs)
return decorated
# must be called during request handling; aborts immediately if the user cannot be resolved
def get_user_from_headers():
headers = dict(request.headers)
if 'Authorization' not in headers:
abort(jsonify(make_result(3))) # login error
token = headers['Authorization']
token_data = db.token_find_by_token(token)
if token_data is None:
abort(jsonify(make_result(3))) # login error
    # a user level of 10 or above grants admin privileges
user = db.user_find(username=token_data['username'])
if user is None:
        abort(jsonify(make_result(3))) # login error; should not happen in practice
return user
def check_admin_abort():
headers = dict(request.headers)
if 'Authorization' not in headers:
abort(jsonify(make_result(3))) # login error
token = headers['Authorization']
token_data = db.token_find_by_token(token)
if token_data is None:
abort(jsonify(make_result(3))) # login error
    # a user level of 10 or above grants admin privileges
user = db.user_find(username=token_data['username'])
if user is None:
        abort(jsonify(make_result(3))) # login error; should not happen in practice
if user['info']['level'] < 10:
abort(jsonify(make_result(10))) # No permission
def is_number(s):
try:
float(s)
return True
except ValueError:
pass
# try:
# import unicodedata
# unicodedata.numeric(s)
# return True
# except (TypeError, ValueError):
# pass
return False
# def url_check(url: str):
# url = url.lower()
# reg = "^(https|http|ftp|rtsp|mms)\\://?([a-zA-Z0-9\\.\\-]+(\\:[a-zA-Z0-9\\.&%\\$\\-]+)*@)?((25[0-5]|2" \
# "[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[1-9])\\.(25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]" \
# "{1}[0-9]{1}|[1-9]|0)\\.(25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[1-9]|0)\\.(25[0-5]|" \
# "2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[0-9])|([a-zA-Z0-9\\-]+\\.)*[a-zA-Z0-9\\-]+\\.[a-zA-Z]" \
# "{2,4})(\\:[0-9]+)?(/[^/][a-zA-Z0-9\\.\\,\\?\\'\\\\/\\+&%\\$\\=~_\\-@]*)*$"
# print(re.search(url, reg))
if __name__ == '__main__':
print(parse_url_query('http://blog.com/sss/ssss/s?wd=dsfa&a=fdsa&a=1&b=1.1&a=s'))
print(format_file_size(20250000))
# print(url_check('http://www.bilibili.com/'))
| 2.671875 | 3 |
tests/inputs/loops/51-arrays-in-loop.py | helq/pytropos | 4 | 4272 | import numpy as np
from something import Top
i = 0
while i < 10:
a = np.ndarray((10,4))
b = np.ones((10, Top))
i += 1
del Top
# show_store()
| 2.71875 | 3 |
tests/mappers/fields/test_float_field.py | Arfey/aiohttp_admin2 | 12 | 4273 | from aiohttp_admin2.mappers import Mapper
from aiohttp_admin2.mappers import fields
class FloatMapper(Mapper):
field = fields.FloatField()
def test_correct_float_type():
"""
In this test we check success convert to float type.
"""
mapper = FloatMapper({"field": 1})
mapper.is_valid()
assert mapper.data["field"] == 1.0
mapper = FloatMapper({"field": 2})
mapper.is_valid()
assert mapper.data["field"] == 2.0
mapper = FloatMapper({"field": -3})
mapper.is_valid()
assert mapper.data["field"] == -3.0
mapper = FloatMapper({"field": 0})
mapper.is_valid()
assert mapper.data["field"] == 0.0
def test_wrong_float_type():
"""
In this test we check error when we received wrong float type.
"""
assert FloatMapper({"field": "string"}).is_valid() is False
assert FloatMapper({"field": []}).is_valid() is False
| 3.21875 | 3 |
autotest/t038_test.py | jdlarsen-UA/flopy | 2 | 4274 | <gh_stars>1-10
"""
Try to load all of the MODFLOW-USG examples in ../examples/data/mfusg_test.
These are the examples that are distributed with MODFLOW-USG.
"""
import os
import flopy
# make the working directory
tpth = os.path.join("temp", "t038")
if not os.path.isdir(tpth):
os.makedirs(tpth)
# build list of name files to try and load
usgpth = os.path.join("..", "examples", "data", "mfusg_test")
usg_files = []
for path, subdirs, files in os.walk(usgpth):
for name in files:
if name.endswith(".nam"):
usg_files.append(os.path.join(path, name))
#
def test_load_usg():
for fusg in usg_files:
d, f = os.path.split(fusg)
yield load_model, f, d
# function to load a MODFLOW-USG model and then write it back out
def load_model(namfile, model_ws):
m = flopy.modflow.Modflow.load(
namfile, model_ws=model_ws, version="mfusg", verbose=True, check=False
)
assert m, f"Could not load namefile {namfile}"
assert m.load_fail is False
m.change_model_ws(tpth)
m.write_input()
return
if __name__ == "__main__":
for fusg in usg_files:
d, f = os.path.split(fusg)
load_model(f, d)
| 2.328125 | 2 |
botlib/cli.py | relikd/botlib | 0 | 4275 | #!/usr/bin/env python3
import os
from argparse import ArgumentParser, ArgumentTypeError, FileType, Namespace
from typing import Any
def DirType(string: str) -> str:
if os.path.isdir(string):
return string
raise ArgumentTypeError(
'Directory does not exist: "{}"'.format(os.path.abspath(string)))
class Cli(ArgumentParser):
def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
def arg(self, *args: Any, **kwargs: Any) -> None:
self.add_argument(*args, **kwargs)
def arg_bool(self, *args: Any, **kwargs: Any) -> None:
self.add_argument(*args, **kwargs, action='store_true')
def arg_dir(self, *args: Any, **kwargs: Any) -> None:
self.add_argument(*args, **kwargs, type=DirType)
def arg_file(self, *args: Any, mode: str = 'r', **kwargs: Any) -> None:
self.add_argument(*args, **kwargs, type=FileType(mode))
def parse(self) -> Namespace:
return self.parse_args()
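

# Hypothetical usage sketch (not part of the original module); the flag names
# below are invented purely to illustrate the wrapper methods above.
if __name__ == "__main__":
    cli = Cli(description="demo")
    cli.arg("--name", default="world")
    cli.arg_bool("--verbose")
    cli.arg_dir("--workdir", default=".")
    print(cli.parse())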
| 3.046875 | 3 |
pyhanko_certvalidator/asn1_types.py | MatthiasValvekens/certvalidator | 4 | 4276 | <filename>pyhanko_certvalidator/asn1_types.py
from typing import Optional
from asn1crypto import core, x509, cms
__all__ = [
'Target', 'TargetCert', 'Targets', 'SequenceOfTargets',
'AttrSpec', 'AAControls'
]
class TargetCert(core.Sequence):
_fields = [
('target_certificate', cms.IssuerSerial),
('target_name', x509.GeneralName, {'optional': True}),
('cert_digest_info', cms.ObjectDigestInfo, {'optional': True})
]
class Target(core.Choice):
_alternatives = [
('target_name', x509.GeneralName, {'explicit': 0}),
('target_group', x509.GeneralName, {'explicit': 1}),
('target_cert', TargetCert, {'explicit': 2})
]
class Targets(core.SequenceOf):
_child_spec = Target
# Blame X.509...
class SequenceOfTargets(core.SequenceOf):
_child_spec = Targets
class AttrSpec(core.SequenceOf):
_child_spec = cms.AttCertAttributeType
class AAControls(core.Sequence):
_fields = [
('path_len_constraint', core.Integer, {'optional': True}),
('permitted_attrs', AttrSpec, {'optional': True, 'implicit': 0}),
('excluded_attrs', AttrSpec, {'optional': True, 'implicit': 1}),
('permit_unspecified', core.Boolean, {'default': True})
]
def accept(self, attr_id: cms.AttCertAttributeType) -> bool:
attr_id_str = attr_id.native
excluded = self['excluded_attrs'].native
if excluded is not None:
excluded = frozenset(excluded)
if excluded is not None and attr_id_str in excluded:
return False
permitted = self['permitted_attrs'].native
if permitted is not None:
permitted = frozenset(permitted)
if permitted is not None and attr_id_str in permitted:
return True
return bool(self['permit_unspecified'])
@classmethod
def read_extension_value(cls, cert: x509.Certificate) \
-> Optional['AAControls']:
# handle AA controls (not natively supported by asn1crypto, so
# not available as an attribute).
try:
return next(
ext['extn_value'].parsed
for ext in cert['tbs_certificate']['extensions']
if ext['extn_id'].native == 'aa_controls'
)
except StopIteration:
return None
def _make_tag_explicit(field_decl):
tag_dict = field_decl[2]
if 'explicit' in tag_dict:
return
tag_dict['explicit'] = tag_dict['implicit']
del tag_dict['implicit']
def _make_tag_implicit(field_decl):
tag_dict = field_decl[2]
if 'implicit' in tag_dict:
return
tag_dict['implicit'] = tag_dict['explicit']
del tag_dict['explicit']
# Deal with wbond/asn1crypto#218
_make_tag_explicit(cms.RoleSyntax._fields[1])
_make_tag_explicit(cms.SecurityCategory._fields[1])
# Deal with wbond/asn1crypto#220
_make_tag_implicit(cms.AttCertIssuer._alternatives[1])
# patch in attribute certificate extensions
# Note: unlike in Certomancer, we don't do this one conditionally, since
# we need the actual Python types to agree with what we export
ext_map = x509.ExtensionId._map
ext_specs = x509.Extension._oid_specs
ext_map['2.5.29.55'] = 'target_information'
ext_specs['target_information'] = SequenceOfTargets
ext_map['2.5.29.56'] = 'no_rev_avail'
ext_specs['no_rev_avail'] = core.Null
ext_map['1.3.6.1.5.5.7.1.6'] = 'aa_controls'
ext_specs['aa_controls'] = AAControls
ext_map['1.3.6.1.5.5.7.1.4'] = 'audit_identity'
ext_specs['audit_identity'] = core.OctetString
| 2.109375 | 2 |
test/test_delete_group.py | ruslankl9/ironpython_training | 0 | 4277 | <filename>test/test_delete_group.py
from model.group import Group
import random
def test_delete_some_group(app):
if len(app.group.get_group_list()) <= 1:
app.group.add_new_group(Group(name='test'))
old_list = app.group.get_group_list()
index = random.randrange(len(old_list))
app.group.delete_group_by_index(index)
new_list = app.group.get_group_list()
assert len(old_list) - 1 == len(new_list)
del old_list[index]
assert old_list == new_list | 2.734375 | 3 |
Evaluation/batch_detection.py | gurkirt/actNet-inAct | 27 | 4278 | <filename>Evaluation/batch_detection.py
'''
Author: <NAME>
Start date: 15th May 2016
purpose of this file is to read frame-level predictions and process them to produce a label per video
'''
from sklearn.svm import LinearSVC
from sklearn.ensemble import RandomForestClassifier
import numpy as np
import pickle
import os
import time,json
import pylab as plt
from eval_detection import ANETdetection
import scipy.io as sio
#######baseDir = "/mnt/sun-alpha/actnet/";
baseDir = "/data/shared/solar-machines/actnet/";
#baseDir = "/mnt/solar-machines/actnet/";
########imgDir = "/mnt/sun-alpha/actnet/rgb-images/";
######## imgDir = "/mnt/DATADISK2/ss-workspace/actnet/rgb-images/";
annotPklFile = "../Evaluation/data/actNet200-V1-3.pkl"
def getscore(ground_truth_filename, prediction_filename,
tiou_thr=0.5,subset='validation', verbose=True, check_status=True):
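    # thin wrapper around the official ActivityNet detection evaluator;
    # returns the average-precision values at the given tIoU threshold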
anet_detection = ANETdetection(ground_truth_filename, prediction_filename,
subset=subset, tiou_thr=tiou_thr,
verbose=verbose, check_status=True)
ap = anet_detection.evaluate()
return ap
def saveAPs():
K = 5;
subset = 'validation';#,'testing']:
featType = 'IMS-MBH'
# savename = '{}data/predictions-{}-{}.pkl'.format(baseDir,subset,featType)
# with open(savename,'r') as f:
# data = pickle.load(f)
outfilename = '{}results/classification/{}-{}-{}.json'.format(baseDir,subset,featType,str(K).zfill(3))
gtfiile = 'data/activity_net.v1-3.min.json'
    ap = getscore(gtfiile,outfilename)
print ap
print np.mean(ap)
savename = '{}data/weightAP-{}.pkl'.format(baseDir,featType)
print 'Results saved in ',savename
with open(savename,'w') as f:
pickle.dump(ap,f)
def plotAPs():
K = 1;
subset = 'validation';#,'testing']:
aps = [];
count = 0;
colors = ['red','green','blue']
for featType in ['IMS-MBH','IMS','MBH']:
savename = '{}data/weightAP-{}.pkl'.format(baseDir,featType)
print 'Results saved in ',savename
with open(savename,'r') as f:
ap = pickle.load(f)
ind = np.arange(count,600+count,3)
plt.bar(ind,ap,width=0.5,color=colors[count])
count += 1
plt.show()
def evalAll():
K = 10;
subset = 'validation';#,'testing']:
gtfiile = 'data/activity_net.v1-3.min.json'
result = []; count = 0;
featType = 'C3D-BIN-BOOST-LONG'
# outfilename = '{}results/detection/{}-{}-K-{}-{}.json'.format(baseDir,subset,featType,str(K).zfill(3),'alpha-001')
for alpha in [1,3,5,]:
outfilename = '{}results/detection/{}-{}-K-{}-{}.json'.format(baseDir,subset,featType,str(K).zfill(3),'alpha-{}'.format(str(int(alpha*10)).zfill(3)))
print 'Evaluating results from ',outfilename
for tioth in [0.5,0.4,0.3,0.2,0.1]:
ap = getscore(gtfiile,outfilename,tiou_thr=tioth)
result.append([alpha,tioth,np.mean(ap)])
    result = np.array(result)
sio.savemat('result-{}.mat'.format(featType),mdict={'ap':ap})
if __name__=="__main__":
#processOnePredictions()
    # saveAPs()
    # plotAPs()
    evalAll()
| 2.375 | 2 |
python/csv/csv_dict_writer.py | y2ghost/study | 0 | 4279 | <filename>python/csv/csv_dict_writer.py
import csv
def csv_dict_writer(path, headers, data):
with open(path, 'w', newline='') as csvfile:
writer = csv.DictWriter(csvfile, delimiter=',',
fieldnames=headers)
writer.writeheader()
for record in data:
writer.writerow(record)
if __name__ == '__main__':
data = '''book_title,author,publisher,pub_date,isbn
Python 101,<NAME>, <NAME>,2020,123456789
wxPython Recipes,<NAME>,Apress,2018,978-1-4842-3237-8
Python Interviews,<NAME>,Packt Publishing,2018,9781788399081'''
records = []
for line in data.splitlines():
records.append(line.strip().split(','))
headers = records.pop(0)
list_of_dicts = []
for row in records:
my_dict = dict(zip(headers, row))
list_of_dicts.append(my_dict)
csv_dict_writer('output_dict.csv', headers, list_of_dicts)
| 3.890625 | 4 |
src/decisionengine/framework/modules/tests/test_module_decorators.py | moibenko/decisionengine | 9 | 4280 | <gh_stars>1-10
# SPDX-FileCopyrightText: 2017 Fermi Research Alliance, LLC
# SPDX-License-Identifier: Apache-2.0
import pytest
from decisionengine.framework.modules import Publisher, Source
from decisionengine.framework.modules.Module import verify_products
from decisionengine.framework.modules.Source import Parameter
def test_multiple_consumes_declarations():
with pytest.raises(Exception, match="@consumes has already been called"):
@Publisher.consumes(a=int)
@Publisher.consumes(b=float)
class _(Publisher.Publisher):
pass
def test_multiple_produces_declarations():
with pytest.raises(Exception, match="@produces has already been called"):
@Source.produces(c=str)
@Source.produces(d=bool)
class _(Source.Source):
pass
def test_wrong_product_names():
@Source.produces(a=str)
class BMaker(Source.Source):
def __init__(self, config):
super().__init__(config)
def acquire(self):
return {"b": ""}
maker = BMaker({"channel_name": "test"})
expected_err_msg = (
"The following products were not produced:\n"
+ " - 'a' of type 'str'\n\n"
+ "The following products were not declared:\n"
+ " - 'b' of type 'str'"
)
with pytest.raises(Exception, match=expected_err_msg):
verify_products(maker, maker.acquire())
def test_wrong_product_types():
@Source.produces(a=str, b=int)
class AMaker(Source.Source):
def __init__(self, config):
super().__init__(config)
def acquire(self):
return {"a": 42, "b": 17}
maker = AMaker({"channel_name": "test"})
expected_err_msg = "The following products have the wrong types:\n" + r" - 'a' \(expected 'str', got 'int'\)"
with pytest.raises(Exception, match=expected_err_msg):
verify_products(maker, maker.acquire())
def test_supports_config():
expected_err_msg = (
"An error occurred while processing the parameter 'conflicting_types':\n"
+ "The specified type 'int' conflicts with the type of the default value "
+ r"'hello' \(type 'str'\)"
)
with pytest.raises(Exception, match=expected_err_msg):
@Source.supports_config(Parameter("conflicting_types", type=int, default="hello"))
class _(Source.Source):
pass
| 2.046875 | 2 |
models/cnn_layer.py | RobinRojowiec/intent-recognition-in-doctor-patient-interviews | 0 | 4281 | import torch
import torch.nn as nn
from torch.nn.functional import max_pool1d
from utility.model_parameter import Configuration, ModelParameter
class CNNLayer(nn.Module):
def __init__(self, config: Configuration, vocab_size=30000, use_embeddings=True, embed_dim=-1, **kwargs):
super(CNNLayer, self).__init__()
# set parameters
self.max_seq_length = config.get_int(ModelParameter.MAX_LENGTH)
self.use_gpu = torch.cuda.is_available()
if embed_dim == -1:
self.embedding_dim = config.get_int(ModelParameter.EMBEDDING_SIZE)
else:
self.embedding_dim = embed_dim
self.max_length = config.get_int(ModelParameter.MAX_LENGTH)
self.use_embeddings = use_embeddings
self.conv_out_channels = config.get_int(ModelParameter.CHANNELS)
self.filter_sizes = [2]
# create and initialize layers
self.embedding = nn.Embedding(vocab_size, self.embedding_dim)
self.relu = nn.ReLU()
self.convolutions = nn.ModuleList(
[nn.Conv2d(1, self.conv_out_channels, (K, self.embedding_dim)) for K in self.filter_sizes])
self.dropout = nn.Dropout(0.3)
def get_output_length(self):
return len(self.filter_sizes) * self.conv_out_channels
def forward(self, samples, **kwargs):
encoded_samples = self.encode(samples)
return encoded_samples
def encode(self, samples):
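        # samples: (batch, seq_len) token ids -> embedding (batch, seq_len, embed_dim),
        # unsqueezed to (batch, 1, seq_len, embed_dim); each Conv2d then yields
        # (batch, channels, seq_len - K + 1) after squeezing, which is max-pooled
        # over time and concatenated across filter sizes before dropout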
x = self.embedding(samples)
x = x.unsqueeze(1)
x = [self.relu(conv(x)).squeeze(3) for conv in self.convolutions]
x = [max_pool1d(i, i.size(2)).squeeze(2) for i in x]
x = self.dropout(torch.cat(x, 1))
return x
| 2.6875 | 3 |
musicscore/musicxml/types/complextypes/backup.py | alexgorji/music_score | 2 | 4282 | '''
<xs:complexType name="backup">
<xs:annotation>
<xs:documentation></xs:documentation>
</xs:annotation>
<xs:sequence>
<xs:group ref="duration"/>
<xs:group ref="editorial"/>
</xs:sequence>
</xs:complexType>
'''
from musicscore.dtd.dtd import Sequence, GroupReference, Element
from musicscore.musicxml.groups.common import Editorial
from musicscore.musicxml.elements.note import Duration
from musicscore.musicxml.types.complextypes.complextype import ComplexType
class ComplexTypeBackup(ComplexType):
"""
The backup and forward elements are required to coordinate multiple voices in one part, including music on multiple
staves. The backup type is generally used to move between voices and staves. Thus the backup element does not
include voice or staff elements. Duration values should always be positive, and should not cross measure boundaries
or mid-measure changes in the divisions value.
"""
_DTD = Sequence(
Element(Duration),
GroupReference(Editorial)
)
def __init__(self, tag, *args, **kwargs):
super().__init__(tag=tag, *args, **kwargs)
| 2.078125 | 2 |
NLP programmes in Python/9.Text Clustering/kmeans.py | AlexandrosPlessias/NLP-Greek-Presentations | 0 | 4283 | <filename>NLP programmes in Python/9.Text Clustering/kmeans.py
import nltk
import re
import csv
import string
import collections
import numpy as np
from nltk.corpus import wordnet
from nltk.corpus import stopwords
from nltk.stem import WordNetLemmatizer
from nltk.tokenize import WordPunctTokenizer
from sklearn.metrics import classification_report
from sklearn.metrics import confusion_matrix
""""Pre - Processing: tokenization, stopwords removal, remove words(with size 1), lower capitalization & lemmatization"""
def preprocessing(text):
# text = text.decode("utf8")
# remove punctuation
text = punctuation(text)
# remove extra spaces
text = re.sub(' +', ' ', text)
# tokenize into words
tokens = text.split(" ")
# remove number
tokens = [word for word in tokens if word.isalpha()]
# remove stopwords
stop = stopwords.words('english')
tokens = [token for token in tokens if token not in stop]
# remove words less than three letters
tokens = [word for word in tokens if len(word) >= 3]
# lower capitalization
tokens = [word.lower() for word in tokens]
# keep only real words
tokens = KeepRealWords(tokens)
# lemmatize
lmtzr = WordNetLemmatizer()
tokens = [lmtzr.lemmatize(word) for word in tokens]
# return only tokens with size over 1
if len(tokens) > 0:
preprocessed_text = " ".join(tokens)
return preprocessed_text
return None
def KeepRealWords(text):
wpt = WordPunctTokenizer()
only_recognized_words = []
for s in text:
tokens = wpt.tokenize(s)
if tokens: # check if empty string
for t in tokens:
if wordnet.synsets(t):
only_recognized_words.append(t) # only keep recognized words
return only_recognized_words
def punctuation(text):
translator = str.maketrans(string.punctuation, ' '*len(string.punctuation)) # map punctuation to space
return (text.translate(translator))
""""Read Data"""
# Open sms corpus.
sms_file = open('SMSSpamCollection.txt', encoding="utf8") # Check the structure of this file!
sms_data = []
sms_labels = []
# CSV Reader LABEL & DATA are separated by TAB.
csv_reader = csv.reader(sms_file,delimiter='\t')
# Store labels and data.
for line in csv_reader:
sms_text = preprocessing(line[1])
if ( sms_text != None):
# adding the sms_id
sms_labels.append( line[0])
# adding the cleaned text We are calling preprocessing method
sms_data.append(sms_text)
sms_file.close()
"""Sampling steps (70:30)"""
trainset_size = int(round(len(sms_data)*0.70))
# I chose this threshold for 70:30 train and test split.
print('The training set size for this classifier is ' + str(trainset_size) + '\n')
x_train = np.array([''.join(el) for el in sms_data[0:trainset_size]]) # train sms_data (70%).
y_train = np.array([el for el in sms_labels[0:trainset_size]]) # train sms_labels (70%).
x_test = np.array([''.join(el) for el in sms_data[trainset_size+1:len(sms_data)]]) # test sms_data (30%).
y_test = np.array([el for el in sms_labels[trainset_size+1:len(sms_labels)]]) # test sms_labels (30%).
"""We are building a TFIDF vectorizer here"""
from sklearn.feature_extraction.text import TfidfVectorizer
vectorizer = TfidfVectorizer(min_df=2, ngram_range=(1, 2), stop_words='english', strip_accents='unicode', norm='l2')
X_train = vectorizer.fit_transform(x_train)
X_test = vectorizer.transform(x_test)
"""Text Clustering - K Means"""
from sklearn.cluster import KMeans, MiniBatchKMeans
print('--> Text Clustering - K Means')
true_k = 5
km = KMeans(n_clusters=true_k, init='k-means++', max_iter=100, n_init=1)
kmini = MiniBatchKMeans(n_clusters=true_k, init='k-means++', n_init=1, init_size=1000, batch_size=1000, verbose=False) #verbose=opts.verbose
# we are using the same test,train data in TFIDF form as we did in text classification
km_model = km.fit(X_train)
print("For K-mean clustering ")
clustering = collections.defaultdict(list)
for idx, label in enumerate(km_model.labels_):
clustering[label].append(idx)
print(clustering)
kmini_model = kmini.fit(X_train)
print("For K-mean Mini batch clustering ")
clustering = collections.defaultdict(list)
for idx, label in enumerate(kmini_model.labels_):
clustering[label].append(idx)
print(clustering)
| 3.59375 | 4 |
common/utils.py | paTRICK-swk/P-STMO | 8 | 4284 | import torch
import numpy as np
import hashlib
from torch.autograd import Variable
import os
def deterministic_random(min_value, max_value, data):
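    # hash `data` with sha256 and map its first 4 bytes to a reproducible integer between min_value and max_value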
digest = hashlib.sha256(data.encode()).digest()
raw_value = int.from_bytes(digest[:4], byteorder='little', signed=False)
return int(raw_value / (2 ** 32 - 1) * (max_value - min_value)) + min_value
def mpjpe_cal(predicted, target):
assert predicted.shape == target.shape
return torch.mean(torch.norm(predicted - target, dim=len(target.shape) - 1))
def test_calculation(predicted, target, action, error_sum, data_type, subject, MAE=False):
error_sum = mpjpe_by_action_p1(predicted, target, action, error_sum)
if not MAE:
error_sum = mpjpe_by_action_p2(predicted, target, action, error_sum)
return error_sum
def mpjpe_by_action_p1(predicted, target, action, action_error_sum):
assert predicted.shape == target.shape
batch_num = predicted.size(0)
frame_num = predicted.size(1)
dist = torch.mean(torch.norm(predicted - target, dim=len(target.shape) - 1), dim=len(target.shape) - 2)
if len(set(list(action))) == 1:
end_index = action[0].find(' ')
if end_index != -1:
action_name = action[0][:end_index]
else:
action_name = action[0]
action_error_sum[action_name]['p1'].update(torch.mean(dist).item()*batch_num*frame_num, batch_num*frame_num)
else:
for i in range(batch_num):
end_index = action[i].find(' ')
if end_index != -1:
action_name = action[i][:end_index]
else:
action_name = action[i]
action_error_sum[action_name]['p1'].update(torch.mean(dist[i]).item()*frame_num, frame_num)
return action_error_sum
def mpjpe_by_action_p2(predicted, target, action, action_error_sum):
assert predicted.shape == target.shape
num = predicted.size(0)
pred = predicted.detach().cpu().numpy().reshape(-1, predicted.shape[-2], predicted.shape[-1])
gt = target.detach().cpu().numpy().reshape(-1, target.shape[-2], target.shape[-1])
dist = p_mpjpe(pred, gt)
if len(set(list(action))) == 1:
end_index = action[0].find(' ')
if end_index != -1:
action_name = action[0][:end_index]
else:
action_name = action[0]
action_error_sum[action_name]['p2'].update(np.mean(dist) * num, num)
else:
for i in range(num):
end_index = action[i].find(' ')
if end_index != -1:
action_name = action[i][:end_index]
else:
action_name = action[i]
action_error_sum[action_name]['p2'].update(np.mean(dist), 1)
return action_error_sum
def p_mpjpe(predicted, target):
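    """Protocol #2 error: rigidly align the prediction to the target with a
    Procrustes fit (scale, rotation and translation via SVD), then compute the
    mean per-joint position error. Inputs are assumed to be (N, joints, 3)."""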
assert predicted.shape == target.shape
muX = np.mean(target, axis=1, keepdims=True)
muY = np.mean(predicted, axis=1, keepdims=True)
X0 = target - muX
Y0 = predicted - muY
normX = np.sqrt(np.sum(X0 ** 2, axis=(1, 2), keepdims=True))
normY = np.sqrt(np.sum(Y0 ** 2, axis=(1, 2), keepdims=True))
X0 /= normX
Y0 /= normY
H = np.matmul(X0.transpose(0, 2, 1), Y0)
U, s, Vt = np.linalg.svd(H)
V = Vt.transpose(0, 2, 1)
R = np.matmul(V, U.transpose(0, 2, 1))
sign_detR = np.sign(np.expand_dims(np.linalg.det(R), axis=1))
V[:, :, -1] *= sign_detR
s[:, -1] *= sign_detR.flatten()
R = np.matmul(V, U.transpose(0, 2, 1))
tr = np.expand_dims(np.sum(s, axis=1, keepdims=True), axis=2)
a = tr * normX / normY
t = muX - a * np.matmul(muY, R)
predicted_aligned = a * np.matmul(predicted, R) + t
return np.mean(np.linalg.norm(predicted_aligned - target, axis=len(target.shape) - 1), axis=len(target.shape) - 2)
def define_actions( action ):
actions = ["Directions","Discussion","Eating","Greeting",
"Phoning","Photo","Posing","Purchases",
"Sitting","SittingDown","Smoking","Waiting",
"WalkDog","Walking","WalkTogether"]
if action == "All" or action == "all" or action == '*':
return actions
if not action in actions:
raise( ValueError, "Unrecognized action: %s" % action )
return [action]
def define_error_list(actions):
error_sum = {}
error_sum.update({actions[i]: {'p1':AccumLoss(), 'p2':AccumLoss()} for i in range(len(actions))})
return error_sum
class AccumLoss(object):
def __init__(self):
self.val = 0
self.avg = 0
self.sum = 0
self.count = 0
def update(self, val, n=1):
self.val = val
self.sum += val
self.count += n
self.avg = self.sum / self.count
def get_varialbe(split, target):
num = len(target)
var = []
if split == 'train':
for i in range(num):
temp = Variable(target[i], requires_grad=False).contiguous().type(torch.cuda.FloatTensor)
var.append(temp)
else:
for i in range(num):
temp = Variable(target[i]).contiguous().cuda().type(torch.cuda.FloatTensor)
var.append(temp)
return var
def print_error(data_type, action_error_sum, is_train):
mean_error_p1, mean_error_p2 = print_error_action(action_error_sum, is_train)
return mean_error_p1, mean_error_p2
def print_error_action(action_error_sum, is_train):
mean_error_each = {'p1': 0.0, 'p2': 0.0}
mean_error_all = {'p1': AccumLoss(), 'p2': AccumLoss()}
if is_train == 0:
print("{0:=^12} {1:=^10} {2:=^8}".format("Action", "p#1 mm", "p#2 mm"))
for action, value in action_error_sum.items():
if is_train == 0:
print("{0:<12} ".format(action), end="")
mean_error_each['p1'] = action_error_sum[action]['p1'].avg * 1000.0
mean_error_all['p1'].update(mean_error_each['p1'], 1)
mean_error_each['p2'] = action_error_sum[action]['p2'].avg * 1000.0
mean_error_all['p2'].update(mean_error_each['p2'], 1)
if is_train == 0:
print("{0:>6.2f} {1:>10.2f}".format(mean_error_each['p1'], mean_error_each['p2']))
if is_train == 0:
print("{0:<12} {1:>6.2f} {2:>10.2f}".format("Average", mean_error_all['p1'].avg, \
mean_error_all['p2'].avg))
return mean_error_all['p1'].avg, mean_error_all['p2'].avg
def save_model(previous_name, save_dir,epoch, data_threshold, model, model_name):
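    """Save the model weights to '<save_dir>/<model_name>_<epoch>_<threshold*100>.pth' and return that path."""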
# if os.path.exists(previous_name):
# os.remove(previous_name)
torch.save(model.state_dict(),
'%s/%s_%d_%d.pth' % (save_dir, model_name, epoch, data_threshold * 100))
previous_name = '%s/%s_%d_%d.pth' % (save_dir, model_name, epoch, data_threshold * 100)
return previous_name
def save_model_new(save_dir,epoch, data_threshold, lr, optimizer, model, model_name):
# if os.path.exists(previous_name):
# os.remove(previous_name)
# torch.save(model.state_dict(),
# '%s/%s_%d_%d.pth' % (save_dir, model_name, epoch, data_threshold * 100))
torch.save({
'epoch': epoch,
'lr': lr,
'optimizer': optimizer.state_dict(),
'model_pos': model.state_dict(),
},
'%s/%s_%d_%d.pth' % (save_dir, model_name, epoch, data_threshold * 100))
| 2.125 | 2 |
personal_ad/advice/converter.py | Sailer43/CSE5914Project | 0 | 4285 | <gh_stars>0
from ibm_watson import TextToSpeechV1, SpeechToTextV1, DetailedResponse
from os import system
from json import loads
class Converter:
k_s2t_api_key = "<KEY>"
k_t2s_api_key = "<KEY>"
k_s2t_url = "https://stream.watsonplatform.net/speech-to-text/api"
k_t2s_url = "https://gateway-wdc.watsonplatform.net/text-to-speech/api"
k_t2s_voice = "en-US_AllisonVoice"
k_t2s_format = "audio/webm"
k_st2_model = "en-US_NarrowbandModel"
def __init__(self):
self.s2t = SpeechToTextV1(iam_apikey=self.k_s2t_api_key, url=self.k_s2t_url)
self.t2s = TextToSpeechV1(iam_apikey=self.k_t2s_api_key, url=self.k_t2s_url)
def read(self, string: str):
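        """Synthesize the given text to audio bytes (webm) with Watson Text to Speech."""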
return self.t2s.synthesize(
string,
voice=self.k_t2s_voice,
accept=self.k_t2s_format
).get_result().content
def listen(self, audio_input):
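        """Transcribe audio with Watson Speech to Text; return (success, transcript or error message)."""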
try:
result = self.s2t.recognize(audio_input, model=self.k_st2_model)
result = loads(str(result))
result = result["result"]["results"][0]["alternatives"][0]['transcript']
except Exception:
return False, "I don't understand what you are saying."
return True, str(result)
def main():
pass
if __name__ == '__main__':
main()
| 3.109375 | 3 |
warg_client/client/apis/controller/attack_controller.py | neel4os/warg-client | 0 | 4286 | <gh_stars>0
from subprocess import run
def perform_shutdown(body):
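    """Schedule a reboot (-r) or halt (-h) at `timeToShutdown` via /sbin/shutdown."""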
arg = ""
if body["reboot"]:
_is_reboot = arg + "-r"
else:
_is_reboot = arg + "-h"
time_to_shutdown = str(body['timeToShutdown'])
result = run(["/sbin/shutdown", _is_reboot, time_to_shutdown])
return body
| 2.640625 | 3 |
torrents/migrations/0011_auto_20190223_2345.py | 2600box/harvest | 9 | 4287 | <reponame>2600box/harvest<filename>torrents/migrations/0011_auto_20190223_2345.py
# Generated by Django 2.1.7 on 2019-02-23 23:45
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('torrents', '0010_auto_20190223_0326'),
]
operations = [
migrations.AlterModelOptions(
name='realm',
options={'ordering': ('name',)},
),
]
| 1.40625 | 1 |
common/__init__.py | whyh/FavourDemo | 1 | 4288 | <filename>common/__init__.py
from . import (emoji as emj,
keyboards as kb,
telegram as tg,
phrases as phr,
finance as fin,
utils,
glossary,
bots,
gcp,
sed,
db)
| 1.242188 | 1 |
questions/serializers.py | aneumeier/questions | 0 | 4289 | <filename>questions/serializers.py
#!/usr/bin/env python
# -*- coding: utf-8
"""
:mod:`question.serializers` -- serializers
"""
from rest_framework import serializers
from .models import Question, PossibleAnswer
from category.models import Category
class PossibleAnswerSerializer(serializers.ModelSerializer):
class Meta:
model = PossibleAnswer
fields = (
'id',
'possible_answer',
)
class QuestionSerializer(serializers.ModelSerializer):
category = serializers.StringRelatedField()
possible_answer = serializers.StringRelatedField(many=True)
class Meta:
model = Question
fields = (
'id',
'question',
'category',
'possible_answer',
'male_answer_count',
'female_answer_count',
'all_answer_count',
)
class CategorySerializer(serializers.ModelSerializer):
def count(self):
"""
{{ category.question_set.count }}
"""
return self.question_set.count()
class Meta:
model = Category
fields = (
'id',
'title',
)
| 2.609375 | 3 |
widgets/ui_ShowResultDialog.py | JaySon-Huang/SecertPhotos | 0 | 4290 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'src/ui_ShowResultDialog.ui'
#
# Created: Sat May 16 17:05:43 2015
# by: PyQt5 UI code generator 5.4
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Dialog(object):
def setupUi(self, Dialog):
Dialog.setObjectName("Dialog")
Dialog.resize(400, 300)
self.verticalLayout = QtWidgets.QVBoxLayout(Dialog)
self.verticalLayout.setObjectName("verticalLayout")
self.lb_image = ImageLabel(Dialog)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.lb_image.sizePolicy().hasHeightForWidth())
self.lb_image.setSizePolicy(sizePolicy)
self.lb_image.setMinimumSize(QtCore.QSize(100, 100))
self.lb_image.setAlignment(QtCore.Qt.AlignCenter)
self.lb_image.setObjectName("lb_image")
self.verticalLayout.addWidget(self.lb_image)
self.hLayout = QtWidgets.QHBoxLayout()
self.hLayout.setObjectName("hLayout")
spacerItem = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.hLayout.addItem(spacerItem)
self.btn_save = QtWidgets.QPushButton(Dialog)
self.btn_save.setObjectName("btn_save")
self.hLayout.addWidget(self.btn_save)
spacerItem1 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.hLayout.addItem(spacerItem1)
self.verticalLayout.addLayout(self.hLayout)
self.retranslateUi(Dialog)
QtCore.QMetaObject.connectSlotsByName(Dialog)
def retranslateUi(self, Dialog):
_translate = QtCore.QCoreApplication.translate
Dialog.setWindowTitle(_translate("Dialog", "Dialog"))
self.lb_image.setText(_translate("Dialog", "Image Label"))
self.btn_save.setText(_translate("Dialog", "Save it"))
from widgets.ImageLabel import ImageLabel
| 1.664063 | 2 |
mixcoatl/admin/api_key.py | zomGreg/mixcoatl | 0 | 4291 | <gh_stars>0
"""
mixcoatl.admin.api_key
----------------------
Implements access to the DCM ApiKey API
"""
from mixcoatl.resource import Resource
from mixcoatl.decorators.lazy import lazy_property
from mixcoatl.decorators.validations import required_attrs
from mixcoatl.utils import uncamel, camelize, camel_keys, uncamel_keys
import json
class ApiKey(Resource):
"""An API key is an access key and secret key that provide API access into DCM."""
PATH = 'admin/ApiKey'
COLLECTION_NAME = 'apiKeys'
PRIMARY_KEY = 'access_key'
def __init__(self, access_key=None, endpoint=None, *args, **kwargs):
Resource.__init__(self, endpoint=endpoint)
self.__access_key = access_key
@property
def access_key(self):
"""The primary identifier of the `ApiKey`. Same as `DCM_ACCESS_KEY`"""
return self.__access_key
@lazy_property
def account(self):
"""`dict` - The account with which this API key is associated."""
return self.__account
@lazy_property
def activation(self):
"""`str` - The date and time when this key was activated."""
return self.__activation
@lazy_property
def expiration(self):
"""`str` - The date and time when this API key should automatically be made inactivate."""
return self.__expiration
@expiration.setter
def expiration(self, e):
self.__expiration = e
@lazy_property
def customer(self):
"""`dict` - The customer to whom this API key belongs."""
return self.__customer
@lazy_property
def customer_management_key(self):
"""`bool` - Identifies whether or not this key can be used across all customer accounts."""
return self.__customer_management_key
@lazy_property
def description(self):
"""`str` - A user-friendly description of this API key."""
return self.__description
@description.setter
def description(self, d):
self.__description = d
@lazy_property
def name(self):
"""`str` - The user-friendly name used to identify the key."""
return self.__name
@name.setter
def name(self, n):
self.__name = n
@lazy_property
def secret_key(self):
"""`str` - The secret part of this API key."""
return self.__secret_key
@lazy_property
def state(self):
"""`str` - The status of the key *(i.e. `ACTIVE`)*"""
return self.__state
@lazy_property
def system_management_key(self):
"""`bool` - Identifies if the key can be used for DCM system management functions"""
return self.__system_management_key
@lazy_property
def user(self):
"""`dict` - The user associated with this API key. Account-level keys return `{'user_id': -1}`"""
return self.__user
@required_attrs(['description', 'name'])
def create(self):
"""Call the API to generate an API key from the current instance of `ApiKey`"""
payload = {
'generateApiKey': [{'description': self.description, 'name': self.name}]}
s = self.post(data=json.dumps(payload))
if self.last_error is None:
self.__access_key = s['apiKeys'][0]['accessKey']
self.load()
else:
raise ApiKeyGenerationException(self.last_error)
def invalidate(self, reason='key deleted via mixcoatl'):
"""Call the API to invalidate the current instance of `ApiKey`
This is the same as deleting the api key
:param reason: the reason for invalidating the key
:type reason: str.
:returns: True
:raises: :class:`ApiKeyInvalidationException`
"""
params = {'reason': reason}
self.delete(params=params)
if self.last_error is None:
return True
else:
raise ApiKeyInvalidationException(self.last_error)
@classmethod
def generate_api_key(cls, key_name, description, expiration=None):
"""Generates a new API key
>>> ApiKey.generate_api_key('my-api-key', 'this is my api key')
        {'access_key': '<KEY>', ...}
:param key_name: the name for the key
:type key_name: str.
:param description: the description for the key
:type description: str.
:param expiration: *unused for now*
:type expiration: str.
:returns: :class:`ApiKey`
:raises: :class:`ApiKeyGenerationException`
"""
a = cls()
a.name = key_name
a.description = description
a.create()
return a
@classmethod
def all(cls, keys_only=False, endpoint=None, **kwargs):
"""Get all api keys
.. note::
The keys used to make the request determine results visibility
:param keys_only: Only return `access_key` instead of `ApiKey` objects
:type keys_only: bool.
:param detail: The level of detail to return - `basic` or `extended`
:type detail: str.
:param account_id: Display all system keys belonging to `account_id`
:type account_id: int.
:param user_id: Display all keys belonging to `user_id`
:type user_id: int.
:returns: `list` - of :class:`ApiKey` or :attr:`access_key`
"""
if 'access_key' in kwargs:
r = Resource(cls.PATH + "/" + kwargs['access_key'], endpoint=endpoint)
params = {}
else:
r = Resource(cls.PATH, endpoint=endpoint)
if 'detail' in kwargs:
r.request_details = kwargs['detail']
else:
r.request_details = 'basic'
if 'account_id' in kwargs:
params = {'accountId': kwargs['account_id']}
elif 'user_id' in kwargs:
params = {'userId': kwargs['user_id']}
else:
params = {}
x = r.get(params=params)
if r.last_error is None:
if keys_only is True:
return [i[camelize(cls.PRIMARY_KEY)]
for i in x[cls.COLLECTION_NAME]]
else:
return [type(cls.__name__, (object,), i)
for i in uncamel_keys(x)[uncamel(cls.COLLECTION_NAME)]]
else:
raise ApiKeyException(r.last_error)
class ApiKeyException(BaseException):
pass
class ApiKeyGenerationException(ApiKeyException):
pass
class ApiKeyInvalidationException(ApiKeyException):
pass
| 2.140625 | 2 |
Python tests/dictionaries.py | Johnny-QA/Python_training | 0 | 4292 | <reponame>Johnny-QA/Python_training<filename>Python tests/dictionaries.py
my_set = {1, 3, 5}
my_dict = {'name': 'Jose', 'age': 90}
another_dict = {1: 15, 2: 75, 3: 150}
lottery_players = [
{
'name': 'Rolf',
'numbers': (13, 45, 66, 23, 22)
},
{
'name': 'John',
'numbers': (14, 56, 80, 23, 22)
}
]
universities = [
{
'name': 'Oxford',
'location': 'UK'
},
{
'name': 'MIT',
'location': 'US'
}
] | 3.09375 | 3 |
psdaq/psdaq/control_gui/QWTable.py | ZhenghengLi/lcls2 | 16 | 4293 | <reponame>ZhenghengLi/lcls2
"""Class :py:class:`QWTable` is a QTableView->QWidget for tree model
======================================================================
Usage ::
# Run test: python lcls2/psdaq/psdaq/control_gui/QWTable.py
from psdaq.control_gui.QWTable import QWTable
w = QWTable()
Created on 2019-03-28 by <NAME>
Re-designed after copy psana/graphqt/QWTable.py -> psdaq/control_gui/
"""
import logging
logger = logging.getLogger(__name__)
from PyQt5.QtWidgets import QTableView, QVBoxLayout, QAbstractItemView, QSizePolicy
from PyQt5.QtGui import QStandardItemModel, QStandardItem
from PyQt5.QtCore import Qt, QModelIndex
from psdaq.control_gui.QWIcons import icon
class QWTable(QTableView):
def __init__(self, **kwargs):
parent = kwargs.get('parent', None)
QTableView.__init__(self, parent)
self._name = self.__class__.__name__
icon.set_icons()
self.is_connected_item_changed = False
self._si_model = QStandardItemModel()
self.set_selection_mode()
self.fill_table_model(**kwargs) # defines self._si_model
self.setModel(self._si_model)
self.connect_control()
self.set_style()
def connect_control(self):
self.connect_item_selected_to(self.on_item_selected)
self.clicked.connect(self.on_click)
self.doubleClicked.connect(self.on_double_click)
self.connect_item_changed_to(self.on_item_changed)
#def __del__(self):
# QTableView.__del__(self) - it does not have __del__
def set_selection_mode(self, smode=QAbstractItemView.ExtendedSelection):
logger.debug('Set selection mode: %s'%smode)
self.setSelectionMode(smode)
def connect_item_changed_to(self, recipient):
self._si_model.itemChanged.connect(recipient)
self.is_connected_item_changed = True
def disconnect_item_changed_from(self, recipient):
if self.is_connected_item_changed:
self._si_model.itemChanged.disconnect(recipient)
self.is_connected_item_changed = False
def connect_item_selected_to(self, recipient):
self.selectionModel().currentChanged[QModelIndex, QModelIndex].connect(recipient)
def disconnect_item_selected_from(self, recipient):
#self.selectionModel().selectionChanged[QModelIndex, QModelIndex].disconnect(recipient)
self.selectionModel().currentChanged[QModelIndex, QModelIndex].disconnect(recipient)
def set_style(self):
self.setStyleSheet("QTableView::item:hover{background-color:#00FFAA;}")
#self.setSizePolicy(QSizePolicy::Preferred,QSizePolicy::Fixed)
self.set_exact_widget_size()
def set_exact_widget_size(self):
"""set window size exactly matching actual size of QTableView.
"""
self.setSizePolicy(QSizePolicy.Minimum, QSizePolicy.Minimum)
self.setVerticalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
self.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
self.resizeColumnsToContents()
self.setFixedSize(self.horizontalHeader().length()+self.verticalHeader().width(),\
self.verticalHeader().length()+self.horizontalHeader().height())
def fill_table_model(self, **kwargs):
self.clear_model()
self._si_model.setHorizontalHeaderLabels(['col0', 'col1', 'col2', 'col3', 'col4'])
self._si_model.setVerticalHeaderLabels(['row0', 'row1', 'row2', 'row3'])
for row in range(0, 4):
for col in range(0, 6):
item = QStandardItem("itemA %d %d"%(row,col))
item.setIcon(icon.icon_table)
item.setCheckable(True)
self._si_model.setItem(row,col,item)
if col==2: item.setIcon(icon.icon_folder_closed)
if col==3: item.setText('Some text')
#self._si_model.appendRow(item)
def clear_model(self):
rows,cols = self._si_model.rowCount(), self._si_model.columnCount()
self._si_model.removeRows(0, rows)
self._si_model.removeColumns(0, cols)
def selected_indexes(self):
return self.selectedIndexes()
def selected_items(self):
indexes = self.selectedIndexes()
return [self._si_model.itemFromIndex(i) for i in self.selectedIndexes()]
def getFullNameFromItem(self, item):
#item = self._si_model.itemFromIndex(ind)
ind = self._si_model.indexFromItem(item)
return self.getFullNameFromIndex(ind)
def getFullNameFromIndex(self, ind):
item = self._si_model.itemFromIndex(ind)
if item is None: return None
self._full_name = item.text()
self._getFullName(ind)
return self._full_name
def _getFullName(self, ind):
ind_par = self._si_model.parent(ind)
if(ind_par.column() == -1):
item = self._si_model.itemFromIndex(ind)
            self._full_name = '/' + self._full_name
#logger.debug('Item full name:' + self._full_name)
return self._full_name
else:
item_par = self._si_model.itemFromIndex(ind_par)
self._full_name = item_par.text() + '/' + self._full_name
self._getFullName(ind_par)
# def resizeEvent(self, e):
# logger.debug('resizeEvent')
# QTableView.resizeEvent(self, e)
def closeEvent(self, event): # if the x is clicked
logger.debug('closeEvent')
QTableView.closeEvent(self, event)
def on_click(self, index):
item = self._si_model.itemFromIndex(index)
msg = 'on_click: item in row:%02d text: %s' % (index.row(), item.text())
logger.debug(msg)
def on_double_click(self, index):
item = self._si_model.itemFromIndex(index)
msg = 'on_double_click: item in row:%02d text: %s' % (index.row(), item.text())
logger.debug(msg)
def on_item_selected(self, ind_sel, ind_desel):
#logger.debug("ind selected: ", ind_sel.row(), ind_sel.column())
#logger.debug("ind deselected: ", ind_desel.row(),ind_desel.column())
item = self._si_model.itemFromIndex(ind_sel)
logger.debug('on_item_selected: "%s" is selected' % (item.text() if item is not None else None))
#logger.debug('on_item_selected: %s' % self.getFullNameFromItem(item))
def on_item_changed(self, item):
state = ['UNCHECKED', 'TRISTATE', 'CHECKED'][item.checkState()]
logger.debug('abstract on_item_changed: "%s" at state %s' % (self.getFullNameFromItem(item), state))
def process_selected_items(self):
selitems = self.selected_items()
msg = '%d Selected items:' % len(selitems)
for i in selitems:
msg += '\n %s' % i.text()
logger.info(msg)
if __name__ == '__main__':
def keyPressEvent(self, e):
logger.info('keyPressEvent, key=%s' % e.key())
if e.key() == Qt.Key_Escape:
self.close()
elif e.key() == Qt.Key_S:
self.process_selected_items()
else:
logger.info('Keys:'\
'\n ESC - exit'\
'\n S - show selected items'\
'\n')
if __name__ == '__main__':
import sys
from PyQt5.QtWidgets import QApplication
logging.basicConfig(format='%(asctime)s %(name)s %(levelname)s: %(message)s', datefmt='%H:%M:%S', level=logging.DEBUG)
app = QApplication(sys.argv)
w = QWTable()
#w.setGeometry(100, 100, 700, 300)
w.setWindowTitle('QWTable')
w.move(100,50)
w.show()
app.exec_()
del w
del app
# EOF
| 2.171875 | 2 |
src/grailbase/mtloader.py | vadmium/grailbrowser | 9 | 4294 | """Extension loader for filetype handlers.
The extension objects provided by MIMEExtensionLoader objects have four
attributes: parse, embed, add_options, and update_options. The first two
are used as handlers for supporting the MIME type as primary and embedded
resources. The last two are (currently) only used for printing.
"""
__version__ = '$Revision: 2.4 $'
from . import extloader
import string
class MIMEExtensionLoader(extloader.ExtensionLoader):
def find(self, name):
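        # Map a MIME type such as "text/html" to a handler module name such as
        # "text_html"; fall back to the major-type handler when no exact module exists.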
        new_name = name.replace("-", "_")
        major, minor = tuple(new_name.split("/"))
if minor:
modname = "%s_%s" % (major, minor)
else:
modname = major
mod = self.find_module(modname)
ext = None
if not mod and modname != major:
ext = self.get(major + "/")
elif mod:
ext = MIMETypeExtension(name, mod, modname)
return ext
class MIMETypeExtension:
def __init__(self, type, mod, modname):
self.type = type
self.__load_attr(mod, "parse_" + modname, "parse")
self.__load_attr(mod, "embed_" + modname, "embed")
self.__load_attr(mod, "add_options")
self.__load_attr(mod, "update_settings")
def __repr__(self):
classname = self.__class__.__name__
modulename = self.__class__.__module__
if self.parse and self.embed:
flags = " [displayable, embeddable]"
elif self.embed:
flags = " [embeddable]"
elif self.parse:
flags = " [displayable]"
else:
# not very useful, now is it?
flags = ""
return "<%s.%s for %s%s>" % (modulename, classname, self.type, flags)
def __load_attr(self, mod, name, load_as=None):
load_as = load_as or name
if hasattr(mod, name):
v = getattr(mod, name)
else:
v = None
setattr(self, load_as, v)
| 2.484375 | 2 |
eventstreams_sdk/adminrest_v1.py | IBM/eventstreams-python-sdk | 2 | 4295 | <gh_stars>1-10
# coding: utf-8
# (C) Copyright IBM Corp. 2021.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# IBM OpenAPI SDK Code Generator Version: 3.25.0-2b3f843a-20210115-164628
"""
The administration REST API for IBM Event Streams on Cloud.
"""
from typing import Dict, List
import json
from ibm_cloud_sdk_core import BaseService, DetailedResponse
from ibm_cloud_sdk_core.authenticators.authenticator import Authenticator
from ibm_cloud_sdk_core.get_authenticator import get_authenticator_from_environment
from ibm_cloud_sdk_core.utils import convert_model
from .common import get_sdk_headers
##############################################################################
# Service
##############################################################################
class AdminrestV1(BaseService):
"""The adminrest V1 service."""
DEFAULT_SERVICE_URL = 'https://adminrest.cloud.ibm.com'
DEFAULT_SERVICE_NAME = 'adminrest'
@classmethod
def new_instance(cls,
service_name: str = DEFAULT_SERVICE_NAME,
) -> 'AdminrestV1':
"""
Return a new client for the adminrest service using the specified
parameters and external configuration.
"""
authenticator = get_authenticator_from_environment(service_name)
service = cls(
authenticator
)
service.configure_service(service_name)
return service
def __init__(self,
authenticator: Authenticator = None,
) -> None:
"""
Construct a new client for the adminrest service.
:param Authenticator authenticator: The authenticator specifies the authentication mechanism.
Get up to date information from https://github.com/IBM/python-sdk-core/blob/master/README.md
about initializing the authenticator of your choice.
"""
BaseService.__init__(self,
service_url=self.DEFAULT_SERVICE_URL,
authenticator=authenticator)
#########################
# default
#########################
def create_topic(self,
*,
name: str = None,
partitions: int = None,
partition_count: int = None,
configs: List['ConfigCreate'] = None,
**kwargs
) -> DetailedResponse:
"""
Create a new topic.
Create a new topic.
:param str name: (optional) The name of topic to be created.
:param int partitions: (optional) The number of partitions.
:param int partition_count: (optional) The number of partitions, this field
takes precedence over 'partitions'. Default value is 1 if not specified.
:param List[ConfigCreate] configs: (optional) The config properties to be
set for the new topic.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse
"""
if configs is not None:
configs = [convert_model(x) for x in configs]
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='create_topic')
headers.update(sdk_headers)
data = {
'name': name,
'partitions': partitions,
'partition_count': partition_count,
'configs': configs
}
data = {k: v for (k, v) in data.items() if v is not None}
data = json.dumps(data)
headers['content-type'] = 'application/json'
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
url = '/admin/topics'
request = self.prepare_request(method='POST',
url=url,
headers=headers,
data=data)
response = self.send(request)
return response
def list_topics(self,
*,
topic_filter: str = None,
per_page: int = None,
page: int = None,
**kwargs
) -> DetailedResponse:
"""
Get a list of topics.
Returns a list containing information about all of the Kafka topics that are
defined for an instance of the Event Streams service. If there are currently no
topics defined then an empty list is returned.
:param str topic_filter: (optional) A filter to be applied to the topic
names. A simple filter can be specified as a string with asterisk (`*`)
wildcards representing 0 or more characters, e.g. `topic-name*` will filter
all topic names that begin with the string `topic-name` followed by any
character sequence. A more complex filter pattern can be used by
surrounding a regular expression in forward slash (`/`) delimiters, e.g.
`/topic-name.* /`.
        :param int per_page: (optional) The number of topic names to be returned.
:param int page: (optional) The page number to be returned. The number 1
represents the first page. The default value is 1.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `List[TopicDetail]` result
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='list_topics')
headers.update(sdk_headers)
params = {
'topic_filter': topic_filter,
'per_page': per_page,
'page': page
}
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
url = '/admin/topics'
request = self.prepare_request(method='GET',
url=url,
headers=headers,
params=params)
response = self.send(request)
return response
def get_topic(self,
topic_name: str,
**kwargs
) -> DetailedResponse:
"""
Get detailed information on a topic.
Get detailed information on a topic.
:param str topic_name: The topic name for the topic to be listed.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `TopicDetail` object
"""
if topic_name is None:
raise ValueError('topic_name must be provided')
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='get_topic')
headers.update(sdk_headers)
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['topic_name']
path_param_values = self.encode_path_vars(topic_name)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/admin/topics/{topic_name}'.format(**path_param_dict)
request = self.prepare_request(method='GET',
url=url,
headers=headers)
response = self.send(request)
return response
def delete_topic(self,
topic_name: str,
**kwargs
) -> DetailedResponse:
"""
Delete a topic.
Delete a topic.
:param str topic_name: The topic name for the topic to be listed.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse
"""
if topic_name is None:
raise ValueError('topic_name must be provided')
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='delete_topic')
headers.update(sdk_headers)
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['topic_name']
path_param_values = self.encode_path_vars(topic_name)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/admin/topics/{topic_name}'.format(**path_param_dict)
request = self.prepare_request(method='DELETE',
url=url,
headers=headers)
response = self.send(request)
return response
def update_topic(self,
topic_name: str,
*,
new_total_partition_count: int = None,
configs: List['ConfigUpdate'] = None,
**kwargs
) -> DetailedResponse:
"""
Increase the number of partitions and/or update one or more topic configuration parameters.
Increase the number of partitions and/or update one or more topic configuration
parameters.
:param str topic_name: The topic name for the topic to be listed.
:param int new_total_partition_count: (optional) The new partition number
to be increased.
:param List[ConfigUpdate] configs: (optional) The config properties to be
updated for the topic. Valid config keys are 'cleanup.policy',
'retention.ms', 'retention.bytes', 'segment.bytes', 'segment.ms',
'segment.index.bytes'.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse
"""
if topic_name is None:
raise ValueError('topic_name must be provided')
if configs is not None:
configs = [convert_model(x) for x in configs]
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='update_topic')
headers.update(sdk_headers)
data = {
'new_total_partition_count': new_total_partition_count,
'configs': configs
}
data = {k: v for (k, v) in data.items() if v is not None}
data = json.dumps(data)
headers['content-type'] = 'application/json'
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['topic_name']
path_param_values = self.encode_path_vars(topic_name)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/admin/topics/{topic_name}'.format(**path_param_dict)
request = self.prepare_request(method='PATCH',
url=url,
headers=headers,
data=data)
response = self.send(request)
return response
def get_mirroring_topic_selection(self,
**kwargs
) -> DetailedResponse:
"""
Get current topic selection for mirroring.
Get current topic selection for mirroring.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `MirroringTopicSelection` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='get_mirroring_topic_selection')
headers.update(sdk_headers)
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
url = '/admin/mirroring/topic-selection'
request = self.prepare_request(method='GET',
url=url,
headers=headers)
response = self.send(request)
return response
def replace_mirroring_topic_selection(self,
*,
includes: List[str] = None,
**kwargs
) -> DetailedResponse:
"""
Replace topic selection for mirroring.
Replace topic selection for mirroring. This operation replaces the complete set of
mirroring topic selections.
:param List[str] includes: (optional)
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `MirroringTopicSelection` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='replace_mirroring_topic_selection')
headers.update(sdk_headers)
data = {
'includes': includes
}
data = {k: v for (k, v) in data.items() if v is not None}
data = json.dumps(data)
headers['content-type'] = 'application/json'
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
url = '/admin/mirroring/topic-selection'
request = self.prepare_request(method='POST',
url=url,
headers=headers,
data=data)
response = self.send(request)
return response
def get_mirroring_active_topics(self,
**kwargs
) -> DetailedResponse:
"""
Get topics that are being actively mirrored.
Get topics that are being actively mirrored.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `MirroringActiveTopics` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='get_mirroring_active_topics')
headers.update(sdk_headers)
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
url = '/admin/mirroring/active-topics'
request = self.prepare_request(method='GET',
url=url,
headers=headers)
response = self.send(request)
return response
##############################################################################
# Models
##############################################################################
class ReplicaAssignmentBrokers():
"""
ReplicaAssignmentBrokers.
:attr List[int] replicas: (optional)
"""
def __init__(self,
*,
replicas: List[int] = None) -> None:
"""
Initialize a ReplicaAssignmentBrokers object.
:param List[int] replicas: (optional)
"""
self.replicas = replicas
@classmethod
def from_dict(cls, _dict: Dict) -> 'ReplicaAssignmentBrokers':
"""Initialize a ReplicaAssignmentBrokers object from a json dictionary."""
args = {}
if 'replicas' in _dict:
args['replicas'] = _dict.get('replicas')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a ReplicaAssignmentBrokers object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'replicas') and self.replicas is not None:
_dict['replicas'] = self.replicas
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this ReplicaAssignmentBrokers object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'ReplicaAssignmentBrokers') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'ReplicaAssignmentBrokers') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class ConfigCreate():
"""
ConfigCreate.
:attr str name: (optional) The name of the config property.
:attr str value: (optional) The value for a config property.
"""
def __init__(self,
*,
name: str = None,
value: str = None) -> None:
"""
Initialize a ConfigCreate object.
:param str name: (optional) The name of the config property.
:param str value: (optional) The value for a config property.
"""
self.name = name
self.value = value
@classmethod
def from_dict(cls, _dict: Dict) -> 'ConfigCreate':
"""Initialize a ConfigCreate object from a json dictionary."""
args = {}
if 'name' in _dict:
args['name'] = _dict.get('name')
if 'value' in _dict:
args['value'] = _dict.get('value')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a ConfigCreate object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'name') and self.name is not None:
_dict['name'] = self.name
if hasattr(self, 'value') and self.value is not None:
_dict['value'] = self.value
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this ConfigCreate object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'ConfigCreate') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'ConfigCreate') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class ConfigUpdate():
"""
ConfigUpdate.
:attr str name: (optional) The name of the config property.
:attr str value: (optional) The value for a config property.
:attr bool reset_to_default: (optional) When true, the value of the config
property is reset to its default value.
"""
def __init__(self,
*,
name: str = None,
value: str = None,
reset_to_default: bool = None) -> None:
"""
Initialize a ConfigUpdate object.
:param str name: (optional) The name of the config property.
:param str value: (optional) The value for a config property.
:param bool reset_to_default: (optional) When true, the value of the config
property is reset to its default value.
"""
self.name = name
self.value = value
self.reset_to_default = reset_to_default
@classmethod
def from_dict(cls, _dict: Dict) -> 'ConfigUpdate':
"""Initialize a ConfigUpdate object from a json dictionary."""
args = {}
if 'name' in _dict:
args['name'] = _dict.get('name')
if 'value' in _dict:
args['value'] = _dict.get('value')
if 'reset_to_default' in _dict:
args['reset_to_default'] = _dict.get('reset_to_default')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a ConfigUpdate object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'name') and self.name is not None:
_dict['name'] = self.name
if hasattr(self, 'value') and self.value is not None:
_dict['value'] = self.value
if hasattr(self, 'reset_to_default') and self.reset_to_default is not None:
_dict['reset_to_default'] = self.reset_to_default
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this ConfigUpdate object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'ConfigUpdate') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'ConfigUpdate') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class MirroringActiveTopics():
"""
Topics that are being actively mirrored.
:attr List[str] active_topics: (optional)
"""
def __init__(self,
*,
active_topics: List[str] = None) -> None:
"""
Initialize a MirroringActiveTopics object.
:param List[str] active_topics: (optional)
"""
self.active_topics = active_topics
@classmethod
def from_dict(cls, _dict: Dict) -> 'MirroringActiveTopics':
"""Initialize a MirroringActiveTopics object from a json dictionary."""
args = {}
if 'active_topics' in _dict:
args['active_topics'] = _dict.get('active_topics')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a MirroringActiveTopics object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'active_topics') and self.active_topics is not None:
_dict['active_topics'] = self.active_topics
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this MirroringActiveTopics object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'MirroringActiveTopics') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'MirroringActiveTopics') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class MirroringTopicSelection():
"""
Mirroring topic selection payload.
:attr List[str] includes: (optional)
"""
def __init__(self,
*,
includes: List[str] = None) -> None:
"""
Initialize a MirroringTopicSelection object.
:param List[str] includes: (optional)
"""
self.includes = includes
@classmethod
def from_dict(cls, _dict: Dict) -> 'MirroringTopicSelection':
"""Initialize a MirroringTopicSelection object from a json dictionary."""
args = {}
if 'includes' in _dict:
args['includes'] = _dict.get('includes')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a MirroringTopicSelection object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'includes') and self.includes is not None:
_dict['includes'] = self.includes
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this MirroringTopicSelection object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'MirroringTopicSelection') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'MirroringTopicSelection') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class ReplicaAssignment():
"""
ReplicaAssignment.
:attr int id: (optional) The ID of the partition.
:attr ReplicaAssignmentBrokers brokers: (optional)
"""
def __init__(self,
*,
id: int = None,
brokers: 'ReplicaAssignmentBrokers' = None) -> None:
"""
Initialize a ReplicaAssignment object.
:param int id: (optional) The ID of the partition.
:param ReplicaAssignmentBrokers brokers: (optional)
"""
self.id = id
self.brokers = brokers
@classmethod
def from_dict(cls, _dict: Dict) -> 'ReplicaAssignment':
"""Initialize a ReplicaAssignment object from a json dictionary."""
args = {}
if 'id' in _dict:
args['id'] = _dict.get('id')
if 'brokers' in _dict:
args['brokers'] = ReplicaAssignmentBrokers.from_dict(_dict.get('brokers'))
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a ReplicaAssignment object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'id') and self.id is not None:
_dict['id'] = self.id
if hasattr(self, 'brokers') and self.brokers is not None:
_dict['brokers'] = self.brokers.to_dict()
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this ReplicaAssignment object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'ReplicaAssignment') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'ReplicaAssignment') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class TopicConfigs():
"""
TopicConfigs.
:attr str cleanup_policy: (optional) The value of config property
'cleanup.policy'.
:attr str min_insync_replicas: (optional) The value of config property
'min.insync.replicas'.
:attr str retention_bytes: (optional) The value of config property
'retention.bytes'.
:attr str retention_ms: (optional) The value of config property 'retention.ms'.
:attr str segment_bytes: (optional) The value of config property
'segment.bytes'.
:attr str segment_index_bytes: (optional) The value of config property
'segment.index.bytes'.
:attr str segment_ms: (optional) The value of config property 'segment.ms'.
"""
def __init__(self,
*,
cleanup_policy: str = None,
min_insync_replicas: str = None,
retention_bytes: str = None,
retention_ms: str = None,
segment_bytes: str = None,
segment_index_bytes: str = None,
segment_ms: str = None) -> None:
"""
Initialize a TopicConfigs object.
:param str cleanup_policy: (optional) The value of config property
'cleanup.policy'.
:param str min_insync_replicas: (optional) The value of config property
'min.insync.replicas'.
:param str retention_bytes: (optional) The value of config property
'retention.bytes'.
:param str retention_ms: (optional) The value of config property
'retention.ms'.
:param str segment_bytes: (optional) The value of config property
'segment.bytes'.
:param str segment_index_bytes: (optional) The value of config property
'segment.index.bytes'.
:param str segment_ms: (optional) The value of config property
'segment.ms'.
"""
self.cleanup_policy = cleanup_policy
self.min_insync_replicas = min_insync_replicas
self.retention_bytes = retention_bytes
self.retention_ms = retention_ms
self.segment_bytes = segment_bytes
self.segment_index_bytes = segment_index_bytes
self.segment_ms = segment_ms
@classmethod
def from_dict(cls, _dict: Dict) -> 'TopicConfigs':
"""Initialize a TopicConfigs object from a json dictionary."""
args = {}
if 'cleanup.policy' in _dict:
args['cleanup_policy'] = _dict.get('cleanup.policy')
if 'min.insync.replicas' in _dict:
args['min_insync_replicas'] = _dict.get('min.insync.replicas')
if 'retention.bytes' in _dict:
args['retention_bytes'] = _dict.get('retention.bytes')
if 'retention.ms' in _dict:
args['retention_ms'] = _dict.get('retention.ms')
if 'segment.bytes' in _dict:
args['segment_bytes'] = _dict.get('segment.bytes')
if 'segment.index.bytes' in _dict:
args['segment_index_bytes'] = _dict.get('segment.index.bytes')
if 'segment.ms' in _dict:
args['segment_ms'] = _dict.get('segment.ms')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a TopicConfigs object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'cleanup_policy') and self.cleanup_policy is not None:
_dict['cleanup.policy'] = self.cleanup_policy
if hasattr(self, 'min_insync_replicas') and self.min_insync_replicas is not None:
_dict['min.insync.replicas'] = self.min_insync_replicas
if hasattr(self, 'retention_bytes') and self.retention_bytes is not None:
_dict['retention.bytes'] = self.retention_bytes
if hasattr(self, 'retention_ms') and self.retention_ms is not None:
_dict['retention.ms'] = self.retention_ms
if hasattr(self, 'segment_bytes') and self.segment_bytes is not None:
_dict['segment.bytes'] = self.segment_bytes
if hasattr(self, 'segment_index_bytes') and self.segment_index_bytes is not None:
_dict['segment.index.bytes'] = self.segment_index_bytes
if hasattr(self, 'segment_ms') and self.segment_ms is not None:
_dict['segment.ms'] = self.segment_ms
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this TopicConfigs object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'TopicConfigs') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'TopicConfigs') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class TopicDetail():
"""
TopicDetail.
:attr str name: (optional) The name of the topic.
:attr int partitions: (optional) The number of partitions.
:attr int replication_factor: (optional) The number of replication factor.
:attr int retention_ms: (optional) The value of config property 'retention.ms'.
:attr str cleanup_policy: (optional) The value of config property
'cleanup.policy'.
:attr TopicConfigs configs: (optional)
    :attr List[ReplicaAssignment] replica_assignments: (optional) The replica
assignment of the topic.
"""
def __init__(self,
*,
name: str = None,
partitions: int = None,
replication_factor: int = None,
retention_ms: int = None,
cleanup_policy: str = None,
configs: 'TopicConfigs' = None,
replica_assignments: List['ReplicaAssignment'] = None) -> None:
"""
Initialize a TopicDetail object.
:param str name: (optional) The name of the topic.
:param int partitions: (optional) The number of partitions.
:param int replication_factor: (optional) The number of replication factor.
:param int retention_ms: (optional) The value of config property
'retention.ms'.
:param str cleanup_policy: (optional) The value of config property
'cleanup.policy'.
:param TopicConfigs configs: (optional)
        :param List[ReplicaAssignment] replica_assignments: (optional) The replica
assignment of the topic.
"""
self.name = name
self.partitions = partitions
self.replication_factor = replication_factor
self.retention_ms = retention_ms
self.cleanup_policy = cleanup_policy
self.configs = configs
self.replica_assignments = replica_assignments
@classmethod
def from_dict(cls, _dict: Dict) -> 'TopicDetail':
"""Initialize a TopicDetail object from a json dictionary."""
args = {}
if 'name' in _dict:
args['name'] = _dict.get('name')
if 'partitions' in _dict:
args['partitions'] = _dict.get('partitions')
if 'replicationFactor' in _dict:
args['replication_factor'] = _dict.get('replicationFactor')
if 'retentionMs' in _dict:
args['retention_ms'] = _dict.get('retentionMs')
if 'cleanupPolicy' in _dict:
args['cleanup_policy'] = _dict.get('cleanupPolicy')
if 'configs' in _dict:
args['configs'] = TopicConfigs.from_dict(_dict.get('configs'))
if 'replicaAssignments' in _dict:
args['replica_assignments'] = [ReplicaAssignment.from_dict(x) for x in _dict.get('replicaAssignments')]
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a TopicDetail object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'name') and self.name is not None:
_dict['name'] = self.name
if hasattr(self, 'partitions') and self.partitions is not None:
_dict['partitions'] = self.partitions
if hasattr(self, 'replication_factor') and self.replication_factor is not None:
_dict['replicationFactor'] = self.replication_factor
if hasattr(self, 'retention_ms') and self.retention_ms is not None:
_dict['retentionMs'] = self.retention_ms
if hasattr(self, 'cleanup_policy') and self.cleanup_policy is not None:
_dict['cleanupPolicy'] = self.cleanup_policy
if hasattr(self, 'configs') and self.configs is not None:
_dict['configs'] = self.configs.to_dict()
if hasattr(self, 'replica_assignments') and self.replica_assignments is not None:
_dict['replicaAssignments'] = [x.to_dict() for x in self.replica_assignments]
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this TopicDetail object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'TopicDetail') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'TopicDetail') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
| 1.679688 | 2 |
3-functions/pytest-exercises/test_functions.py | BaseCampCoding/python-fundamentals | 0 | 4296 | <filename>3-functions/pytest-exercises/test_functions.py
import functions
from pytest import approx
from bcca.test import should_print
def test_add_em_up():
assert functions.add_em_up(1, 2, 3) == 6
assert functions.add_em_up(4, 5, 6) == 15
def test_sub_sub_hubbub():
assert functions.sub_sub_hubbub(1, 2, 3) == -4
def test_square_area():
assert functions.square_area(5, 5) == 25
assert functions.square_area(3, 5) == 15
assert functions.square_area(2, 2) == 4
def test_circle_area():
assert functions.circle_area(1) == approx(3.14)
assert functions.circle_area(5) == approx(78.5)
def test_kilometers_to_miles():
assert functions.kilometers_to_miles(1) == approx(0.6214)
assert functions.kilometers_to_miles(.5) == approx(0.3107)
assert functions.kilometers_to_miles(0) == approx(0.0)
assert functions.kilometers_to_miles(40) == approx(24.855999999999998)
@should_print
def test_sales_tax_1(output):
functions.sales_tax(1)
assert output == """
Purchase Amount: 1
State Sales Tax: 0.04
County Sales Tax: 0.02
Total Sales Tax: 0.06
Total Cost: 1.06
"""
@should_print
def test_sales_tax_99_99(output):
functions.sales_tax(99.99)
assert output == """
Purchase Amount: 99.99
State Sales Tax: 3.9996
County Sales Tax: 1.9998
Total Sales Tax: 5.9994
Total Cost: 105.98939999999999
"""
@should_print
def test_sales_tax_5_95(output):
functions.sales_tax(5.95)
assert output == """
Purchase Amount: 5.95
State Sales Tax: 0.23800000000000002
County Sales Tax: 0.11900000000000001
Total Sales Tax: 0.35700000000000004
Total Cost: 6.307
"""
def test_min_insurance():
assert functions.min_insurance(100000) == approx(80000.0)
assert functions.min_insurance(123456789) == approx(98765431.2)
assert functions.min_insurance(0) == approx(0.0)
assert functions.min_insurance(-54317890) == approx(-43454312.0)
@should_print
def test_property_tax_10000(output):
functions.property_tax(10000)
assert output == '''
Assessment Value: 6000.0
Property Tax: 38.4
'''
@should_print
def test_property_tax_99999_95(output):
functions.property_tax(99999.95)
assert output == '''
Assessment Value: 59999.969999999994
Property Tax: 383.999808
'''
def test_bmi():
assert functions.bmi(160, 67) == approx(25.05680552)
assert functions.bmi(200, 72) == approx(27.12191358)
assert functions.bmi(120, 60) == approx(23.43333333)
def test_calories():
assert functions.calories(5, 20) == 125
assert functions.calories(1, 1) == 13
def test_earnings():
assert functions.earnings(100, 100, 100) == 3600
assert functions.earnings(50, 75, 100) == 2550
assert functions.earnings(0, 1000, 79) == 12711
@should_print
def test_paint_job_estimator(output):
functions.paint_job_estimator(50, 10)
assert output == '''
Gallons of paint required: 0.43478260869565216
Hours of labor required: 3.4782608695652173
Cost of paint: 4.3478260869565215
Cost of labor: 69.56521739130434
Total Cost: 73.91304347826086
'''
@should_print
def test_paint_job_estimator_2(output):
functions.paint_job_estimator(750, 15.95)
assert output == '''
Gallons of paint required: 6.521739130434782
Hours of labor required: 52.17391304347826
Cost of paint: 104.02173913043477
Cost of labor: 1043.4782608695652
Total Cost: 1147.5
'''
@should_print
def test_monthly_sales_tax(output):
functions.monthly_sales_tax(123456.79)
assert output == '''
Monthly sales: 123456.79
State sales tax: 4938.2716
County sales tax: 2469.1358
Total sales tax: 7407.4074
'''
@should_print
def test_monthly_sales_tax_2(output):
functions.monthly_sales_tax(4321567.21)
assert output == '''
Monthly sales: 4321567.21
State sales tax: 172862.6884
County sales tax: 86431.3442
Total sales tax: 259294.03260000004
'''
| 3.46875 | 3 |
src/products/admin.py | apabaad/django_ecommerce | 0 | 4297 | <reponame>apabaad/django_ecommerce
from django.contrib import admin
from .models import Product
admin.site.register(Product) | 1.203125 | 1 |
cio/plugins/txt.py | beshrkayali/content-io | 6 | 4298 | <reponame>beshrkayali/content-io
# coding=utf-8
from __future__ import unicode_literals
from .base import BasePlugin
class TextPlugin(BasePlugin):
ext = 'txt'
| 1.234375 | 1 |
ml-scripts/dump-data-to-learn.py | thejoeejoee/SUI-MIT-VUT-2020-2021 | 0 | 4299 | #!/usr/bin/env python3
# Project: VUT FIT SUI Project - Dice Wars
# Authors:
# - <NAME> <<EMAIL>>
# - <NAME> <<EMAIL>>
# - <NAME> <<EMAIL>>
# - <NAME> <<EMAIL>>
# Year: 2020
# Description: Generates game configurations.
import random
import sys
from argparse import ArgumentParser
import time
from signal import signal, SIGCHLD
from utils import run_ai_only_game, BoardDefinition
parser = ArgumentParser(prog='Dice_Wars')
parser.add_argument('-p', '--port', help="Server port", type=int, default=5005)
parser.add_argument('-a', '--address', help="Server address", default='127.0.0.1')
procs = []
def signal_handler(signum, frame):
""" Handler for SIGCHLD signal that terminates server and clients. """
for p in procs:
try:
p.kill()
except ProcessLookupError:
pass
PLAYING_AIs = [
'xkolar71_orig',
'xkolar71_2',
'xkolar71_3',
'xkolar71_4',
]
def board_definitions():
while True:
random.seed(int(time.time()))
yield BoardDefinition(random.randint(1, 10 ** 10), random.randint(1, 10 ** 10), random.randint(1, 10 ** 10))
def main():
args = parser.parse_args()
signal(SIGCHLD, signal_handler)
boards_played = 0
try:
for board_definition in board_definitions():
boards_played += 1
run_ai_only_game(
args.port, args.address, procs, PLAYING_AIs,
board_definition,
fixed=random.randint(1, 10 ** 10),
client_seed=random.randint(1, 10 ** 10),
debug=True, logdir='logs',
)
print(f'Played {boards_played} games.', file=sys.stderr)
except (Exception, KeyboardInterrupt) as e:
sys.stderr.write("Breaking the tournament because of {}\n".format(repr(e)))
for p in procs:
p.kill()
raise
if __name__ == '__main__':
main()
| 2.40625 | 2 |