Dataset schema (each data row below lists these fields in order, separated by " | ", with the file content followed by the three trailing statistics):
- hexsha: string (length 40)
- size: int64 (1 to 1.03M)
- ext: string (10 classes)
- lang: string (1 class)
- max_stars_repo_path: string (length 3 to 239)
- max_stars_repo_name: string (length 5 to 130)
- max_stars_repo_head_hexsha: string (length 40 to 78)
- max_stars_repo_licenses: sequence (length 1 to 10)
- max_stars_count: int64 (1 to 191k, nullable)
- max_stars_repo_stars_event_min_datetime: string (length 24, nullable)
- max_stars_repo_stars_event_max_datetime: string (length 24, nullable)
- max_issues_repo_path: string (length 3 to 239)
- max_issues_repo_name: string (length 5 to 130)
- max_issues_repo_head_hexsha: string (length 40 to 78)
- max_issues_repo_licenses: sequence (length 1 to 10)
- max_issues_count: int64 (1 to 67k, nullable)
- max_issues_repo_issues_event_min_datetime: string (length 24, nullable)
- max_issues_repo_issues_event_max_datetime: string (length 24, nullable)
- max_forks_repo_path: string (length 3 to 239)
- max_forks_repo_name: string (length 5 to 130)
- max_forks_repo_head_hexsha: string (length 40 to 78)
- max_forks_repo_licenses: sequence (length 1 to 10)
- max_forks_count: int64 (1 to 105k, nullable)
- max_forks_repo_forks_event_min_datetime: string (length 24, nullable)
- max_forks_repo_forks_event_max_datetime: string (length 24, nullable)
- content: string (length 1 to 1.03M)
- avg_line_length: float64 (1 to 958k)
- max_line_length: int64 (1 to 1.03M)
- alphanum_fraction: float64 (0 to 1)
4a238aeacac1f1412741994aad4fbe944bfc042c | 2,054 | py | Python | pagarmecoreapi/models/list_discounts_response.py | pagarme/pagarme-core-api-python | c7b11ca78ab3e7e896e5b75048e6f72b511db00e | ["MIT"] | 6 | 2021-09-02T19:55:04.000Z | 2022-03-16T14:06:15.000Z | pagarmecoreapi/models/list_discounts_response.py | pagarme/pagarme-core-api-python | c7b11ca78ab3e7e896e5b75048e6f72b511db00e | ["MIT"] | 2 | 2021-10-11T22:48:15.000Z | 2022-01-24T18:24:23.000Z | pagarmecoreapi/models/list_discounts_response.py | pagarme/pagarme-core-api-python | c7b11ca78ab3e7e896e5b75048e6f72b511db00e | ["MIT"] | 2 | 2021-09-12T21:43:32.000Z | 2022-03-07T16:58:54.000Z |
# -*- coding: utf-8 -*-
"""
pagarmecoreapi
This file was automatically generated by APIMATIC v2.0 ( https://apimatic.io ).
"""
import pagarmecoreapi.models.get_discount_response
import pagarmecoreapi.models.paging_response
class ListDiscountsResponse(object):
"""Implementation of the 'ListDiscountsResponse' model.
TODO: type model description here.
Attributes:
data (list of GetDiscountResponse): The Discounts response
paging (PagingResponse): Paging object
"""
# Create a mapping from Model property names to API property names
_names = {
"data":'data',
"paging":'paging'
}
def __init__(self,
data=None,
paging=None):
"""Constructor for the ListDiscountsResponse class"""
# Initialize members of the class
self.data = data
self.paging = paging
@classmethod
def from_dictionary(cls,
dictionary):
"""Creates an instance of this model from a dictionary
Args:
dictionary (dictionary): A dictionary representation of the object as
obtained from the deserialization of the server's response. The keys
MUST match property names in the API description.
Returns:
object: An instance of this structure class.
"""
if dictionary is None:
return None
# Extract variables from the dictionary
data = None
if dictionary.get('data') != None:
data = list()
for structure in dictionary.get('data'):
data.append(pagarmecoreapi.models.get_discount_response.GetDiscountResponse.from_dictionary(structure))
paging = pagarmecoreapi.models.paging_response.PagingResponse.from_dictionary(dictionary.get('paging')) if dictionary.get('paging') else None
# Return an object of this model
return cls(data,
paging)
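# Illustrative usage (a sketch, not part of the generated SDK): an empty
# payload maps to a model whose attributes stay None; a real API response
# would populate `data` with GetDiscountResponse objects and `paging` with a
# PagingResponse through the from_dictionary hooks above.
if __name__ == "__main__":
    empty = ListDiscountsResponse.from_dictionary({})
    print(empty.data, empty.paging)  # -> None None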
| 29.342857 | 150 | 0.611003 |
4a238b5e03cec42fc880fc27715a18c28f2d35e4 | 11,536 | py | Python | tests/test_ssdeep_pep_452.py | drobotun/pyssdeep | d9499bd21fbe7dc9caed3ddbe6f77399a297bb70 | ["MIT"] | null | null | null | tests/test_ssdeep_pep_452.py | drobotun/pyssdeep | d9499bd21fbe7dc9caed3ddbe6f77399a297bb70 | ["MIT"] | null | null | null | tests/test_ssdeep_pep_452.py | drobotun/pyssdeep | d9499bd21fbe7dc9caed3ddbe6f77399a297bb70 | ["MIT"] | null | null | null |
import unittest
from unittest import mock
import pytest
from pyssdeep import FuzzyHash
from pyssdeep import new
from pyssdeep import compare
from pyssdeep import get_hash_file
from pyssdeep import get_hash_buffer
from pyssdeep import FuzzyHashError
def fuzzy_new_mock_raise():
raise FuzzyHashError(-1)
def fuzzy_clone_mock_raise(state):
raise FuzzyHashError(-1)
def fuzzy_update_mock_raise(state, buffer, buffer_size):
raise FuzzyHashError(-1)
def fuzzy_digest_mock_raise(state, flag):
raise FuzzyHashError(-1)
def digest_mock_raise(flag):
raise FuzzyHashError
def fuzzy_compare_mock_raise(signature_1, signature_2):
raise FuzzyHashError(-1)
def fuzzy_hash_buf_mock_raise(buffer, buffer_size):
raise FuzzyHashError(-1)
def os_path_isfile_mock(filename):
return False
def os_access_mock(filename, mode):
return False
def fuzzy_hash_filename_mock_raise(filename):
raise FuzzyHashError(-1)
class TestSSDEEP(unittest.TestCase):
def test_new(self):
test_fuzzy_hash_obj = new()
self.assertEqual(test_fuzzy_hash_obj._state.contents.bhstart, 0)
self.assertEqual(test_fuzzy_hash_obj._state.contents.bhend, 1)
self.assertEqual(test_fuzzy_hash_obj._state.contents.bhendlimit, 30)
self.assertEqual(test_fuzzy_hash_obj._state.contents.total_size, 0)
self.assertEqual(test_fuzzy_hash_obj._state.contents.reduce_border, 192)
self.assertEqual(test_fuzzy_hash_obj._state.contents.flags, 0)
self.assertEqual(test_fuzzy_hash_obj._state.contents.rollmask, 0)
def test_new_raise(self):
with mock.patch(
'pyssdeep.ssdeep_pep_452.fuzzy_new',
fuzzy_new_mock_raise
):
with self.assertRaises(FuzzyHashError) as context:
test_fuzzy_hash_obj = new()
self.assertTrue(
'Unable to create hash context. Error code: -1'
in str(context.exception)
)
def test_copy(self):
test_fuzzy_hash_obj = new()
test_fuzzy_hash_obj._state.contents.bh[0].digest = b'test_fuzzy_digest'
test_fuzzy_hash_obj_copy = test_fuzzy_hash_obj.copy()
test_fuzzy_hash_obj_copy._state.contents.bh[0].digest = b'test_fuzzy_copy_digest'
self.assertEqual(
test_fuzzy_hash_obj_copy._state.contents.bh[0].digest,
b'test_fuzzy_copy_digest'
)
self.assertEqual(
test_fuzzy_hash_obj._state.contents.bh[0].digest,
b'test_fuzzy_digest'
)
def test_copy_raise(self):
test_fuzzy_hash_obj = new()
with mock.patch(
'pyssdeep.ssdeep_pep_452.fuzzy_clone',
fuzzy_clone_mock_raise
):
with self.assertRaises(FuzzyHashError) as context:
test_fuzzy_hash_obj_copy = test_fuzzy_hash_obj.copy()
self.assertTrue(
'Unable to clone hash object. Error code: -1'
in str(context.exception)
)
def test_update(self):
test_fuzzy_hash_obj = new()
test_fuzzy_hash_obj.update(b'this test fuzzy hash string')
self.assertEqual(
test_fuzzy_hash_obj._state.contents.total_size, 27
)
def test_update_raise(self):
test_fuzzy_hash_obj = new()
with mock.patch(
'pyssdeep.ssdeep_pep_452.fuzzy_update',
fuzzy_update_mock_raise
):
with self.assertRaises(FuzzyHashError) as context:
test_fuzzy_hash_obj.update(b'this test fuzzy hash string')
self.assertTrue(
'Unable to update hash object. Error code: -1'
in str(context.exception)
)
def test_update_type_error(self):
test_fuzzy_hash_obj = new()
with self.assertRaises(TypeError) as context:
test_fuzzy_hash_obj.update(None)
self.assertTrue(
'Invalid data type. The data type cannot be "<class \'NoneType\'>".'
in str(context.exception)
)
def test_update_encode_error(self):
test_fuzzy_hash_obj = new()
with self.assertRaises(FuzzyHashError) as context:
test_fuzzy_hash_obj.update(
'тестовая строка для fazzy hash',
'ascii'
)
self.assertTrue(
'Data encoding error. The "encoding" value cannot be'
in str(context.exception)
)
def test_update_hash_context_error(self):
test_fuzzy_hash_obj = new()
test_fuzzy_hash_obj._state = None
with self.assertRaises(FuzzyHashError) as context:
test_fuzzy_hash_obj.update(b'this test fuzzy hash string')
self.assertTrue(
'Unable to update hash object. Hash context error.'
in str(context.exception)
)
def test_digest(self):
test_fuzzy_hash_obj = new()
test_result = test_fuzzy_hash_obj.digest()
self.assertEqual(test_result, '3::')
def test_digest_raise(self):
test_fuzzy_hash_obj = new()
with mock.patch(
'pyssdeep.ssdeep_pep_452.fuzzy_digest',
fuzzy_digest_mock_raise
):
with self.assertRaises(FuzzyHashError) as context:
test_result = test_fuzzy_hash_obj.digest()
self.assertTrue(
'Unable to compute digest of hash object. Error code: -1'
in str(context.exception)
)
def test_digest_type_error(self):
test_fuzzy_hash_obj = new()
with self.assertRaises(TypeError) as context:
test_result = test_fuzzy_hash_obj.digest(None)
self.assertTrue(
'Flag value must be of int type not "<class \'NoneType\'>".'
in str(context.exception)
)
def test_digest_hash_context_error(self):
test_fuzzy_hash_obj = new()
test_fuzzy_hash_obj._state = None
with self.assertRaises(FuzzyHashError) as context:
test_result = test_fuzzy_hash_obj.digest()
self.assertTrue(
'Unable to update hash object. Hash context error.'
)
def test_block_size(self):
test_fuzzy_hash_obj = new()
test_result = test_fuzzy_hash_obj.block_size
self.assertEqual(test_result, 3)
def test_block_size_raise(self):
test_fuzzy_hash_obj = new()
with mock.patch(
'pyssdeep.ssdeep_pep_452.FuzzyHash.digest',
digest_mock_raise
):
with self.assertRaises(FuzzyHashError) as context:
test_result = test_fuzzy_hash_obj.block_size
self.assertTrue(
'Unable to return the block size value.'
in str(context.exception)
)
def test_digest_size(self):
test_fuzzy_hash_obj = new()
test_result = test_fuzzy_hash_obj.digest_size
self.assertEqual(test_result, 1)
def test_digest_size_raise(self):
test_fuzzy_hash_obj = new()
with mock.patch(
'pyssdeep.ssdeep_pep_452.FuzzyHash.digest',
digest_mock_raise
):
with self.assertRaises(FuzzyHashError) as context:
test_result = test_fuzzy_hash_obj.digest_size
self.assertTrue(
'Unable to return the digest size value.'
in str(context.exception)
)
def test_name(self):
test_fuzzy_hash_obj = new()
test_result = test_fuzzy_hash_obj.name
self.assertEqual(test_result, 'ssdeep')
def test_compare(self):
test_result = compare('3:hRMs3FsRc2:hRpg', '3:hRMs3FsRc2:hRpg')
self.assertEqual(test_result, 100)
test_result = compare('3:hRMs3FsRc2:hRpg', '3:3LSve:7ce')
self.assertEqual(test_result, 0)
def test_compare_raise(self):
with mock.patch(
'pyssdeep.ssdeep_pep_452.fuzzy_compare',
fuzzy_compare_mock_raise
):
with self.assertRaises(FuzzyHashError) as context:
test_result = compare('3:hRMs3FsRc2:hRpg', '3:hRMs3FsRc2:hRpg')
self.assertTrue(
'Unable to compare this fazzy hash signatures. Error code: -1.'
in str(context.exception)
)
def test_compare_type_error(self):
with self.assertRaises(TypeError) as context:
test_result = compare(None, '3:hRMs3FsRc2:hRpg')
self.assertTrue(
'Invalid first operand type. It cannot be "<class \'NoneType\'>".'
in str(context.exception)
)
with self.assertRaises(TypeError) as context:
test_result = compare('3:hRMs3FsRc2:hRpg', None)
self.assertTrue(
'Invalid second operand type. It cannot be "<class \'NoneType\'>".'
in str(context.exception)
)
def test_get_hash_buffer(self):
test_result = get_hash_buffer('This test fuzzy hash string')
self.assertEqual(test_result, '3:hRMs3FsRc2:hRpg')
test_result = get_hash_buffer(b'This test fuzzy hash string')
self.assertEqual(test_result, '3:hRMs3FsRc2:hRpg')
def test_get_hash_buffer_raise(self):
with mock.patch(
'pyssdeep.ssdeep_pep_452.fuzzy_hash_buf',
fuzzy_hash_buf_mock_raise
):
with self.assertRaises(FuzzyHashError) as context:
test_result = get_hash_buffer(b'This test fuzzy hash string')
self.assertTrue(
'Unable to compute fuzzy hash. Error code: -1.'
in str(context.exception)
)
def test_get_hash_buffer_type_error(self):
with self.assertRaises(TypeError) as context:
test_result = get_hash_buffer(None)
self.assertTrue(
'Invalid data type. The data type cannot be "<class \'NoneType\'>".'
in str(context.exception)
)
def test_get_hash_buffer_encode_error(self):
with self.assertRaises(FuzzyHashError) as context:
test_result = get_hash_buffer(
'тестовая строка для fazzy hash',
'ascii'
)
self.assertTrue(
'Data encoding error. The "encoding" value cannot be'
in str(context.exception)
)
def test_get_hash_file(self):
test_result = get_hash_file('test_file/test_file.txt')
self.assertEqual(test_result, '3:hRMs3FsRcIn:hRpq')
def test_get_hash_file_no_file(self):
with mock.patch('os.path.isfile', os_path_isfile_mock):
with self.assertRaises(IOError) as context:
test_result = get_hash_file('test_file/test_file.txt')
self.assertTrue(
'File "test_file/test_file.txt" not found.'
in str(context.exception)
)
def test_get_hash_file_no_access(self):
with mock.patch('os.access', os_access_mock):
with self.assertRaises(IOError) as context:
test_result = get_hash_file('test_file/test_file.txt')
self.assertTrue(
'File "test_file/test_file.txt" is not available.'
in str(context.exception)
)
def test_get_hash_file_raise(self):
with mock.patch(
'pyssdeep.ssdeep_pep_452.fuzzy_hash_filename',
fuzzy_hash_filename_mock_raise
):
with self.assertRaises(FuzzyHashError) as context:
test_result = get_hash_file('test_file/test_file.txt')
self.assertTrue(
'Unable to compute fuzzy hash of file "test_file/test_file.txt". Error code: -1.'
in str(context.exception)
)
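# Illustrative sketch of the pyssdeep API exercised by the tests above (it
# assumes the ssdeep native library is installed, just as the tests do).
if __name__ == '__main__':
    fuzzy = new()
    fuzzy.update(b'This test fuzzy hash string')
    print(fuzzy.digest())                                      # fuzzy hash signature of the buffer
    print(get_hash_buffer('This test fuzzy hash string'))      # '3:hRMs3FsRc2:hRpg' per the tests above
    print(compare('3:hRMs3FsRc2:hRpg', '3:hRMs3FsRc2:hRpg'))   # identical signatures -> 100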
| 35.604938 | 93 | 0.640083 |
4a238b84455a0c4d8771f8a729920fbf526e59c7 | 3,128 | py | Python | src/cyborgbackup/main/management/commands/cleanup_jobs.py | ikkemaniac/cyborgbackup | b11139fa9632f745a3c288787c51fbf814b961fa | ["BSD-3-Clause"] | null | null | null | src/cyborgbackup/main/management/commands/cleanup_jobs.py | ikkemaniac/cyborgbackup | b11139fa9632f745a3c288787c51fbf814b961fa | ["BSD-3-Clause"] | null | null | null | src/cyborgbackup/main/management/commands/cleanup_jobs.py | ikkemaniac/cyborgbackup | b11139fa9632f745a3c288787c51fbf814b961fa | ["BSD-3-Clause"] | null | null | null |
# Python
import datetime
import logging
# Django
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from django.utils.timezone import now
# CyBorgBackup
from cyborgbackup.main.models import Job, Repository
class Command(BaseCommand):
'''
Management command to cleanup old jobs.
'''
help = 'Remove old jobs from the database.'
def add_arguments(self, parser):
parser.add_argument('--dry-run', dest='dry_run', action='store_true',
default=False, help='Dry run mode (show items that would '
'be removed)')
parser.add_argument('--jobs', dest='only_jobs', action='store_true',
default=True,
help='Remove jobs')
def cleanup_jobs(self):
# Sanity check: Is there already a running job on the System?
jobs = Job.objects.filter(status="running")
if jobs.exists():
print('A job is already running, exiting.')
return
repos = Repository.objects.filter(enabled=True)
repoArchives = []
if repos.exists():
for repo in repos:
                lines = self.launch_command(["borg", "list", "::"], repo, repo.repository_key, repo.path)
for line in lines:
archive_name = line.split(' ')[0] #
for type in ('rootfs', 'vm', 'mysql', 'postgresql', 'config', 'piped', 'mail', 'folders'):
if '{}-'.format(type) in archive_name:
repoArchives.append(archive_name)
entries = Job.objects.filter(job_type='job')
if entries.exists():
for entry in entries:
if entry.archive_name != '' and entry.archive_name not in repoArchives:
action_text = 'would delete' if self.dry_run else 'deleting'
self.logger.info('%s %s', action_text, entry.archive_name)
if not self.dry_run:
entry.delete()
return 0, 0
@transaction.atomic
def handle(self, *args, **options):
self.verbosity = int(options.get('verbosity', 1))
self.init_logging()
self.dry_run = bool(options.get('dry_run', False))
model_names = ('jobs',)
models_to_cleanup = set()
for m in model_names:
if options.get('only_%s' % m, False):
models_to_cleanup.add(m)
if not models_to_cleanup:
models_to_cleanup.update(model_names)
for m in model_names:
if m in models_to_cleanup:
skipped, deleted = getattr(self, 'cleanup_%s' % m)()
if self.dry_run:
self.logger.log(99, '%s: %d would be deleted, %d would be skipped.', m.replace('_', ' '),
deleted, skipped)
else:
                    self.logger.log(99, '%s: %d deleted, %d skipped.', m.replace('_', ' '), deleted, skipped)
 | 38.617284 | 115 | 0.54156 |
4a238bc307a7f833f58a2de99a5949f4fa80d4fd | 2,550 | py | Python | test.py | jiniannet/jnt.py | a0418142fba49883a7897b12f4aef232ac9feee8 | ["MIT"] | null | null | null | test.py | jiniannet/jnt.py | a0418142fba49883a7897b12f4aef232ac9feee8 | ["MIT"] | null | null | null | test.py | jiniannet/jnt.py | a0418142fba49883a7897b12f4aef232ac9feee8 | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
from jntemplate import Template,engine,BaseLoader,FileLoader,engine
from timeit import timeit
import time
#from jntemplate import Lexer
# engine.configure(None)
# lexer = Lexer("${user.name}23412BAESFD$225B${name}${none}")
# arr = lexer.parse()
# for c in arr:
# print(c.string())
# dic = {"aaa":1}
# #dic.fromkeys
# t ="2"
# print(type(1))
# print(type(1.1))
# print(type(1.11112545225))
# print(type(""))
# print(type([]))
# print(type({}))
# print(type(True))
# class Parent(object):
# "父类"
# parentAttr = 100
# def __init__(self):
# super(Parent,self).__init__()
# print ("调用父类构造函数")
# def parentMethod(self):
# print ('调用父类方法')
# def setAttr(self, attr):
# Parent.parentAttr = attr
# def getAttr(self):
# print ("父类属性 :", Parent.parentAttr)
# def bbb(self):
# print ("父类bbb")
# class Child(Parent):
# "定义子类"
# def childMethod(self):
# print ('调用子类方法 child method')
# # 在子类中调用父类方法
# print (Parent.getAttr(self))
# def bbb(self):
# print ("Child类bbb")
# class DD(Child):
# "定义子类"
# def bbb(self):
# print ("dd类bbb")
# c = DD()
# c.childMethod()
#engine.configure(None)
# t = Template("hello ${name}")
# t.set("name","jnt4py")
# print(t.render())
#print(hasattr(t,"stringbb"))
#s = getattr(t,"string")
#print(s)
#print(getattr(t,"string"))
#print(dir(getattr(t,"context")) )
#print(t.string())
# class dd:
# def test(self,a,b):
# return a+b
# def test1():
# r = dd()
# arr=["test code:","success"]
# eval("r.test(arr[0],arr[1])")
# def test2():
# r = dd()
# arr=["test code:","success"]
# r.test(arr[0],arr[1])
# print(timeit('test1()', 'from __main__ import test1', number=10000))
# print(timeit('test2()', 'from __main__ import test2', number=10000))
# arr = [1,2,3,4,5,6,7]
# print(arr[2:-3])
# print(arr[2:len(arr)-3])
# g = lambda x,y: x +y
# text = "${g(2,8)}vvvvv"
# template = Template(text)
# template.set("g",g)
# print( template.render())
engine.configure()
# text = "$str.upper()"
# template = engine.create_template(text)
# template.set("str","hello jnt4py")
# render = template.render()
template = engine.create("$data[2]")
template.set("data", [7, 0, 2, 0, 6])
render = template.render()
print( render)
# list = ["1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11"]
# print(list[1:3])
# print(list[1:])
#print(type(time.time()))
# dic = {"aaa":1,"bbb":2}
# for n in dic:
#     print(dic[n])
 | 19.615385 | 70 | 0.569412 |
4a238c2e3bd5689ccedacefa9c63fd32e387bfae | 5,662 | py | Python | badsources.py | SiarheiGribov/pyBot | f3257a9ffb3f022ad6c67b0eb7d25b1054a512a4 | ["MIT"] | null | null | null | badsources.py | SiarheiGribov/pyBot | f3257a9ffb3f022ad6c67b0eb7d25b1054a512a4 | ["MIT"] | null | null | null | badsources.py | SiarheiGribov/pyBot | f3257a9ffb3f022ad6c67b0eb7d25b1054a512a4 | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
import sys
sys.path.append('pyBot/ext_libs')
import re
import os
import ast
import json
import time
import login
import requests
import ConfigParser
from urllib2 import urlopen
from bs4 import BeautifulSoup
from sseclient import SSEClient as EventSource
reload(sys)
sys.setdefaultencoding('utf8')
days_history = 5
config = ConfigParser.RawConfigParser()
config.read(os.path.abspath(os.path.dirname(__file__)) + '/bottoken.ini')
bottoken = (config.get('Token', 'bottoken'))
URL_BL = 'https://ru.wikipedia.org/w/?action=raw&utf8=1&title=User:IluvatarBot/Badlinks/links'
attempt = 0
get_complete = False
while get_complete == False:
try:
bl_page = urlopen(URL_BL).readlines()
bl = []
for i in bl_page:
bl.append(str(i).decode('UTF-8').rstrip('\n').split(",|,"))
get_complete = True
except:
attempt += 1
if attempt == 10:
print(u"Ошибка при получении чёрного списка ссылок")
sys.exit()
time.sleep(300)
url = 'https://stream.wikimedia.org/v2/stream/recentchange'
for event in EventSource(url):
if event.event == 'message':
try:
change = json.loads(event.data)
except ValueError:
pass
else:
if ('{wiki}'.format(**change)=="ruwiki") and (('{type}'.format(**change)=="edit") or ('{type}'.format(**change)=="new")) and ('{bot}'.format(**change)=="False") and ('{namespace}'.format(**change)=="0"):
revision=ast.literal_eval('{revision}'.format(**change))
new_id = str('{new}'.format(**revision))
res = 0
diff = ""
if '{type}'.format(**change)=="new":
URL_DIFF = "https://ru.wikipedia.org/w/index.php?action=raw&title=" + str('{title}'.format(**change))
try:
diff = requests.post(URL_DIFF).text
old_id = 0
except:
continue
else:
old_id = str('{old}'.format(**revision))
URL_DIFF = "https://ru.wikipedia.org/w/api.php?action=compare&format=json&prop=diff&utf8=1&fromrev=" + old_id + "&torev=" + new_id
try:
diff_parsed = requests.post(URL_DIFF).json()
except:
continue
for changeDiff in BeautifulSoup(diff_parsed['compare']['*'], "html.parser").findAll("ins", {"class": "diffchange diffchange-inline"}):
diff += str(changeDiff) + "\n"
for diffAdd in BeautifulSoup(diff_parsed['compare']['*'], "html.parser").findAll("tr"):
if not "diffchange diffchange-inline" in str(diffAdd) and not "diff-deletedline" in str(diffAdd):
for diffAdd2 in BeautifulSoup(str(diffAdd), "html.parser").findAll("", {"class": "diff-addedline"}):
diff += str(diffAdd2) + "\n"
for i in bl:
if re.search(r'' + i[0], diff, re.I):
if res == 0:
res = str(i[1])
else:
res += ", " + str(i[1])
if not res==0:
prePub = "{{User:IluvatarBot/Подозрительный источник|" + str('{title}'.format(**change)) + "|" + str(old_id) + "|" + str(new_id) + "|" + str(res) + "|" + str('{user}'.format(**change)) + "|" + str(int(time.time())) + "}}" + "\n"
token, cookies = login.login()
time.sleep(1)
raportPage_URL = "https://ru.wikipedia.org/w/?action=raw&utf8=1&title=User:IluvatarBot/Badlinks/raport"
try:
raport_page = urlopen(raportPage_URL).readlines()
except:
print("Error during get raport_page.")
continue
new_pub = []
for line in raport_page: # удаляем устаревшие шаблоны
timeReg = re.findall(r"(\d*)?\}\}", line.decode("utf-8").strip('\r\n'), re.U | re.I)
if not len(timeReg) > 0:
print("Timestamp error: " + u"один из шаблонов на странице не имеет отметки времени.")
sys.exit()
timePast = int(time.time()) - int(timeReg[0])
hoursPast = int(days_history) * 24 * 60 * 60
if not timePast >= int(hoursPast):
new_pub.append(line)
raport_page = ''.join(map(str,new_pub))
raport_page = prePub + raport_page
payload = {'action': 'edit', 'format': 'json', 'title': 'User:IluvatarBot/Badlinks/raport', 'utf8': '', 'text': raport_page, 'summary': 'Выгрузка отчёта: сомнительные источники', 'token': token}
payload2 = {'type': 'sources', 'user': str('{user}'.format(**change)), 'oldid': str(old_id), 'diff': str(new_id),
'title': str('{title}'.format(**change)), 'reason': str(res), 'bottoken': bottoken}
time.sleep(1)
try:
req = requests.post('https://ru.wikipedia.org/w/api.php', data=payload, cookies=cookies)
req = requests.post('https://tools.wmflabs.org/iluvatarbot/remove.php', data=payload2)
except:
print("Error during get publishing.")
                    continue
 | 46.793388 | 248 | 0.504415 |
4a238c90fefd377dd69f1105a5dd6dff6122585b | 10,560 | py | Python | scratch/losses/cauchy_selec_bound_train_gamma/plot_train_alpha_cases.py | finn-dodgson/DeepHalos | 86e0ac6c24ac97a0a2a0a60a7ea3721a04bd050c | ["MIT"] | 2 | 2021-07-26T10:56:33.000Z | 2021-12-20T17:30:53.000Z | scratch/losses/cauchy_selec_bound_train_gamma/plot_train_alpha_cases.py | finn-dodgson/DeepHalos | 86e0ac6c24ac97a0a2a0a60a7ea3721a04bd050c | ["MIT"] | 1 | 2021-11-25T21:01:19.000Z | 2021-12-05T01:40:53.000Z | scratch/losses/cauchy_selec_bound_train_gamma/plot_train_alpha_cases.py | finn-dodgson/DeepHalos | 86e0ac6c24ac97a0a2a0a60a7ea3721a04bd050c | ["MIT"] | 1 | 2021-11-27T02:35:10.000Z | 2021-11-27T02:35:10.000Z |
import numpy as np
import matplotlib.pyplot as plt
import re
def read_training_log_file(filepath):
with open(filepath) as f:
h = f.readlines()[1:]
test = [re.sub(r'"', '', hi) for hi in h]
test = [re.sub(r'\[', '', hi) for hi in test]
test = [re.sub(r'\]', '', hi) for hi in test]
test = [re.sub(r'\n', '', hi) for hi in test]
for i, line in enumerate(test):
test[i] = [float(elem) for elem in line.split(",")]
return np.array(test)
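# Minimal self-check of the parser above (the log contents here are hypothetical:
# a Keras-style CSV log whose numeric fields may be wrapped in quotes/brackets,
# which is what the regex clean-up removes).
if __name__ == "__main__":
    import os
    import tempfile
    demo = 'epoch,loss,val_loss\n"0",1.25,[1.30]\n"1",0.98,[1.10]\n'
    with tempfile.NamedTemporaryFile("w", suffix=".log", delete=False) as fh:
        fh.write(demo)
        demo_name = fh.name
    print(read_training_log_file(demo_name))  # -> 2x3 float array
    os.remove(demo_name)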
path = "/Users/lls/Documents/deep_halos_files/regression/test_lowmass/reg_10000_perbin/larger_net" \
"/cauchy_selec_gamma_bound/group_reg_alpha/train_alpha/"
cases = ["l2_conv_l1_dense_wdropout/new/", "l2_conv_l21_l1_dense/", "l2_conv_l21_l1_dense_wdropout/new/"]
labels = ["L2 (conv) + L1 (dense) + dropout", "L2 (conv) + L1 (dense) + L1-g (dense)",
"L2 (conv) + L1 (dense) + L1-g (dense) + dropout"]
# cases = ["l2_conv_l21_l1_dense/lr/0.001"]
#f, axes = plt.subplots(len(cases), 3, sharex=True, figsize=(13, 8))
f, axes = plt.subplots(len(cases), 3, sharex=True, figsize=(13, 8))
color = ["C" + str(i) for i in range(len(cases))]
for i, case in enumerate(cases):
ax = axes[0, i]
tr = read_training_log_file(path + case + "/training.log")
ax.plot(tr[:,0], tr[:,1], lw=1.5, color=color[i])
ax.plot(tr[:, 0], tr[:, 3], ls="--", color=color[i], lw=1.5)
# ax.legend(loc="best", fontsize=14)
if i == 0:
ax.set_ylabel('Loss')
ax.set_title(labels[i], fontsize=14)
# p = np.load(path + case + "/trained_loss_params.npy")
ep = np.insert(tr[:, 0], 0, tr[0, 0] - 1)
ax = axes[1, i]
g = np.load(path + case + "/trained_loss_gamma.npy")
ax.plot(ep, g, color=color[i])
ax.axhline(y=0.1, color="k", ls="--")
ax.axhline(y=0.4, color="k", ls="--")
if i == 0:
ax.set_ylabel(r'$\gamma$')
ax = axes[2, i]
a = np.load(path + case + "/trained_loss_alpha.npy")
ax.plot(ep, a, color=color[i])
ax.axhline(y=-3, color="k", ls="--")
ax.axhline(y=-4, color="k", ls="--")
if i == 0:
ax.set_ylabel(r'$\log_{10} (\alpha)$')
plt.subplots_adjust(wspace=0.05, hspace=0.15, left=0.08, bottom=0.1, top=0.95)
# plt.subplots_adjust(wspace=0.4, hspace=0.5, left=0.12, bottom=0.15, top=0.98)
f.text(0.5, 0.01, "Epoch")
for j in range(3):
if j == 0:
[axes[j, i].set_yscale("log") for i in range(3)]
[axes[j, i].yaxis.set_major_formatter(plt.NullFormatter()) for i in range(1, 3)]
ax0_lim = np.concatenate([list(axes[j, i].get_ylim()) for i in range(3)])
[axes[j, i].set_ylim(ax0_lim.min(), ax0_lim.max()) for i in range(3)]
############ PLOT DIFFERENT LEARNING RATES ###############
path = "/Users/lls/Documents/deep_halos_files/regression/test_lowmass/reg_10000_perbin/larger_net" \
"/cauchy_selec_gamma_bound/group_reg_alpha/train_alpha/l2_conv_l21_l1_dense/"
cases = [".", "lr/0.0005/", "lr/0.001/"]
labels = [r"lr$_\mathrm{init} = 10^{-4}$", r"lr$_\mathrm{init} = 5\times 10^{-4}$", "lr$_\mathrm{init} = 10^{-3}$"]
f, axes = plt.subplots(3, len(cases), sharex=True, figsize=(13, 8))
color = ["C" + str(i) for i in range(len(cases))]
for i, case in enumerate(cases):
print(i)
ax = axes[0, i]
tr = read_training_log_file(path + case + "/training.log")
ax.plot(tr[:,0], tr[:,1], lw=1.5, color=color[i], label="step decay")
ax.plot(tr[:, 0], tr[:, 3], ls="--", color=color[i], lw=1.5)
ax.legend(loc="best", fontsize=14)
if i == 0:
ax.set_ylabel('Loss')
ax.set_title(labels[i], fontsize=14)
# p = np.load(path + case + "/trained_loss_params.npy")
ep = np.insert(tr[:, 0], 0, tr[0, 0] - 1)
ax = axes[1, i]
g = np.load(path + case + "/trained_loss_gamma.npy")
ax.plot(ep, g, color=color[i])
ax.axhline(y=0.1, color="k", ls="--")
ax.axhline(y=0.4, color="k", ls="--")
if i == 0:
ax.set_ylabel(r'$\gamma$')
ax = axes[2, i]
a = np.load(path + case + "/trained_loss_alpha.npy")
ax.plot(ep, a, color=color[i])
ax.axhline(y=-3, color="k", ls="--")
ax.axhline(y=-4, color="k", ls="--")
if i == 0:
ax.set_ylabel(r'$\log_{10} (\alpha)$')
plt.subplots_adjust(wspace=0.2, hspace=0.15, left=0.08, bottom=0.1, top=0.95)
f.text(0.5, 0.01, "Epoch")
i = 1
color = "C3"
case1 = cases[i] + "no_decay/"
ax = axes[0, i]
tr = read_training_log_file(path + case1 + "/training.log")
ax.plot(tr[:, 0], tr[:, 1], lw=1.5, color=color, label="no decay")
ax.plot(tr[:, 0], tr[:, 2], ls="--", color=color, lw=1.5)
ax.legend(loc="best", fontsize=14)
# p = np.load(path + case + "/trained_loss_params.npy")
ep = np.insert(tr[:, 0], 0, tr[0, 0] - 1)
ax = axes[1, i]
g = np.load(path + case1 + "/trained_loss_gamma.npy")
ax.plot(ep, g, color=color)
ax = axes[2, i]
a = np.load(path + case1 + "/trained_loss_alpha.npy")
ax.plot(ep, a, color=color)
i = 1
color = "C4"
case2 = cases[i] + "exp_decay/"
ax = axes[0, i]
tr = read_training_log_file(path + case2 + "/training.log")
ax.plot(tr[:, 0], tr[:, 1], lw=1.5, color=color, label="exp decay")
ax.plot(tr[:, 0], tr[:, 2], ls="--", color=color, lw=1.5)
ax.legend(loc="best", fontsize=14)
# p = np.load(path + case + "/trained_loss_params.npy")
ep = np.insert(tr[:, 0], 0, tr[0, 0] - 1)
ax = axes[1, i]
g = np.load(path + case2 + "/trained_loss_gamma.npy")
ax.plot(ep, g, color=color)
ax = axes[2, i]
a = np.load(path + case2 + "/trained_loss_alpha.npy")
ax.plot(ep, a, color=color)
########### PLOT GRID SEARCH IN LOG-ALPHA ##################
path_all = "/Users/lls/Documents/deep_halos_files/regression/test_lowmass/reg_10000_perbin/larger_net/" \
"cauchy_selec_gamma_bound/group_reg_alpha/train_alpha/"
reg_types = ["l2_conv_l21_l1_dense/",
# "l2_conv_l1_dense_wdropout/", "l2_conv_l21_l1_dense_wdropout/"
]
paths = [path_all + reg_type for reg_type in reg_types]
for j, path in enumerate(paths):
reg_type_i = reg_types[j][:-2]
log_alpha_values = [-3.1, -3.3, -3.5, -3.7, -3.9]
cases = ["log_alpha_" + str(l) for l in log_alpha_values]
labelss = [r"$\log(\alpha) = %.1f$" % l for l in log_alpha_values]
f, axes = plt.subplots(len(cases), 2, sharex=True, figsize=(12, 8))
color = ["C" + str(i) for i in range(len(cases))]
for i, case in enumerate(cases):
ax = axes[i, 0]
tr = read_training_log_file(path + case + "/training.log")
if i == 2:
ax.plot(tr[1:, 0], tr[1:,1], lw=1.5, color=color[i], label=labelss[i])
ax.plot(tr[1:, 0], tr[1:, 3], ls="--", color=color[i], lw=1.5)
else:
ax.plot(tr[:, 0], tr[:,1], lw=1.5, color=color[i], label=labelss[i])
ax.plot(tr[:, 0], tr[:, 3], ls="--", color=color[i], lw=1.5)
ax.legend(loc="best", fontsize=14)
# ax.set_ylim(-1, 10)
# ax.set_yscale("log")
if i == 0:
ax.set_title('Loss')
ep = np.insert(tr[:, 0], 0, tr[0, 0] - 1)
ax = axes[i, 1]
g = np.load(path + case + "/trained_loss_gamma.npy")
ax.plot(ep, g[:], color=color[i])
# ax.axhline(y=0.1, color="grey", ls="--")
# ax.axhline(y=0.4, color="grey", ls="--")
ax.set_ylim(0.01, 0.41)
if i == 0:
ax.set_title(r'$\gamma$')
plt.subplots_adjust(wspace=0.2, hspace=0, left=0.08, bottom=0.12, top=0.95)
f.text(0.5, 0.01, "Epoch")
plt.savefig(path + "grid_search_log_alpha_" + reg_type_i + ".png")
########### PLOT GRID SEARCH IN LOG-ALPHA ##################
path_all = "/Users/lls/Documents/deep_halos_files/full_mass_range/xavier/alpha/alpha"
alpha_values = ["-2", "-2.5","-3"]
f, axes = plt.subplots(len(alpha_values), 3, sharex=True, figsize=(14, 8))
color = ["C" + str(i) for i in range(len(alpha_values))]
for i in range(len(alpha_values)):
path = path_all + alpha_values[i]
label = r"$\log(\alpha) = $" + alpha_values[i]
ax = axes[i, 0]
tr = np.loadtxt(path + "/training.log", delimiter=",", skiprows=1)
ax.plot(tr[:, 0], tr[:,2], lw=1.5, color=color[i], label=label)
ax.plot(tr[:, 0], tr[:, 5], ls="--", color=color[i], lw=1.5)
ax.legend(loc="best", fontsize=14)
# ax.set_ylim(-1, 10)
if i==0 or i==1:
ax.set_yscale("log")
if i == 0:
ax.set_title('Loss')
ax = axes[i, 1]
tr = np.loadtxt(path + "/training.log", delimiter=",", skiprows=1)
ax.plot(tr[:, 0], tr[:, 1], lw=1.5, color=color[i], label=label)
ax.plot(tr[:, 0], tr[:, 4], ls="--", color=color[i], lw=1.5)
if i == 0:
ax.set_title('Likelihood')
ax.set_ylim(-0.1, 0.5)
ep = np.insert(tr[:, 0], 0, tr[0, 0] - 1)
ax = axes[i, 2]
g = np.loadtxt(path + "/gamma.txt", delimiter=",")
ax.plot(ep, g[:], color=color[i])
# ax.axhline(y=0.1, color="grey", ls="--")
# ax.axhline(y=0.4, color="grey", ls="--")
ax.set_ylim(0.01, 0.41)
if i == 0:
ax.set_title(r'$\gamma$')
plt.subplots_adjust(wspace=0.2, hspace=0, left=0.08, bottom=0.12, top=0.95)
f.text(0.5, 0.01, "Epoch")
path_all = "/Users/lls/Documents/deep_halos_files/restricted_mass_range/alpha"
alpha_values = ["-2", "-3"]
f, axes = plt.subplots(len(alpha_values), 3, sharex=True, figsize=(14, 8))
color = ["C" + str(i) for i in range(len(alpha_values))]
for i in range(len(alpha_values)):
path = path_all + alpha_values[i]
label = r"$\log(\alpha) = $" + alpha_values[i]
ax = axes[i, 0]
tr = np.loadtxt(path + "/training.log", delimiter=",", skiprows=1)
ax.plot(tr[:, 0], tr[:,2], lw=1.5, color=color[i], label=label)
ax.plot(tr[:, 0], tr[:, 5], ls="--", color=color[i], lw=1.5)
ax.legend(loc="best", fontsize=14)
# ax.set_ylim(-1, 10)
#if i==0 or i==1:
# ax.set_yscale("log")
if i == 0:
ax.set_title('Loss')
ax = axes[i, 1]
tr = np.loadtxt(path + "/training.log", delimiter=",", skiprows=1)
ax.plot(tr[:, 0], tr[:, 1], lw=1.5, color=color[i], label=label)
ax.plot(tr[:, 0], tr[:, 4], ls="--", color=color[i], lw=1.5)
if i == 0:
ax.set_title('Likelihood')
ax.set_ylim(-0.5, 1)
ep = np.insert(tr[:, 0], 0, tr[0, 0] - 1)
ax = axes[i, 2]
# g = np.loadtxt(path + "/gamma.txt", delimiter=",")
g = np.load(path + "/trained_loss_gamma.npy")
#g = np.insert(g, 0, 0.2)
ax.plot(ep, g[:], color=color[i])
# ax.axhline(y=0.1, color="grey", ls="--")
# ax.axhline(y=0.4, color="grey", ls="--")
ax.set_ylim(0.01, 0.41)
if i == 0:
ax.set_title(r'$\gamma$')
plt.subplots_adjust(wspace=0.2, hspace=0, left=0.08, bottom=0.12, top=0.95)
f.text(0.5, 0.01, "Epoch")
| 35.436242 | 115 | 0.573864 |
4a238ccc164b4b5bf3d58505c1e34be7a882403d | 671 | py | Python | nikola/data/themes/base/messages/messages_ja.py | doctorlard/nikola | d6dd2fc4af6fa0d92dfda500393bbd235b60df2a | ["MIT"] | null | null | null | nikola/data/themes/base/messages/messages_ja.py | doctorlard/nikola | d6dd2fc4af6fa0d92dfda500393bbd235b60df2a | ["MIT"] | 1 | 2019-08-18T13:37:20.000Z | 2019-08-18T16:09:08.000Z | nikola/data/themes/base/messages/messages_ja.py | doctorlard/nikola | d6dd2fc4af6fa0d92dfda500393bbd235b60df2a | ["MIT"] | null | null | null |
# -*- encoding:utf-8 -*-
from __future__ import unicode_literals
MESSAGES = {
"Also available in": "他の言語で読む",
"Archive": "過去の記事",
"Categories": "",
"LANGUAGE": "日本語",
"More posts about": "タグ",
"Newer posts": "新しい記事",
"Next post": "次の記事",
"Older posts": "過去の記事",
"Original site": "元のサイト",
"Posted": "投稿日時",
"Posts about %s": "%sについての記事",
"Posts for year %s": "%s年の記事",
"Posts for {month} {year}": "{year}年{month}月の記事",
"Previous post": "前の記事",
"Read in English": "日本語で読む",
"Read more": "続きを読む",
"Source": "ソース",
"Tags and Categories": "",
"Tags": "タグ",
"old posts page %d": "前の記事 %dページ目",
}
| 25.807692 | 53 | 0.545455 |
4a238cd37be886cd343634146d984988cd85ca94 | 94 | py | Python | python/numpy/eye_and_identity.py | avenet/hackerrank | e522030a023af4ff50d5fc64bd3eba30144e006c | ["MIT"] | null | null | null | python/numpy/eye_and_identity.py | avenet/hackerrank | e522030a023af4ff50d5fc64bd3eba30144e006c | ["MIT"] | null | null | null | python/numpy/eye_and_identity.py | avenet/hackerrank | e522030a023af4ff50d5fc64bd3eba30144e006c | ["MIT"] | null | null | null |
import numpy
rows, columns = map(int, input().split())
print(numpy.eye(rows, columns, k=0))
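# Example I/O (assumed HackerRank-style input): entering "3 3" prints
# numpy.eye(3, 3, k=0), i.e. ones on the main diagonal and zeros elsewhere
# (exact spacing depends on the NumPy print options):
# [[1. 0. 0.]
#  [0. 1. 0.]
#  [0. 0. 1.]]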
| 15.666667 | 41 | 0.680851 |
4a238d4a4776a429569b780a87dbbb2ba3696bc8 | 5,599 | bzl | Python | go/private/rules/binary.bzl | mboes/rules_go | da3744eac9bdb8ff30207ec527266e5b94c9c784 | ["Apache-2.0"] | null | null | null | go/private/rules/binary.bzl | mboes/rules_go | da3744eac9bdb8ff30207ec527266e5b94c9c784 | ["Apache-2.0"] | null | null | null | go/private/rules/binary.bzl | mboes/rules_go | da3744eac9bdb8ff30207ec527266e5b94c9c784 | ["Apache-2.0"] | null | null | null |
# Copyright 2014 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
load(
"@io_bazel_rules_go//go/private:context.bzl",
"go_context",
)
load(
"@io_bazel_rules_go//go/private:common.bzl",
"go_filetype",
)
load(
"@io_bazel_rules_go//go/private:rules/aspect.bzl",
"go_archive_aspect",
)
load(
"@io_bazel_rules_go//go/private:rules/rule.bzl",
"go_rule",
)
load(
"@io_bazel_rules_go//go/private:providers.bzl",
"GoLibrary",
)
load(
"@io_bazel_rules_go//go/platform:list.bzl",
"GOARCH",
"GOOS",
)
load(
"@io_bazel_rules_go//go/private:mode.bzl",
"LINKMODES",
"LINKMODE_NORMAL",
)
def _go_binary_impl(ctx):
"""go_binary_impl emits actions for compiling and linking a go executable."""
go = go_context(ctx)
library = go.new_library(go, importable = False)
source = go.library_to_source(go, ctx.attr, library, ctx.coverage_instrumented())
name = ctx.attr.basename
if not name:
name = ctx.label.name
executable = None
if ctx.attr.out:
# Use declare_file instead of attr.output(). When users set output files
# directly, Bazel warns them not to use the same name as the rule, which is
# the common case with go_binary.
executable = ctx.actions.declare_file(ctx.attr.out)
archive, executable, runfiles = go.binary(
go,
name = name,
source = source,
gc_linkopts = gc_linkopts(ctx),
version_file = ctx.version_file,
info_file = ctx.info_file,
executable = executable,
)
return [
library,
source,
archive,
OutputGroupInfo(
cgo_exports = archive.cgo_exports,
),
DefaultInfo(
files = depset([executable]),
runfiles = runfiles,
executable = executable,
),
]
go_binary = go_rule(
_go_binary_impl,
attrs = {
"basename": attr.string(),
"data": attr.label_list(
allow_files = True,
cfg = "data",
),
"srcs": attr.label_list(allow_files = go_filetype),
"deps": attr.label_list(
providers = [GoLibrary],
aspects = [go_archive_aspect],
),
"embed": attr.label_list(
providers = [GoLibrary],
aspects = [go_archive_aspect],
),
"importpath": attr.string(),
"pure": attr.string(
values = [
"on",
"off",
"auto",
],
default = "auto",
),
"static": attr.string(
values = [
"on",
"off",
"auto",
],
default = "auto",
),
"race": attr.string(
values = [
"on",
"off",
"auto",
],
default = "auto",
),
"msan": attr.string(
values = [
"on",
"off",
"auto",
],
default = "auto",
),
"goos": attr.string(
values = GOOS.keys() + ["auto"],
default = "auto",
),
"goarch": attr.string(
values = GOARCH.keys() + ["auto"],
default = "auto",
),
"gc_goopts": attr.string_list(),
"gc_linkopts": attr.string_list(),
"x_defs": attr.string_dict(),
"linkmode": attr.string(values = LINKMODES, default = LINKMODE_NORMAL),
"out": attr.string(),
},
executable = True,
)
"""See go/core.rst#go_binary for full documentation."""
go_tool_binary = go_rule(
_go_binary_impl,
bootstrap = True,
attrs = {
"basename": attr.string(),
"data": attr.label_list(
allow_files = True,
cfg = "data",
),
"srcs": attr.label_list(allow_files = go_filetype),
"deps": attr.label_list(providers = [GoLibrary]),
"embed": attr.label_list(providers = [GoLibrary]),
"gc_goopts": attr.string_list(),
"gc_linkopts": attr.string_list(),
"x_defs": attr.string_dict(),
"linkmode": attr.string(values = LINKMODES, default = LINKMODE_NORMAL),
"out": attr.string(),
"_hostonly": attr.bool(default = True),
},
executable = True,
)
"""
This is used instead of `go_binary` for tools that are executed inside
actions emitted by the go rules. This avoids a bootstrapping problem. This
is very limited and only supports sources in the main package with no
dependencies outside the standard library.
See go/core.rst#go_binary for full documentation.
TODO: This can merge with go_binary when toolchains become optional
We add a bootstrap parameter that defaults to false, set it to true on "tool" binaries
and it can pick the boostrap toolchain when it sees it.
"""
def gc_linkopts(ctx):
gc_linkopts = [
ctx.expand_make_variables("gc_linkopts", f, {})
for f in ctx.attr.gc_linkopts
]
return gc_linkopts
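# Illustrative BUILD-file usage of the rule defined above (a sketch only; the
# target name, source file and dependency label below are hypothetical):
#
#     load("@io_bazel_rules_go//go:def.bzl", "go_binary")
#
#     go_binary(
#         name = "hello",
#         srcs = ["main.go"],
#         deps = ["//mylib:go_default_library"],
#         pure = "on",
#     )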
| 29.161458 | 86 | 0.57546 |
4a238d7fe50a14937123199240dec3ba02fb0b85 | 432 | py | Python | payment_gateway/payment_gateway/celeryconfig.py | MayaraMachado/sns_and_sqs_project | 4fcc5bbb5f6841543ea8dda353dd85a43024f683 | ["MIT"] | 5 | 2020-06-22T21:29:54.000Z | 2021-11-01T20:12:04.000Z | payment_gateway/payment_gateway/celeryconfig.py | MayaraMachado/sns_and_sqs_project | 4fcc5bbb5f6841543ea8dda353dd85a43024f683 | ["MIT"] | 5 | 2021-03-30T13:38:15.000Z | 2021-09-22T19:10:27.000Z | payment_gateway/payment_gateway/celeryconfig.py | MayaraMachado/sns_and_sqs_project | 4fcc5bbb5f6841543ea8dda353dd85a43024f683 | ["MIT"] | null | null | null |
from django.conf import settings
from datetime import timedelta
CELERY_APPS = [
'api',
]
CELERY_IMPORTS = ['api.tasks']
BROKER_URL = 'redis://redis:6379/10'
CELERY_RESULT_BACKEND = 'redis://redis:6379/10'
CELERYBEAT_SCHEDULER = 'django_celery_beat.schedulers:DatabaseScheduler'
CELERY_ACCEPT_CONTENT = ['application/json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_TIMEZONE = 'America/Maceio'
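# A minimal sketch of how a settings module like this is typically consumed
# (assumed; the Celery application name below is hypothetical):
#
#     from celery import Celery
#     app = Celery('payment_gateway')
#     app.config_from_object('payment_gateway.celeryconfig')
#
# Periodic tasks are then scheduled through django_celery_beat, as selected by
# CELERYBEAT_SCHEDULER above.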
| 24 | 72 | 0.780093 |
4a238e95358a764abb84f136e780c17c74f507d4 | 276 | py | Python | homeassistant/components/locative/config_flow.py | alindeman/home-assistant | b274b10f3874c196f0db8f9cfa5f47eb756d1f8e | ["Apache-2.0"] | 4 | 2019-07-03T22:36:57.000Z | 2019-08-10T15:33:25.000Z | homeassistant/components/locative/config_flow.py | alindeman/home-assistant | b274b10f3874c196f0db8f9cfa5f47eb756d1f8e | ["Apache-2.0"] | 7 | 2019-08-23T05:26:02.000Z | 2022-03-11T23:57:18.000Z | homeassistant/components/locative/config_flow.py | alindeman/home-assistant | b274b10f3874c196f0db8f9cfa5f47eb756d1f8e | ["Apache-2.0"] | 2 | 2018-08-15T03:59:35.000Z | 2018-10-18T12:20:05.000Z |
"""Config flow for Locative."""
from homeassistant.helpers import config_entry_flow
from .const import DOMAIN
config_entry_flow.register_webhook_flow(
DOMAIN,
'Locative Webhook',
{
'docs_url': 'https://www.home-assistant.io/components/locative/'
}
)
| 21.230769 | 72 | 0.713768 |
4a238ede1ede0c557c079deb1277970321775cd7 | 3,212 | py | Python | pettingzoo/utils/wrappers/order_enforcing.py | mlanas/PettingZoo | 58d47c68057bdf37720f961c1a372b4671b8b777 | ["Apache-2.0"] | 1 | 2021-09-13T17:47:48.000Z | 2021-09-13T17:47:48.000Z | pettingzoo/utils/wrappers/order_enforcing.py | mlanas/PettingZoo | 58d47c68057bdf37720f961c1a372b4671b8b777 | ["Apache-2.0"] | null | null | null | pettingzoo/utils/wrappers/order_enforcing.py | mlanas/PettingZoo | 58d47c68057bdf37720f961c1a372b4671b8b777 | ["Apache-2.0"] | null | null | null |
from ..env import AECIterable, AECIterator
from ..env_logger import EnvLogger
from .base import BaseWrapper
class OrderEnforcingWrapper(BaseWrapper):
'''
check all call orders:
* error on getting rewards, dones, infos, agent_selection before reset
* error on calling step, observe before reset
* error on iterating without stepping or resetting environment.
* warn on calling close before render or reset
* warn on calling step after environment is done
'''
def __init__(self, env):
self._has_reset = False
self._has_rendered = False
self._has_updated = False
super().__init__(env)
def __getattr__(self, value):
'''
raises an error message when data is gotten from the env
which should only be gotten after reset
'''
if value == "unwrapped":
return self.env.unwrapped
elif value == "agent_order":
raise AttributeError("agent_order has been removed from the API. Please consider using agent_iter instead.")
elif value in {"rewards", "dones", "infos", "agent_selection", "num_agents", "agents"}:
raise AttributeError(f"{value} cannot be accessed before reset")
else:
raise AttributeError(f"'{type(self).__name__}' object has no attribute '{value}'")
def seed(self, seed=None):
self._has_reset = False
super().seed(seed)
def render(self, mode='human'):
if not self._has_reset:
EnvLogger.error_render_before_reset()
assert mode in self.metadata['render.modes']
self._has_rendered = True
return super().render(mode)
def step(self, action):
if not self._has_reset:
EnvLogger.error_step_before_reset()
elif not self.agents:
self._has_updated = True
EnvLogger.warn_step_after_done()
return None
else:
self._has_updated = True
super().step(action)
def observe(self, agent):
if not self._has_reset:
EnvLogger.error_observe_before_reset()
return super().observe(agent)
def state(self):
if not self._has_reset:
EnvLogger.error_state_before_reset()
return super().state()
def agent_iter(self, max_iter=2**63):
if not self._has_reset:
EnvLogger.error_agent_iter_before_reset()
return AECOrderEnforcingIterable(self, max_iter)
def reset(self):
self._has_reset = True
self._has_updated = True
super().reset()
def __str__(self):
if hasattr(self, 'metadata'):
return str(self.env) if self.__class__ is OrderEnforcingWrapper else f'{type(self).__name__}<{str(self.env)}>'
else:
return repr(self)
class AECOrderEnforcingIterable(AECIterable):
def __iter__(self):
return AECOrderEnforcingIterator(self.env, self.max_iter)
class AECOrderEnforcingIterator(AECIterator):
def __next__(self):
agent = super().__next__()
assert self.env._has_updated, "need to call step() or reset() in a loop over `agent_iter`"
self.env._has_updated = False
return agent
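# A minimal sketch of the call order this wrapper enforces (assumed usage; any
# AEC environment `raw_env` stands in for a real PettingZoo environment, and
# `action` comes from your policy):
#
#     env = OrderEnforcingWrapper(raw_env)
#     env.reset()                              # required before step/observe/render
#     for agent in env.agent_iter():
#         observation, reward, done, info = env.last()
#         env.step(None if done else action)   # exactly one step() per iteration
#
# Calling step/observe/render before reset() raises through EnvLogger, and
# iterating agent_iter() without stepping trips the _has_updated assertion above.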
| 33.458333 | 122 | 0.640722 |
4a238f1746fb4618a23b1267dee6c5daa441c427 | 1,563 | py | Python | tests/test_respselector.py | yam-ai/yamas | 36ae42862d4d0197fcc3ac22af3a03f2873b3976 | ["Apache-2.0"] | 2 | 2019-10-28T03:58:12.000Z | 2022-02-25T21:02:02.000Z | tests/test_respselector.py | yam-ai/yamas | 36ae42862d4d0197fcc3ac22af3a03f2873b3976 | ["Apache-2.0"] | 7 | 2019-10-26T10:35:17.000Z | 2019-11-07T09:54:22.000Z | tests/test_respselector.py | yam-ai/yamas | 36ae42862d4d0197fcc3ac22af3a03f2873b3976 | ["Apache-2.0"] | null | null | null |
# coding=utf-8
# Copyright 2019 YAM AI Machinery Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import List, Tuple
from http import HTTPStatus
from yamas.respgen import ResponseMaker, ResponseSelector
from yamas.reqresp import ContentType
import pytest
class TestResponseSelector:
respmakers = [
ResponseMaker(HTTPStatus.OK, {}, str(i), ContentType.TEXT, False, {})
for i in range(3)
]
respsels = []
for loop in [True, False]:
respsel = ResponseSelector(loop)
respsels.append((respsel, loop))
for rm in respmakers:
respsel.add_response_maker(rm)
@pytest.mark.parametrize('respsel, loop', respsels)
def test_select_response_selector(self, respsel, loop):
for i in range(5):
if loop:
assert respsel.make_response(tuple()).content_bytes.decode(
'utf-8') == str(i % 3)
else:
assert respsel.make_response(tuple()).content_bytes.decode(
'utf-8') == str(min(i, 2))
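# Illustrative behaviour of ResponseSelector outside the unittest harness (a
# sketch reusing the same ResponseMaker arguments as TestResponseSelector above).
if __name__ == '__main__':
    selector = ResponseSelector(True)  # loop=True cycles through the registered makers
    for i in range(3):
        selector.add_response_maker(
            ResponseMaker(HTTPStatus.OK, {}, str(i), ContentType.TEXT, False, {}))
    print([selector.make_response(tuple()).content_bytes.decode('utf-8')
           for _ in range(5)])  # -> ['0', '1', '2', '0', '1']; with loop=False it would stop at '2'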
| 33.978261 | 77 | 0.673704 |
4a238fcf24a49d817bead1bdf00dcc2733264c65 | 21,936 | py | Python | nasws/cnn/procedures/landmark_procedures.py | kcyu2014/nas-landmarkreg | a00c3619bf4042e446e1919087f0b09fe9fa3a65 | ["MIT"] | 8 | 2021-04-13T01:52:11.000Z | 2022-03-30T03:53:12.000Z | nasws/cnn/procedures/landmark_procedures.py | kcyu2014/nas-landmarkreg | a00c3619bf4042e446e1919087f0b09fe9fa3a65 | ["MIT"] | 4 | 2021-05-29T01:41:00.000Z | 2021-08-24T09:40:43.000Z | nasws/cnn/procedures/landmark_procedures.py | kcyu2014/nas-landmarkreg | a00c3619bf4042e446e1919087f0b09fe9fa3a65 | ["MIT"] | null | null | null |
"""
Add landmark-based regularization to the training procedures.
"""
import torch
import math
from nasws.cnn.procedures.utils_maml import assign_parameters
from utils import accuracy
from .maml_procedure import named_module_parameters, task_update_step
from .maml_procedure import _summarize_shared_train as maml_summarize
import logging
from random import randint
import IPython
import numpy as np
import torch.nn as nn
import torch.nn.functional as F
import nasws.cnn.utils
from nasws.cnn.policy.enas_policy import RepeatedDataLoader
"""
In general there are many ways to compute such a ranking loss:
- directly compare against the loss values, or
- use a distance loss (e.g. cosine distance) between data points.
"""
def set_landmark_loss_mode(model, mode: bool):
if isinstance(model, nn.DataParallel):
model.module.set_landmark_mode(mode)
else:
model.set_landmark_mode(mode)
def _summarize_shared_train(curr_step, loss, rankloss, acc=0, acc_5=0, lr=0.0, epoch_steps=1, writer=None, coef=0):
"""Logs a set of training steps."""
logging.info(f'| step {curr_step:3d} '
f'| lr {lr:.4f} '
f'| coef {coef:.4f}'
f'| rank loss {rankloss:.2f} '
f'| loss {loss:.2f} '
f'| acc {acc:8.2f}'
f'| acc-5 {acc_5: 8.2f}')
def rank_cross_entropy_loss(l1, l2):
logit = torch.sigmoid(l1-l2)
# logit = F.prelu(logit, torch.tensor(0.1))
return -torch.log(1 - logit)
def rank_infinite_loss_v1(l1, l2, w1, w2):
d = (l1 - l2) / (w1 - w2)
# in this case: l1 < l2 and w1 < w2, or l1 > l2 and w1 > w2, d > 0. but we should
p = torch.sigmoid(-d)
return F.relu(0.5 - p)
def rank_infinite_loss_v2(l1, l2, w1, w2):
d = (l1 - l2) / (w1 - w2)
p = torch.sigmoid(-d)
return F.softplus(0.5 - p)
def rank_infinite_relu(l1, l2, w1, w2):
d = (l1 - l2) * (w1 - w2)
return F.relu(d)
def rank_infinite_softplus(l1, l2, w1, w2):
d = (l1 - l2) * (w1 - w2)
return F.softplus(d, beta=5)
def rank_hinge_sign_infinite(l1, l2, w1, w2):
return F.relu(1 - (l1 - l2) * torch.sign(w2 - w1))
def rank_cross_entropy_focal_loss(l1, l2, gamma=5):
logit = torch.sigmoid(l1 - l2)
# logit = F.prelu(logit, torch.tensor(0.1))
return - (logit).pow(gamma) * torch.log(1 - logit)
def rank_mixed_cross_entropy_loss(l1, l2):
if l1 < l2:
return rank_cross_entropy_focal_loss(l1, l2)
else:
return rank_cross_entropy_loss(l1, l2)
def tanh_sign_infinite(l1, l2, w1, w2):
# given the fact that, l1 < l2 == w1 > w2.
l = torch.tanh(l1 - l2) * torch.sign(w1 - w2)
return F.relu(l)
def tanh_infinite(l1, l2, w1, w2):
# given the fact that, l1 < l2 == w1 > w2.
l = torch.tanh(l1 - l2) * torch.tanh(w1 - w2)
return F.relu(l)
def tanh_infinite_norelu(l1, l2, w1, w2):
# given the fact that, l1 < l2 == w1 > w2.
return torch.tanh(l1 - l2) * torch.tanh(w1 - w2)
# to compute the rank loss for each pair of input losses.
_loss_fn = {
'mae_relu': lambda l1, l2 : F.relu(l1 - l2),
'mae_relu_inverse': lambda l1, l2: F.relu(l2 - l1),
'mae_sign_relu': lambda l1, l2 : F.relu(torch.sign(l1 - l2)),
'mae_sign_tanh_relu': lambda l1, l2: F.relu(torch.sign(torch.tanh(l1 - l2))),
'mae_tanh_relu': lambda l1, l2: F.relu(torch.tanh(l1 - l2)),
'mae_softplus': lambda l1, l2: F.softplus(l1 - l2),
'mae_softplus_beta3': lambda l1, l2: F.softplus(l1 - l2, beta=3),
'mae_softplus_beta5': lambda l1, l2: F.softplus(l1 - l2, beta=5),
'mae_softplus_beta7': lambda l1, l2: F.softplus(l1 - l2, beta=7),
'focal_loss': rank_cross_entropy_focal_loss,
'mae_relu_norm': lambda l1, l2 : F.relu((l1 - l2) / (l1 - l2).abs() * (l1 + l2) / 2),
'mae_tanh_infinite': tanh_infinite,
'tanh_infinite': tanh_infinite_norelu,
'mae_sign_tanh_infinite': tanh_sign_infinite,
'mae_relu_sigmoid_infinite': rank_infinite_loss_v1,
'mae_relu_infinite': rank_infinite_relu,
'softplus_infinite': rank_infinite_softplus,
'sigmoid_softplus_infinite': rank_infinite_loss_v2,
'hinge_sign_infinite': rank_hinge_sign_infinite,
'crossentropy': rank_cross_entropy_loss,
'mixed_focal': rank_mixed_cross_entropy_loss,
}
def get_rank_loss_fn(name, weighted):
"""
All of these loss will penalize l1 > l2, i.e. ground truth label is l1 < l2.
:param name: args.landmark_loss_fn
:param weighted: weighted to add a subscript.
:return: loss fn.
"""
if weighted == 'embed':
return lambda l1, l2, w : w * _loss_fn[name](l1, l2)
elif weighted == 'infinite':
return _loss_fn[name + '_infinite']
return _loss_fn[name]
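# Quick illustration (not used by the training code): as documented above, these
# losses penalise a violated ranking, i.e. the ground-truth ordering is l1 < l2.
if __name__ == '__main__':
    demo_fn = get_rank_loss_fn('mae_relu', False)
    print(demo_fn(torch.tensor(0.9), torch.tensor(0.3)))  # relu(0.9 - 0.3) = 0.6, pair violated
    print(demo_fn(torch.tensor(0.3), torch.tensor(0.9)))  # relu(0.3 - 0.9) = 0.0, pair satisfied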
def pairwise_landmark_ranking_loss_step(model, data, search_space, criterion, args,
change_model_spec_fn, module_forward_fn,
rank_obj=None, pair_indicies=None):
"""
Compute the ranking loss:
for landmark models, m1, m2, ...., suppose they are in descending order
FOR i > j, L_rank = sum_{i,j} max(0, L(m_j) - L(m_i))
if landmark_loss_adjacent, i = j + 1 for sure. otherwise, i = j+1, j+2 ..., n
Version 1.0
:param model:
:param data:
:param search_space:
:param criterion:
:param args:
:param change_model_spec_fn: change model spec function, this should be associated with search space.
:param module_forward_fn: return the current loss and next loss for now.
:param rank_obj:
:param landmark_weights: weights you would like to associate with each landmark architecture.
:return:
"""
input, target = data
# for the sake of memory, we do not store the features, but we just compute the graph all the time.
coeff = args.tmp_landmark_loss_coef
rank_loss_fn = get_rank_loss_fn(args.landmark_loss_fn, args.landmark_loss_weighted)
landmark_ids, landmark_specs = search_space.landmark_topologies
if pair_indicies is None:
pair_indicies = []
for ind, model_id in enumerate(landmark_ids[:-1]): # skip the last one
end = min(ind + 1 + args.landmark_loss_adjacent_step, len(landmark_ids)) \
if args.landmark_loss_adjacent else len(landmark_ids)
for jnd in range(ind+1, end):
pair_indicies.append((ind, jnd))
for ind, jnd in pair_indicies:
# currently, landmarks id loss should decrease!
# change the model to current one
model = change_model_spec_fn(model, landmark_specs[ind])
curr_loss, _, _ = module_forward_fn(model, input, target, criterion)
model = change_model_spec_fn(model, landmark_specs[jnd])
next_loss, _, _ = module_forward_fn(model, input, target, criterion)
# weighted landmark
if args.landmark_loss_weighted == 'embed':
landmark_weights = search_space.landmark_weights
rank_loss = coeff * rank_loss_fn(next_loss, curr_loss, abs(landmark_weights[ind] - landmark_weights[jnd]))
elif args.landmark_loss_weighted == 'infinite':
landmark_weights = search_space.landmark_weights
rank_loss = coeff * rank_loss_fn(
next_loss, curr_loss,
torch.tensor(landmark_weights[jnd]).float(),
torch.tensor(landmark_weights[ind]).float(),
)
else:
rank_loss = coeff * rank_loss_fn(next_loss, curr_loss)
if rank_obj:
rank_obj.update(rank_loss.item(), input.size(0))
try:
rank_loss.backward() # update grad here.
except Exception as e:
print(e)
IPython.embed()
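# Worked example of the pair enumeration above (illustrative numbers): with 4
# landmark architectures,
#   landmark_loss_adjacent=True, landmark_loss_adjacent_step=1 -> pairs (0,1), (1,2), (2,3)
#   landmark_loss_adjacent=False                               -> pairs (0,1), (0,2), (0,3), (1,2), (1,3), (2,3)
# For each pair (ind, jnd) with ind < jnd, rank_loss_fn(next_loss, curr_loss) is
# penalised when the later landmark's loss is larger, i.e. the landmarks are
# assumed ordered so that their loss should decrease with index (see the
# "loss should decrease" comment above).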
def random_pairwise_loss_step(model, data, search_space, criterion, args,
change_model_spec_fn, module_forward_fn,
rank_obj=None):
# each time, random a pair and compare.
pairwise_indicies = []
# landmark_ids, landmark_specs = search_space.landmark_topologies
# landmark_weights = search_space.landmark_weights
num_landmark = len(search_space._landmark_ids)
for _ in range(args.landmark_loss_random_pairs):
a = randint(0, num_landmark - 2)
b = randint(a+1, num_landmark - 1)
pairwise_indicies.append([a, b])
return pairwise_landmark_ranking_loss_step(model, data, search_space, criterion, args,
change_model_spec_fn, module_forward_fn, rank_obj,
pairwise_indicies)
def random_three_pairwise_loss_step(model, data, search_space, criterion, args,
change_model_spec_fn, module_forward_fn,
rank_obj=None):
# each time, random 3 architecture to formulate this
pairwise_indicies = []
num_landmark = len(search_space._landmark_ids)
for _ in range(args.landmark_loss_random_pairs):
a, b, c = sorted(np.random.choice(np.arange(num_landmark), 3, replace=False).tolist())
pairwise_indicies.append([a, b])
pairwise_indicies.append([b, c])
# IPython.embed(header='check three pair')
# here specs contains landmark weights
return pairwise_landmark_ranking_loss_step(model, data, search_space, criterion, args,
change_model_spec_fn, module_forward_fn, rank_obj,
pairwise_indicies)
def _schedule_coeff(args, lr):
# def _schedule_coeff(args, lr):
""" Adjust the landscape coefficient """
if args.landmark_loss_coef_scheduler == 'inverse_lr':
coeff = (args.learning_rate - lr + args.learning_rate_min) * args.landmark_loss_coef
logging.info(f"landmark loss coefficient (Inverse lr) {coeff}")
else:
coeff = args.landmark_loss_coef
args.tmp_landmark_loss_coef = coeff
return args
def adjust_landmark_coef(epoch, args, batch_idx=1, total_batchs=1):
world_size = args.world_size if hasattr(args, 'world_size') else 1
if epoch < args.supernet_warmup_epoch:
coef_adf = 0
else:
# orig_epoch = epoch
epoch = epoch - args.supernet_warmup_epoch
# run_epochs = epoch - args.landmark_warmup_epoch
if 'increase' in args.landmark_loss_coef_scheduler:
args.landmark_warmup_epoch = 0
total_epochs = args.epochs - args.landmark_warmup_epoch - args.supernet_warmup_epoch
if args.landmark_warmup_epoch > 0 and epoch < args.landmark_warmup_epoch:
# warming up the landmark coef
epoch += float(batch_idx + 1) / total_batchs
if world_size > 1:
coef_adf = 1. / world_size * (epoch * (world_size - 1) / args.landmark_warmup_epoch + 1)
else:
coef_adf = epoch / (args.landmark_warmup_epoch + 1)
else:
epoch -= args.landmark_warmup_epoch
# net epoch
if args.landmark_loss_coef_scheduler == "constant":
coef_adf = 1.
elif args.landmark_loss_coef_scheduler == "linear_step_decrease":
if epoch < total_epochs // 4:
coef_adf = 1.
elif epoch < total_epochs // 2:
coef_adf = 1e-1
elif epoch < total_epochs // 4 * 3:
coef_adf = 1e-2
else:
coef_adf = 1e-3
elif args.landmark_loss_coef_scheduler == "linear_step_increase":
if epoch < total_epochs // 4:
coef_adf = 0.
elif epoch < total_epochs // 2:
coef_adf = 1e-2
elif epoch < total_epochs // 4 * 3:
coef_adf = 1e-1
else:
coef_adf = 1.
elif args.landmark_loss_coef_scheduler == "cosine_decrease":
# self.init_lr * 0.5 * (1 + math.cos(math.pi * T_cur / T_total))
T_cur = float(epoch * total_batchs) + batch_idx
T_total = float(total_epochs * total_batchs)
coef_adf = 0.5 * (1 + math.cos(math.pi * T_cur / T_total))
elif args.landmark_loss_coef_scheduler == "cosine_increase":
# self.init_lr * 0.5 * (1 + math.cos(math.pi * T_cur / T_total))
run_epochs = epoch - args.landmark_warmup_epoch
total_epochs = args.epochs - args.landmark_warmup_epoch
T_cur = float(epoch * total_batchs) + batch_idx
T_total = float(total_epochs * total_batchs)
# pi to 2 pi, from -1 to 1, i.e. coef adf is from 0 to 1.
coef_adf = 0.5 * (1 + math.cos(math.pi * (T_cur / T_total + 1)))
else:
coef_adf = 0
coeff = coef_adf * args.landmark_loss_coef
args.tmp_landmark_loss_coef = coeff
return coeff
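# Registry mapping landmark-loss step names to the step functions that implement them.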
landmark_loss_step_fns = {
'pairwise_loss': pairwise_landmark_ranking_loss_step,
# 'pairwise_loss_normalize': pairwise_landmark_ranking_loss_step,
'random_pairwise_loss': random_pairwise_loss_step,
# 'random_pairwise_infinite_loss_v1': random_pairwise_loss_step,
'random_pariwise_loss_cross_entropy': random_pairwise_loss_step,
'random_pairwise_loss_mixed_focal': random_pairwise_loss_step,
'random_three_pairwise_loss': random_three_pairwise_loss_step,
'pairwise_logits': NotImplementedError("not yet implemented.")
}
def darts_train_model_with_landmark_regularization(
train_queue, valid_queue, model, criterion, optimizer, lr, args, architect=None,
search_space=None,
landmark_loss_step=pairwise_landmark_ranking_loss_step,
sampler=None,
):
"""
This is training procedure to add a regularization of ranking loss.
:param train_queue: Training for the supernet.
:param valid_queue: Update the ranking loss? or just do nothing.
:param model:
:param optimizer:
:param lr:
:param args:
:param architect:
:param search_space:
:param landmark_loss:
:param sampler:
:return:
"""
if not isinstance(valid_queue, RepeatedDataLoader):
valid_queue = RepeatedDataLoader(valid_queue)
objs = nasws.cnn.utils.AverageMeter()
top1 = nasws.cnn.utils.AverageMeter()
top5 = nasws.cnn.utils.AverageMeter()
rank_objs = nasws.cnn.utils.AverageMeter()
coeff = adjust_landmark_coef(args.current_epoch, args)
for step, (input, target) in enumerate(train_queue):
model.train()
if args.debug and step > 10:
logging.warning('Testing only. Break after 10 batches.')
break
if sampler:
model = sampler(model, architect, args)
n = input.size(0)
input = input.cuda().requires_grad_()
target = target.cuda()
if architect and args.current_epoch >= args.epochs:
# after warmup
search_input, search_target = valid_queue.next_batch()
search_input = search_input.cuda()
search_target = search_target.cuda(non_blocking=True)
architect.step(input, target, search_input, search_target, lr, optimizer, unrolled=args.policy_args.unrolled)
# Update model
optimizer.zero_grad()
# update the normal parameters.
loss, logits, _ = search_space.module_forward_fn(model, input, target, criterion)
loss.backward()
if args.current_epoch >= args.supernet_warmup_epoch:
# add ranking loss backwards.
if args.landmark_use_valid:
rank_input, rank_target = valid_queue.next_batch()
rank_input = rank_input.cuda().requires_grad_()
rank_target = rank_target.cuda()
else:
rank_input, rank_target = input, target
# proceed ranking steps.
# logging.debug('compute the rank loss!')
set_landmark_loss_mode(model, True)
landmark_loss_step(model, (rank_input, rank_target), search_space, criterion, args,
search_space.change_model_spec, search_space.module_forward_fn,
rank_objs)
set_landmark_loss_mode(model, False)
nn.utils.clip_grad_norm_(model.parameters(), args.grad_clip)
optimizer.step()
prec1, prec5 = nasws.cnn.utils.accuracy(logits, target, topk=(1, 5))
objs.update(loss.item(), n)
top1.update(prec1.item(), n)
top5.update(prec5.item(), n)
if step % args.report_freq == 0:
_summarize_shared_train(step, objs.avg, rank_objs.avg, top1.avg, top5.avg, lr, coef=coeff)
# report ranking loss here.
logging.info(f"Loss at epoch end -> {objs.avg + rank_objs.avg} = (rank) {rank_objs.avg} + (model) {objs.avg}")
return top1.avg, objs.avg
def maml_ranking_loss_procedure(meta_queue, task_queue, model, criterion, optimizer, lr, args,
search_space,
landmark_loss_step=pairwise_landmark_ranking_loss_step,
sampler=None, architect=None,
):
"""
Combine MAML with this.
Checked the memory, is correctly reset.
:param model:
:param tasks:
:param meta_queue: data for updating the SuperNet, i.e. meta network, this should be normal data-loader.
:param task_queue: data for computing "task" gradient, this should be RepeatedDataloader
:param args:
:param valid_queue:
:param sampler: sample the architecture.
:return:
"""
# pass for now, disable.
objs = nasws.cnn.utils.AverageMeter()
top1 = nasws.cnn.utils.AverageMeter()
top5 = nasws.cnn.utils.AverageMeter()
task_objs = nasws.cnn.utils.AverageMeter()
task_top1 = nasws.cnn.utils.AverageMeter()
task_top5 = nasws.cnn.utils.AverageMeter()
rank_objs = nasws.cnn.utils.AverageMeter()
_schedule_coeff(args, lr)
if not isinstance(task_queue, RepeatedDataLoader):
task_queue = RepeatedDataLoader(task_queue)
num_inner_tasks = args.maml_num_inner_tasks
task_lr = args.maml_task_lr if args.maml_task_lr > 0 else lr
# logging.debug("Task lr is {}".format(task_lr))
num_episodes = len(meta_queue) // num_inner_tasks
meta_parameters = named_module_parameters(model)
logging.info(f"Epoch episode = {num_episodes}")
meta_queue = iter(meta_queue)
for episode in range(num_episodes):
if args.debug and episode > 2:
logging.warning('Testing only, break after 2 episodes.')
break
meta_loss = 0.0
total_model = 0
optimizer.zero_grad()
# 880Mb before this start
# Aggregate over num_inner_tasks sub-graph.
for n_task in range(num_inner_tasks):
input, target = task_queue.next_batch()
input = input.cuda().requires_grad_()
target = target.cuda()
# IPython.embed(helper='checking the debugging')
if sampler:
model = sampler(model, architect, args)
# IPython.embed(header='Checking a_dict memory using or not ')
            # compute one step for the task update.
# assign the meta-parameter back.
a_dict = task_update_step(model, (input, target), task_lr, criterion, args,
task_objs, task_top1, task_top5)
# Compute gradient for meta-network
meta_input, meta_target = next(meta_queue)
n = meta_input.size(0)
meta_input = meta_input.cuda().requires_grad_()
meta_target = meta_target.cuda()
# compute the meta_loss and do grad backward
# assign_parameters(model, a_dict)
meta_logits, meta_aux_logits = model(meta_input)
loss = criterion(meta_logits, meta_target)
if meta_aux_logits is not None:
meta_aux_loss = criterion(meta_aux_logits, meta_target)
loss += 0.4 * meta_aux_loss
loss.backward()
# add ranking loss backwards.
rank_input, rank_target = meta_input, meta_target
landmark_ids, landmark_specs = search_space.landmark_topologies
# proceed ranking steps.
            landmark_loss_step(model, (rank_input, rank_target), search_space, criterion, args,
                               search_space.change_model_spec, search_space.module_forward_fn,
                               rank_objs)
# IPython.embed(header='check meta_parameter has grad or not.')
meta_loss = meta_loss + loss.item()
            # keep accumulating the backward gradients throughout the inner loop.
total_model += 1
objs.update(loss.item(), n)
prec1, prec5 = accuracy(meta_logits.detach(), meta_target, topk=(1,5))
top1.update(prec1.item(), n)
top5.update(prec5.item(), n)
assign_parameters(model, meta_parameters)
del a_dict
            # ~1400M at the first iteration, increasing ~2M every epoch; this is reasonable
            # because gradients keep accumulating on the meta parameters.
if episode % args.report_freq == 0:
maml_summarize(episode, objs.avg, task_objs.avg, top1.avg, top5.avg,
task_lr, task_top1.avg, task_top5.avg)
nn.utils.clip_grad_norm_(model.parameters(), args.grad_clip)
optimizer.step()
# conclude one MAML update loop.
# IPython.embed(header='Checking memory is released for next episode loop')
        # Memory was checked: after the entire loop, the pointers are correctly cleaned up.
logging.info(f"Loss at epoch end -> {objs.avg + rank_objs.avg} = (rank) {rank_objs.avg} + (model) {objs.avg}")
return (top1.avg, task_top1.avg), (objs.avg, task_objs.avg)
| 40.323529 | 121 | 0.629422 |
4a238fd37199899d0285468a4b68aefbc6c0ac93 | 10,067 | py | Python | phathom/pipeline/find_corr_neighbors_cmd.py | chunglabmit/phathom | 304db7a95e898e9b03d6b2640172752d21a7e3ed | [
"MIT"
] | 1 | 2018-04-18T11:54:29.000Z | 2018-04-18T11:54:29.000Z | phathom/pipeline/find_corr_neighbors_cmd.py | chunglabmit/phathom | 304db7a95e898e9b03d6b2640172752d21a7e3ed | [
"MIT"
] | 2 | 2018-04-05T20:53:52.000Z | 2018-11-01T16:37:39.000Z | phathom/pipeline/find_corr_neighbors_cmd.py | chunglabmit/phathom | 304db7a95e898e9b03d6b2640172752d21a7e3ed | [
"MIT"
] | null | null | null | import argparse
import pickle
import matplotlib
matplotlib.use("Agg")
import matplotlib.backends.backend_pdf
from matplotlib import pyplot
import multiprocessing
import numpy as np
import json
import tqdm
import sys
import os
from scipy.spatial import KDTree
from precomputed_tif.client import ArrayReader
from phathom.registration.fine import follow_gradient
from phathom.pipeline.find_neighbors_cmd import FindNeighborsData, cull_pts
from phathom.pipeline.find_neighbors_cmd import plot_points as fm_plot_points
def parse_arguments(args=sys.argv[1:]):
parser = argparse.ArgumentParser()
parser.add_argument(
"--fixed-coords",
help="Path to the blobs found in the fixed volume. If this is missing, "
"use the grid locations directly."
)
parser.add_argument(
"--fixed-url",
help="URL of the fixed neuroglancer volume",
required=True
)
parser.add_argument(
"--moving-url",
help="URL of the moving neuroglancer volume",
required=True
)
parser.add_argument(
"--transform",
help="Path to the transform .pkl file from fixed to moving",
required=True
)
parser.add_argument(
"--output",
help="Name of the output file - a JSON dictionary of results",
required=True
)
parser.add_argument(
"--visualization-file",
help="The path to the PDF file output by this program. "
"This file contains helpful visualizations that document the "
"program's progress.")
parser.add_argument(
"--x-grid",
help="Dimensions of the point grid in the X direction",
type=int,
default=50
)
parser.add_argument(
"--y-grid",
help="Dimensions of the point grid in the Y direction",
type=int,
default=50
)
parser.add_argument(
"--z-grid",
help="Dimensions of the point grid in the Z direction",
type=int,
default=50
)
parser.add_argument(
"--sigma-x",
help="Smoothing sigma in the X direction",
type=float,
default=3.0
)
parser.add_argument(
"--sigma-y",
help="Smoothing sigma in the Y direction",
type=float,
default=3.0
)
parser.add_argument(
"--sigma-z",
help="Smoothing sigma in the Z direction",
type=float,
default=3
)
parser.add_argument(
"--half-window-x",
help="The half-window size in the x direction. The actual window "
"will be half-window-x * 2 + 1",
type=int,
default=20
)
parser.add_argument(
"--half-window-y",
help="The half-window size in the y direction.",
type=int,
default=20
)
parser.add_argument(
"--half-window-z",
help="The half-window size in the z direction",
type=int,
default=20
)
parser.add_argument(
"--pad-x",
help="Amount of padding to add to moving array in the x direction "
"when fetching.",
type=int,
default=20
)
parser.add_argument(
"--pad-y",
help="Amount of padding to add to moving array in the y direction "
"when fetching.",
type=int,
default=20
)
parser.add_argument(
"--pad-z",
help="Amount of padding to add to moving array in the z direction "
"when fetching.",
type=int,
default=20
)
parser.add_argument(
"--radius",
help="Points are excluded from the grid if the nearest is more than "
"the radius from the target grid point. Radius is in microns.",
type=float,
default=25
)
parser.add_argument(
"--max-rounds",
help="Maximum number of steps to take when tracking gradient",
type=int,
default=100
)
parser.add_argument(
"--voxel-size",
help="The size of a voxel in microns, three comma-separated values "
"in x, y, z order e.g. \"1.8,1.8,2.0",
default="1.8,1.8,2.0")
parser.add_argument(
"--n-cores",
help="Number of processors to use",
type=int,
default=os.cpu_count()
)
parser.add_argument(
"--min-correlation",
help="Discard matches if correlation is less than this",
type=float,
default=.90
)
parser.add_argument(
"--level",
help="Perform alignment at this level (default = 1, others correspond "
"to the power of 2 of the magnification -1)",
type=int,
default=1
)
return parser.parse_args(args)
def choose_points(points_fixed, x_grid, y_grid, z_grid, shape, radius,
voxel_size, n_cores):
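    """Keep only the fixed points that are the nearest neighbor (within `radius` microns)
    of at least one node of a regular x/y/z grid spanning `shape`."""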
grid = get_grid_points(shape, voxel_size, x_grid, y_grid, z_grid)
kdtree = KDTree(points_fixed * np.array([voxel_size]))
nearest_d, nearest_idx = kdtree.query(grid)
mask = nearest_d <= radius
unique_idx = np.unique(nearest_idx[mask])
return points_fixed[unique_idx]
def get_grid_points(shape, voxel_size, x_grid, y_grid, z_grid):
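    """Build a regular grid of (z, y, x) coordinates in microns covering a volume of the given shape."""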
xs = np.linspace(0, shape[2] * voxel_size[2], x_grid)
ys = np.linspace(0, shape[1] * voxel_size[1], y_grid)
zs = np.linspace(0, shape[0] * voxel_size[0], z_grid)
grid_z, grid_y, grid_x = np.meshgrid(zs, ys, xs)
grid = np.column_stack(
[grid_z.flatten(), grid_y.flatten(), grid_x.flatten()])\
.astype(np.uint32)
return grid
def main(args=sys.argv[1:]):
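    """Follow intensity gradients at grid points to find correlated fixed/moving matches and
    write the surviving matches as a FindNeighborsData file (optionally with diagnostic PDF plots)."""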
opts = parse_arguments(args)
if opts.visualization_file is not None:
matplotlib.interactive(False)
PDF = matplotlib.backends.backend_pdf.PdfPages(opts.visualization_file)
else:
PDF = None
with open(opts.transform, "rb") as fd:
interpolator = pickle.load(fd)["interpolator"]
magnification = 2 ** (opts.level - 1)
afixed = ArrayReader(opts.fixed_url, format="blockfs",
level=magnification)
amoving = ArrayReader(opts.moving_url, format="blockfs",
level=magnification)
voxel_size = \
np.array([float(_) for _ in opts.voxel_size.split(",")][::-1])
if opts.fixed_coords is not None:
with open(opts.fixed_coords) as fd:
points_fixed = np.array(json.load(fd))[:, ::-1]
chosen_points = choose_points(points_fixed,
opts.x_grid,
opts.y_grid,
opts.z_grid,
afixed.shape,
opts.radius,
voxel_size,
opts.n_cores)
else:
chosen_points = get_grid_points(afixed.shape,
voxel_size,
opts.x_grid,
opts.y_grid,
opts.z_grid)
if PDF is not None:
figure = plot_points(chosen_points)
figure.suptitle("Fixed points")
PDF.savefig(figure)
with multiprocessing.Pool(opts.n_cores) as pool:
futures = []
for pt_fixed in chosen_points:
args = (afixed, amoving, interpolator, pt_fixed,
opts.half_window_x, opts.half_window_y, opts.half_window_z,
opts.pad_x, opts.pad_y, opts.pad_z,
opts.max_rounds,
[opts.sigma_z, opts.sigma_y, opts.sigma_x],
opts.level)
futures.append(
(pt_fixed,
pool.apply_async(follow_gradient, args))
)
matches = []
corrs = []
for pt_fixed, future in tqdm.tqdm(futures):
result = future.get()
if result is None:
continue
pt_moving, corr = result
corrs.append(corr)
if corr >= opts.min_correlation:
matches.append((pt_fixed, pt_moving))
if PDF is not None:
figure:pyplot.Figure = pyplot.figure(figsize=(6, 6))
ax = figure.add_subplot(1, 1, 1)
counts = ax.hist(corrs, bins=100)[0]
ax.plot([opts.min_correlation, opts.min_correlation], [0, np.max(counts)])
figure.suptitle("Histogram of correlations")
PDF.savefig(figure)
fixed_coords = np.stack([pt_fixed for pt_fixed, pt_moving in matches])
moving_coords_fixed_frame =\
np.stack([pt_moving for pt_fixed, pt_moving in matches])
if PDF is not None and len(fixed_coords) > 0:
center = np.median(fixed_coords, 0)
figure = fm_plot_points(fixed_coords, moving_coords_fixed_frame, center)
figure.suptitle("Alignment")
PDF.savefig(figure)
moving_coords = interpolator(moving_coords_fixed_frame)
fixed_coords_um = fixed_coords * voxel_size.reshape(1, 3)
moving_coords_um = moving_coords * voxel_size.reshape(1, 3)
fake_fixed_features = np.zeros((len(fixed_coords), 1, 6))
fake_moving_features = np.zeros((len(moving_coords), 1, 6))
idx = np.arange(len(fixed_coords))
fnd = FindNeighborsData(
fixed_coords_um,
moving_coords_um,
fake_fixed_features,
fake_moving_features,
voxel_size.reshape(3).tolist(),
idx,
idx, 0, 0, 0, dict(
center=afixed.shape
)
)
fnd.write(opts.output)
if PDF is not None:
PDF.close()
def plot_points(points:np.ndarray, values:np.ndarray=None)->pyplot.Figure:
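    """Plot the points in three orthogonal projections (one subplot per axis), culling each view
    via cull_pts and optionally coloring the points by `values`."""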
figure = pyplot.figure(figsize=(6, 6))
center = np.median(points, 0)
for coord_idx, axis_idx in ((2, 1), (1, 2), (0, 4)):
pts, idxs = cull_pts(points, center, coord_idx, 2000, return_indices=True)
ax = figure.add_subplot(2, 2, axis_idx)
if values is None:
ax.scatter(pts[:, 1], pts[:, 0])
else:
ax.scatter(pts[:, 1], pts[:, 0], c=values[idxs])
return figure
if __name__ == "__main__":
    main()
| 33.334437 | 82 | 0.584484 |
4a23902d5fc666f9143d75b63ada087ac2fec2e5 | 9,401 | py | Python | bot/cogs/watchchannels/talentpool.py | Ayplow/bot | 71a3ac9382851845dcb26609d64299bd69b0f0f5 | [
"MIT"
] | null | null | null | bot/cogs/watchchannels/talentpool.py | Ayplow/bot | 71a3ac9382851845dcb26609d64299bd69b0f0f5 | [
"MIT"
] | null | null | null | bot/cogs/watchchannels/talentpool.py | Ayplow/bot | 71a3ac9382851845dcb26609d64299bd69b0f0f5 | [
"MIT"
] | null | null | null | import logging
import textwrap
from collections import ChainMap
from typing import Union
from discord import Color, Embed, Member, User
from discord.ext.commands import Bot, Cog, Context, group
from bot.api import ResponseCodeError
from bot.constants import Channels, Guild, Roles, Webhooks
from bot.decorators import with_role
from bot.pagination import LinePaginator
from bot.utils import time
from .watchchannel import WatchChannel, proxy_user
log = logging.getLogger(__name__)
STAFF_ROLES = Roles.owner, Roles.admin, Roles.moderator, Roles.helpers # <- In constants after the merge?
class TalentPool(WatchChannel, Cog, name="Talentpool"):
"""Relays messages of helper candidates to a watch channel to observe them."""
def __init__(self, bot: Bot) -> None:
super().__init__(
bot,
destination=Channels.talent_pool,
webhook_id=Webhooks.talent_pool,
api_endpoint='bot/nominations',
api_default_params={'active': 'true', 'ordering': '-inserted_at'},
logger=log,
)
@group(name='talentpool', aliases=('tp', 'talent', 'nomination', 'n'), invoke_without_command=True)
@with_role(Roles.owner, Roles.admin, Roles.moderator)
async def nomination_group(self, ctx: Context) -> None:
"""Highlights the activity of helper nominees by relaying their messages to the talent pool channel."""
await ctx.invoke(self.bot.get_command("help"), "talentpool")
@nomination_group.command(name='watched', aliases=('all', 'list'))
@with_role(Roles.owner, Roles.admin, Roles.moderator)
async def watched_command(self, ctx: Context, update_cache: bool = True) -> None:
"""
Shows the users that are currently being monitored in the talent pool.
The optional kwarg `update_cache` can be used to update the user
cache using the API before listing the users.
"""
await self.list_watched_users(ctx, update_cache)
@nomination_group.command(name='watch', aliases=('w', 'add', 'a'))
@with_role(Roles.owner, Roles.admin, Roles.moderator)
async def watch_command(self, ctx: Context, user: Union[Member, User, proxy_user], *, reason: str) -> None:
"""
Relay messages sent by the given `user` to the `#talent-pool` channel.
A `reason` for adding the user to the talent pool is required and will be displayed
in the header when relaying messages of this user to the channel.
"""
if user.bot:
await ctx.send(f":x: I'm sorry {ctx.author}, I'm afraid I can't do that. I only watch humans.")
return
if isinstance(user, Member) and any(role.id in STAFF_ROLES for role in user.roles):
await ctx.send(f":x: Nominating staff members, eh? Here's a cookie :cookie:")
return
if not await self.fetch_user_cache():
await ctx.send(f":x: Failed to update the user cache; can't add {user}")
return
if user.id in self.watched_users:
await ctx.send(":x: The specified user is already being watched in the talent pool")
return
# Manual request with `raise_for_status` as False because we want the actual response
session = self.bot.api_client.session
url = self.bot.api_client._url_for(self.api_endpoint)
kwargs = {
'json': {
'actor': ctx.author.id,
'reason': reason,
'user': user.id
},
'raise_for_status': False,
}
async with session.post(url, **kwargs) as resp:
response_data = await resp.json()
if resp.status == 400 and response_data.get('user', False):
await ctx.send(":x: The specified user can't be found in the database tables")
return
else:
resp.raise_for_status()
self.watched_users[user.id] = response_data
await ctx.send(f":white_check_mark: Messages sent by {user} will now be relayed to the talent pool channel")
@nomination_group.command(name='history', aliases=('info', 'search'))
@with_role(Roles.owner, Roles.admin, Roles.moderator)
async def history_command(self, ctx: Context, user: Union[User, proxy_user]) -> None:
"""Shows the specified user's nomination history."""
result = await self.bot.api_client.get(
self.api_endpoint,
params={
'user__id': str(user.id),
'ordering': "-active,-inserted_at"
}
)
if not result:
await ctx.send(":warning: This user has never been nominated")
return
embed = Embed(
title=f"Nominations for {user.display_name} `({user.id})`",
color=Color.blue()
)
lines = [self._nomination_to_string(nomination) for nomination in result]
await LinePaginator.paginate(
lines,
ctx=ctx,
embed=embed,
empty=True,
max_lines=3,
max_size=1000
)
@nomination_group.command(name='unwatch', aliases=('end', ))
@with_role(Roles.owner, Roles.admin, Roles.moderator)
async def unwatch_command(self, ctx: Context, user: Union[User, proxy_user], *, reason: str) -> None:
"""
Ends the active nomination of the specified user with the given reason.
Providing a `reason` is required.
"""
active_nomination = await self.bot.api_client.get(
self.api_endpoint,
params=ChainMap(
self.api_default_params,
{"user__id": str(user.id)}
)
)
if not active_nomination:
await ctx.send(":x: The specified user does not have an active nomination")
return
[nomination] = active_nomination
await self.bot.api_client.patch(
f"{self.api_endpoint}/{nomination['id']}",
json={'end_reason': reason, 'active': False}
)
await ctx.send(f":white_check_mark: Messages sent by {user} will no longer be relayed")
self._remove_user(user.id)
@nomination_group.group(name='edit', aliases=('e',), invoke_without_command=True)
@with_role(Roles.owner, Roles.admin, Roles.moderator)
async def nomination_edit_group(self, ctx: Context) -> None:
"""Commands to edit nominations."""
await ctx.invoke(self.bot.get_command("help"), "talentpool", "edit")
@nomination_edit_group.command(name='reason')
@with_role(Roles.owner, Roles.admin, Roles.moderator)
async def edit_reason_command(self, ctx: Context, nomination_id: int, *, reason: str) -> None:
"""
Edits the reason/unnominate reason for the nomination with the given `id` depending on the status.
If the nomination is active, the reason for nominating the user will be edited;
If the nomination is no longer active, the reason for ending the nomination will be edited instead.
"""
try:
nomination = await self.bot.api_client.get(f"{self.api_endpoint}/{nomination_id}")
except ResponseCodeError as e:
if e.response.status == 404:
                self.log.trace(f"Nomination API 404: Can't find a nomination with id {nomination_id}")
await ctx.send(f":x: Can't find a nomination with id `{nomination_id}`")
return
else:
raise
field = "reason" if nomination["active"] else "end_reason"
self.log.trace(f"Changing {field} for nomination with id {nomination_id} to {reason}")
await self.bot.api_client.patch(
f"{self.api_endpoint}/{nomination_id}",
json={field: reason}
)
await ctx.send(f":white_check_mark: Updated the {field} of the nomination!")
def _nomination_to_string(self, nomination_object: dict) -> str:
"""Creates a string representation of a nomination."""
guild = self.bot.get_guild(Guild.id)
actor_id = nomination_object["actor"]
actor = guild.get_member(actor_id)
active = nomination_object["active"]
log.debug(active)
log.debug(type(nomination_object["inserted_at"]))
start_date = time.format_infraction(nomination_object["inserted_at"])
if active:
lines = textwrap.dedent(
f"""
===============
Status: **Active**
Date: {start_date}
Actor: {actor.mention if actor else actor_id}
Reason: {nomination_object["reason"]}
Nomination ID: `{nomination_object["id"]}`
===============
"""
)
else:
end_date = time.format_infraction(nomination_object["ended_at"])
lines = textwrap.dedent(
f"""
===============
Status: Inactive
Date: {start_date}
Actor: {actor.mention if actor else actor_id}
Reason: {nomination_object["reason"]}
End date: {end_date}
Unwatch reason: {nomination_object["end_reason"]}
Nomination ID: `{nomination_object["id"]}`
===============
"""
)
return lines.strip()
| 40.347639 | 116 | 0.605148 |
4a23908dc337f96c27ebba814a2775af0d26bd85 | 961 | py | Python | kms/tests/system.py | DaveCheez/google-cloud-python | fc03d4d41f13e9d13db7206438163b3a471fdabd | [
"Apache-2.0"
] | 2 | 2021-11-26T07:08:43.000Z | 2022-03-07T20:20:04.000Z | kms/tests/system.py | DaveCheez/google-cloud-python | fc03d4d41f13e9d13db7206438163b3a471fdabd | [
"Apache-2.0"
] | 40 | 2019-07-16T10:04:48.000Z | 2020-01-20T09:04:59.000Z | kms/tests/system.py | DaveCheez/google-cloud-python | fc03d4d41f13e9d13db7206438163b3a471fdabd | [
"Apache-2.0"
] | 2 | 2019-07-18T00:05:31.000Z | 2019-11-27T14:17:22.000Z | # -*- coding: utf-8 -*-
#
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from google.cloud import kms_v1
class TestKeyManagementServiceClient(object):
def test_list_global_key_rings(self):
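        """Smoke test: listing key rings in the global location should not raise."""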
project_id = os.environ["PROJECT_ID"]
client = kms_v1.KeyManagementServiceClient()
location = "global"
parent = client.location_path(project_id, location)
client.list_key_rings(parent)
| 32.033333 | 74 | 0.736733 |
4a2390aae197184e542c7586d536bf7adab2e939 | 982 | py | Python | courses/machine_learning/feateng/taxifare_tft/trainer/setup.py | laurenzberger/training-data-analyst | 3e2ef4668c5088ab50ad50a4f29673c88fb1bcd3 | [
"Apache-2.0"
] | 6,140 | 2016-05-23T16:09:35.000Z | 2022-03-30T19:00:46.000Z | courses/machine_learning/feateng/taxifare_tft/trainer/setup.py | laurenzberger/training-data-analyst | 3e2ef4668c5088ab50ad50a4f29673c88fb1bcd3 | [
"Apache-2.0"
] | 1,384 | 2016-07-08T22:26:41.000Z | 2022-03-24T16:39:43.000Z | courses/machine_learning/feateng/taxifare_tft/trainer/setup.py | laurenzberger/training-data-analyst | 3e2ef4668c5088ab50ad50a4f29673c88fb1bcd3 | [
"Apache-2.0"
] | 5,110 | 2016-05-27T13:45:18.000Z | 2022-03-31T18:40:42.000Z | # Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import find_packages
from setuptools import setup
REQUIRED_PACKAGES = [
]
setup(
name='taxifare',
version='0.1',
author = 'Google',
author_email = '[email protected]',
install_requires=REQUIRED_PACKAGES,
packages=find_packages(),
include_package_data=True,
description='CPB102 taxifare in Cloud ML',
requires=[]
)
| 30.6875 | 74 | 0.743381 |
4a23935d62bce5fa5aa50aee52f9ba4e8e23a4b5 | 2,063 | py | Python | migration/env.py | mglacayo07/jqGrid | 71625fb6e9684eb4bf5811414965fd84a6038d40 | [
"MIT"
] | null | null | null | migration/env.py | mglacayo07/jqGrid | 71625fb6e9684eb4bf5811414965fd84a6038d40 | [
"MIT"
] | null | null | null | migration/env.py | mglacayo07/jqGrid | 71625fb6e9684eb4bf5811414965fd84a6038d40 | [
"MIT"
] | null | null | null | from __future__ import with_statement
from alembic import context
from sqlalchemy import engine_from_config, pool
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
# from logging.config import fileConfig
# fileConfig(config.config_file_name)
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
from jqgrid import model
target_metadata = model.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline():
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
"""
url = config.get_main_option("sqlalchemy.url")
context.configure(url=url, version_table='migrate_version')
with context.begin_transaction():
context.run_migrations()
def run_migrations_online():
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
engine = engine_from_config(
config.get_section(config.config_ini_section),
prefix='sqlalchemy.',
poolclass=pool.NullPool
)
connection = engine.connect()
context.configure(
connection=connection,
target_metadata=target_metadata,
version_table='migrate_version'
)
try:
with context.begin_transaction():
context.run_migrations()
finally:
connection.close()
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()
| 26.792208 | 69 | 0.722734 |
4a2393a3572f32fac1007823299d3fc97fe35fa4 | 727 | py | Python | tests/test_base_os.py | karliy/elasticsearch-docker | e6d9e03de18bd0c7cf797e5981ea73fc05452e61 | [
"Apache-2.0"
] | null | null | null | tests/test_base_os.py | karliy/elasticsearch-docker | e6d9e03de18bd0c7cf797e5981ea73fc05452e61 | [
"Apache-2.0"
] | null | null | null | tests/test_base_os.py | karliy/elasticsearch-docker | e6d9e03de18bd0c7cf797e5981ea73fc05452e61 | [
"Apache-2.0"
] | 1 | 2019-07-24T16:55:31.000Z | 2019-07-24T16:55:31.000Z | from .fixtures import elasticsearch
def test_base_os(host):
assert host.system_info.distribution == 'centos'
assert host.system_info.release == '7'
def test_java_home_env_var(host):
java_path_cmdline = '$JAVA_HOME/bin/java -version'
assert host.run(java_path_cmdline).exit_status == 0
def test_no_core_files_exist_in_root(host):
core_file_check_cmdline = 'ls -l /core*'
assert host.run(core_file_check_cmdline).exit_status != 0
def test_all_elasticsearch_files_are_gid_0(host):
check_for_files_with_gid_0_command = (
"cd /usr/share && "
"find ./elasticsearch ! -gid 0 | "
"egrep '.*'"
)
assert host.run(check_for_files_with_gid_0_command).exit_status != 0
| 25.068966 | 72 | 0.715268 |
4a239587a3484a14a5cd217208894b922fa1f24d | 29 | py | Python | trajopt/riccati/__init__.py | Fitz13009/trajopt | e74cf44cfa7d3037d1fccb27ab1e7eebff16c8c4 | [
"MIT"
] | 40 | 2019-06-17T11:49:57.000Z | 2022-03-29T16:30:56.000Z | trajopt/riccati/__init__.py | JoeMWatson/trajopt | 8b98718721e0c373cd7dc01a35f42447c1134713 | [
"MIT"
] | 2 | 2019-12-10T13:40:00.000Z | 2020-10-06T09:22:47.000Z | trajopt/riccati/__init__.py | JoeMWatson/trajopt | 8b98718721e0c373cd7dc01a35f42447c1134713 | [
"MIT"
] | 10 | 2019-07-05T11:29:12.000Z | 2021-12-29T12:56:56.000Z | from .riccati import Riccati
| 14.5 | 28 | 0.827586 |
4a2397fe7132f6869deaaa754ba2fb75d1bfb697 | 12,100 | py | Python | tests/unit/test_snap.py | simondeziel/operator-libs-linux | 4a1a73420900e8b3e78dc5b41ec8df7b2b10260f | [
"Apache-2.0"
] | null | null | null | tests/unit/test_snap.py | simondeziel/operator-libs-linux | 4a1a73420900e8b3e78dc5b41ec8df7b2b10260f | [
"Apache-2.0"
] | null | null | null | tests/unit/test_snap.py | simondeziel/operator-libs-linux | 4a1a73420900e8b3e78dc5b41ec8df7b2b10260f | [
"Apache-2.0"
] | null | null | null | # Copyright 2021 Canonical Ltd.
# See LICENSE file for licensing details.
import json
import unittest
from unittest.mock import MagicMock, mock_open, patch
import fake_snapd as fake_snapd
from charms.operator_libs_linux.v0 import snap
patch("charms.operator_libs_linux.v0.snap._cache_init", lambda x: x).start()
lazy_load_result = r"""
{
"type": "sync",
"status-code": 200,
"status": "OK",
"result": [
{
"id": "jFJhGxzO7zh4xPun3oLzsYPesPvyGblh",
"title": "curl",
"summary": "CLI tool for transferring data with URL syntax (HTTP, HTTPS, etc)",
"description": "A command line tool and library for transferring data with URL syntax, \nsupporting HTTP, HTTPS, FTP, FTPS, GOPHER, TFTP, SCP, SFTP, SMB, TELNET, \nDICT, LDAP, LDAPS, FILE, IMAP, SMTP, POP3, RTSP and RTMP. \ncurl offers a myriad of powerful features",
"download-size": 6524928,
"name": "curl",
"publisher": {
"id": "trElzADL6BSHUJX2R38cUoXIElh2BYRZ",
"username": "woutervb",
"display-name": "Wouter van Bommel",
"validation": "unproven"
},
"store-url": "https://snapcraft.io/curl",
"developer": "woutervb",
"status": "available",
"type": "app",
"base": "core20",
"version": "7.78.0",
"channel": "stable",
"ignore-validation": false,
"revision": "233",
"confinement": "strict",
"private": false,
"devmode": false,
"jailmode": false,
"contact": "https://github.com/woutervb/snap-curl",
"license": "curl",
"website": "https://github.com/woutervb/snap-curl",
"channels": {
"latest/edge": {
"revision": "275",
"confinement": "strict",
"version": "7.78.0",
"channel": "latest/edge",
"epoch": {
"read": [
0
],
"write": [
0
]
},
"size": 6524928,
"released-at": "2021-08-19T06:15:44.601272Z"
},
"latest/stable": {
"revision": "233",
"confinement": "strict",
"version": "7.78.0",
"channel": "latest/stable",
"epoch": {
"read": [
0
],
"write": [
0
]
},
"size": 6524928,
"released-at": "2021-07-29T23:20:37.945102Z"
}
},
"tracks": [
"latest"
]
}
],
"sources": [
"store"
],
"suggested-currency": "USD"
}
"""
installed_result = r"""
{
"type": "sync",
"status-code": 200,
"status": "OK",
"result": [
{
"id": "gcqfpVCOUvmDuYT0Dh5PjdeGypSEzNdV",
"title": "charmcraft",
"summary": "The charming tool",
"description": "Charmcraft enables charm creators to build, publish, and manage charmed operators for Kubernetes, metal and virtual machines.",
"icon": "https://dashboard.snapcraft.io/site_media/appmedia/2021/06/image-juju-256.svg.png",
"installed-size": 55361536,
"name": "charmcraft",
"publisher": {
"id": "canonical",
"username": "canonical",
"display-name": "Canonical",
"validation": "verified"
},
"developer": "canonical",
"status": "active",
"type": "app",
"base": "core20",
"version": "1.2.1",
"channel": "latest/stable",
"tracking-channel": "latest/stable",
"ignore-validation": false,
"revision": "603",
"confinement": "classic",
"private": false,
"devmode": false,
"jailmode": false,
"apps": [
{
"snap": "charmcraft",
"name": "charmcraft"
}
],
"contact": "",
"license": "Apache-2.0",
"mounted-from": "/var/lib/snapd/snaps/charmcraft_603.snap",
"website": "https://github.com/canonical/charmcraft/",
"media": [
{
"type": "icon",
"url": "https://dashboard.snapcraft.io/site_media/appmedia/2021/06/image-juju-256.svg.png",
"width": 256,
"height": 256
}
],
"install-date": "2021-08-20T00:10:20.074917847Z"
},
{
"id": "99T7MUlRhtI3U0QFgl5mXXESAiSwt776",
"title": "core",
"summary": "snapd runtime environment",
"description": "The core runtime environment for snapd",
"installed-size": 104210432,
"name": "core",
"publisher": {
"id": "canonical",
"username": "canonical",
"display-name": "Canonical",
"validation": "verified"
},
"developer": "canonical",
"status": "active",
"type": "os",
"version": "16-2.51.3",
"channel": "latest/stable",
"tracking-channel": "latest/stable",
"ignore-validation": false,
"revision": "11420",
"confinement": "strict",
"private": false,
"devmode": false,
"jailmode": false,
"contact": "mailto:[email protected]",
"mounted-from": "/var/lib/snapd/snaps/core_11420.snap",
"install-date": "2021-07-27T13:24:00.522211469Z"
}
]
}
"""
class SnapCacheTester(snap.SnapCache):
def __init__(self):
# Fake out __init__ so we can test methods individually
self._snap_map = {}
self._snap_client = MagicMock()
class TestSnapCache(unittest.TestCase):
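    """Tests for loading and lazily populating the SnapCache."""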
@patch("builtins.open", new_callable=mock_open, read_data="foo\nbar\n")
@patch("os.path.isfile")
def test_can_load_snap_cache(self, mock_exists, m):
m.return_value.__iter__ = lambda self: self
m.return_value.__next__ = lambda self: next(iter(self.readline, ""))
mock_exists.return_value = True
s = SnapCacheTester()
s._load_available_snaps()
self.assertIn("foo", s._snap_map)
self.assertEqual(len(s._snap_map), 2)
@patch("builtins.open", new_callable=mock_open, read_data="curl\n")
@patch("os.path.isfile")
def test_can_lazy_load_snap_info(self, mock_exists, m):
m.return_value.__iter__ = lambda self: self
m.return_value.__next__ = lambda self: next(iter(self.readline, ""))
mock_exists.return_value = True
s = SnapCacheTester()
s._snap_client.get_snap_information.return_value = json.loads(lazy_load_result)["result"][
0
]
s._load_available_snaps()
self.assertIn("curl", s._snap_map)
result = s["curl"]
self.assertEqual(result.name, "curl")
self.assertEqual(result.state, snap.SnapState.Available)
self.assertEqual(result.channel, "stable")
self.assertEqual(result.confinement, "strict")
self.assertEqual(result.revision, "233")
@patch("os.path.isfile")
def test_can_load_installed_snap_info(self, mock_exists):
mock_exists.return_value = True
s = SnapCacheTester()
s._snap_client.get_installed_snaps.return_value = json.loads(installed_result)["result"]
s._load_installed_snaps()
self.assertEqual(len(s), 2)
self.assertIn("charmcraft", s)
self.assertEqual(s["charmcraft"].name, "charmcraft")
self.assertEqual(s["charmcraft"].state, snap.SnapState.Latest)
self.assertEqual(s["charmcraft"].channel, "latest/stable")
self.assertEqual(s["charmcraft"].confinement, "classic")
self.assertEqual(s["charmcraft"].revision, "603")
@patch("os.path.isfile")
def test_raises_error_if_snap_not_running(self, mock_exists):
mock_exists.return_value = False
s = SnapCacheTester()
s._snap_client.get_installed_snaps.side_effect = snap.SnapAPIError(
{}, 400, "error", "snapd is not running"
)
with self.assertRaises(snap.SnapAPIError) as ctx:
s._load_installed_snaps()
self.assertEqual("<charms.operator_libs_linux.v0.snap.SnapAPIError>", ctx.exception.name)
self.assertIn("snapd is not running", ctx.exception.message)
def test_can_compare_snap_equality(self):
foo1 = snap.Snap("foo", snap.SnapState.Present, "stable", "1", "classic")
foo2 = snap.Snap("foo", snap.SnapState.Present, "stable", "1", "classic")
self.assertEqual(foo1, foo2)
@patch("charms.operator_libs_linux.v0.snap.subprocess.check_output")
def test_can_run_snap_commands(self, mock_subprocess):
mock_subprocess.return_value = 0
foo = snap.Snap("foo", snap.SnapState.Present, "stable", "1", "classic")
self.assertEqual(foo.present, True)
foo.ensure(snap.SnapState.Absent)
mock_subprocess.assert_called_with(["snap", "remove", "foo"], universal_newlines=True)
foo.ensure(snap.SnapState.Latest, classic=True, channel="latest/edge")
mock_subprocess.assert_called_with(
["snap", "install", "foo", "--classic", '--channel="latest/edge"'],
universal_newlines=True,
)
self.assertEqual(foo.latest, True)
foo.state = snap.SnapState.Absent
mock_subprocess.assert_called_with(["snap", "remove", "foo"], universal_newlines=True)
class TestSocketClient(unittest.TestCase):
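    """Tests for SnapClient error handling against a missing socket and a fake snapd server."""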
def test_socket_not_found(self):
client = snap.SnapClient(socket_path="/does/not/exist")
with self.assertRaises(snap.SnapAPIError) as ctx:
client.get_installed_snaps()
self.assertIsInstance(ctx.exception, snap.SnapAPIError)
def test_fake_socket(self):
shutdown, socket_path = fake_snapd.start_server()
try:
client = snap.SnapClient(socket_path)
with self.assertRaises(snap.SnapAPIError) as ctx:
client.get_installed_snaps()
self.assertIsInstance(ctx.exception, snap.SnapAPIError)
finally:
shutdown()
class TestSnapBareMethods(unittest.TestCase):
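    """Tests for the module-level snap.add / snap.remove / snap.ensure helpers."""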
@patch("builtins.open", new_callable=mock_open, read_data="curl\n")
@patch("os.path.isfile")
def setUp(self, mock_exists, m):
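        # Prime the module-level snap cache with the fake installed/available payloads defined above.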
m.return_value.__iter__ = lambda self: self
m.return_value.__next__ = lambda self: next(iter(self.readline, ""))
mock_exists.return_value = True
snap._Cache.cache = SnapCacheTester()
snap._Cache.cache._snap_client.get_installed_snaps.return_value = json.loads(
installed_result
)["result"]
snap._Cache.cache._snap_client.get_snap_information.return_value = json.loads(
lazy_load_result
)["result"][0]
snap._Cache.cache._load_installed_snaps()
snap._Cache.cache._load_available_snaps()
@patch("charms.operator_libs_linux.v0.snap.subprocess.check_output")
def test_can_run_bare_changes(self, mock_subprocess):
mock_subprocess.return_value = 0
foo = snap.add("curl", classic=True, channel="latest")
mock_subprocess.assert_called_with(
["snap", "install", "curl", "--classic", '--channel="latest"'], universal_newlines=True
)
self.assertEqual(foo.present, True)
bar = snap.remove("curl")
mock_subprocess.assert_called_with(["snap", "remove", "curl"], universal_newlines=True)
self.assertEqual(bar.present, False)
@patch("charms.operator_libs_linux.v0.snap.subprocess.check_output")
def test_can_ensure_states(self, mock_subprocess):
mock_subprocess.return_value = 0
foo = snap.ensure("curl", "latest", classic=True, channel="latest/test")
mock_subprocess.assert_called_with(
["snap", "install", "curl", "--classic", '--channel="latest/test"'],
universal_newlines=True,
)
self.assertEqual(foo.present, True)
bar = snap.ensure("curl", "absent")
mock_subprocess.assert_called_with(["snap", "remove", "curl"], universal_newlines=True)
self.assertEqual(bar.present, False)
def test_raises_snap_not_found_error(self):
with self.assertRaises(snap.SnapError) as ctx:
snap.add("nothere")
self.assertEqual("<charms.operator_libs_linux.v0.snap.SnapError>", ctx.exception.name)
self.assertIn("Failed to install or refresh snap(s): nothere", ctx.exception.message)
| 35.380117 | 273 | 0.610496 |
4a2398e809daedb12a117098a708258e4de8073c | 17,568 | py | Python | sdk/apimanagement/azure-mgmt-apimanagement/azure/mgmt/apimanagement/aio/operations/_content_item_operations.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | [
"MIT"
] | 1 | 2021-09-07T18:39:05.000Z | 2021-09-07T18:39:05.000Z | sdk/apimanagement/azure-mgmt-apimanagement/azure/mgmt/apimanagement/aio/operations/_content_item_operations.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | [
"MIT"
] | null | null | null | sdk/apimanagement/azure-mgmt-apimanagement/azure/mgmt/apimanagement/aio/operations/_content_item_operations.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | [
"MIT"
] | 1 | 2022-03-04T06:21:56.000Z | 2022-03-04T06:21:56.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._content_item_operations import build_create_or_update_request, build_delete_request, build_get_entity_tag_request, build_get_request, build_list_by_service_request
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ContentItemOperations:
"""ContentItemOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~api_management_client.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace
def list_by_service(
self,
resource_group_name: str,
service_name: str,
content_type_id: str,
**kwargs: Any
) -> AsyncIterable["_models.ContentItemCollection"]:
"""Lists developer portal's content items specified by the provided content type.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param service_name: The name of the API Management service.
:type service_name: str
:param content_type_id: Content type identifier.
:type content_type_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ContentItemCollection or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~api_management_client.models.ContentItemCollection]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ContentItemCollection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_by_service_request(
resource_group_name=resource_group_name,
service_name=service_name,
content_type_id=content_type_id,
subscription_id=self._config.subscription_id,
template_url=self.list_by_service.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_by_service_request(
resource_group_name=resource_group_name,
service_name=service_name,
content_type_id=content_type_id,
subscription_id=self._config.subscription_id,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ContentItemCollection", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_by_service.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/contentTypes/{contentTypeId}/contentItems'} # type: ignore
@distributed_trace_async
async def get_entity_tag(
self,
resource_group_name: str,
service_name: str,
content_type_id: str,
content_item_id: str,
**kwargs: Any
) -> bool:
"""Returns the entity state (ETag) version of the developer portal's content item specified by its
identifier.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param service_name: The name of the API Management service.
:type service_name: str
:param content_type_id: Content type identifier.
:type content_type_id: str
:param content_item_id: Content item identifier.
:type content_item_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: bool, or the result of cls(response)
:rtype: bool
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_entity_tag_request(
resource_group_name=resource_group_name,
service_name=service_name,
content_type_id=content_type_id,
content_item_id=content_item_id,
subscription_id=self._config.subscription_id,
template_url=self.get_entity_tag.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
response_headers = {}
response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
if cls:
return cls(pipeline_response, None, response_headers)
return 200 <= response.status_code <= 299
get_entity_tag.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/contentTypes/{contentTypeId}/contentItems/{contentItemId}'} # type: ignore
@distributed_trace_async
async def get(
self,
resource_group_name: str,
service_name: str,
content_type_id: str,
content_item_id: str,
**kwargs: Any
) -> "_models.ContentItemContract":
"""Returns the developer portal's content item specified by its identifier.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param service_name: The name of the API Management service.
:type service_name: str
:param content_type_id: Content type identifier.
:type content_type_id: str
:param content_item_id: Content item identifier.
:type content_item_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ContentItemContract, or the result of cls(response)
:rtype: ~api_management_client.models.ContentItemContract
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ContentItemContract"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_request(
resource_group_name=resource_group_name,
service_name=service_name,
content_type_id=content_type_id,
content_item_id=content_item_id,
subscription_id=self._config.subscription_id,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
response_headers = {}
response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
deserialized = self._deserialize('ContentItemContract', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/contentTypes/{contentTypeId}/contentItems/{contentItemId}'} # type: ignore
@distributed_trace_async
async def create_or_update(
self,
resource_group_name: str,
service_name: str,
content_type_id: str,
content_item_id: str,
if_match: Optional[str] = None,
**kwargs: Any
) -> "_models.ContentItemContract":
"""Creates a new developer portal's content item specified by the provided content type.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param service_name: The name of the API Management service.
:type service_name: str
:param content_type_id: Content type identifier.
:type content_type_id: str
:param content_item_id: Content item identifier.
:type content_item_id: str
:param if_match: ETag of the Entity. Not required when creating an entity, but required when
updating an entity.
:type if_match: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ContentItemContract, or the result of cls(response)
:rtype: ~api_management_client.models.ContentItemContract
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ContentItemContract"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_create_or_update_request(
resource_group_name=resource_group_name,
service_name=service_name,
content_type_id=content_type_id,
content_item_id=content_item_id,
subscription_id=self._config.subscription_id,
if_match=if_match,
template_url=self.create_or_update.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
response_headers = {}
if response.status_code == 200:
response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
deserialized = self._deserialize('ContentItemContract', pipeline_response)
if response.status_code == 201:
response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
deserialized = self._deserialize('ContentItemContract', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/contentTypes/{contentTypeId}/contentItems/{contentItemId}'} # type: ignore
@distributed_trace_async
async def delete(
self,
resource_group_name: str,
service_name: str,
content_type_id: str,
content_item_id: str,
if_match: str,
**kwargs: Any
) -> None:
"""Removes the specified developer portal's content item.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param service_name: The name of the API Management service.
:type service_name: str
:param content_type_id: Content type identifier.
:type content_type_id: str
:param content_item_id: Content item identifier.
:type content_item_id: str
:param if_match: ETag of the Entity. ETag should match the current entity state from the header
response of the GET request or it should be * for unconditional update.
:type if_match: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_delete_request(
resource_group_name=resource_group_name,
service_name=service_name,
content_type_id=content_type_id,
content_item_id=content_item_id,
subscription_id=self._config.subscription_id,
if_match=if_match,
template_url=self.delete.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/contentTypes/{contentTypeId}/contentItems/{contentItemId}'} # type: ignore
| 45.161954 | 239 | 0.678222 |
4a239b330c4a49cd2ec5fdcb5d90d7d1725a85c0 | 2,079 | py | Python | search/search_utils.py | DDMAL/liberusualis | 1fd5ab9b3f2ac16296f1fb47ebc855f94b65ef3b | ["MIT"] | 1 | 2015-07-19T21:15:46.000Z | 2015-07-19T21:15:46.000Z | search/search_utils.py | DDMAL/liberusualis | 1fd5ab9b3f2ac16296f1fb47ebc855f94b65ef3b | ["MIT"] | 15 | 2015-05-08T17:00:39.000Z | 2022-03-11T23:12:12.000Z | search/search_utils.py | DDMAL/liberusualis | 1fd5ab9b3f2ac16296f1fb47ebc855f94b65ef3b | ["MIT"] | null | null | null |
import re
import math
def incorporate_zoom(dimension, zoom_difference):
return dimension / math.pow(2, zoom_difference)
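# Worked example for incorporate_zoom (derived from the formula above):
# incorporate_zoom(1024, 2) == 1024 / 2**2 == 256.0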
def get_transpositions(sequence):
""" Given a series of pitch names (no flats or sharps - just abcdefg), return a list of the 7 possible transpositions
of the melody. This is used when generating an elastic search query to look for all transpositions of a user
specified pitch sequence.
    The URL for the query will include 'q=pnames:' followed by the returned transpositions separated by commas.
    e.g. get_transpositions('cece') returns ['cece', 'dfdf', 'egeg', 'fafa', 'gbgb', 'acac', 'bdbd']
"""
sequence = str(sequence)
asciinum = map(ord,sequence)
def transposeUp(x):
if x < 103:
return x+1
else:
return x-6
transpositions = [sequence]
for i in range(1,7):
asciinum = map(transposeUp, asciinum)
transposed = ''.join(chr(i) for i in asciinum)#convert to string
transpositions = transpositions + [transposed]
return transpositions
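# Example, taken from the docstring above:
#   get_transpositions('cece')
#   -> ['cece', 'dfdf', 'egeg', 'fafa', 'gbgb', 'acac', 'bdbd']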
def get_neumes_length(neumes):
lengths = {
'punctum': 1,
'virga': 1,
'bivirga': 2,
'podatus': 2,
'pes': 2,
'clivis': 2,
'epiphonus': 2,
'cephalicus': 2,
'scandicus': 3,
'salicus': 3,
'ancus': 3,
'torculus': 3,
'porrectus': 3,
# Treat flexus as a different one so we can have porrectus flexus, etc
'resupinus': 1,
'flexus': 1,
'cavum': 1,
}
neumes = neumes.lower().split(' ')
length = 0
for neume in neumes:
length += lengths[neume]
return length
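# Example based on the lengths table above: 'Podatus Punctum' covers 2 + 1 notes,
# so get_neumes_length('Podatus Punctum') returns 3.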
def valid_pitch_sequence(sequence):
# Should already be lowercase
pattern = re.compile("[^a-g]")
if pattern.search(sequence) is not None:
return False
else:
return True
def valid_contour_sequence(sequence):
# Already lowercase
pattern = re.compile("[^rud]")
if pattern.search(sequence) is not None:
return False
else:
return True
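# Examples based on the regexes above:
#   valid_pitch_sequence('cece')  -> True   (only the letters a-g are allowed)
#   valid_pitch_sequence('cxce')  -> False
#   valid_contour_sequence('rud') -> True   (only r, u and d are allowed)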
| 28.875 | 120 | 0.610871 |
4a239b8574076e7b3abe7f4aefc3c636c177c2d4 | 994 | py | Python | examples/async_version/async_webhook_receiver.py | YUKAI/emo-platform-api-python | ba350f7cda7cd23627a0f3363490a56dfb6bd38b | ["MIT"] | null | null | null | examples/async_version/async_webhook_receiver.py | YUKAI/emo-platform-api-python | ba350f7cda7cd23627a0f3363490a56dfb6bd38b | ["MIT"] | 1 | 2022-01-12T08:35:23.000Z | 2022-01-31T04:41:35.000Z | examples/async_version/async_webhook_receiver.py | YUKAI/emo-platform-api-python | ba350f7cda7cd23627a0f3363490a56dfb6bd38b | ["MIT"] | null | null | null |
"""Emo Platform API python example Receiving webhook data.
"""
import asyncio
from emo_platform import AsyncClient, WebHook
client = AsyncClient()
# Please replace "YOUR WEBHOOK URL" with the URL forwarded to http://localhost:8000
client.create_webhook_setting(WebHook("YOUR WEBHOOK URL"))
async def print_queue(queue):
while True:
item = await queue.get()
print("body:", item)
print("data:", item.data)
async def main():
queue = asyncio.Queue()
@client.event("message.received")
async def message_callback(body):
await asyncio.sleep(1) # Do not use time.sleep in async def
await queue.put(body)
# Create task you want to execute in parallel
task_queue = asyncio.create_task(print_queue(queue))
# Await start_webhook_event last.
# Give task list to be executed in parallel as the argument.
await client.start_webhook_event(port=8000, tasks=[task_queue])
if __name__ == "__main__":
asyncio.run(main())
| 26.157895 | 83 | 0.700201 |
4a239c017f1f36ca87f05a43d6876b41ff211d4b | 1,112 | py | Python | libG/hclust_sort.py | fraunhofer-iais/IAIS-Jupyter-Snippets-Extension | b089bf08dde9c923209065ab8589382669b7a005 | ["MIT"] | 3 | 2020-09-29T12:01:10.000Z | 2020-09-30T07:36:21.000Z | libG/hclust_sort.py | fraunhofer-iais/IAIS-Jupyter-Snippets-Extension | b089bf08dde9c923209065ab8589382669b7a005 | ["MIT"] | null | null | null | libG/hclust_sort.py | fraunhofer-iais/IAIS-Jupyter-Snippets-Extension | b089bf08dde9c923209065ab8589382669b7a005 | ["MIT"] | 1 | 2020-09-11T09:57:50.000Z | 2020-09-11T09:57:50.000Z |
from scipy.spatial.distance import pdist
import scipy.cluster.hierarchy as hierarchy
from sklearn.preprocessing import MinMaxScaler
import numpy as np
import pandas as pd
import pylab as pl
def hclust_sort(df, sort_cols=True, sort_rows=True):
'''Sort an array by the hierarchical leaf ordering.
Parameters:
sort_cols: apply hclust sorting to columns
sort_rows: apply hclust sorting to rows
Return:
the dataframe sorted according to the hierarchical clustering leaf-order
'''
df_orig = df.copy()
df = df.copy()
scaler = MinMaxScaler((0.01,1.))
scaled_values = scaler.fit_transform(df)
df.loc[:,:] = scaled_values
row_sorting = list(range(df.shape[0]))
col_sorting = list(range(df.shape[1]))
if sort_rows:
D = pdist(df, 'euclidean')
Z = hierarchy.linkage(D)
row_sorting = hierarchy.leaves_list(Z)
if sort_cols:
D = pdist(df.T, 'euclidean')
Z = hierarchy.linkage(D)
col_sorting = hierarchy.leaves_list(Z)
return df_orig.iloc[row_sorting][df_orig.columns[col_sorting]]
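# Minimal usage sketch (hypothetical data, not part of the original module):
#   df = pd.DataFrame(np.random.rand(10, 5), columns=list('abcde'))
#   df_both = hclust_sort(df)                   # reorder rows and columns
#   df_rows = hclust_sort(df, sort_cols=False)  # reorder rows only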
| 29.263158 | 80 | 0.676259 |
4a239c3086210014d220d0f5e8b419535af9b862 | 11,856 | py | Python | docs/conf.py | nhirschey/LightGBM | 846e10c7a45a47434f19e5e2bab77891cfd97e69 | ["MIT"] | 1 | 2021-02-23T05:17:34.000Z | 2021-02-23T05:17:34.000Z | docs/conf.py | nhirschey/LightGBM | 846e10c7a45a47434f19e5e2bab77891cfd97e69 | ["MIT"] | null | null | null | docs/conf.py | nhirschey/LightGBM | 846e10c7a45a47434f19e5e2bab77891cfd97e69 | ["MIT"] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# LightGBM documentation build configuration file, created by
# sphinx-quickstart on Thu May 4 14:30:58 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute.
"""Sphinx configuration file."""
import datetime
import os
import sys
import sphinx
from distutils.dir_util import copy_tree
from docutils.nodes import reference
from docutils.parsers.rst import Directive
from docutils.transforms import Transform
from re import compile
from sphinx.errors import VersionRequirementError
from subprocess import PIPE, Popen
from unittest.mock import Mock
CURR_PATH = os.path.abspath(os.path.dirname(__file__))
LIB_PATH = os.path.join(CURR_PATH, os.path.pardir, 'python-package')
sys.path.insert(0, LIB_PATH)
INTERNAL_REF_REGEX = compile(r"(?P<url>\.\/.+)(?P<extension>\.rst)(?P<anchor>$|#)")
# -- mock out modules
MOCK_MODULES = ['numpy', 'scipy', 'scipy.sparse',
'sklearn', 'matplotlib', 'pandas', 'graphviz', 'dask', 'dask.distributed']
for mod_name in MOCK_MODULES:
sys.modules[mod_name] = Mock()
class InternalRefTransform(Transform):
"""Replaces '.rst' with '.html' in all internal links like './[Something].rst[#anchor]'."""
default_priority = 210
"""Numerical priority of this transform, 0 through 999."""
def apply(self, **kwargs):
"""Apply the transform to the document tree."""
for section in self.document.traverse(reference):
if section.get("refuri") is not None:
section["refuri"] = INTERNAL_REF_REGEX.sub(r"\g<url>.html\g<anchor>", section["refuri"])
class IgnoredDirective(Directive):
"""Stub for unknown directives."""
has_content = True
def run(self):
"""Do nothing."""
return []
# -- General configuration ------------------------------------------------
os.environ['LIGHTGBM_BUILD_DOC'] = '1'
C_API = os.environ.get('C_API', '').lower().strip() != 'no'
RTD = bool(os.environ.get('READTHEDOCS', ''))
# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = '2.1.0' # Due to sphinx.ext.napoleon, autodoc_typehints
if needs_sphinx > sphinx.__version__:
message = 'This project needs at least Sphinx v%s' % needs_sphinx
raise VersionRequirementError(message)
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.autosummary',
'sphinx.ext.todo',
'sphinx.ext.viewcode',
'sphinx.ext.napoleon',
]
autodoc_default_flags = ['members', 'inherited-members', 'show-inheritance']
autodoc_default_options = {
"members": True,
"inherited-members": True,
"show-inheritance": True,
}
# hide type hints in API docs
autodoc_typehints = "none"
# Generate autosummary pages. Output should be set with: `:toctree: pythonapi/`
autosummary_generate = ['Python-API.rst']
# Only the class' docstring is inserted.
autoclass_content = 'class'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'LightGBM'
copyright = '%s, Microsoft Corporation' % str(datetime.datetime.now().year)
author = 'Microsoft Corporation'
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = os.path.join(CURR_PATH, 'logo', 'LightGBM_logo_grey_text.svg')
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = os.path.join(CURR_PATH, '_static', 'images', 'favicon.ico')
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
with open(os.path.join(CURR_PATH, os.path.pardir, 'VERSION.txt'), 'r') as f:
# The short X.Y version.
version = f.read().strip()
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'default'
# -- Configuration for C API docs generation ------------------------------
if C_API:
extensions.extend([
'breathe',
])
breathe_projects = {
"LightGBM": os.path.join(CURR_PATH, 'doxyoutput', 'xml')
}
breathe_default_project = "LightGBM"
breathe_domain_by_extension = {
"h": "c",
}
breathe_show_define_initializer = True
c_id_attributes = ['LIGHTGBM_C_EXPORT']
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
'includehidden': False,
'logo_only': True,
}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'LightGBMdoc'
# -- Options for LaTeX output ---------------------------------------------
# The name of an image file (relative to this directory) to place at the top of
# the title page.
latex_logo = os.path.join(CURR_PATH, 'logo', 'LightGBM_logo_black_text_small.png')
def generate_doxygen_xml(app):
"""Generate XML documentation for C API by Doxygen.
Parameters
----------
app : object
The application object representing the Sphinx process.
"""
doxygen_args = [
"INPUT={}".format(os.path.join(CURR_PATH, os.path.pardir,
'include', 'LightGBM', 'c_api.h')),
"OUTPUT_DIRECTORY={}".format(os.path.join(CURR_PATH, 'doxyoutput')),
"GENERATE_HTML=NO",
"GENERATE_LATEX=NO",
"GENERATE_XML=YES",
"XML_OUTPUT=xml",
"XML_PROGRAMLISTING=YES",
r'ALIASES="rst=\verbatim embed:rst:leading-asterisk"',
r'ALIASES+="endrst=\endverbatim"',
"ENABLE_PREPROCESSING=YES",
"MACRO_EXPANSION=YES",
"EXPAND_ONLY_PREDEF=NO",
"SKIP_FUNCTION_MACROS=NO",
"SORT_BRIEF_DOCS=YES",
"WARN_AS_ERROR=YES",
]
doxygen_input = '\n'.join(doxygen_args)
doxygen_input = bytes(doxygen_input, "utf-8")
if not os.path.exists(os.path.join(CURR_PATH, 'doxyoutput')):
os.makedirs(os.path.join(CURR_PATH, 'doxyoutput'))
try:
# Warning! The following code can cause buffer overflows on RTD.
# Consider suppressing output completely if RTD project silently fails.
# Refer to https://github.com/svenevs/exhale
# /blob/fe7644829057af622e467bb529db6c03a830da99/exhale/deploy.py#L99-L111
process = Popen(["doxygen", "-"],
stdin=PIPE, stdout=PIPE, stderr=PIPE)
stdout, stderr = process.communicate(doxygen_input)
output = '\n'.join([i.decode('utf-8') for i in (stdout, stderr) if i is not None])
if process.returncode != 0:
raise RuntimeError(output)
else:
print(output)
except BaseException as e:
raise Exception("An error has occurred while executing Doxygen\n" + str(e))
def generate_r_docs(app):
"""Generate documentation for R-package.
Parameters
----------
app : object
The application object representing the Sphinx process.
"""
commands = """
/home/docs/.conda/bin/conda create \
-q \
-y \
-c conda-forge \
-n r_env \
cmake=3.18.2=ha30ef3c_0 \
r-base=4.0.3=ha43b4e8_3 \
r-data.table=1.13.2=r40h0eb13af_0 \
r-jsonlite=1.7.1=r40hcdcec82_0 \
r-matrix=1.2_18=r40h7fa42b6_3 \
r-pkgdown=1.6.1=r40h6115d3f_0 \
r-roxygen2=7.1.1=r40h0357c0b_0
source /home/docs/.conda/bin/activate r_env
export TAR=/bin/tar
cd {0}
export R_LIBS="$CONDA_PREFIX/lib/R/library"
Rscript build_r.R || exit -1
cd {1}
Rscript -e "roxygen2::roxygenize(load = 'installed')" || exit -1
Rscript -e "pkgdown::build_site( \
lazy = FALSE \
, install = FALSE \
, devel = FALSE \
, examples = TRUE \
, run_dont_run = TRUE \
, seed = 42L \
, preview = FALSE \
, new_process = TRUE \
)
" || exit -1
cd {0}
""".format(os.path.join(CURR_PATH, os.path.pardir), os.path.join(CURR_PATH, os.path.pardir, "lightgbm_r"))
try:
# Warning! The following code can cause buffer overflows on RTD.
# Consider suppressing output completely if RTD project silently fails.
# Refer to https://github.com/svenevs/exhale
# /blob/fe7644829057af622e467bb529db6c03a830da99/exhale/deploy.py#L99-L111
process = Popen(['/bin/bash'],
stdin=PIPE, stdout=PIPE, stderr=PIPE,
universal_newlines=True)
stdout, stderr = process.communicate(commands)
output = '\n'.join([i for i in (stdout, stderr) if i is not None])
if process.returncode != 0:
raise RuntimeError(output)
else:
print(output)
except BaseException as e:
raise Exception("An error has occurred while generating documentation for R-package\n" + str(e))
def setup(app):
"""Add new elements at Sphinx initialization time.
Parameters
----------
app : object
The application object representing the Sphinx process.
"""
first_run = not os.path.exists(os.path.join(CURR_PATH, '_FIRST_RUN.flag'))
if first_run and RTD:
open(os.path.join(CURR_PATH, '_FIRST_RUN.flag'), 'w').close()
if C_API:
app.connect("builder-inited", generate_doxygen_xml)
else:
app.add_directive('doxygenfile', IgnoredDirective)
if RTD: # build R docs only on Read the Docs site
if first_run:
app.connect("builder-inited", generate_r_docs)
app.connect("build-finished",
lambda app, _: copy_tree(os.path.join(CURR_PATH, os.path.pardir, "lightgbm_r", "docs"),
os.path.join(app.outdir, "R"), verbose=0))
app.add_transform(InternalRefTransform)
add_js_file = getattr(app, 'add_js_file', False) or app.add_javascript
add_js_file("js/script.js")
| 35.818731 | 110 | 0.654943 |
4a239dc99ad12f5659a260272159c5050532d577 | 7,172 | py | Python | src/CLIENT/client.py | f0lg0/pyChat | abade1f8a99d7eb38634b59622726a401dbab575 | ["MIT"] | 13 | 2020-06-30T18:59:24.000Z | 2022-03-09T17:57:49.000Z | src/CLIENT/client.py | f0lg0/pyChat | abade1f8a99d7eb38634b59622726a401dbab575 | ["MIT"] | 1 | 2021-01-18T19:58:50.000Z | 2021-01-18T20:00:22.000Z | src/CLIENT/client.py | f0lg0/pyChat | abade1f8a99d7eb38634b59622726a401dbab575 | ["MIT"] | 5 | 2020-06-30T19:06:48.000Z | 2022-03-12T21:47:29.000Z |
import socket
import json
import threading
import argparse
import sys
import os
import time
from datetime import datetime
from message import Message
from streaming import createMsg, streamData, initializeAES
import pyDHE
import eel
# this is temporary, just for debugging when you want to open two clients on one computer
# Note that there is a small chance the random port numbers will be the same and crash anyway.
import random
# [GLOBAL VARIABLES]
client = None # so we can use it in exposed functions
eel.init('./GUI/web') # initializing eel
eelPort = 42069 # default GUI port
clientDH = pyDHE.new() # diffiehellman object
# contains names only of all the clients connected
client_list = [];
class Client:
def __init__(self, server_ip, port, client_ip):
self.SERVER_IP = server_ip
self.PORT = port
self.CLIENT_IP = client_ip
self.finalDecryptionKey = None
print(f"[*] Host: {self.CLIENT_IP} | Port: {self.PORT}")
self.client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
def connectToServer(self):
try:
self.client.connect((self.SERVER_IP, self.PORT))
except socket.error as e:
print(str(e))
sys.exit()
iv = self.recvVector() # we receive the vector
finalDecryptionKey = self.recvServerKey()
self.sharePublicInfo()
initializeAES(str(finalDecryptionKey).encode("utf-8"), iv.cont) # we even parse the vector message content
self.setUsername()
def recvServerKey(self):
# receives the servers public key and uses it to generate the final decryption key
serverKey = Message.from_json(streamData(self.client).decode("utf-8"))
return clientDH.update(int(serverKey.cont))
def sharePublicInfo(self):
packet = Message(self.CLIENT_IP, self.SERVER_IP, "temp", str(datetime.now()), str(clientDH.getPublicKey()), 'key_exc')
self.client.send(packet.pack())
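    # Descriptive note on the key exchange implemented by the two methods above:
    # the client receives the server's Diffie-Hellman public key (recvServerKey),
    # feeds it to clientDH.update() to obtain the shared secret, and answers with
    # its own public key (sharePublicInfo); connectToServer() then passes that
    # shared secret and the received IV to initializeAES().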
def recvVector(self):
iv = streamData(self.client).decode("utf-8")
return Message.from_json(iv)
def setUsername(self):
while True:
self.USERNAME = input("Enter username> ")
if self.USERNAME:
if self.USERNAME != "*server*":
# encrypted_username = self.cipher.encrypt(self.USERNAME.encode("utf-8"))
packet = Message(self.CLIENT_IP, self.SERVER_IP, "temp", str(datetime.now()), self.USERNAME, 'setuser')
self.client.send(packet.pack())
check = streamData(self.client).decode("utf-8")
check = Message.from_json(check)
print(check.cont)
if check.cont != "[*] Username already in use!":
break
else:
print("Can't set username as *server*!")
else:
print("Username can't be empty!")
def sendMsg(self, to_send_msg):
if to_send_msg == "[export_chat]":
packet = Message(self.CLIENT_IP, self.SERVER_IP, self.USERNAME, str(datetime.now()), to_send_msg, 'export')
else:
packet = Message(self.CLIENT_IP, self.SERVER_IP, self.USERNAME, str(datetime.now()), to_send_msg, 'default')
self.client.send(packet.pack())
def receiveData(self):
while True:
try:
data = streamData(self.client)
data = data.decode("utf-8")
data = Message.from_json(data) # it's a dataclass object
except AttributeError:
print("\r[*] Connection closed by the server")
break
if data.typ == "export":
timestamp = str(datetime.now())
timestamp = timestamp.replace(":", ".") # windowz is stoopid
chat_file = f"./exported/chat{timestamp}.txt"
try:
with open(chat_file, "wb+") as chat:
chat.write(data.cont.encode("utf-8"))
print("\r[*] Writing to file...")
print(f"[*] Finished! You can find the file at {chat_file}")
except:
print('\r' + "[*] Something went wrong")
elif data.typ == "client_list_update_add" or data.typ == "disconnection":
updateClientList(data.cont)
else:
eel.writeMsg(data.cont, data.username)
self.client.close()
# updates the gui with the list 'c_list'
def updateClientList(c_list):
client_list = c_list;
# update the GUI
eel.updateClientList(client_list);
# [Eel functions]
# we need to set a name scheme for these functions cuz rn they are confusing
@eel.expose
def exposeSendMsg(to_send_msg):
client.sendMsg(to_send_msg)
@eel.expose
def getUsername():
return client.USERNAME
def getArgs():
parser = argparse.ArgumentParser()
parser.add_argument("-s", "--server", dest = "server_ip", help = "Enter server IP")
parser.add_argument("-p", "--port", dest = "server_port", help = "Enter server PORT")
options = parser.parse_args()
if not options.server_ip and not options.server_port:
raise Exception # raising exception in case the user doesn't provide values from the terminal
if not options.server_ip:
parser.error("*** Please specify a server IP ***")
elif not options.server_port:
parser.error("*** Please specify a port number ***")
else:
return options
def startEel():
try:
# eel.start('main.html', port=random.choice(range(8000, 8080))) --> use this if you want to open multiple clients on one computer
eel.start('main.html', port=eelPort)
except (SystemExit, MemoryError, KeyboardInterrupt): # this catches the exception thrown if the user closes the window
print("*** Closing the app... ***")
os._exit(0) # this is actually super overkill but it works
def main():
try:
os.mkdir('./exported')
except FileExistsError:
pass
try:
options = getArgs()
SERVER_IP = options.server_ip
PORT = int(options.server_port)
except Exception: # in case the user doesn't provide values we ask him to enter them
SERVER_IP = input("*** Enter server IP address > ")
PORT = int(input("*** Enter server PORT number > "))
CLIENT_IP = socket.gethostbyname(socket.gethostname())
global client
client = Client(SERVER_IP, PORT, CLIENT_IP)
client.connectToServer()
# threding eel in the background
eThread = threading.Thread(target = startEel)
eThread.daemon = True
eThread.start()
client.receiveData() # this is a loop and also streamData is blocking
if __name__ == "__main__":
try:
# checking if port 42069 (default port for the GUI) is free to start the GUI
tempSock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
check = tempSock.bind(('127.0.0.1', eelPort))
tempSock.close()
main()
except socket.error as e:
print(f"[!] PORT NUMBER {eelPort} ISN'T FREE, YOU CAN'T START THE APP [!]")
| 32.899083 | 137 | 0.617262 |
4a239e754acf4d5bbc34d311139219b96cb65277 | 1,410 | py | Python | opt/Ipxe/var/www/Kickstart/boot/module/prova.py | EarlRagnar78/LinuxAutomateDeploy | 3d4e0ef89b3b80972e49ea48361e51a9622156d9 | ["Apache-2.0"] | null | null | null | opt/Ipxe/var/www/Kickstart/boot/module/prova.py | EarlRagnar78/LinuxAutomateDeploy | 3d4e0ef89b3b80972e49ea48361e51a9622156d9 | ["Apache-2.0"] | null | null | null | opt/Ipxe/var/www/Kickstart/boot/module/prova.py | EarlRagnar78/LinuxAutomateDeploy | 3d4e0ef89b3b80972e49ea48361e51a9622156d9 | ["Apache-2.0"] | null | null | null |
# Script to test importing a Python module from a URL:
#!/usr/bin/python
import urllib, sys
def import_URL(URL):
exec urllib.urlopen(URL).read() in globals()
"""
from urllib2 import urlopen
r = urlopen('http://urlHere/fileHere')
f = open('filenameToWrite', 'w')
f.write(r.read())
f.close()
import filenameWithout.PyInIt
"""
def import_path(fullpath):
"""
Import a file with full path specification. Allows one to
import from anywhere, something __import__ does not do.
"""
import os,sys
path, filename = os.path.split(fullpath)
filename, ext = os.path.splitext(filename)
sys.path.append(path)
module = __import__(filename)
reload(module) # Might be out of date
del sys.path[-1]
return module
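# Example (sketch): the helper used further below could also be loaded directly
# by path instead of appending to sys.path:
#   mount = import_path("/root/scripts/util/mount.py")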
#import_path("/var/www/Kickstart/boot/python/mount.py")
#module.list_media_devices
#sys.path.append(/var/www/Kickstart/boot/python/)
sys.path.append("/root/scripts/util/")
#to add later
#sys.path.insert(0, "/var/www/Kickstart/boot/python/")
#print(sys.path)
import mount
#return list of block devices
listdevices = mount.list_media_devices()
print(len(listdevices))
if (len(listdevices) > 0):
for device in listdevices:
print(device.device)
print(device.drive)
print(device.mounted)
print(device.size)
print(device.model)
print(device.vendor)
#prova = Device(test)
#print(prova)
print("hello world")
| 24.736842 | 61 | 0.692199 |
4a239f3b061348302cafc57bd6c26c78737ac053 | 633 | py | Python | scripts/demo.py | filesmuggler/rl-physnet | b6d9886c15d6619df331866cf6a98c61da8413e9 | ["MIT"] | null | null | null | scripts/demo.py | filesmuggler/rl-physnet | b6d9886c15d6619df331866cf6a98c61da8413e9 | ["MIT"] | null | null | null | scripts/demo.py | filesmuggler/rl-physnet | b6d9886c15d6619df331866cf6a98c61da8413e9 | ["MIT"] | null | null | null |
import yaml
import world
from utils.text import TextFlag, log
NUM_EPISODES = 10
NUM_ACTIONS = 10
ENV_CONFIG = yaml.safe_load(open("../config/demo.yaml", 'r'))
if __name__ == "__main__":
myenv = world.environment.pusher.PusherEnvDemo(ENV_CONFIG)
myenv.rog.object_types = ['cube.obj']
myenv.reset()
for _ in range(NUM_EPISODES):
for _ in range(NUM_ACTIONS):
action = world.action.primitives.PushAction.random_sample()
observations, reward, done, info = myenv.step(action=action)
log(TextFlag.INFO, info["observations_numpy"])
myenv.reset()
myenv.stop_sim()
| 26.375 | 72 | 0.674566 |
4a23a00203b8cec140b4b8c0b930fa33b38e28e0 | 968 | py | Python | src/building_database/build_intervals.py | wrs28/safe_extubation | ad4e3e89867f6086736470a7a436bf25b8257318 | ["MIT"] | 2 | 2020-07-01T20:08:52.000Z | 2020-07-03T14:51:41.000Z | src/building_database/build_intervals.py | wrs28/safe_extubation | ad4e3e89867f6086736470a7a436bf25b8257318 | ["MIT"] | null | null | null | src/building_database/build_intervals.py | wrs28/safe_extubation | ad4e3e89867f6086736470a7a436bf25b8257318 | ["MIT"] | null | null | null |
import pandas as pd
import sqlalchemy
from directories import print_log, engine
N_WINDOWS = 48
def main():
with engine.connect() as connection:
vaso_episodes = pd.read_sql("pressors_by_icustay", con=connection, index_col="ICUSTAY_ID")
print_log("building hour-long intervals for each icustay")
interval_splits = [pd.Series(vaso_episodes.PRESSOR_START_SEC - i*60*60, name=i, dtype=pd.Int32Dtype()) for i in range(N_WINDOWS+1)]
interval_splits = pd.concat(interval_splits, axis=1)
interval_splits = interval_splits.join(vaso_episodes[["SUBJECT_ID","HADM_ID"]])
print_log("\tsaving intervals to database `PressorGauge` in table `intervals`")
with engine.connect() as connection:
interval_splits.to_sql("interval_splits", con=connection, if_exists="replace", index_label="ICUSTAY_ID")
print_log("Done computing intervals!")
print_log()
# execute only if run as a script
if __name__ == "__main__":
main()
| 30.25 | 135 | 0.733471 |
4a23a255129dbb0b6bf903c9d385faa57c544ce0 | 1,635 | py | Python | demos/Multiscale/adoption_funnel/model/run.py | w-ghub/demos | 6382676fae89bd5a190626612712fcedf17bca6d | ["MIT"] | 56 | 2020-07-08T23:23:15.000Z | 2022-03-11T20:43:09.000Z | demos/Multiscale/adoption_funnel/model/run.py | w-ghub/demos | 6382676fae89bd5a190626612712fcedf17bca6d | ["MIT"] | 41 | 2020-07-11T23:24:06.000Z | 2022-01-28T13:28:07.000Z | demos/Multiscale/adoption_funnel/model/run.py | w-ghub/demos | 6382676fae89bd5a190626612712fcedf17bca6d | ["MIT"] | 39 | 2020-07-15T11:35:04.000Z | 2022-02-01T16:02:51.000Z |
import pandas as pd
from .parts.utils import *
from model import config
from cadCAD.engine import ExecutionMode, ExecutionContext,Executor
from cadCAD import configs
def get_M(k, v):
if k == 'sim_config':
k, v = 'M', v['M']
return k, v
config_ids = [
dict(
get_M(k, v) for k, v in config.__dict__.items() if k in ['simulation_id', 'run_id', 'sim_config', 'subset_id']
) for config in configs
]
def run(drop_midsteps=True):
'''
Definition:
Run simulation
'''
exec_mode = ExecutionMode()
local_mode_ctx = ExecutionContext(context=exec_mode.local_mode)
simulation = Executor(exec_context=local_mode_ctx, configs=configs)
raw_system_events, tensor_field, sessions = simulation.execute()
# Result System Events DataFrame
df = pd.DataFrame(raw_system_events)
config_ids = [
dict(
get_M(k, v) for k, v in config.__dict__.items() if k in ['simulation_id', 'run_id', 'sim_config', 'subset_id']
) for config in configs
]
results = pd.DataFrame()
for i, config_id in enumerate(config_ids):
params = config_id['M']
result_record = pd.DataFrame.from_records([tuple([i for i in params.values()])], columns=list(params.keys()))
sub_df = df[df.subset == config_id['subset_id']]
max_substep = max(sub_df.substep)
is_droppable = (sub_df.substep != max_substep) & (sub_df.substep != 0)
sub_df.drop(sub_df[is_droppable].index, inplace=True)
result_record['dataset'] = [sub_df]
results = results.append(result_record)
return results.reset_index()
| 26.803279 | 118 | 0.655046 |
4a23a2dc131f1763c135eedeb47017340b9f7f06 | 90 | py | Python | params.py | roinaveiro/poisoning_hmms | 2f695ad9d61756468e63028dbedf98c59368f29d | ["MIT"] | 3 | 2022-02-01T03:42:33.000Z | 2022-02-01T03:42:58.000Z | params.py | roinaveiro/poisoning_hmms | 2f695ad9d61756468e63028dbedf98c59368f29d | ["MIT"] | 6 | 2022-03-02T10:29:44.000Z | 2022-03-31T23:33:40.000Z | params.py | roinaveiro/poisoning_hmms | 2f695ad9d61756468e63028dbedf98c59368f29d | ["MIT"] | null | null | null |
import numpy as np
w1 = 0.7
w2 = 1 - w1
T = 10
theta = 1
epsilon = 2
zeta = 3
k = 1000
| 7.5 | 18 | 0.566667 |
4a23a37b8f88f7c060f5904861ba903a33b3f473 | 370 | py | Python | musicdb-app/Backend-django/music_service/migrations/0003_auto_20210317_1535.py | comp-333-rad/hw-3 | a52fcc4fd40a207783950a15fac89549b193d70d | ["MIT"] | null | null | null | musicdb-app/Backend-django/music_service/migrations/0003_auto_20210317_1535.py | comp-333-rad/hw-3 | a52fcc4fd40a207783950a15fac89549b193d70d | ["MIT"] | 4 | 2021-04-20T02:30:01.000Z | 2021-04-21T06:03:51.000Z | musicdb-app/Backend-django/music_service/migrations/0003_auto_20210317_1535.py | comp-333-rad/hw-3 | a52fcc4fd40a207783950a15fac89549b193d70d | ["MIT"] | null | null | null |
# Generated by Django 3.1.7 on 2021-03-17 15:35
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('music_service', '0002_auto_20210317_1358'),
]
operations = [
migrations.RenameField(
model_name='songdetail',
old_name='genre',
new_name='name',
),
]
| 19.473684 | 53 | 0.591892 |
4a23a453d1213c7f20e7f5b6c590fc4d79687eef | 6,773 | py | Python | pywikibot/logging.py | Phantom-42/pywikibot | 9bf6568598377ec58215d841b51b52c6d9248348 | ["MIT"] | 3 | 2020-06-06T21:47:04.000Z | 2021-09-08T18:22:59.000Z | pywikibot/logging.py | Phantom-42/pywikibot | 9bf6568598377ec58215d841b51b52c6d9248348 | ["MIT"] | null | null | null | pywikibot/logging.py | Phantom-42/pywikibot | 9bf6568598377ec58215d841b51b52c6d9248348 | ["MIT"] | 1 | 2018-01-04T14:09:37.000Z | 2018-01-04T14:09:37.000Z |
# -*- coding: utf-8 -*-
"""Logging functions."""
#
# (C) Pywikibot team, 2010-2016
#
# Distributed under the terms of the MIT license.
#
from __future__ import absolute_import, unicode_literals
__version__ = '$Id: 4a23a453d1213c7f20e7f5b6c590fc4d79687eef $'
import logging
import os
import sys
# logging levels
from logging import DEBUG, INFO, WARNING, ERROR, CRITICAL
STDOUT = 16
VERBOSE = 18
INPUT = 25
if sys.version_info[0] > 2:
unicode = str
_init_routines = []
_inited_routines = []
def add_init_routine(routine):
"""Add a routine to be run as soon as possible."""
_init_routines.append(routine)
def _init():
"""Init any routines which have not already been called."""
for init_routine in _init_routines:
if init_routine not in _inited_routines:
init_routine()
_inited_routines.append(init_routine)
# Clear the list of routines to be inited
_init_routines[:] = []
# User output/logging functions
# Six output functions are defined. Each requires a unicode or string
# argument. All of these functions generate a message to the log file if
# logging is enabled ("-log" or "-debug" command line arguments).
# The functions output(), stdout(), warning(), and error() all display a
# message to the user through the logger object; the only difference is the
# priority level, which can be used by the application layer to alter the
# display. The stdout() function should be used only for data that is
# the "result" of a script, as opposed to information messages to the
# user.
# The function log() by default does not display a message to the user, but
# this can be altered by using the "-verbose" command line option.
# The function debug() only logs its messages, they are never displayed on
# the user console. debug() takes a required second argument, which is a
# string indicating the debugging layer.
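# Example usage, as a sketch of the conventions described above:
#   output(u'Processing page...')      # normal informational message
#   stdout(u'result data')             # script results, safe to pipe
#   warning(u'Page is a redirect')     # higher-priority message
#   debug(u'raw API reply', 'comms')   # log-only; the second argument names the layer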
def logoutput(text, decoder=None, newline=True, _level=INFO, _logger="",
**kwargs):
"""Format output and send to the logging module.
Helper function used by all the user-output convenience functions.
"""
if _logger:
logger = logging.getLogger("pywiki." + _logger)
else:
logger = logging.getLogger("pywiki")
# invoke any init routines
if _init_routines:
_init()
# frame 0 is logoutput() in this module,
# frame 1 is the convenience function (output(), etc.)
# frame 2 is whatever called the convenience function
frame = sys._getframe(2)
module = os.path.basename(frame.f_code.co_filename)
context = {'caller_name': frame.f_code.co_name,
'caller_file': module,
'caller_line': frame.f_lineno,
'newline': ("\n" if newline else "")}
if decoder:
text = text.decode(decoder)
elif not isinstance(text, unicode):
if not isinstance(text, str):
# looks like text is a non-text object.
# Maybe it has a __unicode__ builtin ?
# (allows to print Page, Site...)
text = unicode(text)
else:
try:
text = text.decode('utf-8')
except UnicodeDecodeError:
text = text.decode('iso8859-1')
logger.log(_level, text, extra=context, **kwargs)
def output(text, decoder=None, newline=True, toStdout=False, **kwargs):
r"""Output a message to the user via the userinterface.
Works like print, but uses the encoding used by the user's console
(console_encoding in the configuration file) instead of ASCII.
If decoder is None, text should be a unicode string. Otherwise it
should be encoded in the given encoding.
If newline is True, a line feed will be added after printing the text.
If toStdout is True, the text will be sent to standard output,
so that it can be piped to another process. All other text will
be sent to stderr. See: https://en.wikipedia.org/wiki/Pipeline_%28Unix%29
text can contain special sequences to create colored output. These
consist of the escape character \03 and the color name in curly braces,
e. g. \03{lightpurple}. \03{default} resets the color. By using the
color_format method from pywikibot.tools.formatter, the escape character
may be omitted.
Other keyword arguments are passed unchanged to the logger; so far, the
only argument that is useful is "exc_info=True", which causes the
log message to include an exception traceback.
"""
    if toStdout:  # maintained for backwards-compatibility only
from pywikibot.tools import issue_deprecation_warning # noqa
issue_deprecation_warning('"toStdout" parameter',
'pywikibot.stdout()', 2)
logoutput(text, decoder, newline, STDOUT, **kwargs)
else:
logoutput(text, decoder, newline, INFO, **kwargs)
def stdout(text, decoder=None, newline=True, **kwargs):
"""Output script results to the user via the userinterface."""
logoutput(text, decoder, newline, STDOUT, **kwargs)
def warning(text, decoder=None, newline=True, **kwargs):
"""Output a warning message to the user via the userinterface."""
logoutput(text, decoder, newline, WARNING, **kwargs)
def error(text, decoder=None, newline=True, **kwargs):
"""Output an error message to the user via the userinterface."""
logoutput(text, decoder, newline, ERROR, **kwargs)
def log(text, decoder=None, newline=True, **kwargs):
"""Output a record to the log file."""
logoutput(text, decoder, newline, VERBOSE, **kwargs)
def critical(text, decoder=None, newline=True, **kwargs):
"""Output a critical record to the log file."""
logoutput(text, decoder, newline, CRITICAL, **kwargs)
def debug(text, layer, decoder=None, newline=True, **kwargs):
"""Output a debug record to the log file.
@param layer: The name of the logger that text will be sent to.
"""
logoutput(text, decoder, newline, DEBUG, layer, **kwargs)
def exception(msg=None, decoder=None, newline=True, tb=False, **kwargs):
"""Output an error traceback to the user via the userinterface.
Use directly after an 'except' statement::
...
except Exception:
pywikibot.exception()
...
or alternatively::
...
except Exception as e:
pywikibot.exception(e)
...
@param tb: Set to True in order to output traceback also.
"""
if isinstance(msg, BaseException):
exc_info = 1
else:
exc_info = sys.exc_info()
msg = u'%s: %s' % (repr(exc_info[1]).split('(')[0],
unicode(exc_info[1]).strip())
if tb:
kwargs['exc_info'] = exc_info
logoutput(msg, decoder, newline, ERROR, **kwargs)
| 33.039024 | 77 | 0.668242 |
4a23a4ed8df6d549e0f72cb0344c4a0b2c7d0312 | 18,258 | py | Python | ros/src/tl_detector/tl_detector.py | redherring2141/CarND-Capstone | df230f902836923dbbc55065c3d4f12531c05cda | ["MIT"] | null | null | null | ros/src/tl_detector/tl_detector.py | redherring2141/CarND-Capstone | df230f902836923dbbc55065c3d4f12531c05cda | ["MIT"] | null | null | null | ros/src/tl_detector/tl_detector.py | redherring2141/CarND-Capstone | df230f902836923dbbc55065c3d4f12531c05cda | ["MIT"] | null | null | null |
#!/usr/bin/env python
import rospy
from std_msgs.msg import Int32
from geometry_msgs.msg import PoseStamped, Pose
from styx_msgs.msg import TrafficLightArray, TrafficLight
from styx_msgs.msg import Lane
from sensor_msgs.msg import Image
from cv_bridge import CvBridge
from light_classification.tl_classifier import TLClassifier
#import tensorflow as tf
import tf
import cv2
import yaml
import math
import sys
import numpy as np
from keras.models import load_model, model_from_json
from keras.utils.generic_utils import get_custom_objects
from keras import backend
STATE_COUNT_THRESHOLD = 3
VISIBLE_DISTANCE = 250
SMOOTHNESS = 1.0
def dice_coef(y_true, y_pred):
y_true_f = backend.flatten(y_true)
y_pred_f = backend.flatten(y_pred)
intersection = backend.sum(y_true_f * y_pred_f)
return (2.*intersection + SMOOTHNESS) / (backend.sum(y_true_f) + backend.sum(y_pred_f) + SMOOTHNESS)
def dice_coef_loss(y_true, y_pred):
return -dice_coef(y_true, y_pred)
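# Descriptive note: dice_coef is the (smoothed) Sorensen-Dice coefficient,
# 2 * |X intersect Y| / (|X| + |Y|), with SMOOTHNESS avoiding division by zero;
# dice_coef_loss negates it so that maximising overlap minimises the loss.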
class TLDetector(object):
def __init__(self):
rospy.init_node('tl_detector')
#self.pose = None
#self.waypoints = None
#self.cam_img = None
#self.lights = []
self.pose_curr = None
self.wpts_base = None
self.cam_img = None
self.lights = None
self.has_img = False
self.light_classifier = TLClassifier()
self.tf_listener = tf.TransformListener()
self.prev_light_loc = None
self.state = TrafficLight.UNKNOWN
self.last_state = TrafficLight.UNKNOWN
self.last_wp = -1
self.state_count = 0
self.lights_wp = []
self.stoplines_wp = []
self.cam_cb_count = 0
self.simulated_detection = rospy.get_param('~simulated_detection', 1)
self.tl_detection_interval_frames = rospy.get_param('~tl_detection_interval_frames', 10)
config_string = rospy.get_param("/traffic_light_config")
self.config = yaml.load(config_string)
# Setup classifier
rospy.loginfo("[tl_detector.py - initialization - line71] Loading TLClassifier model")
self.light_classifier = TLClassifier()
model = load_model(self.config['tl']['tl_classification_model'])
resize_width = self.config['tl']['classifier_resize_width']
resize_height = self.config['tl']['classifier_resize_height']
self.light_classifier.setup_classifier(model, resize_width, resize_height)
self.invalid_class_number = 3
# Setup detector
rospy.loginfo("[tl_detector.py - initialization - line80] Loading TLDetector model")
custom_objects = {'dice_coef_loss': dice_coef_loss, 'dice_coef': dice_coef}
self.detector_model = load_model(self.config['tl']['tl_detection_model'], custom_objects = custom_objects)
self.detector_model._make_predict_function()
self.resize_width = self.config['tl']['detector_resize_width']
self.resize_height = self.config['tl']['detector_resize_height']
self.resize_height_ratio = self.config['camera_info']['image_height'] / float(self.resize_height)
self.resize_width_ratio = self.config['camera_info']['image_width'] / float(self.resize_width)
self.mid_col = self.resize_width / 2
self.is_carla = self.config['tl']['is_carla']
self.projection_threshold = self.config['tl']['projection_threshold']
self.projection_min = self.config['tl']['projection_min']
self.color_mode = self.config['tl']['color_mode']
rospy.loginfo("[tl_detector.py - initialization - line94] Loaded TLDetector model")
sub1 = rospy.Subscriber('/current_pose', PoseStamped, self.pose_cb)
sub2 = rospy.Subscriber('/base_waypoints', Lane, self.wpts_cb)
'''
/vehicle/traffic_lights provides you with the location of the traffic light in 3D map space and
helps you acquire an accurate ground truth data source for the traffic light
classifier by sending the current color state of all traffic lights in the
simulator. When testing on the vehicle, the color state will not be available. You'll need to
rely on the position of the light and the camera image to predict it.
'''
sub3 = rospy.Subscriber('/vehicle/traffic_lights', TrafficLightArray, self.traffic_cb)
sub6 = rospy.Subscriber('/image_color', Image, self.img_cb, queue_size=1)
#config_string = rospy.get_param("/traffic_light_config")
#self.config = yaml.load(config_string)
self.upcoming_red_light_pub = rospy.Publisher('/traffic_waypoint', Int32, queue_size=1)
self.bridge = CvBridge()
'''
self.light_classifier = TLClassifier()
self.listener = tf.TransformListener()
self.state = TrafficLight.UNKNOWN
self.last_state = TrafficLight.UNKNOWN
self.last_wp = -1
self.state_count = 0
'''
rospy.spin()
def pose_cb(self, msg):
self.pose_curr = msg
def wpts_cb(self, msg):
#self.waypoints = waypoints
if self.wpts_base is not None:
return
self.wpts_base = msg
for i in range(len(self.wpts_base.waypoints)):
self.wpts_base.waypoints[i].pose.header.frame_id = self.wpts_base.header.frame_id
self.calc_tl_wpts()
def traffic_cb(self, msg):
#self.lights = msg.lights
if self.simulated_detection > 0:
self.lights = msg.lights
self.calc_tl_wpts()
light_wp, state = self.process_traffic_lights()
self.publish_upcoming_red_light(light_wp, state)
else:
if self.lights is not None:
return
self.lights = msg.lights
self.calc_tl_wpts()
def img_cb(self, msg):
"""Identifies red lights in the incoming camera image and publishes the index
of the waypoint closest to the red light's stop line to /traffic_waypoint
Args:
msg (Image): image from car-mounted camera
"""
self.cam_cb_count += 1
if self.cam_cb_count < self.tl_detection_interval_frames:
return
self.cam_cb_count = 0
# Original code
self.has_img = True
self.cam_img = msg
light_wp, state = self.process_traffic_lights()
self.publish_upcoming_red_light(light_wp, state)
'''
Publish upcoming red lights at camera frequency.
Each predicted state has to occur `STATE_COUNT_THRESHOLD` number
of times till we start using it. Otherwise the previous stable state is
used.
'''
'''
if self.state != state:
self.state_count = 0
self.state = state
elif self.state_count >= STATE_COUNT_THRESHOLD:
self.last_state = self.state
light_wp = light_wp if state == TrafficLight.RED else -1
self.last_wp = light_wp
self.upcoming_red_light_pub.publish(Int32(light_wp))
else:
self.upcoming_red_light_pub.publish(Int32(self.last_wp))
self.state_count += 1
#Original code
'''
def get_nearest_wpt(self, pose):
"""Identifies the closest path waypoint to the given position
https://en.wikipedia.org/wiki/Closest_pair_of_points_problem
Args:
pose (Pose): position to match a waypoint to
Returns:
int: index of the closest waypoint in self.waypoints
"""
#TODO implement
#return 0
if self.wpts_base is None:
return None
dist_min = sys.maxsize
wp_min = None
for wp in range(len(self.wpts_base.waypoints)):
dist = self.dist_euclead(pose, self.wpts_base.waypoints[wp].pose.pose)
if dist < dist_min:
dist_min = dist
wp_min = wp
return wp_min
def get_light_state(self, light):
"""Determines the current color of the traffic light
Args:
light (TrafficLight): light to classify
Returns:
int: ID of traffic light color (specified in styx_msgs/TrafficLight)
"""
labels = list(enumerate(['Red', 'Yellow', 'Green', 'None', 'None']))
if self.simulated_detection > 0:
if self.lights is None or light >= len(self.lights):
rospy.loginfo("[tl_detector.py - get_light_state - line248] simulated_detection: No TL detection.")
return TrafficLight.UNKNOWN
state = self.lights[light].state
rospy.loginfo("[tl_detector.py - get_light_state - line251] simulated_detection: Nearest TL-state is: %s", labels[state][1])
return state
if(not self.has_img):
self.prev_light_loc = None
rospy.loginfo("[tl_detector.py - get_light_state - line256] has_image is None: No TL detection.")
return TrafficLight.UNKNOWN
cv_img = self.bridge.imgmsg_to_cv2(self.cam_img, self.color_mode)
#if cv_img is not None:
#print("cv_img generated")
#cv2.imshow('cv_img', cv_img)
tl_img = self.detect_tl(cv_img)
#cv2.imshow('tl_img', tl_img)
if tl_img is not None:
#Get classification
state = self.light_classifier.get_classification(tl_img)
state = state if (state != self.invalid_class_number) else TrafficLight.UNKNOWN
rospy.loginfo("[tl_detector.py - get_light_state - line269] Nearest TL-state is: %s", labels[state][1])
return state
else:
rospy.loginfo("[tl_detector.py - get_light_state - line272] tl_img is None: No TL detection.")
return TrafficLight.UNKNOWN
#return False
def process_traffic_lights(self):
"""Finds closest visible traffic light, if one exists, and determines its
location and color
Returns:
int: index of waypoint closes to the upcoming stop line for a traffic light (-1 if none exists)
int: ID of traffic light color (specified in styx_msgs/TrafficLight)
"""
'''
light = None
# List of positions that correspond to the line to stop in front of for a given intersection
stop_line_positions = self.config['stop_line_positions']
if(self.pose):
car_position = self.get_nearest_wpt(self.pose.pose)
#TODO find the closest visible traffic light (if one exists)
if light:
state = self.get_light_state(light)
return light_wp, state
self.waypoints = None
return -1, TrafficLight.UNKNOWN
'''
###Added
if self.pose_curr is None or len(self.stoplines_wp) == 0:
#print("self.pose_curr= ", self.pose_curr)
#print("len(self.stoplines_wp", len(self.stoplines_wp))
rospy.loginfo("[tl_detector.py - process_traffic_lights - line302] No TL detection.")
return -1, TrafficLight.UNKNOWN
light = self.get_nearest_visible_tl(self.pose_curr.pose)#Find the nearest visible TL.
# Find the closest traffic light if exists
if light is None:
rospy.loginfo("[tl_detector.py - process_traffic_lights - line307] No TL detection.")
return -1, TrafficLight.UNKNOWN
state = self.get_light_state(light)
return self.stoplines_wp[light], state
def publish_upcoming_red_light(self, light_wp, state):
#print("[debugging tl_detector.py - publish_upcoming_red_light - line329: 0 ")
if self.state != state:
self.state_count = 0
self.state = state
elif self.state_count >= STATE_COUNT_THRESHOLD:
self.last_state = self.state
light_wp = light_wp if state == TrafficLight.RED else -1
self.last_wp = light_wp
self.upcoming_red_light_pub.publish(Int32(light_wp))
#print("[debugging tl_detector.py - publish_upcoming_red_light - line337: 1 ")
else:
self.upcoming_red_light_pub.publish(Int32(self.last_wp))
#print("[debugging tl_detector.py - publish_upcoming_red_light - line340: 2 ")
self.state_count += 1
def extract_img(self, pred_img_mask, img):
#rospy.loginfo("[tl_detector.py - extract_img - line341] Detecting TL...extract_img()")
if np.max(pred_img_mask) < self.projection_min:
#print("debugging line 344")
return None
row_projection = np.sum(pred_img_mask, axis=1)
row_idx = np.argmax(row_projection)
if np.max(row_projection) < self.projection_threshold:
#print("debugging line 351")
return None
zero_row_idx = np.argwhere(row_projection <= self.projection_threshold)
top_part = zero_row_idx[zero_row_idx < row_idx]
top = np.max(top_part) if top_part.size > 0 else 0
bot_part = zero_row_idx[zero_row_idx > row_idx]
bot = np.min(bot_part) if bot_part.size > 0 else self.resize_height
roi = pred_img_mask[top:bot, :]
col_projection = np.sum(roi, axis=0)
if np.max(col_projection) < self.projection_min:
print("debugging line 364")
print("col_projection:", col_projection, "self.projection_min:", self.projection_min)
return None
non_zero_col_idx = np.argwhere(col_projection > self.projection_min)
idx_of_col_idx = np.argmin(np.abs(non_zero_col_idx - self.mid_col))
col_idx = non_zero_col_idx[idx_of_col_idx][0]
zero_col_idx = np.argwhere(col_projection == 0)
left_side = zero_col_idx[zero_col_idx < col_idx]
left = np.max(left_side) if left_side.size > 0 else 0
right_side = zero_col_idx[zero_col_idx > col_idx]
right = np.min(right_side) if right_side.size > 0 else self.resize_width
return img[int(top*self.resize_height_ratio):int(bot*self.resize_height_ratio),
int(left*self.resize_width_ratio):int(right*self.resize_width_ratio)]
def detect_tl(self, cv_img):
resize_img = cv2.cvtColor(cv2.resize(cv_img, (self.resize_width, self.resize_height)), cv2.COLOR_RGB2GRAY)
#if (resize_img is not None):
# print("resize_img generated")
resize_img = resize_img[..., np.newaxis]
if self.is_carla:
avg = np.mean(resize_img)
std = np.std(resize_img)
resize_img -= avg
resize_img /= std
img_mask = self.detector_model.predict(resize_img[None,:,:,:], batch_size=1)[0]
'''
if (img_mask is not None):
print("img_mask generated 1")
'''
img_mask = (img_mask[:,:,0]*255).astype(np.uint8)
'''
if (img_mask is not None):
print("img_mask generated 2")
'''
return self.extract_img(img_mask, cv_img)
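    # Descriptive note: detect_tl runs the segmentation model on a resized
    # grayscale frame to get a traffic-light mask, and extract_img then crops the
    # original image by projecting that mask onto rows and columns around the
    # strongest response near the image centre.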
def dist_euclead(self, pos1, pos2):
distance = (pos1.position.x-pos2.position.x)**2 + (pos1.position.y-pos2.position.y)**2
return distance
def trans_fromcar_tomap(self, pose_curr):# Transform the car position to the map coordinate
try:
self.tf_listener.waitForTransform("base_link", "world", rospy.Time(0), rospy.Duration(0.02))
transformed_pose_curr = self.tf_listener.transformPose("base_link", pose_curr)
except (tf.Exception, tf.LookupException, tf.ConnectivityException):
try:
self.tf_listener.waitForTransform("base_link", "world", rospy.Time(0), rospy.Duration(1.0))
                transformed_pose_curr = self.tf_listener.transformPose("base_link", pose_curr)
except (tf.Exception, tf.LookupException, tf.ConnectivityException):
transformed_pose_curr = None
rospy.logwarn("Failed to transform pose")
return transformed_pose_curr
def get_nearest_stopline(self, pose):
stop_line_positions = self.config['stop_line_positions']
dist_min = sys.maxsize
stop_line_min = None
for stop_line_position in stop_line_positions:
stop_line_pose = Pose()
stop_line_pose.position.x = stop_line_position[0]
stop_line_pose.position.y = stop_line_position[1]
stop_line_pose.position.z = 0.0
dist = self.dist_euclead(pose, stop_line_pose)
if dist < dist_min:
dist_min = dist
stop_line_min = stop_line_pose
return stop_line_min
def calc_tl_wpts(self):
if self.wpts_base is not None and self.lights is not None and len(self.lights_wp) == 0:
for i in range(len(self.lights)):
stopline = self.get_nearest_stopline(self.lights[i].pose.pose)
self.stoplines_wp.append(self.get_nearest_wpt(stopline))
self.lights_wp.append(self.get_nearest_wpt(self.lights[i].pose.pose))
def get_nearest_visible_tl(self, pose):
if self.wpts_base is None or self.lights is None or len(self.lights_wp) == 0:
return None
num_lights = len(self.lights_wp)
dist_min = sys.maxsize
light_min = None
for light in range(num_lights):
dist = self.dist_euclead(pose, self.wpts_base.waypoints[self.lights_wp[light]].pose.pose)
if dist < dist_min:
dist_min = dist
light_min = light
transformed_wpt = self.trans_fromcar_tomap(self.wpts_base.waypoints[self.lights_wp[light_min]].pose)
if transformed_wpt is not None and transformed_wpt.pose.position.x <= 0.0:
light_min += 1
if light_min >= num_lights:
light_min -= num_lights
dist_euclead = self.dist_euclead(pose, self.wpts_base.waypoints[self.lights_wp[light_min]].pose.pose)
if dist_euclead > (VISIBLE_DISTANCE ** 2):
return None
return light_min
if __name__ == '__main__':
try:
TLDetector()
except rospy.ROSInterruptException:
rospy.logerr('Could not start traffic node.')
| 36.589178 | 136 | 0.638241 |
4a23a54c47fceb8517d072b44349840196f9596b | 5,967 | py | Python | localflavor/us/forms.py | torakses/django-localflavor | 17ca87095d6f8c3f3888016085a2edb5951889f4 | ["BSD-3-Clause"] | null | null | null | localflavor/us/forms.py | torakses/django-localflavor | 17ca87095d6f8c3f3888016085a2edb5951889f4 | ["BSD-3-Clause"] | null | null | null | localflavor/us/forms.py | torakses/django-localflavor | 17ca87095d6f8c3f3888016085a2edb5951889f4 | ["BSD-3-Clause"] | null | null | null |
"""USA-specific Form helpers."""
from __future__ import unicode_literals
import re
from django.core.validators import EMPTY_VALUES
from django.forms import ValidationError
from django.forms.fields import CharField, Field, RegexField, Select
from django.utils.encoding import force_text
from django.utils.translation import ugettext_lazy as _
from localflavor.compat import EmptyValueCompatMixin
from localflavor.deprecation import DeprecatedPhoneNumberFormFieldMixin
phone_digits_re = re.compile(r'^(?:1-?)?(\d{3})[-\.]?(\d{3})[-\.]?(\d{4})$')
ssn_re = re.compile(r"^(?P<area>\d{3})[-\ ]?(?P<group>\d{2})[-\ ]?(?P<serial>\d{4})$")
class USZipCodeField(EmptyValueCompatMixin, RegexField):
"""
A form field that validates input as a U.S. ZIP code.
Valid formats are XXXXX or XXXXX-XXXX.
.. note::
If you are looking for a form field with a list of U.S. Postal Service
locations please use :class:`~localflavor.us.forms.USPSSelect`.
.. versionadded:: 1.1
Whitespace around the ZIP code is accepted and automatically trimmed.
"""
default_error_messages = {
'invalid': _('Enter a zip code in the format XXXXX or XXXXX-XXXX.'),
}
def __init__(self, max_length=None, min_length=None, *args, **kwargs):
super(USZipCodeField, self).__init__(r'^\d{5}(?:-\d{4})?$',
max_length, min_length, *args, **kwargs)
def to_python(self, value):
value = super(USZipCodeField, self).to_python(value)
if value in self.empty_values:
return self.empty_value
return value.strip()
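# Minimal usage sketch (assumes a standard Django form, not part of this module):
#   class AddressForm(forms.Form):
#       zip_code = USZipCodeField()
#   AddressForm(data={'zip_code': ' 94110-1234 '})  # cleans to '94110-1234'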
class USPhoneNumberField(EmptyValueCompatMixin, CharField, DeprecatedPhoneNumberFormFieldMixin):
"""
A form field that validates input as a U.S. phone number.
.. deprecated:: 1.4
Use the django-phonenumber-field_ library instead.
.. _django-phonenumber-field: https://github.com/stefanfoulis/django-phonenumber-field
"""
default_error_messages = {
'invalid': _('Phone numbers must be in XXX-XXX-XXXX format.'),
}
def clean(self, value):
super(USPhoneNumberField, self).clean(value)
if value in self.empty_values:
return self.empty_value
value = re.sub('(\(|\)|\s+)', '', force_text(value))
m = phone_digits_re.search(value)
if m:
return '%s-%s-%s' % (m.group(1), m.group(2), m.group(3))
raise ValidationError(self.error_messages['invalid'])
class USSocialSecurityNumberField(EmptyValueCompatMixin, CharField):
"""
A United States Social Security number.
Checks the following rules to determine whether the number is valid:
* Conforms to the XXX-XX-XXXX format.
* No group consists entirely of zeroes.
* The leading group is not "666" (block "666" will never be allocated).
* The number is not in the promotional block 987-65-4320 through
987-65-4329, which are permanently invalid.
* The number is not one known to be invalid due to otherwise widespread
promotional use or distribution (e.g., the Woolworth's number or the
1962 promotional number).
.. versionadded:: 1.1
"""
default_error_messages = {
'invalid': _('Enter a valid U.S. Social Security number in XXX-XX-XXXX format.'),
}
def clean(self, value):
super(USSocialSecurityNumberField, self).clean(value)
if value in self.empty_values:
return self.empty_value
match = re.match(ssn_re, value)
if not match:
raise ValidationError(self.error_messages['invalid'])
area, group, serial = match.groupdict()['area'], match.groupdict()['group'], match.groupdict()['serial']
# First pass: no blocks of all zeroes.
if area == '000' or group == '00' or serial == '0000':
raise ValidationError(self.error_messages['invalid'])
# Second pass: promotional and otherwise permanently invalid numbers.
if (area == '666' or
area.startswith('9') or
(area == '078' and group == '05' and serial == '1120') or
(area == '219' and group == '09' and serial == '9999')):
raise ValidationError(self.error_messages['invalid'])
return '%s-%s-%s' % (area, group, serial)
class USStateField(Field):
"""
A form field that validates its input is a U.S. state, territory, or COFA territory.
The input is validated against a dictionary which includes names and abbreviations.
It normalizes the input to the standard two-letter postal service
abbreviation for the given state.
"""
default_error_messages = {
'invalid': _('Enter a U.S. state or territory.'),
}
def clean(self, value):
from .us_states import STATES_NORMALIZED
super(USStateField, self).clean(value)
if value in EMPTY_VALUES:
return ''
try:
value = value.strip().lower()
except AttributeError:
pass
else:
try:
return STATES_NORMALIZED[value.strip().lower()]
except KeyError:
pass
raise ValidationError(self.error_messages['invalid'])
class USStateSelect(Select):
"""A Select widget that uses a list of U.S. states/territories as its choices."""
def __init__(self, attrs=None):
from .us_states import STATE_CHOICES
super(USStateSelect, self).__init__(attrs, choices=STATE_CHOICES)
class USPSSelect(Select):
"""
A Select widget that uses a list of US Postal Service codes as its choices.
.. note::
If you are looking for a form field that validates U.S. ZIP codes
please use :class:`~localflavor.us.forms.USZipCodeField`.
"""
def __init__(self, attrs=None):
from .us_states import USPS_CHOICES
super(USPSSelect, self).__init__(attrs, choices=USPS_CHOICES)
| 34.69186 | 112 | 0.644042 |
4a23a631c21ef8c9216bbaf1408b6f644701863e | 1,383 | py | Python | examples/spu_comp.py | matthiaskramm/corepy | b2aad4e86adca10420e825fb65dcbd031cf44bb1 | [
"BSD-3-Clause"
] | 8 | 2016-02-20T03:52:58.000Z | 2022-01-24T15:04:14.000Z | examples/spu_comp.py | matthiaskramm/corepy | b2aad4e86adca10420e825fb65dcbd031cf44bb1 | [
"BSD-3-Clause"
] | null | null | null | examples/spu_comp.py | matthiaskramm/corepy | b2aad4e86adca10420e825fb65dcbd031cf44bb1 | [
"BSD-3-Clause"
] | 6 | 2015-12-11T05:21:15.000Z | 2020-12-11T10:59:15.000Z | import corepy.lib.extarray as extarray
import corepy.arch.spu.isa as spu
import corepy.arch.spu.lib.util as util
import corepy.arch.spu.platform as env
prgm = env.Program()
code = prgm.get_stream()
proc = env.Processor()
# Generate substream
# Multiply gp_return by 2, add 1
subcode = prgm.get_stream()
subcode.add(spu.shli(subcode.gp_return, subcode.gp_return, 1))
subcode.add(spu.ai(subcode.gp_return, subcode.gp_return, 1))
# Initialize gp_return, insert code
code.add(spu.il(code.gp_return, 5))
code.add(subcode)
# Add 3, insert again
code.add(spu.ai(code.gp_return, code.gp_return, 3))
code.add(subcode)
#code.print_code()
prgm.add(code)
prgm.print_code() # TODO - support print prgm instead?
ret = proc.execute(prgm, mode = 'int')
print "ret", ret
prgm = env.Program()
code = prgm.get_stream()
r_add = prgm.acquire_register()
# Generate substream
# Multiply gp_return by 2, add 1
subcode = prgm.get_stream()
subcode.add(spu.shli(subcode.gp_return, subcode.gp_return, 1))
subcode.add(spu.a(subcode.gp_return, subcode.gp_return, r_add))
# Initialize gp_return, insert code
code.add(spu.il(r_add, 1))
code.add(spu.il(code.gp_return, 5))
code.add(subcode)
# Add 3, insert again
code.add(spu.il(r_add, 2))
code.add(spu.ai(code.gp_return, code.gp_return, 3))
code.add(subcode)
prgm.add(code)
prgm.print_code()
ret = proc.execute(prgm, mode = 'int')
print "ret", ret
| 23.05 | 63 | 0.741142 |
4a23a6b27d6c2a12b5ca9c5c5c903ee739ec189d | 421 | py | Python | TWLight/users/serializers.py | nicole331/TWLight | fab9002e76868f8a2ef36f9279c777de34243b2c | [
"MIT"
] | 67 | 2017-12-14T22:27:48.000Z | 2022-03-13T18:21:31.000Z | TWLight/users/serializers.py | nicole331/TWLight | fab9002e76868f8a2ef36f9279c777de34243b2c | [
"MIT"
] | 433 | 2017-03-24T22:51:23.000Z | 2022-03-31T19:36:22.000Z | TWLight/users/serializers.py | Mahuton/TWLight | 90b299d07b0479f21dc90e17b8d05f5a221b0de1 | [
"MIT"
] | 105 | 2017-06-23T03:53:41.000Z | 2022-03-30T17:24:29.000Z | from django.contrib.auth.models import User
from rest_framework import serializers
class UserSerializer(serializers.ModelSerializer):
wp_username = serializers.CharField(source="editor.wp_username")
class Meta:
model = User
# Since we only care about one field we could probably return data in a more
# sensible format, but this is totally functional.
fields = ("wp_username",)
| 32.384615 | 84 | 0.726841 |
4a23a7bbd6134f50d60ec711492cbdd372c02885 | 8,220 | py | Python | ppocr/modeling/heads/kie_sdmgr_head.py | ocr-translate/PaddleOCR-1 | 71e8ae072f8085e91db40d5e23dee60a30707542 | [
"Apache-2.0"
] | null | null | null | ppocr/modeling/heads/kie_sdmgr_head.py | ocr-translate/PaddleOCR-1 | 71e8ae072f8085e91db40d5e23dee60a30707542 | [
"Apache-2.0"
] | null | null | null | ppocr/modeling/heads/kie_sdmgr_head.py | ocr-translate/PaddleOCR-1 | 71e8ae072f8085e91db40d5e23dee60a30707542 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# The code is refer from: https://github.com/open-mmlab/mmocr/blob/main/mmocr/models/kie/heads/sdmgr_head.py
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import paddle
from paddle import nn
import paddle.nn.functional as F
from paddle import ParamAttr
class SDMGRHead(nn.Layer):
def __init__(self,
in_channels,
num_chars=92,
visual_dim=16,
fusion_dim=1024,
node_input=32,
node_embed=256,
edge_input=5,
edge_embed=256,
num_gnn=2,
num_classes=26,
bidirectional=False):
super().__init__()
self.fusion = Block([visual_dim, node_embed], node_embed, fusion_dim)
self.node_embed = nn.Embedding(num_chars, node_input, 0)
hidden = node_embed // 2 if bidirectional else node_embed
self.rnn = nn.LSTM(
input_size=node_input, hidden_size=hidden, num_layers=1)
self.edge_embed = nn.Linear(edge_input, edge_embed)
self.gnn_layers = nn.LayerList(
[GNNLayer(node_embed, edge_embed) for _ in range(num_gnn)])
self.node_cls = nn.Linear(node_embed, num_classes)
self.edge_cls = nn.Linear(edge_embed, 2)
def forward(self, input, targets):
relations, texts, x = input
node_nums, char_nums = [], []
for text in texts:
node_nums.append(text.shape[0])
char_nums.append(paddle.sum((text > -1).astype(int), axis=-1))
max_num = max([char_num.max() for char_num in char_nums])
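        # Pad every text tensor with zeros up to the longest text in the batch
        # so all nodes can be embedded and run through the RNN in one pass.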
all_nodes = paddle.concat([
paddle.concat(
[text, paddle.zeros(
(text.shape[0], max_num - text.shape[1]))], -1)
for text in texts
])
temp = paddle.clip(all_nodes, min=0).astype(int)
embed_nodes = self.node_embed(temp)
rnn_nodes, _ = self.rnn(embed_nodes)
b, h, w = rnn_nodes.shape
nodes = paddle.zeros([b, w])
all_nums = paddle.concat(char_nums)
valid = paddle.nonzero((all_nums > 0).astype(int))
temp_all_nums = (
paddle.gather(all_nums, valid) - 1).unsqueeze(-1).unsqueeze(-1)
temp_all_nums = paddle.expand(temp_all_nums, [
temp_all_nums.shape[0], temp_all_nums.shape[1], rnn_nodes.shape[-1]
])
temp_all_nodes = paddle.gather(rnn_nodes, valid)
N, C, A = temp_all_nodes.shape
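        # A one-hot mask over the sequence dimension picks, for each valid
        # node, the RNN output at its last real character (the final hidden
        # state of a variable-length sequence); the result is scattered back
        # into the zero-initialised `nodes` tensor.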
one_hot = F.one_hot(
temp_all_nums[:, 0, :], num_classes=C).transpose([0, 2, 1])
one_hot = paddle.multiply(
temp_all_nodes, one_hot.astype("float32")).sum(axis=1, keepdim=True)
t = one_hot.expand([N, 1, A]).squeeze(1)
nodes = paddle.scatter(nodes, valid.squeeze(1), t)
if x is not None:
nodes = self.fusion([x, nodes])
all_edges = paddle.concat(
[rel.reshape([-1, rel.shape[-1]]) for rel in relations])
embed_edges = self.edge_embed(all_edges.astype('float32'))
embed_edges = F.normalize(embed_edges)
for gnn_layer in self.gnn_layers:
nodes, cat_nodes = gnn_layer(nodes, embed_edges, node_nums)
node_cls, edge_cls = self.node_cls(nodes), self.edge_cls(cat_nodes)
return node_cls, edge_cls
class GNNLayer(nn.Layer):
def __init__(self, node_dim=256, edge_dim=256):
super().__init__()
self.in_fc = nn.Linear(node_dim * 2 + edge_dim, node_dim)
self.coef_fc = nn.Linear(node_dim, 1)
self.out_fc = nn.Linear(node_dim, node_dim)
self.relu = nn.ReLU()
def forward(self, nodes, edges, nums):
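        # `nums` holds the node count per sample; for each sample build all
        # num x num ordered node pairs and concatenate the pair features with
        # the edge embeddings to form the message inputs.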
start, cat_nodes = 0, []
for num in nums:
sample_nodes = nodes[start:start + num]
cat_nodes.append(
paddle.concat([
paddle.expand(sample_nodes.unsqueeze(1), [-1, num, -1]),
paddle.expand(sample_nodes.unsqueeze(0), [num, -1, -1])
], -1).reshape([num**2, -1]))
start += num
cat_nodes = paddle.concat([paddle.concat(cat_nodes), edges], -1)
cat_nodes = self.relu(self.in_fc(cat_nodes))
coefs = self.coef_fc(cat_nodes)
start, residuals = 0, []
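        # Attention-style aggregation: softmax the coefficients over each
        # node's neighbours, masking self-pairs via the -1e9 diagonal, then
        # sum the weighted messages into per-node residuals.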
for num in nums:
residual = F.softmax(
-paddle.eye(num).unsqueeze(-1) * 1e9 +
coefs[start:start + num**2].reshape([num, num, -1]), 1)
residuals.append((residual * cat_nodes[start:start + num**2]
.reshape([num, num, -1])).sum(1))
start += num**2
nodes += self.relu(self.out_fc(paddle.concat(residuals)))
return [nodes, cat_nodes]
class Block(nn.Layer):
def __init__(self,
input_dims,
output_dim,
mm_dim=1600,
chunks=20,
rank=15,
shared=False,
dropout_input=0.,
dropout_pre_lin=0.,
dropout_output=0.,
pos_norm='before_cat'):
super().__init__()
self.rank = rank
self.dropout_input = dropout_input
self.dropout_pre_lin = dropout_pre_lin
self.dropout_output = dropout_output
assert (pos_norm in ['before_cat', 'after_cat'])
self.pos_norm = pos_norm
# Modules
self.linear0 = nn.Linear(input_dims[0], mm_dim)
self.linear1 = (self.linear0
if shared else nn.Linear(input_dims[1], mm_dim))
self.merge_linears0 = nn.LayerList()
self.merge_linears1 = nn.LayerList()
self.chunks = self.chunk_sizes(mm_dim, chunks)
for size in self.chunks:
ml0 = nn.Linear(size, size * rank)
self.merge_linears0.append(ml0)
ml1 = ml0 if shared else nn.Linear(size, size * rank)
self.merge_linears1.append(ml1)
self.linear_out = nn.Linear(mm_dim, output_dim)
def forward(self, x):
x0 = self.linear0(x[0])
x1 = self.linear1(x[1])
bs = x1.shape[0]
if self.dropout_input > 0:
x0 = F.dropout(x0, p=self.dropout_input, training=self.training)
x1 = F.dropout(x1, p=self.dropout_input, training=self.training)
x0_chunks = paddle.split(x0, self.chunks, -1)
x1_chunks = paddle.split(x1, self.chunks, -1)
zs = []
for x0_c, x1_c, m0, m1 in zip(x0_chunks, x1_chunks, self.merge_linears0,
self.merge_linears1):
m = m0(x0_c) * m1(x1_c) # bs x split_size*rank
m = m.reshape([bs, self.rank, -1])
z = paddle.sum(m, 1)
if self.pos_norm == 'before_cat':
z = paddle.sqrt(F.relu(z)) - paddle.sqrt(F.relu(-z))
z = F.normalize(z)
zs.append(z)
z = paddle.concat(zs, 1)
if self.pos_norm == 'after_cat':
z = paddle.sqrt(F.relu(z)) - paddle.sqrt(F.relu(-z))
z = F.normalize(z)
if self.dropout_pre_lin > 0:
z = F.dropout(z, p=self.dropout_pre_lin, training=self.training)
z = self.linear_out(z)
if self.dropout_output > 0:
z = F.dropout(z, p=self.dropout_output, training=self.training)
return z
def chunk_sizes(self, dim, chunks):
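        # Split `dim` into `chunks` nearly equal parts, shrinking the last
        # part so the sizes sum exactly to `dim`.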
split_size = (dim + chunks - 1) // chunks
sizes_list = [split_size] * chunks
sizes_list[-1] = sizes_list[-1] - (sum(sizes_list) - dim)
return sizes_list
| 39.330144 | 108 | 0.582968 |
4a23a9387a81b5b9200d3aed6345383650c4859e | 11,720 | py | Python | __init__.py | nmbr7/ctfd-category-scoreboard-plugin | 5a9098061a2bd5c913e1af66da62fb7dea86d4d4 | [
"MIT"
] | null | null | null | __init__.py | nmbr7/ctfd-category-scoreboard-plugin | 5a9098061a2bd5c913e1af66da62fb7dea86d4d4 | [
"MIT"
] | null | null | null | __init__.py | nmbr7/ctfd-category-scoreboard-plugin | 5a9098061a2bd5c913e1af66da62fb7dea86d4d4 | [
"MIT"
] | null | null | null | import os
import datetime
import stream_chat
from flask import (
render_template,
jsonify,
Blueprint,
url_for,
session,
redirect,
request,
escape
)
from sqlalchemy.sql import or_
from CTFd.utils.helpers import get_errors, get_infos
from CTFd import utils, scoreboard
from CTFd.models import db, Solves, Challenges, Submissions, Teams, Users
from CTFd.plugins import override_template
from CTFd.utils.config import is_scoreboard_frozen, ctf_theme, is_users_mode
from CTFd.utils.config.visibility import challenges_visible, scores_visible
from CTFd.utils.dates import (
ctf_started, ctftime, view_after_ctf, unix_time_to_utc
)
from CTFd.utils.user import is_admin, authed
from CTFd.utils.user import get_current_user
from CTFd.utils.decorators import authed_only
from CTFd.utils.decorators.visibility import (
check_account_visibility,
check_score_visibility,
)
def get_challenges():
if not is_admin():
if not ctftime():
if view_after_ctf():
pass
else:
return []
if challenges_visible() and (ctf_started() or is_admin()):
chals = db.session.query(
Challenges.id,
Challenges.name,
Challenges.category
        ).filter(or_(Challenges.state != 'hidden', Challenges.state.is_(None)), Challenges.value != 1).all()
jchals = []
for x in chals:
jchals.append({
'id': x.id,
'name': x.name,
'category': x.category
})
# Sort into groups
categories = set(map(lambda x: x['category'], jchals))
return {"cat":sorted(list(categories))}
return []
def load(app):
dir_path = os.path.dirname(os.path.realpath(__file__))
template_path = os.path.join(dir_path, 'scoreboard-matrix.html')
override_template('scoreboard.html', open(template_path).read())
matrix = Blueprint('matrix', __name__, static_folder='static')
app.register_blueprint(matrix, url_prefix='/matrix')
def get_standings():
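        # Re-rank teams for the category scoreboard: per team, sum solved
        # points per category (hiding other teams' post-freeze solves), count
        # fully completed categories, then sort by completed categories,
        # score, and earliest qualifying solve date.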
standings = scoreboard.get_standings()
# TODO faster lookup here
jstandings = []
for team in standings:
teamid = team[0]
solves = (db.session.query(Solves.challenge_id,Challenges.category,db.func.sum(Challenges.value),db.func.max(Solves.date))
.join(Challenges, Solves.challenge_id == Challenges.id)
.group_by(Challenges.category)
.filter(Solves.team_id == teamid)
.filter(Challenges.value != 1)
.filter(Challenges.value != 0)
)
challenge = (db.session.query(Challenges.category,db.func.sum(Challenges.value)).group_by(Challenges.category)).all()
#print(team[2])
chal_details = {}
for i in challenge:
chal_details.update({i[0]:i[1]})
freeze = utils.get_config('freeze')
if freeze:
freeze = unix_time_to_utc(freeze)
if teamid != session.get('id'):
#print(session.get('id'),teamid,freeze)
solves = solves.filter(Solves.date < freeze)
solves = solves.all()
score = []
cat = get_challenges()["cat"]
for i in solves:
score.append({"id":i[0],"score":i[2],"cat":i[1],"date":i[3]})
for i in cat:
if i not in [j["cat"] for j in score]:
#score.append({"score":0,"cat":i,"date":datetime.datetime.utcfromtimestamp(111111111111)})
score.append({"score":0,"cat":i,"date": None,"id":-1})
score = sorted(score, key = lambda i: i["cat"])
maxscore = 0
temp = []
catfil = []
count = 0
for c,i in enumerate(score):
if chal_details[i['cat']] == i['score']:
temp.append(i)
catfil.append(i['cat'])
maxscore += i['score']
count += 1
if maxscore == 0:
maxscore = {i["date"]:i["score"] for i in score}
date = max(maxscore,key=maxscore.get)
maxscore = maxscore[date]
cat = {i["cat"]:i["score"] for i in score}
cat = max(cat,key=cat.get)
catfil.append(cat)
else:
date = sorted(temp, key = lambda i:i['date'],reverse=True)[0]['date']
if date == None:
continue
# Check for the cat with the least date if there are multiple max values
jstandings.append({'teamid': team[0],'cat': catfil, 'solves': score, 'name': escape(team[2]),'date':date,'state':count, 'score': maxscore})
jstandings = sorted(jstandings, key = lambda i: i["date"])
#for i in jstandings:
# print(teamid,i['date'],i['score'])
jstandings = sorted(jstandings, key = lambda i: i["score"],reverse=True)
jstandings = sorted(jstandings, key = lambda i: i["state"],reverse=True)
#print('next sort')
#for i in jstandings:
# print(i['date'],i['score'])
db.session.close()
return jstandings
def scoreboard_view():
if scores_visible() and not authed():
return redirect(url_for('auth.login', next=request.path))
if not scores_visible():
return render_template('scoreboard.html',
errors=['Scores are currently hidden'])
standings = get_standings()
challenges = get_challenges()
#for i in standings:
# print(i)
return render_template(
"scoreboard.html",
standings=standings,
challenges=challenges,
mode='users' if is_users_mode() else 'teams',
score_frozen=is_scoreboard_frozen(),
theme=ctf_theme()
)
def scores():
json = {'data': [],"succes": True}
if scores_visible() and not authed():
return redirect(url_for('auth.login', next=request.path))
if not scores_visible():
return jsonify(json)
standings = get_standings()
for i, x in enumerate(standings):
score = ""
for j in x['solves']:
score += str(j['score'])+'</td><td class="chalmark">'
score += str(x['score'])
json['data'].append({"account_type": "team", 'pos': i + 1, "score": score,"name":escape(x['name']),"account_url":"/teams/", "member":[{
"score":x['score'],
"id":x['teamid'],
"name":escape(x['name']),
}]})
return jsonify(json)
    # Route /api/v1/scoreboard/top/10
def scoreslist(count=10):
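        # Detail endpoint for the top teams: list each leading team's solves
        # in the categories that produced its ranking, hiding other teams'
        # solves made after the freeze time.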
json = {"success":True, "data": {}}
if scores_visible() and not authed():
return redirect(url_for('auth.login', next=request.path))
if not scores_visible():
return jsonify(json)
standings = get_standings()
for i, x in enumerate(standings[:10]):
solves = (db.session.query(Solves.challenge_id,Challenges.value,Solves.date)
.join(Challenges, Solves.challenge_id == Challenges.id)
.filter(Challenges.category.in_(x['cat']))
.filter(Solves.team_id == x['teamid'])
)
freeze = utils.get_config('freeze')
if freeze:
freeze = unix_time_to_utc(freeze)
if x['teamid'] != session.get('id'):
solves = solves.filter(Solves.date < freeze)
solves = solves.all()
#print(x['teamid'],'Stat Solve',solves)
sol = []
for s in solves:
sol.append({'account_id':x['teamid'],'challenge_id':s[0],'date':s[2],'team_id':x['teamid'],'user_id':x['teamid'],'value':s[1]})
sol = sorted(sol, key = lambda i: i["date"])
json['data'].update({str(i + 1):{ 'id': x['teamid'], 'name': escape(x['name']), 'solves': sol}})
return jsonify(json)
def public(team_id):
standings = get_standings()
errors = get_errors()
team = Teams.query.filter_by(id=team_id, banned=False, hidden=False).first_or_404()
solves = team.get_solves()
awards = team.get_awards()
score = 0
place = None
for c,i in enumerate(standings):
if i['teamid'] == team_id:
place = c+1
score = i['score']
break
if errors:
return render_template("teams/public.html", team=team, errors=errors)
return render_template(
"teams/public.html",
solves=solves,
awards=awards,
team=team,
score=score,
place=place,
score_frozen=is_scoreboard_frozen(),
)
def private():
standings = get_standings()
user = get_current_user()
if not user.team_id:
return render_template("teams/team_enrollment.html")
score = 0
place = None
team_id = user.team_id
team = Teams.query.filter_by(id=team_id).first_or_404()
solves = team.get_solves()
awards = team.get_awards()
for c,i in enumerate(standings):
if i['teamid'] == team_id:
place = c+1
score = i['score']
break
return render_template(
"teams/private.html",
solves=solves,
awards=awards,
user=user,
team=team,
score=score,
place=place,
score_frozen=is_scoreboard_frozen(),
)
@app.route('/api/v1/current/user')
@authed_only
def currentuser():
if not os.getenv("GETSTREAM_KEYS"):
# Attempt to read the secret from the secret file
# from CTFd/.get_stream_secrets
# This will fail if the secret has not been written
try:
with open(".get_stream_secrets", "rb") as secret:
get_stream_key = secret.readlines()
except (OSError, IOError):
get_stream_key = None
#print(get_stream_key)
if get_stream_key is None or len(get_stream_key) < 2:
print("Error: ********** getstream_key is None or is not in valid format ***********")
return
client = stream_chat.StreamChat(api_key=get_stream_key[0].strip(), api_secret=get_stream_key[1].strip())
user = get_current_user()
team = Teams.query.filter_by(id=user.team_id).first()
if team:
team_name = team.name
team_id = user.team_id
else:
team_name = "No_Team"
team_id = "No Team id"
user_key = client.create_token(str(user.id))
return jsonify({
"current_username":user.name,
"team_id":team_id,
"team_name":team_name,
"userid":user.id,
"user_key":user_key,})
app.view_functions['scoreboard.listing'] = scoreboard_view
app.view_functions['teams.private'] = private
app.view_functions['teams.public'] = public
app.view_functions['scoreboard.score'] = scores
app.view_functions['api.scoreboard_scoreboard_detail'] = scoreslist
app.view_functions['api.scoreboard_scoreboard_list'] = scores
| 36.17284 | 151 | 0.544454 |
4a23a95f5855c8879a9b7fde33b1a59776d6319d | 3,701 | py | Python | prediction/predictor.py | litosly/RankingOptimizationApprochtoLLC | b6b79fb1364fcc481aa84093a8e8399966b19d02 | [
"MIT"
] | 5 | 2020-10-10T02:11:16.000Z | 2021-11-08T22:55:34.000Z | prediction/predictor.py | litosly/RankingOptimizationApprochtoLLC | b6b79fb1364fcc481aa84093a8e8399966b19d02 | [
"MIT"
] | null | null | null | prediction/predictor.py | litosly/RankingOptimizationApprochtoLLC | b6b79fb1364fcc481aa84093a8e8399966b19d02 | [
"MIT"
] | 2 | 2020-10-22T14:50:43.000Z | 2021-08-15T07:00:43.000Z | from tqdm import tqdm
import numpy as np
def predict_vector(rating_vector, train_vector, remove_train=True):
dim = len(rating_vector)
candidate_index = np.argpartition(-rating_vector, dim-1)[:dim]
prediction_items = candidate_index[rating_vector[candidate_index].argsort()[::-1]]
if remove_train:
return np.delete(prediction_items, np.isin(prediction_items, train_vector.nonzero()[1]).nonzero()[0])
else:
return prediction_items
def predict_scores(matrix_U, matrix_V, bias=None, gpu=False):
if gpu:
import cupy as cp
matrix_U = cp.array(matrix_U)
matrix_V = cp.array(matrix_V)
if bias is None:
prediction = matrix_U.dot(matrix_V.T)
return prediction
if gpu:
import cupy as cp
return prediction + cp.array(bias)
else:
return prediction + bias
def predict_items(prediction_scores, topK, matrix_Train, gpu=False):
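    # Turn precomputed score rows into top-K item rankings per user, dropping
    # items already seen in training; users with no training interactions get
    # a zero vector.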
prediction = []
for user_index in tqdm(range(prediction_scores.shape[0])):
vector_u = prediction_scores[user_index]
vector_train = matrix_Train[user_index]
if len(vector_train.nonzero()[0]) > 0:
train_index = vector_train.nonzero()[1]
if gpu:
import cupy as cp
candidate_index = cp.argpartition(-vector_u, topK+len(train_index))[:topK+len(train_index)]
vector_u = candidate_index[vector_u[candidate_index].argsort()[::-1]]
vector_u = cp.asnumpy(vector_u).astype(np.float32)
else:
candidate_index = np.argpartition(-vector_u, topK+len(train_index))[:topK+len(train_index)]
vector_u = candidate_index[vector_u[candidate_index].argsort()[::-1]]
vector_u = np.delete(vector_u, np.isin(vector_u, train_index).nonzero()[0])
vector_predict = vector_u[:topK]
else:
vector_predict = np.zeros(topK, dtype=np.float32)
prediction.append(vector_predict)
return np.vstack(prediction)
def predict(matrix_U, matrix_V, topK, matrix_Train, bias=None, gpu=False):
if gpu:
import cupy as cp
matrix_U = cp.array(matrix_U)
matrix_V = cp.array(matrix_V)
prediction = []
for user_index in tqdm(range(matrix_U.shape[0])):
vector_u = matrix_U[user_index]
vector_train = matrix_Train[user_index]
if len(vector_train.nonzero()[0]) > 0:
vector_predict = sub_routine(vector_u, matrix_V, vector_train, bias, topK=topK, gpu=gpu)
else:
vector_predict = np.zeros(topK, dtype=np.float32)
prediction.append(vector_predict)
return np.vstack(prediction)
def sub_routine(vector_u, matrix_V, vector_train, bias, topK=500, gpu=False):
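    # Score every item for one user, partially sort the top (topK + number of
    # training items), remove the training items, and keep the first topK.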
train_index = vector_train.nonzero()[1]
vector_predict = matrix_V.dot(vector_u)
if bias is not None:
if gpu:
import cupy as cp
vector_predict = vector_predict + cp.array(bias)
else:
vector_predict = vector_predict + bias
if gpu:
import cupy as cp
candidate_index = cp.argpartition(-vector_predict, topK+len(train_index))[:topK+len(train_index)]
vector_predict = candidate_index[vector_predict[candidate_index].argsort()[::-1]]
vector_predict = cp.asnumpy(vector_predict).astype(np.float32)
else:
candidate_index = np.argpartition(-vector_predict, topK+len(train_index))[:topK+len(train_index)]
vector_predict = candidate_index[vector_predict[candidate_index].argsort()[::-1]]
vector_predict = np.delete(vector_predict, np.isin(vector_predict, train_index).nonzero()[0])
return vector_predict[:topK]
| 34.268519 | 109 | 0.661443 |
4a23aa684c4a2cd142fc1eca9a75d77d43dfcf75 | 4,610 | py | Python | bootstrapvz/plugins/prebootstrapped/tasks.py | zeridon/bootstrap-vz | fcdc6993f59e521567fb101302b02312e741b88c | [
"Apache-2.0"
] | 207 | 2015-01-26T19:00:24.000Z | 2021-12-16T10:05:58.000Z | bootstrapvz/plugins/prebootstrapped/tasks.py | zeridon/bootstrap-vz | fcdc6993f59e521567fb101302b02312e741b88c | [
"Apache-2.0"
] | 346 | 2015-01-01T08:56:09.000Z | 2019-06-10T08:03:05.000Z | bootstrapvz/plugins/prebootstrapped/tasks.py | zeridon/bootstrap-vz | fcdc6993f59e521567fb101302b02312e741b88c | [
"Apache-2.0"
] | 124 | 2015-01-16T21:22:29.000Z | 2022-02-25T17:36:10.000Z | from bootstrapvz.base import Task
from bootstrapvz.common import phases
from bootstrapvz.common.tasks import volume
from bootstrapvz.common.tasks import packages
from bootstrapvz.providers.virtualbox.tasks import guest_additions
from bootstrapvz.providers.ec2.tasks import ebs
from bootstrapvz.common.fs import unmounted
from bootstrapvz.common.tools import log_check_call
from shutil import copyfile
import os.path
import time
import logging
log = logging.getLogger(__name__)
class Snapshot(Task):
description = 'Creating a snapshot of the bootstrapped volume'
phase = phases.package_installation
predecessors = [packages.InstallPackages, guest_additions.InstallGuestAdditions]
@classmethod
def run(cls, info):
snapshot = None
with unmounted(info.volume):
snapshot = info.volume.snapshot()
msg = 'A snapshot of the bootstrapped volume was created. ID: ' + snapshot.id
log.info(msg)
class CreateFromSnapshot(Task):
description = 'Creating EBS volume from a snapshot'
phase = phases.volume_creation
successors = [ebs.Attach]
@classmethod
def run(cls, info):
snapshot = info.manifest.plugins['prebootstrapped']['snapshot']
ebs_volume = info._ec2['connection'].create_volume(info.volume.size.bytes.get_qty_in('GiB'),
info._ec2['host']['availabilityZone'],
snapshot=snapshot)
while ebs_volume.volume_state() != 'available':
time.sleep(5)
ebs_volume.update()
info.volume.volume = ebs_volume
set_fs_states(info.volume)
class CopyImage(Task):
description = 'Creating a snapshot of the bootstrapped volume'
phase = phases.package_installation
predecessors = [packages.InstallPackages, guest_additions.InstallGuestAdditions]
@classmethod
def run(cls, info):
loopback_backup_name = 'volume-{id}.{ext}.backup'.format(id=info.run_id, ext=info.volume.extension)
destination = os.path.join(info.manifest.bootstrapper['workspace'], loopback_backup_name)
with unmounted(info.volume):
copyfile(info.volume.image_path, destination)
msg = 'A copy of the bootstrapped volume was created. Path: ' + destination
log.info(msg)
class CreateFromImage(Task):
description = 'Creating loopback image from a copy'
phase = phases.volume_creation
successors = [volume.Attach]
@classmethod
def run(cls, info):
info.volume.image_path = os.path.join(info.workspace, 'volume.' + info.volume.extension)
loopback_backup_path = info.manifest.plugins['prebootstrapped']['image']
copyfile(loopback_backup_path, info.volume.image_path)
set_fs_states(info.volume)
class CopyFolder(Task):
description = 'Creating a copy of the bootstrap folder'
phase = phases.package_installation
predecessors = [packages.InstallPackages, guest_additions.InstallGuestAdditions]
@classmethod
def run(cls, info):
folder_backup_name = '{id}.{ext}.backup'.format(id=info.run_id, ext=info.volume.extension)
destination = os.path.join(info.manifest.bootstrapper['workspace'], folder_backup_name)
log_check_call(['cp', '-a', info.volume.path, destination])
msg = 'A copy of the bootstrapped volume was created. Path: ' + destination
log.info(msg)
class CreateFromFolder(Task):
description = 'Creating bootstrap folder from a copy'
phase = phases.volume_creation
successors = [volume.Attach]
@classmethod
def run(cls, info):
info.root = os.path.join(info.workspace, 'root')
log_check_call(['cp', '-a', info.manifest.plugins['prebootstrapped']['folder'], info.root])
info.volume.path = info.root
info.volume.fsm.current = 'attached'
def set_fs_states(vol):
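    # The volume was restored from a snapshot or image copy, so fast-forward
    # the state machines of the volume, its partition map and its partitions
    # to the states a freshly bootstrapped volume would be in, letting later
    # tasks skip creation and formatting.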
vol.fsm.current = 'detached'
p_map = vol.partition_map
from bootstrapvz.base.fs.partitionmaps.none import NoPartitions
if not isinstance(p_map, NoPartitions):
p_map.fsm.current = 'unmapped'
from bootstrapvz.base.fs.partitions.unformatted import UnformattedPartition
from bootstrapvz.base.fs.partitions.single import SinglePartition
for partition in p_map.partitions:
if isinstance(partition, UnformattedPartition):
partition.fsm.current = 'unmapped'
continue
if isinstance(partition, SinglePartition):
partition.fsm.current = 'formatted'
continue
partition.fsm.current = 'unmapped_fmt'
| 37.177419 | 107 | 0.692191 |
4a23ab7ccd55e3884e4c3bf1103088fce8798c47 | 133 | py | Python | python_scripts_youtube/desafio16_final.py | diegocarloni/python | 232e3f40f8096854bfc8da0820808fbda8797387 | [
"MIT"
] | null | null | null | python_scripts_youtube/desafio16_final.py | diegocarloni/python | 232e3f40f8096854bfc8da0820808fbda8797387 | [
"MIT"
] | null | null | null | python_scripts_youtube/desafio16_final.py | diegocarloni/python | 232e3f40f8096854bfc8da0820808fbda8797387 | [
"MIT"
] | null | null | null | from math import trunc
num = float(input('Digite um número: '))
print('A parte inteira de {} é igual a {}'.format(num, trunc(num)))
| 26.6 | 67 | 0.676692 |
4a23abc7b1caa98c87389423c45b8238685787b9 | 24,170 | py | Python | trac/wiki/tests/functional.py | mikiec84/trac | d51a7119b9fcb9061d7fe135c7d648fa671555dd | [
"BSD-3-Clause"
] | null | null | null | trac/wiki/tests/functional.py | mikiec84/trac | d51a7119b9fcb9061d7fe135c7d648fa671555dd | [
"BSD-3-Clause"
] | null | null | null | trac/wiki/tests/functional.py | mikiec84/trac | d51a7119b9fcb9061d7fe135c7d648fa671555dd | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2008-2020 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at https://trac.edgewall.org/log/.
import os
import unittest
from trac.mimeview.rst import has_docutils
from trac.tests.contentgen import random_sentence, random_unique_camel
from trac.tests.functional import FunctionalTwillTestCaseSetup, tc
from trac.util import create_file, get_pkginfo
from trac.util.html import tag
class TestWiki(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Create a wiki page."""
self._tester.create_wiki_page()
class TestWikiEdit(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Edit a wiki page."""
pagename = self._tester.create_wiki_page()
self._tester.edit_wiki_page(pagename)
tc.find("Your changes have been saved in version 2")
tc.find(r'\(<a href="/wiki/%s\?action=diff&version=2">diff</a>\)'
% pagename)
class TestWikiDelete(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Delete a wiki page."""
# Delete page with single version.
name = self._tester.create_wiki_page()
self._tester.go_to_wiki(name)
tc.formvalue('delete', 'action', 'delete')
tc.submit('delete_page')
tc.find("Are you sure you want to completely delete this page?")
tc.notfind("The following attachments will also be deleted:")
tc.submit('delete', 'delete-confirm')
tc.find("The page %s has been deleted." % name)
tc.url(self._tester.url)
# Delete page with attachment.
name = self._tester.create_wiki_page()
filename = self._tester.attach_file_to_wiki(name)
self._tester.go_to_wiki(name)
tc.formvalue('delete', 'action', 'delete')
tc.submit('delete_page')
tc.find("Are you sure you want to completely delete this page?")
tc.find("The following attachments will also be deleted:")
tc.find(filename)
tc.submit('delete', 'delete-confirm')
tc.find("The page %s has been deleted." % name)
tc.url(self._tester.url)
# Delete page with multiple versions.
name = self._tester.create_wiki_page(content="Initial content.")
self._tester.edit_wiki_page(name, content="Revised content.")
self._tester.go_to_wiki(name)
tc.formvalue('delete', 'action', 'delete')
tc.submit('delete_page')
tc.find("Are you sure you want to completely delete this page?")
tc.find(r'Removing all\s+<a href="/wiki/%s\?action=history&'
r'version=2">2 versions</a>\s+of the page' % name)
tc.notfind("The following attachments will also be deleted:")
tc.submit('delete', 'delete-confirm')
tc.find("The page %s has been deleted." % name)
tc.url(self._tester.url)
class TestWikiAddAttachment(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Add attachment to a wiki page. Test that the attachment
button reads 'Attach file' when no files have been attached, and
'Attach another file' when there are existing attachments.
Feature added in https://trac.edgewall.org/ticket/10281"""
name = self._tester.create_wiki_page()
self._tester.go_to_wiki(name)
tc.find("Attach file")
filename = self._tester.attach_file_to_wiki(name)
self._tester.go_to_wiki(name)
tc.find("Attach another file")
tc.find('Attachments[ \n]+<span class="trac-count">\(1\)</span>')
tc.find(filename)
tc.find('Download all attachments as:\s+<a rel="nofollow" '
'href="/zip-attachment/wiki/%s/">.zip</a>' % name)
_plugin_py = """\
from trac.core import Component, implements
from trac.util.html import tag
from trac.util.translation import tag_
from trac.wiki.api import IWikiPageManipulator
class WikiPageManipulator(Component):
implements(IWikiPageManipulator)
def prepare_wiki_page(self, req, page, fields):
pass
def validate_wiki_page(self, req, page):
field = 'comment'
yield None, tag_("The page contains invalid markup at"
" line %(number)s.", number=tag.strong('10'))
yield field, tag_("The field %(field)s cannot be empty.",
field=tag.strong(field))
"""
class TestWikiPageManipulator(FunctionalTwillTestCaseSetup):
def runTest(self):
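        # Write _plugin_py into the environment's plugins directory, enable
        # the component, restart, and check that its IWikiPageManipulator
        # validation errors are rendered when saving WikiStart; the component
        # is disabled again in the finally block.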
plugin_name = self.__class__.__name__
env = self._testenv.get_trac_environment()
env.config.set('components', plugin_name + '.*', 'enabled')
env.config.save()
create_file(os.path.join(env.plugins_dir, plugin_name + '.py'),
_plugin_py)
self._testenv.restart()
try:
self._tester.go_to_front()
tc.follow("Wiki")
tc.formvalue('modifypage', 'action', 'edit')
tc.submit()
tc.submit('save', 'edit')
tc.url(self._tester.url + '/wiki/WikiStart$')
tc.find("Invalid Wiki page: The page contains invalid markup at"
" line <strong>10</strong>.")
tc.find("The Wiki page field <strong>comment</strong> is invalid:"
" The field <strong>comment</strong> cannot be empty.")
finally:
env.config.set('components', plugin_name + '.*', 'disabled')
env.config.save()
class TestWikiHistory(FunctionalTwillTestCaseSetup):
"""Create wiki page and navigate to page history."""
def runTest(self):
pagename = self._tester.create_wiki_page()
self._tester.edit_wiki_page(pagename)
url = self._tester.url
tc.follow(r"\bHistory\b")
tc.url(url + r'/wiki/%s\?action=history' % pagename)
version_link = ('<td class="version">[ \n]*'
'<a href="/wiki/%(pagename)s\?version=%%(version)s"'
'[ \n]*title="View this version">%%(version)s[ \n]*</a>'
% {'pagename': pagename})
tc.find(version_link % {'version': 1})
tc.find(version_link % {'version': 2})
tc.find(r'<th class="comment">Comment</th>')
tc.formvalue('history', 'old_version', '1')
tc.formvalue('history', 'version', '2')
tc.submit()
tc.url(r'%s/wiki/%s\?action=diff&version=2&old_version=1'
% (url, pagename))
tc.find(r'<a href="/wiki/%s\?version=1">Version 1</a>' % pagename)
tc.find(r'<a href="/wiki/%s\?version=2">Version 2</a>' % pagename)
tc.find(r'<a href="/wiki/%(name)s">%(name)s</a>' % {'name': pagename})
class TestWikiEditComment(FunctionalTwillTestCaseSetup):
"""Edit wiki page comment from diff and history."""
def runTest(self):
initial_comment = "Initial comment"
pagename = self._tester.create_wiki_page(comment=initial_comment)
url = self._tester.url
tc.follow(r"\bHistory\b")
history_url = url + r'/wiki/%s\?action=history' % pagename
tc.url(history_url)
# Comment edit from history page
tc.follow(r"\bEdit\b")
tc.url(url + r'/wiki/%s\?action=edit_comment&version=1' % pagename)
tc.find("Old comment:[ \t\n]+%s" % initial_comment)
first_comment_edit = "First comment edit"
tc.formvalue('edit-comment-form', 'new_comment', first_comment_edit)
tc.submit()
tc.url(history_url)
tc.find(r'<td class="comment">[ \t\n]+%s' % first_comment_edit)
# Comment edit from diff page
tc.formvalue('history', 'version', '1')
tc.submit()
diff_url = url + r'/wiki/%s\?action=diff&version=1' % pagename
tc.url(diff_url)
tc.find(r'<p>[ \t\n]+%s[ \t\n]+</p>' % first_comment_edit)
tc.follow(r"\bEdit\b")
tc.url(url + r'/wiki/%s\?action=edit_comment&version=1&redirect_to=diff'
% pagename)
second_comment_edit = "Second comment edit"
tc.formvalue('edit-comment-form', 'new_comment', second_comment_edit)
tc.submit()
tc.url(diff_url)
tc.find(r'<p>[ \t\n]+%s[ \t\n]+</p>' % second_comment_edit)
class TestWikiReadonlyAttribute(FunctionalTwillTestCaseSetup):
"""Test the wiki readonly attribute, which is enforce when
DefaultWikiPolicy is in the list of active permission policies."""
def runTest(self):
self._tester.logout()
self._tester.login('user')
page_name = self._tester.create_wiki_page()
permission_policies = \
self._testenv.get_config('trac', 'permission_policies')
readonly_checkbox = (
'<input type="checkbox" name="readonly" id="readonly"/>')
attach_button = (
'<input type="submit" id="attachfilebutton" value="Attach.+file"/>')
try:
# User without WIKI_ADMIN can't set a page read-only
tc.formvalue('modifypage', 'action', 'edit')
tc.submit()
tc.notfind(readonly_checkbox)
# User with WIKI_ADMIN can set a page read-only
# and still edit that page
self._testenv.grant_perm('user', 'WIKI_ADMIN')
self._tester.go_to_wiki(page_name)
tc.formvalue('modifypage', 'action', 'edit')
tc.submit()
tc.find(readonly_checkbox)
tc.formvalue('edit', 'readonly', True)
tc.submit('save')
tc.go(self._tester.url + '/attachment/wiki/' + page_name)
tc.find(attach_button)
self._tester.edit_wiki_page(page_name)
# User without WIKI_ADMIN can't edit a read-only page
self._testenv.revoke_perm('user', 'WIKI_ADMIN')
self._tester.go_to_wiki(page_name)
tc.notfind('<input type="submit" value="Edit this page">')
tc.go(self._tester.url + '/attachment/wiki/' + page_name)
tc.notfind(attach_button)
# Read-only checkbox is not present when DefaultWikiPolicy
# is not in the list of active permission policies
pp_list = [p.strip() for p in permission_policies.split(',')]
pp_list.remove('DefaultWikiPolicy')
self._testenv._tracadmin('trac', 'permission_policies',
', '.join(pp_list))
self._testenv.grant_perm('user', 'WIKI_ADMIN')
self._tester.go_to_wiki(page_name)
tc.formvalue('modifypage', 'action', 'edit')
tc.submit()
tc.notfind(readonly_checkbox)
finally:
self._testenv.set_config('trac', 'permission_policies',
permission_policies)
self._testenv.revoke_perm('user', 'WIKI_ADMIN')
self._tester.logout()
self._tester.login('admin')
class TestWikiRename(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Test for simple wiki rename"""
pagename = self._tester.create_wiki_page()
attachment = self._tester.attach_file_to_wiki(pagename)
base_url = self._tester.url
page_url = base_url + "/wiki/" + pagename
def click_rename():
tc.formvalue('rename', 'action', 'rename')
tc.submit()
tc.url(page_url + r'\?action=rename')
tc.find("New name:")
tc.go(page_url)
tc.find("Rename page")
click_rename()
# attempt to give an empty new name
tc.formvalue('rename-form', 'new_name', '')
tc.submit('submit')
tc.url(page_url)
tc.find("A new name is mandatory for a rename")
# attempt to rename the page to an invalid page name
tc.formvalue('rename-form', 'new_name', '../WikiStart')
tc.submit('submit')
tc.url(page_url)
tc.find("The new name is invalid")
# attempt to rename the page to the current page name
tc.formvalue('rename-form', 'new_name', pagename)
tc.submit('submit')
tc.url(page_url)
tc.find("The new name must be different from the old name")
# attempt to rename the page to an existing page name
tc.formvalue('rename-form', 'new_name', 'WikiStart')
tc.submit('submit')
tc.url(page_url)
tc.find("The page WikiStart already exists")
# correct rename to new page name (old page replaced by a redirection)
tc.go(page_url)
click_rename()
newpagename = pagename + 'Renamed'
tc.formvalue('rename-form', 'new_name', newpagename)
tc.formvalue('rename-form', 'redirect', True)
tc.submit('submit')
# check redirection page
tc.url(page_url)
tc.find("See.*/wiki/" + newpagename)
tc.find("The page %s has been renamed to %s."
% (pagename, newpagename))
tc.find("The page %s has been recreated with a redirect to %s."
% (pagename, newpagename))
# check whether attachment exists on the new page but not on old page
tc.go(base_url + '/attachment/wiki/' + newpagename + '/' + attachment)
tc.notfind("Error: Invalid Attachment")
tc.go(base_url + '/attachment/wiki/' + pagename + '/' + attachment)
tc.find("Error: Invalid Attachment")
# rename again to another new page name (this time, no redirection)
tc.go(page_url)
click_rename()
newpagename = pagename + 'RenamedAgain'
tc.formvalue('rename-form', 'new_name', newpagename)
tc.formvalue('rename-form', 'redirect', False)
tc.submit('submit')
tc.url(base_url + "/wiki/" + newpagename)
tc.find("The page %s has been renamed to %s."
% (pagename, newpagename))
# this time, the original page is gone
tc.go(page_url)
tc.url(page_url)
tc.find("The page[ \n]+%s[ \n]+does not exist" % tag.strong(pagename))
class RegressionTestTicket4812(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Test for regression of https://trac.edgewall.org/ticket/4812"""
pagename = random_unique_camel() + '/' + random_unique_camel()
self._tester.create_wiki_page(pagename)
self._tester.attach_file_to_wiki(pagename)
tc.notfind('does not exist')
class ReStructuredTextWikiTest(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Render reStructured text using a wikiprocessor"""
pagename = self._tester.create_wiki_page(content="""
{{{
#!rst
Hello
=====
.. trac:: wiki:WikiStart Some Link
}}}
""")
self._tester.go_to_wiki(pagename)
tc.find("Some Link")
tc.find(r'<h1[^>]*>Hello')
tc.notfind("wiki:WikiStart")
tc.follow("Some Link")
tc.url(self._tester.url + "/wiki/WikiStart")
class ReStructuredTextCodeBlockTest(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Render reStructured code block"""
pagename = self._tester.create_wiki_page(content="""
{{{
#!rst
.. code-block:: python
print("123")
}}}
""")
self._tester.go_to_wiki(pagename)
tc.notfind("code-block")
tc.find('print')
tc.find('"123"')
class RegressionTestTicket8976(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Test for regression of https://trac.edgewall.org/ticket/8976
Test fine grained permissions policy on wiki for specific page
versions."""
name = self._tester.create_wiki_page()
self._tester.edit_wiki_page(name)
self._tester.edit_wiki_page(name)
self._tester.logout()
self._tester.login('user')
try:
self._tester.go_to_wiki(name, 1)
tc.notfind(r"\bError: Forbidden\b")
self._tester.go_to_wiki(name, 2)
tc.notfind(r"\bError: Forbidden\b")
self._tester.go_to_wiki(name, 3)
tc.notfind(r"\bError: Forbidden\b")
self._tester.go_to_wiki(name, 4)
tc.find(r"\bTrac Error\b")
self._tester.go_to_wiki(name)
tc.notfind(r"\bError: Forbidden\b")
self._testenv.enable_authz_permpolicy("""
[wiki:%(name)s@1]
* = !WIKI_VIEW
[wiki:%(name)s@2]
* = WIKI_VIEW
[wiki:%(name)s@3]
* = !WIKI_VIEW
[wiki:%(name)s]
* = WIKI_VIEW
""" % {'name': name})
self._tester.go_to_wiki(name, 1)
tc.find(r"\bError: Forbidden\b")
self._tester.go_to_wiki(name, 2)
tc.notfind(r"\bError: Forbidden\b")
self._tester.go_to_wiki(name, 3)
tc.find(r"\bError: Forbidden\b")
self._tester.go_to_wiki(name, 4)
tc.find(r"\bTrac Error\b")
self._tester.go_to_wiki(name)
tc.notfind(r"\bError: Forbidden\b")
self._tester.edit_wiki_page(name)
finally:
self._tester.logout()
self._tester.login('admin')
self._testenv.disable_authz_permpolicy()
class RegressionTestTicket10274(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Test for regression of https://trac.edgewall.org/ticket/10274"""
self._tester.go_to_wiki('WikiStart/..')
tc.find("Invalid Wiki page name 'WikiStart/..'")
self._tester.go_to_wiki('../WikiStart')
tc.find("Invalid Wiki page name '../WikiStart'")
self._tester.go_to_wiki('WikiStart/./SubPage')
tc.find("Invalid Wiki page name 'WikiStart/./SubPage'")
class RegressionTestTicket10850(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Test for regression of https://trac.edgewall.org/ticket/10850"""
pagename = self._tester.create_wiki_page()
# colon characters
self._tester.attach_file_to_wiki(
pagename, filename='2012-09-11_15:36:40-test.tbz2')
base_url = self._tester.url
tc.go(base_url + '/attachment/wiki/' + pagename +
'/2012-09-11_15:36:40-test.tbz2')
tc.notfind('Error: Invalid Attachment')
# backslash characters
self._tester.attach_file_to_wiki(
pagename, filename=r'/tmp/back\slash.txt')
base_url = self._tester.url
tc.go(base_url + '/attachment/wiki/' + pagename + r'/back\slash.txt')
tc.notfind('Error: Invalid Attachment')
# Windows full path
self._tester.attach_file_to_wiki(
pagename, filename=r'z:\tmp\windows:path.txt')
base_url = self._tester.url
tc.go(base_url + '/attachment/wiki/' + pagename + r'/windows:path.txt')
tc.notfind('Error: Invalid Attachment')
# Windows share folder path
self._tester.attach_file_to_wiki(
pagename, filename=r'\\server\share\file:name.txt')
base_url = self._tester.url
tc.go(base_url + '/attachment/wiki/' + pagename + r'/file:name.txt')
tc.notfind('Error: Invalid Attachment')
class RegressionTestTicket10957(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Test for regression of https://trac.edgewall.org/ticket/10957"""
self._tester.go_to_front()
try:
self._tester.logout()
# Check that page can't be created without WIKI_CREATE
page_name = random_unique_camel()
self._tester.go_to_wiki(page_name)
tc.find("Trac Error")
tc.find("Page %s not found" % page_name)
tc.notfind("Create this page")
tc.go(self._tester.url + '/wiki/%s?action=edit' % page_name)
tc.find("Error: Forbidden")
tc.find("WIKI_CREATE privileges are required to perform this "
"operation on %s. You don't have the required permissions."
% page_name)
# Check that page can be created when user has WIKI_CREATE
self._testenv.grant_perm('anonymous', 'WIKI_CREATE')
content_v1 = random_sentence()
self._tester.create_wiki_page(page_name, content_v1)
tc.find(content_v1)
# Check that page can't be edited without WIKI_MODIFY
tc.notfind("Edit this page")
tc.notfind("Attach file")
tc.go(self._tester.url + '/wiki/%s?action=edit' % page_name)
tc.find("Error: Forbidden")
tc.find("WIKI_MODIFY privileges are required to perform this "
"operation on %s. You don't have the required permissions."
% page_name)
# Check that page can be edited when user has WIKI_MODIFY
self._testenv.grant_perm('anonymous', 'WIKI_MODIFY')
self._tester.go_to_wiki(page_name)
tc.find("Edit this page")
tc.find("Attach file")
content_v2 = random_sentence()
self._tester.edit_wiki_page(page_name, content_v2)
tc.find(content_v2)
# Check that page can be reverted to a previous revision
tc.go(self._tester.url + '/wiki/%s?version=1' % page_name)
tc.find("Revert to this version")
tc.formvalue('modifypage', 'action', 'edit')
tc.submit()
tc.find(content_v1)
# Check that page can't be reverted without WIKI_MODIFY
self._tester.edit_wiki_page(page_name)
self._testenv.revoke_perm('anonymous', 'WIKI_MODIFY')
tc.go(self._tester.url + '/wiki/%s?version=1' % page_name)
tc.notfind("Revert to this version")
tc.go(self._tester.url + '/wiki/%s?action=edit&version=1' % page_name)
tc.find("WIKI_MODIFY privileges are required to perform this "
"operation on %s. You don't have the required permissions."
% page_name)
finally:
# Restore pre-test state.
self._tester.login('admin')
self._testenv.revoke_perm('anonymous', 'WIKI_CREATE')
class RegressionTestTicket11302(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Test for regression of https://trac.edgewall.org/ticket/11302"""
pagename = self._tester.create_wiki_page()
self._tester.attach_file_to_wiki(
pagename, description="illustrates [./@1#point1]")
self._tester.go_to_wiki(pagename + '?action=edit')
tc.find(r'illustrates <a class="wiki"'
r' href="/wiki/%s\?version=1#point1">@1</a>' % pagename)
def functionalSuite(suite=None):
if not suite:
import trac.tests.functional
suite = trac.tests.functional.functionalSuite()
suite.addTest(TestWiki())
suite.addTest(TestWikiEdit())
suite.addTest(TestWikiDelete())
suite.addTest(TestWikiAddAttachment())
suite.addTest(TestWikiPageManipulator())
suite.addTest(TestWikiHistory())
suite.addTest(TestWikiEditComment())
suite.addTest(TestWikiReadonlyAttribute())
suite.addTest(TestWikiRename())
suite.addTest(RegressionTestTicket4812())
suite.addTest(RegressionTestTicket8976())
suite.addTest(RegressionTestTicket10274())
suite.addTest(RegressionTestTicket10850())
suite.addTest(RegressionTestTicket10957())
suite.addTest(RegressionTestTicket11302())
if has_docutils:
import docutils
if get_pkginfo(docutils):
suite.addTest(ReStructuredTextWikiTest())
suite.addTest(ReStructuredTextCodeBlockTest())
else:
print("SKIP: reST wiki tests (docutils has no setuptools"
" metadata)")
else:
print("SKIP: reST wiki tests (no docutils)")
return suite
test_suite = functionalSuite
if __name__ == '__main__':
unittest.main(defaultTest='test_suite')
| 40.827703 | 83 | 0.612867 |
4a23abd4682dc93aa1cd54f91a6818897112a247 | 1,017 | py | Python | maml/apps/pes/__init__.py | anooptp/maml | fdd95f3d60c9281d871d89b25b073e87b6ba4e52 | [
"BSD-3-Clause"
] | 161 | 2020-01-26T08:24:41.000Z | 2022-03-29T06:42:42.000Z | maml/apps/pes/__init__.py | anooptp/maml | fdd95f3d60c9281d871d89b25b073e87b6ba4e52 | [
"BSD-3-Clause"
] | 195 | 2020-01-25T19:35:20.000Z | 2022-03-28T13:14:30.000Z | maml/apps/pes/__init__.py | anooptp/maml | fdd95f3d60c9281d871d89b25b073e87b6ba4e52 | [
"BSD-3-Clause"
] | 46 | 2020-03-30T12:56:39.000Z | 2022-03-27T12:53:23.000Z | # coding: utf-8
# Copyright (c) Materials Virtual Lab
# Distributed under the terms of the BSD License.
"""This package contains PotentialMixin classes representing Interatomic Potentials."""
from ._base import PotentialMixin, Potential # noqa
from ._gap import GAPotential # noqa
from ._lammps import ( # noqa
LMPStaticCalculator,
EnergyForceStress, # noqa
SpectralNeighborAnalysis,
ElasticConstant, # noqa
NudgedElasticBand,
DefectFormation, # noqa
LMPRelaxationCalculator,
LatticeConstant,
SurfaceEnergy,
get_default_lmp_exe, # noqa
)
from ._mtp import MTPotential # noqa
from ._nnp import NNPotential # noqa
from ._snap import SNAPotential # noqa
__all__ = [
"Potential",
"GAPotential",
"MTPotential",
"SNAPotential",
"NNPotential",
"LMPStaticCalculator",
"EnergyForceStress",
"SpectralNeighborAnalysis",
"ElasticConstant",
"LatticeConstant",
"NudgedElasticBand",
"DefectFormation",
"get_default_lmp_exe",
]
| 25.425 | 87 | 0.714848 |
4a23abde83152b4a63c87ef307dfd2826521029c | 3,632 | py | Python | src/unicef_locations/admin.py | unicef/unicef-locations | 4cfe80b6a1d8ac35b0fd243063d696ee7a132b3f | [
"Apache-2.0"
] | 3 | 2018-07-26T11:11:43.000Z | 2021-05-11T11:01:09.000Z | src/unicef_locations/admin.py | unicef/unicef-locations | 4cfe80b6a1d8ac35b0fd243063d696ee7a132b3f | [
"Apache-2.0"
] | 9 | 2018-07-26T15:33:41.000Z | 2022-02-07T11:55:59.000Z | src/unicef_locations/admin.py | unicef/unicef-locations | 4cfe80b6a1d8ac35b0fd243063d696ee7a132b3f | [
"Apache-2.0"
] | 1 | 2018-05-14T18:14:54.000Z | 2018-05-14T18:14:54.000Z | from admin_extra_urls.decorators import button
from admin_extra_urls.mixins import ExtraUrlMixin
from carto.sql import SQLClient
from django import forms
from django.contrib import admin as basic_admin, messages
from django.contrib.admin.templatetags.admin_urls import admin_urlname
from django.contrib.gis import admin
from django.forms import Textarea
from django.http import HttpResponse
from django.template import loader
from django.urls import NoReverseMatch, reverse
from django.utils.html import format_html
from leaflet.admin import LeafletGeoAdmin
from mptt.admin import MPTTModelAdmin
from unicef_locations.auth import LocationsCartoNoAuthClient
from unicef_locations.utils import get_remapping
from .forms import CartoDBTableForm
from .models import CartoDBTable, GatewayType, Location
from .tasks import import_locations
class AutoSizeTextForm(forms.ModelForm):
"""
Use textarea for name and description fields
"""
class Meta:
widgets = {
'name': Textarea(),
'description': Textarea(),
}
class ActiveLocationsFilter(basic_admin.SimpleListFilter):
title = 'Active Status'
parameter_name = 'is_active'
def lookups(self, request, model_admin):
return [
(True, 'Active'),
(False, 'Archived')
]
def queryset(self, request, queryset):
return queryset.filter(**self.used_parameters)
class LocationAdmin(LeafletGeoAdmin, MPTTModelAdmin):
save_as = True
form = AutoSizeTextForm
list_display = (
'name',
'gateway',
'p_code',
'is_active',
)
list_filter = (
'gateway',
ActiveLocationsFilter,
'parent',
)
search_fields = ('name', 'p_code',)
raw_id_fields = ('parent', )
def get_queryset(self, request): # pragma: no-cover
qs = Location.objects.all()
ordering = self.get_ordering(request)
if ordering:
qs = qs.order_by(*ordering)
return qs
def get_form(self, request, obj=None, **kwargs):
self.readonly_fields = [] if request.user.is_superuser else ['p_code', 'geom', 'point', 'gateway']
return super().get_form(request, obj, **kwargs)
class CartoDBTableAdmin(ExtraUrlMixin, admin.ModelAdmin):
form = CartoDBTableForm
save_as = True
list_display = (
'table_name',
'location_type',
'name_col',
'pcode_col',
'parent_code_col',
'import_table',
)
def import_table(self, obj):
try:
url = reverse(admin_urlname(obj._meta, 'import_sites'), args=[obj.pk])
return format_html(f'<a href="{url}">Import</a>')
except NoReverseMatch:
return '-'
@button(css_class="btn-warning auto-disable")
def import_sites(self, request, pk):
import_locations.delay(pk)
messages.info(request, 'Import Scheduled')
@button(css_class="btn-warning auto-disable")
def show_remap_table(self, request, pk):
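        # Preview, without running the import, which locations the remote
        # Carto table would remap and which it would deactivate.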
carto_table = CartoDBTable.objects.get(pk=pk)
sql_client = SQLClient(LocationsCartoNoAuthClient(base_url=f"https://{carto_table.domain}.carto.com/"))
old2new, to_deactivate = get_remapping(sql_client, carto_table)
template = loader.get_template('admin/location_remap.html')
context = {
'old2new': old2new,
'to_deactivate': to_deactivate
}
return HttpResponse(template.render(context, request))
admin.site.register(Location, LocationAdmin)
admin.site.register(GatewayType)
admin.site.register(CartoDBTable, CartoDBTableAdmin)
| 29.290323 | 111 | 0.675385 |
4a23acb07cd030a3911d69541ac56ff4fad02eb6 | 767 | py | Python | settings/config.py | jammie080/Twitter-Bot | 0c53bd667db92991ce1f0d0a14985265d505078f | [
"BSD-2-Clause"
] | null | null | null | settings/config.py | jammie080/Twitter-Bot | 0c53bd667db92991ce1f0d0a14985265d505078f | [
"BSD-2-Clause"
] | null | null | null | settings/config.py | jammie080/Twitter-Bot | 0c53bd667db92991ce1f0d0a14985265d505078f | [
"BSD-2-Clause"
] | null | null | null | import os
from os.path import join, dirname
from dotenv import load_dotenv, find_dotenv
try:
    dotenv_path = join(dirname(__file__), '.env')
    if dotenv_path:
        load_dotenv(dotenv_path)
    # NOTE: the environment variable names below are assumed; they must match the keys in .env.
    TWITTER_USERNAME = os.environ.get('TWITTER_USERNAME')
    TWITTER_PASSWORD = os.environ.get('TWITTER_PASSWORD')
except Exception:
    TWITTER_USERNAME = ''
    TWITTER_PASSWORD = ''
twitter = {
'files': {
'twitter-users':'\\output\\scraped.txt',
'follow-users':'\\output\\follow.txt',
'dont-follow-users':'\\output\\dont-follow.txt'
},
'auth': {
'twitter':{
'username': '%s' % TWITTER_USERNAME,
'password': '%s' % TWITTER_PASSWORD
}
},
'url':{
'home':'https://www.twitter.com/login'
}
}
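# Example .env file for the lookups above (illustrative values; the key names are an
# assumption and must match whatever the rest of the bot reads):
#
#   TWITTER_USERNAME=my_twitter_handle
#   TWITTER_PASSWORD=my_secret_password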
| 21.914286 | 55 | 0.54498 |
4a23ad41e477ff894e5f778080a6c7cd3f164214 | 598 | py | Python | tests/test_app.py | tbrlpld/logtweet | fdb1bcd4bdc4e7b4f08a52cd42654abcee390176 | [
"MIT"
] | 5 | 2020-01-07T20:58:40.000Z | 2021-03-17T22:36:16.000Z | tests/test_app.py | tbrlpld/logtweet | fdb1bcd4bdc4e7b4f08a52cd42654abcee390176 | [
"MIT"
] | null | null | null | tests/test_app.py | tbrlpld/logtweet | fdb1bcd4bdc4e7b4f08a52cd42654abcee390176 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# TEST: Before these tests can easily be created, I need to abstract everything
# that is related to generating the tweet content to a separate function.
# Once that is done, I only need to mock that one function's return value.
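# A minimal sketch of that mock-based test. The patch target and tweet text are
# hypothetical -- the real name depends on how the content generation gets factored out:
#
#     from unittest import mock
#
#     @mock.patch("logtweet.app.get_tweet_content", return_value="Day 1 of #100DaysOfCode ...")
#     def test_send_uses_generated_content(mock_content):
#         ...  # call the send function and assert on the tweeted text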
# TEST: Main app functionality.
def test_sending_creates_history_entry():
""""Test sending in normal mode creates a history entry."""
pass
# TEST: Sending duplicate in normal mode leads to error to user.
# TEST: Sending in normal mode shows a console message.
# TEST: Sending in normal mode prints success msg to console
| 35.176471 | 79 | 0.730769 |
4a23adf9e602d53aefba76091e516098f2f5efa4 | 13,200 | py | Python | pysnmp-with-texts/ATROPOS-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 8 | 2019-05-09T17:04:00.000Z | 2021-06-09T06:50:51.000Z | pysnmp-with-texts/ATROPOS-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 4 | 2019-05-31T16:42:59.000Z | 2020-01-31T21:57:17.000Z | pysnmp-with-texts/ATROPOS-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module ATROPOS-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/ATROPOS-MIB
# Produced by pysmi-0.3.4 at Wed May 1 11:31:41 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, ValueSizeConstraint, ValueRangeConstraint, ConstraintsIntersection, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "ValueSizeConstraint", "ValueRangeConstraint", "ConstraintsIntersection", "SingleValueConstraint")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
iso, Counter64, Bits, MibIdentifier, IpAddress, Counter32, experimental, Gauge32, Integer32, MibScalar, MibTable, MibTableRow, MibTableColumn, Unsigned32, NotificationType, ObjectIdentity, ModuleIdentity, TimeTicks = mibBuilder.importSymbols("SNMPv2-SMI", "iso", "Counter64", "Bits", "MibIdentifier", "IpAddress", "Counter32", "experimental", "Gauge32", "Integer32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Unsigned32", "NotificationType", "ObjectIdentity", "ModuleIdentity", "TimeTicks")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
atroposMIB = ModuleIdentity((1, 3, 6, 1, 3, 75, 4))
if mibBuilder.loadTexts: atroposMIB.setLastUpdated('9801010000Z')
if mibBuilder.loadTexts: atroposMIB.setOrganization('GE CRD')
if mibBuilder.loadTexts: atroposMIB.setContactInfo('Stephen F. Bush [email protected]')
if mibBuilder.loadTexts: atroposMIB.setDescription('Experimental MIB modules for the Active Virtual Network Management Prediction (Atropos) system.')
lP = MibIdentifier((1, 3, 6, 1, 3, 75, 4, 1))
lPTable = MibTable((1, 3, 6, 1, 3, 75, 4, 1, 1), )
if mibBuilder.loadTexts: lPTable.setStatus('current')
if mibBuilder.loadTexts: lPTable.setDescription('Table of Atropos LP information.')
lPEntry = MibTableRow((1, 3, 6, 1, 3, 75, 4, 1, 1, 1), ).setIndexNames((0, "ATROPOS-MIB", "lPIndex"))
if mibBuilder.loadTexts: lPEntry.setStatus('current')
if mibBuilder.loadTexts: lPEntry.setDescription('Table of Atropos LP information.')
lPIndex = MibTableColumn((1, 3, 6, 1, 3, 75, 4, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)))
if mibBuilder.loadTexts: lPIndex.setStatus('current')
if mibBuilder.loadTexts: lPIndex.setDescription('The LP table index.')
lPID = MibTableColumn((1, 3, 6, 1, 3, 75, 4, 1, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lPID.setStatus('current')
if mibBuilder.loadTexts: lPID.setDescription('The LP identifier.')
lPLVT = MibTableColumn((1, 3, 6, 1, 3, 75, 4, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: lPLVT.setStatus('current')
if mibBuilder.loadTexts: lPLVT.setDescription('This is the LP Local Virtual Time.')
lPQRSize = MibTableColumn((1, 3, 6, 1, 3, 75, 4, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: lPQRSize.setStatus('current')
if mibBuilder.loadTexts: lPQRSize.setDescription('This is the LP Receive Queue Size.')
lPQSSize = MibTableColumn((1, 3, 6, 1, 3, 75, 4, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: lPQSSize.setStatus('current')
if mibBuilder.loadTexts: lPQSSize.setDescription('This is the LP send queue size.')
lPCausalityRollbacks = MibTableColumn((1, 3, 6, 1, 3, 75, 4, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: lPCausalityRollbacks.setStatus('current')
if mibBuilder.loadTexts: lPCausalityRollbacks.setDescription('This is the number of rollbacks this LP has suffered.')
lPToleranceRollbacks = MibTableColumn((1, 3, 6, 1, 3, 75, 4, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: lPToleranceRollbacks.setStatus('current')
if mibBuilder.loadTexts: lPToleranceRollbacks.setDescription('This is the number of rollbacks this LP has suffered.')
lPSQSize = MibTableColumn((1, 3, 6, 1, 3, 75, 4, 1, 1, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: lPSQSize.setStatus('current')
if mibBuilder.loadTexts: lPSQSize.setDescription('This is the LP state queue size.')
lPTolerance = MibTableColumn((1, 3, 6, 1, 3, 75, 4, 1, 1, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: lPTolerance.setStatus('current')
if mibBuilder.loadTexts: lPTolerance.setDescription("This is the allowable deviation between process's predicted state and the actual state.")
lPGVT = MibTableColumn((1, 3, 6, 1, 3, 75, 4, 1, 1, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: lPGVT.setStatus('current')
if mibBuilder.loadTexts: lPGVT.setDescription("This is this system's notion of Global Virtual Time.")
lPLookAhead = MibTableColumn((1, 3, 6, 1, 3, 75, 4, 1, 1, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: lPLookAhead.setStatus('current')
if mibBuilder.loadTexts: lPLookAhead.setDescription("This is this system's maximum time into which it can predict.")
lPGvtUpdate = MibTableColumn((1, 3, 6, 1, 3, 75, 4, 1, 1, 1, 12), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: lPGvtUpdate.setStatus('current')
if mibBuilder.loadTexts: lPGvtUpdate.setDescription('This is the GVT update rate.')
lPStepSize = MibTableColumn((1, 3, 6, 1, 3, 75, 4, 1, 1, 1, 13), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: lPStepSize.setStatus('current')
if mibBuilder.loadTexts: lPStepSize.setDescription('This is the lookahead (Delta) in milliseconds for each virtual message as generated from the driving process.')
lPReal = MibTableColumn((1, 3, 6, 1, 3, 75, 4, 1, 1, 1, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: lPReal.setStatus('current')
if mibBuilder.loadTexts: lPReal.setDescription('This is the total number of real messages received.')
lPVirtual = MibTableColumn((1, 3, 6, 1, 3, 75, 4, 1, 1, 1, 15), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: lPVirtual.setStatus('current')
if mibBuilder.loadTexts: lPVirtual.setDescription('This is the total number of virtual messages received.')
lPNumPkts = MibTableColumn((1, 3, 6, 1, 3, 75, 4, 1, 1, 1, 16), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: lPNumPkts.setStatus('current')
if mibBuilder.loadTexts: lPNumPkts.setDescription('This is the total number of all Atropos packets received.')
lPNumAnti = MibTableColumn((1, 3, 6, 1, 3, 75, 4, 1, 1, 1, 17), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: lPNumAnti.setStatus('current')
if mibBuilder.loadTexts: lPNumAnti.setDescription('This is the total number of Anti-Messages transmitted by this Logical Process.')
lPPredAcc = MibTableColumn((1, 3, 6, 1, 3, 75, 4, 1, 1, 1, 18), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lPPredAcc.setStatus('current')
if mibBuilder.loadTexts: lPPredAcc.setDescription('This is the prediction accuracy based upon time weighted average of the difference between predicted and real values.')
lPPropX = MibTableColumn((1, 3, 6, 1, 3, 75, 4, 1, 1, 1, 19), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lPPropX.setStatus('current')
if mibBuilder.loadTexts: lPPropX.setDescription('This is the proportion of out-of-order messages received at this Logical Process.')
lPPropY = MibTableColumn((1, 3, 6, 1, 3, 75, 4, 1, 1, 1, 20), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lPPropY.setStatus('current')
if mibBuilder.loadTexts: lPPropY.setDescription('This is the proportion of out-of-tolerance messages received at this Logical Process.')
lPETask = MibTableColumn((1, 3, 6, 1, 3, 75, 4, 1, 1, 1, 21), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lPETask.setStatus('current')
if mibBuilder.loadTexts: lPETask.setDescription('This is the expected task execution wallclock time for this Logical Process.')
lPETrb = MibTableColumn((1, 3, 6, 1, 3, 75, 4, 1, 1, 1, 22), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lPETrb.setStatus('current')
if mibBuilder.loadTexts: lPETrb.setDescription('This is the expected wallclock time spent performing a rollback for this Logical Process.')
lPVmRate = MibTableColumn((1, 3, 6, 1, 3, 75, 4, 1, 1, 1, 23), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lPVmRate.setStatus('current')
if mibBuilder.loadTexts: lPVmRate.setDescription('This is the rate at which virtual messages were processed by this Logical Process.')
lPReRate = MibTableColumn((1, 3, 6, 1, 3, 75, 4, 1, 1, 1, 24), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lPReRate.setStatus('current')
if mibBuilder.loadTexts: lPReRate.setDescription('This is the time until next virtual message.')
lPSpeedup = MibTableColumn((1, 3, 6, 1, 3, 75, 4, 1, 1, 1, 25), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lPSpeedup.setStatus('current')
if mibBuilder.loadTexts: lPSpeedup.setDescription('This is the speedup, ratio of virtual time to wallclock time, of this logical process.')
lPLookahead = MibTableColumn((1, 3, 6, 1, 3, 75, 4, 1, 1, 1, 26), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lPLookahead.setStatus('current')
if mibBuilder.loadTexts: lPLookahead.setDescription('This is the expected lookahead in milliseconds of this Logical Process.')
lPNumNoState = MibTableColumn((1, 3, 6, 1, 3, 75, 4, 1, 1, 1, 27), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: lPNumNoState.setStatus('current')
if mibBuilder.loadTexts: lPNumNoState.setDescription('This is the number of times there was no valid state to restore when needed by a rollback or when required to check prediction accuracy.')
lPStatePred = MibTableColumn((1, 3, 6, 1, 3, 75, 4, 1, 1, 1, 28), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lPStatePred.setStatus('current')
if mibBuilder.loadTexts: lPStatePred.setDescription('This is the cached value of the state at the nearest time to the current time.')
lPPktPred = MibTableColumn((1, 3, 6, 1, 3, 75, 4, 1, 1, 1, 29), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lPPktPred.setStatus('current')
if mibBuilder.loadTexts: lPPktPred.setDescription('This is the predicted value in a virtual message.')
lPTdiff = MibTableColumn((1, 3, 6, 1, 3, 75, 4, 1, 1, 1, 30), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lPTdiff.setStatus('current')
if mibBuilder.loadTexts: lPTdiff.setDescription('This is the time difference between a predicted and an actual value.')
lPStateError = MibTableColumn((1, 3, 6, 1, 3, 75, 4, 1, 1, 1, 31), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lPStateError.setStatus('current')
if mibBuilder.loadTexts: lPStateError.setDescription('This is the difference between the contents of an application value and the state value as seen within the virtual message.')
lPUptime = MibTableColumn((1, 3, 6, 1, 3, 75, 4, 1, 1, 1, 32), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: lPUptime.setStatus('current')
if mibBuilder.loadTexts: lPUptime.setDescription('This is the time in milliseconds that Atropos has been running on this node.')
mibBuilder.exportSymbols("ATROPOS-MIB", lPTolerance=lPTolerance, lPVirtual=lPVirtual, lPNumAnti=lPNumAnti, lPUptime=lPUptime, lPID=lPID, lPSQSize=lPSQSize, lPPropX=lPPropX, lPTdiff=lPTdiff, lPCausalityRollbacks=lPCausalityRollbacks, lPTable=lPTable, lPLookAhead=lPLookAhead, lPETrb=lPETrb, lPLookahead=lPLookahead, PYSNMP_MODULE_ID=atroposMIB, lPIndex=lPIndex, lPReRate=lPReRate, lPQSSize=lPQSSize, lPNumNoState=lPNumNoState, lP=lP, lPGVT=lPGVT, lPGvtUpdate=lPGvtUpdate, atroposMIB=atroposMIB, lPLVT=lPLVT, lPPredAcc=lPPredAcc, lPETask=lPETask, lPVmRate=lPVmRate, lPSpeedup=lPSpeedup, lPPktPred=lPPktPred, lPReal=lPReal, lPQRSize=lPQRSize, lPStatePred=lPStatePred, lPStepSize=lPStepSize, lPStateError=lPStateError, lPToleranceRollbacks=lPToleranceRollbacks, lPEntry=lPEntry, lPNumPkts=lPNumPkts, lPPropY=lPPropY)
| 107.317073 | 812 | 0.7725 |
4a23aea9319d2ca2f7b9e745c0192ae2265ec7ed | 625 | py | Python | examples/vhdl/com/run.py | tyski34/vunit | cc0e0fd220ec6ce409f5c48301a78ca3decab5fd | [
"Artistic-2.0"
] | null | null | null | examples/vhdl/com/run.py | tyski34/vunit | cc0e0fd220ec6ce409f5c48301a78ca3decab5fd | [
"Artistic-2.0"
] | null | null | null | examples/vhdl/com/run.py | tyski34/vunit | cc0e0fd220ec6ce409f5c48301a78ca3decab5fd | [
"Artistic-2.0"
] | null | null | null | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (c) 2014-2019, Lars Asplund [email protected]
from os.path import join, dirname
from vunit import VUnit
prj = VUnit.from_argv()
prj.add_com()
prj.add_verification_components()
prj.add_osvvm()
lib = prj.add_library('lib')
lib.add_source_files(join(dirname(__file__), 'src', '*.vhd'))
tb_lib = prj.add_library('tb_lib')
tb_lib.add_source_files(join(dirname(__file__), 'test', '*.vhd'))
prj.main()
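# Note: VUnit run scripts like this one are executed directly (e.g. ``python run.py``);
# VUnit.from_argv() parses the command line, so simulator selection, verbosity and test
# filtering are handled by VUnit's CLI (available flags depend on the installed version).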
| 28.409091 | 75 | 0.736 |
4a23aefef0ef4866844467c6a3f5f7a061d7ce97 | 14,450 | py | Python | opt/ros/kinetic/lib/python2.7/dist-packages/visualization_msgs/msg/_ImageMarker.py | Roboy/roboy_controlled_node_fpga | dbba4eff19ed04469a6196ba368cea231cad539d | [
"BSD-3-Clause"
] | 2 | 2018-12-11T16:35:20.000Z | 2019-01-23T16:42:17.000Z | opt/ros/kinetic/lib/python2.7/dist-packages/visualization_msgs/msg/_ImageMarker.py | Roboy/roboy_managing_node_fpga | 64ffe5aec2f2c98a051bb1a881849c195b8d052c | [
"BSD-3-Clause"
] | 1 | 2018-12-28T21:11:50.000Z | 2018-12-28T21:11:50.000Z | opt/ros/kinetic/lib/python2.7/dist-packages/visualization_msgs/msg/_ImageMarker.py | Roboy/roboy_managing_node_fpga | 64ffe5aec2f2c98a051bb1a881849c195b8d052c | [
"BSD-3-Clause"
] | 3 | 2018-01-21T17:53:17.000Z | 2021-09-08T10:22:05.000Z | # This Python file uses the following encoding: utf-8
"""autogenerated by genpy from visualization_msgs/ImageMarker.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import geometry_msgs.msg
import genpy
import std_msgs.msg
class ImageMarker(genpy.Message):
_md5sum = "1de93c67ec8858b831025a08fbf1b35c"
_type = "visualization_msgs/ImageMarker"
_has_header = True #flag to mark the presence of a Header object
_full_text = """uint8 CIRCLE=0
uint8 LINE_STRIP=1
uint8 LINE_LIST=2
uint8 POLYGON=3
uint8 POINTS=4
uint8 ADD=0
uint8 REMOVE=1
Header header
string ns # namespace, used with id to form a unique id
int32 id # unique id within the namespace
int32 type # CIRCLE/LINE_STRIP/etc.
int32 action # ADD/REMOVE
geometry_msgs/Point position # 2D, in pixel-coords
float32 scale # the diameter for a circle, etc.
std_msgs/ColorRGBA outline_color
uint8 filled # whether to fill in the shape with color
std_msgs/ColorRGBA fill_color # color [0.0-1.0]
duration lifetime # How long the object should last before being automatically deleted. 0 means forever
geometry_msgs/Point[] points # used for LINE_STRIP/LINE_LIST/POINTS/etc., 2D in pixel coords
std_msgs/ColorRGBA[] outline_colors # a color for each line, point, etc.
================================================================================
MSG: std_msgs/Header
# Standard metadata for higher-level stamped data types.
# This is generally used to communicate timestamped data
# in a particular coordinate frame.
#
# sequence ID: consecutively increasing ID
uint32 seq
#Two-integer timestamp that is expressed as:
# * stamp.sec: seconds (stamp_secs) since epoch (in Python the variable is called 'secs')
# * stamp.nsec: nanoseconds since stamp_secs (in Python the variable is called 'nsecs')
# time-handling sugar is provided by the client library
time stamp
#Frame this data is associated with
# 0: no frame
# 1: global frame
string frame_id
================================================================================
MSG: geometry_msgs/Point
# This contains the position of a point in free space
float64 x
float64 y
float64 z
================================================================================
MSG: std_msgs/ColorRGBA
float32 r
float32 g
float32 b
float32 a
"""
# Pseudo-constants
CIRCLE = 0
LINE_STRIP = 1
LINE_LIST = 2
POLYGON = 3
POINTS = 4
ADD = 0
REMOVE = 1
__slots__ = ['header','ns','id','type','action','position','scale','outline_color','filled','fill_color','lifetime','points','outline_colors']
_slot_types = ['std_msgs/Header','string','int32','int32','int32','geometry_msgs/Point','float32','std_msgs/ColorRGBA','uint8','std_msgs/ColorRGBA','duration','geometry_msgs/Point[]','std_msgs/ColorRGBA[]']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommend
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
header,ns,id,type,action,position,scale,outline_color,filled,fill_color,lifetime,points,outline_colors
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(ImageMarker, self).__init__(*args, **kwds)
#message fields cannot be None, assign default values for those that are
if self.header is None:
self.header = std_msgs.msg.Header()
if self.ns is None:
self.ns = ''
if self.id is None:
self.id = 0
if self.type is None:
self.type = 0
if self.action is None:
self.action = 0
if self.position is None:
self.position = geometry_msgs.msg.Point()
if self.scale is None:
self.scale = 0.
if self.outline_color is None:
self.outline_color = std_msgs.msg.ColorRGBA()
if self.filled is None:
self.filled = 0
if self.fill_color is None:
self.fill_color = std_msgs.msg.ColorRGBA()
if self.lifetime is None:
self.lifetime = genpy.Duration()
if self.points is None:
self.points = []
if self.outline_colors is None:
self.outline_colors = []
else:
self.header = std_msgs.msg.Header()
self.ns = ''
self.id = 0
self.type = 0
self.action = 0
self.position = geometry_msgs.msg.Point()
self.scale = 0.
self.outline_color = std_msgs.msg.ColorRGBA()
self.filled = 0
self.fill_color = std_msgs.msg.ColorRGBA()
self.lifetime = genpy.Duration()
self.points = []
self.outline_colors = []
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
_x = self
buff.write(_get_struct_3I().pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
_x = self.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = self.ns
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = self
buff.write(_get_struct_3i3d5fB4f2i().pack(_x.id, _x.type, _x.action, _x.position.x, _x.position.y, _x.position.z, _x.scale, _x.outline_color.r, _x.outline_color.g, _x.outline_color.b, _x.outline_color.a, _x.filled, _x.fill_color.r, _x.fill_color.g, _x.fill_color.b, _x.fill_color.a, _x.lifetime.secs, _x.lifetime.nsecs))
length = len(self.points)
buff.write(_struct_I.pack(length))
for val1 in self.points:
_x = val1
buff.write(_get_struct_3d().pack(_x.x, _x.y, _x.z))
length = len(self.outline_colors)
buff.write(_struct_I.pack(length))
for val1 in self.outline_colors:
_x = val1
buff.write(_get_struct_4f().pack(_x.r, _x.g, _x.b, _x.a))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
if self.header is None:
self.header = std_msgs.msg.Header()
if self.position is None:
self.position = geometry_msgs.msg.Point()
if self.outline_color is None:
self.outline_color = std_msgs.msg.ColorRGBA()
if self.fill_color is None:
self.fill_color = std_msgs.msg.ColorRGBA()
if self.lifetime is None:
self.lifetime = genpy.Duration()
if self.points is None:
self.points = None
if self.outline_colors is None:
self.outline_colors = None
end = 0
_x = self
start = end
end += 12
(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _get_struct_3I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.header.frame_id = str[start:end].decode('utf-8')
else:
self.header.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.ns = str[start:end].decode('utf-8')
else:
self.ns = str[start:end]
_x = self
start = end
end += 81
(_x.id, _x.type, _x.action, _x.position.x, _x.position.y, _x.position.z, _x.scale, _x.outline_color.r, _x.outline_color.g, _x.outline_color.b, _x.outline_color.a, _x.filled, _x.fill_color.r, _x.fill_color.g, _x.fill_color.b, _x.fill_color.a, _x.lifetime.secs, _x.lifetime.nsecs,) = _get_struct_3i3d5fB4f2i().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.points = []
for i in range(0, length):
val1 = geometry_msgs.msg.Point()
_x = val1
start = end
end += 24
(_x.x, _x.y, _x.z,) = _get_struct_3d().unpack(str[start:end])
self.points.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.outline_colors = []
for i in range(0, length):
val1 = std_msgs.msg.ColorRGBA()
_x = val1
start = end
end += 16
(_x.r, _x.g, _x.b, _x.a,) = _get_struct_4f().unpack(str[start:end])
self.outline_colors.append(val1)
self.lifetime.canon()
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
_x = self
buff.write(_get_struct_3I().pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
_x = self.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = self.ns
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = self
buff.write(_get_struct_3i3d5fB4f2i().pack(_x.id, _x.type, _x.action, _x.position.x, _x.position.y, _x.position.z, _x.scale, _x.outline_color.r, _x.outline_color.g, _x.outline_color.b, _x.outline_color.a, _x.filled, _x.fill_color.r, _x.fill_color.g, _x.fill_color.b, _x.fill_color.a, _x.lifetime.secs, _x.lifetime.nsecs))
length = len(self.points)
buff.write(_struct_I.pack(length))
for val1 in self.points:
_x = val1
buff.write(_get_struct_3d().pack(_x.x, _x.y, _x.z))
length = len(self.outline_colors)
buff.write(_struct_I.pack(length))
for val1 in self.outline_colors:
_x = val1
buff.write(_get_struct_4f().pack(_x.r, _x.g, _x.b, _x.a))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
try:
if self.header is None:
self.header = std_msgs.msg.Header()
if self.position is None:
self.position = geometry_msgs.msg.Point()
if self.outline_color is None:
self.outline_color = std_msgs.msg.ColorRGBA()
if self.fill_color is None:
self.fill_color = std_msgs.msg.ColorRGBA()
if self.lifetime is None:
self.lifetime = genpy.Duration()
if self.points is None:
self.points = None
if self.outline_colors is None:
self.outline_colors = None
end = 0
_x = self
start = end
end += 12
(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _get_struct_3I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.header.frame_id = str[start:end].decode('utf-8')
else:
self.header.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.ns = str[start:end].decode('utf-8')
else:
self.ns = str[start:end]
_x = self
start = end
end += 81
(_x.id, _x.type, _x.action, _x.position.x, _x.position.y, _x.position.z, _x.scale, _x.outline_color.r, _x.outline_color.g, _x.outline_color.b, _x.outline_color.a, _x.filled, _x.fill_color.r, _x.fill_color.g, _x.fill_color.b, _x.fill_color.a, _x.lifetime.secs, _x.lifetime.nsecs,) = _get_struct_3i3d5fB4f2i().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.points = []
for i in range(0, length):
val1 = geometry_msgs.msg.Point()
_x = val1
start = end
end += 24
(_x.x, _x.y, _x.z,) = _get_struct_3d().unpack(str[start:end])
self.points.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.outline_colors = []
for i in range(0, length):
val1 = std_msgs.msg.ColorRGBA()
_x = val1
start = end
end += 16
(_x.r, _x.g, _x.b, _x.a,) = _get_struct_4f().unpack(str[start:end])
self.outline_colors.append(val1)
self.lifetime.canon()
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
_struct_I = genpy.struct_I
def _get_struct_I():
global _struct_I
return _struct_I
_struct_4f = None
def _get_struct_4f():
global _struct_4f
if _struct_4f is None:
_struct_4f = struct.Struct("<4f")
return _struct_4f
_struct_3I = None
def _get_struct_3I():
global _struct_3I
if _struct_3I is None:
_struct_3I = struct.Struct("<3I")
return _struct_3I
_struct_3i3d5fB4f2i = None
def _get_struct_3i3d5fB4f2i():
global _struct_3i3d5fB4f2i
if _struct_3i3d5fB4f2i is None:
_struct_3i3d5fB4f2i = struct.Struct("<3i3d5fB4f2i")
return _struct_3i3d5fB4f2i
_struct_3d = None
def _get_struct_3d():
global _struct_3d
if _struct_3d is None:
_struct_3d = struct.Struct("<3d")
return _struct_3d
| 36.397985 | 336 | 0.634256 |
4a23af76ab44b1cf9daeed86e68088d0a735db0d | 344 | py | Python | electricity/forms.py | siddharth-143/Elektra | 1534681e026b28a0233f6f69b3c726c7f34ceedb | [
"MIT"
] | 49 | 2020-09-28T04:07:48.000Z | 2022-03-02T00:49:55.000Z | electricity/forms.py | siddharth-143/Elektra | 1534681e026b28a0233f6f69b3c726c7f34ceedb | [
"MIT"
] | 232 | 2021-02-28T16:33:23.000Z | 2021-10-04T16:37:58.000Z | electricity/forms.py | siddharth-143/Elektra | 1534681e026b28a0233f6f69b3c726c7f34ceedb | [
"MIT"
] | 122 | 2020-09-27T18:28:15.000Z | 2021-11-04T15:51:34.000Z | from django import forms
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
class RegistrationForm(UserCreationForm):
class Meta:
model = User
fields = ['username',
'email',
'first_name',
'last_name',
] | 26.461538 | 54 | 0.578488 |
4a23afe16fad88f7fae7105c8c33d5ea3e10105a | 919 | py | Python | batch/batch/cloud/gcp/worker/credentials.py | jmartasek/hail | bc009191a6a159dc7d620e7f573199c0916141fa | [
"MIT"
] | null | null | null | batch/batch/cloud/gcp/worker/credentials.py | jmartasek/hail | bc009191a6a159dc7d620e7f573199c0916141fa | [
"MIT"
] | 19 | 2022-03-03T20:11:41.000Z | 2022-03-30T20:31:57.000Z | batch/batch/cloud/gcp/worker/credentials.py | pwc2/hail | edeb70bc789c881dffa0724ddd11fcb25e689b67 | [
"MIT"
] | null | null | null | from typing import Dict
import base64
from ....worker.credentials import CloudUserCredentials
class GCPUserCredentials(CloudUserCredentials):
def __init__(self, data: Dict[str, bytes]):
self._data = data
@property
def secret_name(self) -> str:
return 'gsa-key'
@property
def secret_data(self) -> Dict[str, bytes]:
return self._data
@property
def file_name(self) -> str:
return 'key.json'
@property
def cloud_env_name(self) -> str:
return 'GOOGLE_APPLICATION_CREDENTIALS'
@property
def hail_env_name(self) -> str:
return 'HAIL_GSA_KEY_FILE'
@property
def username(self):
return '_json_key'
@property
def password(self) -> str:
return base64.b64decode(self.secret_data['key.json']).decode()
@property
def mount_path(self):
return f'/{self.secret_name}/{self.file_name}'
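# Usage sketch (illustrative key material only):
#
#     creds = GCPUserCredentials({'key.json': base64.b64encode(b'{"type": "service_account"}')})
#     creds.mount_path  # -> '/gsa-key/key.json'
#     creds.username    # -> '_json_key'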
| 21.880952 | 70 | 0.647443 |
4a23b11ef7ebe20cea296ac72a2a2a5a978dceb0 | 625 | py | Python | api/serializers.py | Emocial-NLP-Depression-Detection/Emocial-backend-API | 23d8e7490d3dc87eab9b02c54e1d5710758200f1 | [
"MIT"
] | null | null | null | api/serializers.py | Emocial-NLP-Depression-Detection/Emocial-backend-API | 23d8e7490d3dc87eab9b02c54e1d5710758200f1 | [
"MIT"
] | null | null | null | api/serializers.py | Emocial-NLP-Depression-Detection/Emocial-backend-API | 23d8e7490d3dc87eab9b02c54e1d5710758200f1 | [
"MIT"
] | null | null | null | from django.db.models import fields
from api.models import Tweets
from rest_framework import serializers
from .models import *
class TweetSerializer(serializers.ModelSerializer):
class Meta:
model = Tweets
fields = '__all__'
class TwitterUserSerializer(serializers.ModelSerializer):
class Meta:
model = TwitterUser
fields = '__all__'
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = '__all__'
class QuestionaireSerializer(serializers.ModelSerializer):
class Meta:
model = Questionaire
fields = "__all__" | 24.038462 | 58 | 0.7088 |
4a23b282a84ac8364be412a4570feb2c1bd2741b | 3,339 | py | Python | src/visual_attention_base/scripts/visual_attention_base/markerClass.py | ahmohamed1/activeStereoVisionPlatform | 6c928ca242e4de68c7b15a8748bff1d9f7fa1382 | [
"MIT"
] | null | null | null | src/visual_attention_base/scripts/visual_attention_base/markerClass.py | ahmohamed1/activeStereoVisionPlatform | 6c928ca242e4de68c7b15a8748bff1d9f7fa1382 | [
"MIT"
] | null | null | null | src/visual_attention_base/scripts/visual_attention_base/markerClass.py | ahmohamed1/activeStereoVisionPlatform | 6c928ca242e4de68c7b15a8748bff1d9f7fa1382 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import rospy
import numpy as np
from visualization_msgs.msg import Marker, MarkerArray
from geometry_msgs.msg import Quaternion, Pose, Point, Vector3
from std_msgs.msg import Header, ColorRGBA
dummyList = []
for i in range(0,5):
pose2D = np.random.randint(0,2000,(2)).tolist()
    pose3D = np.random.uniform(-1.0, 1.0, 3).tolist()
probability2D = np.random.randint(0.0,100.0,(1)).tolist()
probability3D = np.random.randint(0.0,100.0,(1)).tolist()
    # Order must match the unpacking in publishMarkerArray: [id, 2D prob, 2D pose, 3D prob, 3D pose].
    dummyList.append([i, probability2D, pose2D, probability3D, pose3D])
class CreateVisualizationMarker:
def __init__(self, frame):
self.marker_publisher = rospy.Publisher('visualization_cognitive_map', MarkerArray, queue_size=5)
self.frame = frame
self.dummyCount = 0
def createMarker(self, idea, text, position, publishText=True):
marker_text = Marker()
marker_text.header.frame_id = self.frame
if publishText:
marker_text.ns = "TextSpace";
marker_text.type = Marker.TEXT_VIEW_FACING
marker_text.scale.x = 0.2
marker_text.scale.y = 0.2
marker_text.scale.z = 0.05
marker_text.color.a = 1.0
marker_text.color.r = 1.0
marker_text.color.g = 1.0
marker_text.color.b = 0.0
else:
marker_text.ns = "ObjectSpace";
marker_text.type = Marker.SPHERE;
marker_text.scale.x = 0.1
marker_text.scale.y = 0.1
marker_text.scale.z = 0.1
marker_text.color.a = 1.0
marker_text.color.r = 1.0
marker_text.color.g = 0.0
marker_text.color.b = 0.0
marker_text.action = Marker.ADD
marker_text.pose.orientation.w = 1.0
marker_text.pose.position.x = position[0]
marker_text.pose.position.y = position[1]
marker_text.pose.position.z = position[2]
marker_text.id = idea
marker_text.text = text
return marker_text
def publishMarkerArray(self, List):
markerArray = MarkerArray()
for i, list in enumerate(List):
text = self.convertInformationToString(list[0], list[1], list[2],list[3], list[4])
# print(text)
            markerText = self.createMarker(list[0], text, list[4])
            markerArray.markers.append(markerText)
markerObject = self.createMarker(list[0], text, list[4],False)
markerArray.markers.append(markerObject)
# self.dummyCount += 1
self.marker_publisher.publish(markerArray)
def convertInformationToString(self, idea, probability2D, pose2D, probability3D, pose3D):#['idea', '2D propability', '2D size', '2D pose' , '3D propability', '3D pose']
string = 'Tomato ID: ' + str(idea)
string = string+ '\n2D propability: ' + str(probability2D) + '%'
string = string + '\n2D pose: ' + str(pose2D)
string = string + '\n3D propability: ' + str(probability3D) + '%'
string = string + '\n3D pose: ' + str(pose3D)
return string
def main():
rospy.init_node('visualAttention_information')
createVisualizationMarker = CreateVisualizationMarker('map')
while not rospy.is_shutdown():
createVisualizationMarker.publishMarkerArray(dummyList)
if __name__ == '__main__':
main()
| 36.293478 | 172 | 0.631926 |
4a23b2e42ca2690778ca9980a65e6dc8554e9e08 | 4,552 | py | Python | homeassistant/components/binary_sensor/ping.py | adolfoeliazat/voidhomecontrol | 6d733253811c553912e46e24debec818b28b0688 | [
"Apache-2.0"
] | 1 | 2021-08-06T09:54:39.000Z | 2021-08-06T09:54:39.000Z | homeassistant/components/binary_sensor/ping.py | adolfoeliazat/voidhomecontrol | 6d733253811c553912e46e24debec818b28b0688 | [
"Apache-2.0"
] | null | null | null | homeassistant/components/binary_sensor/ping.py | adolfoeliazat/voidhomecontrol | 6d733253811c553912e46e24debec818b28b0688 | [
"Apache-2.0"
] | 1 | 2020-08-26T20:54:14.000Z | 2020-08-26T20:54:14.000Z | """
Tracks the latency of a host by sending ICMP echo requests (ping).
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/binary_sensor.ping/
"""
import logging
import subprocess
import re
import sys
from datetime import timedelta
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.components.binary_sensor import (
BinarySensorDevice, PLATFORM_SCHEMA)
from homeassistant.const import CONF_NAME, CONF_HOST
_LOGGER = logging.getLogger(__name__)
ATTR_ROUND_TRIP_TIME_AVG = 'round_trip_time_avg'
ATTR_ROUND_TRIP_TIME_MAX = 'round_trip_time_max'
ATTR_ROUND_TRIP_TIME_MDEV = 'round_trip_time_mdev'
ATTR_ROUND_TRIP_TIME_MIN = 'round_trip_time_min'
CONF_PING_COUNT = 'count'
DEFAULT_NAME = 'Ping Binary sensor'
DEFAULT_PING_COUNT = 5
DEFAULT_SENSOR_CLASS = 'connectivity'
SCAN_INTERVAL = timedelta(minutes=5)
PING_MATCHER = re.compile(
r'(?P<min>\d+.\d+)\/(?P<avg>\d+.\d+)\/(?P<max>\d+.\d+)\/(?P<mdev>\d+.\d+)')
WIN32_PING_MATCHER = re.compile(
r'(?P<min>\d+)ms.+(?P<max>\d+)ms.+(?P<avg>\d+)ms')
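# Typical summary lines the two patterns above are meant to match (exact wording and
# precision vary between ping implementations/versions):
#   Linux/Unix: rtt min/avg/max/mdev = 0.043/0.078/0.121/0.025 ms
#   Windows:    Minimum = 1ms, Maximum = 4ms, Average = 2ms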
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_PING_COUNT, default=DEFAULT_PING_COUNT): cv.positive_int,
})
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Set up the Ping Binary sensor."""
name = config.get(CONF_NAME)
host = config.get(CONF_HOST)
count = config.get(CONF_PING_COUNT)
add_devices([PingBinarySensor(name, PingData(host, count))], True)
class PingBinarySensor(BinarySensorDevice):
"""Representation of a Ping Binary sensor."""
def __init__(self, name, ping):
"""Initialize the Ping Binary sensor."""
self._name = name
self.ping = ping
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def device_class(self):
"""Return the class of this sensor."""
return DEFAULT_SENSOR_CLASS
@property
def is_on(self):
"""Return true if the binary sensor is on."""
return self.ping.available
@property
def device_state_attributes(self):
"""Return the state attributes of the ICMP checo request."""
if self.ping.data is not False:
return {
ATTR_ROUND_TRIP_TIME_AVG: self.ping.data['avg'],
ATTR_ROUND_TRIP_TIME_MAX: self.ping.data['max'],
ATTR_ROUND_TRIP_TIME_MDEV: self.ping.data['mdev'],
ATTR_ROUND_TRIP_TIME_MIN: self.ping.data['min'],
}
def update(self):
"""Get the latest data."""
self.ping.update()
class PingData(object):
"""The Class for handling the data retrieval."""
def __init__(self, host, count):
"""Initialize the data object."""
self._ip_address = host
self._count = count
self.data = {}
self.available = False
if sys.platform == 'win32':
self._ping_cmd = [
'ping', '-n', str(self._count), '-w', '1000', self._ip_address]
else:
self._ping_cmd = [
'ping', '-n', '-q', '-c', str(self._count), '-W1',
self._ip_address]
def ping(self):
"""Send ICMP echo request and return details if success."""
pinger = subprocess.Popen(
self._ping_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
try:
out = pinger.communicate()
_LOGGER.debug("Output is %s", str(out))
if sys.platform == 'win32':
match = WIN32_PING_MATCHER.search(str(out).split('\n')[-1])
rtt_min, rtt_avg, rtt_max = match.groups()
return {
'min': rtt_min,
'avg': rtt_avg,
'max': rtt_max,
'mdev': ''}
else:
match = PING_MATCHER.search(str(out).split('\n')[-1])
rtt_min, rtt_avg, rtt_max, rtt_mdev = match.groups()
return {
'min': rtt_min,
'avg': rtt_avg,
'max': rtt_max,
'mdev': rtt_mdev}
except (subprocess.CalledProcessError, AttributeError):
return False
def update(self):
"""Retrieve the latest details from the host."""
self.data = self.ping()
self.available = bool(self.data)
| 31.611111 | 79 | 0.61138 |
4a23b39493b074a48a78ab46eb1e60b71ac560fe | 2,711 | py | Python | service/group_dao.py | meyhane/azure-ad | 69b047bc94afe7a9aa3b7899c7e4f3f2ecb002c3 | [
"Apache-2.0"
] | null | null | null | service/group_dao.py | meyhane/azure-ad | 69b047bc94afe7a9aa3b7899c7e4f3f2ecb002c3 | [
"Apache-2.0"
] | null | null | null | service/group_dao.py | meyhane/azure-ad | 69b047bc94afe7a9aa3b7899c7e4f3f2ecb002c3 | [
"Apache-2.0"
] | null | null | null | import logging
from dao_helper import get_all_objects, make_request, GRAPH_URL, is_object_already_exists_exception, \
clear_sesam_attributes, stream_as_json
RESOURCE_PATH = '/groups/'
def sync_group_array(group_data_array):
def __try_create(group_data):
"""
Internal function to create a group
:param group_data: json object with group details
:return: void
"""
logging.info(f'trying to create group {group_data.get("displayName")}')
make_request(f'{GRAPH_URL}{RESOURCE_PATH}', 'POST', group_data)
logging.info(f'group {group_data.get("displayName")} created successfully')
def __try_update(group_data):
"""
Internal function to update a group
:param group_data: json object with group details, must contain group identifier
:return: void
"""
group_id = group_data['id'] if 'id' in group_data else None
if not group_id:
raise Exception("Couldn't find id for group")
logging.info(f'trying to update group {group_data.get("displayName")}')
make_request(f'{GRAPH_URL}{RESOURCE_PATH}{group_id}', 'PATCH', group_data)
logging.info(f'group {group_data.get("displayName")} updated successfully')
def __try_delete(group_data):
"""
Internal function to delete a group
:param group_data: json object with group details, must contain group identifier
:return: void
"""
group_id = group_data['id'] if 'id' in group_data else None
if not group_id:
raise Exception("Couldn't find id for group")
logging.info(f'trying to delete group {group_data.get("displayName")}')
make_request(f'{GRAPH_URL}{RESOURCE_PATH}{group_id}', 'DELETE')
logging.info(f'group {group_data.get("displayName")} disabled successfully')
for group in group_data_array:
if '_deleted' in group and group['_deleted']:
__try_delete(group)
continue
group = clear_sesam_attributes(group)
try:
if 'id' not in group:
__try_create(group)
else:
__try_update(group)
except Exception as e:
if is_object_already_exists_exception(e):
__try_update(group)
else:
raise Exception from e
def get_all_groups(delta=None, params=None):
"""
Fetch and stream back groups from Azure AD via MS Graph API
:param delta: delta token from last request
:return: generated JSON output with all fetched groups
"""
yield from stream_as_json(get_all_objects(f'{RESOURCE_PATH}delta', delta, params=params))
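# Usage sketch (assumes the Graph API auth used by dao_helper is already configured;
# the payload uses the standard Graph group-creation fields):
#
#     sync_group_array([{"displayName": "Team A", "mailEnabled": False,
#                        "mailNickname": "team-a", "securityEnabled": True}])
#     for chunk in get_all_groups():
#         ...  # stream the JSON chunks back to the caller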
| 37.136986 | 102 | 0.646625 |
4a23b492c746b071b85fe9afe864b6c19ecd905e | 852 | py | Python | misc/services/life_mngr/s5_trigger_linux.py | donsheng/acrn-hypervisor | 79edf8ba08f3f6d11d1ccf464b208c80b5b0fd24 | [
"BSD-3-Clause"
] | 848 | 2018-03-06T01:20:35.000Z | 2022-03-31T05:47:50.000Z | misc/services/life_mngr/s5_trigger_linux.py | donsheng/acrn-hypervisor | 79edf8ba08f3f6d11d1ccf464b208c80b5b0fd24 | [
"BSD-3-Clause"
] | 6,483 | 2018-03-09T05:29:36.000Z | 2022-03-31T20:39:35.000Z | misc/services/life_mngr/s5_trigger_linux.py | donsheng/acrn-hypervisor | 79edf8ba08f3f6d11d1ccf464b208c80b5b0fd24 | [
"BSD-3-Clause"
] | 593 | 2018-03-06T07:04:42.000Z | 2022-03-29T15:39:27.000Z | #!/usr/bin/env python3
#
# Copyright (C) 2021 Intel Corporation.
#
# SPDX-License-Identifier: BSD-3-Clause
#
import socket
class SocketClient:
def __init__(self):
pass
def connect_to_server(self):
SOKET_ADDR = '/var/lib/life_mngr/monitor.sock'
SHUTDOWN_REQ = 'req_sys_shutdown'
BUF_LEN = 1024
# unix domain sockets
server_address = SOKET_ADDR
socket_family = socket.AF_UNIX
socket_type = socket.SOCK_STREAM
sock = socket.socket(socket_family, socket_type)
sock.connect(server_address)
sock.sendall(SHUTDOWN_REQ.encode())
data = sock.recv(BUF_LEN)
print(f"Waiting for ACK message...: {data.decode()}")
sock.close()
if __name__ == "__main__":
socket_client_obj = SocketClient()
socket_client_obj.connect_to_server()
| 26.625 | 61 | 0.65493 |
4a23b4b8442bdb4e16f09933801eb515b899d978 | 691 | py | Python | test_pypass.py | tsitsiflora/pypass | 18d8e70caf112f3feae8d7b5d7956d46f9c64916 | [
"Apache-2.0"
] | 6 | 2019-02-14T06:43:24.000Z | 2021-11-15T20:26:23.000Z | test_pypass.py | tsitsiflora/pypass | 18d8e70caf112f3feae8d7b5d7956d46f9c64916 | [
"Apache-2.0"
] | 1 | 2019-03-04T08:07:46.000Z | 2019-03-04T22:09:05.000Z | test_pypass.py | tsitsiflora/pypass | 18d8e70caf112f3feae8d7b5d7956d46f9c64916 | [
"Apache-2.0"
] | 2 | 2019-02-23T08:36:17.000Z | 2019-03-15T18:48:18.000Z | import pytest
from pypass import clean_input, reverser
def test_clean():
assert clean_input('bob') == 'bob'
assert clean_input('Mary-Jane') == 'maryjane'
assert clean_input('#$%tsitsi&') == 'tsitsi'
assert clean_input(' sarah ') == 'sarah'
assert clean_input('JOHN') == 'john'
assert clean_input('james%') != 'james%'
assert clean_input('Blessing') != 'Blessing'
def test_reverser():
assert reverser('flora') == 'arolf'
assert reverser('sarah') == 'haras'
assert reverser('Liya') == 'ayiL'
assert reverser('tsitsi') == 'istist'
assert reverser('hannah') == 'hannah'
assert reverser('MARY') == 'YRAM'
assert reverser('john') != 'john' | 34.55 | 49 | 0.635311 |
4a23b58f7bb3baa89061bff21a00d058c618069d | 3,662 | py | Python | pymbt/analysis/_structure/structure_windows.py | klavinslab/pymbt-legacy | d638aecd954664e416ed28b30cdbbcfcb176bae5 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | pymbt/analysis/_structure/structure_windows.py | klavinslab/pymbt-legacy | d638aecd954664e416ed28b30cdbbcfcb176bae5 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | pymbt/analysis/_structure/structure_windows.py | klavinslab/pymbt-legacy | d638aecd954664e416ed28b30cdbbcfcb176bae5 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | '''Evaluate windows of a sequence for in-context structure.'''
try:
from matplotlib import pylab
except ImportError:
print "Failed to import matplotlib. Plotting structures won't work."
import pymbt.analysis
class StructureWindows(object):
'''Evaluate windows of structure and plot the results.'''
def __init__(self, dna):
'''
:param dna: DNA sequence to analyze.
:type dna: pymbt.DNA
'''
self.template = dna
self.walked = []
self.core_starts = []
self.core_ends = []
self.scores = []
def windows(self, window_size=60, context_len=90, step=10):
'''Walk through the sequence of interest in windows of window_size,
evaluate free (unbound) pair probabilities.
:param window_size: Window size in base pairs.
:type window_size: int
:param context_len: The number of bases of context to use when
analyzing each window.
:type context_len: int
:param step: The number of base pairs to move for each new window.
:type step: int
'''
self.walked = _context_walk(self.template, window_size, context_len,
step)
self.core_starts, self.core_ends, self.scores = zip(*self.walked)
return self.walked
def plot(self):
'''Plot the results of the run method.'''
if self.walked:
fig = pylab.figure()
ax1 = fig.add_subplot(111)
ax1.plot(self.core_starts, self.scores, 'bo-')
pylab.xlabel('Core sequence start position (base pairs).')
pylab.ylabel('Score - Probability of being unbound.')
pylab.show()
else:
raise Exception("Run calculate() first so there's data to plot!")
def _context_walk(dna, window_size, context_len, step):
'''Generate context-dependent 'non-boundedness' scores for a DNA sequence.
:param dna: Sequence to score.
:type dna: pymbt.DNA
:param window_size: Window size in base pairs.
:type window_size: int
:param context_len: The number of bases of context to use when analyzing
each window.
:type context_len: int
:param step: The number of base pairs to move for each new window.
:type step: int
'''
# Generate window indices
window_start_ceiling = len(dna) - context_len - window_size
window_starts = range(context_len - 1, window_start_ceiling, step)
window_ends = [start + window_size for start in window_starts]
# Generate left and right in-context subsequences
l_starts = [step * i for i in range(len(window_starts))]
l_seqs = [dna[start:end] for start, end in zip(l_starts, window_ends)]
r_ends = [x + window_size + context_len for x in window_starts]
r_seqs = [dna[start:end].reverse_complement() for start, end in
zip(window_starts, r_ends)]
# Combine and calculate nupack pair probabilities
seqs = l_seqs + r_seqs
pairs_run = pymbt.analysis.nupack_multi(seqs, 'dna', 'pairs', {'index': 0})
# Focus on pair probabilities that matter - those in the window
pairs = [run[-window_size:] for run in pairs_run]
# Score by average pair probability
lr_scores = [sum(pair) / len(pair) for pair in pairs]
# Split into left-right contexts again and sum for each window
l_scores = lr_scores[0:len(seqs) / 2]
r_scores = lr_scores[len(seqs) / 2:]
scores = [(l + r) / 2 for l, r in zip(l_scores, r_scores)]
# Summarize and return window indices and score
summary = zip(window_starts, window_ends, scores)
return summary
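# Usage sketch (assumes a pymbt.DNA sequence, as in the docstrings above):
#
#     import pymbt
#     walker = StructureWindows(pymbt.DNA('ATGC' * 100))
#     walker.windows(window_size=60, context_len=90, step=10)
#     walker.plot()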
| 37.752577 | 79 | 0.643091 |
4a23b5cbca807e0974da77f66d9fdfd82a5206d6 | 1,991 | py | Python | test/Decider/Environment.py | Valkatraz/scons | 5e70c65f633dcecc035751c9f0c6f894088df8a0 | [
"MIT"
] | 1,403 | 2017-11-23T14:24:01.000Z | 2022-03-30T20:59:39.000Z | test/Decider/Environment.py | Valkatraz/scons | 5e70c65f633dcecc035751c9f0c6f894088df8a0 | [
"MIT"
] | 3,708 | 2017-11-27T13:47:12.000Z | 2022-03-29T17:21:17.000Z | test/Decider/Environment.py | Valkatraz/scons | 5e70c65f633dcecc035751c9f0c6f894088df8a0 | [
"MIT"
] | 281 | 2017-12-01T23:48:38.000Z | 2022-03-31T15:25:44.000Z | #!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Verify use of an up-to-date Decider method through a construction
environment.
"""
import TestSCons
test = TestSCons.TestSCons()
test.write('SConstruct', """
DefaultEnvironment(tools=[])
import os.path
env = Environment(tools=[])
env.Command('file.out', 'file.in', Copy('$TARGET', '$SOURCE'))
def my_decider(dependency, target, prev_ni, repo_node=None):
return os.path.exists('has-changed')
env.Decider(my_decider)
""")
test.write('file.in', "file.in\n")
test.run(arguments = '.')
test.up_to_date(arguments = '.')
test.write('has-changed', "\n")
test.not_up_to_date(arguments = '.')
test.not_up_to_date(arguments = '.')
test.unlink('has-changed')
test.up_to_date(arguments = '.')
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| 28.855072 | 73 | 0.746359 |
4a23b5d649d9a14cfb43aac454a4690f9c8e4b95 | 1,718 | py | Python | scripts/pressure_node.py | smithjoana/zoidberg_ros | 134be309b9ce9f63de107a8ae64db304bb833ee9 | [
"MIT"
] | 6 | 2018-09-15T19:16:57.000Z | 2019-07-29T19:22:12.000Z | scripts/pressure_node.py | smithjoana/zoidberg_ros | 134be309b9ce9f63de107a8ae64db304bb833ee9 | [
"MIT"
] | null | null | null | scripts/pressure_node.py | smithjoana/zoidberg_ros | 134be309b9ce9f63de107a8ae64db304bb833ee9 | [
"MIT"
] | 5 | 2018-10-19T01:49:46.000Z | 2018-11-17T18:04:55.000Z | #!/usr/bin/env python
"""
======================
Read pressure data
======================
Read SCALED_PRESSURE2 messages from the Pixhawk.
"""
from __future__ import print_function, division
import sys
import rospy
from pymavlink import mavutil
from sensor_msgs.msg import FluidPressure
from std_msgs.msg import Header
device = "udpin:127.0.0.1:14550"
def read_pressure(mav_obj):
"""
    Read SCALED_PRESSURE2 messages from the Pixhawk and publish the computed
    depth until the ROS node is shut down.
"""
pub = rospy.Publisher('/depth', FluidPressure, queue_size=10)
rospy.init_node('externalpressure')
rate = rospy.Rate(10)
msg_type = 'SCALED_PRESSURE2'
msg = mav_obj.recv_match(blocking=True)
# flush out old data
if msg.get_type() == "BAD_DATA":
if mavutil.all_printable(msg.data):
sys.stdout.write(msg.data)
sys.stdout.flush()
while not rospy.is_shutdown():
msg = mav_obj.recv_match(type=msg_type, blocking=True)
h = Header()
h.stamp = rospy.Time.now()
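        # Rough conversion from absolute pressure (hPa/mbar) to depth in metres:
        # subtract nominal surface pressure (~1014 mbar), then ~100 mbar per metre of
        # water column. The offset and scale are this script's calibration choice.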
depth_m = (msg.press_abs - 1014.25) / 100
fp = FluidPressure(header=h,
fluid_pressure=depth_m,
variance=0)
pub.publish(fp)
rate.sleep()
mav = mavutil.mavlink_connection(device,
source_system=1,
source_component=1)
# check that there is a heartbeat
mav.recv_match(type='HEARTBEAT', blocking=True)
print("Heartbeat from APM (system %u component %u)" %
(mav.target_system, mav.target_component))
print('')
# a try block ensures that mav with always be closed
try:
press = read_pressure(mav)
finally:
mav.close()
| 28.633333 | 65 | 0.624563 |
4a23b774ac5c9b87d6f5225822615e937a9dbc48 | 4,508 | py | Python | Lib/site-packages/psutil/tests/test_aix.py | edupyter/EDUPYTER38 | 396183cea72987506f1ef647c0272a2577c56218 | [
"bzip2-1.0.6"
] | null | null | null | Lib/site-packages/psutil/tests/test_aix.py | edupyter/EDUPYTER38 | 396183cea72987506f1ef647c0272a2577c56218 | [
"bzip2-1.0.6"
] | 1 | 2017-01-11T10:12:46.000Z | 2017-01-11T10:12:46.000Z | Lib/site-packages/psutil/tests/test_aix.py | edupyter/EDUPYTER38 | 396183cea72987506f1ef647c0272a2577c56218 | [
"bzip2-1.0.6"
] | 3 | 2017-01-10T13:50:05.000Z | 2019-02-12T13:20:59.000Z | #!/usr/bin/env python3
# Copyright (c) 2009, Giampaolo Rodola'
# Copyright (c) 2017, Arnon Yaari
# All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""AIX specific tests."""
import re
import unittest
import psutil
from psutil import AIX
from psutil.tests import PsutilTestCase
from psutil.tests import sh
@unittest.skipIf(not AIX, "AIX only")
class AIXSpecificTestCase(PsutilTestCase):
def test_virtual_memory(self):
out = sh('/usr/bin/svmon -O unit=KB')
re_pattern = r"memory\s*"
for field in ("size inuse free pin virtual available mmode").split():
re_pattern += r"(?P<%s>\S+)\s+" % (field,)
matchobj = re.search(re_pattern, out)
self.assertIsNotNone(
matchobj, "svmon command returned unexpected output")
KB = 1024
total = int(matchobj.group("size")) * KB
available = int(matchobj.group("available")) * KB
used = int(matchobj.group("inuse")) * KB
free = int(matchobj.group("free")) * KB
psutil_result = psutil.virtual_memory()
# TOLERANCE_SYS_MEM from psutil.tests is not enough. For some reason
# we're seeing differences of ~1.2 MB. 2 MB is still a good tolerance
# when compared to GBs.
TOLERANCE_SYS_MEM = 2 * KB * KB # 2 MB
self.assertEqual(psutil_result.total, total)
self.assertAlmostEqual(
psutil_result.used, used, delta=TOLERANCE_SYS_MEM)
self.assertAlmostEqual(
psutil_result.available, available, delta=TOLERANCE_SYS_MEM)
self.assertAlmostEqual(
psutil_result.free, free, delta=TOLERANCE_SYS_MEM)
def test_swap_memory(self):
out = sh('/usr/sbin/lsps -a')
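        # a data row from `lsps -a` typically looks like (sketch):
        #   hd6   hdisk0   rootvg   512MB   1   yes   yes   lv   0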
# From the man page, "The size is given in megabytes" so we assume
# we'll always have 'MB' in the result
# TODO maybe try to use "swap -l" to check "used" too, but its units
# are not guaranteed to be "MB" so parsing may not be consistent
matchobj = re.search(r"(?P<space>\S+)\s+"
r"(?P<vol>\S+)\s+"
r"(?P<vg>\S+)\s+"
r"(?P<size>\d+)MB", out)
self.assertIsNotNone(
matchobj, "lsps command returned unexpected output")
total_mb = int(matchobj.group("size"))
MB = 1024 ** 2
psutil_result = psutil.swap_memory()
# we divide our result by MB instead of multiplying the lsps value by
# MB because lsps may round down, so we round down too
self.assertEqual(int(psutil_result.total / MB), total_mb)
def test_cpu_stats(self):
out = sh('/usr/bin/mpstat -a')
re_pattern = r"ALL\s*"
for field in ("min maj mpcs mpcr dev soft dec ph cs ics bound rq "
"push S3pull S3grd S0rd S1rd S2rd S3rd S4rd S5rd "
"sysc").split():
re_pattern += r"(?P<%s>\S+)\s+" % (field,)
matchobj = re.search(re_pattern, out)
self.assertIsNotNone(
matchobj, "mpstat command returned unexpected output")
# numbers are usually in the millions so 1000 is ok for tolerance
CPU_STATS_TOLERANCE = 1000
psutil_result = psutil.cpu_stats()
self.assertAlmostEqual(
psutil_result.ctx_switches,
int(matchobj.group("cs")),
delta=CPU_STATS_TOLERANCE)
self.assertAlmostEqual(
psutil_result.syscalls,
int(matchobj.group("sysc")),
delta=CPU_STATS_TOLERANCE)
self.assertAlmostEqual(
psutil_result.interrupts,
int(matchobj.group("dev")),
delta=CPU_STATS_TOLERANCE)
self.assertAlmostEqual(
psutil_result.soft_interrupts,
int(matchobj.group("soft")),
delta=CPU_STATS_TOLERANCE)
def test_cpu_count_logical(self):
out = sh('/usr/bin/mpstat -a')
mpstat_lcpu = int(re.search(r"lcpu=(\d+)", out).group(1))
psutil_lcpu = psutil.cpu_count(logical=True)
self.assertEqual(mpstat_lcpu, psutil_lcpu)
def test_net_if_addrs_names(self):
out = sh('/etc/ifconfig -l')
ifconfig_names = set(out.split())
psutil_names = set(psutil.net_if_addrs().keys())
self.assertSetEqual(ifconfig_names, psutil_names)
if __name__ == '__main__':
from psutil.tests.runner import run_from_name
run_from_name(__file__)
| 36.650407 | 77 | 0.614685 |
4a23b78bf693f8fa01ced8a35aa97a6d52464859 | 64 | py | Python | src/models/util/backbone/__init__.py | shendu-sw/TFR-HSS-Benchmark | 3fbc93ff548d924050e2de5070007197f04be7f4 | [
"MIT"
] | 7 | 2021-08-24T10:01:28.000Z | 2021-12-29T07:13:17.000Z | src/models/util/backbone/__init__.py | idrl-lab/TFR-HSS-Benchmark | 3fbc93ff548d924050e2de5070007197f04be7f4 | [
"MIT"
] | null | null | null | src/models/util/backbone/__init__.py | idrl-lab/TFR-HSS-Benchmark | 3fbc93ff548d924050e2de5070007197f04be7f4 | [
"MIT"
] | 1 | 2021-08-25T01:38:39.000Z | 2021-08-25T01:38:39.000Z | from .alexnet import *
from .resnet import *
from .vgg import *
| 16 | 22 | 0.71875 |
4a23b7e2ac76be998e5e88822db9ab03bc7ddb0c | 1,024 | py | Python | examples/ur5e/sim/get_joint_info.py | weiqiao/airobot | 5f9c6f239a16f31f8923131e144b360f71a89a93 | [
"MIT"
] | null | null | null | examples/ur5e/sim/get_joint_info.py | weiqiao/airobot | 5f9c6f239a16f31f8923131e144b360f71a89a93 | [
"MIT"
] | null | null | null | examples/ur5e/sim/get_joint_info.py | weiqiao/airobot | 5f9c6f239a16f31f8923131e144b360f71a89a93 | [
"MIT"
] | null | null | null | import numpy as np
from airobot import Robot
from airobot import log_info
def main():
"""
This function demonstrates how to get joint information
such as joint positions/velocities/torques.
"""
robot = Robot('ur5e_2f140', pb_cfg={'gui': True})
robot.arm.go_home()
log_info('\nJoint positions for all actuated joints:')
jpos = robot.arm.get_jpos()
log_info(np.round(jpos, decimals=3))
joint = 'shoulder_pan_joint'
log_info('Joint [%s] position: %.3f' %
(joint, robot.arm.get_jpos('shoulder_pan_joint')))
log_info('Joint velocities:')
jvel = robot.arm.get_jvel()
log_info(np.round(jvel, decimals=3))
log_info('Joint torques:')
jtorq = robot.arm.get_jtorq()
log_info(np.round(jtorq, decimals=3))
robot.arm.eetool.close()
log_info('Gripper position (close): %.3f' % robot.arm.eetool.get_pos())
robot.arm.eetool.open()
log_info('Gripper position (open): %.3f' % robot.arm.eetool.get_pos())
if __name__ == '__main__':
main()
| 31.030303 | 75 | 0.665039 |
4a23b822089469006c2b42af4a5dbc1e1ad1ea59 | 2,506 | py | Python | setup.py | gabrieldechichi/auto-editor | 375f5f8612749f314002aa6420683543bac1318e | [
"Unlicense"
] | 1 | 2021-08-18T14:09:07.000Z | 2021-08-18T14:09:07.000Z | setup.py | gabrieldechichi/auto-editor | 375f5f8612749f314002aa6420683543bac1318e | [
"Unlicense"
] | null | null | null | setup.py | gabrieldechichi/auto-editor | 375f5f8612749f314002aa6420683543bac1318e | [
"Unlicense"
] | null | null | null | '''setup.py'''
"""
Create a build for pip
This code should only be executed by developers, not users.
"""
import os
import re
import sys
from setuptools import setup, find_packages
def pip_version():
with open(os.path.abspath('auto_editor/__init__.py')) as f:
version_content = f.read()
version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]",
version_content, re.M)
if(version_match):
print(version_match)
return version_match.group(1)
raise ValueError('Unable to find version string.')
if(sys.argv[-1] == 'publish'):
from shutil import rmtree
rmtree('build')
rmtree('dist')
os.system('python3 setup.py sdist bdist_wheel')
os.system('twine upload dist/*')
sys.exit()
with open('README.md', 'r') as f:
long_description = f.read()
setup(
name='auto-editor',
version=pip_version(),
description='Auto-Editor: Effort free video editing!',
long_description=long_description,
long_description_content_type='text/markdown',
license='Unlicense',
url='https://github.com/WyattBlue/auto-editor',
author='WyattBlue',
author_email='[email protected]',
keywords='video audio media editor editing processing nonlinear automatic ' \
'silence-detect silence-removal silence-speedup motion-detection',
packages=find_packages(),
include_package_data=True,
install_requires=[
'numpy',
'audiotsm2',
'opencv-python>=4.3',
'youtube-dl',
'requests',
'av',
],
classifiers=[
'Topic :: Multimedia :: Video',
'License :: Public Domain',
'License :: OSI Approved :: The Unlicense (Unlicense)',
'Environment :: Console',
'Natural Language :: English',
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Programming Language :: Python :: Implementation :: IronPython',
'Programming Language :: Python :: Implementation :: Jython',
],
entry_points={
"console_scripts": ["auto-editor=auto_editor.__main__:main"]
}
)
| 29.482353 | 81 | 0.630487 |
4a23b8a6a4a7c303a2be0503c8310bbb13bd91b9 | 1,823 | py | Python | src/tox_ansible/matrix/__init__.py | strider/tox-ansible | e5963fac18b788d450c0b827538b54354e261fce | [
"MIT"
] | 7 | 2020-10-10T13:58:39.000Z | 2021-04-17T14:57:59.000Z | src/tox_ansible/matrix/__init__.py | strider/tox-ansible | e5963fac18b788d450c0b827538b54354e261fce | [
"MIT"
] | 52 | 2020-08-19T16:44:08.000Z | 2021-08-31T13:34:02.000Z | src/tox_ansible/matrix/__init__.py | strider/tox-ansible | e5963fac18b788d450c0b827538b54354e261fce | [
"MIT"
] | 5 | 2020-08-19T19:02:03.000Z | 2021-04-14T04:17:44.000Z | class Matrix(object):
def __init__(self):
self.axes = []
def add_axis(self, axis):
"""Add an extension axis to this matrix. Axes can be found in the
axes.py file and are subclasses of the MatrixAxisBase class.
:param axis: An expansion axis to add to this matrix."""
self.axes.append(axis)
def expand(self, cases):
for axis in self.axes:
cases = axis.expand(cases)
return cases
class MatrixAxisBase(object):
"""Expands a list of a particular test case by creating copies with the
appropriately named factors and replacing the base case.
***THIS IS AN ABSTRACT BASE CLASS***"""
def __init__(self, versions):
"""Initialize a matrix to expand a particular version.
:param versions: A list of versions to expand this matrix"""
self.versions = versions
def expand(self, tox_cases):
"""Expand the list of test cases by multiplying it by this matrix for
the configured field.
:param tox_cases: An iterable of the currently existing test cases
:return: A list of the test cases, copied and expanded by this
particular matrix factor."""
results = []
for tox_case in tox_cases:
for version in self.versions:
results.append(self.expand_one(tox_case, version))
return results
def expand_one(self, tox_case, version):
"""Do the actual expansion on a particular test case.
***MUST BE OVERRIDDEN BY CHILD CLASSES***
        :param tox_case: the test case to be expanded by this particular
axis
:param version: the version of the test case to be expanded in this
step
:return: the resultant new version of the test case"""
raise NotImplementedError
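# --- Illustrative sketch (not part of the original module) ---
# A minimal concrete axis showing how expand_one() is meant to be overridden.
# The `envname` attribute used below is an assumption made for this example only.
class ExampleSuffixAxis(MatrixAxisBase):
    def expand_one(self, tox_case, version):
        import copy
        new_case = copy.copy(tox_case)  # copy so the base case stays untouched
        new_case.envname = "{}-{}".format(getattr(tox_case, "envname", "env"), version)
        return new_case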
| 33.759259 | 77 | 0.645639 |
4a23b9a94aaa4b8863f1f8a812356490d6cdacbc | 1,105 | py | Python | kafka_consumer/managers.py | innovationinit/django-kafka-consumer | c401db135315496f5386330ad67b13aa889e2bb5 | [
"BSD-2-Clause"
] | null | null | null | kafka_consumer/managers.py | innovationinit/django-kafka-consumer | c401db135315496f5386330ad67b13aa889e2bb5 | [
"BSD-2-Clause"
] | null | null | null | kafka_consumer/managers.py | innovationinit/django-kafka-consumer | c401db135315496f5386330ad67b13aa889e2bb5 | [
"BSD-2-Clause"
] | null | null | null | from django.db.models import Manager
from .utils import get_class_path
class JunkMessageManager(Manager):
def handle_junk_message(self, subscriber, raw_message, exception, topic_key):
"""Store kafka message in original form for further process
:param subscriber: Message subscriber, processes given message
:type subscriber: BaseSubscriber
:param raw_message: Original message
:type raw_message: consumer.kafka_consumer.BaseForeignMessage
:param exception: Exception, that occurred in parsing or processing time
:type exception: Exception
:param topic_key: Topic entry key, to be lookup in config at retry time. Used to instantiate message processors
:type topic_key: str
"""
max_length = self.model._meta.get_field('error_message').max_length
self.create(
subscriber=get_class_path(subscriber.__class__),
raw_data=raw_message.raw_data,
offset=raw_message.offset,
error_message=str(exception)[:max_length],
topic_key=topic_key,
)
| 36.833333 | 119 | 0.696833 |
4a23baa22d5e5314ffb6ffe89dcf7ee8b3efb167 | 3,008 | py | Python | blamepipeline/preprocess/match_entity_article.py | Shuailong/BlamePipeline | bbd508dd0afc2e2c579f6afea5a3acd4c5c47956 | [
"MIT"
] | 5 | 2019-02-12T14:47:40.000Z | 2021-10-24T00:39:05.000Z | blamepipeline/preprocess/match_entity_article.py | Shuailong/BlamePipeline | bbd508dd0afc2e2c579f6afea5a3acd4c5c47956 | [
"MIT"
] | 1 | 2020-08-10T10:16:29.000Z | 2020-08-10T10:16:29.000Z | blamepipeline/preprocess/match_entity_article.py | Shuailong/BlamePipeline | bbd508dd0afc2e2c579f6afea5a3acd4c5c47956 | [
"MIT"
] | 3 | 2019-02-12T14:48:01.000Z | 2022-02-07T15:27:09.000Z | # encoding: utf-8
'''
Match articles with annotated blame entities.
'''
import argparse
from collections import defaultdict
from .match_article_entry import match_data
sources = ['USA', 'NYT', 'WSJ']
cases = defaultdict(int)
def filter_data(pairs, source=None, ignore_claim=False):
valid_pairs = []
articles = set()
global cases
for entry, article in pairs:
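        # `source` here is the blame source entity from the annotation; note that
        # it overwrites the `source` keyword argument received from main()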
source = entry['source']
target = entry['target']
claim = entry['claim']
content = article['content']
if not source or not target:
# empty entity
cases['empty entity'] += 1
continue
if source.isdigit() or target.isdigit():
# entity is number
cases['digit entity'] += 1
continue
if len(source) < 2 or len(target) < 2:
# entity length too short
cases['entity too short'] += 1
continue
if source == target:
# source and target is the same
cases['same src and tgt'] += 1
continue
if source not in content:
cases['src not in content'] += 1
continue
if target not in content:
cases['tgt not in content'] += 1
continue
if not ignore_claim:
if not claim:
# no claim
cases['no claim'] += 1
continue
if source not in claim:
cases['src not in claim'] += 1
continue
if target not in claim:
cases['tgt not in claim'] += 1
continue
if claim not in content:
cases['claim not in content'] += 1
continue
d = {}
d['title'] = entry['title']
d['date'] = entry['date']
d['source'] = source
d['target'] = target
d['claim'] = claim
d['content'] = content
valid_pairs.append(d)
articles.add((entry['date'], entry['title']))
print(f'{len(articles)} articles.')
return valid_pairs
def main(args):
print(args)
data = []
for source in sources:
print(source)
pairs = match_data(source)
print('{} pairs loaded.'.format(len(pairs)))
valid_pairs = filter_data(pairs, source=source, ignore_claim=args.ignore_claim)
print('{} valid pairs.'.format(len(valid_pairs)))
data += valid_pairs
print('=-=-=-=-=-=')
print('\n---\n')
print('\n'.join([f'{k}: {cases[k]}' for k in cases]))
def str2bool(v):
return v.lower() in ('yes', 'true', 't', '1', 'y')
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Extract matched source and target in articles')
parser.register('type', 'bool', str2bool)
parser.add_argument('--ignore-claim', type='bool', default=False,
help='ignore existence of claim when filtering data entries.')
args = parser.parse_args()
main(args)
| 28.923077 | 97 | 0.539561 |
4a23be5c8284c77c4558f258a0b6e16abe32a7c1 | 412 | py | Python | bigraph/preprocessing/pd_to_list.py | jake-b12/bigraph | 1b7b1e0ecb73ff7a800c706831ab3d19675e8c49 | [
"BSD-3-Clause"
] | 73 | 2021-01-09T16:25:15.000Z | 2022-03-19T14:03:58.000Z | bigraph/preprocessing/pd_to_list.py | jake-b12/bigraph | 1b7b1e0ecb73ff7a800c706831ab3d19675e8c49 | [
"BSD-3-Clause"
] | 3 | 2019-10-20T00:44:30.000Z | 2019-10-20T00:44:30.000Z | bigraph/preprocessing/pd_to_list.py | jake-b12/bigraph | 1b7b1e0ecb73ff7a800c706831ab3d19675e8c49 | [
"BSD-3-Clause"
] | 22 | 2021-01-12T02:15:47.000Z | 2022-03-16T20:54:32.000Z | def _add_to_list(dataframe: dict) -> list:
"""
Generate link tuples and append them to a list
    :param dataframe: pandas DataFrame whose first two columns hold the link endpoints
:return: Edge list
"""
edge_list = []
for edge_row in dataframe.iterrows():
# if not str(df_nodes[edge_row[1][1]]).__contains__('.'):
tuples = edge_row[1][0], edge_row[1][1]
edge_list.append(tuples)
return edge_list
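# Example (sketch): for a two-column DataFrame of endpoints,
#   _add_to_list(pd.DataFrame([["u1", "v1"], ["u2", "v2"]]))
# returns [("u1", "v1"), ("u2", "v2")]  (assumes the caller has pandas imported as pd)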
| 29.428571 | 65 | 0.633495 |
4a23be5d1ce7d5f6b8e8be54db6ffd7d10ee8c0f | 650 | py | Python | app/shop/urls.py | duboisR/django-stripe | add707f91ca43522ca19b7b735196ddc16aad651 | [
"MIT"
] | null | null | null | app/shop/urls.py | duboisR/django-stripe | add707f91ca43522ca19b7b735196ddc16aad651 | [
"MIT"
] | null | null | null | app/shop/urls.py | duboisR/django-stripe | add707f91ca43522ca19b7b735196ddc16aad651 | [
"MIT"
] | null | null | null | from django.urls import path, include
import shop.views
urlpatterns = [
# Shop
path('', shop.views.ShopView.as_view(), name="shop"),
path('item/<int:pk>/', shop.views.ShopItemView.as_view(), name="shop_item"),
# Payment
path('cart/', shop.views.CartView.as_view(), name="cart"),
path('checkout/', shop.views.CheckoutView.as_view(), name="checkout"),
path('payment/', shop.views.PaymentView.as_view(), name="payment"),
# Stripe
path('stripe-create-payment-intent', shop.views.stripe_create_payment, name="stripe_create_payment"),
path('stripe-webhook/', shop.views.stripe_webhook, name="stripe_webhook"),
]
| 34.210526 | 105 | 0.687692 |
4a23bee168895676364e2387acef3f5837f5cabe | 1,242 | py | Python | lang/py/pylib/code/filecmp/filecmp_mkexamples.py | ch1huizong/learning | 632267634a9fd84a5f5116de09ff1e2681a6cc85 | [
"MIT"
] | 13 | 2020-01-04T07:37:38.000Z | 2021-08-31T05:19:58.000Z | lang/py/pylib/code/filecmp/filecmp_mkexamples.py | ch1huizong/learning | 632267634a9fd84a5f5116de09ff1e2681a6cc85 | [
"MIT"
] | 3 | 2020-06-05T22:42:53.000Z | 2020-08-24T07:18:54.000Z | lang/py/pylib/code/filecmp/filecmp_mkexamples.py | ch1huizong/learning | 632267634a9fd84a5f5116de09ff1e2681a6cc85 | [
"MIT"
] | 9 | 2020-10-19T04:53:06.000Z | 2021-08-31T05:20:01.000Z | #!/usr/bin/env python
# encoding: utf-8
#
# Copyright (c) 2008 Doug Hellmann All rights reserved.
#
"""This script creates the example directory and its contents.
"""
__version__ = "$Id$"
#end_pymotw_header
import os
def mkfile(filename, body=None):
with open(filename, 'w') as f:
f.write(body or filename)
return
def make_example_dir(top):
if not os.path.exists(top):
os.mkdir(top)
curdir = os.getcwd()
os.chdir(top)
os.mkdir('dir1')
os.mkdir('dir2')
mkfile('dir1/file_only_in_dir1')
mkfile('dir2/file_only_in_dir2')
os.mkdir('dir1/dir_only_in_dir1')
os.mkdir('dir2/dir_only_in_dir2')
os.mkdir('dir1/common_dir')
os.mkdir('dir2/common_dir')
mkfile('dir1/common_file', 'this file is the same')
mkfile('dir2/common_file', 'this file is the same')
mkfile('dir1/not_the_same')
mkfile('dir2/not_the_same')
mkfile('dir1/file_in_dir1', 'This is a file in dir1')
os.mkdir('dir2/file_in_dir1')
os.chdir(curdir)
return
if __name__ == '__main__':
os.chdir(os.path.dirname(__file__) or os.getcwd())
make_example_dir('example')
make_example_dir('example/dir1/common_dir')
make_example_dir('example/dir2/common_dir')
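    # Resulting layout (sketch): example/dir1 and example/dir2 each hold a unique
    # file and directory, identical common_file contents, differing not_the_same
    # contents, a populated common_dir, and file_in_dir1 (a file in dir1 but a
    # directory in dir2) -- handy fixtures for the filecmp examples.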
| 22.581818 | 62 | 0.669082 |
4a23c1039b04095ad13180f7966459e9868a0aa5 | 2,073 | py | Python | src/python/tests/core/local/butler/scripts/batcher_test.py | mi-ac/clusterfuzz | 0b5c023eca9e3aac41faba17da8f341c0ca2ddc7 | [
"Apache-2.0"
] | 1 | 2020-12-23T02:49:09.000Z | 2020-12-23T02:49:09.000Z | src/python/tests/core/local/butler/scripts/batcher_test.py | mi-ac/clusterfuzz | 0b5c023eca9e3aac41faba17da8f341c0ca2ddc7 | [
"Apache-2.0"
] | 2 | 2021-03-31T19:59:19.000Z | 2021-05-20T22:08:07.000Z | src/python/tests/core/local/butler/scripts/batcher_test.py | henryzz0/clusterfuzz | 0427ed8328d6bd6e18540087793a41531bbaafea | [
"Apache-2.0"
] | 1 | 2021-11-06T06:22:00.000Z | 2021-11-06T06:22:00.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""batcher tests."""
import datetime
import unittest
from datastore import data_types
from local.butler.scripts import batcher
from tests.test_libs import test_utils
@test_utils.with_cloud_emulators('datastore')
class BatcherTest(unittest.TestCase):
"""Test batcher."""
def setUp(self):
self.testcase_ids = []
for i in range(100):
testcase = data_types.Testcase()
testcase.timestamp = datetime.datetime.fromtimestamp(i)
testcase.put()
self.testcase_ids.append(testcase.key.id())
  def _test(self, batch_size, expected_batch_count, expected_count):
    """Iterate the query in batches and check the batch count and total item count."""
query = data_types.Testcase.query().order(data_types.Testcase.timestamp)
list_of_testcases = list(batcher.iterate(query, batch_size=batch_size))
self.assertEqual(expected_batch_count, len(list_of_testcases))
count = 0
for testcases in list_of_testcases:
for testcase in testcases:
self.assertEqual(self.testcase_ids[count], testcase.key.id())
count += 1
self.assertEqual(expected_count, count)
def test_batch(self):
"""Test batching."""
self._test(2, 50, 100)
def test_batch_non_multiple(self):
"""Test when the batch size is not a multiple."""
self._test(7, 15, 100)
def test_too_large_batch(self):
"""Test when the batch is too large."""
self._test(105, 1, 100)
def test_exact_batch(self):
"""Test when the batch is exactly the number of items."""
self._test(100, 1, 100)
| 32.904762 | 76 | 0.721659 |
4a23c288b9fe71b040c951fe5bc1bf2b90730675 | 377 | py | Python | chatette/log.py | SimGus/Chatette | fd22b6c2e4a27b222071c93772c2ae99387aa5c3 | [
"MIT"
] | 263 | 2018-09-06T14:46:29.000Z | 2022-03-31T08:40:19.000Z | chatette/log.py | IspML/Chatette | fd22b6c2e4a27b222071c93772c2ae99387aa5c3 | [
"MIT"
] | 50 | 2018-09-06T14:50:18.000Z | 2021-11-16T03:54:27.000Z | chatette/log.py | IspML/Chatette | fd22b6c2e4a27b222071c93772c2ae99387aa5c3 | [
"MIT"
] | 49 | 2018-09-18T23:15:09.000Z | 2022-03-02T11:23:08.000Z | #!/usr/bin/env python3
# coding: utf-8
"""
Module `chatette.log`
Contains logging functions used throughout the project.
"""
import sys
# pylint: disable=invalid-name
def print_DBG(txt):
"""Prints debug information on stdout."""
print("[DBG] " + txt)
def print_warn(txt):
"""Warns the user using stdout."""
print("\n[WARN] " + txt + "\n", file=sys.stderr)
| 18.85 | 55 | 0.65252 |
4a23c2c7f072ca3b2dea167396c403809e130d93 | 3,420 | py | Python | tests/test_conversion.py | cihai/cihai | 6a9764e5b3c47b28f4a4dce27234ff99b2ee45c5 | [
"MIT"
] | 41 | 2017-05-03T12:31:19.000Z | 2022-03-25T06:41:59.000Z | tests/test_conversion.py | cihai/cihai | 6a9764e5b3c47b28f4a4dce27234ff99b2ee45c5 | [
"MIT"
] | 300 | 2017-04-23T20:50:04.000Z | 2022-03-05T20:24:36.000Z | tests/test_conversion.py | cihai/cihai | 6a9764e5b3c47b28f4a4dce27234ff99b2ee45c5 | [
"MIT"
] | 10 | 2017-06-01T11:26:45.000Z | 2022-03-11T06:04:34.000Z | """Tests for cihai.
test.conversion
~~~~~~~~~~~~~~~
"""
from cihai import conversion
from cihai._compat import string_types, text_type
def test_text_type():
c1 = '(same as U+7A69 穩) firm; stable; secure'
c2 = text_type()
assert isinstance(c1, string_types)
assert isinstance(c2, text_type)
"""Return UCN character from Python Unicode character.
Converts a one character Python unicode string (e.g. u'\\u4e00') to the
corresponding Unicode UCN ('U+4E00').
U+369D kSemanticVariant U+595E<kMatthews U+594E<kMatthews
U+3CE2 kTraditionalVariant U+23FB7
U+3FF7 kSemanticVariant U+7CD9<kMatthews,kMeyerWempe
U+345A kDefinition (non-classical form of 那) that, there
U+349A kDefinition (same as U+7A69 穩) firm; stable; secure,
dependent upon others
U+34B5 kMandarin mào
U+356D kCantonese au3 jaau1
"""
def test_ucn_from_unicode():
text = '一'
python_unicode = u'\u4e00'
expected = "U+4E00"
bytes_expected = b"U+4E00"
assert conversion.python_to_ucn(python_unicode) == expected
assert isinstance(conversion.python_to_ucn(python_unicode), text_type)
assert isinstance(conversion.python_to_ucn(python_unicode, as_bytes=True), bytes)
assert conversion.python_to_ucn(text, as_bytes=True) == bytes_expected
def test_ucn_from_unicode_16():
text = '𦄀'
python_unicode = u'\u26100'
expected = "U+26100"
bytes_expected = b"U+26100"
assert conversion.python_to_ucn(python_unicode) == expected
assert isinstance(conversion.python_to_ucn(python_unicode), text_type)
assert isinstance(conversion.python_to_ucn(python_unicode, as_bytes=True), bytes)
assert conversion.python_to_ucn(text, as_bytes=True) == bytes_expected
def test_ucn_to_unicode():
before = 'U+4E00'
expected = '\u4e00'
result = conversion.ucn_to_unicode(before)
assert result == expected
assert isinstance(result, text_type)
# wide character
before = 'U+20001'
expected = '\U00020001'
result = conversion.ucn_to_unicode(before)
assert result == expected
assert isinstance(result, text_type)
before = '(same as U+7A69 穩) firm; stable; secure'
expected = '(same as 穩 穩) firm; stable; secure'
result = conversion.ucnstring_to_unicode(before)
assert result == expected
assert isinstance(result, text_type)
"""Return EUC character from a Python Unicode character.
Converts a one character Python unicode string (e.g. u'\\u4e00') to the
corresponding EUC hex ('d2bb').
"""
def test_hexd():
assert conversion.hexd(0xFFFF) == 'ffff'
def test_euc_from_unicode():
expected = '一' # u'\u4e00'
euc_bytestring = b'd2bb'
euc_unicode = 'd2bb'
result = conversion.python_to_euc(expected, as_bytes=True)
assert euc_bytestring == result
assert isinstance(result, bytes)
result = conversion.python_to_euc(expected)
assert euc_unicode == result
assert isinstance(result, text_type)
def test_euc_to_utf8():
expected = '一'
euc_bytestring = b'b0ec'
result = conversion.euc_to_utf8(euc_bytestring)
assert expected == result
def test_euc_to_unicode():
expected = '一'
expected_ustring = u'\u4e00'
euc_bytestring = b'd2bb'
result = conversion.euc_to_unicode(euc_bytestring)
assert expected == expected_ustring
assert isinstance(result, text_type)
assert expected == result
assert expected_ustring == result
| 24.428571 | 85 | 0.71462 |
4a23c41fff305e1370241de1770ecf8dd20ef8d8 | 3,535 | py | Python | gym_minigrid/envs/keycorridor.py | DamienLopez1/WM-GEP | 8b858cf8d221d58cd4ff945ae6c1270c290768c4 | [
"Apache-2.0"
] | null | null | null | gym_minigrid/envs/keycorridor.py | DamienLopez1/WM-GEP | 8b858cf8d221d58cd4ff945ae6c1270c290768c4 | [
"Apache-2.0"
] | null | null | null | gym_minigrid/envs/keycorridor.py | DamienLopez1/WM-GEP | 8b858cf8d221d58cd4ff945ae6c1270c290768c4 | [
"Apache-2.0"
] | null | null | null | from gym_minigrid.roomgrid import RoomGrid
from gym_minigrid.register import register
class KeyCorridor(RoomGrid):
"""
A ball is behind a locked door, the key is placed in a
random room.
"""
def __init__(
self,
num_rows=3,
obj_type="ball",
room_size=6,
seed=None
):
self.obj_type = obj_type
super().__init__(
room_size=room_size,
num_rows=num_rows,
max_steps=30*room_size**2,
seed=seed,
)
def _gen_grid(self, width, height):
super()._gen_grid(width, height)
# Connect the middle column rooms into a hallway
for j in range(1, self.num_rows):
self.remove_wall(1, j, 3)
# Add a locked door on the bottom right
# Add an object behind the locked door
room_idx = self._rand_int(0, self.num_rows)
door, _ = self.add_door(2, room_idx, 2, locked=True)
obj, _ = self.add_object(2, room_idx, kind=self.obj_type)
# Add a key in a random room on the left side
self.add_object(0, self._rand_int(0, self.num_rows), 'key', door.color)
# Place the agent in the middle
self.place_agent(1, self.num_rows // 2)
# Make sure all rooms are accessible
self.connect_all()
self.obj = obj
self.mission = "pick up the %s %s" % (obj.color, obj.type)
def step(self, action):
obs, reward, done, info = super().step(action)
if action == self.actions.pickup:
if self.carrying and self.carrying == self.obj:
reward = self._reward()
done = True
return obs, reward, done, info
class KeyCorridorS3R1(KeyCorridor):
def __init__(self, seed=None):
super().__init__(
room_size=3,
num_rows=1,
seed=seed
)
class KeyCorridorS3R2(KeyCorridor):
def __init__(self, seed=None):
super().__init__(
room_size=3,
num_rows=2,
seed=seed
)
class KeyCorridorS3R3(KeyCorridor):
def __init__(self, seed=None):
super().__init__(
room_size=3,
num_rows=3,
seed=seed
)
class KeyCorridorS4R3(KeyCorridor):
def __init__(self, seed=None):
super().__init__(
room_size=4,
num_rows=3,
seed=seed
)
class KeyCorridorS5R3(KeyCorridor):
def __init__(self, seed=None):
super().__init__(
room_size=5,
num_rows=3,
seed=seed
)
class KeyCorridorS6R3(KeyCorridor):
def __init__(self, seed=None):
super().__init__(
room_size=6,
num_rows=3,
seed=seed
)
register(
id='MiniGrid-KeyCorridorS3R1-v0',
entry_point='gym_minigrid.envs:KeyCorridorS3R1'
)
register(
id='MiniGrid-KeyCorridorS3R2-v0',
entry_point='gym_minigrid.envs:KeyCorridorS3R2'
)
register(
id='MiniGrid-KeyCorridorS3R3-v0',
entry_point='gym_minigrid.envs:KeyCorridorS3R3'
)
register(
id='MiniGrid-KeyCorridorS4R3-v0',
entry_point='gym_minigrid.envs:KeyCorridorS4R3'
)
register(
id='MiniGrid-KeyCorridorS5R3-v0',
entry_point='gym_minigrid.envs:KeyCorridorS5R3'
)
register(
id='MiniGrid-KeyCorridorS6R3-v0',
entry_point='gym_minigrid.envs:KeyCorridorS6R3'
)
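# Example usage (sketch; assumes gym and gym_minigrid are installed and this
# module has been imported so the ids above are registered):
#   import gym
#   env = gym.make('MiniGrid-KeyCorridorS3R1-v0')
#   obs = env.reset()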
| 25.615942 | 80 | 0.571994 |
4a23c43f95ae4e7c54b40ace2fdff94b04251b77 | 388 | py | Python | resteasycli/lib/saved_request.py | rapidstack/RESTEasyCLI | f65fc86163c25f843a94341f09b20db28c1511d7 | [
"MIT"
] | null | null | null | resteasycli/lib/saved_request.py | rapidstack/RESTEasyCLI | f65fc86163c25f843a94341f09b20db28c1511d7 | [
"MIT"
] | 55 | 2019-01-01T12:03:58.000Z | 2019-05-23T16:36:30.000Z | resteasycli/lib/saved_request.py | sayanarijit/RESTEasyCLI | f65fc86163c25f843a94341f09b20db28c1511d7 | [
"MIT"
] | 5 | 2019-01-01T17:04:37.000Z | 2019-03-02T17:59:49.000Z | from resteasycli.lib.request import Request
class SavedRequest(Request):
    '''Saved requests for reuse and laziness'''
def __init__(self, request_id, workspace):
data = workspace.saved_requests[request_id]
Request.__init__(self, workspace=workspace, site_id=data['site'],
endpoint_id=data['endpoint'], **data)
self.request_id = request_id
| 32.333333 | 73 | 0.690722 |
4a23c468f12ac0737b7e752e73463aca3d2c175c | 2,297 | py | Python | scripts/addCols.py | amlalejini/GECCO-2018-cohort-lexicase | da36586714a5322af37548348958dc9e3aa7938d | [
"MIT"
] | 1 | 2019-04-08T18:44:59.000Z | 2019-04-08T18:44:59.000Z | scripts/addCols.py | amlalejini/GECCO-2018-cohort-lexicase | da36586714a5322af37548348958dc9e3aa7938d | [
"MIT"
] | 5 | 2019-03-22T16:29:26.000Z | 2019-04-01T13:57:55.000Z | scripts/addCols.py | amlalejini/GECCO-2019-cohort-lexicase | da36586714a5322af37548348958dc9e3aa7938d | [
"MIT"
] | null | null | null |
import argparse, os, copy, errno, csv
cohort_configs = {
"CN_128__CS_4": "cn128:cs4",
"CN_16__CS_32": "cn16:cs32",
"CN_1__CS_512": "cn1:cs512",
"CN_256__CS_2": "cn256:cs2",
"CN_2__CS_256": "cn2:cs256",
"CN_32__CS_16": "cn32:cs16",
"CN_4__CS_128": "cn4:cs128",
"CN_64__CS_8": "cn64:cs8",
"CN_8__CS_64": "cn8:cs64"
}
sel_modes = {
"SEL_COHORT_LEX": "cohort lex",
"SEL_PROG_ONLY_COHORT_LEX": "prog-only cohorts",
"SEL_DOWN_SAMPLE_TESTS": "sample tests",
"SEL_TRUNCATED": "truncated lex"
}
parser = argparse.ArgumentParser(description="Data aggregation script.")
parser.add_argument("data_file", type=str, help="Target data file")
args = parser.parse_args()
fpath = args.data_file
file_content = None
with open(fpath, "r") as fp:
file_content = fp.read().strip().split("\n")
header = file_content[0].split(",")
header_lu = {header[i].strip():i for i in range(0, len(header))}
file_content = file_content[1:]
solutions = [l for l in csv.reader(file_content, quotechar='"', delimiter=',', quoting=csv.QUOTE_ALL, skipinitialspace=True)]
modified_content = ",".join(header) + ",cohort_config,sel_mode\n"
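# For each solution row, recover the cohort configuration and selection mode from
# the tokens embedded in its treatment string and append them as two new columns.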
for sol in solutions:
treatment = sol[header_lu["treatment"]]
test_mode = treatment.split("__")[1].replace("TESTS_", "")
cohort_config = None
for thing in cohort_configs:
if thing in treatment: cohort_config = cohort_configs[thing]
if cohort_config == None:
print("Unrecognized cohort config! Exiting.")
exit()
sel_mode = None
for thing in sel_modes:
if thing in treatment: sel_mode = sel_modes[thing]
if sel_mode == None:
print("Unrecognized selection mode! Exiting.")
exit()
new_line = ",".join([sol[header_lu["treatment"]],sol[header_lu["run_id"]],sol[header_lu["problem"]],sol[header_lu["uses_cohorts"]],sol[header_lu["solution_found"]],sol[header_lu["solution_length"]],sol[header_lu["update_found"]],sol[header_lu["evaluation_found"]],sol[header_lu["update_first_solution_found"]],sol[header_lu["evaluation_first_solution_found"]], "\"" + sol[header_lu["program"]] + "\"", cohort_config, sel_mode]) + "\n"
modified_content += new_line
with open(fpath.replace(".csv", "__modified.csv"), "w") as fp:
fp.write(modified_content) | 37.048387 | 438 | 0.681323 |
4a23c5984c2d093e2f53e93aec71418f84b65928 | 6,418 | py | Python | src/test/python/apache/aurora/client/cli/test_inspect.py | pikselpalette/aurora | 65cd3ee9a574e3f711628125362a1fda1a8e82ae | [
"Apache-2.0"
] | null | null | null | src/test/python/apache/aurora/client/cli/test_inspect.py | pikselpalette/aurora | 65cd3ee9a574e3f711628125362a1fda1a8e82ae | [
"Apache-2.0"
] | null | null | null | src/test/python/apache/aurora/client/cli/test_inspect.py | pikselpalette/aurora | 65cd3ee9a574e3f711628125362a1fda1a8e82ae | [
"Apache-2.0"
] | 1 | 2022-02-27T10:41:45.000Z | 2022-02-27T10:41:45.000Z | #
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import contextlib
import json
from mock import patch
from apache.aurora.client.cli.client import AuroraCommandLine
from apache.aurora.config import AuroraConfig
from apache.aurora.config.schema.base import Job
from apache.thermos.config.schema_base import MB, Process, Resources, Task
from .util import AuroraClientCommandTest
class TestInspectCommand(AuroraClientCommandTest):
def get_job_config(self):
return AuroraConfig(job=Job(
cluster='west',
role='bozo',
environment='test',
name='the_job',
service=False,
task=Task(
name='task',
processes=[Process(cmdline='ls -la', name='process')],
resources=Resources(cpu=1.0, ram=1024 * MB, disk=1024 * MB)
),
contact='[email protected]',
instances=3,
cron_schedule='* * * * *'
))
def test_inspect_job(self):
mock_stdout = []
def mock_print_out(msg, indent=0):
indent_str = " " * indent
mock_stdout.append("%s%s" % (indent_str, msg))
with contextlib.nested(
patch('apache.aurora.client.cli.context.AuroraCommandContext.print_out',
side_effect=mock_print_out),
patch('apache.aurora.client.cli.context.AuroraCommandContext.get_job_config',
return_value=self.get_job_config())):
cmd = AuroraCommandLine()
assert cmd.execute(['job', 'inspect', 'west/bozo/test/hello', 'config.aurora']) == 0
output = '\n'.join(mock_stdout)
assert output == '''Job level information
name: 'the_job'
role: 'bozo'
contact: '[email protected]'
cluster: 'west'
instances: '3'
cron:
schedule: '* * * * *'
policy: 'KILL_EXISTING'
service: False
production: False
Task level information
name: 'task'
Process 'process':
cmdline:
ls -la
'''
def test_inspect_job_in_json(self):
mock_stdout = []
def mock_print_out(msg):
mock_stdout.append("%s" % msg)
with contextlib.nested(
patch('apache.aurora.client.cli.context.AuroraCommandContext.print_out',
side_effect=mock_print_out),
patch('apache.aurora.client.cli.context.AuroraCommandContext.get_job_config',
return_value=self.get_job_config())):
cmd = AuroraCommandLine()
assert cmd.execute([
'job', 'inspect', '--write-json', 'west/bozo/test/hello', 'config.aurora']) == 0
output = {
"environment": "test",
"health_check_config": {
"initial_interval_secs": 15.0,
"health_checker": {
"http": {
"expected_response_code": 0,
"endpoint": "/health",
"expected_response": "ok"}},
"interval_secs": 10.0,
"timeout_secs": 1.0,
"max_consecutive_failures": 0,
"min_consecutive_successes": 1},
"cluster": "west",
"cron_schedule": "* * * * *",
"service": False,
"update_config": {
"wait_for_batch_completion": False,
"batch_size": 1,
"watch_secs": 45,
"rollback_on_failure": True,
"max_per_shard_failures": 0,
"max_total_failures": 0},
"name": "the_job",
"max_task_failures": 1,
"cron_collision_policy": "KILL_EXISTING",
"enable_hooks": False,
"instances": 3,
"task": {
"processes": [{
"daemon": False,
"name": "process",
"ephemeral": False,
"max_failures": 1,
"min_duration": 5,
"cmdline": "ls -la",
"final": False}],
"name": "task",
"finalization_wait": 30,
"max_failures": 1,
"max_concurrency": 0,
"resources": {
"gpu": 0,
"disk": 1073741824,
"ram": 1073741824,
"cpu": 1.0},
"constraints": []},
"production": False,
"role": "bozo",
"contact": "[email protected]",
"lifecycle": {
"http": {
"graceful_shutdown_endpoint": "/quitquitquit",
"port": "health",
"shutdown_endpoint": "/abortabortabort"}},
"priority": 0}
mock_output = "\n".join(mock_stdout)
assert output == json.loads(mock_output)
def test_inspect_job_raw(self):
mock_stdout = []
def mock_print_out(msg, indent=0):
indent_str = " " * indent
mock_stdout.append("%s%s" % (indent_str, msg))
job_config = self.get_job_config()
with contextlib.nested(
patch('apache.aurora.client.cli.context.AuroraCommandContext.print_out',
side_effect=mock_print_out),
patch('apache.aurora.client.cli.context.AuroraCommandContext.get_job_config',
return_value=job_config)):
cmd = AuroraCommandLine()
assert cmd.execute(['job', 'inspect', '--raw', 'west/bozo/test/hello', 'config.aurora']) == 0
output = '\n'.join(mock_stdout)
# It's impossible to assert string equivalence of two objects with nested un-hashable types.
# Given that the only product of --raw flag is the thrift representation of AuroraConfig
# it's enough to do a spot check here and let thrift.py tests validate the structure.
assert 'TaskConfig' in output
# AURORA-990: Prevent regression of client passing invalid arguments to print_out.
# Since print_out is the final layer before print(), there's not much else we can do than
# ensure the command exits normally.
def test_inspect_job_raw_success(self):
with patch('apache.aurora.client.cli.context.AuroraCommandContext.get_job_config',
return_value=self.get_job_config()):
cmd = AuroraCommandLine()
assert cmd.execute(['job', 'inspect', '--raw', 'west/bozo/test/hello', 'config.aurora']) == 0
| 36.05618 | 99 | 0.608601 |
4a23c59a17962d1ed8f237c0185c22c684006fbe | 8,475 | py | Python | t2t_bert/distributed_single_sentence_classification/model_relation_distillation.py | yyht/bert | 480c909e0835a455606e829310ff949c9dd23549 | [
"Apache-2.0"
] | 34 | 2018-12-19T01:00:57.000Z | 2021-03-26T09:36:37.000Z | t2t_bert/distributed_single_sentence_classification/model_relation_distillation.py | yyht/bert | 480c909e0835a455606e829310ff949c9dd23549 | [
"Apache-2.0"
] | 11 | 2018-12-25T03:37:59.000Z | 2021-08-25T14:43:58.000Z | t2t_bert/distributed_single_sentence_classification/model_relation_distillation.py | yyht/bert | 480c909e0835a455606e829310ff949c9dd23549 | [
"Apache-2.0"
] | 9 | 2018-12-27T08:00:44.000Z | 2020-06-08T03:05:14.000Z | try:
from .model_interface import model_zoo
except:
from model_interface import model_zoo
import tensorflow as tf
import numpy as np
from bunch import Bunch
from model_io import model_io
from task_module import classifier
import tensorflow as tf
from metric import tf_metrics
from optimizer import distributed_optimizer as optimizer
from model_io import model_io
from distillation import knowledge_distillation as distill
def correlation(x, y):
x = x - tf.reduce_mean(x, axis=-1, keepdims=True)
y = y - tf.reduce_mean(y, axis=-1, keepdims=True)
x = tf.nn.l2_normalize(x, -1)
y = tf.nn.l2_normalize(y, -1)
return -tf.reduce_sum(x*y, axis=-1) # higher the better
def kd(x, y):
x_prob = tf.nn.softmax(x)
print(x_prob.get_shape(), y.get_shape(), tf.reduce_sum(x_prob * y, axis=-1).get_shape())
return -tf.reduce_sum(x_prob * y, axis=-1) # higher the better
def mse(x, y):
x = x - tf.reduce_mean(x, axis=-1, keepdims=True)
y = y - tf.reduce_mean(y, axis=-1, keepdims=True)
return tf.reduce_sum((x-y)**2, axis=-1) # lower the better
def kd_distance(x, y, dist_type):
if dist_type == "person":
return correlation(x,y)
elif dist_type == "kd":
return kd(x, y)
elif dist_type == "mse":
return mse(x, y)
def model_fn_builder(
model_config,
num_labels,
init_checkpoint,
model_reuse=None,
load_pretrained=True,
model_io_config={},
opt_config={},
exclude_scope="",
not_storage_params=[],
target="a",
label_lst=None,
output_type="sess",
**kargs):
def model_fn(features, labels, mode):
model_api = model_zoo(model_config)
model = model_api(model_config, features, labels,
mode, target, reuse=model_reuse)
label_ids = features["label_ids"]
if mode == tf.estimator.ModeKeys.TRAIN:
dropout_prob = model_config.dropout_prob
else:
dropout_prob = 0.0
if model_io_config.fix_lm == True:
scope = model_config.scope + "_finetuning"
else:
scope = model_config.scope
with tf.variable_scope(scope, reuse=model_reuse):
(loss,
per_example_loss,
logits) = classifier.classifier(model_config,
model.get_pooled_output(),
num_labels,
label_ids,
dropout_prob)
label_loss = tf.reduce_sum(per_example_loss * features["label_ratio"]) / (1e-10+tf.reduce_sum(features["label_ratio"]))
if mode == tf.estimator.ModeKeys.TRAIN:
distillation_api = distill.KnowledgeDistillation(kargs.get("disitllation_config", Bunch({
"logits_ratio_decay":"constant",
"logits_ratio":0.5,
"logits_decay_rate":0.999,
"distillation":['relation_kd', 'logits'],
"feature_ratio":0.5,
"feature_ratio_decay":"constant",
"feature_decay_rate":0.999,
"kd_type":"kd",
"scope":scope
})))
# get teacher logits
teacher_logit = tf.log(features["label_probs"]+1e-10)/kargs.get("temperature", 2.0) # log_softmax logits
student_logit = tf.nn.log_softmax(logits /kargs.get("temperature", 2.0)) # log_softmax logits
distillation_features = {
"student_logits_tensor":student_logit,
"teacher_logits_tensor":teacher_logit,
"student_feature_tensor":model.get_pooled_output(),
"teacher_feature_tensor":features["distillation_feature"],
"student_label":tf.ones_like(label_ids, dtype=tf.int32),
"teacher_label":tf.zeros_like(label_ids, dtype=tf.int32),
"logits_ratio":kargs.get("logits_ratio", 0.5),
"feature_ratio":kargs.get("logits_ratio", 0.5),
"distillation_ratio":features["distillation_ratio"],
"src_f_logit":logits,
"tgt_f_logit":logits,
"src_tensor":model.get_pooled_output(),
"tgt_tensor":features["distillation_feature"]
}
distillation_loss = distillation_api.distillation(distillation_features,
2, dropout_prob,
model_reuse,
opt_config.num_train_steps,
feature_ratio=1.0,
logits_ratio_decay="constant",
feature_ratio_decay="constant",
feature_decay_rate=0.999,
logits_decay_rate=0.999,
logits_ratio=0.5,
scope=scope+"/adv_classifier",
num_classes=num_labels,
gamma=kargs.get("gamma", 4))
loss = label_loss + distillation_loss["distillation_loss"]
model_io_fn = model_io.ModelIO(model_io_config)
tvars = model_io_fn.get_params(model_config.scope,
not_storage_params=not_storage_params)
print(tvars)
if load_pretrained == "yes":
model_io_fn.load_pretrained(tvars,
init_checkpoint,
exclude_scope=exclude_scope)
if mode == tf.estimator.ModeKeys.TRAIN:
optimizer_fn = optimizer.Optimizer(opt_config)
model_io_fn.print_params(tvars, string=", trainable params")
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
with tf.control_dependencies(update_ops):
train_op = optimizer_fn.get_train_op(loss, tvars,
opt_config.init_lr,
opt_config.num_train_steps,
**kargs)
model_io_fn.set_saver()
if kargs.get("task_index", 1) == 0 and kargs.get("run_config", None):
training_hooks = []
elif kargs.get("task_index", 1) == 0:
model_io_fn.get_hooks(kargs.get("checkpoint_dir", None),
kargs.get("num_storage_steps", 1000))
training_hooks = model_io_fn.checkpoint_hook
else:
training_hooks = []
if len(optimizer_fn.distributed_hooks) >= 1:
training_hooks.extend(optimizer_fn.distributed_hooks)
print(training_hooks, "==training_hooks==", "==task_index==", kargs.get("task_index", 1))
estimator_spec = tf.estimator.EstimatorSpec(mode=mode,
loss=loss, train_op=train_op,
training_hooks=training_hooks)
if output_type == "sess":
return {
"train":{
"loss":loss,
"logits":logits,
"train_op":train_op,
"cross_entropy":label_loss,
"distillation_loss":distillation_loss["distillation_loss"],
"kd_num":tf.reduce_sum(features["distillation_ratio"]),
"ce_num":tf.reduce_sum(features["label_ratio"]),
"label_ratio":features["label_ratio"],
"distilaltion_logits_loss":distillation_loss["distillation_logits_loss"],
"distilaltion_feature_loss":distillation_loss["distillation_feature_loss"],
"rkd_loss":distillation_loss["rkd_loss"]
},
"hooks":training_hooks
}
elif output_type == "estimator":
return estimator_spec
elif mode == tf.estimator.ModeKeys.PREDICT:
print(logits.get_shape(), "===logits shape===")
pred_label = tf.argmax(logits, axis=-1, output_type=tf.int32)
prob = tf.nn.softmax(logits)
max_prob = tf.reduce_max(prob, axis=-1)
estimator_spec = tf.estimator.EstimatorSpec(
mode=mode,
predictions={
'pred_label':pred_label,
"max_prob":max_prob
},
export_outputs={
"output":tf.estimator.export.PredictOutput(
{
'pred_label':pred_label,
"max_prob":max_prob
}
)
}
)
return estimator_spec
elif mode == tf.estimator.ModeKeys.EVAL:
def metric_fn(per_example_loss,
logits,
label_ids):
"""Computes the loss and accuracy of the model."""
sentence_log_probs = tf.reshape(
logits, [-1, logits.shape[-1]])
sentence_predictions = tf.argmax(
logits, axis=-1, output_type=tf.int32)
sentence_labels = tf.reshape(label_ids, [-1])
sentence_accuracy = tf.metrics.accuracy(
labels=label_ids, predictions=sentence_predictions)
sentence_mean_loss = tf.metrics.mean(
values=per_example_loss)
sentence_f = tf_metrics.f1(label_ids,
sentence_predictions,
num_labels,
label_lst, average="macro")
eval_metric_ops = {
"f1": sentence_f,
"acc":sentence_accuracy
}
return eval_metric_ops
eval_metric_ops = metric_fn(
per_example_loss,
logits,
label_ids)
estimator_spec = tf.estimator.EstimatorSpec(mode=mode,
loss=loss,
eval_metric_ops=eval_metric_ops)
if output_type == "sess":
return {
"eval":{
"per_example_loss":per_example_loss,
"logits":logits,
"loss":tf.reduce_mean(per_example_loss)
}
}
elif output_type == "estimator":
return estimator_spec
else:
raise NotImplementedError()
return model_fn
| 31.158088 | 122 | 0.662655 |
4a23c5bd3129dfd2a52fad2d299506a875748262 | 956 | py | Python | daily-coding-problem/036.py | 0x8b/HackerRank | 45e1a0e2be68950505c0a75218715bd3132a428b | [
"MIT"
] | 3 | 2019-12-04T01:22:34.000Z | 2020-12-10T15:31:00.000Z | daily-coding-problem/036.py | 0x8b/HackerRank | 45e1a0e2be68950505c0a75218715bd3132a428b | [
"MIT"
] | null | null | null | daily-coding-problem/036.py | 0x8b/HackerRank | 45e1a0e2be68950505c0a75218715bd3132a428b | [
"MIT"
] | 1 | 2019-12-04T01:24:01.000Z | 2019-12-04T01:24:01.000Z | #!/usr/bin/env python
"""
Given the root to a binary search tree, find the second largest node
in the tree.
"""
from collections import namedtuple
Node = namedtuple("Node", ["v", "l", "r"])
a = Node(1, None, None)
b = Node(0, None, None)
c = Node(0, None, None)
d = Node(6, None, None)
e = Node(5, None, None)
f = Node(4, None, None)
g = Node(4, None, None)
h = Node(7, None, None)
i = Node(5, a, b)
j = Node(2, c, d)
k = Node(8, e, f)
l = Node(3, g, h)
m = Node(1, i, j)
n = Node(2, k, l)
tree = Node(2, m, n)
largest = float("-inf")
second_largest = float("-inf")
def find_second_largest(node: Node):
global largest, second_largest
if node.v > largest:
second_largest = largest
largest = node.v
elif node.v > second_largest:
second_largest = node.v
if node.l:
find_second_largest(node.l)
if node.r:
find_second_largest(node.r)
find_second_largest(tree)
print(second_largest)
| 16.77193 | 68 | 0.616109 |
4a23c68519465cefc2612ef87bdd76afd5c73d25 | 6,159 | py | Python | Project_Plagiarism_Detection/.ipynb_checkpoints/problem_unittests-checkpoint.py | benjaminlees/ML_SageMaker_Studies | 6e523d40e406ce5a4b6d440bd7ab5c96a6c9fc5e | [
"MIT"
] | null | null | null | Project_Plagiarism_Detection/.ipynb_checkpoints/problem_unittests-checkpoint.py | benjaminlees/ML_SageMaker_Studies | 6e523d40e406ce5a4b6d440bd7ab5c96a6c9fc5e | [
"MIT"
] | null | null | null | Project_Plagiarism_Detection/.ipynb_checkpoints/problem_unittests-checkpoint.py | benjaminlees/ML_SageMaker_Studies | 6e523d40e406ce5a4b6d440bd7ab5c96a6c9fc5e | [
"MIT"
] | null | null | null | from unittest.mock import MagicMock, patch
import sklearn.naive_bayes
import numpy as np
import pandas as pd
import re
# test csv file
TEST_CSV = 'data/test_info.csv'
class AssertTest(object):
'''Defines general test behavior.'''
def __init__(self, params):
self.assert_param_message = '\n'.join([str(k) + ': ' + str(v) + '' for k, v in params.items()])
def test(self, assert_condition, assert_message):
assert assert_condition, assert_message + '\n\nUnit Test Function Parameters\n' + self.assert_param_message
def _print_success_message():
print('Tests Passed!')
# test numerical_dataframe
def test_numerical_df(numerical_dataframe):
# test result
transformed_df = numerical_dataframe(TEST_CSV)
# Check type is a DataFrame
assert isinstance(transformed_df, pd.DataFrame), 'Returned type is {}.'.format(type(transformed_df))
# check columns
column_names = list(transformed_df)
assert 'File' in column_names, 'No File column, found.'
assert 'Task' in column_names, 'No Task column, found.'
assert 'Category' in column_names, 'No Category column, found.'
assert 'Class' in column_names, 'No Class column, found.'
# check conversion values
assert transformed_df.loc[0, 'Category'] == 1, '`heavy` plagiarism mapping test, failed.'
assert transformed_df.loc[2, 'Category'] == 0, '`non` plagiarism mapping test, failed.'
assert transformed_df.loc[30, 'Category'] == 3, '`cut` plagiarism mapping test, failed.'
assert transformed_df.loc[5, 'Category'] == 2, '`light` plagiarism mapping test, failed.'
assert transformed_df.loc[37, 'Category'] == -1, 'original file mapping test, failed; should have a Category = -1.'
assert transformed_df.loc[41, 'Category'] == -1, 'original file mapping test, failed; should have a Category = -1.'
_print_success_message()
def test_containment(complete_df, containment_fn):
# check basic format and value
# for n = 1 and just the fifth file
test_val = containment_fn(complete_df, 1, 'g0pA_taske.txt')
assert isinstance(test_val, float), 'Returned type is {}.'.format(type(test_val))
assert test_val<=1.0, 'It appears that the value is not normalized; expected a value <=1, got: '+str(test_val)
# known vals for first few files
filenames = ['g0pA_taska.txt', 'g0pA_taskb.txt', 'g0pA_taskc.txt', 'g0pA_taskd.txt']
ngram_1 = [0.39814814814814814, 1.0, 0.86936936936936937, 0.5935828877005348]
ngram_3 = [0.0093457943925233638, 0.96410256410256412, 0.61363636363636365, 0.15675675675675677]
# results for comparison
results_1gram = []
results_3gram = []
for i in range(4):
val_1 = containment_fn(complete_df, 1, filenames[i])
val_3 = containment_fn(complete_df, 3, filenames[i])
results_1gram.append(val_1)
results_3gram.append(val_3)
# check correct results
print(results_1gram)
assert all(np.isclose(ngram_1, results_1gram, rtol=1e-04)), \
'n=1 calculations are incorrect. Double check the intersection calculation.'
# check correct results
assert all(np.isclose(ngram_3, results_3gram, rtol=1e-04)), \
'n=3 calculations are incorrect.'
_print_success_message()
def test_lcs(df, lcs_word):
test_index = 10 # file 10
# get answer file text
answer_text = df.loc[test_index, 'Text']
# get text for orig file
# find the associated task type (one character, a-e)
task = df.loc[test_index, 'Task']
# we know that source texts have Class = -1
orig_rows = df[(df['Class'] == -1)]
orig_row = orig_rows[(orig_rows['Task'] == task)]
source_text = orig_row['Text'].values[0]
# calculate LCS
test_val = lcs_word(answer_text, source_text)
# check type
assert isinstance(test_val, float), 'Returned type is {}.'.format(type(test_val))
assert test_val<=1.0, 'It appears that the value is not normalized; expected a value <=1, got: '+str(test_val)
# known vals for first few files
lcs_vals = [0.1917808219178082, 0.8207547169811321, 0.8464912280701754, 0.3160621761658031, 0.24257425742574257]
# results for comparison
results = []
for i in range(5):
# get answer and source text
answer_text = df.loc[i, 'Text']
task = df.loc[i, 'Task']
# we know that source texts have Class = -1
orig_rows = df[(df['Class'] == -1)]
orig_row = orig_rows[(orig_rows['Task'] == task)]
source_text = orig_row['Text'].values[0]
# calc lcs
val = lcs_word(answer_text, source_text)
results.append(val)
# check correct results
assert all(np.isclose(results, lcs_vals, rtol=1e-05)), 'LCS calculations are incorrect.' + str(results)
_print_success_message()
def test_data_split(train_x, train_y, test_x, test_y):
# check types
assert isinstance(train_x, np.ndarray),\
'train_x is not an array, instead got type: {}'.format(type(train_x))
assert isinstance(train_y, np.ndarray),\
'train_y is not an array, instead got type: {}'.format(type(train_y))
assert isinstance(test_x, np.ndarray),\
'test_x is not an array, instead got type: {}'.format(type(test_x))
assert isinstance(test_y, np.ndarray),\
'test_y is not an array, instead got type: {}'.format(type(test_y))
# should hold all 95 submission files
assert len(train_x) + len(test_x) == 95, \
'Unexpected amount of train + test data. Expecting 95 answer text files, got ' +str(len(train_x) + len(test_x))
assert len(test_x) > 1, \
'Unexpected amount of test data. There should be multiple test files.'
# check shape
assert train_x.shape[1]==2, \
'train_x should have as many columns as selected features, got: {}'.format(train_x.shape[1])
assert len(train_y.shape)==1, \
'train_y should be a 1D array, got shape: {}'.format(train_y.shape)
_print_success_message()
| 39.735484 | 119 | 0.656113 |
4a23c6edce1e56f15b8f826fca8aedc89900ce83 | 1,017 | py | Python | Examples/MakingStructures/s01-GaAs.py | jonahhaber/BGWpy | 03ae0df2c7ddad833f679a69908abcb7b4adf9a2 | [
"BSD-3-Clause-LBNL"
] | 27 | 2015-11-19T06:14:56.000Z | 2022-02-11T18:03:26.000Z | Examples/MakingStructures/s01-GaAs.py | jonahhaber/BGWpy | 03ae0df2c7ddad833f679a69908abcb7b4adf9a2 | [
"BSD-3-Clause-LBNL"
] | 5 | 2020-08-31T14:27:08.000Z | 2021-04-08T15:28:44.000Z | Examples/MakingStructures/s01-GaAs.py | jonahhaber/BGWpy | 03ae0df2c7ddad833f679a69908abcb7b4adf9a2 | [
"BSD-3-Clause-LBNL"
] | 13 | 2016-02-10T05:36:25.000Z | 2021-11-02T01:52:09.000Z | """
Construct a Structure object and write the structure file.
BGWpy relies on pymatgen.Structure objects to construct
the primitive cells for all calculations.
See http://pymatgen.org/ for more information.
"""
import os
import numpy as np
import pymatgen
# Construct the structure object.
acell_angstrom = 5.6535
rprim = np.array([[.0,.5,.5],[.5,.0,.5],[.5,.5,.0]]) * acell_angstrom
structure = pymatgen.Structure(
lattice = pymatgen.core.lattice.Lattice(rprim),
species= ['Ga', 'As'],
coords = [3*[.0], 3*[.25]],
)
# Create a directory to store the structure.
dirname = '01-GaAs'
if not os.path.exists(dirname):
os.mkdir(dirname)
# Write file in Crystallographic Information Framework.
# This the format defined by the International Union of Crystallography.
structure.to(filename=os.path.join(dirname, 'GaAs.cif'))
# Write in json format. This is the prefered format
# since it preserves the above definition of the unit cell.
structure.to(filename=os.path.join(dirname, 'GaAs.json'))
| 29.057143 | 72 | 0.726647 |
4a23c8472ab1fee1b05d68fc9aabe1658658f6a4 | 259 | py | Python | lib/py/src/transport/__init__.py | wmorgan/thrift | d9ba3d7a3e25f0f88766c344b2e937422858320b | [
"BSL-1.0"
] | 1 | 2016-05-08T06:29:55.000Z | 2016-05-08T06:29:55.000Z | lib/py/src/transport/__init__.py | KirinDave/powerset_thrift | 283603cce87e6da4117af1d3c91570e7466846c2 | [
"BSL-1.0"
] | null | null | null | lib/py/src/transport/__init__.py | KirinDave/powerset_thrift | 283603cce87e6da4117af1d3c91570e7466846c2 | [
"BSL-1.0"
] | 1 | 2021-07-13T19:20:37.000Z | 2021-07-13T19:20:37.000Z | #!/usr/bin/env python
#
# Copyright (c) 2006- Facebook
# Distributed under the Thrift Software License
#
# See accompanying file LICENSE or visit the Thrift site at:
# http://developers.facebook.com/thrift/
__all__ = ['TTransport', 'TSocket', 'THttpClient']
| 25.9 | 60 | 0.737452 |
4a23c85065342c0188550d226dce1ce1bd820f18 | 14,613 | py | Python | app/libs/smartsearchLib.py | Psychoanalytic-Electronic-Publishing/OpenPubArchive-Content-Server | 031b79b8e0dd5e1c22e2a51394cab846763a451a | [
"Apache-2.0"
] | null | null | null | app/libs/smartsearchLib.py | Psychoanalytic-Electronic-Publishing/OpenPubArchive-Content-Server | 031b79b8e0dd5e1c22e2a51394cab846763a451a | [
"Apache-2.0"
] | 115 | 2020-09-02T20:01:26.000Z | 2022-03-30T11:47:23.000Z | app/libs/smartsearchLib.py | Psychoanalytic-Electronic-Publishing/OpenPubArchive-Content-Server | 031b79b8e0dd5e1c22e2a51394cab846763a451a | [
"Apache-2.0"
] | 2 | 2020-10-15T13:52:10.000Z | 2020-10-20T13:42:51.000Z |
import re
import sys
from datetime import datetime
from optparse import OptionParser
import logging
import opasGenSupportLib as opasgenlib
from configLib.opasCoreConfig import solr_docs2, CORES # solr_authors2, solr_gloss2, solr_docs_term_search, solr_authors_term_search
import opasConfig
logger = logging.getLogger(__name__)
from namesparser import HumanNames
rx_space_req = "(\s+|\s*)"
rx_space_opt = "(\s*|\s*)"
rx_space_end_opt = "(\s*|\s*)$"
rx_space_start_opt = "^(\s*|\s*)?"
rx_year = "\(?\s*(?P<yr>(18|19|20)[0-9]{2,2})\s*\)?"
rx_title = ".*?"
rx_space_or_colon = "((\s*\:\s*)|\s+)"
rx_vol = "((?P<vol>([\(]?[0-9]{1,3}[\)]?))\:)"
# rx_pgrg = f"(pp\.?\s+)|{rx_vol}(?P<pgrg>[1-9][0-9]{0,3}([-][1-9][0-9]{0,3})?)"
rx_pgrg = "(?P<pgrg>[1-9][0-9]{0,3}([-][1-9][0-9]{0,3})?)"
rx_cit_vol_pgrg = "([A-Z].*(\s|,))" + rx_vol + rx_pgrg + ".*"
rx_vol_pgrg = rx_vol + rx_pgrg
rx_yr_vol_pgrg = rx_year + "[\s|,]" + rx_vol_pgrg
rx_vol_wildcard = rx_vol + "\*"
rx_year_pgrg = rx_space_start_opt + rx_year + rx_space_or_colon + rx_pgrg + rx_space_end_opt
rx_year_vol_pgrg = rx_year + rx_vol_pgrg + rx_space_end_opt
rx_author_name = "(?P<author>[A-Z][a-z]+)(\,\s+(([A-Z]\.?\s?){0,2})\s*)?"
rx_fuzzy_search = "[A-z]+~[0-9]"
rx_author_connector = "(and|,)"
rx_front_junk = "(\[|\()?[0-9]+(\]|\))?"
# rx_author_and_year = rx_space_start_opt + rx_author_name + rx_space_req + rx_year + rx_space_end_opt
# rx_author_year_pgrg = rx_author_and_year + ".*?" + rx_pgrg
# rx_author_name_list_year = rx_author_name_list + rx_space_req + rx_year
rx_amp_opt = "(\&\s+)?"
# replaced 2021-02-15 with def that follows it:
# rx_author_list_and_year = "(?P<author_list>[A-Z][A-z\s\,\.\-]+?)" + rx_space_req + rx_year
rx_author_name_list = "(?P<author_list>([A-Z][A-z]+\,?\s+?(([A-Z]\.?\s?){0,2})((\,\s+)|(\s*(and|\&)\s+))?)+)"
rx_author_list_and_year = rx_author_name_list + rx_year
rx_author_list_year_vol_pgrg = rx_author_list_and_year + ".*?" + rx_vol_pgrg
# rx_one_word_string = "[^\s]"
# rx_has_wildcards = "[*?]"
rx_series_of_author_last_names = "(?P<author_list>([A-Z][a-z]+((\,\s+)|(\s*and\s+))?)+)"
rx_doi = "((h.*?://)?(.*?/))?(?P<doi>(10\.[0-9]{4,4}/[A-z0-9\.\-/]+)|(doi.org/[A-z0-9\-\./]+))"
# rx_pepdoi = "(?P<prefix>PEP\/\.)(?P<locator>[A-Z\-]{2,10}\.[0-9]{3,3}\.[0-9]{4,4}([PN]{1,2}[0-9]{4,4})?"
# schema fields must have a _ in them to use. A - at the beginning is allowed, for negation
# user search fields (PEP Spec)
# SS_ = "authors|dialogs|dreams|headings|keywords|notes|panels|poems|quotes|references|source|sourcecode|text|volume|year|art_*"
SS_SEARCH_FIELDS = "[a-z_]*"
rx_solr_field = f"(?P<schema_field>^{SS_SEARCH_FIELDS})\:(?P<schema_value>([^:]*$))" # only one field permitted
rx_solr_field2 = f"(?P<schema_field>^{SS_SEARCH_FIELDS})\:(?P<schema_value>(.*$))"
# rx_syntax = "(?P<schema_field>^[a-z]{3,9})\:\:(?P<schema_value>.+$)"
advanced_syntax = f"(?P<schema_field>^(adv|solr))\:\:(?P<schema_value>.+$)"
pat_prefix_amps = re.compile("^\s*&& ")
rx_str_is_quotedstring = r"^\s*(\"|\')(?!^\1).*\1\s*$" # outer quote or single quotes, none of the same ones inside
pat_str_is_quotedstring = re.compile(rx_str_is_quotedstring, flags=re.I)
rx_quoted_str_has_wildcards = r"(\"|\').*(\*|\?).*\1"
pat_quoted_str_has_wildcards = re.compile(rx_quoted_str_has_wildcards, flags=re.I)
rx_quoted_str_has_booleans = r"(\"|\').*\b(AND|OR|NOT)\b.*\1"
pat_quoted_str_has_booleans = re.compile(rx_quoted_str_has_booleans)
rx_str_has_wildcards = r".*(\*|\?).*"
pat_str_has_wildcards = re.compile(rx_str_has_wildcards, flags=re.I)
pat_str_has_fuzzy_search = re.compile(rx_fuzzy_search, flags=re.I)
rx_str_has_author_id = r"[A-z]+[,]?\s[A-z]\.?\b"
pat_str_has_author_id = re.compile(rx_str_has_author_id, flags=re.I)
rx_str_has_author_name = r"[A-z]+\s[A-z]+\b"
pat_str_has_author_name = re.compile(rx_str_has_author_name, flags=re.I)
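# Illustrative note (added, not from the original source): rough examples of the
# strings these patterns are meant to recognize, assuming typical citation input:
#   rx_year_pgrg matches "1999: 121-135"
#   rx_doi captures "10.1037/a0021466" from a DOI reference or link
#   rx_str_has_author_id flags strings like "Tuckett, D." as author ids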
cores = CORES
class SearchEvaluation(object):
def __init__(self, field=None, found=0, score=0):
self.score = score
self.field = field
self.found = found
self.isfound = found > 0
def all_words_start_upper_case(search_str):
"""
>>> all_words_start_upper_case(r"The Rain in Spain")
False
>>> all_words_start_upper_case(r"The Rain In Spain")
True
"""
ret_val = True
for word in search_str.split():
if not word[0].isupper() and word not in ("and"):
ret_val = False
break
return ret_val
def is_quoted_str(search_str):
"""
Test whether the string is wrapped in matching outer quotes (single or double).
>>> is_quoted_str(r"'test* 12?'")
True
>>> is_quoted_str(r"'test** 12? '")
True
"""
ret_val = False
if pat_str_is_quotedstring.search(search_str):
ret_val = True
return ret_val
def quoted_str_has_wildcards(search_str):
"""
Test whether a quoted substring within the string contains wildcards.
>>> quoted_str_has_wildcards(r"'test* 12?'")
True
>>> quoted_str_has_wildcards(r"'test** 12? '")
True
"""
ret_val = False
if pat_quoted_str_has_wildcards.search(search_str):
ret_val = True
return ret_val
def quoted_str_has_booleans(search_str):
"""
Test whether a quoted substring within the string contains the boolean operators AND, OR or NOT.
>>> quoted_str_has_booleans(r'"David Tuckett" OR "Peter Fonagy"')
True
>>> quoted_str_has_booleans(r'"David Tuckett" AND "Peter Fonagy"')
True
>>> quoted_str_has_booleans(r'"David Tuckett" "Peter Fonagy"')
False
"""
ret_val = False
if pat_quoted_str_has_booleans.search(search_str):
ret_val = True
return ret_val
def str_has_fuzzy_ops(search_str):
if pat_str_has_fuzzy_search.search(search_str):
return True
else:
return False
def str_has_one_word(search_str):
"""
Test whether the string consists of a single word.
>>> str_has_one_word(r"test* 12?")
False
>>> str_has_one_word(r"test**")
True
"""
if len(search_str.split()) == 1: # has more than 1 word
return True
else:
return False
def str_has_wildcards(search_str):
"""
Test whether the string contains a wildcard character (* or ?).
>>> result = str_has_wildcards(r"test* 12?")
>>> result is not None
True
>>> result = str_has_wildcards(r"test** 12? ")
>>> result is not None
True
"""
ret_val = False
if pat_str_has_wildcards.search(search_str):
ret_val = True
return ret_val
def str_has_author_id(search_str):
"""
# Match an author id, but no wildcards permitted
>>> str_has_author_id("Tuckett, D.")
True
>>> str_has_author_id("Tuckett, David")
False
>>> str_has_author_id(" Tuckett, Dav")
False
>>> str_has_author_id(" Tuckett, D")
True
>>> str_has_author_id(" Tuckett D")
True
>>> str_has_author_id(" Tuckett D Fonagy")
True
"""
if pat_str_has_author_id.search(search_str) and not str_has_wildcards(search_str):
return True
else:
return False
def str_is_author_mastname(search_str):
"""
Checks the database list of mastnames to see if the string matches exactly.
>>> str_is_author_mastname("Vaughan Bell")
True
>>> str_is_author_mastname("David Tuckett")
True
>>> str_is_author_mastname("Tuckett Fonagy")
False
"""
if is_value_in_field(search_str,
field="art_authors_mast_list_strings",
limit=1):
return True
else:
return False
#-----------------------------------------------------------------------------
def cleanup_solr_query(solrquery):
"""
Clean up whitespace and extra symbols that can appear when building up a query or Solr query filter
"""
ret_val = solrquery.strip()
ret_val = ' '.join(ret_val.split()) #solrquery = re.sub("\s+", " ", solrquery)
ret_val = re.sub("\(\s+", "(", ret_val)
ret_val = re.sub("\s+\)", ")", ret_val)
if ret_val is not None:
# no need to start with '*:* && '. Remove it.
ret_val = ret_val.replace("*:* && ", "")
ret_val = ret_val.replace("*:* {", "{") # if it's before a solr join for level 2 queries
ret_val = pat_prefix_amps.sub("", ret_val)
ret_val = re.sub("\s+(AND)\s+", " && ", ret_val, flags=re.IGNORECASE)
ret_val = re.sub("\s+(OR)\s+", " || ", ret_val, flags=re.IGNORECASE)
return ret_val
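# Worked example (illustrative, not part of the original source):
#   cleanup_solr_query("title:( water ) AND text:( fire )")
#   returns 'title:(water) && text:(fire)'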
#-----------------------------------------------------------------------------
def is_value_in_field(value,
field="title",
core="docs",
match_type="exact", # exact, ordered, proximate, or bool
limit=10):
"""
Returns the NumFound if the value is found in the field specified in the docs core.
Args:
value (str): String prefix of term to check.
field (str): Where to look for term
match_type (str): exact, ordered, or bool
limit (int, optional): Paging mechanism, return is limited to this number of items.
Returns:
True if the value is in the specified field
Docstring Tests:
>>> is_value_in_field("Object Relations Theories and the Developmental Tilt", "title") > 0
True
>>> is_value_in_field("Contemporary Psychoanalysis", "art_sourcetitlefull") > 0
True
>>> is_value_in_field("Contemporary Psych", "art_sourcetitlefull") > 0
False
>>> is_value_in_field("Contemp. Psychoanal.", "art_sourcetitleabbr") > 0
True
>>> is_value_in_field("Tuckett, D", "title") > 0
False
"""
ret_val = 0
try:
solr_core = cores[core]
except Exception as e:
logger.debug(f"Core selection: {core}. 'docs' is default {e}")
solr_core = solr_docs2
if match_type == "exact":
q = f'{field}:"{value}"'
elif match_type == "ordered":
q = f'{field}:"{value}"~10'
elif match_type == "proximate":
q = f'{field}:"{value}"~25'
elif match_type == "adjacent":
q = f'{field}:"{value}"~2'
else:
q = f'{field}:({value})'
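# Illustrative example (added note): with field="title", value="object relations"
# and match_type="ordered", the branch above builds: title:"object relations"~10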
if str_has_wildcards(q): # quoted_str_has_wildcards(q):
complex_phrase = "{!complexphrase}"
q = f"{complex_phrase}{q}"
try:
results = solr_core.search(q=q,
fields = f"{field}",
rows = limit,
)
except Exception as e:
logger.warning(f"Solr query: {q} fields {field} {e}")
results = []
if len(results) > 0:
ret_val = len(results) # results.numFound # it looks like the solr response object to this query always has a len == numFound
return ret_val
#-----------------------------------------------------------------------------
def get_list_of_name_ids(names_mess):
"""
>>> test="Goldberg, E.L. and Myers,W.A. and Zeifman,I."
>>> get_list_of_name_ids(test)
['Goldberg, E.', 'Myers, W.', 'Zeifman, I.']
>>> test="Eugene L. Goldberg, Wayne A. Myers and Israel Zeifman"
>>> get_list_of_name_ids(test)
['Goldberg, E.', 'Myers, W.', 'Zeifman, I.']
>>> test="Goldberg,E.L. and Zeifman,I."
>>> get_list_of_name_ids(test)
['Goldberg, E.', 'Zeifman, I.']
>>> test="Goldberg,E.L."
>>> get_list_of_name_ids(test)
['Goldberg, E.']
>>> test="Eugene L. Goldberg, Wayne A. Myers and Israel Zeifman (1974)"
>>> get_list_of_name_ids(test)
['Goldberg, E.', 'Myers, W.', 'Zeifman, I.']
"""
ret_val = []
names = HumanNames(names_mess)
try:
for n in names.human_names:
if n.last != "":
name_id = n.last + f", {n.first[0]}."
ret_val.append(name_id)
else:
ret_val.append(n.first)
except Exception as e:
logger.warning(f"name parse: {names_mess} {e}")
print (e)
return ret_val
#-----------------------------------------------------------------------------
def get_list_of_author_names_with_wildcards(author_list_str: str):
"""
>>> test="Goldberg, E.L."
>>> get_list_of_author_names_with_wildcards(test)
['Goldberg*, E*']
"""
ret_val = []
names = HumanNames(author_list_str)
try:
for n in names.human_names:
if n.last != "":
name_id = n.last + f"*, {n.first[0]}*"
ret_val.append(name_id)
else:
ret_val.append(n.first)
except Exception as e:
logger.warning(f"name parse: {names_mess} {e}")
print (e)
return ret_val
#-----------------------------------------------------------------------------
def dict_clean_none_terms(d: dict):
return {
k:v.strip()
for k, v in d.items()
if v is not None
}
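# Minimal illustration (added note):
#   dict_clean_none_terms({"a": " x ", "b": None}) returns {"a": "x"},
#   i.e. None values are dropped and the remaining string values are stripped.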
#-----------------------------------------------------------------------------
def has_names_only(phrase: str):
"""
>>> has_names_only("The Rain in Spain")
False
>>> has_names_only("Edward Scissorhands")
True
>>> has_names_only("Tuckett and Fonagy")
True
"""
# try to build a list of names, and check them individually
ret_val = False
new_q = ""
hnames = HumanNames(phrase)
names = get_list_of_name_ids(phrase)
for name in names:
try:
if is_value_in_field(name, core="docs", field=opasConfig.SEARCH_FIELD_AUTHOR_CITATION):
# ok, this is a list of names
ret_val = True
if new_q != "":
new_q += f" && '{name}'"
except Exception as e:
logger.warning(f"Value error for {name}. {e}")
return ret_val
if __name__ == "__main__":
global options # so the information can be used in support functions
options = None
parser = OptionParser(usage="%prog [options] - PEP Solr Reference Text Data Loader", version="%prog ver. 0.1.14")
parser.add_option("--test", dest="testmode", action="store_true", default=False,
help="Run Doctests")
(options, args) = parser.parse_args()
if 1: # options.testmode:
import doctest
doctest.testmod()
print ("Fini. Tests complete.")
sys.exit()
| 33.439359 | 133 | 0.585506 |
4a23ca03885242484e23fe6367045a05988c0803 | 8,651 | py | Python | qurator/sbb_ner/webapp/app.py | stweil/sbb_ner | 319c29fc96667937f85d2cba111902386c95ba23 | [
"Apache-2.0"
] | null | null | null | qurator/sbb_ner/webapp/app.py | stweil/sbb_ner | 319c29fc96667937f85d2cba111902386c95ba23 | [
"Apache-2.0"
] | null | null | null | qurator/sbb_ner/webapp/app.py | stweil/sbb_ner | 319c29fc96667937f85d2cba111902386c95ba23 | [
"Apache-2.0"
] | null | null | null | import os
import logging
from flask import Flask, send_from_directory, redirect, jsonify, request
import html
import json
import torch
from somajo import Tokenizer, SentenceSplitter
from qurator.sbb_ner.models.bert import get_device, model_predict
from qurator.sbb_ner.ground_truth.data_processor import NerProcessor, convert_examples_to_features
from qurator.sbb_ner.models.tokenization import BertTokenizer
from pytorch_pretrained_bert.modeling import (CONFIG_NAME,
BertConfig,
BertForTokenClassification)
app = Flask(__name__)
app.config.from_json('config.json' if not os.environ.get('CONFIG') else os.environ.get('CONFIG'))
logger = logging.getLogger(__name__)
class NERPredictor:
def __init__(self, model_dir, batch_size, epoch, max_seq_length=128, local_rank=-1, no_cuda=False):
self._batch_size = batch_size
self._local_rank = local_rank
self._max_seq_length = max_seq_length
self._device, self._n_gpu = get_device(no_cuda=no_cuda)
self._model_config = json.load(open(os.path.join(model_dir, "model_config.json"), "r"))
self._label_to_id = self._model_config['label_map']
self._label_map = {v: k for k, v in self._model_config['label_map'].items()}
self._bert_tokenizer = \
BertTokenizer.from_pretrained(model_dir,
do_lower_case=self._model_config['do_lower'])
output_config_file = os.path.join(model_dir, CONFIG_NAME)
output_model_file = os.path.join(model_dir, "pytorch_model_ep{}.bin".format(epoch))
config = BertConfig(output_config_file)
self._model = BertForTokenClassification(config, num_labels=len(self._label_map))
self._model.load_state_dict(torch.load(output_model_file,
map_location=lambda storage, loc: storage if no_cuda else None))
self._model.to(self._device)
self._model.eval()
return
def classify_text(self, sentences):
examples = NerProcessor.create_examples(sentences, 'test')
features = [fe for ex in examples for fe in
convert_examples_to_features(ex, self._label_to_id, self._max_seq_length, self._bert_tokenizer)]
data_loader = NerProcessor.make_data_loader(None, self._batch_size, self._local_rank, self._label_to_id,
self._max_seq_length, self._bert_tokenizer, features=features,
sequential=True)
prediction_tmp = model_predict(data_loader, self._device, self._label_map, self._model)
assert len(prediction_tmp) == len(features)
prediction = []
prev_guid = None
for fe, pr in zip(features, prediction_tmp):
# longer sentences might have been processed in several steps
# therefore we have to glue them together. This can be done on the basis of the guid.
if prev_guid != fe.guid:
prediction.append((fe.tokens[1:-1], pr))
else:
prediction[-1] = (prediction[-1][0] + fe.tokens[1:-1], prediction[-1][1] + pr)
prev_guid = fe.guid
try:
assert len(sentences) == len(prediction)
except AssertionError:
print('Sentences:\n')
print(sentences)
print('\n\nPrediction:\n')
print(prediction)
return prediction
class NERTokenizer:
def __init__(self):
self._word_tokenizer = Tokenizer(split_camel_case=True, token_classes=False, extra_info=False)
self._sentence_splitter = SentenceSplitter()
def parse_text(self, text):
tokens = self._word_tokenizer.tokenize_paragraph(text)
sentences_tokenized = self._sentence_splitter.split(tokens)
sentences = []
for sen in sentences_tokenized:
sen = [tok.replace(" ", "") for tok in sen]
if len(sen) == 0:
continue
sentences.append((sen, []))
return sentences
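# Rough sketch of the expected return shape (illustrative, assuming somajo's usual
# tokenization): parse_text("Das ist gut. Sehr gut.") should yield one
# (token_list, []) tuple per sentence, e.g.
# [(['Das', 'ist', 'gut', '.'], []), (['Sehr', 'gut', '.'], [])]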
class PredictorStore:
def __init__(self):
self._predictor = None
self._model_id = None
def get(self, model_id):
if model_id is not None:
model = next((m for m in app.config['MODELS'] if m['id'] == int(model_id)))
else:
model = next((m for m in app.config['MODELS'] if m['default']))
if self._model_id != model['id']:
self._predictor = NERPredictor(model_dir=model['model_dir'],
epoch=model['epoch'],
batch_size=app.config['BATCH_SIZE'],
no_cuda=False if not os.environ.get('USE_CUDA') else
os.environ.get('USE_CUDA').lower() == 'false')
self._model_id = model['id']
return self._predictor
predictor_store = PredictorStore()
tokenizer = NERTokenizer()
@app.route('/')
def entry():
return redirect("/index.html", code=302)
@app.route('/models')
def get_models():
return jsonify(app.config['MODELS'])
@app.route('/tokenized', methods=['GET', 'POST'])
def tokenized():
raw_text = request.json['text']
sentences = tokenizer.parse_text(raw_text)
result = [(sen, i) for i, (sen, _) in enumerate(sentences)]
return jsonify(result)
@app.route('/ner-bert-tokens', methods=['GET', 'POST'])
@app.route('/ner-bert-tokens/<model_id>', methods=['GET', 'POST'])
def ner_bert_tokens(model_id=None):
raw_text = request.json['text']
sentences = tokenizer.parse_text(raw_text)
prediction = predictor_store.get(model_id).classify_text(sentences)
output = []
for tokens, word_predictions in prediction:
output_sentence = []
for token, word_pred in zip(tokens, word_predictions):
output_sentence.append({'token': html.escape(token), 'prediction': word_pred})
output.append(output_sentence)
return jsonify(output)
@app.route('/ner', methods=['GET', 'POST'])
@app.route('/ner/<model_id>', methods=['GET', 'POST'])
def ner(model_id=None):
raw_text = request.json['text']
sentences = tokenizer.parse_text(raw_text)
prediction = predictor_store.get(model_id).classify_text(sentences)
output = []
for (tokens, word_predictions), (input_sentence, _) in zip(prediction, sentences):
original_text = "".join(input_sentence)
original_word_positions = \
[pos for positions in [[idx] * len(word) for idx, word in enumerate(input_sentence)] for pos in positions]
word = ''
last_prediction = 'O'
output_sentence = []
for pos, (token, word_pred) in enumerate(zip(tokens, word_predictions)):
if not token.startswith('##') and token != '[UNK]':
if len(word) > 0:
output_sentence.append({'word': word, 'prediction': last_prediction})
word = ''
if token == '[UNK]':
orig_pos = len("".join([pred['word'] for pred in output_sentence]) + word)
if orig_pos > 0 and original_word_positions[orig_pos-1] != original_word_positions[orig_pos]:
output_sentence.append({'word': word, 'prediction': last_prediction})
word = ''
word += original_text[orig_pos]
if word_pred != 'X':
last_prediction = word_pred
continue
if not token.startswith('##') and word_pred == 'X':
word_pred = 'O'
token = token[2:] if token.startswith('##') else token
word += token
if word_pred != 'X':
last_prediction = word_pred
if len(word) > 0:
output_sentence.append({'word': word, 'prediction': last_prediction})
output.append(output_sentence)
for output_sentence, (input_sentence, _) in zip(output, sentences):
try:
assert "".join([pred['word'] for pred in output_sentence]) == "".join(input_sentence)
except AssertionError:
logger.warning('Input and output different!!! \n\n\nInput: {}\n\nOutput: {}\n'.
format("".join(input_sentence).replace(" ", ""),
"".join([pred['word'] for pred in output_sentence])))
return jsonify(output)
@app.route('/<path:path>')
def send_js(path):
return send_from_directory('static', path)
| 31.572993 | 118 | 0.604901 |
4a23ca69c7eda9dcf1520e7eb55e5e67b3e49e40 | 14,099 | py | Python | pixelpainter/code.py | mscosti/PixelPainter | 8fe151bcf3260e7b23c6632ea374a58e0ddc4619 | [
"MIT"
] | null | null | null | pixelpainter/code.py | mscosti/PixelPainter | 8fe151bcf3260e7b23c6632ea374a58e0ddc4619 | [
"MIT"
] | 1 | 2022-02-10T18:47:40.000Z | 2022-02-10T18:47:40.000Z | pixelpainter/code.py | mscosti/PixelPainter | 8fe151bcf3260e7b23c6632ea374a58e0ddc4619 | [
"MIT"
] | null | null | null | import board
import busio
import time
import os
import gc
from digitalio import DigitalInOut
from struct import *
import neopixel
import adafruit_fancyled.adafruit_fancyled as fancy
from adafruit_esp32spi import adafruit_esp32spi
import adafruit_esp32spi.adafruit_esp32spi_wifimanager as wifimanager
import adafruit_esp32spi.adafruit_esp32spi_wsgiserver as server
from bmpReader import bmpReader
# Get wifi details and more from a secrets.py file
try:
from secrets import secrets
except ImportError:
print("WiFi secrets are kept in secrets.py, please add them there!")
raise
try:
import json as json_module
except ImportError:
import ujson as json_module
import adafruit_dotstar as dotstar
status_light = dotstar.DotStar(board.APA102_SCK, board.APA102_MOSI, 1, brightness=1)
print("Pixel Painter Web Server!")
esp32_cs = DigitalInOut(board.D10)
esp32_ready = DigitalInOut(board.D9)
esp32_reset = DigitalInOut(board.D7)
spi = busio.SPI(board.SCK, board.MOSI, board.MISO)
esp = adafruit_esp32spi.ESP_SPIcontrol(spi, esp32_cs, esp32_ready, esp32_reset)
esp.set_ip_addr("192.168.4.1")
## Connect to wifi with secrets
wifi = wifimanager.ESPSPI_WiFiManager(esp, secrets, status_light, debug=True)
wifi.create_ap()
# wifi.connect()
pixel_pin = board.D5
num_pixels = 59
pixels = neopixel.NeoPixel(pixel_pin, num_pixels, brightness=1, auto_write = False, pixel_order = neopixel.GRB)
class SimpleWSGIApplication:
"""
An example of a simple WSGI Application that supports
basic route handling and static asset file serving
"""
INDEX = "/index.html"
CHUNK_SIZE = 8192 # Number of bytes to send at once when serving files
def on(self, method, path, request_handler):
"""
Register a Request Handler for a particular HTTP method and path.
request_handler will be called whenever a matching HTTP request is received.
request_handler should accept the following args:
(Dict environ)
request_handler should return a tuple in the shape of:
(status, header_list, data_iterable)
:param str method: the method of the HTTP request
:param str path: the path of the HTTP request
:param func request_handler: the function to call
"""
self._listeners[self._get_listener_key(method, path)] = request_handler
def __init__(self, static_dir=None, debug=True):
self._debug = debug
self._listeners = {}
self._start_response = None
self._static = static_dir
if self._static:
self._static_files = ["/" + file for file in os.listdir(self._static)]
def __call__(self, environ, start_response):
"""
Called whenever the server gets a request.
The environ dict has details about the request per wsgi specification.
Call start_response with the response status string and headers as a list of tuples.
Return a single item list with the item being your response data string.
"""
if self._debug:
self._log_environ(environ)
self._start_response = start_response
status = ""
headers = []
resp_data = []
key = self._get_listener_key(environ["REQUEST_METHOD"].lower(), environ["PATH_INFO"])
if key in self._listeners:
status, headers, resp_data = self._listeners[key](environ)
if environ["REQUEST_METHOD"].lower() == "get" and self._static:
path = environ["PATH_INFO"]
if path in self._static_files:
status, headers, resp_data = self.serve_file(path, directory=self._static)
elif path == "/" and self.INDEX in self._static_files:
status, headers, resp_data = self.serve_file(self.INDEX, directory=self._static)
self._start_response(status, headers)
return resp_data
def serve_file(self, file_path, directory=None):
status = "200 OK"
contentType = self._get_content_type(file_path)
headers = [("Content-Type", contentType)]
if (contentType == 'text/css'):
headers.append(("cache-control", "public, max-age=604800, s-maxage=43200"))
full_path = file_path if not directory else directory + file_path
def resp_iter():
with open(full_path, 'rb') as file:
while True:
chunk = file.read(self.CHUNK_SIZE)
if chunk:
time.sleep(0.05)
yield chunk
else:
break
return (status, headers, resp_iter())
def _log_environ(self, environ): # pylint: disable=no-self-use
print("environ map:")
for name, value in environ.items():
if name == "wsgi.input":
continue
else:
print(name, value)
def _get_listener_key(self, method, path): # pylint: disable=no-self-use
return "{0}|{1}".format(method.lower(), path)
def _get_content_type(self, file): # pylint: disable=no-self-use
ext = file.split('.')[-1]
if ext in ("html", "htm"):
return "text/html"
if ext == "js":
return "application/javascript"
if ext == "css":
return "text/css"
if ext in ("jpg", "jpeg"):
return "image/jpeg"
if ext == "png":
return "image/png"
return "text/plain"
class display_type:
OFF = 0
BMP = 1
COLORS = 2
COLORS_GRAD_ANIMATE = 3
class pixel_stick:
def __init__(self):
self.is_displaying = display_type.OFF
self.loop_image = False
self.colors_pixels = [0] * num_pixels
self.palette = []
self.animation_step = 0
self.period = 0
self.duty_cycle = 1
self.current_display = []
# Our HTTP Request handlers
def led_color(self,environ): # pylint: disable=unused-argument
json = json_module.loads(environ["wsgi.input"].getvalue())
print(json)
rgb_tuple = (json.get("r"), json.get("g"), json.get("b"))
status_light.fill(rgb_tuple)
return ("200 OK", [], [])
def load_image(self,environ):
print("yo!")
file_name = '/static/current_image.bmp'
b = environ["wsgi.input"]
file = open(file_name, "wb")
file.write(bytes(b.getvalue(),'utf-8'))
file.flush()
file.close()
gc.collect()
return ("200 OK", [], [])
def start_image(self, environ):
print("start display")
self.is_displaying = display_type.BMP
json = json_module.loads(environ["wsgi.input"].getvalue())
if json and json.get("loop_image"):
self.loop_image = json.get("loop_image")
print("loop_image:", self.loop_image)
r = bmpReader('/static/current_image.bmp')
(self.display_width, self.display_height, self.current_display) = r.read_rows()
gc.collect() # TODO: if width is different than pixel strip length, return 400
return ("200 OK", [], [])
def start_colors(self, environ):
print("start colors")
json = json_module.loads(environ["wsgi.input"].getvalue())
if json and json.get("colors"):
colors = json.get("colors")
if json.get("blend"):
self.palette = []
for color in colors:
print(color)
self.palette.append(fancy.CRGB(color.get("r"),color.get("g"), color.get("b")))
self.period = json.get("period") if json.get("period") else 0
self.duty_cycle = json.get("duty_cycle") if json.get("duty_cycle") else 1
if json.get("animate"):
self.is_displaying = display_type.COLORS_GRAD_ANIMATE
return ("200 OK", [], [])
partition_size = num_pixels // len(colors)
remainder = num_pixels % len(colors)
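# Illustrative arithmetic for the non-blend branch below (assumes num_pixels = 59):
# with 4 colors, partition_size = 14 and remainder = 3, so the last color block
# is padded to 14 + 3 = 17 pixels to cover the whole strip.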
if json.get("blend"):
for i in range(num_pixels):
pos = (i / ((num_pixels * len(colors)) / (len(colors) - 1) ) )
color = fancy.palette_lookup(self.palette, pos)
print('pos', pos)
print('color', color)
color = fancy.gamma_adjust(color, brightness=0.5)
self.colors_pixels[i] = color.pack()
else:
for idx, color in enumerate(colors):
color = fancy.CRGB(color.get("r"),color.get("g"), color.get("b"))
# color = fancy.gamma_adjust(color, brightness=0.5)
current_idx = idx * partition_size
use_remainder = remainder if idx == len(colors) - 1 else 0
self.colors_pixels[current_idx: current_idx + partition_size + use_remainder] = [color.pack()] * (partition_size + use_remainder)
self.is_displaying = display_type.COLORS
return ("200 OK", [], [])
def stop_display(self, environ):
self.is_displaying = display_type.OFF
self.loop_image = False
pixels.fill((0,0,0))
pixels.show()
return ("200 OK", [], [])
def process_display(self):
if self.is_displaying == display_type.COLORS and self.colors_pixels:
pixels[:] = self.colors_pixels
pixels.show()
self._blink()
if self.is_displaying == display_type.COLORS_GRAD_ANIMATE and self.palette:
# pos = self.animation_step / (len(self.palette) / (len(self.palette) - 1))
# self.animation_step += 0.1 / min(3, len(self.palette))
# color = fancy.palette_lookup(self.palette, pos)
# color = fancy.gamma_adjust(color, brightness=0.5)
# pixels.fill(color.pack())
# pixels.show()
# self._blink()
sleep = 0.05
self.animation_step += sleep / ( len(self.palette) * self.period )
print(sleep / ( len(self.palette) * self.period ))
# print(self.animation_step)
color = fancy.palette_lookup(self.palette, self.animation_step)
# print(color)
# color = fancy.gamma_adjust(color, brightness=0.5)
pixels.fill(color.pack())
pixels.show()
# time.sleep(sleep*0.5)
if self.is_displaying == display_type.BMP and self.current_display:
print("start displaying")
rowSize = (self.display_width * 3)
print("current_display_size: ", len(self.current_display))
# rowCounter = 0
# rgb = []
# for val in self.current_display:
# if (len(rgb) == 3):
# print("rgb", rgb)
# pixels[rowCounter] = tuple(rgb)
# rgb = []
# rgb.append(val)
# rowCounter += 1
# else:
# rgb.append(val)
# if (rowCounter == self.display_width):
# print("row finished")
# pixels.show()
# time.sleep(0.1)
# rowCounter = 0
# print("done!")
for row in range(self.display_height):
# print("row", row)
pixel_index = 0
for col in range(self.display_width):
# print("col", col)
idx = (rowSize * row) + (col * 3)
# print("idx", idx)
# print("rgb ", tuple(self.current_display[idx:idx+3]))
pixels[pixel_index] = tuple(self.current_display[idx:idx+3])
pixel_index += 1
# print(pixels)
pixels.show()
time.sleep(0.01)
if (not self.loop_image):
self.is_displaying = display_type.OFF
pixels.fill((0,0,0))
pixels.show()
# self.current_img = json_module.loads(environ["wsgi.input"].getvalue())
def _blink(self):
if (self.period) > 0:
time.sleep(self.period * self.duty_cycle)
if (self.duty_cycle < 1):
pixels.fill((0,0,0))
pixels.show()
time.sleep(self.period - (self.period * self.duty_cycle))
# Here we create our application, setting the static directory location
# and registering the above request_handlers for specific HTTP requests
# we want to listen and respond to.
static_dir = "/static"
try:
static_files = os.listdir(static_dir)
if "index.html" not in static_files:
raise RuntimeError("""
This example depends on an index.html, but it isn't present.
Please add it to the {0} directory""".format(static_dir))
except (OSError) as e:
raise RuntimeError("""
This example depends on a static asset directory.
Please create one named {0} in the root of the device filesystem.""".format(static_dir))
pixelStick = pixel_stick()
web_app = SimpleWSGIApplication(static_dir=static_dir)
web_app.on("POST", "/ajax/ledcolor", pixelStick.led_color)
web_app.on("POST", "/ajax/loadImage", pixelStick.load_image)
web_app.on("POST", "/ajax/startImage", pixelStick.start_image)
web_app.on("POST", "/ajax/startColors", pixelStick.start_colors)
web_app.on("POST", "/ajax/stopDisplay", pixelStick.stop_display)
# Here we setup our server, passing in our web_app as the application
server.set_interface(esp)
wsgiServer = server.WSGIServer(80, application=web_app)
print("open this IP in your browser: ", esp.pretty_ip(esp.ip_address))
# Start the server
wsgiServer.start()
while True:
# Our main loop where we have the server poll for incoming requests
try:
wsgiServer.update_poll()
# Could do any other background tasks here, like reading sensors
except (ValueError, RuntimeError) as e:
print("Failed to update server, restarting ESP32\n", e)
wifi.reset()
continue
pixelStick.process_display()
| 38.416894 | 149 | 0.59451 |
4a23caf4484f2680f1caea248a47a18fdadf02fd | 430 | py | Python | 80-89/89_Gray Code.py | yanchdh/LeetCode | ec60364082ad246390cf3292090d23f1c7dd08b4 | [
"BSD-2-Clause"
] | 2 | 2018-01-12T08:45:08.000Z | 2018-01-15T13:29:56.000Z | 80-89/89_Gray Code.py | yanchdh/LeetCode | ec60364082ad246390cf3292090d23f1c7dd08b4 | [
"BSD-2-Clause"
] | null | null | null | 80-89/89_Gray Code.py | yanchdh/LeetCode | ec60364082ad246390cf3292090d23f1c7dd08b4 | [
"BSD-2-Clause"
] | null | null | null | # -*- coding:utf-8 -*-
# https://leetcode.com/problems/gray-code/description/
class Solution(object):
def grayCode(self, n):
"""
:type n: int
:rtype: List[int]
"""
if n <= 0:
return [0]
ret = [0, 1]
for i in xrange(1, n):
m = len(ret)
for j in xrange(m - 1, -1, -1):
ret.append(m + ret[j])
return ret
| 23.888889 | 54 | 0.427907 |
4a23cd3110afaf6e04d53bb24963fae486de8b18 | 84,240 | py | Python | test/expected/python.asyncio/variety/f_Foo.py | dustyholmes-wf/frugal | 915ccfc58fcc9baabc4549c522e3acd2975a2e0b | [
"Apache-2.0"
] | null | null | null | test/expected/python.asyncio/variety/f_Foo.py | dustyholmes-wf/frugal | 915ccfc58fcc9baabc4549c522e3acd2975a2e0b | [
"Apache-2.0"
] | null | null | null | test/expected/python.asyncio/variety/f_Foo.py | dustyholmes-wf/frugal | 915ccfc58fcc9baabc4549c522e3acd2975a2e0b | [
"Apache-2.0"
] | null | null | null | #
# Autogenerated by Frugal Compiler (3.4.7)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
import asyncio
from datetime import timedelta
import inspect
from frugal.aio.processor import FBaseProcessor
from frugal.aio.processor import FProcessorFunction
from frugal.exceptions import TApplicationExceptionType
from frugal.exceptions import TTransportExceptionType
from frugal.middleware import Method
from frugal.transport import TMemoryOutputBuffer
from frugal.util.deprecate import deprecated
from thrift.Thrift import TApplicationException
from thrift.Thrift import TMessageType
from thrift.transport.TTransport import TTransportException
import actual_base.python.f_BaseFoo
import actual_base.python.ttypes
import actual_base.python.constants
import validStructs.ttypes
import validStructs.constants
import ValidTypes.ttypes
import ValidTypes.constants
import subdir_include.ttypes
import subdir_include.constants
from .ttypes import *
class Iface(actual_base.python.f_BaseFoo.Iface):
"""
This is a thrift service. Frugal will generate bindings that include
a frugal Context for each service call.
"""
@deprecated
async def Ping(self, ctx):
"""
Ping the server.
Args:
ctx: FContext
deprecated: don't use this; use "something else"
"""
pass
async def blah(self, ctx, num, Str, event):
"""
Blah the server.
Args:
ctx: FContext
num: int (signed 32 bits)
Str: string
event: Event
"""
pass
async def oneWay(self, ctx, id, req):
"""
oneway methods don't receive a response from the server.
Args:
ctx: FContext
id: int (signed 64 bits)
req: dict of <int (signed 32 bits), string>
"""
pass
async def bin_method(self, ctx, bin, Str):
"""
Args:
ctx: FContext
bin: binary string
Str: string
"""
pass
async def param_modifiers(self, ctx, opt_num, default_num, req_num):
"""
Args:
ctx: FContext
opt_num: int (signed 32 bits)
default_num: int (signed 32 bits)
req_num: int (signed 32 bits)
"""
pass
async def underlying_types_test(self, ctx, list_type, set_type):
"""
Args:
ctx: FContext
list_type: list of int (signed 64 bits)
set_type: set of int (signed 64 bits)
"""
pass
async def getThing(self, ctx):
"""
Args:
ctx: FContext
"""
pass
async def getMyInt(self, ctx):
"""
Args:
ctx: FContext
"""
pass
async def use_subdir_struct(self, ctx, a):
"""
Args:
ctx: FContext
a: subdir_include.A
"""
pass
async def sayHelloWith(self, ctx, newMessage):
"""
Args:
ctx: FContext
newMessage: string
"""
pass
async def whatDoYouSay(self, ctx, messageArgs):
"""
Args:
ctx: FContext
messageArgs: string
"""
pass
async def sayAgain(self, ctx, messageResult):
"""
Args:
ctx: FContext
messageResult: string
"""
pass
class Client(actual_base.python.f_BaseFoo.Client, Iface):
def __init__(self, provider, middleware=None):
"""
Create a new Client with an FServiceProvider containing a transport
and protocol factory.
Args:
provider: FServiceProvider
middleware: ServiceMiddleware or list of ServiceMiddleware
"""
middleware = middleware or []
if middleware and not isinstance(middleware, list):
middleware = [middleware]
super(Client, self).__init__(provider, middleware=middleware)
middleware += provider.get_middleware()
self._methods.update({
'Ping': Method(self._Ping, middleware),
'blah': Method(self._blah, middleware),
'oneWay': Method(self._oneWay, middleware),
'bin_method': Method(self._bin_method, middleware),
'param_modifiers': Method(self._param_modifiers, middleware),
'underlying_types_test': Method(self._underlying_types_test, middleware),
'getThing': Method(self._getThing, middleware),
'getMyInt': Method(self._getMyInt, middleware),
'use_subdir_struct': Method(self._use_subdir_struct, middleware),
'sayHelloWith': Method(self._sayHelloWith, middleware),
'whatDoYouSay': Method(self._whatDoYouSay, middleware),
'sayAgain': Method(self._sayAgain, middleware),
})
@deprecated
async def Ping(self, ctx):
"""
Ping the server.
Args:
ctx: FContext
deprecated: don't use this; use "something else"
"""
return await self._methods['Ping']([ctx])
async def _Ping(self, ctx):
memory_buffer = TMemoryOutputBuffer(self._transport.get_request_size_limit())
oprot = self._protocol_factory.get_protocol(memory_buffer)
oprot.write_request_headers(ctx)
oprot.writeMessageBegin('ping', TMessageType.CALL, 0)
args = Ping_args()
args.write(oprot)
oprot.writeMessageEnd()
response_transport = await self._transport.request(ctx, memory_buffer.getvalue())
iprot = self._protocol_factory.get_protocol(response_transport)
iprot.read_response_headers(ctx)
_, mtype, _ = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
if x.type == TApplicationExceptionType.RESPONSE_TOO_LARGE:
raise TTransportException(type=TTransportExceptionType.RESPONSE_TOO_LARGE, message=x.message)
raise x
result = Ping_result()
result.read(iprot)
iprot.readMessageEnd()
async def blah(self, ctx, num, Str, event):
"""
Blah the server.
Args:
ctx: FContext
num: int (signed 32 bits)
Str: string
event: Event
"""
return await self._methods['blah']([ctx, num, Str, event])
async def _blah(self, ctx, num, Str, event):
memory_buffer = TMemoryOutputBuffer(self._transport.get_request_size_limit())
oprot = self._protocol_factory.get_protocol(memory_buffer)
oprot.write_request_headers(ctx)
oprot.writeMessageBegin('blah', TMessageType.CALL, 0)
args = blah_args()
args.num = num
args.Str = Str
args.event = event
args.write(oprot)
oprot.writeMessageEnd()
response_transport = await self._transport.request(ctx, memory_buffer.getvalue())
iprot = self._protocol_factory.get_protocol(response_transport)
iprot.read_response_headers(ctx)
_, mtype, _ = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
if x.type == TApplicationExceptionType.RESPONSE_TOO_LARGE:
raise TTransportException(type=TTransportExceptionType.RESPONSE_TOO_LARGE, message=x.message)
raise x
result = blah_result()
result.read(iprot)
iprot.readMessageEnd()
if result.awe is not None:
raise result.awe
if result.api is not None:
raise result.api
if result.success is not None:
return result.success
raise TApplicationException(TApplicationExceptionType.MISSING_RESULT, "blah failed: unknown result")
async def oneWay(self, ctx, id, req):
"""
oneway methods don't receive a response from the server.
Args:
ctx: FContext
id: int (signed 64 bits)
req: dict of <int (signed 32 bits), string>
"""
return await self._methods['oneWay']([ctx, id, req])
async def _oneWay(self, ctx, id, req):
memory_buffer = TMemoryOutputBuffer(self._transport.get_request_size_limit())
oprot = self._protocol_factory.get_protocol(memory_buffer)
oprot.write_request_headers(ctx)
oprot.writeMessageBegin('oneWay', TMessageType.CALL, 0)
args = oneWay_args()
args.id = id
args.req = req
args.write(oprot)
oprot.writeMessageEnd()
await self._transport.oneway(ctx, memory_buffer.getvalue())
async def bin_method(self, ctx, bin, Str):
"""
Args:
ctx: FContext
bin: binary string
Str: string
"""
return await self._methods['bin_method']([ctx, bin, Str])
async def _bin_method(self, ctx, bin, Str):
memory_buffer = TMemoryOutputBuffer(self._transport.get_request_size_limit())
oprot = self._protocol_factory.get_protocol(memory_buffer)
oprot.write_request_headers(ctx)
oprot.writeMessageBegin('bin_method', TMessageType.CALL, 0)
args = bin_method_args()
args.bin = bin
args.Str = Str
args.write(oprot)
oprot.writeMessageEnd()
response_transport = await self._transport.request(ctx, memory_buffer.getvalue())
iprot = self._protocol_factory.get_protocol(response_transport)
iprot.read_response_headers(ctx)
_, mtype, _ = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
if x.type == TApplicationExceptionType.RESPONSE_TOO_LARGE:
raise TTransportException(type=TTransportExceptionType.RESPONSE_TOO_LARGE, message=x.message)
raise x
result = bin_method_result()
result.read(iprot)
iprot.readMessageEnd()
if result.api is not None:
raise result.api
if result.success is not None:
return result.success
raise TApplicationException(TApplicationExceptionType.MISSING_RESULT, "bin_method failed: unknown result")
async def param_modifiers(self, ctx, opt_num, default_num, req_num):
"""
Args:
ctx: FContext
opt_num: int (signed 32 bits)
default_num: int (signed 32 bits)
req_num: int (signed 32 bits)
"""
return await self._methods['param_modifiers']([ctx, opt_num, default_num, req_num])
async def _param_modifiers(self, ctx, opt_num, default_num, req_num):
memory_buffer = TMemoryOutputBuffer(self._transport.get_request_size_limit())
oprot = self._protocol_factory.get_protocol(memory_buffer)
oprot.write_request_headers(ctx)
oprot.writeMessageBegin('param_modifiers', TMessageType.CALL, 0)
args = param_modifiers_args()
args.opt_num = opt_num
args.default_num = default_num
args.req_num = req_num
args.write(oprot)
oprot.writeMessageEnd()
response_transport = await self._transport.request(ctx, memory_buffer.getvalue())
iprot = self._protocol_factory.get_protocol(response_transport)
iprot.read_response_headers(ctx)
_, mtype, _ = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
if x.type == TApplicationExceptionType.RESPONSE_TOO_LARGE:
raise TTransportException(type=TTransportExceptionType.RESPONSE_TOO_LARGE, message=x.message)
raise x
result = param_modifiers_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationExceptionType.MISSING_RESULT, "param_modifiers failed: unknown result")
async def underlying_types_test(self, ctx, list_type, set_type):
"""
Args:
ctx: FContext
list_type: list of int (signed 64 bits)
set_type: set of int (signed 64 bits)
"""
return await self._methods['underlying_types_test']([ctx, list_type, set_type])
async def _underlying_types_test(self, ctx, list_type, set_type):
memory_buffer = TMemoryOutputBuffer(self._transport.get_request_size_limit())
oprot = self._protocol_factory.get_protocol(memory_buffer)
oprot.write_request_headers(ctx)
oprot.writeMessageBegin('underlying_types_test', TMessageType.CALL, 0)
args = underlying_types_test_args()
args.list_type = list_type
args.set_type = set_type
args.write(oprot)
oprot.writeMessageEnd()
response_transport = await self._transport.request(ctx, memory_buffer.getvalue())
iprot = self._protocol_factory.get_protocol(response_transport)
iprot.read_response_headers(ctx)
_, mtype, _ = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
if x.type == TApplicationExceptionType.RESPONSE_TOO_LARGE:
raise TTransportException(type=TTransportExceptionType.RESPONSE_TOO_LARGE, message=x.message)
raise x
result = underlying_types_test_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationExceptionType.MISSING_RESULT, "underlying_types_test failed: unknown result")
async def getThing(self, ctx):
"""
Args:
ctx: FContext
"""
return await self._methods['getThing']([ctx])
async def _getThing(self, ctx):
memory_buffer = TMemoryOutputBuffer(self._transport.get_request_size_limit())
oprot = self._protocol_factory.get_protocol(memory_buffer)
oprot.write_request_headers(ctx)
oprot.writeMessageBegin('getThing', TMessageType.CALL, 0)
args = getThing_args()
args.write(oprot)
oprot.writeMessageEnd()
response_transport = await self._transport.request(ctx, memory_buffer.getvalue())
iprot = self._protocol_factory.get_protocol(response_transport)
iprot.read_response_headers(ctx)
_, mtype, _ = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
if x.type == TApplicationExceptionType.RESPONSE_TOO_LARGE:
raise TTransportException(type=TTransportExceptionType.RESPONSE_TOO_LARGE, message=x.message)
raise x
result = getThing_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationExceptionType.MISSING_RESULT, "getThing failed: unknown result")
async def getMyInt(self, ctx):
"""
Args:
ctx: FContext
"""
return await self._methods['getMyInt']([ctx])
async def _getMyInt(self, ctx):
memory_buffer = TMemoryOutputBuffer(self._transport.get_request_size_limit())
oprot = self._protocol_factory.get_protocol(memory_buffer)
oprot.write_request_headers(ctx)
oprot.writeMessageBegin('getMyInt', TMessageType.CALL, 0)
args = getMyInt_args()
args.write(oprot)
oprot.writeMessageEnd()
response_transport = await self._transport.request(ctx, memory_buffer.getvalue())
iprot = self._protocol_factory.get_protocol(response_transport)
iprot.read_response_headers(ctx)
_, mtype, _ = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
if x.type == TApplicationExceptionType.RESPONSE_TOO_LARGE:
raise TTransportException(type=TTransportExceptionType.RESPONSE_TOO_LARGE, message=x.message)
raise x
result = getMyInt_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationExceptionType.MISSING_RESULT, "getMyInt failed: unknown result")
async def use_subdir_struct(self, ctx, a):
"""
Args:
ctx: FContext
a: subdir_include.A
"""
return await self._methods['use_subdir_struct']([ctx, a])
async def _use_subdir_struct(self, ctx, a):
memory_buffer = TMemoryOutputBuffer(self._transport.get_request_size_limit())
oprot = self._protocol_factory.get_protocol(memory_buffer)
oprot.write_request_headers(ctx)
oprot.writeMessageBegin('use_subdir_struct', TMessageType.CALL, 0)
args = use_subdir_struct_args()
args.a = a
args.write(oprot)
oprot.writeMessageEnd()
response_transport = await self._transport.request(ctx, memory_buffer.getvalue())
iprot = self._protocol_factory.get_protocol(response_transport)
iprot.read_response_headers(ctx)
_, mtype, _ = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
if x.type == TApplicationExceptionType.RESPONSE_TOO_LARGE:
raise TTransportException(type=TTransportExceptionType.RESPONSE_TOO_LARGE, message=x.message)
raise x
result = use_subdir_struct_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationExceptionType.MISSING_RESULT, "use_subdir_struct failed: unknown result")
async def sayHelloWith(self, ctx, newMessage):
"""
Args:
ctx: FContext
newMessage: string
"""
return await self._methods['sayHelloWith']([ctx, newMessage])
async def _sayHelloWith(self, ctx, newMessage):
memory_buffer = TMemoryOutputBuffer(self._transport.get_request_size_limit())
oprot = self._protocol_factory.get_protocol(memory_buffer)
oprot.write_request_headers(ctx)
oprot.writeMessageBegin('sayHelloWith', TMessageType.CALL, 0)
args = sayHelloWith_args()
args.newMessage = newMessage
args.write(oprot)
oprot.writeMessageEnd()
response_transport = await self._transport.request(ctx, memory_buffer.getvalue())
iprot = self._protocol_factory.get_protocol(response_transport)
iprot.read_response_headers(ctx)
_, mtype, _ = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
if x.type == TApplicationExceptionType.RESPONSE_TOO_LARGE:
raise TTransportException(type=TTransportExceptionType.RESPONSE_TOO_LARGE, message=x.message)
raise x
result = sayHelloWith_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationExceptionType.MISSING_RESULT, "sayHelloWith failed: unknown result")
async def whatDoYouSay(self, ctx, messageArgs):
"""
Args:
ctx: FContext
messageArgs: string
"""
return await self._methods['whatDoYouSay']([ctx, messageArgs])
async def _whatDoYouSay(self, ctx, messageArgs):
memory_buffer = TMemoryOutputBuffer(self._transport.get_request_size_limit())
oprot = self._protocol_factory.get_protocol(memory_buffer)
oprot.write_request_headers(ctx)
oprot.writeMessageBegin('whatDoYouSay', TMessageType.CALL, 0)
args = whatDoYouSay_args()
args.messageArgs = messageArgs
args.write(oprot)
oprot.writeMessageEnd()
response_transport = await self._transport.request(ctx, memory_buffer.getvalue())
iprot = self._protocol_factory.get_protocol(response_transport)
iprot.read_response_headers(ctx)
_, mtype, _ = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
if x.type == TApplicationExceptionType.RESPONSE_TOO_LARGE:
raise TTransportException(type=TTransportExceptionType.RESPONSE_TOO_LARGE, message=x.message)
raise x
result = whatDoYouSay_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationExceptionType.MISSING_RESULT, "whatDoYouSay failed: unknown result")
async def sayAgain(self, ctx, messageResult):
"""
Args:
ctx: FContext
messageResult: string
"""
return await self._methods['sayAgain']([ctx, messageResult])
async def _sayAgain(self, ctx, messageResult):
memory_buffer = TMemoryOutputBuffer(self._transport.get_request_size_limit())
oprot = self._protocol_factory.get_protocol(memory_buffer)
oprot.write_request_headers(ctx)
oprot.writeMessageBegin('sayAgain', TMessageType.CALL, 0)
args = sayAgain_args()
args.messageResult = messageResult
args.write(oprot)
oprot.writeMessageEnd()
response_transport = await self._transport.request(ctx, memory_buffer.getvalue())
iprot = self._protocol_factory.get_protocol(response_transport)
iprot.read_response_headers(ctx)
_, mtype, _ = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
if x.type == TApplicationExceptionType.RESPONSE_TOO_LARGE:
raise TTransportException(type=TTransportExceptionType.RESPONSE_TOO_LARGE, message=x.message)
raise x
result = sayAgain_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationExceptionType.MISSING_RESULT, "sayAgain failed: unknown result")
class Processor(actual_base.python.f_BaseFoo.Processor):
def __init__(self, handler, middleware=None):
"""
Create a new Processor.
Args:
handler: Iface
"""
if middleware and not isinstance(middleware, list):
middleware = [middleware]
super(Processor, self).__init__(handler, middleware=middleware)
self.add_to_processor_map('ping', _Ping(Method(handler.Ping, middleware), self.get_write_lock()))
self.add_to_annotations_map('ping', {"deprecated": "don't use this; use \"something else\""})
self.add_to_processor_map('blah', _blah(Method(handler.blah, middleware), self.get_write_lock()))
self.add_to_processor_map('oneWay', _oneWay(Method(handler.oneWay, middleware), self.get_write_lock()))
self.add_to_processor_map('bin_method', _bin_method(Method(handler.bin_method, middleware), self.get_write_lock()))
self.add_to_processor_map('param_modifiers', _param_modifiers(Method(handler.param_modifiers, middleware), self.get_write_lock()))
self.add_to_processor_map('underlying_types_test', _underlying_types_test(Method(handler.underlying_types_test, middleware), self.get_write_lock()))
self.add_to_processor_map('getThing', _getThing(Method(handler.getThing, middleware), self.get_write_lock()))
self.add_to_processor_map('getMyInt', _getMyInt(Method(handler.getMyInt, middleware), self.get_write_lock()))
self.add_to_processor_map('use_subdir_struct', _use_subdir_struct(Method(handler.use_subdir_struct, middleware), self.get_write_lock()))
self.add_to_processor_map('sayHelloWith', _sayHelloWith(Method(handler.sayHelloWith, middleware), self.get_write_lock()))
self.add_to_processor_map('whatDoYouSay', _whatDoYouSay(Method(handler.whatDoYouSay, middleware), self.get_write_lock()))
self.add_to_processor_map('sayAgain', _sayAgain(Method(handler.sayAgain, middleware), self.get_write_lock()))
class _Ping(FProcessorFunction):
def __init__(self, handler, lock):
super(_Ping, self).__init__(handler, lock)
@deprecated
async def process(self, ctx, iprot, oprot):
args = Ping_args()
args.read(iprot)
iprot.readMessageEnd()
result = Ping_result()
try:
ret = self._handler([ctx])
if inspect.iscoroutine(ret):
ret = await ret
except TApplicationException as ex:
async with self._lock:
_write_application_exception(ctx, oprot, "ping", exception=ex)
return
except Exception as e:
async with self._lock:
_write_application_exception(ctx, oprot, "ping", ex_code=TApplicationExceptionType.INTERNAL_ERROR, message=str(e))
raise
async with self._lock:
try:
oprot.write_response_headers(ctx)
oprot.writeMessageBegin('ping', TMessageType.REPLY, 0)
result.write(oprot)
oprot.writeMessageEnd()
oprot.get_transport().flush()
except TTransportException as e:
# catch a request too large error because the TMemoryOutputBuffer always throws that if too much data is written
if e.type == TTransportExceptionType.REQUEST_TOO_LARGE:
raise _write_application_exception(ctx, oprot, "ping", ex_code=TApplicationExceptionType.RESPONSE_TOO_LARGE, message=e.message)
else:
raise e
class _blah(FProcessorFunction):
def __init__(self, handler, lock):
super(_blah, self).__init__(handler, lock)
async def process(self, ctx, iprot, oprot):
args = blah_args()
args.read(iprot)
iprot.readMessageEnd()
result = blah_result()
try:
ret = self._handler([ctx, args.num, args.Str, args.event])
if inspect.iscoroutine(ret):
ret = await ret
result.success = ret
except TApplicationException as ex:
async with self._lock:
_write_application_exception(ctx, oprot, "blah", exception=ex)
return
except AwesomeException as awe:
result.awe = awe
except actual_base.python.ttypes.api_exception as api:
result.api = api
except Exception as e:
async with self._lock:
_write_application_exception(ctx, oprot, "blah", ex_code=TApplicationExceptionType.INTERNAL_ERROR, message=str(e))
raise
async with self._lock:
try:
oprot.write_response_headers(ctx)
oprot.writeMessageBegin('blah', TMessageType.REPLY, 0)
result.write(oprot)
oprot.writeMessageEnd()
oprot.get_transport().flush()
except TTransportException as e:
# catch a request too large error because the TMemoryOutputBuffer always throws that if too much data is written
if e.type == TTransportExceptionType.REQUEST_TOO_LARGE:
raise _write_application_exception(ctx, oprot, "blah", ex_code=TApplicationExceptionType.RESPONSE_TOO_LARGE, message=e.message)
else:
raise e
class _oneWay(FProcessorFunction):
def __init__(self, handler, lock):
super(_oneWay, self).__init__(handler, lock)
async def process(self, ctx, iprot, oprot):
args = oneWay_args()
args.read(iprot)
iprot.readMessageEnd()
try:
ret = self._handler([ctx, args.id, args.req])
if inspect.iscoroutine(ret):
ret = await ret
except TApplicationException as ex:
async with self._lock:
_write_application_exception(ctx, oprot, "oneWay", exception=ex)
return
except Exception as e:
raise
class _bin_method(FProcessorFunction):
def __init__(self, handler, lock):
super(_bin_method, self).__init__(handler, lock)
async def process(self, ctx, iprot, oprot):
args = bin_method_args()
args.read(iprot)
iprot.readMessageEnd()
result = bin_method_result()
try:
ret = self._handler([ctx, args.bin, args.Str])
if inspect.iscoroutine(ret):
ret = await ret
result.success = ret
except TApplicationException as ex:
async with self._lock:
_write_application_exception(ctx, oprot, "bin_method", exception=ex)
return
except actual_base.python.ttypes.api_exception as api:
result.api = api
except Exception as e:
async with self._lock:
_write_application_exception(ctx, oprot, "bin_method", ex_code=TApplicationExceptionType.INTERNAL_ERROR, message=str(e))
raise
async with self._lock:
try:
oprot.write_response_headers(ctx)
oprot.writeMessageBegin('bin_method', TMessageType.REPLY, 0)
result.write(oprot)
oprot.writeMessageEnd()
oprot.get_transport().flush()
except TTransportException as e:
# catch a request too large error because the TMemoryOutputBuffer always throws that if too much data is written
if e.type == TTransportExceptionType.REQUEST_TOO_LARGE:
raise _write_application_exception(ctx, oprot, "bin_method", ex_code=TApplicationExceptionType.RESPONSE_TOO_LARGE, message=e.message)
else:
raise e
class _param_modifiers(FProcessorFunction):
def __init__(self, handler, lock):
super(_param_modifiers, self).__init__(handler, lock)
async def process(self, ctx, iprot, oprot):
args = param_modifiers_args()
args.read(iprot)
iprot.readMessageEnd()
result = param_modifiers_result()
try:
ret = self._handler([ctx, args.opt_num, args.default_num, args.req_num])
if inspect.iscoroutine(ret):
ret = await ret
result.success = ret
except TApplicationException as ex:
async with self._lock:
_write_application_exception(ctx, oprot, "param_modifiers", exception=ex)
return
except Exception as e:
async with self._lock:
_write_application_exception(ctx, oprot, "param_modifiers", ex_code=TApplicationExceptionType.INTERNAL_ERROR, message=str(e))
raise
async with self._lock:
try:
oprot.write_response_headers(ctx)
oprot.writeMessageBegin('param_modifiers', TMessageType.REPLY, 0)
result.write(oprot)
oprot.writeMessageEnd()
oprot.get_transport().flush()
except TTransportException as e:
# catch a request too large error because the TMemoryOutputBuffer always throws that if too much data is written
if e.type == TTransportExceptionType.REQUEST_TOO_LARGE:
raise _write_application_exception(ctx, oprot, "param_modifiers", ex_code=TApplicationExceptionType.RESPONSE_TOO_LARGE, message=e.message)
else:
raise e
class _underlying_types_test(FProcessorFunction):
def __init__(self, handler, lock):
super(_underlying_types_test, self).__init__(handler, lock)
async def process(self, ctx, iprot, oprot):
args = underlying_types_test_args()
args.read(iprot)
iprot.readMessageEnd()
result = underlying_types_test_result()
try:
ret = self._handler([ctx, args.list_type, args.set_type])
if inspect.iscoroutine(ret):
ret = await ret
result.success = ret
except TApplicationException as ex:
async with self._lock:
_write_application_exception(ctx, oprot, "underlying_types_test", exception=ex)
return
except Exception as e:
async with self._lock:
_write_application_exception(ctx, oprot, "underlying_types_test", ex_code=TApplicationExceptionType.INTERNAL_ERROR, message=str(e))
raise
async with self._lock:
try:
oprot.write_response_headers(ctx)
oprot.writeMessageBegin('underlying_types_test', TMessageType.REPLY, 0)
result.write(oprot)
oprot.writeMessageEnd()
oprot.get_transport().flush()
except TTransportException as e:
# catch a request too large error because the TMemoryOutputBuffer always throws that if too much data is written
if e.type == TTransportExceptionType.REQUEST_TOO_LARGE:
raise _write_application_exception(ctx, oprot, "underlying_types_test", ex_code=TApplicationExceptionType.RESPONSE_TOO_LARGE, message=e.message)
else:
raise e
class _getThing(FProcessorFunction):
def __init__(self, handler, lock):
super(_getThing, self).__init__(handler, lock)
async def process(self, ctx, iprot, oprot):
args = getThing_args()
args.read(iprot)
iprot.readMessageEnd()
result = getThing_result()
try:
ret = self._handler([ctx])
if inspect.iscoroutine(ret):
ret = await ret
result.success = ret
except TApplicationException as ex:
async with self._lock:
_write_application_exception(ctx, oprot, "getThing", exception=ex)
return
except Exception as e:
async with self._lock:
_write_application_exception(ctx, oprot, "getThing", ex_code=TApplicationExceptionType.INTERNAL_ERROR, message=str(e))
raise
async with self._lock:
try:
oprot.write_response_headers(ctx)
oprot.writeMessageBegin('getThing', TMessageType.REPLY, 0)
result.write(oprot)
oprot.writeMessageEnd()
oprot.get_transport().flush()
except TTransportException as e:
# catch a request too large error because the TMemoryOutputBuffer always throws that if too much data is written
if e.type == TTransportExceptionType.REQUEST_TOO_LARGE:
raise _write_application_exception(ctx, oprot, "getThing", ex_code=TApplicationExceptionType.RESPONSE_TOO_LARGE, message=e.message)
else:
raise e
class _getMyInt(FProcessorFunction):
def __init__(self, handler, lock):
super(_getMyInt, self).__init__(handler, lock)
async def process(self, ctx, iprot, oprot):
args = getMyInt_args()
args.read(iprot)
iprot.readMessageEnd()
result = getMyInt_result()
try:
ret = self._handler([ctx])
if inspect.iscoroutine(ret):
ret = await ret
result.success = ret
except TApplicationException as ex:
async with self._lock:
_write_application_exception(ctx, oprot, "getMyInt", exception=ex)
return
except Exception as e:
async with self._lock:
_write_application_exception(ctx, oprot, "getMyInt", ex_code=TApplicationExceptionType.INTERNAL_ERROR, message=str(e))
raise
async with self._lock:
try:
oprot.write_response_headers(ctx)
oprot.writeMessageBegin('getMyInt', TMessageType.REPLY, 0)
result.write(oprot)
oprot.writeMessageEnd()
oprot.get_transport().flush()
except TTransportException as e:
# catch a request too large error because the TMemoryOutputBuffer always throws that if too much data is written
if e.type == TTransportExceptionType.REQUEST_TOO_LARGE:
raise _write_application_exception(ctx, oprot, "getMyInt", ex_code=TApplicationExceptionType.RESPONSE_TOO_LARGE, message=e.message)
else:
raise e
class _use_subdir_struct(FProcessorFunction):
def __init__(self, handler, lock):
super(_use_subdir_struct, self).__init__(handler, lock)
async def process(self, ctx, iprot, oprot):
args = use_subdir_struct_args()
args.read(iprot)
iprot.readMessageEnd()
result = use_subdir_struct_result()
try:
ret = self._handler([ctx, args.a])
if inspect.iscoroutine(ret):
ret = await ret
result.success = ret
except TApplicationException as ex:
async with self._lock:
_write_application_exception(ctx, oprot, "use_subdir_struct", exception=ex)
return
except Exception as e:
async with self._lock:
_write_application_exception(ctx, oprot, "use_subdir_struct", ex_code=TApplicationExceptionType.INTERNAL_ERROR, message=str(e))
raise
async with self._lock:
try:
oprot.write_response_headers(ctx)
oprot.writeMessageBegin('use_subdir_struct', TMessageType.REPLY, 0)
result.write(oprot)
oprot.writeMessageEnd()
oprot.get_transport().flush()
except TTransportException as e:
# catch a request too large error because the TMemoryOutputBuffer always throws that if too much data is written
if e.type == TTransportExceptionType.REQUEST_TOO_LARGE:
raise _write_application_exception(ctx, oprot, "use_subdir_struct", ex_code=TApplicationExceptionType.RESPONSE_TOO_LARGE, message=e.message)
else:
raise e
class _sayHelloWith(FProcessorFunction):
def __init__(self, handler, lock):
super(_sayHelloWith, self).__init__(handler, lock)
async def process(self, ctx, iprot, oprot):
args = sayHelloWith_args()
args.read(iprot)
iprot.readMessageEnd()
result = sayHelloWith_result()
try:
ret = self._handler([ctx, args.newMessage])
if inspect.iscoroutine(ret):
ret = await ret
result.success = ret
except TApplicationException as ex:
async with self._lock:
_write_application_exception(ctx, oprot, "sayHelloWith", exception=ex)
return
except Exception as e:
async with self._lock:
_write_application_exception(ctx, oprot, "sayHelloWith", ex_code=TApplicationExceptionType.INTERNAL_ERROR, message=str(e))
raise
async with self._lock:
try:
oprot.write_response_headers(ctx)
oprot.writeMessageBegin('sayHelloWith', TMessageType.REPLY, 0)
result.write(oprot)
oprot.writeMessageEnd()
oprot.get_transport().flush()
except TTransportException as e:
# catch a request too large error because the TMemoryOutputBuffer always throws that if too much data is written
if e.type == TTransportExceptionType.REQUEST_TOO_LARGE:
raise _write_application_exception(ctx, oprot, "sayHelloWith", ex_code=TApplicationExceptionType.RESPONSE_TOO_LARGE, message=e.message)
else:
raise e
class _whatDoYouSay(FProcessorFunction):
def __init__(self, handler, lock):
super(_whatDoYouSay, self).__init__(handler, lock)
async def process(self, ctx, iprot, oprot):
args = whatDoYouSay_args()
args.read(iprot)
iprot.readMessageEnd()
result = whatDoYouSay_result()
try:
ret = self._handler([ctx, args.messageArgs])
if inspect.iscoroutine(ret):
ret = await ret
result.success = ret
except TApplicationException as ex:
async with self._lock:
_write_application_exception(ctx, oprot, "whatDoYouSay", exception=ex)
return
except Exception as e:
async with self._lock:
_write_application_exception(ctx, oprot, "whatDoYouSay", ex_code=TApplicationExceptionType.INTERNAL_ERROR, message=str(e))
raise
async with self._lock:
try:
oprot.write_response_headers(ctx)
oprot.writeMessageBegin('whatDoYouSay', TMessageType.REPLY, 0)
result.write(oprot)
oprot.writeMessageEnd()
oprot.get_transport().flush()
except TTransportException as e:
# catch a request too large error because the TMemoryOutputBuffer always throws that if too much data is written
if e.type == TTransportExceptionType.REQUEST_TOO_LARGE:
raise _write_application_exception(ctx, oprot, "whatDoYouSay", ex_code=TApplicationExceptionType.RESPONSE_TOO_LARGE, message=e.message)
else:
raise e
class _sayAgain(FProcessorFunction):
def __init__(self, handler, lock):
super(_sayAgain, self).__init__(handler, lock)
async def process(self, ctx, iprot, oprot):
args = sayAgain_args()
args.read(iprot)
iprot.readMessageEnd()
result = sayAgain_result()
try:
ret = self._handler([ctx, args.messageResult])
if inspect.iscoroutine(ret):
ret = await ret
result.success = ret
except TApplicationException as ex:
async with self._lock:
_write_application_exception(ctx, oprot, "sayAgain", exception=ex)
return
except Exception as e:
async with self._lock:
_write_application_exception(ctx, oprot, "sayAgain", ex_code=TApplicationExceptionType.INTERNAL_ERROR, message=str(e))
raise
async with self._lock:
try:
oprot.write_response_headers(ctx)
oprot.writeMessageBegin('sayAgain', TMessageType.REPLY, 0)
result.write(oprot)
oprot.writeMessageEnd()
oprot.get_transport().flush()
except TTransportException as e:
# catch a request too large error because the TMemoryOutputBuffer always throws that if too much data is written
if e.type == TTransportExceptionType.REQUEST_TOO_LARGE:
raise _write_application_exception(ctx, oprot, "sayAgain", ex_code=TApplicationExceptionType.RESPONSE_TOO_LARGE, message=e.message)
else:
raise e
def _write_application_exception(ctx, oprot, method, ex_code=None, message=None, exception=None):
if exception is not None:
x = exception
else:
x = TApplicationException(type=ex_code, message=message)
oprot.write_response_headers(ctx)
oprot.writeMessageBegin(method, TMessageType.EXCEPTION, 0)
x.write(oprot)
oprot.writeMessageEnd()
oprot.get_transport().flush()
return x
class Ping_args(object):
def read(self, iprot):
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
self.validate()
def write(self, oprot):
self.validate()
oprot.writeStructBegin('Ping_args')
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class Ping_result(object):
def read(self, iprot):
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
self.validate()
def write(self, oprot):
self.validate()
oprot.writeStructBegin('Ping_result')
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class blah_args(object):
"""
Attributes:
- num
- Str
- event
"""
def __init__(self, num=None, Str=None, event=None):
self.num = num
self.Str = Str
self.event = event
def read(self, iprot):
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
self.num = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.Str = iprot.readString()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRUCT:
self.event = Event()
self.event.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
self.validate()
def write(self, oprot):
self.validate()
oprot.writeStructBegin('blah_args')
if self.num is not None:
oprot.writeFieldBegin('num', TType.I32, 1)
oprot.writeI32(self.num)
oprot.writeFieldEnd()
if self.Str is not None:
oprot.writeFieldBegin('Str', TType.STRING, 2)
oprot.writeString(self.Str)
oprot.writeFieldEnd()
if self.event is not None:
oprot.writeFieldBegin('event', TType.STRUCT, 3)
self.event.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(make_hashable(self.num))
value = (value * 31) ^ hash(make_hashable(self.Str))
value = (value * 31) ^ hash(make_hashable(self.event))
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class blah_result(object):
"""
Attributes:
- success
- awe
- api
"""
def __init__(self, success=None, awe=None, api=None):
self.success = success
self.awe = awe
self.api = api
def read(self, iprot):
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.I64:
self.success = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.awe = AwesomeException()
self.awe.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.api = actual_base.python.ttypes.api_exception()
self.api.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
self.validate()
def write(self, oprot):
self.validate()
oprot.writeStructBegin('blah_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.I64, 0)
oprot.writeI64(self.success)
oprot.writeFieldEnd()
if self.awe is not None:
oprot.writeFieldBegin('awe', TType.STRUCT, 1)
self.awe.write(oprot)
oprot.writeFieldEnd()
if self.api is not None:
oprot.writeFieldBegin('api', TType.STRUCT, 2)
self.api.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(make_hashable(self.success))
value = (value * 31) ^ hash(make_hashable(self.awe))
value = (value * 31) ^ hash(make_hashable(self.api))
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class oneWay_args(object):
"""
Attributes:
- id
- req
"""
def __init__(self, id=None, req=None):
self.id = id
self.req = req
def read(self, iprot):
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.id = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.MAP:
self.req = {}
(_, _, elem56) = iprot.readMapBegin()
for _ in range(elem56):
elem58 = iprot.readI32()
elem57 = iprot.readString()
self.req[elem58] = elem57
iprot.readMapEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
self.validate()
def write(self, oprot):
self.validate()
oprot.writeStructBegin('oneWay_args')
if self.id is not None:
oprot.writeFieldBegin('id', TType.I64, 1)
oprot.writeI64(self.id)
oprot.writeFieldEnd()
if self.req is not None:
oprot.writeFieldBegin('req', TType.MAP, 2)
oprot.writeMapBegin(TType.I32, TType.STRING, len(self.req))
for elem60, elem59 in self.req.items():
oprot.writeI32(elem60)
oprot.writeString(elem59)
oprot.writeMapEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(make_hashable(self.id))
value = (value * 31) ^ hash(make_hashable(self.req))
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class bin_method_args(object):
"""
Attributes:
- bin
- Str
"""
def __init__(self, bin=None, Str=None):
self.bin = bin
self.Str = Str
def read(self, iprot):
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.bin = iprot.readBinary()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.Str = iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
self.validate()
def write(self, oprot):
self.validate()
oprot.writeStructBegin('bin_method_args')
if self.bin is not None:
oprot.writeFieldBegin('bin', TType.STRING, 1)
oprot.writeBinary(self.bin)
oprot.writeFieldEnd()
if self.Str is not None:
oprot.writeFieldBegin('Str', TType.STRING, 2)
oprot.writeString(self.Str)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(make_hashable(self.bin))
value = (value * 31) ^ hash(make_hashable(self.Str))
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class bin_method_result(object):
"""
Attributes:
- success
- api
"""
def __init__(self, success=None, api=None):
self.success = success
self.api = api
def read(self, iprot):
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readBinary()
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.api = actual_base.python.ttypes.api_exception()
self.api.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
self.validate()
def write(self, oprot):
self.validate()
oprot.writeStructBegin('bin_method_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeBinary(self.success)
oprot.writeFieldEnd()
if self.api is not None:
oprot.writeFieldBegin('api', TType.STRUCT, 1)
self.api.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(make_hashable(self.success))
value = (value * 31) ^ hash(make_hashable(self.api))
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class param_modifiers_args(object):
"""
Attributes:
- opt_num
- default_num
- req_num
"""
def __init__(self, opt_num=None, default_num=None, req_num=None):
self.opt_num = opt_num
self.default_num = default_num
self.req_num = req_num
def read(self, iprot):
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
self.opt_num = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I32:
self.default_num = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.I32:
self.req_num = iprot.readI32()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
self.validate()
def write(self, oprot):
self.validate()
oprot.writeStructBegin('param_modifiers_args')
if self.opt_num is not None:
oprot.writeFieldBegin('opt_num', TType.I32, 1)
oprot.writeI32(self.opt_num)
oprot.writeFieldEnd()
if self.default_num is not None:
oprot.writeFieldBegin('default_num', TType.I32, 2)
oprot.writeI32(self.default_num)
oprot.writeFieldEnd()
if self.req_num is not None:
oprot.writeFieldBegin('req_num', TType.I32, 3)
oprot.writeI32(self.req_num)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.req_num is None:
raise TProtocol.TProtocolException(type=TProtocol.TProtocolException.INVALID_DATA, message='Required field \'req_num\' is not present in struct \'param_modifiers_args\'')
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(make_hashable(self.opt_num))
value = (value * 31) ^ hash(make_hashable(self.default_num))
value = (value * 31) ^ hash(make_hashable(self.req_num))
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class param_modifiers_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None):
self.success = success
def read(self, iprot):
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.I64:
self.success = iprot.readI64()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
self.validate()
def write(self, oprot):
self.validate()
oprot.writeStructBegin('param_modifiers_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.I64, 0)
oprot.writeI64(self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(make_hashable(self.success))
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class underlying_types_test_args(object):
"""
Attributes:
- list_type
- set_type
"""
def __init__(self, list_type=None, set_type=None):
self.list_type = list_type
self.set_type = set_type
def read(self, iprot):
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.LIST:
self.list_type = []
(_, elem61) = iprot.readListBegin()
for _ in range(elem61):
elem62 = iprot.readI64()
self.list_type.append(elem62)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.SET:
self.set_type = set()
(_, elem63) = iprot.readSetBegin()
for _ in range(elem63):
elem64 = iprot.readI64()
self.set_type.add(elem64)
iprot.readSetEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
self.validate()
def write(self, oprot):
self.validate()
oprot.writeStructBegin('underlying_types_test_args')
if self.list_type is not None:
oprot.writeFieldBegin('list_type', TType.LIST, 1)
oprot.writeListBegin(TType.I64, len(self.list_type))
for elem65 in self.list_type:
oprot.writeI64(elem65)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.set_type is not None:
oprot.writeFieldBegin('set_type', TType.SET, 2)
oprot.writeSetBegin(TType.I64, len(self.set_type))
for elem66 in self.set_type:
oprot.writeI64(elem66)
oprot.writeSetEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(make_hashable(self.list_type))
value = (value * 31) ^ hash(make_hashable(self.set_type))
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class underlying_types_test_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None):
self.success = success
def read(self, iprot):
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.LIST:
self.success = []
(_, elem67) = iprot.readListBegin()
for _ in range(elem67):
elem68 = iprot.readI64()
self.success.append(elem68)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
self.validate()
def write(self, oprot):
self.validate()
oprot.writeStructBegin('underlying_types_test_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.LIST, 0)
oprot.writeListBegin(TType.I64, len(self.success))
for elem69 in self.success:
oprot.writeI64(elem69)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(make_hashable(self.success))
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getThing_args(object):
def read(self, iprot):
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
self.validate()
def write(self, oprot):
self.validate()
oprot.writeStructBegin('getThing_args')
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getThing_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None):
self.success = success
def read(self, iprot):
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = validStructs.ttypes.Thing()
self.success.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
self.validate()
def write(self, oprot):
self.validate()
oprot.writeStructBegin('getThing_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(make_hashable(self.success))
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getMyInt_args(object):
def read(self, iprot):
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
self.validate()
def write(self, oprot):
self.validate()
oprot.writeStructBegin('getMyInt_args')
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getMyInt_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None):
self.success = success
def read(self, iprot):
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.I32:
self.success = iprot.readI32()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
self.validate()
def write(self, oprot):
self.validate()
oprot.writeStructBegin('getMyInt_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.I32, 0)
oprot.writeI32(self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(make_hashable(self.success))
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class use_subdir_struct_args(object):
"""
Attributes:
- a
"""
def __init__(self, a=None):
self.a = a
def read(self, iprot):
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.a = subdir_include.ttypes.A()
self.a.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
self.validate()
def write(self, oprot):
self.validate()
oprot.writeStructBegin('use_subdir_struct_args')
if self.a is not None:
oprot.writeFieldBegin('a', TType.STRUCT, 1)
self.a.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(make_hashable(self.a))
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class use_subdir_struct_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None):
self.success = success
def read(self, iprot):
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = subdir_include.ttypes.A()
self.success.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
self.validate()
def write(self, oprot):
self.validate()
oprot.writeStructBegin('use_subdir_struct_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(make_hashable(self.success))
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class sayHelloWith_args(object):
"""
Attributes:
- newMessage
"""
def __init__(self, newMessage=None):
self.newMessage = newMessage
def read(self, iprot):
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.newMessage = iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
self.validate()
def write(self, oprot):
self.validate()
oprot.writeStructBegin('sayHelloWith_args')
if self.newMessage is not None:
oprot.writeFieldBegin('newMessage', TType.STRING, 1)
oprot.writeString(self.newMessage)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(make_hashable(self.newMessage))
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class sayHelloWith_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None):
self.success = success
def read(self, iprot):
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
self.validate()
def write(self, oprot):
self.validate()
oprot.writeStructBegin('sayHelloWith_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(make_hashable(self.success))
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class whatDoYouSay_args(object):
"""
Attributes:
- messageArgs
"""
def __init__(self, messageArgs=None):
self.messageArgs = messageArgs
def read(self, iprot):
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.messageArgs = iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
self.validate()
def write(self, oprot):
self.validate()
oprot.writeStructBegin('whatDoYouSay_args')
if self.messageArgs is not None:
oprot.writeFieldBegin('messageArgs', TType.STRING, 1)
oprot.writeString(self.messageArgs)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(make_hashable(self.messageArgs))
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class whatDoYouSay_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None):
self.success = success
def read(self, iprot):
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
self.validate()
def write(self, oprot):
self.validate()
oprot.writeStructBegin('whatDoYouSay_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(make_hashable(self.success))
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class sayAgain_args(object):
"""
Attributes:
- messageResult
"""
def __init__(self, messageResult=None):
self.messageResult = messageResult
def read(self, iprot):
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.messageResult = iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
self.validate()
def write(self, oprot):
self.validate()
oprot.writeStructBegin('sayAgain_args')
if self.messageResult is not None:
oprot.writeFieldBegin('messageResult', TType.STRING, 1)
oprot.writeString(self.messageResult)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(make_hashable(self.messageResult))
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class sayAgain_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None):
self.success = success
def read(self, iprot):
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
self.validate()
def write(self, oprot):
self.validate()
oprot.writeStructBegin('sayAgain_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(make_hashable(self.success))
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
| 34.538745 | 182 | 0.584651 |
4a23cd56716eef949f34d3e0f2b017f24c0aa5ce | 937 | py | Python | MyFTP_Server/modules/Config_Read.py | AlanProject/day08 | 8d7e0ebcfa5f6939253a99bf99cf7cd4f80a7ed3 | [
"Apache-2.0"
] | null | null | null | MyFTP_Server/modules/Config_Read.py | AlanProject/day08 | 8d7e0ebcfa5f6939253a99bf99cf7cd4f80a7ed3 | [
"Apache-2.0"
] | null | null | null | MyFTP_Server/modules/Config_Read.py | AlanProject/day08 | 8d7e0ebcfa5f6939253a99bf99cf7cd4f80a7ed3 | [
"Apache-2.0"
] | null | null | null | #-*- coding:utf-8 -*-
#!/usr/bin/env python
import os,sys
import ConfigParser
class ConfigRead(object):
def __init__(self):
self.base_dir=os.path.dirname(os.path.dirname(os.path.abspath(__file__)))+'/config/'
self.config_file = self.base_dir+'MyFTP.conf'
self.config = ConfigParser.ConfigParser()
def server_info(self):
self.config.read(self.config_file)
self.server_address=self.config.get('server','server_address')
self.server_port = self.config.get('server','server_port')
self.MyFTP_address = (self.server_address,int(self.server_port))
return self.MyFTP_address
def user_dir(self,user_name):
self.user_name = user_name
self.config.read(self.config_file)
self.user_path=self.config.get('user',self.user_name)
return self.user_path.strip("\'")
if __name__ == '__main__':
test = ConfigRead()
print test.user_dir('Alan') | 40.73913 | 92 | 0.681964 |
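# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original record above): the INI layout
# that ConfigRead appears to expect, inferred from its get() calls -- a
# [server] section with server_address/server_port and a [user] section that
# maps user names to quoted directory paths. The concrete values and the
# in-memory ConfigParser demo below are assumptions for illustration only;
# the real config/MyFTP.conf may differ.
import ConfigParser
import StringIO

_sample_conf = """
[server]
server_address = 127.0.0.1
server_port = 2121

[user]
Alan = '/home/Alan/ftp'
"""

_cfg = ConfigParser.ConfigParser()
_cfg.readfp(StringIO.StringIO(_sample_conf))
# Mirrors ConfigRead.server_info(): build the (address, port) tuple
print (_cfg.get('server', 'server_address'), int(_cfg.get('server', 'server_port')))
# Mirrors ConfigRead.user_dir('Alan'): strip the surrounding quotes from the path
print _cfg.get('user', 'Alan').strip("\'")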
4a23cecb126abf7d94d4e32df1c5790a457241ee | 8,299 | py | Python | docs/conf.py | TrentScholl/aquarium-wise-controller | 49ef5088e3e9f8d03f436388843038b138efb066 | [
"MIT"
] | 5 | 2015-07-27T13:20:54.000Z | 2021-01-17T12:22:54.000Z | docs/conf.py | TrentScholl/aquarium-wise-controller | 49ef5088e3e9f8d03f436388843038b138efb066 | [
"MIT"
] | null | null | null | docs/conf.py | TrentScholl/aquarium-wise-controller | 49ef5088e3e9f8d03f436388843038b138efb066 | [
"MIT"
] | 1 | 2015-08-02T18:05:00.000Z | 2015-08-02T18:05:00.000Z | # -*- coding: utf-8 -*-
#
# Aquarium Wise Controller documentation build configuration file, created by
# sphinx-quickstart on Wed Sep 03 15:35:59 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Aquarium Wise Controller'
copyright = u'2014, Trent Scholl'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'AquariumWiseControllerdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'AquariumWiseController.tex', u'Aquarium Wise Controller Documentation',
u'Trent Scholl', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'aquariumwisecontroller', u'Aquarium Wise Controller Documentation',
[u'Trent Scholl'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'AquariumWiseController', u'Aquarium Wise Controller Documentation',
u'Trent Scholl', 'AquariumWiseController', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| 32.042471 | 84 | 0.721774 |
4a23cecc1ce5bfdda1b43b97fb670ecbaf4d6ca7 | 1,668 | py | Python | kf_book/nonlinear_internal.py | VladPodilnyk/Kalman-and-Bayesian-Filters-in-Python | 1b47e2c27ea0a007e8c36d9f6d453c47402b3615 | [
"CC-BY-4.0"
] | 12,315 | 2015-01-07T12:06:26.000Z | 2022-03-31T11:03:03.000Z | kf_book/nonlinear_internal.py | VladPodilnyk/Kalman-and-Bayesian-Filters-in-Python | 1b47e2c27ea0a007e8c36d9f6d453c47402b3615 | [
"CC-BY-4.0"
] | 356 | 2015-01-09T18:53:02.000Z | 2022-03-14T20:21:06.000Z | kf_book/nonlinear_internal.py | VladPodilnyk/Kalman-and-Bayesian-Filters-in-Python | 1b47e2c27ea0a007e8c36d9f6d453c47402b3615 | [
"CC-BY-4.0"
] | 3,419 | 2015-01-02T20:47:47.000Z | 2022-03-31T18:07:33.000Z | # -*- coding: utf-8 -*-
"""Copyright 2015 Roger R Labbe Jr.
Code supporting the book
Kalman and Bayesian Filters in Python
https://github.com/rlabbe/Kalman-and-Bayesian-Filters-in-Python
This is licensed under an MIT license. See the LICENSE.txt file
for more information.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import filterpy.stats as stats
import matplotlib.pyplot as plt
import numpy as np
def plot1():
P = np.array([[6, 2.5], [2.5, .6]])
stats.plot_covariance_ellipse((10, 2), P, facecolor='g', alpha=0.2)
def plot2():
P = np.array([[6, 2.5], [2.5, .6]])
circle1=plt.Circle((10,0),3,color='#004080',fill=False,linewidth=4, alpha=.7)
ax = plt.gca()
ax.add_artist(circle1)
plt.xlim(0,10)
plt.ylim(0,3)
P = np.array([[6, 2.5], [2.5, .6]])
stats.plot_covariance_ellipse((10, 2), P, facecolor='g', alpha=0.2)
def plot3():
P = np.array([[6, 2.5], [2.5, .6]])
circle1=plt.Circle((10,0),3,color='#004080',fill=False,linewidth=4, alpha=.7)
ax = plt.gca()
ax.add_artist(circle1)
plt.xlim(0,10)
plt.ylim(0,3)
plt.axhline(3, ls='--')
stats.plot_covariance_ellipse((10, 2), P, facecolor='g', alpha=0.2)
def plot4():
P = np.array([[6, 2.5], [2.5, .6]])
circle1=plt.Circle((10,0),3,color='#004080',fill=False,linewidth=4, alpha=.7)
ax = plt.gca()
ax.add_artist(circle1)
plt.xlim(0,10)
plt.ylim(0,3)
plt.axhline(3, ls='--')
stats.plot_covariance_ellipse((10, 2), P, facecolor='g', alpha=0.2)
plt.scatter([11.4], [2.65],s=200)
plt.scatter([12], [3], c='r', s=200)
plt.show() | 27.8 | 81 | 0.615707 |
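# A minimal usage sketch for the plotting helpers above. The import path is an
# assumption based on this record's location (kf_book/nonlinear_internal.py);
# the notebooks that consume these helpers may import them differently.
import matplotlib.pyplot as plt
from kf_book import nonlinear_internal

nonlinear_internal.plot1()      # covariance ellipse only
plt.show()
nonlinear_internal.plot3()      # ellipse, radius circle, and dashed measurement line
plt.show()
nonlinear_internal.plot4()      # same scene plus sample points; calls plt.show() itself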
4a23ceeb6cc88b54a90ec0d9eba1dbc93b2acf50 | 1,195 | py | Python | Python/AdvancedHelloWorld.py | proxyanon/Hello-world | 686de4360ed9811b87e9e18cfedb862693ed913f | [
"MIT"
] | null | null | null | Python/AdvancedHelloWorld.py | proxyanon/Hello-world | 686de4360ed9811b87e9e18cfedb862693ed913f | [
"MIT"
] | 1 | 2019-10-29T17:48:30.000Z | 2019-10-29T17:48:30.000Z | Python/AdvancedHelloWorld.py | proxyanon/Hello-world | 686de4360ed9811b87e9e18cfedb862693ed913f | [
"MIT"
] | 1 | 2019-10-24T03:13:42.000Z | 2019-10-24T03:13:42.000Z | import datetime
now = datetime.datetime.now()
print ('Hello, World!')
person = input('What is your name? ')
print ('Hello,', person)
age = int(input('How old are you? '))
year = now.year - age
print ('So, you were born in', year)
the_password = 1234
password = ['1234', '4321', '1342']
while True:
my_password = int(input('To continue your Python experience, please input the password: '))
if my_password == the_password:
print ('Success!')
break
else:
print ('The password is incorrect. Please try again.')
print ('Python is starting...')
def countdown(t):
import time
while t >= 0:
time.sleep(1)
t -= 1
print ('Welcome,', person, '...')
# test
t = 5
while True:
choice = input('What would you like to do? Your choices are Open Browser | Talk to a therapist | Power Off » ')
if choice == 'Open Browser':
print ('Sorry, I cannot do that.')
elif choice == 'Talk to a therapist':
print ('I am a computer, do you expect me to solve your problems?')
elif choice == 'Power Off':
print ('Powering Off...')
break
else:
print ('That is not an option.')
| 27.159091 | 115 | 0.598326 |
4a23cefb84698d07daa06cdee2c63ea959beca27 | 251 | py | Python | pagey/defaults.py | Flaconi/slackbot-pagey | 4bbac86965cf13c27fcd5410bbc6c397e321b873 | [
"MIT"
] | null | null | null | pagey/defaults.py | Flaconi/slackbot-pagey | 4bbac86965cf13c27fcd5410bbc6c397e321b873 | [
"MIT"
] | 1 | 2021-06-21T10:47:02.000Z | 2021-06-21T10:47:02.000Z | pagey/defaults.py | Flaconi/slackbot-pagey | 4bbac86965cf13c27fcd5410bbc6c397e321b873 | [
"MIT"
] | null | null | null | """This file defines all module wide default values."""
# Credits
DEF_BIN = "pagey"
DEF_NAME = "pagey"
DEF_DESC = "Pagey is a Pagerduty slack bot."
DEF_VERSION = "0.0.3"
DEF_AUTHOR = "cytopia"
DEF_GITHUB = "https://github.com/Flaconi/slackbot-pagey"
| 25.1 | 56 | 0.721116 |
4a23cf81dbbea9ee923fa6ea2c20ceeec866e87a | 49 | py | Python | action_tracker/__init__.py | jham20x6/turbo-octo-broccoli | cb510d8b37b4870be7c0ce3117b12aba15bfbc39 | [
"MIT"
] | null | null | null | action_tracker/__init__.py | jham20x6/turbo-octo-broccoli | cb510d8b37b4870be7c0ce3117b12aba15bfbc39 | [
"MIT"
] | null | null | null | action_tracker/__init__.py | jham20x6/turbo-octo-broccoli | cb510d8b37b4870be7c0ce3117b12aba15bfbc39 | [
"MIT"
] | null | null | null | from action_tracker.tracker import ActionTracker
| 24.5 | 48 | 0.897959 |
4a23cf84b7310bf83607057332abc5af43680f08 | 4,525 | py | Python | vendor/github.com/elastic/beats/packetbeat/tests/system/test_0041_memcache_udp_bin_basic.py | psadmin-io/ps-tuxbeat | 1ccaa89ea9c4fc16ac572e26179719e5c16e9685 | [
"Apache-2.0"
] | 115 | 2015-11-30T13:42:27.000Z | 2021-10-14T15:41:31.000Z | vendor/github.com/elastic/beats/packetbeat/tests/system/test_0041_memcache_udp_bin_basic.py | psadmin-io/ps-tuxbeat | 1ccaa89ea9c4fc16ac572e26179719e5c16e9685 | [
"Apache-2.0"
] | 35 | 2016-02-14T18:47:27.000Z | 2021-01-17T20:12:38.000Z | vendor/github.com/elastic/beats/packetbeat/tests/system/test_0041_memcache_udp_bin_basic.py | psadmin-io/ps-tuxbeat | 1ccaa89ea9c4fc16ac572e26179719e5c16e9685 | [
"Apache-2.0"
] | 50 | 2015-11-28T18:43:19.000Z | 2021-07-21T03:58:10.000Z | from packetbeat import BaseTest
import pprint
pp = pprint.PrettyPrinter()
def pretty(*k, **kw):
pp.pprint(*k, **kw)
class Test(BaseTest):
def _run(self, pcap):
self.render_config_template(
memcache_udp_transaction_timeout=10
)
self.run_packetbeat(pcap=pcap,
extra_args=['-waitstop', '1'],
debug_selectors=["memcache", "udp", "publish"])
objs = self.read_output()
self.assert_common(objs)
return objs
def assert_common(self, objs):
# check client ip are not mixed up
assert all(o['client_ip'] == '192.168.188.37' for o in objs)
assert all(o['ip'] == '192.168.188.38' for o in objs)
assert all(o['port'] == 11211 for o in objs)
# check transport layer always udp
assert all(o['type'] == 'memcache' for o in objs)
assert all(o['transport'] == 'udp' for o in objs)
assert all(o['memcache.protocol_type'] == 'binary' for o in objs)
def test_store(self):
objs = self._run("memcache/memcache_bin_udp_single_store.pcap")
# all transactions succeed
assert all(o['status'] == 'OK' for o in objs)
assert len(objs) == 1
set = objs[0]
assert set['memcache.request.opcode'] == 'SetQ'
assert set['memcache.request.command'] == 'set'
assert set['memcache.request.type'] == 'Store'
assert set['memcache.request.keys'] == ['test_key']
assert set['memcache.request.exptime'] == 0
assert set['memcache.request.bytes'] == 1024
assert set['memcache.request.count_values'] == 1
assert set['memcache.request.quiet']
def test_multi_store(self):
objs = self._run("memcache/memcache_bin_udp_multi_store.pcap")
# all transactions succeed
assert all(o['status'] == 'OK' for o in objs)
assert len(objs) == 3
sets = dict([(o['memcache.request.keys'][0], o) for o in objs[0:3]])
assert sorted(sets.keys()) == ['k1', 'k2', 'k3']
assert sets['k1']['memcache.request.bytes'] == 100
assert sets['k2']['memcache.request.bytes'] == 20
assert sets['k3']['memcache.request.bytes'] == 10
assert all(o['memcache.request.opcode'] == 'SetQ'
for o in sets.itervalues())
assert all(o['memcache.request.quiet']
for o in sets.itervalues())
def test_delete(self):
objs = self._run('memcache/memcache_bin_udp_delete.pcap')
# all transactions succeed
assert all(o['status'] == 'OK' for o in objs)
assert len(objs) == 2
delete, set = sorted(objs, key=lambda x: x['memcache.request.command'])
# check set command
assert set['memcache.request.opcode'] == 'SetQ'
assert set['memcache.request.command'] == 'set'
assert set['memcache.request.type'] == 'Store'
assert set['memcache.request.keys'] == ['key']
assert set['memcache.request.quiet']
# check delete command
assert delete['memcache.request.opcode'] == 'DeleteQ'
assert delete['memcache.request.command'] == 'delete'
assert delete['memcache.request.type'] == 'Delete'
assert delete['memcache.request.keys'] == ['key']
assert delete['memcache.request.quiet']
def test_counter_ops(self):
objs = self._run('memcache/memcache_bin_udp_counter_ops.pcap')
# all transactions succeed
assert all(o['status'] == 'OK' for o in objs)
assert len(objs) == 3
dec, inc, set = sorted(objs,
key=lambda x: x['memcache.request.command'])
# check set command
assert set['memcache.request.opcode'] == 'SetQ'
assert set['memcache.request.command'] == 'set'
assert set['memcache.request.type'] == 'Store'
assert set['memcache.request.keys'] == ['cnt']
assert set['memcache.request.quiet']
assert inc['memcache.request.opcode'] == 'IncrementQ'
assert inc['memcache.request.command'] == 'incr'
assert inc['memcache.request.delta'] == 2
assert inc['memcache.request.keys'] == ['cnt']
assert inc['memcache.request.quiet']
assert dec['memcache.request.opcode'] == 'DecrementQ'
assert dec['memcache.request.command'] == 'decr'
assert dec['memcache.request.delta'] == 5
assert dec['memcache.request.keys'] == ['cnt']
assert dec['memcache.request.quiet']
| 36.788618 | 79 | 0.59116 |
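(Added illustrative sketch, not part of the dataset row above: the memcache test file just listed subclasses packetbeat's BaseTest and drives pcap replays through render_config_template(), run_packetbeat() and read_output(). A minimal hedged example of that pattern follows; the class name, pcap path and asserted values are hypothetical, and render_config_template() is assumed to accept no arguments.)

# Hypothetical minimal packetbeat system test following the BaseTest
# pattern shown in the file above. Not taken from the dataset.
from packetbeat import BaseTest


class ExampleTest(BaseTest):
    def test_single_transaction(self):
        # render the default config and replay a (hypothetical) capture file
        self.render_config_template()
        self.run_packetbeat(pcap="memcache/example_single_request.pcap",
                            debug_selectors=["memcache", "publish"])
        objs = self.read_output()
        # every published transaction should be a memcache event
        assert all(o['type'] == 'memcache' for o in objs)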
4a23cf953d2b0b4fda62e586a01c7302f3a3e6c6 | 454 | py | Python | data/scripts/templates/object/static/item/shared_item_tech_console_sectional_c.py | obi-two/GameServer | 7d37024e2291a97d49522610cd8f1dbe5666afc2 | [
"MIT"
] | 20 | 2015-02-23T15:11:56.000Z | 2022-03-18T20:56:48.000Z | data/scripts/templates/object/static/item/shared_item_tech_console_sectional_c.py | apathyboy/swganh | 665128efe9154611dec4cb5efc61d246dd095984 | [
"MIT"
] | null | null | null | data/scripts/templates/object/static/item/shared_item_tech_console_sectional_c.py | apathyboy/swganh | 665128efe9154611dec4cb5efc61d246dd095984 | [
"MIT"
] | 20 | 2015-04-04T16:35:59.000Z | 2022-03-24T14:54:37.000Z | #### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Static()
result.template = "object/static/item/shared_item_tech_console_sectional_c.iff"
result.attribute_template_id = -1
result.stfName("obj_n","unknown_object")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result | 26.705882 | 80 | 0.731278 |
4a23d164f62bb14cf29531ee1dca9897c1e4cc7f | 20,230 | py | Python | test/functional/test_framework/test_framework.py | xlkulu/SHTcoin | e8e50991583a3530211025cd48191bf274798555 | [
"MIT"
] | 1 | 2019-08-25T13:07:02.000Z | 2019-08-25T13:07:02.000Z | test/functional/test_framework/test_framework.py | xlkulu/SHTcoin | e8e50991583a3530211025cd48191bf274798555 | [
"MIT"
] | 1 | 2019-08-25T13:11:54.000Z | 2019-08-25T13:11:54.000Z | test/functional/test_framework/test_framework.py | xlkulu/SHTcoin | e8e50991583a3530211025cd48191bf274798555 | [
"MIT"
] | 1 | 2019-08-25T13:07:25.000Z | 2019-08-25T13:07:25.000Z | #!/usr/bin/env python3
# Copyright (c) 2014-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Base class for RPC testing."""
from enum import Enum
import logging
import optparse
import os
import pdb
import shutil
import sys
import tempfile
import time
from .authproxy import JSONRPCException
from . import coverage
from .test_node import TestNode
from .util import (
MAX_NODES,
PortSeed,
assert_equal,
check_json_precision,
connect_nodes_bi,
disconnect_nodes,
get_datadir_path,
initialize_datadir,
p2p_port,
set_node_times,
sync_blocks,
sync_mempools,
)
class TestStatus(Enum):
PASSED = 1
FAILED = 2
SKIPPED = 3
TEST_EXIT_PASSED = 0
TEST_EXIT_FAILED = 1
TEST_EXIT_SKIPPED = 77
class BitcoinTestFramework():
"""Base class for a shtcoin test script.
Individual shtcoin test scripts should subclass this class and override the set_test_params() and run_test() methods.
Individual tests can also override the following methods to customize the test setup:
- add_options()
- setup_chain()
- setup_network()
- setup_nodes()
The __init__() and main() methods should not be overridden.
This class also contains various public and private helper methods."""
def __init__(self):
"""Sets test framework defaults. Do not override this method. Instead, override the set_test_params() method"""
self.setup_clean_chain = False
self.nodes = []
self.mocktime = 0
self.supports_cli = False
self.set_test_params()
assert hasattr(self, "num_nodes"), "Test must set self.num_nodes in set_test_params()"
def main(self):
"""Main function. This should not be overridden by the subclass test scripts."""
parser = optparse.OptionParser(usage="%prog [options]")
parser.add_option("--nocleanup", dest="nocleanup", default=False, action="store_true",
help="Leave shtcoinds and test.* datadir on exit or error")
parser.add_option("--noshutdown", dest="noshutdown", default=False, action="store_true",
help="Don't stop shtcoinds after the test execution")
parser.add_option("--srcdir", dest="srcdir", default=os.path.normpath(os.path.dirname(os.path.realpath(__file__)) + "/../../../src"),
help="Source directory containing shtcoind/shtcoin-cli (default: %default)")
parser.add_option("--cachedir", dest="cachedir", default=os.path.normpath(os.path.dirname(os.path.realpath(__file__)) + "/../../cache"),
help="Directory for caching pregenerated datadirs")
parser.add_option("--tmpdir", dest="tmpdir", help="Root directory for datadirs")
parser.add_option("-l", "--loglevel", dest="loglevel", default="INFO",
help="log events at this level and higher to the console. Can be set to DEBUG, INFO, WARNING, ERROR or CRITICAL. Passing --loglevel DEBUG will output all logs to console. Note that logs at all levels are always written to the test_framework.log file in the temporary test directory.")
parser.add_option("--tracerpc", dest="trace_rpc", default=False, action="store_true",
help="Print out all RPC calls as they are made")
parser.add_option("--portseed", dest="port_seed", default=os.getpid(), type='int',
help="The seed to use for assigning port numbers (default: current process id)")
parser.add_option("--coveragedir", dest="coveragedir",
help="Write tested RPC commands into this directory")
parser.add_option("--configfile", dest="configfile",
help="Location of the test framework config file")
parser.add_option("--pdbonfailure", dest="pdbonfailure", default=False, action="store_true",
help="Attach a python debugger if test fails")
parser.add_option("--usecli", dest="usecli", default=False, action="store_true",
help="use shtcoin-cli instead of RPC for all commands")
self.add_options(parser)
(self.options, self.args) = parser.parse_args()
PortSeed.n = self.options.port_seed
os.environ['PATH'] = self.options.srcdir + ":" + self.options.srcdir + "/qt:" + os.environ['PATH']
check_json_precision()
self.options.cachedir = os.path.abspath(self.options.cachedir)
# Set up temp directory and start logging
if self.options.tmpdir:
self.options.tmpdir = os.path.abspath(self.options.tmpdir)
os.makedirs(self.options.tmpdir, exist_ok=False)
else:
self.options.tmpdir = tempfile.mkdtemp(prefix="test")
self._start_logging()
success = TestStatus.FAILED
try:
if self.options.usecli and not self.supports_cli:
raise SkipTest("--usecli specified but test does not support using CLI")
self.setup_chain()
self.setup_network()
self.run_test()
success = TestStatus.PASSED
except JSONRPCException as e:
self.log.exception("JSONRPC error")
except SkipTest as e:
self.log.warning("Test Skipped: %s" % e.message)
success = TestStatus.SKIPPED
except AssertionError as e:
self.log.exception("Assertion failed")
except KeyError as e:
self.log.exception("Key error")
except Exception as e:
self.log.exception("Unexpected exception caught during testing")
except KeyboardInterrupt as e:
self.log.warning("Exiting after keyboard interrupt")
if success == TestStatus.FAILED and self.options.pdbonfailure:
print("Testcase failed. Attaching python debugger. Enter ? for help")
pdb.set_trace()
if not self.options.noshutdown:
self.log.info("Stopping nodes")
if self.nodes:
self.stop_nodes()
else:
for node in self.nodes:
node.cleanup_on_exit = False
self.log.info("Note: shtcoinds were not stopped and may still be running")
if not self.options.nocleanup and not self.options.noshutdown and success != TestStatus.FAILED:
self.log.info("Cleaning up")
shutil.rmtree(self.options.tmpdir)
else:
self.log.warning("Not cleaning up dir %s" % self.options.tmpdir)
if success == TestStatus.PASSED:
self.log.info("Tests successful")
exit_code = TEST_EXIT_PASSED
elif success == TestStatus.SKIPPED:
self.log.info("Test skipped")
exit_code = TEST_EXIT_SKIPPED
else:
self.log.error("Test failed. Test logging available at %s/test_framework.log", self.options.tmpdir)
self.log.error("Hint: Call {} '{}' to consolidate all logs".format(os.path.normpath(os.path.dirname(os.path.realpath(__file__)) + "/../combine_logs.py"), self.options.tmpdir))
exit_code = TEST_EXIT_FAILED
logging.shutdown()
sys.exit(exit_code)
# Methods to override in subclass test scripts.
def set_test_params(self):
"""Tests must this method to change default values for number of nodes, topology, etc"""
raise NotImplementedError
def add_options(self, parser):
"""Override this method to add command-line options to the test"""
pass
def setup_chain(self):
"""Override this method to customize blockchain setup"""
self.log.info("Initializing test directory " + self.options.tmpdir)
if self.setup_clean_chain:
self._initialize_chain_clean()
else:
self._initialize_chain()
def setup_network(self):
"""Override this method to customize test network topology"""
self.setup_nodes()
# Connect the nodes as a "chain". This allows us
# to split the network between nodes 1 and 2 to get
# two halves that can work on competing chains.
for i in range(self.num_nodes - 1):
connect_nodes_bi(self.nodes, i, i + 1)
self.sync_all()
def setup_nodes(self):
"""Override this method to customize test node setup"""
extra_args = None
if hasattr(self, "extra_args"):
extra_args = self.extra_args
self.add_nodes(self.num_nodes, extra_args)
self.start_nodes()
def run_test(self):
"""Tests must override this method to define test logic"""
raise NotImplementedError
# Public helper methods. These can be accessed by the subclass test scripts.
def add_nodes(self, num_nodes, extra_args=None, rpchost=None, timewait=None, binary=None):
"""Instantiate TestNode objects"""
if extra_args is None:
extra_args = [[]] * num_nodes
if binary is None:
binary = [None] * num_nodes
assert_equal(len(extra_args), num_nodes)
assert_equal(len(binary), num_nodes)
for i in range(num_nodes):
self.nodes.append(TestNode(i, self.options.tmpdir, extra_args[i], rpchost, timewait=timewait, binary=binary[i], stderr=None, mocktime=self.mocktime, coverage_dir=self.options.coveragedir, use_cli=self.options.usecli))
def start_node(self, i, *args, **kwargs):
"""Start a shtcoind"""
node = self.nodes[i]
node.start(*args, **kwargs)
node.wait_for_rpc_connection()
if self.options.coveragedir is not None:
coverage.write_all_rpc_commands(self.options.coveragedir, node.rpc)
def start_nodes(self, extra_args=None, *args, **kwargs):
"""Start multiple shtcoinds"""
if extra_args is None:
extra_args = [None] * self.num_nodes
assert_equal(len(extra_args), self.num_nodes)
try:
for i, node in enumerate(self.nodes):
node.start(extra_args[i], *args, **kwargs)
for node in self.nodes:
node.wait_for_rpc_connection()
except:
# If one node failed to start, stop the others
self.stop_nodes()
raise
if self.options.coveragedir is not None:
for node in self.nodes:
coverage.write_all_rpc_commands(self.options.coveragedir, node.rpc)
def stop_node(self, i):
"""Stop a shtcoind test node"""
self.nodes[i].stop_node()
self.nodes[i].wait_until_stopped()
def stop_nodes(self):
"""Stop multiple shtcoind test nodes"""
for node in self.nodes:
# Issue RPC to stop nodes
node.stop_node()
for node in self.nodes:
# Wait for nodes to stop
node.wait_until_stopped()
def restart_node(self, i, extra_args=None):
"""Stop and start a test node"""
self.stop_node(i)
self.start_node(i, extra_args)
def assert_start_raises_init_error(self, i, extra_args=None, expected_msg=None, *args, **kwargs):
with tempfile.SpooledTemporaryFile(max_size=2**16) as log_stderr:
try:
self.start_node(i, extra_args, stderr=log_stderr, *args, **kwargs)
self.stop_node(i)
except Exception as e:
assert 'shtcoind exited' in str(e) # node must have shutdown
self.nodes[i].running = False
self.nodes[i].process = None
if expected_msg is not None:
log_stderr.seek(0)
stderr = log_stderr.read().decode('utf-8')
if expected_msg not in stderr:
raise AssertionError("Expected error \"" + expected_msg + "\" not found in:\n" + stderr)
else:
if expected_msg is None:
assert_msg = "shtcoind should have exited with an error"
else:
assert_msg = "shtcoind should have exited with expected error " + expected_msg
raise AssertionError(assert_msg)
def wait_for_node_exit(self, i, timeout):
self.nodes[i].process.wait(timeout)
def split_network(self):
"""
Split the network of four nodes into nodes 0/1 and 2/3.
"""
disconnect_nodes(self.nodes[1], 2)
disconnect_nodes(self.nodes[2], 1)
self.sync_all([self.nodes[:2], self.nodes[2:]])
def join_network(self):
"""
Join the (previously split) network halves together.
"""
connect_nodes_bi(self.nodes, 1, 2)
self.sync_all()
def sync_all(self, node_groups=None):
if not node_groups:
node_groups = [self.nodes]
for group in node_groups:
sync_blocks(group)
sync_mempools(group)
def enable_mocktime(self):
"""Enable mocktime for the script.
mocktime may be needed for scripts that use the cached version of the
blockchain. If the cached version of the blockchain is used without
mocktime then the mempools will not sync due to IBD.
        For backward compatibility of the python scripts with previous
versions of the cache, this helper function sets mocktime to Jan 1,
2014 + (201 * 10 * 60)"""
self.mocktime = 1388534400 + (201 * 10 * 60)
def disable_mocktime(self):
self.mocktime = 0
# Private helper methods. These should not be accessed by the subclass test scripts.
def _start_logging(self):
# Add logger and logging handlers
self.log = logging.getLogger('TestFramework')
self.log.setLevel(logging.DEBUG)
# Create file handler to log all messages
fh = logging.FileHandler(self.options.tmpdir + '/test_framework.log')
fh.setLevel(logging.DEBUG)
# Create console handler to log messages to stderr. By default this logs only error messages, but can be configured with --loglevel.
ch = logging.StreamHandler(sys.stdout)
# User can provide log level as a number or string (eg DEBUG). loglevel was caught as a string, so try to convert it to an int
ll = int(self.options.loglevel) if self.options.loglevel.isdigit() else self.options.loglevel.upper()
ch.setLevel(ll)
# Format logs the same as bitcoind's debug.log with microprecision (so log files can be concatenated and sorted)
formatter = logging.Formatter(fmt='%(asctime)s.%(msecs)03d000 %(name)s (%(levelname)s): %(message)s', datefmt='%Y-%m-%d %H:%M:%S')
formatter.converter = time.gmtime
fh.setFormatter(formatter)
ch.setFormatter(formatter)
# add the handlers to the logger
self.log.addHandler(fh)
self.log.addHandler(ch)
if self.options.trace_rpc:
rpc_logger = logging.getLogger("BitcoinRPC")
rpc_logger.setLevel(logging.DEBUG)
rpc_handler = logging.StreamHandler(sys.stdout)
rpc_handler.setLevel(logging.DEBUG)
rpc_logger.addHandler(rpc_handler)
def _initialize_chain(self):
"""Initialize a pre-mined blockchain for use by the test.
Create a cache of a 200-block-long chain (with wallet) for MAX_NODES
Afterward, create num_nodes copies from the cache."""
assert self.num_nodes <= MAX_NODES
create_cache = False
for i in range(MAX_NODES):
if not os.path.isdir(get_datadir_path(self.options.cachedir, i)):
create_cache = True
break
if create_cache:
self.log.debug("Creating data directories from cached datadir")
# find and delete old cache directories if any exist
for i in range(MAX_NODES):
if os.path.isdir(get_datadir_path(self.options.cachedir, i)):
shutil.rmtree(get_datadir_path(self.options.cachedir, i))
# Create cache directories, run bitcoinds:
for i in range(MAX_NODES):
datadir = initialize_datadir(self.options.cachedir, i)
args = [os.getenv("SHTCOIND", "shtcoind"), "-server", "-keypool=1", "-datadir=" + datadir, "-discover=0"]
if i > 0:
args.append("-connect=127.0.0.1:" + str(p2p_port(0)))
self.nodes.append(TestNode(i, self.options.cachedir, extra_args=[], rpchost=None, timewait=None, binary=None, stderr=None, mocktime=self.mocktime, coverage_dir=None))
self.nodes[i].args = args
self.start_node(i)
# Wait for RPC connections to be ready
for node in self.nodes:
node.wait_for_rpc_connection()
# Create a 200-block-long chain; each of the 4 first nodes
# gets 25 mature blocks and 25 immature.
# Note: To preserve compatibility with older versions of
# initialize_chain, only 4 nodes will generate coins.
#
# blocks are created with timestamps 10 minutes apart
# starting from 2010 minutes in the past
self.enable_mocktime()
block_time = self.mocktime - (201 * 10 * 60)
for i in range(2):
for peer in range(4):
for j in range(25):
set_node_times(self.nodes, block_time)
self.nodes[peer].generate(1)
block_time += 10 * 60
# Must sync before next peer starts generating blocks
sync_blocks(self.nodes)
# Shut them down, and clean up cache directories:
self.stop_nodes()
self.nodes = []
self.disable_mocktime()
def cache_path(n, *paths):
return os.path.join(get_datadir_path(self.options.cachedir, n), "regtest", *paths)
for i in range(MAX_NODES):
for entry in os.listdir(cache_path(i)):
if entry not in ['wallets', 'chainstate', 'blocks']:
os.remove(cache_path(i, entry))
for i in range(self.num_nodes):
from_dir = get_datadir_path(self.options.cachedir, i)
to_dir = get_datadir_path(self.options.tmpdir, i)
shutil.copytree(from_dir, to_dir)
initialize_datadir(self.options.tmpdir, i) # Overwrite port/rpcport in bitcoin.conf
def _initialize_chain_clean(self):
"""Initialize empty blockchain for use by the test.
Create an empty blockchain and num_nodes wallets.
Useful if a test case wants complete control over initialization."""
for i in range(self.num_nodes):
initialize_datadir(self.options.tmpdir, i)
class ComparisonTestFramework(BitcoinTestFramework):
"""Test framework for doing p2p comparison testing
Sets up some shtcoind binaries:
- 1 binary: test binary
- 2 binaries: 1 test binary, 1 ref binary
- n>2 binaries: 1 test binary, n-1 ref binaries"""
def set_test_params(self):
self.num_nodes = 2
self.setup_clean_chain = True
def add_options(self, parser):
parser.add_option("--testbinary", dest="testbinary",
default=os.getenv("SHTCOIND", "shtcoind"),
help="shtcoind binary to test")
parser.add_option("--refbinary", dest="refbinary",
default=os.getenv("SHTCOIND", "shtcoind"),
help="shtcoind binary to use for reference nodes (if any)")
def setup_network(self):
extra_args = [['-whitelist=127.0.0.1']] * self.num_nodes
if hasattr(self, "extra_args"):
extra_args = self.extra_args
self.add_nodes(self.num_nodes, extra_args,
binary=[self.options.testbinary] +
[self.options.refbinary] * (self.num_nodes - 1))
self.start_nodes()
class SkipTest(Exception):
"""This exception is raised to skip a test"""
def __init__(self, message):
self.message = message
| 42.058212 | 310 | 0.621256 |
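(Added illustrative sketch, not part of the dataset row above: the test_framework.py file just listed states that individual test scripts subclass BitcoinTestFramework and override set_test_params() and run_test(). A minimal hedged example of such a subclass follows; the import path, class name and generated block count are assumptions, not taken from the dataset.)

# Hypothetical minimal functional test built on the framework above.
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal


class ExampleTest(BitcoinTestFramework):
    def set_test_params(self):
        # num_nodes is required by the framework's assert in __init__
        self.num_nodes = 2
        self.setup_clean_chain = True

    def run_test(self):
        # mine a block on node 0 and check both nodes agree on the tip
        self.nodes[0].generate(1)
        self.sync_all()
        assert_equal(self.nodes[0].getbestblockhash(),
                     self.nodes[1].getbestblockhash())


if __name__ == '__main__':
    ExampleTest().main()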
4a23d1662e1f0ae8a94f483a659eda226fe5ecfd | 753 | py | Python | quarty/setup.py | quartictech/platform | d9f535f21d38fa836ec691d86ea2b2c610320757 | [
"BSD-3-Clause"
] | 3 | 2017-11-07T21:49:39.000Z | 2019-08-08T20:59:02.000Z | quarty/setup.py | quartictech/platform | d9f535f21d38fa836ec691d86ea2b2c610320757 | [
"BSD-3-Clause"
] | 1 | 2021-06-05T08:00:37.000Z | 2021-06-05T08:00:37.000Z | quarty/setup.py | quartictech/platform | d9f535f21d38fa836ec691d86ea2b2c610320757 | [
"BSD-3-Clause"
] | 2 | 2018-01-09T10:49:48.000Z | 2019-11-27T09:18:17.000Z | import os
from setuptools import setup, find_packages
test_deps = [
"mock==2.0.0",
"pytest==3.0.7",
"pylint==1.7.1",
"pylint-quotes==0.1.5",
"pytest-runner==2.11.1",
"setuptools-lint==0.5.2"
]
setup(name="quarty",
version=os.environ.get("CIRCLE_BUILD_NUM", "0"),
description="Quartic runner",
author="Quartic Technologies",
author_email="[email protected]",
url="https://www.quartic.io",
packages=find_packages("src"),
package_dir={"":"src"},
install_requires=[
"aiohttp==2.2.5",
"PyYAML==3.12",
"requests==2.18.4",
"datadiff==2.0.0",
],
extras_require={
"test": test_deps,
},
tests_require=test_deps
)
| 22.818182 | 54 | 0.559097 |