repo_name (stringlengths 7-94) | repo_path (stringlengths 4-237) | repo_head_hexsha (stringlengths 40-40) | content (stringlengths 10-680k) | apis (stringlengths 2-840k)
---|---|---|---|---|
OnoArnaldo/PythonApiYoutube | api_youtube.py | 8507eac234cd3d05a223db3beebd10412505bcf8 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
import sys
import json
import urllib2
import codecs
BASE_DIR = os.path.dirname(__file__)
BASE_URL = 'https://www.googleapis.com/youtube/v3/'
API_CHANNELS = 'channels'
API_PLAYLIST = 'playlistItems'
API_KEY = 'YOUR KEY'
CHANNELS = [
'videosimprovaveis',
'nerdologia',
'Kurzgesagt',
'1veritasium',
'minutephysics',
'xadrezverbal',
'estevaoslow',
'Vsauce',
'braincraftvideo',
'CienciaTodoDia',
]
class UrlEncoder(object):
API_URL = ''
def __init__(self, **kwargs):
self.args = kwargs
def _parms(self):
args = []
for k, v in self.args.items():
args.append(k + '=' + str(v))
return '&'.join(args)
def get(self):
parms = '?' + self._parms() if len(self.args) else ''
return self.API_URL + parms
def set(self, key, value):
if value:
self.args[key] = value
class ApiChannel(object):
URL = BASE_URL + API_CHANNELS
FILE_NAME = os.path.join(BASE_DIR, 'channels.json')
def __init__(self, channels):
self.encoder = self.build_encoder(API_KEY)
self.channels = channels
def run(self):
data = self.generate_data()
self.save(data)
def generate_data(self):
encoder = self.encoder
ret = {}
for channel in self.channels:
encoder.set('forUsername', channel)
data = self.get_data(encoder.get())
ret[channel] = self.get_playlist_id(data)
return ret
def get_data(self, url):
url = urllib2.urlopen(url)
data = url.read()
return json.loads(data)
def get_playlist_id(self, data):
items = data.get('items')
content = items[0].get('contentDetails')
playlists = content.get('relatedPlaylists')
return playlists.get('uploads')
def save(self, data):
with open(self.FILE_NAME, 'w') as f:
f.write(json.dumps(data))
f.close()
def build_encoder(self, api_key):
UrlEncoder.API_URL = self.URL
encoder = UrlEncoder()
encoder.set('key', api_key)
encoder.set('part', 'contentDetails')
return encoder
class ApiPlayList(object):
URL = BASE_URL + API_PLAYLIST
FILE_NAME = os.path.join(BASE_DIR, 'playlist.txt')
def __init__(self, channels):
self.channels = channels
self.encoder = self.build_encoder(API_KEY)
def run(self):
data = self.generate_data()
self.save(data)
def generate_data(self):
encoder = self.encoder
channels = self.channels
ret = []
for key in channels:
encoder.set('playlistId', channels[key])
data = self.get_data(encoder.get())
ret += [[key] + self.get_info(data)]
return ret
def get_info(self, data):
items = data.get('items')
snippet = items[0].get('snippet')
title = snippet.get('title')
published_at = snippet.get('publishedAt')
description = snippet.get('description')
return [title, published_at, description]
def save(self, data):
fname = os.path.join(BASE_DIR, 'last_update.txt')
with codecs.open(fname, 'w', encoding='utf-8') as f:
for key, title, published_at, description in sorted(data, key=lambda x: x[2]):
f.write('{}: {} - {}\n'.format(published_at[:10], key, title))
f.close()
def get_data(self, url):
url = urllib2.urlopen(url)
data = url.read()
return json.loads(data)
def build_encoder(self, api_key):
UrlEncoder.API_URL = self.URL
encoder = UrlEncoder()
encoder.set('key', api_key)
encoder.set('part', 'snippet')
encoder.set('maxResults', '1')
return encoder
@classmethod
def import_channels(cls, fname):
with open(fname, 'r') as f:
text = f.read()
f.close()
return json.loads(text)
if __name__ == '__main__':
args = sys.argv[1:]
if '-channel' in args:
channel = ApiChannel(CHANNELS)
channel.run()
if '-playlist' in args:
channels = ApiPlayList.import_channels(ApiChannel.FILE_NAME)
play_list = ApiPlayList(channels)
play_list.run()
| [((10, 11, 10, 36), 'os.path.dirname', 'os.path.dirname', ({(10, 27, 10, 35): '__file__'}, {}), '(__file__)', False, 'import os\n'), ((54, 16, 54, 55), 'os.path.join', 'os.path.join', ({(54, 29, 54, 37): 'BASE_DIR', (54, 39, 54, 54): '"""channels.json"""'}, {}), "(BASE_DIR, 'channels.json')", False, 'import os\n'), ((103, 16, 103, 54), 'os.path.join', 'os.path.join', ({(103, 29, 103, 37): 'BASE_DIR', (103, 39, 103, 53): '"""playlist.txt"""'}, {}), "(BASE_DIR, 'playlist.txt')", False, 'import os\n'), ((76, 14, 76, 34), 'urllib2.urlopen', 'urllib2.urlopen', ({(76, 30, 76, 33): 'url'}, {}), '(url)', False, 'import urllib2\n'), ((78, 15, 78, 31), 'json.loads', 'json.loads', ({(78, 26, 78, 30): 'data'}, {}), '(data)', False, 'import json\n'), ((135, 16, 135, 57), 'os.path.join', 'os.path.join', ({(135, 29, 135, 37): 'BASE_DIR', (135, 39, 135, 56): '"""last_update.txt"""'}, {}), "(BASE_DIR, 'last_update.txt')", False, 'import os\n'), ((142, 14, 142, 34), 'urllib2.urlopen', 'urllib2.urlopen', ({(142, 30, 142, 33): 'url'}, {}), '(url)', False, 'import urllib2\n'), ((144, 15, 144, 31), 'json.loads', 'json.loads', ({(144, 26, 144, 30): 'data'}, {}), '(data)', False, 'import json\n'), ((162, 15, 162, 31), 'json.loads', 'json.loads', ({(162, 26, 162, 30): 'text'}, {}), '(text)', False, 'import json\n'), ((136, 13, 136, 54), 'codecs.open', 'codecs.open', (), '', False, 'import codecs\n'), ((88, 20, 88, 36), 'json.dumps', 'json.dumps', ({(88, 31, 88, 35): 'data'}, {}), '(data)', False, 'import json\n')] |
be4r/ssh-miner-detection | python_and_ebpf/train.py | 47003db1d9f72ae44d5a27e92d0109d5111bec35 | #!/usr/bin/env python3
from sklearn.tree import DecisionTreeClassifier
import pickle
import numpy as np
no = [b'runc:[2:INIT]', b'containerssh-ag', b'apt',b'dpkg']
class model:
def __init__(self):
self.d = DecisionTreeClassifier()
def load(self, filename = 'model.p'):
try:
f = open(filename, 'rb')
self.d = pickle.load(f)
			if type(self.d) != DecisionTreeClassifier:
				self.d = None
f.close()
except:
return
def save(self, filename = 'model.p'):
f = open(filename, 'wb')
pickle.dump(self.d, f)
f.close()
def fit(self, x, y):
self.d.fit(x, y)
def predict(self, x):
return self.d.predict(x)
def accuracy(self, y_pred, y_ref):
return sum(np.array(y_pred) == np.array(y_ref)) / len(y_ref)
	def f1(self, y_pred, y_ref):
		# Count true/false positives/negatives, then apply F1 = TP / (TP + (FP + FN) / 2)
		tp = np.sum((np.array(y_pred) == 1) * (np.array(y_ref) == 1))
		tn = np.sum((np.array(y_pred) == 0) * (np.array(y_ref) == 0))
		fp = np.sum((np.array(y_pred) == 1) * (np.array(y_ref) == 0))
		fn = np.sum((np.array(y_pred) == 0) * (np.array(y_ref) == 1))
		return tp / (tp + (fp + fn) / 2)
def ngrams(array, size = 25, overlacing = False):
	res = [array[i:i+size] for i in range(0, len(array)//size * size, 1 if overlacing else size)]
	if sum([len(i) == size for i in res]) != len(res):
		raise Exception('wtf')
	return res
def gen_train(a, is_miner):
#x1,y1,x2,y2 = train_test_split(x,y,0.05)
x = ngrams(a)
y = [1 if is_miner else 0,] * len(x)
return x,y
def train_on_logs(*filenames, is_miner):
classifier = model()
#classifier.load()
x, y = [], []
for id, filename in enumerate(filenames):
l = []
with open(filename, 'r') as f:
l = eval(''.join(f))
codes = []
for i in l:
if i[0] not in no:
codes.append(i[1])
x_, y_ = gen_train(codes, is_miner[id])
x.append(x_)
y.append(y_)
print(x,y)
#classifier.fit(x,y)
#classifier.save()
def predict_on_logs(*filenames, is_miner):
classifier = model()
classifier.load()
x, y = [], []
for id, filename in enumerate(filenames):
l = []
with open(filename, 'r') as f:
l = eval(''.join(f))
codes = []
for i in l:
if i[0] not in no:
codes.append(i[1])
x_, y_ = gen_train(codes, is_miner[id])
x.append(x_)
y.append(y_)
y_pred = classifier.predict(x)
print("Accuracy: ", classifier.accuracy(y_pred, y))
print("F1: ",classifier.f1(y_pred, y))
def predict_on_trace(trace, A = 0.9):
classifier = model()
classifier.load()
	codes = []
	for i in trace:
		if i[0] not in no:
			codes.append(i[1])
	x = ngrams(codes)
y_pred = classifier.predict(x)
acc = sum(np.array(y_pred)) / len(y_pred)
return acc > A
| [((12, 11, 12, 35), 'sklearn.tree.DecisionTreeClassifier', 'DecisionTreeClassifier', ({}, {}), '()', False, 'from sklearn.tree import DecisionTreeClassifier\n'), ((26, 2, 26, 24), 'pickle.dump', 'pickle.dump', ({(26, 14, 26, 20): 'self.d', (26, 22, 26, 23): 'f'}, {}), '(self.d, f)', False, 'import pickle\n'), ((17, 12, 17, 26), 'pickle.load', 'pickle.load', ({(17, 24, 17, 25): 'f'}, {}), '(f)', False, 'import pickle\n'), ((111, 11, 111, 27), 'numpy.array', 'np.array', ({(111, 20, 111, 26): 'y_pred'}, {}), '(y_pred)', True, 'import numpy as np\n'), ((39, 8, 39, 24), 'numpy.array', 'np.array', ({(39, 17, 39, 23): 'y_pred'}, {}), '(y_pred)', True, 'import numpy as np\n'), ((39, 35, 39, 50), 'numpy.array', 'np.array', ({(39, 44, 39, 49): 'y_ref'}, {}), '(y_ref)', True, 'import numpy as np\n'), ((40, 8, 40, 24), 'numpy.array', 'np.array', ({(40, 17, 40, 23): 'y_pred'}, {}), '(y_pred)', True, 'import numpy as np\n'), ((40, 35, 40, 50), 'numpy.array', 'np.array', ({(40, 44, 40, 49): 'y_ref'}, {}), '(y_ref)', True, 'import numpy as np\n'), ((41, 8, 41, 24), 'numpy.array', 'np.array', ({(41, 17, 41, 23): 'y_pred'}, {}), '(y_pred)', True, 'import numpy as np\n'), ((41, 35, 41, 50), 'numpy.array', 'np.array', ({(41, 44, 41, 49): 'y_ref'}, {}), '(y_ref)', True, 'import numpy as np\n'), ((42, 8, 42, 24), 'numpy.array', 'np.array', ({(42, 17, 42, 23): 'y_pred'}, {}), '(y_pred)', True, 'import numpy as np\n'), ((42, 35, 42, 50), 'numpy.array', 'np.array', ({(42, 44, 42, 49): 'y_ref'}, {}), '(y_ref)', True, 'import numpy as np\n'), ((36, 13, 36, 29), 'numpy.array', 'np.array', ({(36, 22, 36, 28): 'y_pred'}, {}), '(y_pred)', True, 'import numpy as np\n'), ((36, 33, 36, 48), 'numpy.array', 'np.array', ({(36, 42, 36, 47): 'y_ref'}, {}), '(y_ref)', True, 'import numpy as np\n')] |
slinderman/pyhsmm-spiketrains | data/parse_hipp_data.py | 462d8d2c59bd2e7c39d20d624bd8b289a31baaa2 | import os
import numpy as np
from scipy.io import loadmat
data = loadmat("data/hipp_2dtrack_a/smJun03p2.dat")
N = 49
data = reshape(data, 3, length(data)/3);
data = data';
size(data) % 43799-by-3
fclose(fid);
% sampling time
Ts = 0.0333;
duration = size(data,1) * Ts; % in second
Tmax = data(end, 3);
Tmin = data(1,3);
time_edges = [Tmin: 0.25: Tmax]; % 250 ms per bin
% interpolated rat's position in time bins
Rat_pos = interp1(data(:, 3), [data(:, 1), data(:, 2)], time_edges');
vel = abs(diff(Rat_pos, 1, 1 )); % row difference
vel = [vel(1, :); vel];
% 250 ms
rat_vel = 4 * sqrt(vel(:, 1).^2 + vel(:, 2).^2); % unit: cm/s
vel_ind = find(rat_vel >= 10); % RUN velocity threshold
% using RUN only
T = length(vel_ind);
% using Run + pause periods
T = length(time_edges);
AllSpikeData = zeros(C,T);
for i=1:C
str = ['Cell_num' num2str(i)];
fid = fopen(str, 'r');
cell_data = fscanf(fid, '%f');
cell_data = reshape(cell_data, 3, length(cell_data)/3)';
spike_time = cell_data(:, 3);
spike_pos = cell_data(:, 1:2);
[spike_time_count, bin] = histc(spike_time, time_edges); % column vector
% if analyzing the RUN period only uncomment this
% spike_time_count = spike_time_count(vel_ind);
AllSpikeData(i, :) = spike_time_count';
fclose(fid);
end | [] |
tomdev/repokid | repokid/tests/test_roledata.py | e1a4839290bafccfaa304d87bbdeae85b9dc80aa | # Copyright 2017 Netflix, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
from mock import patch
import repokid.utils.roledata
from repokid.role import Role
from repokid.tests.test_repokid_cli import ROLE_POLICIES, ROLES
AARDVARK_DATA = {
"arn:aws:iam::123456789012:role/all_services_used": [
{"lastAuthenticated": int(time.time()) * 1000,
"serviceNamespace": "iam"},
{"lastAuthenticated": int(time.time()) * 1000,
"serviceNamespace": "s3"}],
"arn:aws:iam::123456789012:role/unused_ec2": [
{"lastAuthenticated": int(time.time()) * 1000,
"serviceNamespace": "iam"},
{"lastAuthenticated": 0,
"serviceNamespace": "ec2"}],
"arn:aws:iam::123456789012:role/young_role": [
{"lastAuthenticated": int(time.time()) * 1000,
"serviceNamespace": "iam"},
{"lastAuthenticated": int(time.time()) * 1000,
"serviceNamespace": "s3"}]
}
class TestRoledata(object):
@patch('repokid.utils.roledata.expand_policy')
@patch('repokid.utils.roledata.get_actions_from_statement')
@patch('repokid.utils.roledata.all_permissions')
def test_get_role_permissions(self, mock_all_permissions, mock_get_actions_from_statement, mock_expand_policy):
test_role = Role(ROLES[0])
all_permissions = ['ec2:associateaddress', 'ec2:attachvolume', 'ec2:createsnapshot', 's3:createbucket',
's3:getobject']
# empty policy to make sure we get the latest
test_role.policies = [{'Policy': ROLE_POLICIES['all_services_used']}, {'Policy': ROLE_POLICIES['unused_ec2']}]
mock_all_permissions.return_value = all_permissions
mock_get_actions_from_statement.return_value = ROLE_POLICIES['unused_ec2']['ec2_perms']
mock_expand_policy.return_value = ROLE_POLICIES['unused_ec2']['ec2_perms']
permissions = repokid.utils.roledata._get_role_permissions(test_role)
assert permissions == set(ROLE_POLICIES['unused_ec2']['ec2_perms'])
@patch('repokid.hooks.call_hooks')
def test_get_repoable_permissions(self, mock_call_hooks):
minimum_age = 1
repokid.utils.roledata.IAM_ACCESS_ADVISOR_UNSUPPORTED_SERVICES = ['service_2']
repokid.utils.roledata.IAM_ACCESS_ADVISOR_UNSUPPORTED_ACTIONS = ['service_1:action_3', 'service_1:action_4']
hooks = {}
permissions = ['service_1:action_1', 'service_1:action_2', 'service_1:action_3', 'service_1:action_4',
'service_2:action_1', 'service_3:action_1', 'service_3:action_2', 'service_4:action_1',
'service_4:action_2']
# service_1 and service_2 both used more than a day ago, which is outside of our test filter for age
aa_data = [{'serviceNamespace': 'service_1', 'lastAuthenticated': (time.time() - 90000) * 1000},
{'serviceNamespace': 'service_2', 'lastAuthenticated': (time.time() - 90000) * 1000},
{'serviceNamespace': 'service_3', 'lastAuthenticated': time.time() * 1000}]
no_repo_permissions = {'service_4:action_1': time.time() - 1, 'service_4:action_2': time.time() + 1000}
repoable_decision = repokid.utils.roledata.RepoablePermissionDecision()
repoable_decision.repoable = True
mock_call_hooks.return_value = {'potentially_repoable_permissions': {'service_1:action_1': repoable_decision,
'service_1:action_2': repoable_decision,
'service_4:action_1': repoable_decision}}
repoable_permissions = repokid.utils.roledata._get_repoable_permissions(None, 'test_name', permissions, aa_data,
no_repo_permissions, minimum_age,
hooks)
# service_1:action_3 and action_4 are unsupported actions, service_2 is an unsupported service, service_3
# was used too recently, service_4 action 2 is in no_repo_permissions and not expired
assert repoable_permissions == set(['service_1:action_1', 'service_1:action_2', 'service_4:action_1'])
@patch('repokid.utils.roledata._get_role_permissions')
@patch('repokid.utils.roledata._get_repoable_permissions')
@patch('repokid.hooks.call_hooks')
def test_calculate_repo_scores(self, mock_call_hooks, mock_get_repoable_permissions, mock_get_role_permissions):
roles = [Role(ROLES[0]), Role(ROLES[1]), Role(ROLES[2])]
roles[0].disqualified_by = []
roles[0].aa_data = 'some_aa_data'
# disqualified by a filter
roles[1].policies = [{'Policy': ROLE_POLICIES['unused_ec2']}]
roles[1].disqualified_by = ['some_filter']
roles[1].aa_data = 'some_aa_data'
# no AA data
roles[2].policies = [{'Policy': ROLE_POLICIES['all_services_used']}]
roles[2].disqualified_by = []
roles[2].aa_data = None
hooks = {}
mock_get_role_permissions.side_effect = [['iam:AddRoleToInstanceProfile', 'iam:AttachRolePolicy',
'ec2:AllocateHosts', 'ec2:AssociateAddress'],
['iam:AddRoleToInstanceProfile', 'iam:AttachRolePolicy'],
['iam:AddRoleToInstanceProfile', 'iam:AttachRolePolicy']]
mock_call_hooks.return_value = set(['iam:AddRoleToInstanceProfile', 'iam:AttachRolePolicy'])
mock_get_repoable_permissions.side_effect = [set(['iam:AddRoleToInstanceProfile', 'iam:AttachRolePolicy'])]
minimum_age = 90
repokid.utils.roledata._calculate_repo_scores(roles, minimum_age, hooks)
assert roles[0].repoable_permissions == 2
assert roles[0].repoable_services == ['iam']
assert roles[1].repoable_permissions == 0
assert roles[1].repoable_services == []
assert roles[2].repoable_permissions == 0
assert roles[2].repoable_services == []
def test_get_repoed_policy(self):
policies = ROLE_POLICIES['all_services_used']
repoable_permissions = set(['iam:addroletoinstanceprofile', 'iam:attachrolepolicy', 's3:createbucket'])
rewritten_policies, empty_policies = repokid.utils.roledata._get_repoed_policy(policies, repoable_permissions)
assert rewritten_policies == {'s3_perms': {'Version': '2012-10-17',
'Statement': [{'Action': ['s3:deletebucket'],
'Resource': ['*'],
'Effect': 'Allow'}]}}
assert empty_policies == ['iam_perms']
def test_find_newly_added_permissions(self):
old_policy = ROLE_POLICIES['all_services_used']
new_policy = ROLE_POLICIES['unused_ec2']
new_perms = repokid.utils.roledata.find_newly_added_permissions(old_policy, new_policy)
assert new_perms == set(['ec2:allocatehosts', 'ec2:associateaddress'])
def test_convert_repoable_perms_to_perms_and_services(self):
all_perms = ['a:j', 'a:k', 'b:l', 'c:m', 'c:n']
repoable_perms = ['b:l', 'c:m']
expected_repoed_services = ['b']
expected_repoed_permissions = ['c:m']
assert (repokid.utils.roledata._convert_repoable_perms_to_perms_and_services(all_perms, repoable_perms) ==
(expected_repoed_permissions, expected_repoed_services))
def test_convert_repoed_service_to_sorted_perms_and_services(self):
repoed_services = ['route53', 'ec2', 's3:abc', 'dynamodb:def', 'ses:ghi', 'ses:jkl']
expected_services = ['ec2', 'route53']
expected_permissions = ['dynamodb:def', 's3:abc', 'ses:ghi', 'ses:jkl']
assert repokid.utils.roledata._convert_repoed_service_to_sorted_perms_and_services(repoed_services) == (
expected_permissions, expected_services
)
def test_get_epoch_authenticated(self):
assert(repokid.utils.roledata._get_epoch_authenticated(1545787620000) == (1545787620, True))
assert(repokid.utils.roledata._get_epoch_authenticated(1545787620) == (1545787620, True))
assert(repokid.utils.roledata._get_epoch_authenticated(154578762) == (None, False))
def test_filter_scheduled_repoable_perms(self):
assert repokid.utils.roledata._filter_scheduled_repoable_perms(
['a:b', 'a:c', 'b:a'], ['a:c', 'b']) == ['a:c', 'b:a']
assert repokid.utils.roledata._filter_scheduled_repoable_perms(
['a:b', 'a:c', 'b:a'], ['a', 'b']) == ['a:b', 'a:c', 'b:a']
assert repokid.utils.roledata._filter_scheduled_repoable_perms(
['a:b', 'a:c', 'b:a'], ['a:b', 'a:c']) == ['a:b', 'a:c']
| [((45, 5, 45, 50), 'mock.patch', 'patch', ({(45, 11, 45, 49): '"""repokid.utils.roledata.expand_policy"""'}, {}), "('repokid.utils.roledata.expand_policy')", False, 'from mock import patch\n'), ((46, 5, 46, 63), 'mock.patch', 'patch', ({(46, 11, 46, 62): '"""repokid.utils.roledata.get_actions_from_statement"""'}, {}), "('repokid.utils.roledata.get_actions_from_statement')", False, 'from mock import patch\n'), ((47, 5, 47, 52), 'mock.patch', 'patch', ({(47, 11, 47, 51): '"""repokid.utils.roledata.all_permissions"""'}, {}), "('repokid.utils.roledata.all_permissions')", False, 'from mock import patch\n'), ((64, 5, 64, 38), 'mock.patch', 'patch', ({(64, 11, 64, 37): '"""repokid.hooks.call_hooks"""'}, {}), "('repokid.hooks.call_hooks')", False, 'from mock import patch\n'), ((97, 5, 97, 58), 'mock.patch', 'patch', ({(97, 11, 97, 57): '"""repokid.utils.roledata._get_role_permissions"""'}, {}), "('repokid.utils.roledata._get_role_permissions')", False, 'from mock import patch\n'), ((98, 5, 98, 62), 'mock.patch', 'patch', ({(98, 11, 98, 61): '"""repokid.utils.roledata._get_repoable_permissions"""'}, {}), "('repokid.utils.roledata._get_repoable_permissions')", False, 'from mock import patch\n'), ((99, 5, 99, 38), 'mock.patch', 'patch', ({(99, 11, 99, 37): '"""repokid.hooks.call_hooks"""'}, {}), "('repokid.hooks.call_hooks')", False, 'from mock import patch\n'), ((49, 20, 49, 34), 'repokid.role.Role', 'Role', ({(49, 25, 49, 33): 'ROLES[0]'}, {}), '(ROLES[0])', False, 'from repokid.role import Role\n'), ((101, 17, 101, 31), 'repokid.role.Role', 'Role', ({(101, 22, 101, 30): 'ROLES[0]'}, {}), '(ROLES[0])', False, 'from repokid.role import Role\n'), ((101, 33, 101, 47), 'repokid.role.Role', 'Role', ({(101, 38, 101, 46): 'ROLES[1]'}, {}), '(ROLES[1])', False, 'from repokid.role import Role\n'), ((101, 49, 101, 63), 'repokid.role.Role', 'Role', ({(101, 54, 101, 62): 'ROLES[2]'}, {}), '(ROLES[2])', False, 'from repokid.role import Role\n'), ((81, 53, 81, 64), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((81, 92, 81, 103), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((25, 34, 25, 45), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((27, 34, 27, 45), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((31, 34, 31, 45), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((37, 34, 37, 45), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((39, 34, 39, 45), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((79, 74, 79, 85), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((77, 75, 77, 86), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((78, 75, 78, 86), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n')] |
Matnay/KPIT_Deep_Learning | DL_Scripts/image_recognition.py | 14f3815fc2829db9bede86c31f23e721f6423f79 | import rospy
from sensor_msgs.msg import Image
from std_msgs.msg import String
from cv_bridge import CvBridge
import cv2
import numpy as np
import tensorflow as tf
import classify_image
class RosTensorFlow():
def __init__(self):
classify_image.maybe_download_and_extract()
self._session = tf.Session()
classify_image.create_graph()
self._cv_bridge = CvBridge()
self._sub = rospy.Subscriber('/usb_cam/image_raw', Image, self.callback, queue_size=1)
self._pub = rospy.Publisher('result', String, queue_size=1)
self.score_threshold = rospy.get_param('~score_threshold', 0.1)
self.use_top_k = rospy.get_param('~use_top_k', 5)
def callback(self, image_msg):
cv_image = self._cv_bridge.imgmsg_to_cv2(image_msg, "bgr8")
# copy from
# classify_image.py
image_data = cv2.imencode('.jpg', cv_image)[1].tostring()
# Creates graph from saved GraphDef.
softmax_tensor = self._session.graph.get_tensor_by_name('softmax:0')
predictions = self._session.run(
softmax_tensor, {'DecodeJpeg/contents:0': image_data})
predictions = np.squeeze(predictions)
# Creates node ID --> English string lookup.
node_lookup = classify_image.NodeLookup()
top_k = predictions.argsort()[-self.use_top_k:][::-1]
for node_id in top_k:
human_string = node_lookup.id_to_string(node_id)
score = predictions[node_id]
if score > self.score_threshold:
rospy.loginfo('%s (score = %.5f)' % (human_string, score))
self._pub.publish(human_string)
def main(self):
rospy.spin()
if __name__ == '__main__':
classify_image.setup_args()
rospy.init_node('rostensorflow')
tensor = RosTensorFlow()
tensor.main()
| [((47, 4, 47, 31), 'classify_image.setup_args', 'classify_image.setup_args', ({}, {}), '()', False, 'import classify_image\n'), ((48, 4, 48, 36), 'rospy.init_node', 'rospy.init_node', ({(48, 20, 48, 35): '"""rostensorflow"""'}, {}), "('rostensorflow')", False, 'import rospy\n'), ((13, 8, 13, 51), 'classify_image.maybe_download_and_extract', 'classify_image.maybe_download_and_extract', ({}, {}), '()', False, 'import classify_image\n'), ((14, 24, 14, 36), 'tensorflow.Session', 'tf.Session', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((15, 8, 15, 37), 'classify_image.create_graph', 'classify_image.create_graph', ({}, {}), '()', False, 'import classify_image\n'), ((16, 26, 16, 36), 'cv_bridge.CvBridge', 'CvBridge', ({}, {}), '()', False, 'from cv_bridge import CvBridge\n'), ((18, 20, 18, 94), 'rospy.Subscriber', 'rospy.Subscriber', (), '', False, 'import rospy\n'), ((19, 20, 19, 67), 'rospy.Publisher', 'rospy.Publisher', (), '', False, 'import rospy\n'), ((20, 31, 20, 71), 'rospy.get_param', 'rospy.get_param', ({(20, 47, 20, 65): '"""~score_threshold"""', (20, 67, 20, 70): '0.1'}, {}), "('~score_threshold', 0.1)", False, 'import rospy\n'), ((21, 25, 21, 57), 'rospy.get_param', 'rospy.get_param', ({(21, 41, 21, 53): '"""~use_top_k"""', (21, 55, 21, 56): '5'}, {}), "('~use_top_k', 5)", False, 'import rospy\n'), ((32, 22, 32, 45), 'numpy.squeeze', 'np.squeeze', ({(32, 33, 32, 44): 'predictions'}, {}), '(predictions)', True, 'import numpy as np\n'), ((34, 22, 34, 49), 'classify_image.NodeLookup', 'classify_image.NodeLookup', ({}, {}), '()', False, 'import classify_image\n'), ((44, 8, 44, 20), 'rospy.spin', 'rospy.spin', ({}, {}), '()', False, 'import rospy\n'), ((40, 16, 40, 74), 'rospy.loginfo', 'rospy.loginfo', ({(40, 30, 40, 73): "('%s (score = %.5f)' % (human_string, score))"}, {}), "('%s (score = %.5f)' % (human_string, score))", False, 'import rospy\n'), ((27, 21, 27, 51), 'cv2.imencode', 'cv2.imencode', ({(27, 34, 27, 40): '""".jpg"""', (27, 42, 27, 50): 'cv_image'}, {}), "('.jpg', cv_image)", False, 'import cv2\n')] |
aviskumar/speedo | plugins/grouputils.py | 758e8ac1fdeeb0b72c3a57742032ca5c79f0b2fa | # Copyright (C) 2020-2021 by TeamSpeedo@Github, < https://github.com/TeamSpeedo >.
#
# This file is part of < https://github.com/TeamSpeedo/FridayUserBot > project,
# and is released under the "GNU v3.0 License Agreement".
# Please see < https://github.com/TeamSpeedo/blob/master/LICENSE >
#
# All rights reserved.
import asyncio
import os
import time
from asyncio import sleep
from pyrogram.types import ChatPermissions
import pyrogram
from main_start.core.decorators import speedo_on_cmd
from main_start.helper_func.basic_helpers import (
edit_or_reply,
edit_or_send_as_file,
get_text,
get_user,
is_admin_or_owner,
)
from main_start.helper_func.logger_s import LogIt
from main_start.helper_func.plugin_helpers import (
convert_to_image,
convert_vid_to_vidnote,
generate_meme,
)
@speedo_on_cmd(
["silentpin"],
only_if_admin=True,
cmd_help={
"help": "Pin Message Without Sending Notification To Members!",
"example": "{ch}silentpin (reply to message)",
},
)
async def spin(client, message):
engine = message.Engine
    if not message.reply_to_message:
        await edit_or_reply(message, engine.get_string("REPLY_TO_PIN"))
        return
try:
await client.pin_chat_message(
message.chat.id,
message.reply_to_message.message_id,
disable_notification=True,
)
except BaseException as e:
await edit_or_reply(
message, engine.get_string("UNABLE_TO_PIN").format(e)
)
return
await edit_or_reply(message, engine.get_string("PINNED"))
@speedo_on_cmd(
["pinloud", "pin"],
only_if_admin=True,
cmd_help={
"help": "Pin Message With Sending Notification To Members!",
"example": "{ch}pin (reply to messages)",
},
)
async def lpin(client, message):
engine = message.Engine
    if not message.reply_to_message:
        await edit_or_reply(message, engine.get_string("REPLY_TO_PIN"))
        return
try:
await client.pin_chat_message(
message.chat.id, message.reply_to_message.message_id
)
except BaseException as e:
await edit_or_reply(
message, engine.get_string("UNABLE_TO_PIN").format(e)
)
return
await edit_or_reply(message, engine.get_string("PINNED"))
@speedo_on_cmd(
["unpin", "rmpins"],
only_if_admin=True,
cmd_help={"help": "Unpin All Pinned Messages!", "example": "{ch}rmpins"},
)
async def dpins(client, message):
engine = message.Engine
await client.unpin_all_chat_messages(message.chat.id)
await edit_or_reply(message, engine.get_string("UNPINNED"))
@speedo_on_cmd(
["adminlist", "admins"],
cmd_help={"help": "Get Adminlist Of Chat!", "example": "{ch}adminlist"},
)
async def midhunadmin(client, message):
engine = message.Engine
mentions = ""
starky = get_text(message) or message.chat.id
pablo = await edit_or_reply(message, engine.get_string("PROCESSING"))
try:
X = await client.get_chat_members(starky, filter="administrators")
ujwal = await client.get_chat(starky)
except BaseException as e:
await pablo.edit(engine.get_string("CANT_FETCH_ADMIN").format("Admins", e))
return
for midhun in X:
if not midhun.user.is_deleted:
link = f'✱ <a href="tg://user?id={midhun.user.id}">{midhun.user.first_name}</a>'
userid = f"<code>{midhun.user.id}</code>"
mentions += f"\n{link} {userid}"
holy = ujwal.username or ujwal.id
messag = f"""
<b>Admins in {ujwal.title} | {holy}</b>
{mentions}
"""
await edit_or_send_as_file(
messag,
pablo,
client,
f"`AdminList Of {holy}!`",
"admin-lookup-result",
"html",
)
@speedo_on_cmd(
["botlist", "bot"],
group_only=True,
cmd_help={"help": "Get List Of Bots In Chat!", "example": "{ch}botlist"},
)
async def bothub(client, message):
engine = message.Engine
buts = "**Bot List** \n\n"
starky = get_text(message) or message.chat.id
pablo = await edit_or_reply(message, engine.get_string("PROCESSING"))
try:
bots = await client.get_chat_members(starky, filter="bots")
except BaseException as e:
await pablo.edit(engine.get_string("CANT_FETCH_ADMIN").format("Bots", e))
return
for nos, ujwal in enumerate(bots, start=1):
buts += f"{nos}〉 [{ujwal.user.first_name}](tg://user?id={ujwal.user.id}) \n"
await pablo.edit(buts)
@speedo_on_cmd(
["zombies", "delusers"],
cmd_help={
"help": "Remove Deleted Accounts In The Group/Channel!",
"example": "{ch}zombies",
},
)
async def ujwalzombie(client, message):
engine = message.Engine
pablo = await edit_or_reply(message, engine.get_string("PROCESSING"))
if len(message.text.split()) == 1:
dm = 0
da = 0
dc = 0
async for member in client.iter_chat_members(message.chat.id):
if member.user.is_deleted:
await sleep(1)
if member.status == "member":
dm += 1
elif member.status == "administrator":
da += 1
elif member.status == "creator":
dc += 1
text = "**Zombies Report!** \n\n"
if dm > 0:
text += engine.get_string("TOTAL_ZOMBIES_USERS").format(dm)
if da > 0:
text += engine.get_string("TOTAL_ZOMBIES_ADMINS").format(da)
if dc > 0:
text += engine.get_string("GRP_OWNER_IS_ZOMBIE")
d = dm + da + dc
if d > 0:
text += (engine.get_string("WIPE_THEM"))
await pablo.edit(text)
else:
await pablo.edit(engine.get_string("NO_ZOMBIES"))
return
sgname = message.text.split(None, 1)[1]
if sgname.lower().strip() == "clean":
me = client.me
lol = await is_admin_or_owner(message, me.id)
if not lol:
await pablo.edit(engine.get_string("NOT_ADMIN"))
return
s = 0
f = 0
async for member in client.iter_chat_members(message.chat.id):
if member.user.is_deleted:
try:
await client.kick_chat_member(message.chat.id, member.user.id)
s += 1
except:
f += 1
text = ""
if s > 0:
text += engine.get_string("REMOVED_ZOMBIES").format(s)
if f > 0:
text += (engine.get_string("FAILED_ZOMBIES").format(f))
await pablo.edit(text)
@speedo_on_cmd(
["ban", "bun"],
only_if_admin=True,
group_only=True,
cmd_help={
"help": "Ban Replied User or provide his ID!",
"example": "{ch}ban (reply to user message OR provide his ID)",
},
)
async def ban_world(client, message):
engine = message.Engine
bun = await edit_or_reply(message, engine.get_string("PROCESSING"))
me_m = client.me
me_ = await message.chat.get_member(int(me_m.id))
if not me_.can_restrict_members:
await bun.edit(engine.get_string("NOT_ADMIN"))
return
text_ = get_text(message)
userk, reason = get_user(message, text_)
if not userk:
await bun.edit(engine.get_string("TO_DO").format("Ban"))
return
try:
user_ = await client.get_users(userk)
except BaseException as e:
await bun.edit(engine.get_string("USER_MISSING").format(e))
return
userz = user_.id
if not reason:
reason = "Not Specified!"
if userz == me_m.id:
await bun.edit(engine.get_string("TF_DO_IT").format("Ban"))
return
try:
user_ = await client.get_users(userz)
except BaseException as e:
await bun.edit(engine.get_string("USER_MISSING").format(e))
return
try:
await client.kick_chat_member(message.chat.id, int(user_.id))
except BaseException as e:
await bun.edit(engine.get_string("FAILED_ADMIN_ACTION").format("Ban", e))
return
b = f"**#Banned** \n**User :** [{user_.first_name}](tg://user?id={user_.id}) \n**Chat :** `{message.chat.title}` \n**Reason :** `{reason}`"
await bun.edit(b)
log = LogIt(message)
await log.log_msg(client, b)
@speedo_on_cmd(
["unban", "unbun"],
only_if_admin=True,
group_only=True,
cmd_help={
"help": "UnBan Replied User or provide his ID!",
"example": "{ch}unban (reply to user message OR Provide his id)",
},
)
async def unban_world(client, message):
engine = message.Engine
unbun = await edit_or_reply(message, engine.get_string("PROCESSING"))
me_m = client.me
me_ = await message.chat.get_member(int(me_m.id))
if not me_.can_restrict_members:
await unbun.edit(engine.get_string("NOT_ADMIN"))
return
text_ = get_text(message)
userm, reason = get_user(message, text_)
if not userm:
await unbun.edit(
engine.get_string("TO_DO").format("Un-Ban")
)
return
try:
user_ = await client.get_users(userm)
except BaseException as e:
await unbun.edit(engine.get_string("USER_MISSING").format(e))
return
userz = user_.id
if not reason:
reason = "Not Specified!"
if userz == me_m.id:
await unbun.edit(engine.get_string("TF_DO_IT").format("Un-Ban"))
return
try:
await client.unban_chat_member(message.chat.id, int(user_.id))
    except BaseException as e:
        await unbun.edit(engine.get_string("FAILED_ADMIN_ACTION").format("Un-Ban", e))
        return
ub = f"**#UnBanned** \n**User :** [{user_.first_name}](tg://user?id={user_.id}) \n**Chat :** `{message.chat.title}` \n**Reason :** `{reason}`"
await unbun.edit(ub)
log = LogIt(message)
await log.log_msg(client, ub)
@speedo_on_cmd(
["promote", "prumote"],
only_if_admin=True,
group_only=True,
cmd_help={
"help": "Promote Replied user or provide his ID!",
"example": "{ch}promote (reply to user message OR provide his ID)",
},
)
async def ujwal_mote(client, message):
engine = message.Engine
pablo = await edit_or_reply(message, engine.get_string("PROCESSING"))
me_m = client.me
me_ = await message.chat.get_member(int(me_m.id))
if not me_.can_promote_members:
await pablo.edit(engine.get_string("NOT_ADMIN"))
return
asplit = get_text(message)
userl, Res = get_user(message, asplit)
if not userl:
await pablo.edit(
engine.get_string("TO_DO").format("Promote")
)
return
try:
user = await client.get_users(userl)
except BaseException as e:
await pablo.edit(engine.get_string("USER_MISSING").format(e))
return
userz = user.id
if not Res:
Res = "Admeme"
if userz == me_m.id:
await pablo.edit(engine.get_string("TF_DO_IT").format("Promote"))
return
try:
await client.promote_chat_member(
message.chat.id,
user.id,
can_change_info=me_.can_change_info,
can_delete_messages=me_.can_delete_messages,
can_restrict_members=me_.can_restrict_members,
can_invite_users=me_.can_invite_users,
can_pin_messages=me_.can_pin_messages,
can_promote_members=me_.can_promote_members,
)
except BaseException as e:
await pablo.edit(engine.get_string("FAILED_ADMIN_ACTION").format("Promote", e))
return
p = f"**#Promote** \n**User :** [{user.first_name}](tg://user?id={user.id}) \n**Chat :** `{message.chat.title}` \n**Title :** `{Res}`"
await pablo.edit(p)
log = LogIt(message)
await log.log_msg(client, p)
try:
if Res:
await client.set_administrator_title(message.chat.id, user.id, Res)
except:
pass
@speedo_on_cmd(
["demote", "demute"],
only_if_admin=True,
group_only=True,
cmd_help={
"help": "Demote Replied user or provide his ID!",
"example": "{ch}demote (reply to user message OR provide his ID)",
},
)
async def ujwal_demote(client, message):
engine = message.Engine
pablo = await edit_or_reply(message, engine.get_string("PROCESSING"))
me_m = client.me
await message.chat.get_member(int(me_m.id))
asplit = get_text(message)
usero = get_user(message, asplit)[0]
if not usero:
await pablo.edit(
engine.get_string("TO_DO").format("Demote")
)
return
try:
user = await client.get_users(usero)
except BaseException as e:
await pablo.edit(engine.get_string("USER_MISSING").format(e))
return
userz = user.id
if userz == me_m.id:
await pablo.edit(engine.get_string("TF_DO_IT").format("Demote"))
return
try:
await client.promote_chat_member(
message.chat.id,
user.id,
is_anonymous=False,
can_change_info=False,
can_post_messages=False,
can_edit_messages=False,
can_delete_messages=False,
can_restrict_members=False,
can_invite_users=False,
can_pin_messages=False,
can_promote_members=False,
)
except BaseException as e:
await pablo.edit(engine.get_string("FAILED_ADMIN_ACTION").format("Demote", e))
return
d = f"**#Demote** \n**User :** [{user.first_name}](tg://user?id={user.id}) \n**Chat :** `{message.chat.title}`"
await pablo.edit(d)
log = LogIt(message)
await log.log_msg(client, d)
@speedo_on_cmd(
["mute"],
only_if_admin=True,
group_only=True,
cmd_help={
"help": "Mute Replied user or provide his ID!",
"example": "{ch}mute (reply to user message OR provide his ID)",
},
)
async def ujwal_mute(client, message):
engine = message.Engine
pablo = await edit_or_reply(message, engine.get_string("PROCESSING"))
me_m = client.me
me_ = await message.chat.get_member(int(me_m.id))
if not me_.can_restrict_members:
await pablo.edit(engine.get_string("NOT_ADMIN"))
return
asplit = get_text(message)
userf = get_user(message, asplit)[0]
if not userf:
await pablo.edit(
engine.get_string("TO_DO").format("Mute")
)
return
try:
user = await client.get_users(userf)
except BaseException as e:
await pablo.edit(engine.get_string("USER_MISSING").format(e))
return
userz = user.id
if userz == me_m.id:
await pablo.edit(engine.get_string("TF_DO_IT").format("Mute"))
return
try:
await client.restrict_chat_member(
message.chat.id, user.id, ChatPermissions(can_send_messages=False)
)
except BaseException as e:
await pablo.edit(engine.get_string("FAILED_ADMIN_ACTION").format("Mute", e))
return
m = f"**#Muted** \n**User :** [{user.first_name}](tg://user?id={user.id}) \n**Chat :** `{message.chat.title}`"
await pablo.edit(m)
log = LogIt(message)
await log.log_msg(client, m)
@speedo_on_cmd(
["unmute"],
only_if_admin=True,
group_only=True,
cmd_help={
"help": "Unmute Replied user or provide his ID!",
"example": "{ch}Unmute (reply to user message OR provide his ID)",
},
)
async def ujwal_unmute(client, message):
engine = message.Engine
pablo = await edit_or_reply(message, engine.get_string("PROCESSING"))
me_m = client.me
me_ = await message.chat.get_member(int(me_m.id))
if not me_.can_restrict_members:
await pablo.edit(engine.get_string("NOT_ADMIN"))
return
asplit = get_text(message)
userf = get_user(message, asplit)[0]
if not userf:
await pablo.edit(
engine.get_string("TO_DO").format("Un-Mute")
)
return
try:
user = await client.get_users(userf)
except BaseException as e:
await pablo.edit(engine.get_string("USER_MISSING").format(e))
return
userz = user.id
if userz == me_m.id:
await pablo.edit(engine.get_string("TF_DO_IT").format("un-mute"))
return
try:
await client.restrict_chat_member(
message.chat.id, user.id, ChatPermissions(can_send_messages=True)
)
except BaseException as e:
await pablo.edit(engine.get_string("FAILED_ADMIN_ACTION").format("Un-mute", e))
return
um = f"**#Un_Muted** \n**User :** [{user.first_name}](tg://user?id={user.id}) \n**Chat :** `{message.chat.title}`"
await pablo.edit(um)
log = LogIt(message)
await log.log_msg(client, um)
@speedo_on_cmd(
["chatinfo", "grpinfo"],
group_only=True,
cmd_help={"help": "Get Info Of The Chat!", "example": "{ch}chatinfo"},
)
async def owo_chat_info(client, message):
engine = message.Engine
s = await edit_or_reply(message, engine.get_string("PROCESSING"))
ujwal = await client.get_chat(message.chat.id)
peer = await client.resolve_peer(message.chat.id)
online_ = await client.send(pyrogram.raw.functions.messages.GetOnlines(peer=peer))
msg = "**Chat Info** \n\n"
msg += f"**Chat-ID :** __{ujwal.id}__ \n"
msg += f"**Verified :** __{ujwal.is_verified}__ \n"
msg += f"**Is Scam :** __{ujwal.is_scam}__ \n"
msg += f"**Chat Title :** __{ujwal.title}__ \n"
msg += f"**Users Online :** __{online_.onlines}__ \n"
if ujwal.photo:
msg += f"**Chat DC :** __{ujwal.dc_id}__ \n"
if ujwal.username:
msg += f"**Chat Username :** __{ujwal.username}__ \n"
if ujwal.description:
msg += f"**Chat Description :** __{ujwal.description}__ \n"
msg += f"**Chat Members Count :** __{ujwal.members_count}__ \n"
if ujwal.photo:
kek = await client.download_media(ujwal.photo.big_file_id)
await client.send_photo(message.chat.id, photo=kek, caption=msg)
await s.delete()
else:
await s.edit(msg)
@speedo_on_cmd(
["purge"],
only_if_admin=True,
cmd_help={
"help": "Purge All Messages Till Replied Message!",
"example": "{ch}purge (reply to message)",
},
)
async def purge(client, message):
engine = message.Engine
start_time = time.time()
message_ids = []
purge_len = 0
event = await edit_or_reply(message, engine.get_string("PROCESSING"))
me_m = client.me
if message.chat.type in ["supergroup", "channel"]:
me_ = await message.chat.get_member(int(me_m.id))
if not me_.can_delete_messages:
await event.edit(engine.get_string("NOT_ADMIN"))
return
if not message.reply_to_message:
await event.edit(engine.get_string("NEEDS_REPLY").format("Message To Purge."))
return
async for msg in client.iter_history(
chat_id=message.chat.id,
offset_id=message.reply_to_message.message_id,
reverse=True,
):
if msg.message_id != message.message_id:
purge_len += 1
message_ids.append(msg.message_id)
if len(message_ids) >= 100:
await client.delete_messages(
chat_id=message.chat.id, message_ids=message_ids, revoke=True
)
message_ids.clear()
if message_ids:
await client.delete_messages(
chat_id=message.chat.id, message_ids=message_ids, revoke=True
)
end_time = time.time()
u_time = round(end_time - start_time)
await event.edit(
engine.get_string("PURGE_").format(purge_len, u_time)
)
await asyncio.sleep(3)
await event.delete()
@speedo_on_cmd(
["del"],
cmd_help={
"help": "Delete Replied Message!",
"example": "{ch}del (reply to message)",
},
)
async def delmsgs(client, message):
engine = message.Engine
if not message.reply_to_message:
await message.delete()
return
await client.delete_messages(
chat_id=message.chat.id,
message_ids=[message.reply_to_message.message_id],
revoke=True,
)
await message.delete()
@speedo_on_cmd(
["setgrppic", "gpic"],
cmd_help={
"help": "Set Custom Group Pic, For Lazy Peoples!",
"example": "{ch}setgrppic (reply to image)",
},
)
async def magic_grps(client, message):
engine = message.Engine
msg_ = await edit_or_reply(message, engine.get_string("PROCESSING"))
if not message.reply_to_message:
await msg_.edit(engine.get_string("NEEDS_REPLY").format("image"))
return
me_ = await message.chat.get_member(int(client.me.id))
if not me_.can_change_info:
await msg_.edit(engine.get_string("NOT_ADMIN"))
return
cool = await convert_to_image(message, client)
if not cool:
await msg_.edit(engine.get_string("NEEDS_REPLY").format("a valid media"))
return
if not os.path.exists(cool):
await msg_.edit(engine.get_string("INVALID_MEDIA"))
return
try:
await client.set_chat_photo(message.chat.id, photo=cool)
except BaseException as e:
await msg_.edit(f"`Unable To Set Group Photo! TraceBack : {e}")
return
await msg_.edit(engine.get_string("DONE_"))
| [((33, 1, 40, 1), 'main_start.core.decorators.speedo_on_cmd', 'speedo_on_cmd', (), '', False, 'from main_start.core.decorators import speedo_on_cmd\n'), ((59, 1, 66, 1), 'main_start.core.decorators.speedo_on_cmd', 'speedo_on_cmd', (), '', False, 'from main_start.core.decorators import speedo_on_cmd\n'), ((83, 1, 87, 1), 'main_start.core.decorators.speedo_on_cmd', 'speedo_on_cmd', (), '', False, 'from main_start.core.decorators import speedo_on_cmd\n'), ((94, 1, 97, 1), 'main_start.core.decorators.speedo_on_cmd', 'speedo_on_cmd', (), '', False, 'from main_start.core.decorators import speedo_on_cmd\n'), ((130, 1, 134, 1), 'main_start.core.decorators.speedo_on_cmd', 'speedo_on_cmd', (), '', False, 'from main_start.core.decorators import speedo_on_cmd\n'), ((150, 1, 156, 1), 'main_start.core.decorators.speedo_on_cmd', 'speedo_on_cmd', (), '', False, 'from main_start.core.decorators import speedo_on_cmd\n'), ((211, 1, 219, 1), 'main_start.core.decorators.speedo_on_cmd', 'speedo_on_cmd', (), '', False, 'from main_start.core.decorators import speedo_on_cmd\n'), ((260, 1, 268, 1), 'main_start.core.decorators.speedo_on_cmd', 'speedo_on_cmd', (), '', False, 'from main_start.core.decorators import speedo_on_cmd\n'), ((305, 1, 313, 1), 'main_start.core.decorators.speedo_on_cmd', 'speedo_on_cmd', (), '', False, 'from main_start.core.decorators import speedo_on_cmd\n'), ((365, 1, 373, 1), 'main_start.core.decorators.speedo_on_cmd', 'speedo_on_cmd', (), '', False, 'from main_start.core.decorators import speedo_on_cmd\n'), ((418, 1, 426, 1), 'main_start.core.decorators.speedo_on_cmd', 'speedo_on_cmd', (), '', False, 'from main_start.core.decorators import speedo_on_cmd\n'), ((464, 1, 472, 1), 'main_start.core.decorators.speedo_on_cmd', 'speedo_on_cmd', (), '', False, 'from main_start.core.decorators import speedo_on_cmd\n'), ((510, 1, 514, 1), 'main_start.core.decorators.speedo_on_cmd', 'speedo_on_cmd', (), '', False, 'from main_start.core.decorators import speedo_on_cmd\n'), ((542, 1, 549, 1), 'main_start.core.decorators.speedo_on_cmd', 'speedo_on_cmd', (), '', False, 'from main_start.core.decorators import speedo_on_cmd\n'), ((591, 1, 597, 1), 'main_start.core.decorators.speedo_on_cmd', 'speedo_on_cmd', (), '', False, 'from main_start.core.decorators import speedo_on_cmd\n'), ((611, 1, 617, 1), 'main_start.core.decorators.speedo_on_cmd', 'speedo_on_cmd', (), '', False, 'from main_start.core.decorators import speedo_on_cmd\n'), ((228, 12, 228, 29), 'main_start.helper_func.basic_helpers.get_text', 'get_text', ({(228, 21, 228, 28): 'message'}, {}), '(message)', False, 'from main_start.helper_func.basic_helpers import edit_or_reply, edit_or_send_as_file, get_text, get_user, is_admin_or_owner\n'), ((229, 20, 229, 44), 'main_start.helper_func.basic_helpers.get_user', 'get_user', ({(229, 29, 229, 36): 'message', (229, 38, 229, 43): 'text_'}, {}), '(message, text_)', False, 'from main_start.helper_func.basic_helpers import edit_or_reply, edit_or_send_as_file, get_text, get_user, is_admin_or_owner\n'), ((256, 10, 256, 24), 'main_start.helper_func.logger_s.LogIt', 'LogIt', ({(256, 16, 256, 23): 'message'}, {}), '(message)', False, 'from main_start.helper_func.logger_s import LogIt\n'), ((277, 12, 277, 29), 'main_start.helper_func.basic_helpers.get_text', 'get_text', ({(277, 21, 277, 28): 'message'}, {}), '(message)', False, 'from main_start.helper_func.basic_helpers import edit_or_reply, edit_or_send_as_file, get_text, get_user, is_admin_or_owner\n'), ((278, 20, 278, 44), 
'main_start.helper_func.basic_helpers.get_user', 'get_user', ({(278, 29, 278, 36): 'message', (278, 38, 278, 43): 'text_'}, {}), '(message, text_)', False, 'from main_start.helper_func.basic_helpers import edit_or_reply, edit_or_send_as_file, get_text, get_user, is_admin_or_owner\n'), ((301, 10, 301, 24), 'main_start.helper_func.logger_s.LogIt', 'LogIt', ({(301, 16, 301, 23): 'message'}, {}), '(message)', False, 'from main_start.helper_func.logger_s import LogIt\n'), ((322, 13, 322, 30), 'main_start.helper_func.basic_helpers.get_text', 'get_text', ({(322, 22, 322, 29): 'message'}, {}), '(message)', False, 'from main_start.helper_func.basic_helpers import edit_or_reply, edit_or_send_as_file, get_text, get_user, is_admin_or_owner\n'), ((323, 17, 323, 42), 'main_start.helper_func.basic_helpers.get_user', 'get_user', ({(323, 26, 323, 33): 'message', (323, 35, 323, 41): 'asplit'}, {}), '(message, asplit)', False, 'from main_start.helper_func.basic_helpers import edit_or_reply, edit_or_send_as_file, get_text, get_user, is_admin_or_owner\n'), ((356, 10, 356, 24), 'main_start.helper_func.logger_s.LogIt', 'LogIt', ({(356, 16, 356, 23): 'message'}, {}), '(message)', False, 'from main_start.helper_func.logger_s import LogIt\n'), ((379, 13, 379, 30), 'main_start.helper_func.basic_helpers.get_text', 'get_text', ({(379, 22, 379, 29): 'message'}, {}), '(message)', False, 'from main_start.helper_func.basic_helpers import edit_or_reply, edit_or_send_as_file, get_text, get_user, is_admin_or_owner\n'), ((414, 10, 414, 24), 'main_start.helper_func.logger_s.LogIt', 'LogIt', ({(414, 16, 414, 23): 'message'}, {}), '(message)', False, 'from main_start.helper_func.logger_s import LogIt\n'), ((435, 13, 435, 30), 'main_start.helper_func.basic_helpers.get_text', 'get_text', ({(435, 22, 435, 29): 'message'}, {}), '(message)', False, 'from main_start.helper_func.basic_helpers import edit_or_reply, edit_or_send_as_file, get_text, get_user, is_admin_or_owner\n'), ((460, 10, 460, 24), 'main_start.helper_func.logger_s.LogIt', 'LogIt', ({(460, 16, 460, 23): 'message'}, {}), '(message)', False, 'from main_start.helper_func.logger_s import LogIt\n'), ((481, 13, 481, 30), 'main_start.helper_func.basic_helpers.get_text', 'get_text', ({(481, 22, 481, 29): 'message'}, {}), '(message)', False, 'from main_start.helper_func.basic_helpers import edit_or_reply, edit_or_send_as_file, get_text, get_user, is_admin_or_owner\n'), ((506, 10, 506, 24), 'main_start.helper_func.logger_s.LogIt', 'LogIt', ({(506, 16, 506, 23): 'message'}, {}), '(message)', False, 'from main_start.helper_func.logger_s import LogIt\n'), ((552, 17, 552, 28), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((582, 15, 582, 26), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((101, 13, 101, 30), 'main_start.helper_func.basic_helpers.get_text', 'get_text', ({(101, 22, 101, 29): 'message'}, {}), '(message)', False, 'from main_start.helper_func.basic_helpers import edit_or_reply, edit_or_send_as_file, get_text, get_user, is_admin_or_owner\n'), ((120, 10, 127, 5), 'main_start.helper_func.basic_helpers.edit_or_send_as_file', 'edit_or_send_as_file', ({(121, 8, 121, 14): 'messag', (122, 8, 122, 13): 'pablo', (123, 8, 123, 14): 'client', (124, 8, 124, 33): 'f"""`AdminList Of {holy}!`"""', (125, 8, 125, 29): '"""admin-lookup-result"""', (126, 8, 126, 14): '"""html"""'}, {}), "(messag, pablo, client, f'`AdminList Of {holy}!`',\n 'admin-lookup-result', 'html')", False, 'from main_start.helper_func.basic_helpers import edit_or_reply, 
edit_or_send_as_file, get_text, get_user, is_admin_or_owner\n'), ((138, 13, 138, 30), 'main_start.helper_func.basic_helpers.get_text', 'get_text', ({(138, 22, 138, 29): 'message'}, {}), '(message)', False, 'from main_start.helper_func.basic_helpers import edit_or_reply, edit_or_send_as_file, get_text, get_user, is_admin_or_owner\n'), ((380, 12, 380, 37), 'main_start.helper_func.basic_helpers.get_user', 'get_user', ({(380, 21, 380, 28): 'message', (380, 30, 380, 36): 'asplit'}, {}), '(message, asplit)', False, 'from main_start.helper_func.basic_helpers import edit_or_reply, edit_or_send_as_file, get_text, get_user, is_admin_or_owner\n'), ((436, 12, 436, 37), 'main_start.helper_func.basic_helpers.get_user', 'get_user', ({(436, 21, 436, 28): 'message', (436, 30, 436, 36): 'asplit'}, {}), '(message, asplit)', False, 'from main_start.helper_func.basic_helpers import edit_or_reply, edit_or_send_as_file, get_text, get_user, is_admin_or_owner\n'), ((482, 12, 482, 37), 'main_start.helper_func.basic_helpers.get_user', 'get_user', ({(482, 21, 482, 28): 'message', (482, 30, 482, 36): 'asplit'}, {}), '(message, asplit)', False, 'from main_start.helper_func.basic_helpers import edit_or_reply, edit_or_send_as_file, get_text, get_user, is_admin_or_owner\n'), ((587, 10, 587, 26), 'asyncio.sleep', 'asyncio.sleep', ({(587, 24, 587, 25): '(3)'}, {}), '(3)', False, 'import asyncio\n'), ((628, 17, 628, 50), 'main_start.helper_func.plugin_helpers.convert_to_image', 'convert_to_image', ({(628, 34, 628, 41): 'message', (628, 43, 628, 49): 'client'}, {}), '(message, client)', False, 'from main_start.helper_func.plugin_helpers import convert_to_image, convert_vid_to_vidnote, generate_meme\n'), ((632, 11, 632, 31), 'os.path.exists', 'os.path.exists', ({(632, 26, 632, 30): 'cool'}, {}), '(cool)', False, 'import os\n'), ((190, 20, 190, 53), 'main_start.helper_func.basic_helpers.is_admin_or_owner', 'is_admin_or_owner', ({(190, 38, 190, 45): 'message', (190, 47, 190, 52): 'me.id'}, {}), '(message, me.id)', False, 'from main_start.helper_func.basic_helpers import edit_or_reply, edit_or_send_as_file, get_text, get_user, is_admin_or_owner\n'), ((520, 32, 520, 85), 'pyrogram.raw.functions.messages.GetOnlines', 'pyrogram.raw.functions.messages.GetOnlines', (), '', False, 'import pyrogram\n'), ((453, 38, 453, 78), 'pyrogram.types.ChatPermissions', 'ChatPermissions', (), '', False, 'from pyrogram.types import ChatPermissions\n'), ((499, 38, 499, 77), 'pyrogram.types.ChatPermissions', 'ChatPermissions', (), '', False, 'from pyrogram.types import ChatPermissions\n'), ((166, 22, 166, 30), 'asyncio.sleep', 'sleep', ({(166, 28, 166, 29): '(1)'}, {}), '(1)', False, 'from asyncio import sleep\n')] |
Nereg/Carberretta | carberretta/bot/cogs/feeds.py | 01e25bc8ece4c310ab541304e8809dfdd3eec3b8 | """
FEEDS
Handles YouTube and Twitch feed notifications.
"""
import datetime as dt
import discord
import feedparser
from apscheduler.triggers.cron import CronTrigger
from discord.ext import commands
from carberretta import Config
from carberretta.utils import DEFAULT_EMBED_COLOUR, chron
LIVE_EMBED_COLOUR = 0x9146FF
VOD_EMBED_COLOUR = 0x3498DB
class Feeds(commands.Cog):
def __init__(self, bot: commands.Bot) -> None:
self.bot = bot
async def call_feed(self) -> dict:
url = f"https://www.youtube.com/feeds/videos.xml?channel_id={Config.YOUTUBE_CHANNEL_ID}&{dt.datetime.utcnow()}"
async with self.bot.session.get(url) as response:
if not 200 <= response.status <= 299:
return []
if not (data := feedparser.parse(await response.text()).entries):
return []
return data
async def call_yt_api(self, video_id: str) -> dict:
url = f"https://www.googleapis.com/youtube/v3/videos?part=contentDetails%2CliveStreamingDetails%2Csnippet&id={video_id}&key={Config.YOUTUBE_API_KEY}"
async with self.bot.session.get(url) as response:
if not 200 <= response.status <= 299:
return []
if not (data := await response.json()):
return []
return data["items"][0]
async def call_twitch_api(self) -> dict:
url = f"https://api.twitch.tv/helix/search/channels?query=carberratutorials"
oauthurl = f"https://id.twitch.tv/oauth2/token?client_id={Config.TWITCH_CLIENT_ID}&client_secret={Config.TWITCH_CLIENT_SECRET}&grant_type=client_credentials"
async with self.bot.session.post(url=oauthurl) as response:
if not 200 <= response.status <= 299:
return []
if not (twitch_tok := (await response.json())["access_token"]):
return []
headers = {
"client-id": f"{Config.TWITCH_CLIENT_ID}",
"Authorization": f"Bearer {twitch_tok}",
}
async with self.bot.session.get(url=url, headers=headers) as response:
if not 200 <= response.status <= 299:
return []
if not (data := await response.json()):
return []
return data["data"][0]
@commands.Cog.listener()
async def on_ready(self) -> None:
if not self.bot.ready.booted:
self.videos_channel = self.bot.get_channel(Config.VIDEOS_ID)
self.videos_role = self.bot.guild.get_role(Config.VIDEOS_ROLE_ID)
self.vods_role = self.bot.guild.get_role(Config.VODS_ROLE_ID)
self.streams_role = self.bot.guild.get_role(Config.STREAMS_ROLE_ID)
self.youtube = self.bot.get_cog("YouTube")
if (await self.bot.application_info()).id == 696804435321552906:
self.bot.scheduler.add_job(self.get_new_videos, CronTrigger(minute="*/3", second=0))
self.bot.scheduler.add_job(self.get_new_vods, CronTrigger(minute="*/3", second=15))
self.bot.scheduler.add_job(self.get_new_premieres, CronTrigger(minute="*/3", second=30))
self.bot.scheduler.add_job(self.get_new_streams, CronTrigger(minute="*/3", second=45))
self.bot.ready.up(self)
async def get_new_vods(self) -> str:
current_vod = await self.bot.db.field("SELECT ContentValue FROM videos WHERE ContentType = ?", "vod")
for item in await self.call_feed():
data = await self.call_yt_api(item.yt_videoid)
thumbnails = data["snippet"]["thumbnails"]
duration = data["contentDetails"]["duration"]
if current_vod == item.yt_videoid:
# We announced this vod already
return
elif "#VOD" in item.summary:
# This is a vod we havent announced
await self.videos_channel.send(
f"Hey {self.vods_role.mention}, a new VOD just went live! Catch up on anything you missed from the last stream!",
embed=discord.Embed.from_dict(
{
"title": item.title,
"description": desc if len(desc := item.summary) <= 500 else f"{desc[:500]}...",
"color": VOD_EMBED_COLOUR,
"url": item.link,
"author": {"name": "Carberra Tutorials"},
"image": {"url": thumbnails["maxres"]["url"]},
"footer": {"text": f"Runtime: {self.youtube.get_duration(duration, long=True)}"},
}
),
)
await self.bot.db.execute(
"UPDATE videos SET ContentValue = ? WHERE ContentType = ?", item.yt_videoid, "vod"
)
return item.yt_videoid
async def get_new_videos(self) -> str:
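        # Announce the newest regular upload (not a premiere and not a #VOD), remembering the last announced video ID.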
current_vid = await self.bot.db.field("SELECT ContentValue FROM videos WHERE ContentType = ?", "video")
for item in await self.call_feed():
data = await self.call_yt_api(item.yt_videoid)
thumbnails = data["snippet"]["thumbnails"]
duration = data["contentDetails"]["duration"]
if item.yt_videoid == current_vid:
# This is a video we already announced
return
elif "liveStreamingDetails" not in data.keys():
                # A new video is live and it was not a premiere
if "#VOD" not in item.summary:
                    # This isn't a VOD
await self.videos_channel.send(
f"Hey {self.videos_role.mention}, a new video just went live! Come check it out!",
embed=discord.Embed.from_dict(
{
"title": item.title,
"description": desc if len(desc := item.summary) <= 500 else f"{desc[:500]}...",
"color": DEFAULT_EMBED_COLOUR,
"url": item.link,
"author": {"name": "Carberra Tutorials"},
"image": {"url": thumbnails["maxres"]["url"]},
"footer": {"text": f"Runtime: {self.youtube.get_duration(duration, long=True)}"},
}
),
)
await self.bot.db.execute(
"UPDATE videos SET ContentValue = ? WHERE ContentType = ?", item.yt_videoid, "video"
)
return item.yt_videoid
async def get_new_premieres(self) -> tuple:
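        # Announce premieres at most twice: once while they are upcoming and once more when they go live.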
known_premieres = {
_id: [_upcoming, _announced]
for _id, _upcoming, _announced in await self.bot.db.records("SELECT * FROM premieres")
}
for item in await self.call_feed():
data = await self.call_yt_api(item.yt_videoid)
thumbnails = data["snippet"]["thumbnails"]
duration = data["contentDetails"]["duration"]
live_content = data["snippet"]["liveBroadcastContent"]
upcoming = known_premieres[item.yt_videoid][0] if item.yt_videoid in known_premieres.keys() else None
announced = known_premieres[item.yt_videoid][1] if item.yt_videoid in known_premieres.keys() else None
if "liveStreamingDetails" in data.keys():
start_time = data["liveStreamingDetails"]["scheduledStartTime"].strip("Z")
scheduled_time = chron.from_iso(start_time)
if not upcoming and duration != "P0D":
# We have not seen this premiere before
if live_content == "upcoming" and not announced:
# This premiere is upcoming and not live
await self.videos_channel.send(
f"Hey {self.videos_role.mention}, a new premiere is scheduled for {chron.long_date_and_time(scheduled_time)} UTC! Hope to see you there!",
embed=discord.Embed.from_dict(
{
"title": item.title,
"description": desc if len(desc := item.summary) <= 500 else f"{desc[:500]}...",
"color": DEFAULT_EMBED_COLOUR,
"url": item.link,
"author": {"name": "Carberra Tutorials"},
"image": {"url": thumbnails["maxres"]["url"]},
"footer": {"text": f"Runtime: {self.youtube.get_duration(duration, long=True)}"},
}
),
)
await self.bot.db.execute(
"REPLACE INTO premieres (VideoID, Upcoming, Announced) VALUES (?, ?, ?)",
item.yt_videoid,
1,
0,
)
return item.yt_videoid, False
elif live_content == "live" and not upcoming and not announced:
                        # The premiere was never upcoming and is now live
await self.videos_channel.send(
f"Hey {self.videos_role.mention}, a new premiere started on {chron.long_date_and_time(scheduled_time)} UTC! Come and join us!",
embed=discord.Embed.from_dict(
{
"title": item.title,
"description": desc if len(desc := item.summary) <= 500 else f"{desc[:500]}...",
"color": DEFAULT_EMBED_COLOUR,
"url": item.link,
"author": {"name": "Carberra Tutorials"},
"image": {"url": thumbnails["maxres"]["url"]},
"footer": {"text": f"Runtime: {self.youtube.get_duration(duration, long=True)}"},
}
),
)
await self.bot.db.execute(
"REPLACE INTO premieres (VideoID, Upcoming, Announced) VALUES (?, ?, ?)",
item.yt_videoid,
1,
1,
)
return item.yt_videoid, True
elif not announced:
# A premiere was upcoming, and is now live
await self.videos_channel.send(
f"Hey {self.videos_role.mention}, a new premiere started on {chron.long_date_and_time(scheduled_time)} UTC! Come and join us!",
embed=discord.Embed.from_dict(
{
"title": item.title,
"description": desc if len(desc := item.summary) <= 500 else f"{desc[:500]}...",
"color": DEFAULT_EMBED_COLOUR,
"url": item.link,
"author": {"name": "Carberra Tutorials"},
"image": {"url": thumbnails["maxres"]["url"]},
"footer": {"text": f"Runtime: {self.youtube.get_duration(duration, long=True)}"},
}
),
)
await self.bot.db.execute(
"REPLACE INTO premieres (VideoID, Upcoming, Announced) VALUES (?, ?, ?)", item.yt_videoid, 1, 1
)
return item.yt_videoid, True
async def get_new_streams(self) -> tuple:
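        # Announce when the Twitch stream goes live, then edit that announcement once the stream has ended.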
data = await self.call_twitch_api()
if data:
live_now = await self.bot.db.field("SELECT StreamLive FROM streams WHERE ID = 1")
if data["is_live"] and not live_now:
                # The stream is live and we haven't announced it yet
start = chron.from_iso(data["started_at"].strip("Z"))
message = await self.videos_channel.send(
f"Hey {self.streams_role.mention}, I'm live on Twitch now! Come watch!",
embed=discord.Embed.from_dict(
{
"title": data["title"],
"description": f"**Category: {data['game_name']}**",
"color": LIVE_EMBED_COLOUR,
"url": "https://www.twitch.tv/carberratutorials",
"author": {"name": "Carberra Tutorials"},
"thumbnail": {"url": data["thumbnail_url"]},
"footer": {"text": f"Started: {chron.long_date_and_time(start)} UTC"},
}
),
)
await self.bot.db.execute(
"UPDATE streams SET StreamLive = ?, StreamStart = ?, StreamMessage= ? WHERE ID = 1",
1,
start,
message.id,
)
return data["title"], False
elif not data["is_live"] and live_now:
                # The stream is not live, but it was when we last checked (the stream has ended)
await self.bot.db.execute(
"UPDATE streams SET StreamLive = ?, StreamEnd = ? WHERE ID = 1", 0, dt.datetime.utcnow()
)
start, stream_message, end = await self.bot.db.record(
"SELECT StreamStart, StreamMessage, StreamEnd FROM streams WHERE ID = 1"
)
duration = chron.from_iso(end) - chron.from_iso(start)
try:
message = await self.videos_channel.fetch_message(stream_message)
except (discord.NotFound, discord.Forbidden, discord.HTTPException):
return
else:
await message.edit(
content=f"Hey {self.streams_role.mention}, I'm live on Twitch now! Come watch!",
embed=discord.Embed.from_dict(
{
"title": "The stream has ended.",
"description": "**Catch you in the next one!**",
"color": LIVE_EMBED_COLOUR,
"url": "https://www.twitch.tv/carberratutorials",
"author": {"name": "Carberra Tutorials"},
"thumbnail": {"url": data["thumbnail_url"]},
"footer": {"text": f"Runtime: {chron.long_delta(duration)}"},
}
),
)
return data["title"], True
@commands.group(name="feed", invoke_without_command=True)
@commands.is_owner()
async def group_feed(self, ctx: commands.Context) -> None:
pass
@group_feed.command(name="video")
@commands.is_owner()
async def command_feed_video(self, ctx: commands.Context) -> None:
last_video = await self.get_new_videos()
await ctx.send(f"Announced video: {last_video}." if last_video else "No new videos.")
@group_feed.command(name="vod")
@commands.is_owner()
async def command_feed_vod(self, ctx: commands.Context) -> None:
last_vod = await self.get_new_vods()
await ctx.send(f"Announced VOD: {last_vod}." if last_vod else "No new VODs.")
@group_feed.command(name="premiere")
@commands.is_owner()
async def command_feed_premiere(self, ctx: commands.Context) -> None:
if not (last_premiere := await self.get_new_premieres()):
await ctx.send("No new premieres.")
else:
await ctx.send(
f"Announced live premiere: {last_premiere[0]}."
if last_premiere[1]
else f"Announced upcoming premiere: {last_premiere[0]}."
)
@group_feed.command(name="stream")
@commands.is_owner()
async def command_feed_stream(self, ctx: commands.Context) -> None:
if not (last_stream := await self.get_new_streams()):
await ctx.send("No new streams.")
else:
await ctx.send(
f"Stream ended: {last_stream[0]}." if last_stream[1] else f"Announced stream: {last_stream[0]}."
)
def setup(bot: commands.Bot) -> None:
bot.add_cog(Feeds(bot))
| [((71, 5, 71, 28), 'discord.ext.commands.Cog.listener', 'commands.Cog.listener', ({}, {}), '()', False, 'from discord.ext import commands\n'), ((335, 5, 335, 61), 'discord.ext.commands.group', 'commands.group', (), '', False, 'from discord.ext import commands\n'), ((336, 5, 336, 24), 'discord.ext.commands.is_owner', 'commands.is_owner', ({}, {}), '()', False, 'from discord.ext import commands\n'), ((341, 5, 341, 24), 'discord.ext.commands.is_owner', 'commands.is_owner', ({}, {}), '()', False, 'from discord.ext import commands\n'), ((347, 5, 347, 24), 'discord.ext.commands.is_owner', 'commands.is_owner', ({}, {}), '()', False, 'from discord.ext import commands\n'), ((353, 5, 353, 24), 'discord.ext.commands.is_owner', 'commands.is_owner', ({}, {}), '()', False, 'from discord.ext import commands\n'), ((365, 5, 365, 24), 'discord.ext.commands.is_owner', 'commands.is_owner', ({}, {}), '()', False, 'from discord.ext import commands\n'), ((25, 97, 25, 117), 'datetime.datetime.utcnow', 'dt.datetime.utcnow', ({}, {}), '()', True, 'import datetime as dt\n'), ((180, 33, 180, 59), 'carberretta.utils.chron.from_iso', 'chron.from_iso', ({(180, 48, 180, 58): 'start_time'}, {}), '(start_time)', False, 'from carberretta.utils import DEFAULT_EMBED_COLOUR, chron\n'), ((81, 64, 81, 99), 'apscheduler.triggers.cron.CronTrigger', 'CronTrigger', (), '', False, 'from apscheduler.triggers.cron import CronTrigger\n'), ((82, 62, 82, 98), 'apscheduler.triggers.cron.CronTrigger', 'CronTrigger', (), '', False, 'from apscheduler.triggers.cron import CronTrigger\n'), ((83, 67, 83, 103), 'apscheduler.triggers.cron.CronTrigger', 'CronTrigger', (), '', False, 'from apscheduler.triggers.cron import CronTrigger\n'), ((84, 65, 84, 101), 'apscheduler.triggers.cron.CronTrigger', 'CronTrigger', (), '', False, 'from apscheduler.triggers.cron import CronTrigger\n'), ((309, 27, 309, 46), 'carberretta.utils.chron.from_iso', 'chron.from_iso', ({(309, 42, 309, 45): 'end'}, {}), '(end)', False, 'from carberretta.utils import DEFAULT_EMBED_COLOUR, chron\n'), ((309, 49, 309, 70), 'carberretta.utils.chron.from_iso', 'chron.from_iso', ({(309, 64, 309, 69): 'start'}, {}), '(start)', False, 'from carberretta.utils import DEFAULT_EMBED_COLOUR, chron\n'), ((302, 88, 302, 108), 'datetime.datetime.utcnow', 'dt.datetime.utcnow', ({}, {}), '()', True, 'import datetime as dt\n'), ((189, 95, 189, 135), 'carberretta.utils.chron.long_date_and_time', 'chron.long_date_and_time', ({(189, 120, 189, 134): 'scheduled_time'}, {}), '(scheduled_time)', False, 'from carberretta.utils import DEFAULT_EMBED_COLOUR, chron\n'), ((243, 85, 243, 125), 'carberretta.utils.chron.long_date_and_time', 'chron.long_date_and_time', ({(243, 110, 243, 124): 'scheduled_time'}, {}), '(scheduled_time)', False, 'from carberretta.utils import DEFAULT_EMBED_COLOUR, chron\n'), ((216, 89, 216, 129), 'carberretta.utils.chron.long_date_and_time', 'chron.long_date_and_time', ({(216, 114, 216, 128): 'scheduled_time'}, {}), '(scheduled_time)', False, 'from carberretta.utils import DEFAULT_EMBED_COLOUR, chron\n'), ((284, 59, 284, 90), 'carberretta.utils.chron.long_date_and_time', 'chron.long_date_and_time', ({(284, 84, 284, 89): 'start'}, {}), '(start)', False, 'from carberretta.utils import DEFAULT_EMBED_COLOUR, chron\n'), ((328, 63, 328, 89), 'carberretta.utils.chron.long_delta', 'chron.long_delta', ({(328, 80, 328, 88): 'duration'}, {}), '(duration)', False, 'from carberretta.utils import DEFAULT_EMBED_COLOUR, chron\n')] |
abaire/gdb_sniffer | gdb/proxy.py | f330193c65a39ce6abb01f25737ca967a0af9629 | """Provides a GDB logging proxy.
See https://sourceware.org/gdb/onlinedocs/gdb/Remote-Protocol.html
See https://www.embecosm.com/appnotes/ean4/embecosm-howto-rsp-server-ean4-issue-2.html
"""
from __future__ import annotations
import logging
import socket
from typing import Optional
from typing import Tuple
from .packet import GDBPacket
from net import ip_transport
logger = logging.getLogger(__name__)
class GDBProxy(ip_transport.IPTransport):
"""GDB Remote Serial Protocol proxy."""
def __init__(self, target_addr: Tuple[str, int], colorize: bool = False):
super().__init__(process_callback=self._on_gdb_bytes_read)
self.log_acks = False
self.target_addr = target_addr
self._target: Optional[ip_transport.IPTransport] = None
if colorize:
self.target_color = "\x1b[34m\x1b[47m"
self.gdb_color = "\x1b[30m\x1b[47m"
else:
self.target_color = ""
self.gdb_color = ""
self._gdb_read_buffer: bytearray = bytearray()
self._target_read_buffer: bytearray = bytearray()
def set_connection(self, sock, addr):
super().set_connection(sock, addr)
logger.debug(f"{self.target_color}Connecting to target at {self.target_addr}")
try:
target_sock = socket.create_connection(self.target_addr)
except ConnectionRefusedError:
logger.error(f"{self.target_color}Connection to Target@{self.target_addr} refused.")
self.close()
return
self._target = ip_transport.IPTransport(self._on_target_bytes_read, f"Target@{self.target_addr}")
self._target.set_connection(target_sock, self.target_addr)
self._add_sub_connection(self._target)
def _on_gdb_bytes_read(self, _ignored):
buffer = self._read_buffer
self.shift_read_buffer(len(buffer))
self._append_gdb_read_buffer(buffer)
self._target._write_buffer.extend(buffer)
def _on_target_bytes_read(self, _ignored):
buffer = self._target.read_buffer
self._target.shift_read_buffer(len(buffer))
self._append_target_read_buffer(buffer)
self._write_buffer.extend(buffer)
def _append_gdb_read_buffer(self, data: bytes):
self._unescape_and_append(self._gdb_read_buffer, data)
bytes_consumed = self._log_rsp_bytes(f"{self.gdb_color}GDB :", self._gdb_read_buffer)
if bytes_consumed:
self._gdb_read_buffer = bytearray(self._gdb_read_buffer[bytes_consumed:])
def _append_target_read_buffer(self, data: bytes):
self._unescape_and_append(self._target_read_buffer, data)
bytes_consumed = self._log_rsp_bytes(f"{self.target_color}TARGET :", self._target_read_buffer)
if bytes_consumed:
self._target_read_buffer = bytearray(self._target_read_buffer[bytes_consumed:])
@staticmethod
def _unescape_and_append(buffer: bytearray, data: bytes):
# RSP uses '}' as an escape character. Escapes are processed in this method
# before adding to the read buffer to simplify parsing.
if not data:
return
# Process any left over escapes.
if buffer and buffer[-1] == GDBPacket.RSP_ESCAPE_CHAR:
buffer[-1] = data[0] ^ 0x20
data = data[1:]
        escape_char_index = data.find(GDBPacket.RSP_ESCAPE_CHAR)
        while escape_char_index >= 0:
            if escape_char_index == len(data) - 1:
                # If there are no more characters after the escape char, just add it to the buffer and let it be
                # processed when more data is received.
                break
            if escape_char_index:
                buffer.extend(data[:escape_char_index])
            unescaped = data[escape_char_index + 1] ^ 0x20
            buffer.append(unescaped)
            data = data[escape_char_index + 2 :]
            escape_char_index = data.find(GDBPacket.RSP_ESCAPE_CHAR)  # look for the next escape in what remains
        buffer.extend(data)
def _log_rsp_bytes(self, log_prefix: str, buffer: bytearray) -> int:
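        # Walk the buffer, logging acks ('+'), nacks ('-'), interrupt requests (0x03) and parsed RSP packets,
        # then return how many bytes were fully consumed so the caller can trim its read buffer.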
total_bytes_consumed = 0
pkt = GDBPacket()
buffer_len = len(buffer)
while total_bytes_consumed < buffer_len:
if buffer[0] == ord("+"):
if self.log_acks:
logger.info(f"{log_prefix} <<ack>>")
total_bytes_consumed += 1
buffer = buffer[1:]
continue
if buffer[0] == ord("-"):
if self.log_acks:
logger.info(f"{log_prefix} <<nack>>")
total_bytes_consumed += 1
buffer = buffer[1:]
continue
if buffer[0] == 0x03:
logger.info(f"{log_prefix} <<Interrupt request>>")
total_bytes_consumed += 1
buffer = buffer[1:]
continue
leader = buffer.find(GDBPacket.PACKET_LEADER)
if leader > 0:
logger.warning(
f"{log_prefix} Skipping {leader} non-leader bytes {buffer[:total_bytes_consumed + leader]}"
)
buffer = buffer[leader:]
bytes_consumed = pkt.parse(buffer)
buffer = buffer[bytes_consumed:]
if not bytes_consumed:
break
total_bytes_consumed += bytes_consumed
if pkt.data:
logger.info(f"{log_prefix} Received packet {pkt}")
else:
logger.info(f"{log_prefix} Received empty packet")
if len(buffer):
logger.debug(
f"{log_prefix} After processing: [{len(buffer)}] {buffer}"
)
return total_bytes_consumed
| [((17, 9, 17, 36), 'logging.getLogger', 'logging.getLogger', ({(17, 27, 17, 35): '__name__'}, {}), '(__name__)', False, 'import logging\n'), ((51, 23, 51, 105), 'net.ip_transport.IPTransport', 'ip_transport.IPTransport', ({(51, 48, 51, 74): 'self._on_target_bytes_read', (51, 76, 51, 104): 'f"""Target@{self.target_addr}"""'}, {}), "(self._on_target_bytes_read,\n f'Target@{self.target_addr}')", False, 'from net import ip_transport\n'), ((45, 26, 45, 68), 'socket.create_connection', 'socket.create_connection', ({(45, 51, 45, 67): 'self.target_addr'}, {}), '(self.target_addr)', False, 'import socket\n')] |
usmanwardag/pylayers | pylayers/em/openems/test/Rect_Waveguide.py | 2e8a9bdc993b2aacc92610a9c7edf875c6c7b24a | from openems.openems import *
# A simple simulation
#
# FDTD Simulation Setting
#
F = FDTD()
F.add(Exc(typ='Sinus',f0=100000))
F.add(BoundaryCond(['PMC','PMC','PEC','PEC','MUR','MUR']))
#
# CSX (Geometry setting)
#
C = CSX()
# The Box is added as a property
C.add(Excitation('excitation'),p=Box(P1=[-10,-10,0],P2=[10,10,0],Pr=0))
C.add(DumpBox('Et'),p=Box(P1=[-10,0,-10],P2=[10,0,30],Pr=0))
C.add(RectilinearGrid(np.arange(-10,11,1),np.arange(-10,11,1),np.arange(-10,11,1)))
C.add(Polyhedron())
S = OpenEMS(F,C)
S.save(filename='RectWaveguide.xml')
#gnd = Matter('gnd')
#sphere = Matter('sphere')
#patch = Matter('patch')
#substrate = Matter('substrate',typ='Ma',Epsilon="3.38",Kappa="0.00046")
#cdgsht = Matter('copper',typ='Cs',conductivity="56e6",thickness="40e-6")
#b1 = Box(P1=[0,0,0],P2=[100,100,200],Pr=0)
#b2 = Box(P1=[0,0,0],P2=[10,20,30],Pr=10)
#b4 = Box(P1=[-10,0,-10],P2=[10,0,30],Pr=0)
#s1 = Sphere(P=[0,0,0],R=100,Pr=50)
#dump = DumpBox()
#C.add(gnd)
#C.add(patch)
#C.add(substrate)
#C.add(sphere)
#C.add(cdgsht)
#C.add(exc)
#C.add(dump)
#C.set('gnd',b1)
#C.set('gnd',b2)
#C.set('sphere',s1)
#C.set('copper',b1)
#C.set('copper',b2)
#C.set('Et',b4)
#C.save(filename='structure.xml')
##C.AddBox(prop='ConductingSheet',name='copper',P1=[0,-50,200],P2=[1000,50,200],Pri=10)
##C.AddCylinder(prop='Metal',name='cyl0',P1=[0,0,0],P2=[0,0,100],Rad=50,Pri=10)
#
| [] |
vd1371/CBSA | DataPreprocessing/_segment_Y.py | f2b3f03c91ccd9ec02c2331f43573d7d6e72fd47 | import numpy as np
def segment_Y(Y, **params):
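    # Binarise Y at the requested quantile: values above it become 1, the rest 0.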
Y_segments = params.get("Y_segments")
Y_quantile = params.get("Y_quantile")
print("segmenting Y")
Y = Y.values.reshape(-1)
Y_quantile = np.quantile(Y, Y_quantile, axis = 0)
bigger_mask = (Y > Y_quantile).copy()
smaller_mask = (Y <= Y_quantile).copy()
Y[bigger_mask] = 1
Y[smaller_mask] = 0
Y = Y.astype(int)
return Y | [((10, 14, 10, 50), 'numpy.quantile', 'np.quantile', (), '', True, 'import numpy as np\n')] |
Puzzlebox-IMT/Puzzlebox | WifiEnigma/BattleAI/question.py | 6b80e22a4aee3228140692bd6352de18b2f6a96d | import mysql.connector
import random
from voice import synthetize_voice, delete_wav
def AllQuestionAI(id_theme):
i = 0
    # CONNECT TO THE DATABASE
conn = mysql.connector.connect(host="localhost",
user="phpmyadmin", password="Vince@Mysql1997",
database="Puzzlebox")
cursor = conn.cursor()
    # RUN THE QUERY AGAINST THE DATABASE
query = ("SELECT * FROM Question INNER JOIN themes_questions ON Question.ID_QUESTION = themes_questions.ID_QUESTION WHERE ID_THEME=%s")
cursor.execute(query, (id_theme, ))
    # RETRIEVE THE RESULTS
rows = cursor.fetchall()
if rows:
for line in rows:
i += 1
enonce = line[1]
proposition1 = line[2]
proposition2 = line[3]
proposition3 = line[4]
proposition4 = line[5]
reponse = line[5]
print("*******************************************************************************")
print(" QUESTION ",i," ")
print("*******************************************************************************")
print("ENONCE : ", enonce)
print("PROPOSITION 1 : ", proposition1)
print("PROPOSITION 2 : ", proposition2)
print("PROPOSITION 3 : ", proposition3)
print("PROPOSITION 4 : ", proposition4)
print("REPONSE : ", reponse)
else:
print("Ce thème ne contient pas de questions")
def questionAI(id_theme):
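    # Pick one random question for the given theme and return [statement, prop1, prop2, prop3, prop4, answer].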
i = 0
    # CONNECT TO THE DATABASE
conn = mysql.connector.connect(host="localhost",
user="phpmyadmin", password="Vince@Mysql1997",
database="Puzzlebox")
cursor = conn.cursor()
    # RUN THE QUERY AGAINST THE DATABASE
query = ("SELECT * FROM Question INNER JOIN themes_questions ON Question.ID_QUESTION = themes_questions.ID_QUESTION WHERE ID_THEME=%s")
cursor.execute(query, (id_theme, ))
    # RETRIEVE THE RESULTS
rows = cursor.fetchall()
if rows:
nb_rows = len(rows)
num_question = random.randint(1, nb_rows)
        # The list index starts at zero, so shift the number down by one
num_question = num_question - 1
question = rows[num_question]
        result = [] # List which stores the query results
        # RETRIEVE THE TUPLE FIELDS
result.append(question[1])
result.append(question[2])
result.append(question[3])
result.append(question[4])
result.append(question[5])
result.append(question[5]) #This last one is the answer
print("*******************************************************************************")
print(" QUESTION ",num_question+1," ")
print("*******************************************************************************")
print("ENONCE : ", result[0])
print("PROPOSITION 1 : ", result[1])
print("PROPOSITION 2 : ", result[2])
print("PROPOSITION 3 : ", result[3])
print("PROPOSITION 4 : ", result[4])
print("REPONSE : ", result[5])
#complete_question = ''.join(complete_question) #Convert tuple into string
return result
else:
print("Ce thème ne contient pas de questions")
def tell_question(question):
synthetize_voice(question[0])
for i in range(1,5) :
num_prop = "Proposition {} ".format(i)
num_prop = ''.join(num_prop)
line = ''.join(question[i])
line = num_prop + line
synthetize_voice(line)
delete_wav()
def quiz():
counter = 1
while(counter <= 5):
questionAI(1)
if (__name__ == '__main__'):
result = questionAI(1)
tell_question(result)
| [((101, 4, 101, 33), 'voice.synthetize_voice', 'synthetize_voice', ({(101, 21, 101, 32): 'question[0]'}, {}), '(question[0])', False, 'from voice import synthetize_voice, delete_wav\n'), ((109, 4, 109, 16), 'voice.delete_wav', 'delete_wav', ({}, {}), '()', False, 'from voice import synthetize_voice, delete_wav\n'), ((65, 22, 65, 48), 'random.randint', 'random.randint', ({(65, 37, 65, 38): '1', (65, 40, 65, 47): 'nb_rows'}, {}), '(1, nb_rows)', False, 'import random\n'), ((108, 8, 108, 30), 'voice.synthetize_voice', 'synthetize_voice', ({(108, 25, 108, 29): 'line'}, {}), '(line)', False, 'from voice import synthetize_voice, delete_wav\n')] |
IanHawke/toy-amr | toy-amr/flux_functions.py | 1f616791993ccd83cc6034616c08e09fa4ba310d | import numpy
def lax_friedrichs(cons_minus, cons_plus, simulation, tl):
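    # Local Lax-Friedrichs (Rusanov) flux with wave-speed estimate alpha = dx/dt:
    #   F_{i-1/2} = 0.5 * (f(U_L) + f(U_R) - alpha * (U_R - U_L)),
    # where U_L is the right-edge state of the left cell and U_R the left-edge state of the right cell.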
alpha = tl.grid.dx / tl.dt
flux = numpy.zeros_like(cons_minus)
prim_minus, aux_minus = simulation.model.cons2all(cons_minus, tl.prim)
prim_plus, aux_plus = simulation.model.cons2all(cons_plus , tl.prim)
f_minus = simulation.model.flux(cons_minus, prim_minus, aux_minus)
f_plus = simulation.model.flux(cons_plus, prim_plus, aux_plus )
flux[:, 1:-1] = 0.5 * ( (f_plus[:,0:-2] + f_minus[:,1:-1]) + \
alpha * (cons_plus[:,0:-2] - cons_minus[:,1:-1]) )
return flux
def upwind(cons_minus, cons_plus, simulation, patch):
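    # Godunov-type flux: solve the Riemann problem at each interface and use the resulting flux.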
flux = numpy.zeros_like(cons_minus)
flux[:, 1:-1] = simulation.model.riemann_problem_flux(cons_plus [:, 0:-2],
cons_minus[:, 1:-1])
return flux
| [((5, 11, 5, 39), 'numpy.zeros_like', 'numpy.zeros_like', ({(5, 28, 5, 38): 'cons_minus'}, {}), '(cons_minus)', False, 'import numpy\n'), ((17, 11, 17, 39), 'numpy.zeros_like', 'numpy.zeros_like', ({(17, 28, 17, 38): 'cons_minus'}, {}), '(cons_minus)', False, 'import numpy\n')] |
vmagamedov/pi | pi/auth.py | 6ee98af69b757d96aa4eddc32513309e0fe05d1d | import re
import json
import base64
import codecs
import os.path
import asyncio
import subprocess
_PREFIX = 'docker-credential-'
def read_config():
path = os.path.expanduser('~/.docker/config.json')
if not os.path.exists(path):
return {}
with codecs.open(path, encoding='utf-8') as f:
json_data = f.read()
return json.loads(json_data)
async def _read_creds(creds_store, server):
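    # Ask the external Docker credential helper (docker-credential-<store> get) for this registry's credentials.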
if not re.match(r'^\w+$', creds_store, re.ASCII):
raise ValueError('Invalid credsStore: {!r}'.format(creds_store))
proc = await asyncio.create_subprocess_exec(
_PREFIX + creds_store, 'get',
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
stdout, stderr = await proc.communicate(server.encode('ascii'))
if proc.returncode != 0:
return None
else:
data = json.loads(stdout)
return {
'Username': data['Username'],
'Password': data['Secret'],
'ServerAddress': server,
}
def _decode_auth(auth_data, server):
auth_data_decoded = base64.b64decode(auth_data).decode('utf-8')
username, _, password = auth_data_decoded.partition(':')
return {
'Username': username,
'Password': password,
'ServerAddress': server,
}
async def resolve_auth(config, server):
config_auths = config.get('auths')
if config_auths is None:
return None
server_auth = config_auths.get(server)
if server_auth is not None:
auth_data = server_auth.get('auth')
if auth_data is not None:
return _decode_auth(auth_data, server)
creds_store = config.get('credsStore')
if creds_store is not None:
return await _read_creds(creds_store, server)
return None
def server_name(image_name):
registry, _, name = image_name.partition('/')
if not name:
return 'docker.io'
else:
return registry
def encode_header(auth):
json_data = json.dumps(auth)
return base64.urlsafe_b64encode(json_data.encode('ascii'))
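# Example usage (sketch only; the image name is illustrative and the header name assumes the
# Docker Engine API's X-Registry-Auth convention):
#   config = read_config()
#   auth = await resolve_auth(config, server_name('registry.example.com/app'))
#   headers = {'X-Registry-Auth': encode_header(auth).decode('ascii')} if auth else {}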
| [((19, 11, 19, 32), 'json.loads', 'json.loads', ({(19, 22, 19, 31): 'json_data'}, {}), '(json_data)', False, 'import json\n'), ((81, 16, 81, 32), 'json.dumps', 'json.dumps', ({(81, 27, 81, 31): 'auth'}, {}), '(auth)', False, 'import json\n'), ((17, 9, 17, 44), 'codecs.open', 'codecs.open', (), '', False, 'import codecs\n'), ((23, 11, 23, 52), 're.match', 're.match', ({(23, 20, 23, 28): '"""^\\\\w+$"""', (23, 30, 23, 41): 'creds_store', (23, 43, 23, 51): 're.ASCII'}, {}), "('^\\\\w+$', creds_store, re.ASCII)", False, 'import re\n'), ((26, 17, 31, 5), 'asyncio.create_subprocess_exec', 'asyncio.create_subprocess_exec', (), '', False, 'import asyncio\n'), ((36, 15, 36, 33), 'json.loads', 'json.loads', ({(36, 26, 36, 32): 'stdout'}, {}), '(stdout)', False, 'import json\n'), ((45, 24, 45, 51), 'base64.b64decode', 'base64.b64decode', ({(45, 41, 45, 50): 'auth_data'}, {}), '(auth_data)', False, 'import base64\n')] |
ACWI-SOGW/ngwmn_monitoring_locations_etl | etl/transform.py | e9ebfebbc5fa349a58669fb1d9944786f26729c3 | """
Transform the data into a form that
works with the WELL_REGISTRY_STG table.
"""
import re
def mapping_factory(mapping):
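    # Build a small lookup: map a value (case-insensitively) to its numeric code, returning None when absent.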
def map_func(key):
if key is not None:
ora_val = mapping.get(key.lower())
else:
ora_val = None
return ora_val
return map_func
WELL_TYPES = {
'surveillance': 1,
'trend': 2,
'special': 3,
}
map_well_type = mapping_factory(WELL_TYPES)
WELL_PURPOSE = {
'dedicated monitoring/observation': 1,
'other': 2
}
map_well_purpose = mapping_factory(WELL_PURPOSE)
QW_WELL_CHARS = {
'background': 1,
'suspected/anticipated changes': 2,
'known changes': 3
}
map_qw_well_chars = mapping_factory(QW_WELL_CHARS)
WL_WELL_CHARS = {
'background': 1,
'suspected/anticipated changes': 2,
'known changes': 3,
'unknown': 999
}
map_wl_well_chars = mapping_factory(WL_WELL_CHARS)
def to_flag(flag):
return '1' if flag else '0'
def transform_mon_loc_data(ml_data):
"""
Map the fields from the API JSON response to
the fields in the WELL_REGISTRY_STG table with
appropriate foreign key values.
"""
mapped_data = dict()
mapped_data['AGENCY_CD'] = ml_data['agency']['agency_cd']
mapped_data['AGENCY_NM'] = ml_data['agency']['agency_nm']
mapped_data['AGENCY_MED'] = ml_data['agency']['agency_med']
mapped_data['SITE_NO'] = ml_data['site_no']
mapped_data['SITE_NAME'] = ml_data['site_name']
mapped_data['DEC_LAT_VA'] = ml_data['dec_lat_va']
mapped_data['DEC_LONG_VA'] = ml_data['dec_long_va']
mapped_data['HORZ_DATUM'] = ml_data['horizontal_datum']
mapped_data['ALT_VA'] = ml_data['alt_va']
mapped_data['ALT_DATUM_CD'] = ml_data['altitude_datum']
try:
mapped_data['NAT_AQUIFER_CD'] = ml_data['nat_aqfr']['nat_aqfr_cd']
mapped_data['NAT_AQFR_DESC'] = ml_data['nat_aqfr']['nat_aqfr_desc']
except (AttributeError, KeyError, TypeError):
mapped_data['NAT_AQUIFER_CD'] = None
mapped_data['NAT_AQFR_DESC'] = None
mapped_data['LOCAL_AQUIFER_NAME'] = ml_data['local_aquifer_name']
mapped_data['AQFR_CHAR'] = ml_data['aqfr_type']
mapped_data['QW_SN_FLAG'] = to_flag(ml_data['qw_sn_flag'])
mapped_data['QW_BASELINE_FLAG'] = to_flag(ml_data['qw_baseline_flag'])
mapped_data['QW_WELL_CHARS'] = map_qw_well_chars(ml_data['qw_well_chars'])
mapped_data['QW_WELL_PURPOSE'] = map_well_purpose(ml_data['qw_well_purpose'])
mapped_data['QW_SYS_NAME'] = ml_data['qw_network_name']
mapped_data['WL_SN_FLAG'] = to_flag(ml_data['wl_sn_flag'])
mapped_data['WL_BASELINE_FLAG'] = to_flag(ml_data['wl_baseline_flag'])
mapped_data['WL_WELL_CHARS'] = map_wl_well_chars(ml_data['wl_well_chars'])
mapped_data['WL_WELL_PURPOSE'] = map_well_purpose(ml_data['wl_well_purpose'])
mapped_data['WL_SYS_NAME'] = ml_data['wl_network_name']
mapped_data['DATA_PROVIDER'] = None
mapped_data['DISPLAY_FLAG'] = to_flag(ml_data['display_flag'])
mapped_data['WL_DATA_PROVIDER'] = None
mapped_data['QW_DATA_PROVIDER'] = None
mapped_data['LITH_DATA_PROVIDER'] = None
mapped_data['CONST_DATA_PROVIDER'] = None
mapped_data['WELL_DEPTH'] = ml_data['well_depth']
mapped_data['LINK'] = ml_data['link']
mapped_data['INSERT_DATE'] = ml_data['insert_date']
mapped_data['UPDATE_DATE'] = ml_data['update_date']
mapped_data['WL_WELL_PURPOSE_NOTES'] = ml_data['wl_well_purpose_notes']
mapped_data['QW_WELL_PURPOSE_NOTES'] = ml_data['qw_well_purpose_notes']
mapped_data['INSERT_USER_ID'] = ml_data['insert_user']
mapped_data['UPDATE_USER_ID'] = ml_data['update_user']
mapped_data['WL_WELL_TYPE'] = map_well_type(ml_data['wl_well_type'])
mapped_data['QW_WELL_TYPE'] = map_well_type(ml_data['qw_well_type'])
mapped_data['LOCAL_AQUIFER_CD'] = None
mapped_data['REVIEW_FLAG'] = None
try:
mapped_data['STATE_CD'] = ml_data['state']['state_cd']
except (AttributeError, KeyError, TypeError):
mapped_data['STATE_CD'] = None
try:
mapped_data['COUNTY_CD'] = ml_data['county']['county_cd']
except (AttributeError, KeyError, TypeError):
mapped_data['COUNTY_CD'] = None
try:
mapped_data['COUNTRY_CD'] = ml_data['country']['country_cd']
except (AttributeError, KeyError, TypeError):
mapped_data['COUNTRY_CD'] = None
mapped_data['WELL_DEPTH_UNITS'] = ml_data['well_depth_units']['unit_id'] if ml_data['well_depth_units'] else None
mapped_data['ALT_UNITS'] = ml_data['altitude_units']['unit_id'] if ml_data['altitude_units'] else None
mapped_data['SITE_TYPE'] = ml_data['site_type']
mapped_data['HORZ_METHOD'] = ml_data['horz_method']
mapped_data['HORZ_ACY'] = ml_data['horz_acy']
mapped_data['ALT_METHOD'] = ml_data['alt_method']
mapped_data['ALT_ACY'] = ml_data['alt_acy']
return mapped_data
def date_format(mapped_data):
# fix missing fractions of a second
if re.match(r".*:\d\dZ$", mapped_data['INSERT_DATE']):
mapped_data['INSERT_DATE'] = mapped_data['INSERT_DATE'][:-1] + ".0Z"
if re.match(r".*:\d\dZ$", mapped_data['UPDATE_DATE']):
mapped_data['UPDATE_DATE'] = mapped_data['UPDATE_DATE'][:-1] + ".0Z"
| [((132, 7, 132, 57), 're.match', 're.match', ({(132, 16, 132, 28): '""".*:\\\\d\\\\dZ$"""', (132, 30, 132, 56): "mapped_data['INSERT_DATE']"}, {}), "('.*:\\\\d\\\\dZ$', mapped_data['INSERT_DATE'])", False, 'import re\n'), ((134, 7, 134, 57), 're.match', 're.match', ({(134, 16, 134, 28): '""".*:\\\\d\\\\dZ$"""', (134, 30, 134, 56): "mapped_data['UPDATE_DATE']"}, {}), "('.*:\\\\d\\\\dZ$', mapped_data['UPDATE_DATE'])", False, 'import re\n')] |
shamilison/django-reporter-pro | django_reporter_pro/config/model_configs.py | 0c6f60bbae939d318e7aafaec83613d2768a4f63 | # Created by shamilsakib at 04/10/20
BASE_MODEL = None | [] |
TEAlab/DPSpark | DPSparkImplementations/paf_kernels.py | 4d53ee13b03e2e12119c28fe2b2241ad20231eac | __author__ = "Zafar Ahmad, Mohammad Mahdi Javanmard"
__copyright__ = "Copyright (c) 2019 Tealab@SBU"
__license__ = "MIT"
__version__ = "1.0.0"
__maintainer__ = "Zafar Ahmad"
__email__ = "[email protected]"
__status__ = "Development"
import numpy as np
import numba as nb
'''
Iterative kernels
'''
def update_iter(u_block, x_block, n, I_, J_, K_):
return _update_iter(np.ascontiguousarray(u_block), np.ascontiguousarray(x_block), n, I_, J_, K_)
@nb.jit(nopython=True)
def _update_iter(u_block, x_block, n, I_, J_, K_):
# For testing purposes, rather than passing f_matrix_broadcast, we call this function
def f_matrix(i, j):
return float(i+j)
for k in range(x_block.shape[0]-1, -1, -1):
K = K_*x_block.shape[0]+k
for j in range(x_block.shape[0]-1, -1, -1):
J = J_*x_block.shape[0]+j
for i in range(x_block.shape[0]-1, -1, -1):
I = I_*x_block.shape[0]+i
min1 = min(K-2, n-3)
min2 = min(J-1, n-4)
if ((K < n) and (K >= 3) and (J <= min1) and (J >= I+1) and (I <= min2)):
x_block[i, j] = max(x_block[i, j], u_block[j+1, k] + f_matrix(J+1, min(K, 2*J-I+1)))
return x_block
def funcA_iter(block_info, n):
((I_, J_), x_block) = block_info
return update_iter(x_block, x_block, n, I_, J_, I_)
def funcX_iter(block_info, u_block_info, n):
((I_, J_), x_block) = block_info
((UI_, UJ_), u_block) = u_block_info
return update_iter(u_block, x_block, n, I_, J_, UJ_)
| [((19, 1, 19, 22), 'numba.jit', 'nb.jit', (), '', True, 'import numba as nb\n'), ((18, 24, 18, 53), 'numpy.ascontiguousarray', 'np.ascontiguousarray', ({(18, 45, 18, 52): 'u_block'}, {}), '(u_block)', True, 'import numpy as np\n'), ((18, 55, 18, 84), 'numpy.ascontiguousarray', 'np.ascontiguousarray', ({(18, 76, 18, 83): 'x_block'}, {}), '(x_block)', True, 'import numpy as np\n')] |
terrapain/terrakg | terrakg/rates.py | 90c52ca3b227d2daabd604255e793ac5f536c246 | from terra_sdk.exceptions import LCDResponseError
from terrakg import logger
# Logging
from terrakg.client import ClientContainer
logger = logger.get_logger(__name__)
class Rates:
"""
Access the most recent rates.
"""
def __init__(self, client: ClientContainer):
self.client = client
def get_token_quote_and_fees(self, token_contract: str, pair: str, amount: int = 1000000, reverse: bool = False):
"""
Returns the price for `amount` of the token `pair` (exchange is included in pair).
Set `reverse` to true to get the inverse price.
"""
desc, action, result_key = ("reverse_simulation", "ask_asset", "offer_amount") if reverse else (
"simulation", "offer_asset", "return_amount")
query_msg = {
desc: {
action: {
"amount": str(amount),
"info": {"token": {
"contract_addr": token_contract
}
}
}
}
}
try:
result = self.client.lcd_client.wasm.contract_query(pair, query_msg)
return result[result_key], result['commission_amount']
except LCDResponseError as e:
logger.warning(f"Issue with price query: {e}")
return None
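# Example usage (sketch only; the client wiring and contract addresses are placeholders):
#   rates = Rates(client)
#   quote, commission = rates.get_token_quote_and_fees("<token_contract_addr>", "<pair_contract_addr>")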
| [] |
HonzaKlicpera/Effective-footage-processing-Blender-add-on | src/tracking_module.py | f3faae3fc56a3ef8f2eabba9af8be718e57f4d35 | import bpy
import os, glob
from pathlib import Path
from enum import Enum
from abc import ABC, abstractmethod
import csv
from . import keying_module
def export_tracking_data(self, context):
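    # Export the chosen tracker's frame-to-frame horizontal movement (scaled by the distance multiplier) to <output>/<clip>/<clip>.csv.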
clip = context.space_data.clip
clip_name = os.path.splitext(clip.name)[0]
tracker_name = context.scene.tracking_local.tracker_name
output_path = os.path.join(keying_module.get_abs_output_path(context),clip_name)
keying_module.create_directory(output_path)
file = open(os.path.join(output_path,clip_name+".csv"), "w", newline='')
writer = csv.writer(file, delimiter=',')
multiplier = context.scene.tracking_local.tracking_multiplier
tracker = clip.tracking.tracks.get(tracker_name)
if tracker is not None:
prev = tracker.markers[0].co[0]
for m in tracker.markers:
writer.writerow([(m.co[0] - prev) * multiplier])
prev = m.co[0]
self.report({"INFO"},"TRACKER SUCESSFULLY EXPORTED")
else:
self.report({"ERROR"},"TRACKER NOT FOUND")
file.close()
#----------------------------------------
# PROPERTIES
#----------------------------------------
class TrackingSceneProps(bpy.types.PropertyGroup):
tracker_name: bpy.props.StringProperty \
(
name = "Track name",
description = "Name of the tracker for data export",
)
tracking_multiplier: bpy.props.FloatProperty \
(
name = "Distance multiplier",
description = "The exported tracking distance gets multiplied by this value",
default = 1,
min = 0.0001
)
class TrackingPanel(bpy.types.Panel):
bl_label = "Tracking Panel"
bl_idname = "SCENE_PT_tracking_rendering"
bl_space_type = "CLIP_EDITOR"
bl_region_type = "UI"
bl_context = "render"
def draw(self, context):
layout = self.layout
scene = context.scene
box = layout.box()
box.row().label(text = "Tracking export")
box.row().prop(scene.tracking_local, "tracker_name")
box.row().prop(scene.tracking_local, "tracking_multiplier")
box.row().operator("tracking.export_data")
class TrackingExportDataOp(bpy.types.Operator):
bl_idname = "tracking.export_data"
bl_label = "Export Data"
bl_description = "Export the tracking data of the chosen tracker"
def execute(self, context):
export_tracking_data(self, context)
return {"FINISHED"}
classes = (
TrackingExportDataOp,
TrackingPanel,
TrackingSceneProps
)
def register():
for cls in classes:
bpy.utils.register_class(cls)
bpy.types.Scene.tracking_local = bpy.props.PointerProperty(type=TrackingSceneProps)
def unregister():
for cls in reversed(classes):
bpy.utils.unregister_class(cls)
del bpy.types.Scene.tracking_local | [((18, 13, 18, 44), 'csv.writer', 'csv.writer', (), '', False, 'import csv\n'), ((39, 18, 43, 7), 'bpy.props.StringProperty', 'bpy.props.StringProperty', (), '', False, 'import bpy\n'), ((45, 25, 51, 7), 'bpy.props.FloatProperty', 'bpy.props.FloatProperty', (), '', False, 'import bpy\n'), ((90, 37, 90, 87), 'bpy.props.PointerProperty', 'bpy.props.PointerProperty', (), '', False, 'import bpy\n'), ((12, 16, 12, 43), 'os.path.splitext', 'os.path.splitext', ({(12, 33, 12, 42): 'clip.name'}, {}), '(clip.name)', False, 'import os, glob\n'), ((17, 16, 17, 58), 'os.path.join', 'os.path.join', ({(17, 29, 17, 40): 'output_path', (17, 41, 17, 57): "clip_name + '.csv'"}, {}), "(output_path, clip_name + '.csv')", False, 'import os, glob\n'), ((89, 8, 89, 37), 'bpy.utils.register_class', 'bpy.utils.register_class', ({(89, 33, 89, 36): 'cls'}, {}), '(cls)', False, 'import bpy\n'), ((94, 8, 94, 39), 'bpy.utils.unregister_class', 'bpy.utils.unregister_class', ({(94, 35, 94, 38): 'cls'}, {}), '(cls)', False, 'import bpy\n')] |
andydandy74/ClockworkForDynamo | nodes/2.x/python/View.ViewTemplate.py | bd4ac2c13956a02352a458d01096a35b7258d9f2 | import clr
clr.AddReference('RevitAPI')
from Autodesk.Revit.DB import *
def GetViewTemplate(view):
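    # Return the view's template element, or None when the view has no template applied.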
if not view: return None
elif hasattr(view, "ViewTemplateId"):
if view.ViewTemplateId.IntegerValue == -1: return None
else: return view.Document.GetElement(view.ViewTemplateId)
else: return None
views = UnwrapElement(IN[0])
if isinstance(IN[0], list): OUT = [GetViewTemplate(x) for x in views]
else: OUT = GetViewTemplate(views) | [((2, 0, 2, 28), 'clr.AddReference', 'clr.AddReference', ({(2, 17, 2, 27): '"""RevitAPI"""'}, {}), "('RevitAPI')", False, 'import clr\n')] |
bohdana-kuzmenko/incubator-dlab | infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_prepare.py | d052709450e7916860c7dd191708d5524cf44c1e | #!/usr/bin/python
# *****************************************************************************
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# ******************************************************************************
import json
import time
from fabric.api import *
from dlab.fab import *
from dlab.meta_lib import *
from dlab.actions_lib import *
import sys
import os
import uuid
import logging
from Crypto.PublicKey import RSA
if __name__ == "__main__":
local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'],
os.environ['request_id'])
local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
logging.basicConfig(format='%(levelname)-8s [%(asctime)s] %(message)s',
level=logging.INFO,
filename=local_log_filepath)
try:
os.environ['exploratory_name']
except:
os.environ['exploratory_name'] = ''
if os.path.exists('/response/.dataproc_creating_{}'.format(os.environ['exploratory_name'])):
time.sleep(30)
print('Generating infrastructure names and tags')
dataproc_conf = dict()
try:
dataproc_conf['exploratory_name'] = (os.environ['exploratory_name']).lower().replace('_', '-')
except:
dataproc_conf['exploratory_name'] = ''
try:
dataproc_conf['computational_name'] = (os.environ['computational_name']).lower().replace('_', '-')
except:
dataproc_conf['computational_name'] = ''
dataproc_conf['service_base_name'] = (os.environ['conf_service_base_name']).lower().replace('_', '-')
dataproc_conf['edge_user_name'] = (os.environ['edge_user_name']).lower().replace('_', '-')
dataproc_conf['key_name'] = os.environ['conf_key_name']
dataproc_conf['key_path'] = '{0}{1}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
dataproc_conf['region'] = os.environ['gcp_region']
dataproc_conf['zone'] = os.environ['gcp_zone']
dataproc_conf['subnet'] = '{0}-{1}-subnet'.format(dataproc_conf['service_base_name'], dataproc_conf['edge_user_name'])
dataproc_conf['cluster_name'] = '{0}-{1}-des-{2}-{3}'.format(dataproc_conf['service_base_name'], dataproc_conf['edge_user_name'],
dataproc_conf['exploratory_name'], dataproc_conf['computational_name'])
dataproc_conf['cluster_tag'] = '{0}-{1}-ps'.format(dataproc_conf['service_base_name'], dataproc_conf['edge_user_name'])
dataproc_conf['bucket_name'] = '{}-{}-bucket'.format(dataproc_conf['service_base_name'], dataproc_conf['edge_user_name'])
dataproc_conf['release_label'] = os.environ['dataproc_version']
dataproc_conf['cluster_labels'] = {
os.environ['notebook_instance_name']: "not-configured",
"name": dataproc_conf['cluster_name'],
"sbn": dataproc_conf['service_base_name'],
"user": dataproc_conf['edge_user_name'],
"notebook_name": os.environ['notebook_instance_name'],
"product": "dlab",
"computational_name": dataproc_conf['computational_name']
}
dataproc_conf['dataproc_service_account_name'] = '{0}-{1}-ps'.format(dataproc_conf['service_base_name'],
dataproc_conf['edge_user_name'])
service_account_email = "{}@{}.iam.gserviceaccount.com".format(dataproc_conf['dataproc_service_account_name'],
os.environ['gcp_project_id'])
dataproc_conf['edge_instance_hostname'] = '{0}-{1}-edge'.format(dataproc_conf['service_base_name'], dataproc_conf['edge_user_name'])
dataproc_conf['dlab_ssh_user'] = os.environ['conf_os_user']
edge_status = GCPMeta().get_instance_status(dataproc_conf['edge_instance_hostname'])
if edge_status != 'RUNNING':
logging.info('ERROR: Edge node is unavailable! Aborting...')
print('ERROR: Edge node is unavailable! Aborting...')
ssn_hostname = GCPMeta().get_private_ip_address(dataproc_conf['service_base_name'] + '-ssn')
put_resource_status('edge', 'Unavailable', os.environ['ssn_dlab_path'], os.environ['conf_os_user'], ssn_hostname)
append_result("Edge node is unavailable")
sys.exit(1)
print("Will create exploratory environment with edge node as access point as following: ".format(json.dumps(dataproc_conf, sort_keys=True, indent=4, separators=(',', ': '))))
logging.info(json.dumps(dataproc_conf))
local('touch /response/.dataproc_creating_{}'.format(os.environ['exploratory_name']))
local("echo Waiting for changes to propagate; sleep 10")
dataproc_cluster = json.loads(open('/root/templates/dataengine-service_cluster.json').read().decode('utf-8-sig'))
dataproc_cluster['projectId'] = os.environ['gcp_project_id']
dataproc_cluster['clusterName'] = dataproc_conf['cluster_name']
dataproc_cluster['labels'] = dataproc_conf['cluster_labels']
dataproc_cluster['config']['configBucket'] = dataproc_conf['bucket_name']
dataproc_cluster['config']['gceClusterConfig']['serviceAccount'] = service_account_email
dataproc_cluster['config']['gceClusterConfig']['zoneUri'] = dataproc_conf['zone']
dataproc_cluster['config']['gceClusterConfig']['subnetworkUri'] = dataproc_conf['subnet']
dataproc_cluster['config']['masterConfig']['machineTypeUri'] = os.environ['dataproc_master_instance_type']
dataproc_cluster['config']['workerConfig']['machineTypeUri'] = os.environ['dataproc_slave_instance_type']
dataproc_cluster['config']['masterConfig']['numInstances'] = int(os.environ['dataproc_master_count'])
dataproc_cluster['config']['workerConfig']['numInstances'] = int(os.environ['dataproc_slave_count'])
if int(os.environ['dataproc_preemptible_count']) != 0:
dataproc_cluster['config']['secondaryWorkerConfig']['numInstances'] = int(os.environ['dataproc_preemptible_count'])
else:
del dataproc_cluster['config']['secondaryWorkerConfig']
dataproc_cluster['config']['softwareConfig']['imageVersion'] = dataproc_conf['release_label']
ssh_user_pubkey = open(os.environ['conf_key_dir'] + os.environ['edge_user_name'] + '.pub').read()
key = RSA.importKey(open(dataproc_conf['key_path'], 'rb').read())
ssh_admin_pubkey = key.publickey().exportKey("OpenSSH")
dataproc_cluster['config']['gceClusterConfig']['metadata']['ssh-keys'] = '{0}:{1}\n{0}:{2}'.format(dataproc_conf['dlab_ssh_user'], ssh_user_pubkey, ssh_admin_pubkey)
dataproc_cluster['config']['gceClusterConfig']['tags'][0] = dataproc_conf['cluster_tag']
try:
logging.info('[Creating Dataproc Cluster]')
print('[Creating Dataproc Cluster]')
params = "--region {0} --bucket {1} --params '{2}'".format(dataproc_conf['region'], dataproc_conf['bucket_name'], json.dumps(dataproc_cluster))
try:
local("~/scripts/{}.py {}".format('dataengine-service_create', params))
except:
traceback.print_exc()
raise Exception
keyfile_name = "/root/keys/{}.pem".format(dataproc_conf['key_name'])
local('rm /response/.dataproc_creating_{}'.format(os.environ['exploratory_name']))
except Exception as err:
print('Error: {0}'.format(err))
append_result("Failed to create Dataproc Cluster.", str(err))
local('rm /response/.dataproc_creating_{}'.format(os.environ['exploratory_name']))
sys.exit(1)
| [((41, 4, 43, 52), 'logging.basicConfig', 'logging.basicConfig', (), '', False, 'import logging\n'), ((49, 8, 49, 22), 'time.sleep', 'time.sleep', ({(49, 19, 49, 21): '(30)'}, {}), '(30)', False, 'import time\n'), ((91, 8, 91, 68), 'logging.info', 'logging.info', ({(91, 21, 91, 67): '"""ERROR: Edge node is unavailable! Aborting..."""'}, {}), "('ERROR: Edge node is unavailable! Aborting...')", False, 'import logging\n'), ((96, 8, 96, 19), 'sys.exit', 'sys.exit', ({(96, 17, 96, 18): '(1)'}, {}), '(1)', False, 'import sys\n'), ((99, 17, 99, 42), 'json.dumps', 'json.dumps', ({(99, 28, 99, 41): 'dataproc_conf'}, {}), '(dataproc_conf)', False, 'import json\n'), ((128, 8, 128, 51), 'logging.info', 'logging.info', ({(128, 21, 128, 50): '"""[Creating Dataproc Cluster]"""'}, {}), "('[Creating Dataproc Cluster]')", False, 'import logging\n'), ((98, 101, 98, 176), 'json.dumps', 'json.dumps', (), '', False, 'import json\n'), ((130, 122, 130, 150), 'json.dumps', 'json.dumps', ({(130, 133, 130, 149): 'dataproc_cluster'}, {}), '(dataproc_cluster)', False, 'import json\n'), ((144, 8, 144, 19), 'sys.exit', 'sys.exit', ({(144, 17, 144, 18): '(1)'}, {}), '(1)', False, 'import sys\n')] |
mattias-lundell/aoc2021 | 02.py | 32bd41446d963c5788d4614106405be65de81bcd |
test = """forward 5
down 5
forward 8
up 3
down 8
forward 2
"""
def part1(lines):
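    # Part 1: 'forward' increases the horizontal position, 'down'/'up' change the depth; the answer is h * d.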
h = 0
d = 0
for line in lines:
direction, delta = line.split()
delta = int(delta)
if direction == 'forward':
h += delta
elif direction == 'down':
d += delta
elif direction == 'up':
d -= delta
print(h*d)
def part2(lines):
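    # Part 2: 'down'/'up' change the aim, 'forward' moves horizontally and dives by aim * delta; the answer is h * d.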
h = 0
d = 0
a = 0
for line in lines:
direction, delta = line.split()
delta = int(delta)
print(direction, delta)
if direction == 'forward':
h += delta
d += (delta * a)
elif direction == 'down':
a += delta
elif direction == 'up':
a -= delta
print(h*d)
if __name__ == '__main__':
part1(test.splitlines())
part1(open('in02.txt').readlines())
part2(test.splitlines())
part2(open('in02.txt').readlines())
| [] |
ollc-code/django-back | associations/migrations/0001_initial.py | 205f3adc61f9e62c88dfcc170999cef495cebed7 | # Generated by Django 3.1.3 on 2020-11-09 08:56
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Associations',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('association_name', models.CharField(max_length=100)),
('incharge', models.CharField(max_length=100)),
('about', models.CharField(max_length=500)),
('contacts', models.CharField(max_length=300)),
],
),
]
| [((17, 23, 17, 112), 'django.db.models.AutoField', 'models.AutoField', (), '', False, 'from django.db import migrations, models\n'), ((18, 37, 18, 69), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import migrations, models\n'), ((19, 29, 19, 61), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import migrations, models\n'), ((20, 26, 20, 58), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import migrations, models\n'), ((21, 29, 21, 61), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import migrations, models\n')] |
seignovert/pyvims | tests/pds/test_times.py | a70b5b9b8bc5c37fa43b7db4d15407f312a31849 | """Test PDS times modules."""
from datetime import datetime as dt
from pyvims.pds.times import (cassini2utc, cassini_time, dt_date, dt_doy, dt_iso,
dyear, pds_folder, pds_time, utc2cassini)
from pytest import approx, raises
def test_dt_iso():
"""Test parsing ISO time pattern."""
assert str(dt_iso('2005-02-14T18:02:29.123')) == '2005-02-14 18:02:29.123000+00:00'
assert str(dt_iso('2005-02-14 18:02:29')) == '2005-02-14 18:02:29+00:00'
assert str(dt_iso('2005-02-14:18:02')) == '2005-02-14 18:02:00+00:00'
assert str(dt_iso('2005-02-14')) == '2005-02-14 00:00:00+00:00'
times = dt_iso('from 2005-02-14T18:02:29 to 2005-02-14T18:03')
assert len(times) == 2
assert str(times[0]) == '2005-02-14 18:02:29+00:00'
assert str(times[1]) == '2005-02-14 18:03:00+00:00'
with raises(ValueError):
_ = dt_iso('2005-045')
def test_dt_doy():
"""Test parsing DOY time pattern."""
assert str(dt_doy('2005-045T18:02:29.123')) == '2005-02-14 18:02:29.123000+00:00'
assert str(dt_doy('2005-045 18:02:29')) == '2005-02-14 18:02:29+00:00'
assert str(dt_doy('2005-045:18:02')) == '2005-02-14 18:02:00+00:00'
assert str(dt_doy('2005-045')) == '2005-02-14 00:00:00+00:00'
times = dt_doy('from 2005-045T18:02:29 to 2005-045T18:03')
assert len(times) == 2
assert str(times[0]) == '2005-02-14 18:02:29+00:00'
assert str(times[1]) == '2005-02-14 18:03:00+00:00'
with raises(ValueError):
_ = dt_doy('2005-02-14')
def test_dt_date():
"""Test date pattern."""
assert str(dt_date('Feb 14, 2005')) == '2005-02-14 00:00:00+00:00'
assert str(dt_date('Febr 14, 2005')) == '2005-02-14 00:00:00+00:00'
assert str(dt_date('Feb 14, 2005', eod=True)) == '2005-02-14 23:59:59+00:00'
assert str(dt_date('to Feb 14, 2005')) == '2005-02-14 23:59:59+00:00'
times = dt_date('from Feb 14, 2005 through March 12, 2006')
assert len(times) == 2
assert str(times[0]) == '2005-02-14 00:00:00+00:00'
assert str(times[1]) == '2006-03-12 23:59:59+00:00'
with raises(ValueError):
_ = dt_date('2005-02-14')
def test_pds_time():
"""Test PDS time parsing."""
assert str(pds_time('May 17, 2007')) == '2007-05-17 00:00:00+00:00'
assert str(pds_time('2010-274T00:00:00')) == '2010-10-01 00:00:00+00:00'
assert str(pds_time('2011-10-01T00:02:04.244')) == '2011-10-01 00:02:04.244000+00:00'
t0, t1 = pds_time('… May 17, 2007 through Jun 30, 2007')
assert str(t0) == '2007-05-17 00:00:00+00:00'
assert str(t1) == '2007-06-30 23:59:59+00:00'
t0, t1 = pds_time('… 2010-274T00:00:00 through 2010-365T23:59:59')
assert str(t0) == '2010-10-01 00:00:00+00:00'
assert str(t1) == '2010-12-31 23:59:59+00:00'
t0, t1 = pds_time('… 2011-10-01T00:02:04.244 through 2011-12-31T12:28:45.128')
assert str(t0) == '2011-10-01 00:02:04.244000+00:00'
assert str(t1) == '2011-12-31 12:28:45.128000+00:00'
t0, t1 = pds_time('2005015T175855_2005016T184233/')
assert str(t0) == '2005-01-15 17:58:55+00:00'
assert str(t1) == '2005-01-16 18:42:33+00:00'
with raises(ValueError):
_ = pds_time('No data available')
def test_cassini_time():
"""Test Cassini time parsing."""
assert cassini_time('v1487096932_1.qub') == 1487096932.0
assert cassini_time(1483230358.172) == 1483230358.172
with raises(ValueError):
_ = cassini_time('v123_1')
with raises(ValueError):
_ = cassini_time(123)
def test_cassini2utc():
"""Test Cassini time to UTC converter."""
assert str(cassini2utc('v1487096932_1')) == '2005-02-14 18:02:29'
assert str(cassini2utc(1483230358.172)) == '2005-01-01 00:00:00'
def test_utc2cassini():
"""Test UTC to Cassini time converter."""
assert utc2cassini('2005-02-14T18:02:29') == approx(1487096932.068, abs=1e-3)
times = utc2cassini('May 17, 2007 through Jun 30, 2007')
assert len(times) == 2
assert times[0] == approx(1558053238.602, abs=1e-3)
assert times[1] == approx(1561941262.879, abs=1e-3)
def test_pds_folder():
"""Test convert PDS folder as string."""
assert pds_folder('2005015T175855') == '2005-015T17:58:55'
assert pds_folder('2005015T175855_2005016T184233/') == \
'2005-015T17:58:55 2005-016T18:42:33'
def test_dyear():
"""Test decimal year."""
assert dyear('2005-01-01') == 2005.0
assert dyear('2005-12-31') == 2005.9973
assert dyear('2004-12-31') == 2004.9973
assert dyear(dt(2005, 1, 1)) == 2005.0
assert dyear(dt(2005, 12, 31)) == 2005.9973
assert dyear(dt(2004, 12, 31)) == 2004.9973
| [((19, 12, 19, 66), 'pyvims.pds.times.dt_iso', 'dt_iso', ({(19, 19, 19, 65): '"""from 2005-02-14T18:02:29 to 2005-02-14T18:03"""'}, {}), "('from 2005-02-14T18:02:29 to 2005-02-14T18:03')", False, 'from pyvims.pds.times import cassini2utc, cassini_time, dt_date, dt_doy, dt_iso, dyear, pds_folder, pds_time, utc2cassini\n'), ((36, 12, 36, 62), 'pyvims.pds.times.dt_doy', 'dt_doy', ({(36, 19, 36, 61): '"""from 2005-045T18:02:29 to 2005-045T18:03"""'}, {}), "('from 2005-045T18:02:29 to 2005-045T18:03')", False, 'from pyvims.pds.times import cassini2utc, cassini_time, dt_date, dt_doy, dt_iso, dyear, pds_folder, pds_time, utc2cassini\n'), ((53, 12, 53, 63), 'pyvims.pds.times.dt_date', 'dt_date', ({(53, 20, 53, 62): '"""from Feb 14, 2005 through March 12, 2006"""'}, {}), "('from Feb 14, 2005 through March 12, 2006')", False, 'from pyvims.pds.times import cassini2utc, cassini_time, dt_date, dt_doy, dt_iso, dyear, pds_folder, pds_time, utc2cassini\n'), ((69, 13, 69, 62), 'pyvims.pds.times.pds_time', 'pds_time', ({(69, 22, 69, 61): '"""… May 17, 2007 through Jun 30, 2007"""'}, {}), "('… May 17, 2007 through Jun 30, 2007')", False, 'from pyvims.pds.times import cassini2utc, cassini_time, dt_date, dt_doy, dt_iso, dyear, pds_folder, pds_time, utc2cassini\n'), ((73, 13, 73, 72), 'pyvims.pds.times.pds_time', 'pds_time', ({(73, 22, 73, 71): '"""… 2010-274T00:00:00 through 2010-365T23:59:59"""'}, {}), "('… 2010-274T00:00:00 through 2010-365T23:59:59')", False, 'from pyvims.pds.times import cassini2utc, cassini_time, dt_date, dt_doy, dt_iso, dyear, pds_folder, pds_time, utc2cassini\n'), ((77, 13, 77, 84), 'pyvims.pds.times.pds_time', 'pds_time', ({(77, 22, 77, 83): '"""… 2011-10-01T00:02:04.244 through 2011-12-31T12:28:45.128"""'}, {}), "('… 2011-10-01T00:02:04.244 through 2011-12-31T12:28:45.128')", False, 'from pyvims.pds.times import cassini2utc, cassini_time, dt_date, dt_doy, dt_iso, dyear, pds_folder, pds_time, utc2cassini\n'), ((81, 13, 81, 55), 'pyvims.pds.times.pds_time', 'pds_time', ({(81, 22, 81, 54): '"""2005015T175855_2005016T184233/"""'}, {}), "('2005015T175855_2005016T184233/')", False, 'from pyvims.pds.times import cassini2utc, cassini_time, dt_date, dt_doy, dt_iso, dyear, pds_folder, pds_time, utc2cassini\n'), ((111, 12, 111, 60), 'pyvims.pds.times.utc2cassini', 'utc2cassini', ({(111, 24, 111, 59): '"""May 17, 2007 through Jun 30, 2007"""'}, {}), "('May 17, 2007 through Jun 30, 2007')", False, 'from pyvims.pds.times import cassini2utc, cassini_time, dt_date, dt_doy, dt_iso, dyear, pds_folder, pds_time, utc2cassini\n'), ((25, 9, 25, 27), 'pytest.raises', 'raises', ({(25, 16, 25, 26): 'ValueError'}, {}), '(ValueError)', False, 'from pytest import approx, raises\n'), ((26, 12, 26, 30), 'pyvims.pds.times.dt_iso', 'dt_iso', ({(26, 19, 26, 29): '"""2005-045"""'}, {}), "('2005-045')", False, 'from pyvims.pds.times import cassini2utc, cassini_time, dt_date, dt_doy, dt_iso, dyear, pds_folder, pds_time, utc2cassini\n'), ((42, 9, 42, 27), 'pytest.raises', 'raises', ({(42, 16, 42, 26): 'ValueError'}, {}), '(ValueError)', False, 'from pytest import approx, raises\n'), ((43, 12, 43, 32), 'pyvims.pds.times.dt_doy', 'dt_doy', ({(43, 19, 43, 31): '"""2005-02-14"""'}, {}), "('2005-02-14')", False, 'from pyvims.pds.times import cassini2utc, cassini_time, dt_date, dt_doy, dt_iso, dyear, pds_folder, pds_time, utc2cassini\n'), ((59, 9, 59, 27), 'pytest.raises', 'raises', ({(59, 16, 59, 26): 'ValueError'}, {}), '(ValueError)', False, 'from pytest import approx, raises\n'), ((60, 12, 60, 33), 
'pyvims.pds.times.dt_date', 'dt_date', ({(60, 20, 60, 32): '"""2005-02-14"""'}, {}), "('2005-02-14')", False, 'from pyvims.pds.times import cassini2utc, cassini_time, dt_date, dt_doy, dt_iso, dyear, pds_folder, pds_time, utc2cassini\n'), ((85, 9, 85, 27), 'pytest.raises', 'raises', ({(85, 16, 85, 26): 'ValueError'}, {}), '(ValueError)', False, 'from pytest import approx, raises\n'), ((86, 12, 86, 41), 'pyvims.pds.times.pds_time', 'pds_time', ({(86, 21, 86, 40): '"""No data available"""'}, {}), "('No data available')", False, 'from pyvims.pds.times import cassini2utc, cassini_time, dt_date, dt_doy, dt_iso, dyear, pds_folder, pds_time, utc2cassini\n'), ((91, 11, 91, 44), 'pyvims.pds.times.cassini_time', 'cassini_time', ({(91, 24, 91, 43): '"""v1487096932_1.qub"""'}, {}), "('v1487096932_1.qub')", False, 'from pyvims.pds.times import cassini2utc, cassini_time, dt_date, dt_doy, dt_iso, dyear, pds_folder, pds_time, utc2cassini\n'), ((92, 11, 92, 39), 'pyvims.pds.times.cassini_time', 'cassini_time', ({(92, 24, 92, 38): '(1483230358.172)'}, {}), '(1483230358.172)', False, 'from pyvims.pds.times import cassini2utc, cassini_time, dt_date, dt_doy, dt_iso, dyear, pds_folder, pds_time, utc2cassini\n'), ((94, 9, 94, 27), 'pytest.raises', 'raises', ({(94, 16, 94, 26): 'ValueError'}, {}), '(ValueError)', False, 'from pytest import approx, raises\n'), ((95, 12, 95, 34), 'pyvims.pds.times.cassini_time', 'cassini_time', ({(95, 25, 95, 33): '"""v123_1"""'}, {}), "('v123_1')", False, 'from pyvims.pds.times import cassini2utc, cassini_time, dt_date, dt_doy, dt_iso, dyear, pds_folder, pds_time, utc2cassini\n'), ((97, 9, 97, 27), 'pytest.raises', 'raises', ({(97, 16, 97, 26): 'ValueError'}, {}), '(ValueError)', False, 'from pytest import approx, raises\n'), ((98, 12, 98, 29), 'pyvims.pds.times.cassini_time', 'cassini_time', ({(98, 25, 98, 28): '123'}, {}), '(123)', False, 'from pyvims.pds.times import cassini2utc, cassini_time, dt_date, dt_doy, dt_iso, dyear, pds_folder, pds_time, utc2cassini\n'), ((109, 11, 109, 45), 'pyvims.pds.times.utc2cassini', 'utc2cassini', ({(109, 23, 109, 44): '"""2005-02-14T18:02:29"""'}, {}), "('2005-02-14T18:02:29')", False, 'from pyvims.pds.times import cassini2utc, cassini_time, dt_date, dt_doy, dt_iso, dyear, pds_folder, pds_time, utc2cassini\n'), ((109, 49, 109, 81), 'pytest.approx', 'approx', (), '', False, 'from pytest import approx, raises\n'), ((114, 23, 114, 55), 'pytest.approx', 'approx', (), '', False, 'from pytest import approx, raises\n'), ((115, 23, 115, 55), 'pytest.approx', 'approx', (), '', False, 'from pytest import approx, raises\n'), ((120, 11, 120, 39), 'pyvims.pds.times.pds_folder', 'pds_folder', ({(120, 22, 120, 38): '"""2005015T175855"""'}, {}), "('2005015T175855')", False, 'from pyvims.pds.times import cassini2utc, cassini_time, dt_date, dt_doy, dt_iso, dyear, pds_folder, pds_time, utc2cassini\n'), ((121, 11, 121, 55), 'pyvims.pds.times.pds_folder', 'pds_folder', ({(121, 22, 121, 54): '"""2005015T175855_2005016T184233/"""'}, {}), "('2005015T175855_2005016T184233/')", False, 'from pyvims.pds.times import cassini2utc, cassini_time, dt_date, dt_doy, dt_iso, dyear, pds_folder, pds_time, utc2cassini\n'), ((127, 11, 127, 30), 'pyvims.pds.times.dyear', 'dyear', ({(127, 17, 127, 29): '"""2005-01-01"""'}, {}), "('2005-01-01')", False, 'from pyvims.pds.times import cassini2utc, cassini_time, dt_date, dt_doy, dt_iso, dyear, pds_folder, pds_time, utc2cassini\n'), ((128, 11, 128, 30), 'pyvims.pds.times.dyear', 'dyear', ({(128, 17, 128, 29): '"""2005-12-31"""'}, {}), 
"('2005-12-31')", False, 'from pyvims.pds.times import cassini2utc, cassini_time, dt_date, dt_doy, dt_iso, dyear, pds_folder, pds_time, utc2cassini\n'), ((129, 11, 129, 30), 'pyvims.pds.times.dyear', 'dyear', ({(129, 17, 129, 29): '"""2004-12-31"""'}, {}), "('2004-12-31')", False, 'from pyvims.pds.times import cassini2utc, cassini_time, dt_date, dt_doy, dt_iso, dyear, pds_folder, pds_time, utc2cassini\n'), ((14, 15, 14, 48), 'pyvims.pds.times.dt_iso', 'dt_iso', ({(14, 22, 14, 47): '"""2005-02-14T18:02:29.123"""'}, {}), "('2005-02-14T18:02:29.123')", False, 'from pyvims.pds.times import cassini2utc, cassini_time, dt_date, dt_doy, dt_iso, dyear, pds_folder, pds_time, utc2cassini\n'), ((15, 15, 15, 44), 'pyvims.pds.times.dt_iso', 'dt_iso', ({(15, 22, 15, 43): '"""2005-02-14 18:02:29"""'}, {}), "('2005-02-14 18:02:29')", False, 'from pyvims.pds.times import cassini2utc, cassini_time, dt_date, dt_doy, dt_iso, dyear, pds_folder, pds_time, utc2cassini\n'), ((16, 15, 16, 41), 'pyvims.pds.times.dt_iso', 'dt_iso', ({(16, 22, 16, 40): '"""2005-02-14:18:02"""'}, {}), "('2005-02-14:18:02')", False, 'from pyvims.pds.times import cassini2utc, cassini_time, dt_date, dt_doy, dt_iso, dyear, pds_folder, pds_time, utc2cassini\n'), ((17, 15, 17, 35), 'pyvims.pds.times.dt_iso', 'dt_iso', ({(17, 22, 17, 34): '"""2005-02-14"""'}, {}), "('2005-02-14')", False, 'from pyvims.pds.times import cassini2utc, cassini_time, dt_date, dt_doy, dt_iso, dyear, pds_folder, pds_time, utc2cassini\n'), ((31, 15, 31, 46), 'pyvims.pds.times.dt_doy', 'dt_doy', ({(31, 22, 31, 45): '"""2005-045T18:02:29.123"""'}, {}), "('2005-045T18:02:29.123')", False, 'from pyvims.pds.times import cassini2utc, cassini_time, dt_date, dt_doy, dt_iso, dyear, pds_folder, pds_time, utc2cassini\n'), ((32, 15, 32, 42), 'pyvims.pds.times.dt_doy', 'dt_doy', ({(32, 22, 32, 41): '"""2005-045 18:02:29"""'}, {}), "('2005-045 18:02:29')", False, 'from pyvims.pds.times import cassini2utc, cassini_time, dt_date, dt_doy, dt_iso, dyear, pds_folder, pds_time, utc2cassini\n'), ((33, 15, 33, 39), 'pyvims.pds.times.dt_doy', 'dt_doy', ({(33, 22, 33, 38): '"""2005-045:18:02"""'}, {}), "('2005-045:18:02')", False, 'from pyvims.pds.times import cassini2utc, cassini_time, dt_date, dt_doy, dt_iso, dyear, pds_folder, pds_time, utc2cassini\n'), ((34, 15, 34, 33), 'pyvims.pds.times.dt_doy', 'dt_doy', ({(34, 22, 34, 32): '"""2005-045"""'}, {}), "('2005-045')", False, 'from pyvims.pds.times import cassini2utc, cassini_time, dt_date, dt_doy, dt_iso, dyear, pds_folder, pds_time, utc2cassini\n'), ((48, 15, 48, 38), 'pyvims.pds.times.dt_date', 'dt_date', ({(48, 23, 48, 37): '"""Feb 14, 2005"""'}, {}), "('Feb 14, 2005')", False, 'from pyvims.pds.times import cassini2utc, cassini_time, dt_date, dt_doy, dt_iso, dyear, pds_folder, pds_time, utc2cassini\n'), ((49, 15, 49, 39), 'pyvims.pds.times.dt_date', 'dt_date', ({(49, 23, 49, 38): '"""Febr 14, 2005"""'}, {}), "('Febr 14, 2005')", False, 'from pyvims.pds.times import cassini2utc, cassini_time, dt_date, dt_doy, dt_iso, dyear, pds_folder, pds_time, utc2cassini\n'), ((50, 15, 50, 48), 'pyvims.pds.times.dt_date', 'dt_date', (), '', False, 'from pyvims.pds.times import cassini2utc, cassini_time, dt_date, dt_doy, dt_iso, dyear, pds_folder, pds_time, utc2cassini\n'), ((51, 15, 51, 41), 'pyvims.pds.times.dt_date', 'dt_date', ({(51, 23, 51, 40): '"""to Feb 14, 2005"""'}, {}), "('to Feb 14, 2005')", False, 'from pyvims.pds.times import cassini2utc, cassini_time, dt_date, dt_doy, dt_iso, dyear, pds_folder, pds_time, utc2cassini\n'), ((65, 15, 65, 
39), 'pyvims.pds.times.pds_time', 'pds_time', ({(65, 24, 65, 38): '"""May 17, 2007"""'}, {}), "('May 17, 2007')", False, 'from pyvims.pds.times import cassini2utc, cassini_time, dt_date, dt_doy, dt_iso, dyear, pds_folder, pds_time, utc2cassini\n'), ((66, 15, 66, 44), 'pyvims.pds.times.pds_time', 'pds_time', ({(66, 24, 66, 43): '"""2010-274T00:00:00"""'}, {}), "('2010-274T00:00:00')", False, 'from pyvims.pds.times import cassini2utc, cassini_time, dt_date, dt_doy, dt_iso, dyear, pds_folder, pds_time, utc2cassini\n'), ((67, 15, 67, 50), 'pyvims.pds.times.pds_time', 'pds_time', ({(67, 24, 67, 49): '"""2011-10-01T00:02:04.244"""'}, {}), "('2011-10-01T00:02:04.244')", False, 'from pyvims.pds.times import cassini2utc, cassini_time, dt_date, dt_doy, dt_iso, dyear, pds_folder, pds_time, utc2cassini\n'), ((103, 15, 103, 43), 'pyvims.pds.times.cassini2utc', 'cassini2utc', ({(103, 27, 103, 42): '"""v1487096932_1"""'}, {}), "('v1487096932_1')", False, 'from pyvims.pds.times import cassini2utc, cassini_time, dt_date, dt_doy, dt_iso, dyear, pds_folder, pds_time, utc2cassini\n'), ((104, 15, 104, 42), 'pyvims.pds.times.cassini2utc', 'cassini2utc', ({(104, 27, 104, 41): '(1483230358.172)'}, {}), '(1483230358.172)', False, 'from pyvims.pds.times import cassini2utc, cassini_time, dt_date, dt_doy, dt_iso, dyear, pds_folder, pds_time, utc2cassini\n'), ((131, 17, 131, 31), 'datetime.datetime', 'dt', ({(131, 20, 131, 24): '(2005)', (131, 26, 131, 27): '(1)', (131, 29, 131, 30): '(1)'}, {}), '(2005, 1, 1)', True, 'from datetime import datetime as dt\n'), ((132, 17, 132, 33), 'datetime.datetime', 'dt', ({(132, 20, 132, 24): '(2005)', (132, 26, 132, 28): '(12)', (132, 30, 132, 32): '(31)'}, {}), '(2005, 12, 31)', True, 'from datetime import datetime as dt\n'), ((133, 17, 133, 33), 'datetime.datetime', 'dt', ({(133, 20, 133, 24): '(2004)', (133, 26, 133, 28): '(12)', (133, 30, 133, 32): '(31)'}, {}), '(2004, 12, 31)', True, 'from datetime import datetime as dt\n')] |
zhouli121018/nodejsgm | e/mail-relay/web/apps/mail/migrations/0109_auto_20171130_1047.py | 0ccbc8acf61badc812f684dd39253d55c99f08eb | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('mail', '0108_auto_20171130_1004'),
]
operations = [
migrations.AlterModelOptions(
name='relaysenderwhitelist',
options={'verbose_name': '\u4e2d\u7ee7\u53d1\u4ef6\u4eba\u767d\u540d\u5355'},
),
migrations.AlterModelOptions(
name='spamrptblacklist',
options={'verbose_name': '\u7f51\u5173\u9694\u79bb\u62a5\u544a\u6536\u4ef6\u4eba\u9ed1\u540d\u5355'},
),
]
| [((14, 8, 17, 9), 'django.db.migrations.AlterModelOptions', 'migrations.AlterModelOptions', (), '', False, 'from django.db import models, migrations\n'), ((18, 8, 21, 9), 'django.db.migrations.AlterModelOptions', 'migrations.AlterModelOptions', (), '', False, 'from django.db import models, migrations\n')] |
usegalaxy-no/usegalaxy | venv/lib/python3.6/site-packages/ansible_test/_data/sanity/code-smell/runtime-metadata.py | 75dad095769fe918eb39677f2c887e681a747f3a | #!/usr/bin/env python
"""Schema validation of ansible-core's ansible_builtin_runtime.yml and collection's meta/runtime.yml"""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import datetime
import os
import re
import sys
from distutils.version import StrictVersion, LooseVersion
from functools import partial
import yaml
from voluptuous import All, Any, MultipleInvalid, PREVENT_EXTRA
from voluptuous import Required, Schema, Invalid
from voluptuous.humanize import humanize_error
from ansible.module_utils.six import string_types
from ansible.utils.version import SemanticVersion
def isodate(value, check_deprecation_date=False, is_tombstone=False):
"""Validate a datetime.date or ISO 8601 date string."""
# datetime.date objects come from YAML dates, these are ok
if isinstance(value, datetime.date):
removal_date = value
else:
# make sure we have a string
msg = 'Expected ISO 8601 date string (YYYY-MM-DD), or YAML date'
if not isinstance(value, string_types):
raise Invalid(msg)
        # From Python 3.7 on, there is datetime.date.fromisoformat(). For older versions,
# we have to do things manually.
if not re.match('^[0-9]{4}-[0-9]{2}-[0-9]{2}$', value):
raise Invalid(msg)
try:
removal_date = datetime.datetime.strptime(value, '%Y-%m-%d').date()
except ValueError:
raise Invalid(msg)
# Make sure date is correct
today = datetime.date.today()
if is_tombstone:
# For a tombstone, the removal date must be in the past
if today < removal_date:
raise Invalid(
'The tombstone removal_date (%s) must not be after today (%s)' % (removal_date, today))
else:
# For a deprecation, the removal date must be in the future. Only test this if
# check_deprecation_date is truish, to avoid checks to suddenly start to fail.
if check_deprecation_date and today > removal_date:
raise Invalid(
'The deprecation removal_date (%s) must be after today (%s)' % (removal_date, today))
return value
def removal_version(value, is_ansible, current_version=None, is_tombstone=False):
"""Validate a removal version string."""
msg = (
'Removal version must be a string' if is_ansible else
'Removal version must be a semantic version (https://semver.org/)'
)
if not isinstance(value, string_types):
raise Invalid(msg)
try:
if is_ansible:
version = StrictVersion()
version.parse(value)
version = LooseVersion(value) # We're storing Ansible's version as a LooseVersion
else:
version = SemanticVersion()
version.parse(value)
if version.major != 0 and (version.minor != 0 or version.patch != 0):
raise Invalid('removal_version (%r) must be a major release, not a minor or patch release '
'(see specification at https://semver.org/)' % (value, ))
if current_version is not None:
if is_tombstone:
# For a tombstone, the removal version must not be in the future
if version > current_version:
raise Invalid('The tombstone removal_version (%r) must not be after the '
'current version (%s)' % (value, current_version))
else:
# For a deprecation, the removal version must be in the future
if version <= current_version:
raise Invalid('The deprecation removal_version (%r) must be after the '
'current version (%s)' % (value, current_version))
except ValueError:
raise Invalid(msg)
return value
def any_value(value):
"""Accepts anything."""
return value
def get_ansible_version():
"""Return current ansible-core version"""
from ansible.release import __version__
return LooseVersion('.'.join(__version__.split('.')[:3]))
def get_collection_version():
"""Return current collection version, or None if it is not available"""
import importlib.util
collection_detail_path = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))),
'collection_detail.py')
collection_detail_spec = importlib.util.spec_from_file_location('collection_detail', collection_detail_path)
collection_detail = importlib.util.module_from_spec(collection_detail_spec)
sys.modules['collection_detail'] = collection_detail
collection_detail_spec.loader.exec_module(collection_detail)
# noinspection PyBroadException
try:
result = collection_detail.read_manifest_json('.') or collection_detail.read_galaxy_yml('.')
return SemanticVersion(result['version'])
except Exception: # pylint: disable=broad-except
# We do not care why it fails, in case we cannot get the version
# just return None to indicate "we don't know".
return None
def validate_metadata_file(path, is_ansible, check_deprecation_dates=False):
"""Validate explicit runtime metadata file"""
try:
with open(path, 'r') as f_path:
routing = yaml.safe_load(f_path)
except yaml.error.MarkedYAMLError as ex:
print('%s:%d:%d: YAML load failed: %s' % (path, ex.context_mark.line +
1, ex.context_mark.column + 1, re.sub(r'\s+', ' ', str(ex))))
return
except Exception as ex: # pylint: disable=broad-except
print('%s:%d:%d: YAML load failed: %s' %
(path, 0, 0, re.sub(r'\s+', ' ', str(ex))))
return
if is_ansible:
current_version = get_ansible_version()
else:
current_version = get_collection_version()
# Updates to schema MUST also be reflected in the documentation
# ~https://docs.ansible.com/ansible/devel/dev_guide/developing_collections.html
# plugin_routing schema
avoid_additional_data = Schema(
Any(
{
Required('removal_version'): any_value,
'warning_text': any_value,
},
{
Required('removal_date'): any_value,
'warning_text': any_value,
}
),
extra=PREVENT_EXTRA
)
deprecation_schema = All(
# The first schema validates the input, and the second makes sure no extra keys are specified
Schema(
{
'removal_version': partial(removal_version, is_ansible=is_ansible,
current_version=current_version),
'removal_date': partial(isodate, check_deprecation_date=check_deprecation_dates),
'warning_text': Any(*string_types),
}
),
avoid_additional_data
)
tombstoning_schema = All(
# The first schema validates the input, and the second makes sure no extra keys are specified
Schema(
{
'removal_version': partial(removal_version, is_ansible=is_ansible,
current_version=current_version, is_tombstone=True),
'removal_date': partial(isodate, is_tombstone=True),
'warning_text': Any(*string_types),
}
),
avoid_additional_data
)
plugin_routing_schema = Any(
Schema({
('deprecation'): Any(deprecation_schema),
('tombstone'): Any(tombstoning_schema),
('redirect'): Any(*string_types),
}, extra=PREVENT_EXTRA),
)
list_dict_plugin_routing_schema = [{str_type: plugin_routing_schema}
for str_type in string_types]
plugin_schema = Schema({
('action'): Any(None, *list_dict_plugin_routing_schema),
('become'): Any(None, *list_dict_plugin_routing_schema),
('cache'): Any(None, *list_dict_plugin_routing_schema),
('callback'): Any(None, *list_dict_plugin_routing_schema),
('cliconf'): Any(None, *list_dict_plugin_routing_schema),
('connection'): Any(None, *list_dict_plugin_routing_schema),
('doc_fragments'): Any(None, *list_dict_plugin_routing_schema),
('filter'): Any(None, *list_dict_plugin_routing_schema),
('httpapi'): Any(None, *list_dict_plugin_routing_schema),
('inventory'): Any(None, *list_dict_plugin_routing_schema),
('lookup'): Any(None, *list_dict_plugin_routing_schema),
('module_utils'): Any(None, *list_dict_plugin_routing_schema),
('modules'): Any(None, *list_dict_plugin_routing_schema),
('netconf'): Any(None, *list_dict_plugin_routing_schema),
('shell'): Any(None, *list_dict_plugin_routing_schema),
('strategy'): Any(None, *list_dict_plugin_routing_schema),
('terminal'): Any(None, *list_dict_plugin_routing_schema),
('test'): Any(None, *list_dict_plugin_routing_schema),
('vars'): Any(None, *list_dict_plugin_routing_schema),
}, extra=PREVENT_EXTRA)
# import_redirection schema
import_redirection_schema = Any(
Schema({
('redirect'): Any(*string_types),
# import_redirect doesn't currently support deprecation
}, extra=PREVENT_EXTRA)
)
list_dict_import_redirection_schema = [{str_type: import_redirection_schema}
for str_type in string_types]
# top level schema
schema = Schema({
# All of these are optional
('plugin_routing'): Any(plugin_schema),
('import_redirection'): Any(None, *list_dict_import_redirection_schema),
# requires_ansible: In the future we should validate this with SpecifierSet
('requires_ansible'): Any(*string_types),
('action_groups'): dict,
}, extra=PREVENT_EXTRA)
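    # A minimal collection meta/runtime.yml accepted by this schema could look
    # like the following sketch (module, collection and version values are made up):
    #
    #   requires_ansible: '>=2.9.10'
    #   plugin_routing:
    #     modules:
    #       old_module:
    #         deprecation:
    #           removal_version: 2.0.0
    #           warning_text: Use ns.col.new_module instead.
    #   import_redirection:
    #     ansible_collections.ns.col.plugins.module_utils.old:
    #       redirect: ansible_collections.ns.col.plugins.module_utils.new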
# Ensure schema is valid
try:
schema(routing)
except MultipleInvalid as ex:
for error in ex.errors:
# No way to get line/column numbers
print('%s:%d:%d: %s' % (path, 0, 0, humanize_error(routing, error)))
def main():
"""Validate runtime metadata"""
paths = sys.argv[1:] or sys.stdin.read().splitlines()
collection_legacy_file = 'meta/routing.yml'
collection_runtime_file = 'meta/runtime.yml'
# This is currently disabled, because if it is enabled this test can start failing
# at a random date. For this to be properly activated, we (a) need to be able to return
# codes for this test, and (b) make this error optional.
check_deprecation_dates = False
for path in paths:
if path == collection_legacy_file:
print('%s:%d:%d: %s' % (path, 0, 0, ("Should be called '%s'" % collection_runtime_file)))
continue
validate_metadata_file(
path,
is_ansible=path not in (collection_legacy_file, collection_runtime_file),
check_deprecation_dates=check_deprecation_dates)
if __name__ == '__main__':
main()
| [((42, 12, 42, 33), 'datetime.date.today', 'datetime.date.today', ({}, {}), '()', False, 'import datetime\n'), ((64, 14, 64, 26), 'voluptuous.Invalid', 'Invalid', ({(64, 22, 64, 25): 'msg'}, {}), '(msg)', False, 'from voluptuous import Required, Schema, Invalid\n'), ((118, 15, 118, 49), 'ansible.utils.version.SemanticVersion', 'SemanticVersion', ({(118, 31, 118, 48): "result['version']"}, {}), "(result['version'])", False, 'from ansible.utils.version import SemanticVersion\n'), ((32, 18, 32, 30), 'voluptuous.Invalid', 'Invalid', ({(32, 26, 32, 29): 'msg'}, {}), '(msg)', False, 'from voluptuous import Required, Schema, Invalid\n'), ((35, 15, 35, 62), 're.match', 're.match', ({(35, 24, 35, 54): '"""^[0-9]{4}-[0-9]{2}-[0-9]{2}$"""', (35, 56, 35, 61): 'value'}, {}), "('^[0-9]{4}-[0-9]{2}-[0-9]{2}$', value)", False, 'import re\n'), ((36, 18, 36, 30), 'voluptuous.Invalid', 'Invalid', ({(36, 26, 36, 29): 'msg'}, {}), '(msg)', False, 'from voluptuous import Required, Schema, Invalid\n'), ((46, 18, 47, 103), 'voluptuous.Invalid', 'Invalid', ({(47, 16, 47, 102): "('The tombstone removal_date (%s) must not be after today (%s)' % (\n removal_date, today))"}, {}), "('The tombstone removal_date (%s) must not be after today (%s)' % (\n removal_date, today))", False, 'from voluptuous import Required, Schema, Invalid\n'), ((52, 18, 53, 101), 'voluptuous.Invalid', 'Invalid', ({(53, 16, 53, 100): "('The deprecation removal_date (%s) must be after today (%s)' % (\n removal_date, today))"}, {}), "('The deprecation removal_date (%s) must be after today (%s)' % (\n removal_date, today))", False, 'from voluptuous import Required, Schema, Invalid\n'), ((67, 22, 67, 37), 'distutils.version.StrictVersion', 'StrictVersion', ({}, {}), '()', False, 'from distutils.version import StrictVersion, LooseVersion\n'), ((69, 22, 69, 41), 'distutils.version.LooseVersion', 'LooseVersion', ({(69, 35, 69, 40): 'value'}, {}), '(value)', False, 'from distutils.version import StrictVersion, LooseVersion\n'), ((71, 22, 71, 39), 'ansible.utils.version.SemanticVersion', 'SemanticVersion', ({}, {}), '()', False, 'from ansible.utils.version import SemanticVersion\n'), ((88, 14, 88, 26), 'voluptuous.Invalid', 'Invalid', ({(88, 22, 88, 25): 'msg'}, {}), '(msg)', False, 'from voluptuous import Required, Schema, Invalid\n'), ((129, 22, 129, 44), 'yaml.safe_load', 'yaml.safe_load', ({(129, 37, 129, 43): 'f_path'}, {}), '(f_path)', False, 'import yaml\n'), ((201, 20, 201, 63), 'voluptuous.Any', 'Any', ({(201, 24, 201, 28): 'None', (201, 30, 201, 62): '*list_dict_plugin_routing_schema'}, {}), '(None, *list_dict_plugin_routing_schema)', False, 'from voluptuous import All, Any, MultipleInvalid, PREVENT_EXTRA\n'), ((202, 20, 202, 63), 'voluptuous.Any', 'Any', ({(202, 24, 202, 28): 'None', (202, 30, 202, 62): '*list_dict_plugin_routing_schema'}, {}), '(None, *list_dict_plugin_routing_schema)', False, 'from voluptuous import All, Any, MultipleInvalid, PREVENT_EXTRA\n'), ((203, 19, 203, 62), 'voluptuous.Any', 'Any', ({(203, 23, 203, 27): 'None', (203, 29, 203, 61): '*list_dict_plugin_routing_schema'}, {}), '(None, *list_dict_plugin_routing_schema)', False, 'from voluptuous import All, Any, MultipleInvalid, PREVENT_EXTRA\n'), ((204, 22, 204, 65), 'voluptuous.Any', 'Any', ({(204, 26, 204, 30): 'None', (204, 32, 204, 64): '*list_dict_plugin_routing_schema'}, {}), '(None, *list_dict_plugin_routing_schema)', False, 'from voluptuous import All, Any, MultipleInvalid, PREVENT_EXTRA\n'), ((205, 21, 205, 64), 'voluptuous.Any', 'Any', ({(205, 25, 205, 29): 
'None', (205, 31, 205, 63): '*list_dict_plugin_routing_schema'}, {}), '(None, *list_dict_plugin_routing_schema)', False, 'from voluptuous import All, Any, MultipleInvalid, PREVENT_EXTRA\n'), ((206, 24, 206, 67), 'voluptuous.Any', 'Any', ({(206, 28, 206, 32): 'None', (206, 34, 206, 66): '*list_dict_plugin_routing_schema'}, {}), '(None, *list_dict_plugin_routing_schema)', False, 'from voluptuous import All, Any, MultipleInvalid, PREVENT_EXTRA\n'), ((207, 27, 207, 70), 'voluptuous.Any', 'Any', ({(207, 31, 207, 35): 'None', (207, 37, 207, 69): '*list_dict_plugin_routing_schema'}, {}), '(None, *list_dict_plugin_routing_schema)', False, 'from voluptuous import All, Any, MultipleInvalid, PREVENT_EXTRA\n'), ((208, 20, 208, 63), 'voluptuous.Any', 'Any', ({(208, 24, 208, 28): 'None', (208, 30, 208, 62): '*list_dict_plugin_routing_schema'}, {}), '(None, *list_dict_plugin_routing_schema)', False, 'from voluptuous import All, Any, MultipleInvalid, PREVENT_EXTRA\n'), ((209, 21, 209, 64), 'voluptuous.Any', 'Any', ({(209, 25, 209, 29): 'None', (209, 31, 209, 63): '*list_dict_plugin_routing_schema'}, {}), '(None, *list_dict_plugin_routing_schema)', False, 'from voluptuous import All, Any, MultipleInvalid, PREVENT_EXTRA\n'), ((210, 23, 210, 66), 'voluptuous.Any', 'Any', ({(210, 27, 210, 31): 'None', (210, 33, 210, 65): '*list_dict_plugin_routing_schema'}, {}), '(None, *list_dict_plugin_routing_schema)', False, 'from voluptuous import All, Any, MultipleInvalid, PREVENT_EXTRA\n'), ((211, 20, 211, 63), 'voluptuous.Any', 'Any', ({(211, 24, 211, 28): 'None', (211, 30, 211, 62): '*list_dict_plugin_routing_schema'}, {}), '(None, *list_dict_plugin_routing_schema)', False, 'from voluptuous import All, Any, MultipleInvalid, PREVENT_EXTRA\n'), ((212, 26, 212, 69), 'voluptuous.Any', 'Any', ({(212, 30, 212, 34): 'None', (212, 36, 212, 68): '*list_dict_plugin_routing_schema'}, {}), '(None, *list_dict_plugin_routing_schema)', False, 'from voluptuous import All, Any, MultipleInvalid, PREVENT_EXTRA\n'), ((213, 21, 213, 64), 'voluptuous.Any', 'Any', ({(213, 25, 213, 29): 'None', (213, 31, 213, 63): '*list_dict_plugin_routing_schema'}, {}), '(None, *list_dict_plugin_routing_schema)', False, 'from voluptuous import All, Any, MultipleInvalid, PREVENT_EXTRA\n'), ((214, 21, 214, 64), 'voluptuous.Any', 'Any', ({(214, 25, 214, 29): 'None', (214, 31, 214, 63): '*list_dict_plugin_routing_schema'}, {}), '(None, *list_dict_plugin_routing_schema)', False, 'from voluptuous import All, Any, MultipleInvalid, PREVENT_EXTRA\n'), ((215, 19, 215, 62), 'voluptuous.Any', 'Any', ({(215, 23, 215, 27): 'None', (215, 29, 215, 61): '*list_dict_plugin_routing_schema'}, {}), '(None, *list_dict_plugin_routing_schema)', False, 'from voluptuous import All, Any, MultipleInvalid, PREVENT_EXTRA\n'), ((216, 22, 216, 65), 'voluptuous.Any', 'Any', ({(216, 26, 216, 30): 'None', (216, 32, 216, 64): '*list_dict_plugin_routing_schema'}, {}), '(None, *list_dict_plugin_routing_schema)', False, 'from voluptuous import All, Any, MultipleInvalid, PREVENT_EXTRA\n'), ((217, 22, 217, 65), 'voluptuous.Any', 'Any', ({(217, 26, 217, 30): 'None', (217, 32, 217, 64): '*list_dict_plugin_routing_schema'}, {}), '(None, *list_dict_plugin_routing_schema)', False, 'from voluptuous import All, Any, MultipleInvalid, PREVENT_EXTRA\n'), ((218, 18, 218, 61), 'voluptuous.Any', 'Any', ({(218, 22, 218, 26): 'None', (218, 28, 218, 60): '*list_dict_plugin_routing_schema'}, {}), '(None, *list_dict_plugin_routing_schema)', False, 'from voluptuous import All, Any, MultipleInvalid, 
PREVENT_EXTRA\n'), ((219, 18, 219, 61), 'voluptuous.Any', 'Any', ({(219, 22, 219, 26): 'None', (219, 28, 219, 60): '*list_dict_plugin_routing_schema'}, {}), '(None, *list_dict_plugin_routing_schema)', False, 'from voluptuous import All, Any, MultipleInvalid, PREVENT_EXTRA\n'), ((238, 28, 238, 46), 'voluptuous.Any', 'Any', ({(238, 32, 238, 45): 'plugin_schema'}, {}), '(plugin_schema)', False, 'from voluptuous import All, Any, MultipleInvalid, PREVENT_EXTRA\n'), ((239, 32, 239, 79), 'voluptuous.Any', 'Any', ({(239, 36, 239, 40): 'None', (239, 42, 239, 78): '*list_dict_import_redirection_schema'}, {}), '(None, *list_dict_import_redirection_schema)', False, 'from voluptuous import All, Any, MultipleInvalid, PREVENT_EXTRA\n'), ((241, 30, 241, 48), 'voluptuous.Any', 'Any', ({(241, 34, 241, 47): '*string_types'}, {}), '(*string_types)', False, 'from voluptuous import All, Any, MultipleInvalid, PREVENT_EXTRA\n'), ((40, 18, 40, 30), 'voluptuous.Invalid', 'Invalid', ({(40, 26, 40, 29): 'msg'}, {}), '(msg)', False, 'from voluptuous import Required, Schema, Invalid\n'), ((74, 22, 75, 87), 'voluptuous.Invalid', 'Invalid', ({(74, 30, 75, 86): "('removal_version (%r) must be a major release, not a minor or patch release (see specification at https://semver.org/)'\n % (value,))"}, {}), "(\n 'removal_version (%r) must be a major release, not a minor or patch release (see specification at https://semver.org/)'\n % (value,))", False, 'from voluptuous import Required, Schema, Invalid\n'), ((101, 33, 101, 55), 'ansible.release.__version__.split', '__version__.split', ({(101, 51, 101, 54): '"""."""'}, {}), "('.')", False, 'from ansible.release import __version__\n'), ((108, 74, 108, 99), 'os.path.dirname', 'os.path.dirname', ({(108, 90, 108, 98): '__file__'}, {}), '(__file__)', False, 'import os\n'), ((152, 16, 152, 43), 'voluptuous.Required', 'Required', ({(152, 25, 152, 42): '"""removal_version"""'}, {}), "('removal_version')", False, 'from voluptuous import Required, Schema, Invalid\n'), ((156, 16, 156, 40), 'voluptuous.Required', 'Required', ({(156, 25, 156, 39): '"""removal_date"""'}, {}), "('removal_date')", False, 'from voluptuous import Required, Schema, Invalid\n'), ((167, 35, 168, 75), 'functools.partial', 'partial', (), '', False, 'from functools import partial\n'), ((169, 32, 169, 96), 'functools.partial', 'partial', (), '', False, 'from functools import partial\n'), ((170, 32, 170, 50), 'voluptuous.Any', 'Any', ({(170, 36, 170, 49): '*string_types'}, {}), '(*string_types)', False, 'from voluptuous import All, Any, MultipleInvalid, PREVENT_EXTRA\n'), ((180, 35, 181, 94), 'functools.partial', 'partial', (), '', False, 'from functools import partial\n'), ((182, 32, 182, 67), 'functools.partial', 'partial', (), '', False, 'from functools import partial\n'), ((183, 32, 183, 50), 'voluptuous.Any', 'Any', ({(183, 36, 183, 49): '*string_types'}, {}), '(*string_types)', False, 'from voluptuous import All, Any, MultipleInvalid, PREVENT_EXTRA\n'), ((191, 29, 191, 52), 'voluptuous.Any', 'Any', ({(191, 33, 191, 51): 'deprecation_schema'}, {}), '(deprecation_schema)', False, 'from voluptuous import All, Any, MultipleInvalid, PREVENT_EXTRA\n'), ((192, 27, 192, 50), 'voluptuous.Any', 'Any', ({(192, 31, 192, 49): 'tombstoning_schema'}, {}), '(tombstoning_schema)', False, 'from voluptuous import All, Any, MultipleInvalid, PREVENT_EXTRA\n'), ((193, 26, 193, 44), 'voluptuous.Any', 'Any', ({(193, 30, 193, 43): '*string_types'}, {}), '(*string_types)', False, 'from voluptuous import All, Any, MultipleInvalid, 
PREVENT_EXTRA\n'), ((226, 26, 226, 44), 'voluptuous.Any', 'Any', ({(226, 30, 226, 43): '*string_types'}, {}), '(*string_types)', False, 'from voluptuous import All, Any, MultipleInvalid, PREVENT_EXTRA\n'), ((257, 28, 257, 44), 'sys.stdin.read', 'sys.stdin.read', ({}, {}), '()', False, 'import sys\n'), ((38, 27, 38, 72), 'datetime.datetime.strptime', 'datetime.datetime.strptime', ({(38, 54, 38, 59): 'value', (38, 61, 38, 71): '"""%Y-%m-%d"""'}, {}), "(value, '%Y-%m-%d')", False, 'import datetime\n'), ((80, 26, 81, 84), 'voluptuous.Invalid', 'Invalid', ({(80, 34, 81, 83): "('The tombstone removal_version (%r) must not be after the current version (%s)'\n % (value, current_version))"}, {}), "(\n 'The tombstone removal_version (%r) must not be after the current version (%s)'\n % (value, current_version))", False, 'from voluptuous import Required, Schema, Invalid\n'), ((85, 26, 86, 84), 'voluptuous.Invalid', 'Invalid', ({(85, 34, 86, 83): "('The deprecation removal_version (%r) must be after the current version (%s)'\n % (value, current_version))"}, {}), "(\n 'The deprecation removal_version (%r) must be after the current version (%s)'\n % (value, current_version))", False, 'from voluptuous import Required, Schema, Invalid\n'), ((252, 48, 252, 78), 'voluptuous.humanize.humanize_error', 'humanize_error', ({(252, 63, 252, 70): 'routing', (252, 72, 252, 77): 'error'}, {}), '(routing, error)', False, 'from voluptuous.humanize import humanize_error\n')] |
mitdo/o2ac-ur | catkin_ws/src/o2ac_flexbe/o2ac_flexbe_states/src/o2ac_flexbe_states/align_bearing_holes.py | 74c82a54a693bf6a3fc995ff63e7c91ac1fda6fd | #!/usr/bin/env python
from flexbe_core import EventState, Logger
from flexbe_core.proxy import ProxyActionClient
# example import of required action
from o2ac_msgs.msg import AlignBearingHolesAction, AlignBearingHolesGoal
class AlignBearingHolesActionState(EventState):
'''
Actionlib for aligning the bearing holes
-- task_name string Name of the task
<= success AlignBearingHoles completed successfully.
<= error AlignBearingHoles failed to execute.
'''
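    # A rough usage sketch, assuming the usual FlexBE behavior API; the state
    # label, task name and transition targets below are placeholders:
    #
    #   OperatableStateMachine.add('align_bearing_holes',
    #                              AlignBearingHolesActionState(task_name='taskboard'),
    #                              transitions={'success': 'finished', 'error': 'failed'},
    #                              autonomy={'success': Autonomy.Off, 'error': Autonomy.Off})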
def __init__(self, task_name):
super(
AlignBearingHolesActionState,
self).__init__(
outcomes=[
'success',
'error'])
self._topic = 'o2ac_flexbe/align_bearing_holes'
# pass required clients as dict (topic: type)
self._client = ProxyActionClient(
{self._topic: AlignBearingHolesAction})
self._task_name = task_name
self._success = False
def execute(self, userdata):
if not self._success:
return 'error'
if self._client.has_result(self._topic):
result = self._client.get_result(self._topic)
Logger.logwarn('result %s' % str(result))
if not result:
                Logger.logwarn('Failed to complete AlignBearingHoles')
self._success = False
return 'error'
else:
                Logger.logwarn('Succeeded! Completed AlignBearingHoles')
self._success = True
return 'success'
def on_enter(self, userdata):
goal = AlignBearingHolesGoal()
goal.task_name = self._task_name
self._success = True
try:
self._client.send_goal(self._topic, goal)
except Exception as e:
Logger.logwarn(
'Failed to send the AlignBearingHoles command:\n%s' %
str(e))
self._success = False
def on_exit(self, userdata):
if not self._client.has_result(self._topic):
self._client.cancel(self._topic)
Logger.loginfo('Cancelled active action goal.')
| [((30, 23, 31, 51), 'flexbe_core.proxy.ProxyActionClient', 'ProxyActionClient', ({(31, 12, 31, 50): '{self._topic: AlignBearingHolesAction}'}, {}), '({self._topic: AlignBearingHolesAction})', False, 'from flexbe_core.proxy import ProxyActionClient\n'), ((55, 15, 55, 38), 'o2ac_msgs.msg.AlignBearingHolesGoal', 'AlignBearingHolesGoal', ({}, {}), '()', False, 'from o2ac_msgs.msg import AlignBearingHolesAction, AlignBearingHolesGoal\n'), ((70, 12, 70, 59), 'flexbe_core.Logger.loginfo', 'Logger.loginfo', ({(70, 27, 70, 58): '"""Cancelled active action goal."""'}, {}), "('Cancelled active action goal.')", False, 'from flexbe_core import EventState, Logger\n'), ((46, 16, 46, 68), 'flexbe_core.Logger.logwarn', 'Logger.logwarn', ({(46, 31, 46, 67): '"""Fail to complete AlignBearingHoles"""'}, {}), "('Fail to complete AlignBearingHoles')", False, 'from flexbe_core import EventState, Logger\n'), ((50, 16, 50, 70), 'flexbe_core.Logger.logwarn', 'Logger.logwarn', ({(50, 31, 50, 69): '"""Succeed! completed AlignBearingHoles"""'}, {}), "('Succeed! completed AlignBearingHoles')", False, 'from flexbe_core import EventState, Logger\n')] |
sebastian-philipp/find-unicode-control | find_unicode_control.py | 170730aff64d17a4d9c57b0284d862c932e1565c | #!/usr/bin/env python3
"""Find unicode control characters in source files
By default the script takes one or more files or directories and looks for
unicode control characters in all text files. To narrow down the files, provide
a config file with the -c command line, defining a scan_exclude list, which
should be a list of regular expressions matching paths to exclude from the scan.
There is a second mode enabled with -p which when set to 'all', prints all
control characters and when set to 'bidi', prints only the 9 bidirectional
control characters.
"""
import sys, os, argparse, re, unicodedata, magic
import importlib
from stat import *
scan_exclude = [r'\.git/', r'\.hg/', r'\.desktop$', r'ChangeLog$', r'NEWS$',
r'\.ppd$', r'\.txt$', r'\.directory$']
scan_exclude_mime = [r'text/x-po$', r'text/x-tex$', r'text/x-troff$',
r'text/html$']
verbose_mode = False
# Print to stderr in verbose mode.
def eprint(*args, **kwargs):
if verbose_mode:
print(*args, file=sys.stderr, **kwargs)
# Decode a single latin1 line.
def decodeline(inf):
if isinstance(inf, str):
return inf
return inf.decode('latin-1')
# Make a text string from a file, attempting to decode from latin1 if necessary.
# Other non-utf-8 locales are not supported at the moment.
def getfiletext(filename):
text = None
with open(filename) as infile:
try:
if detailed_mode:
return [decodeline(inf) for inf in infile]
except Exception as e:
eprint('%s: %s' % (filename, e))
return None
try:
text = ''.join(infile)
except UnicodeDecodeError:
eprint('%s: Retrying with latin1' % filename)
            try:
                # The original text-mode handle cannot be resumed after a
                # decode error, so re-read the whole file as latin-1.
                with open(filename, encoding='latin-1') as latin_infile:
                    text = ''.join(latin_infile)
except Exception as e:
eprint('%s: %s' % (filename, e))
if text:
return set(text)
else:
return None
def analyze_text_detailed(filename, text, disallowed, msg):
line = 0
warned = False
for t in text:
line = line + 1
subset = [c for c in t if c in disallowed]
if subset:
print('%s:%d %s: %s' % (filename, line, msg, subset))
warned = True
if not warned:
eprint('%s: OK' % filename)
# Look for disallowed characters in the text. We reduce all characters into a
# set to speed up analysis; detailed mode (-d) reports line numbers for files
# that have these disallowed chars.
def analyze_text(filename, text, disallowed, msg):
if detailed_mode:
analyze_text_detailed(filename, text, disallowed, msg)
return
if not text.isdisjoint(disallowed):
print('%s: %s: %s' % (filename, msg, text & disallowed))
else:
eprint('%s: OK' % filename)
def should_read(f):
    # Fast check, just the file name.
    if [e for e in scan_exclude if re.search(e, f)]:
        return False
    # Slower check, mime type.
    m = magic.detect_from_filename(f)
    if 'text/' not in m.mime_type \
            or [e for e in scan_exclude_mime if re.search(e, m.mime_type)]:
        return False
    return True
# Get file text and feed into analyze_text.
def analyze_file(f, disallowed, msg):
eprint('%s: Reading file' % f)
if should_read(f):
text = getfiletext(f)
if text:
analyze_text(f, text, disallowed, msg)
else:
eprint('%s: SKIPPED' % f)
# Actual implementation of the recursive descent into directories.
def analyze_any(p, disallowed, msg):
mode = os.stat(p).st_mode
if S_ISDIR(mode):
analyze_dir(p, disallowed, msg)
elif S_ISREG(mode):
analyze_file(p, disallowed, msg)
else:
eprint('%s: UNREADABLE' % p)
# Recursively analyze files in the directory.
def analyze_dir(d, disallowed, msg):
for f in os.listdir(d):
analyze_any(os.path.join(d, f), disallowed, msg)
def analyze_paths(paths, disallowed, msg):
for p in paths:
analyze_any(p, disallowed, msg)
# All control characters. We omit the ascii control characters.
def nonprint_unicode(c):
cat = unicodedata.category(c)
if cat.startswith('C') and cat != 'Cc':
return True
return False
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="Look for Unicode control characters")
parser.add_argument('path', metavar='path', nargs='+',
help='Sources to analyze')
parser.add_argument('-p', '--nonprint', required=False,
type=str, choices=['all', 'bidi'],
help='Look for either all non-printable unicode characters or bidirectional control characters.')
parser.add_argument('-v', '--verbose', required=False, action='store_true',
help='Verbose mode.')
parser.add_argument('-d', '--detailed', required=False, action='store_true',
help='Print line numbers where characters occur.')
parser.add_argument('-t', '--notests', required=False,
action='store_true', help='Exclude tests (basically test.* as a component of path).')
parser.add_argument('-c', '--config', required=False, type=str,
help='Configuration file to read settings from.')
args = parser.parse_args()
verbose_mode = args.verbose
detailed_mode = args.detailed
if not args.nonprint:
# Formatting control characters in the unicode space. This includes the
# bidi control characters.
disallowed = set(chr(c) for c in range(sys.maxunicode) if \
unicodedata.category(chr(c)) == 'Cf')
msg = 'unicode control characters'
elif args.nonprint == 'all':
# All control characters.
disallowed = set(chr(c) for c in range(sys.maxunicode) if \
nonprint_unicode(chr(c)))
msg = 'disallowed characters'
else:
# Only bidi control characters.
disallowed = set([
chr(0x202a), chr(0x202b), chr(0x202c), chr(0x202d), chr(0x202e),
chr(0x2066), chr(0x2067), chr(0x2068), chr(0x2069)])
msg = 'bidirectional control characters'
if args.config:
spec = importlib.util.spec_from_file_location("settings", args.config)
settings = importlib.util.module_from_spec(spec)
spec.loader.exec_module(settings)
if hasattr(settings, 'scan_exclude'):
scan_exclude = scan_exclude + settings.scan_exclude
if hasattr(settings, 'scan_exclude_mime'):
scan_exclude_mime = scan_exclude_mime + settings.scan_exclude_mime
if args.notests:
scan_exclude = scan_exclude + [r'/test[^/]+/']
analyze_paths(args.path, disallowed, msg)
| [((86, 8, 86, 37), 'magic.detect_from_filename', 'magic.detect_from_filename', ({(86, 35, 86, 36): 'f'}, {}), '(f)', False, 'import sys, os, argparse, re, unicodedata, magic\n'), ((120, 13, 120, 26), 'os.listdir', 'os.listdir', ({(120, 24, 120, 25): 'd'}, {}), '(d)', False, 'import sys, os, argparse, re, unicodedata, magic\n'), ((129, 10, 129, 33), 'unicodedata.category', 'unicodedata.category', ({(129, 31, 129, 32): 'c'}, {}), '(c)', False, 'import sys, os, argparse, re, unicodedata, magic\n'), ((135, 13, 135, 87), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (), '', False, 'import sys, os, argparse, re, unicodedata, magic\n'), ((110, 11, 110, 21), 'os.stat', 'os.stat', ({(110, 19, 110, 20): 'p'}, {}), '(p)', False, 'import sys, os, argparse, re, unicodedata, magic\n'), ((175, 15, 175, 78), 'importlib.util.spec_from_file_location', 'importlib.util.spec_from_file_location', ({(175, 54, 175, 64): '"""settings"""', (175, 66, 175, 77): 'args.config'}, {}), "('settings', args.config)", False, 'import importlib\n'), ((176, 19, 176, 56), 'importlib.util.module_from_spec', 'importlib.util.module_from_spec', ({(176, 51, 176, 55): 'spec'}, {}), '(spec)', False, 'import importlib\n'), ((88, 35, 88, 50), 're.search', 're.search', ({(88, 45, 88, 46): 'e', (88, 48, 88, 49): 'f'}, {}), '(e, f)', False, 'import sys, os, argparse, re, unicodedata, magic\n'), ((121, 20, 121, 38), 'os.path.join', 'os.path.join', ({(121, 33, 121, 34): 'd', (121, 36, 121, 37): 'f'}, {}), '(d, f)', False, 'import sys, os, argparse, re, unicodedata, magic\n'), ((93, 48, 93, 73), 're.search', 're.search', ({(93, 58, 93, 59): 'e', (93, 61, 93, 72): 'm.mime_type'}, {}), '(e, m.mime_type)', False, 'import sys, os, argparse, re, unicodedata, magic\n')] |
mamadbiabon/iGibson | igibson/object_states/aabb.py | d416a470240eb7ad86e04fee475ae4bd67263a7c | import numpy as np
from igibson.external.pybullet_tools.utils import aabb_union, get_aabb, get_all_links
from igibson.object_states.object_state_base import CachingEnabledObjectState
class AABB(CachingEnabledObjectState):
def _compute_value(self):
body_id = self.obj.get_body_id()
all_links = get_all_links(body_id)
aabbs = [get_aabb(body_id, link=link) for link in all_links]
aabb_low, aabb_hi = aabb_union(aabbs)
if not hasattr(self.obj, "category") or self.obj.category != "floors" or self.obj.room_floor is None:
return np.array(aabb_low), np.array(aabb_hi)
# TODO: remove after split floors
# room_floor will be set to the correct RoomFloor beforehand
room_instance = self.obj.room_floor.room_instance
# Get the x-y values from the room segmentation map
room_aabb_low, room_aabb_hi = self.obj.room_floor.scene.get_aabb_by_room_instance(room_instance)
if room_aabb_low is None:
return np.array(aabb_low), np.array(aabb_hi)
# Use the z values from pybullet
room_aabb_low[2] = aabb_low[2]
room_aabb_hi[2] = aabb_hi[2]
return np.array(room_aabb_low), np.array(room_aabb_hi)
def _set_value(self, new_value):
raise NotImplementedError("AABB state currently does not support setting.")
# Nothing needs to be done to save/load AABB since it will happen due to pose caching.
def _dump(self):
return None
def load(self, data):
return
| [((10, 20, 10, 42), 'igibson.external.pybullet_tools.utils.get_all_links', 'get_all_links', ({(10, 34, 10, 41): 'body_id'}, {}), '(body_id)', False, 'from igibson.external.pybullet_tools.utils import aabb_union, get_aabb, get_all_links\n'), ((12, 28, 12, 45), 'igibson.external.pybullet_tools.utils.aabb_union', 'aabb_union', ({(12, 39, 12, 44): 'aabbs'}, {}), '(aabbs)', False, 'from igibson.external.pybullet_tools.utils import aabb_union, get_aabb, get_all_links\n'), ((11, 17, 11, 45), 'igibson.external.pybullet_tools.utils.get_aabb', 'get_aabb', (), '', False, 'from igibson.external.pybullet_tools.utils import aabb_union, get_aabb, get_all_links\n'), ((31, 15, 31, 38), 'numpy.array', 'np.array', ({(31, 24, 31, 37): 'room_aabb_low'}, {}), '(room_aabb_low)', True, 'import numpy as np\n'), ((31, 40, 31, 62), 'numpy.array', 'np.array', ({(31, 49, 31, 61): 'room_aabb_hi'}, {}), '(room_aabb_hi)', True, 'import numpy as np\n'), ((15, 19, 15, 37), 'numpy.array', 'np.array', ({(15, 28, 15, 36): 'aabb_low'}, {}), '(aabb_low)', True, 'import numpy as np\n'), ((15, 39, 15, 56), 'numpy.array', 'np.array', ({(15, 48, 15, 55): 'aabb_hi'}, {}), '(aabb_hi)', True, 'import numpy as np\n'), ((25, 19, 25, 37), 'numpy.array', 'np.array', ({(25, 28, 25, 36): 'aabb_low'}, {}), '(aabb_low)', True, 'import numpy as np\n'), ((25, 39, 25, 56), 'numpy.array', 'np.array', ({(25, 48, 25, 55): 'aabb_hi'}, {}), '(aabb_hi)', True, 'import numpy as np\n')] |
fujigon/integrations-core | vsphere/tests/test_vsphere.py | 256b1c138fd1bf1c71db63698737e813cfda00f8 | # (C) Datadog, Inc. 2010-2017
# All rights reserved
# Licensed under Simplified BSD License (see LICENSE)
from __future__ import unicode_literals
import time
from datetime import datetime
import mock
import pytest
from mock import MagicMock
from pyVmomi import vim
from datadog_checks.vsphere import VSphereCheck
from datadog_checks.vsphere.cache_config import CacheConfig
from datadog_checks.vsphere.common import SOURCE_TYPE
from datadog_checks.vsphere.errors import BadConfigError, ConnectionError
from datadog_checks.vsphere.vsphere import (
REFRESH_METRICS_METADATA_INTERVAL,
REFRESH_MORLIST_INTERVAL,
RESOURCE_TYPE_METRICS,
SHORT_ROLLUP,
)
from .utils import MockedMOR, assertMOR, disable_thread_pool, get_mocked_server
SERVICE_CHECK_TAGS = ["vcenter_server:vsphere_mock", "vcenter_host:None", "foo:bar"]
def test__init__(instance):
with pytest.raises(BadConfigError):
# Must define a unique 'name' per vCenter instance
VSphereCheck('vsphere', {}, {}, [{'': ''}])
init_config = {
'clean_morlist_interval': 50,
'refresh_morlist_interval': 42,
'refresh_metrics_metadata_interval': -42,
'batch_property_collector_size': -1,
}
check = VSphereCheck('vsphere', init_config, {}, [instance])
i_key = check._instance_key(instance)
assert check.time_started > 0
assert not check.server_instances
assert check.cache_config.get_interval(CacheConfig.Morlist, i_key) == 42
assert check.cache_config.get_interval(CacheConfig.Metadata, i_key) == -42
assert check.clean_morlist_interval == 50
assert len(check.event_config) == 1
assert 'vsphere_mock' in check.event_config
assert not check.registry
assert not check.latest_event_query
assert check.batch_collector_size == 0
assert check.batch_morlist_size == 50
assert check.excluded_host_tags == []
def test_excluded_host_tags(vsphere, instance, aggregator):
# Check default value and precedence of instance config over init config
check = VSphereCheck('vsphere', {}, {}, [instance])
assert check.excluded_host_tags == []
check = VSphereCheck('vsphere', {"excluded_host_tags": ["vsphere_host"]}, {}, [instance])
assert check.excluded_host_tags == ["vsphere_host"]
instance["excluded_host_tags"] = []
check = VSphereCheck('vsphere', {"excluded_host_tags": ["vsphere_host"]}, {}, [instance])
assert check.excluded_host_tags == []
# Test host tags are excluded from external host metadata, but still stored in the cache for metrics
vsphere.excluded_host_tags = ["vsphere_host"]
mocked_vm = MockedMOR(spec="VirtualMachine")
mocked_host = MockedMOR(spec="HostSystem")
mocked_mors_attrs = {
mocked_vm: {
"name": "mocked_vm",
"parent": mocked_host,
"runtime.powerState": vim.VirtualMachinePowerState.poweredOn,
},
mocked_host: {"name": "mocked_host", "parent": None},
}
with mock.patch("datadog_checks.vsphere.VSphereCheck._collect_mors_and_attributes", return_value=mocked_mors_attrs):
server_instance = vsphere._get_server_instance(instance)
result = MagicMock()
result.value = [23.4]
server_instance.content.perfManager.QueryPerf.return_value = [MagicMock(value=[result], entity=mocked_vm)]
vsphere.metadata_cache = MagicMock()
vsphere.metadata_cache.get_metadata.return_value = {"name": "mymetric", "unit": "kb"}
vsphere.in_compatibility_mode = MagicMock()
vsphere.in_compatibility_mode.return_value = False
vsphere.check(instance)
ext_host_tags = vsphere.get_external_host_tags()
# vsphere_host tag not in external metadata
for host, source_tags in ext_host_tags:
if host == u"mocked_vm":
tags = source_tags["vsphere"]
for tag in tags:
assert "vsphere_host:" not in tag
break
# vsphere_host tag still in cache for sending with metrics
aggregator.assert_metric('vsphere.mymetric', value=23.4, hostname="mocked_vm", count=1)
aggregator.assert_metric_has_tag('vsphere.mymetric', tag="vsphere_host:mocked_host", count=1)
def test__is_excluded():
"""
* Exclude hosts/vms not compliant with the user's `*_include` configuration.
* Exclude "non-labeled" virtual machines when the user configuration instructs to.
"""
# Sample(s)
include_regexes = {'host_include': "f[o]+", 'vm_include': "f[o]+"}
# OK
included_host = MockedMOR(spec="HostSystem", name="foo")
included_vm = MockedMOR(spec="VirtualMachine", name="foo")
assert not VSphereCheck._is_excluded(included_host, {"name": included_host.name}, include_regexes, None)
assert not VSphereCheck._is_excluded(included_vm, {"name": included_vm.name}, include_regexes, None)
# Not OK!
excluded_host = MockedMOR(spec="HostSystem", name="bar")
excluded_vm = MockedMOR(spec="VirtualMachine", name="bar")
assert VSphereCheck._is_excluded(excluded_host, {"name": excluded_host.name}, include_regexes, None)
assert VSphereCheck._is_excluded(excluded_vm, {"name": excluded_vm.name}, include_regexes, None)
# Sample(s)
include_regexes = None
include_only_marked = True
# OK
included_vm = MockedMOR(spec="VirtualMachine", name="foo", label=True)
assert not VSphereCheck._is_excluded(
included_vm, {"customValue": included_vm.customValue}, include_regexes, include_only_marked
)
# Not OK
included_vm = MockedMOR(spec="VirtualMachine", name="foo")
assert VSphereCheck._is_excluded(included_vm, {"customValue": []}, include_regexes, include_only_marked)
def test_vms_in_filtered_host_are_filtered(vsphere, instance):
"""Test that all vms belonging to a filtered host are also filtered"""
server_instance = vsphere._get_server_instance(instance)
filtered_host = MockedMOR(spec="HostSystem")
filtered_vm = MockedMOR(spec="VirtualMachine")
non_filtered_host = MockedMOR(spec="HostSystem")
non_filtered_vm = MockedMOR(spec="VirtualMachine")
mocked_mors_attrs = {
filtered_host: {"name": "filtered_host_number_1", "parent": None},
filtered_vm: {
"name": "this_vm_is_filtered",
"runtime.powerState": vim.VirtualMachinePowerState.poweredOn,
"runtime.host": filtered_host,
},
non_filtered_host: {"name": "non_filtered_host_number_1", "parent": None},
non_filtered_vm: {
"name": "this_vm_is_not_filtered",
"runtime.powerState": vim.VirtualMachinePowerState.poweredOn,
"runtime.host": non_filtered_host,
},
}
regex = {'host_include': '^(?!filtered_.+)'}
with mock.patch("datadog_checks.vsphere.VSphereCheck._collect_mors_and_attributes", return_value=mocked_mors_attrs):
obj_list = vsphere._get_all_objs(server_instance, regex, False, [])
assert len(obj_list[vim.VirtualMachine]) == 1
assert len(obj_list[vim.HostSystem]) == 1
assert {
"mor_type": "vm",
"mor": non_filtered_vm,
"hostname": "this_vm_is_not_filtered",
"tags": ["vsphere_host:non_filtered_host_number_1", "vsphere_type:vm"],
} == obj_list[vim.VirtualMachine][0]
assert {
"mor_type": "host",
"mor": non_filtered_host,
"hostname": "non_filtered_host_number_1",
"tags": ["vsphere_type:host"],
} == obj_list[vim.HostSystem][0]
def test__get_all_objs(vsphere, instance):
"""
Test that we don't raise KeyError if the property collector failed to collect some attributes
    and that we handle the case where there are missing attributes
"""
server_instance = vsphere._get_server_instance(instance)
vm_no_parent = MockedMOR(spec="VirtualMachine")
vm_no_powerstate = MockedMOR(spec="VirtualMachine")
vm_host_parent = MockedMOR(spec="VirtualMachine")
mocked_host = MockedMOR(spec="HostSystem")
mocked_datastore = MockedMOR(spec="Datastore")
mocked_datacenter = MockedMOR(spec="Datacenter")
mocked_cluster = MockedMOR(spec="ClusterComputeResource")
mocked_mors_attrs = {
vm_no_parent: {"name": "vm_no_parent", "runtime.powerState": vim.VirtualMachinePowerState.poweredOn},
vm_no_powerstate: {"name": "vm_no_powerstate"},
vm_host_parent: {"parent": mocked_host, "runtime.powerState": vim.VirtualMachinePowerState.poweredOn},
mocked_host: {"name": "mocked_host", "parent": None},
mocked_datastore: {},
mocked_cluster: {"name": "cluster"},
mocked_datacenter: {"parent": MockedMOR(spec="Folder", name="unknown folder"), "name": "datacenter"},
}
with mock.patch("datadog_checks.vsphere.VSphereCheck._collect_mors_and_attributes", return_value=mocked_mors_attrs):
obj_list = vsphere._get_all_objs(server_instance, None, False, [])
assert len(obj_list[vim.VirtualMachine]) == 2
assert {
"mor_type": "vm",
"mor": vm_no_parent,
"hostname": "vm_no_parent",
"tags": ["vsphere_host:unknown", "vsphere_type:vm"],
} in obj_list[vim.VirtualMachine]
assert {
"mor_type": "vm",
"mor": vm_host_parent,
"hostname": "unknown",
"tags": ["vsphere_host:mocked_host", "vsphere_host:unknown", "vsphere_type:vm"],
} in obj_list[vim.VirtualMachine]
assert len(obj_list[vim.HostSystem]) == 1
assert {
"mor_type": "host",
"mor": mocked_host,
"hostname": "mocked_host",
"tags": ["vsphere_type:host"],
} in obj_list[vim.HostSystem]
assert len(obj_list[vim.Datastore]) == 1
assert {
"mor_type": "datastore",
"mor": mocked_datastore,
"hostname": None,
"tags": ["vsphere_datastore:unknown", "vsphere_type:datastore"],
} in obj_list[vim.Datastore]
assert len(obj_list[vim.Datacenter]) == 1
assert {
"mor_type": "datacenter",
"mor": mocked_datacenter,
"hostname": None,
"tags": ["vsphere_folder:unknown", "vsphere_datacenter:datacenter", "vsphere_type:datacenter"],
} in obj_list[vim.Datacenter]
assert len(obj_list[vim.ClusterComputeResource]) == 1
assert {
"mor_type": "cluster",
"mor": mocked_cluster,
"hostname": None,
"tags": ["vsphere_cluster:cluster", "vsphere_type:cluster"],
} in obj_list[vim.ClusterComputeResource]
def test__collect_mors_and_attributes(vsphere, instance):
"""
Test that we check for errors when collecting properties with property collector
"""
server_instance = vsphere._get_server_instance(instance)
with mock.patch("datadog_checks.vsphere.vsphere.vmodl"):
obj = MagicMock(missingSet=None, obj="obj")
result = MagicMock(token=None, objects=[obj])
server_instance.content.propertyCollector.RetrievePropertiesEx.return_value = result
log = MagicMock()
vsphere.log = log
mor_attrs = vsphere._collect_mors_and_attributes(server_instance)
log.error.assert_not_called()
assert len(mor_attrs) == 1
obj.missingSet = [MagicMock(path="prop", fault="fault")]
mor_attrs = vsphere._collect_mors_and_attributes(server_instance)
log.error.assert_called_once_with('Unable to retrieve property %s for object %s: %s', 'prop', 'obj', 'fault')
assert len(mor_attrs) == 1
def test__cache_morlist_raw(vsphere, instance):
"""
Explore the vCenter infrastructure to discover hosts, virtual machines.
Input topology:
```
rootFolder
- datacenter1
- compute_resource1
- host1 # Filtered out
- host2
- folder1
- datacenter2
- compute_resource2
- host3
- vm1 # Not labeled
- vm2 # Filtered out
- vm3 # Powered off
- vm4
```
"""
# Samples
with mock.patch('datadog_checks.vsphere.vsphere.vmodl'):
instance["host_include_only_regex"] = "host[2-9]"
instance["vm_include_only_regex"] = "vm[^2]"
instance["include_only_marked"] = True
# Discover hosts and virtual machines
vsphere._cache_morlist_raw(instance)
# Assertions: 1 labeled+monitored VM + 2 hosts + 2 datacenters + 2 clusters + 1 datastore.
assertMOR(vsphere, instance, count=8)
# ...on hosts
assertMOR(vsphere, instance, spec="host", count=2)
tags = [
"vcenter_server:vsphere_mock",
"vsphere_folder:rootFolder",
"vsphere_datacenter:datacenter1",
"vsphere_compute:compute_resource1",
"vsphere_cluster:compute_resource1",
"vsphere_type:host",
]
assertMOR(vsphere, instance, name="host2", spec="host", tags=tags)
tags = [
"vcenter_server:vsphere_mock",
"vsphere_folder:rootFolder",
"vsphere_folder:folder1",
"vsphere_datacenter:datacenter2",
"vsphere_compute:compute_resource2",
"vsphere_cluster:compute_resource2",
"vsphere_type:host",
]
assertMOR(vsphere, instance, name="host3", spec="host", tags=tags)
# ...on VMs
assertMOR(vsphere, instance, spec="vm", count=1)
tags = [
"vcenter_server:vsphere_mock",
"vsphere_folder:folder1",
"vsphere_datacenter:datacenter2",
"vsphere_compute:compute_resource2",
"vsphere_cluster:compute_resource2",
"vsphere_host:host3",
"vsphere_type:vm",
]
assertMOR(vsphere, instance, name="vm4", spec="vm", subset=True, tags=tags)
def test_use_guest_hostname(vsphere, instance):
# Default value
with mock.patch("datadog_checks.vsphere.VSphereCheck._get_all_objs") as mock_get_all_objs, mock.patch(
"datadog_checks.vsphere.vsphere.vmodl"
):
vsphere._cache_morlist_raw(instance)
# Default value
assert not mock_get_all_objs.call_args[1]["use_guest_hostname"]
# use guest hostname
instance["use_guest_hostname"] = True
vsphere._cache_morlist_raw(instance)
assert mock_get_all_objs.call_args[1]["use_guest_hostname"]
with mock.patch("datadog_checks.vsphere.vsphere.vmodl"):
# Discover hosts and virtual machines
instance["use_guest_hostname"] = True
vsphere._cache_morlist_raw(instance)
assertMOR(vsphere, instance, spec="vm", count=3)
# Fallback on VM name when guest hostname not available
assertMOR(vsphere, instance, name="vm1", spec="vm", subset=True)
assertMOR(vsphere, instance, name="vm2_guest", spec="vm", subset=True)
assertMOR(vsphere, instance, name="vm4_guest", spec="vm", subset=True)
def test__process_mor_objects_queue(vsphere, instance):
vsphere.log = MagicMock()
vsphere._process_mor_objects_queue_async = MagicMock()
vsphere._process_mor_objects_queue(instance)
# Queue hasn't been initialized
vsphere.log.debug.assert_called_once_with(
"Objects queue is not initialized yet for instance %s, skipping processing", vsphere._instance_key(instance)
)
vsphere.batch_morlist_size = 1
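    # A batch size of 1 forces each queued object into its own query_specs batch,
    # which is what the call_args checks below rely on.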
i_key = vsphere._instance_key(instance)
with mock.patch('datadog_checks.vsphere.vsphere.vmodl'):
vsphere._cache_morlist_raw(instance)
assert sum(vsphere.mor_objects_queue.size(i_key, res_type) for res_type in RESOURCE_TYPE_METRICS) == 11
vsphere._process_mor_objects_queue(instance)
# Object queue should be empty after processing
assert sum(vsphere.mor_objects_queue.size(i_key, res_type) for res_type in RESOURCE_TYPE_METRICS) == 0
assert vsphere._process_mor_objects_queue_async.call_count == 0 # realtime only
for call_args in vsphere._process_mor_objects_queue_async.call_args_list:
# query_specs parameter should be a list of size 1 since the batch size is 1
assert len(call_args[0][1]) == 1
instance["collect_realtime_only"] = False
vsphere._cache_morlist_raw(instance)
assert sum(vsphere.mor_objects_queue.size(i_key, res_type) for res_type in RESOURCE_TYPE_METRICS) == 11
vsphere._process_mor_objects_queue(instance)
# Object queue should be empty after processing
assert sum(vsphere.mor_objects_queue.size(i_key, res_type) for res_type in RESOURCE_TYPE_METRICS) == 0
assert vsphere._process_mor_objects_queue_async.call_count == 5 # 2 datacenters, 2 clusters, 1 datastore
def test_collect_realtime_only(vsphere, instance):
"""
    Test that the collect_realtime_only parameter behaves as expected
"""
vsphere._process_mor_objects_queue_async = MagicMock()
instance["collect_realtime_only"] = False
with mock.patch('datadog_checks.vsphere.vsphere.vmodl'):
vsphere._cache_morlist_raw(instance)
vsphere._process_mor_objects_queue(instance)
# Called once to process the 2 datacenters, then 2 clusters, then the datastore
assert vsphere._process_mor_objects_queue_async.call_count == 3
instance["collect_realtime_only"] = True
vsphere._process_mor_objects_queue_async.reset_mock()
with mock.patch('datadog_checks.vsphere.vsphere.vmodl'):
vsphere._cache_morlist_raw(instance)
vsphere._process_mor_objects_queue(instance)
assert vsphere._process_mor_objects_queue_async.call_count == 0
def test__cache_metrics_metadata(vsphere, instance):
vsphere.metadata_cache = MagicMock()
vsphere._cache_metrics_metadata(instance)
vsphere.metadata_cache.init_instance.assert_called_once_with(vsphere._instance_key(instance))
vsphere.metadata_cache.set_metadata.assert_called_once()
vsphere.metadata_cache.set_metric_ids.assert_called_once()
def test__cache_metrics_metadata_compatibility(vsphere, instance):
server_instance = vsphere._get_server_instance(instance)
i_key = vsphere._instance_key(instance)
counter = MagicMock()
counter.rollupType = "average"
counter.key = 1
vsphere.format_metric_name = MagicMock()
# New way
instance["collection_level"] = 3
server_instance.content.perfManager.QueryPerfCounterByLevel.return_value = [counter]
vsphere._cache_metrics_metadata(instance)
server_instance.content.perfManager.QueryPerfCounterByLevel.assert_called_once_with(3)
assert len(vsphere.metadata_cache._metric_ids[i_key]) == 1
assert len(vsphere.metadata_cache._metadata[i_key]) == 1
vsphere.format_metric_name.assert_called_once_with(counter)
# Compatibility mode
instance["all_metrics"] = False
del instance["collection_level"]
vsphere.format_metric_name.reset_mock()
server_instance.content.perfManager.perfCounter = [counter]
vsphere._cache_metrics_metadata(instance)
assert not vsphere.metadata_cache._metric_ids[i_key]
assert len(vsphere.metadata_cache._metadata[i_key]) == 1
vsphere.format_metric_name.assert_called_once_with(counter, compatibility=True)
def test_in_compatibility_mode(vsphere, instance):
vsphere.log = MagicMock()
instance["collection_level"] = 2
assert not vsphere.in_compatibility_mode(instance)
instance["all_metrics"] = True
assert not vsphere.in_compatibility_mode(instance)
vsphere.log.warning.assert_not_called()
assert not vsphere.in_compatibility_mode(instance, log_warning=True)
vsphere.log.warning.assert_called_once()
del instance["collection_level"]
vsphere.log.reset_mock()
assert vsphere.in_compatibility_mode(instance)
vsphere.log.warning.assert_not_called()
assert vsphere.in_compatibility_mode(instance, log_warning=True)
vsphere.log.warning.assert_called_once()
def test_format_metric_name(vsphere):
counter = MagicMock()
counter.groupInfo.key = "group"
counter.nameInfo.key = "name"
counter.rollupType = "rollup"
assert vsphere.format_metric_name(counter, compatibility=True) == "group.name"
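    # Outside compatibility mode the rollup type is appended as its short alias,
    # i.e. "group.name.<short_rollup>" for every entry of SHORT_ROLLUP.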
for rollup, short_rollup in SHORT_ROLLUP.items():
counter.rollupType = rollup
assert vsphere.format_metric_name(counter) == "group.name.{}".format(short_rollup)
def test_collect_metrics(vsphere, instance):
with mock.patch('datadog_checks.vsphere.vsphere.vmodl'):
vsphere.batch_morlist_size = 1
vsphere._collect_metrics_async = MagicMock()
vsphere._cache_metrics_metadata(instance)
vsphere._cache_morlist_raw(instance)
vsphere._process_mor_objects_queue(instance)
vsphere.collect_metrics(instance)
assert vsphere._collect_metrics_async.call_count == 6 # One for each VM/host, datacenters are not collected
for call_args in vsphere._collect_metrics_async.call_args_list:
# query_specs parameter should be a list of size 1 since the batch size is 1
assert len(call_args[0][1]) == 1
def test__collect_metrics_async_compatibility(vsphere, instance):
server_instance = vsphere._get_server_instance(instance)
server_instance.content.perfManager.QueryPerf.return_value = [MagicMock(value=[MagicMock()])]
vsphere.mor_cache = MagicMock()
vsphere.metadata_cache = MagicMock()
vsphere.metadata_cache.get_metadata.return_value = {"name": "unknown"}
vsphere.in_compatibility_mode = MagicMock()
vsphere.log = MagicMock()
vsphere.in_compatibility_mode.return_value = True
vsphere._collect_metrics_async(instance, [])
vsphere.log.debug.assert_called_with('Skipping unknown `%s` metric.', 'unknown')
vsphere.log.reset_mock()
vsphere.in_compatibility_mode.return_value = False
vsphere._collect_metrics_async(instance, [])
vsphere.log.debug.assert_not_called()
def test__collect_metrics_async_hostname(vsphere, instance, aggregator):
server_instance = vsphere._get_server_instance(instance)
result = MagicMock()
result.value = [23.4]
server_instance.content.perfManager.QueryPerf.return_value = [MagicMock(value=[result])]
mor = {"hostname": "foo"}
vsphere.mor_cache = MagicMock()
vsphere.mor_cache.get_mor.return_value = mor
vsphere.metadata_cache = MagicMock()
vsphere.metadata_cache.get_metadata.return_value = {"name": "mymetric", "unit": "kb"}
vsphere.in_compatibility_mode = MagicMock()
vsphere.in_compatibility_mode.return_value = False
vsphere._collect_metrics_async(instance, [])
aggregator.assert_metric('vsphere.mymetric', value=23.4, hostname="foo")
def test_check(vsphere, instance):
"""
Test the check() method
"""
with mock.patch('datadog_checks.vsphere.vsphere.vmodl'):
with mock.patch.object(vsphere, 'set_external_tags') as set_external_tags:
vsphere.check(instance)
set_external_tags.assert_called_once()
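            # set_external_tags receives a list of (hostname, {source_type: [tags]})
            # pairs; turn it into a dict to look up the tags attached to each entity.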
all_the_tags = dict(set_external_tags.call_args[0][0])
assert all_the_tags['vm4'][SOURCE_TYPE] == [
'vcenter_server:vsphere_mock',
'vsphere_folder:rootFolder',
'vsphere_folder:folder1',
'vsphere_datacenter:datacenter2',
'vsphere_cluster:compute_resource2',
'vsphere_compute:compute_resource2',
'vsphere_host:host3',
'vsphere_host:host3',
'vsphere_type:vm',
]
assert all_the_tags['host1'][SOURCE_TYPE] == [
'vcenter_server:vsphere_mock',
'vsphere_folder:rootFolder',
'vsphere_datacenter:datacenter1',
'vsphere_cluster:compute_resource1',
'vsphere_compute:compute_resource1',
'vsphere_type:host',
]
assert all_the_tags['host3'][SOURCE_TYPE] == [
'vcenter_server:vsphere_mock',
'vsphere_folder:rootFolder',
'vsphere_folder:folder1',
'vsphere_datacenter:datacenter2',
'vsphere_cluster:compute_resource2',
'vsphere_compute:compute_resource2',
'vsphere_type:host',
]
assert all_the_tags['vm2'][SOURCE_TYPE] == [
'vcenter_server:vsphere_mock',
'vsphere_folder:rootFolder',
'vsphere_folder:folder1',
'vsphere_datacenter:datacenter2',
'vsphere_cluster:compute_resource2',
'vsphere_compute:compute_resource2',
'vsphere_host:host3',
'vsphere_host:host3',
'vsphere_type:vm',
]
assert all_the_tags['vm1'][SOURCE_TYPE] == [
'vcenter_server:vsphere_mock',
'vsphere_folder:rootFolder',
'vsphere_folder:folder1',
'vsphere_datacenter:datacenter2',
'vsphere_cluster:compute_resource2',
'vsphere_compute:compute_resource2',
'vsphere_host:host3',
'vsphere_host:host3',
'vsphere_type:vm',
]
assert all_the_tags['host2'][SOURCE_TYPE] == [
'vcenter_server:vsphere_mock',
'vsphere_folder:rootFolder',
'vsphere_datacenter:datacenter1',
'vsphere_cluster:compute_resource1',
'vsphere_compute:compute_resource1',
'vsphere_type:host',
]
def test_service_check_ko(aggregator, instance):
check = disable_thread_pool(VSphereCheck('disk', {}, {}, [instance]))
with mock.patch('datadog_checks.vsphere.vsphere.connect.SmartConnect') as SmartConnect:
# SmartConnect fails
SmartConnect.side_effect = Exception()
with pytest.raises(ConnectionError):
check.check(instance)
aggregator.assert_service_check(
VSphereCheck.SERVICE_CHECK_NAME, status=VSphereCheck.CRITICAL, count=1, tags=SERVICE_CHECK_TAGS
)
aggregator.reset()
# SmartConnect succeeds, CurrentTime fails
server = MagicMock()
server.CurrentTime.side_effect = Exception()
SmartConnect.side_effect = None
SmartConnect.return_value = server
with pytest.raises(ConnectionError):
check.check(instance)
aggregator.assert_service_check(
VSphereCheck.SERVICE_CHECK_NAME, status=VSphereCheck.CRITICAL, count=1, tags=SERVICE_CHECK_TAGS
)
def test_service_check_ok(aggregator, instance):
check = disable_thread_pool(VSphereCheck('disk', {}, {}, [instance]))
with mock.patch('datadog_checks.vsphere.vsphere.vmodl'):
with mock.patch('datadog_checks.vsphere.vsphere.connect.SmartConnect') as SmartConnect:
SmartConnect.return_value = get_mocked_server()
check.check(instance)
aggregator.assert_service_check(
VSphereCheck.SERVICE_CHECK_NAME, status=VSphereCheck.OK, tags=SERVICE_CHECK_TAGS
)
def test__instance_key(vsphere, instance):
assert vsphere._instance_key(instance) == "vsphere_mock"
del instance['name']
with pytest.raises(BadConfigError):
vsphere._instance_key(instance)
def test__should_cache(instance):
now = time.time()
# do not use fixtures for the check instance, some params are set at
# __init__ time and we need to instantiate the check multiple times
check = VSphereCheck('vsphere', {}, {}, [instance])
i_key = check._instance_key(instance)
# first run should always cache
assert check._should_cache(instance, CacheConfig.Morlist)
assert check._should_cache(instance, CacheConfig.Metadata)
# explicitly set cache expiration times, don't use defaults so we also test
# configuration is properly propagated
init_config = {
'refresh_morlist_interval': 2 * REFRESH_MORLIST_INTERVAL,
'refresh_metrics_metadata_interval': 2 * REFRESH_METRICS_METADATA_INTERVAL,
}
check = VSphereCheck('vsphere', init_config, {}, [instance])
# simulate previous runs, set the last execution time in the past
check.cache_config.set_last(CacheConfig.Morlist, i_key, now - (2 * REFRESH_MORLIST_INTERVAL))
check.cache_config.set_last(CacheConfig.Metadata, i_key, now - (2 * REFRESH_METRICS_METADATA_INTERVAL))
with mock.patch("time.time", return_value=now):
assert not check._should_cache(instance, CacheConfig.Morlist)
assert not check._should_cache(instance, CacheConfig.Metadata)
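# The helpers below build mocked pyVmomi event objects (an alarm status change and a
# VM hot migration) that are fed to the event-collection tests further down.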
def alarm_event(from_status='green', to_status='red', message='Some error'):
now = datetime.utcnow()
vm = MockedMOR(spec='VirtualMachine')
dc = MockedMOR(spec="Datacenter")
dc_arg = vim.event.DatacenterEventArgument(datacenter=dc, name='dc1')
alarm = MockedMOR(spec="Alarm")
alarm_arg = vim.event.AlarmEventArgument(alarm=alarm, name='alarm1')
entity = vim.event.ManagedEntityEventArgument(entity=vm, name='vm1')
event = vim.event.AlarmStatusChangedEvent(
entity=entity, fullFormattedMessage=message, createdTime=now, to=to_status, datacenter=dc_arg, alarm=alarm_arg
)
setattr(event, 'from', from_status) # noqa: B009
return event
def migrated_event():
now = datetime.utcnow()
vm = MockedMOR(spec='VirtualMachine', name='vm1')
vm_arg = vim.event.VmEventArgument(vm=vm)
host = MockedMOR(spec='HostSystem')
host_arg = vim.event.HostEventArgument(host=host, name='host1')
host_dest = MockedMOR(spec='HostSystem')
host_dest_arg = vim.event.HostEventArgument(host=host_dest, name='host2')
dc = MockedMOR(spec='Datacenter')
dc_arg = vim.event.DatacenterEventArgument(datacenter=dc, name='dc1')
dc_dest = MockedMOR(spec='Datacenter')
dc_dest_arg = vim.event.DatacenterEventArgument(datacenter=dc_dest, name='dc2')
ds = MockedMOR(spec='Datastore')
ds_arg = vim.event.DatastoreEventArgument(datastore=ds, name='ds1')
ds_dest = MockedMOR(spec='Datastore')
ds_dest_arg = vim.event.DatastoreEventArgument(datastore=ds_dest, name='ds2')
event = vim.event.VmBeingHotMigratedEvent(
vm=vm_arg,
userName='John',
fullFormattedMessage='Some error',
createdTime=now,
host=host_arg,
destHost=host_dest_arg,
datacenter=dc_arg,
destDatacenter=dc_dest_arg,
ds=ds_arg,
destDatastore=ds_dest_arg,
)
return event
def test_events(aggregator, vsphere, instance):
with mock.patch('datadog_checks.vsphere.vsphere.vmodl'):
server_instance = vsphere._get_server_instance(instance)
server_instance.content.eventManager.QueryEvents.return_value = [alarm_event()]
vsphere.event_config['vsphere_mock'] = {'collect_vcenter_alarms': True}
vsphere.check(instance)
aggregator.assert_event(
"vCenter monitor status changed on this alarm, it was green and it's now red.", tags=['foo:bar']
)
def test_events_tags(aggregator, vsphere, instance):
with mock.patch('datadog_checks.vsphere.vsphere.vmodl'):
server_instance = vsphere._get_server_instance(instance)
server_instance.content.eventManager.QueryEvents.return_value = [migrated_event()]
vsphere.event_config['vsphere_mock'] = {'collect_vcenter_alarms': True}
vsphere.check(instance)
aggregator.assert_event(
"John has launched a hot migration of this virtual machine",
exact_match=False,
tags=[
'foo:bar',
'vsphere_host:host1',
'vsphere_host:host2',
'vsphere_datacenter:dc1',
'vsphere_datacenter:dc2',
],
)
server_instance = vsphere._get_server_instance(instance)
server_instance.content.eventManager.QueryEvents.return_value = [alarm_event()]
vsphere.check(instance)
aggregator.assert_event(
"vCenter monitor status changed on this alarm, it was green and it's now red.", tags=['foo:bar']
)
def test_events_gray_handled(aggregator, vsphere, instance):
with mock.patch('datadog_checks.vsphere.vsphere.vmodl'):
server_instance = vsphere._get_server_instance(instance)
event = alarm_event(from_status='gray', message='Went from Gray to Red')
server_instance.content.eventManager.QueryEvents.return_value = [event]
vsphere.event_config['vsphere_mock'] = {'collect_vcenter_alarms': True}
vsphere.check(instance)
aggregator.assert_event(
"vCenter monitor status changed on this alarm, it was gray and it's now red.", tags=['foo:bar']
)
event = alarm_event(from_status='yellow', to_status='gray', message='Went from Yellow to Gray')
server_instance.content.eventManager.QueryEvents.return_value = [event]
vsphere.check(instance)
aggregator.assert_event(
"vCenter monitor status changed on this alarm, it was yellow and it's now gray.",
tags=['foo:bar'],
alert_type='info',
)
def test_events_gray_ignored(aggregator, vsphere, instance):
with mock.patch('datadog_checks.vsphere.vsphere.vmodl'):
server_instance = vsphere._get_server_instance(instance)
event = alarm_event(from_status='gray', to_status='green', message='Went from Gray to Green')
server_instance.content.eventManager.QueryEvents.return_value = [event]
vsphere.event_config['vsphere_mock'] = {'collect_vcenter_alarms': True}
vsphere.check(instance)
assert not aggregator.events
event = alarm_event(from_status='green', to_status='gray', message='Went from Green to Gray')
server_instance.content.eventManager.QueryEvents.return_value = [event]
vsphere.check(instance)
assert not aggregator.events
| [((41, 12, 41, 64), 'datadog_checks.vsphere.VSphereCheck', 'VSphereCheck', ({(41, 25, 41, 34): '"""vsphere"""', (41, 36, 41, 47): 'init_config', (41, 49, 41, 51): '{}', (41, 53, 41, 63): '[instance]'}, {}), "('vsphere', init_config, {}, [instance])", False, 'from datadog_checks.vsphere import VSphereCheck\n'), ((60, 12, 60, 55), 'datadog_checks.vsphere.VSphereCheck', 'VSphereCheck', ({(60, 25, 60, 34): '"""vsphere"""', (60, 36, 60, 38): '{}', (60, 40, 60, 42): '{}', (60, 44, 60, 54): '[instance]'}, {}), "('vsphere', {}, {}, [instance])", False, 'from datadog_checks.vsphere import VSphereCheck\n'), ((62, 12, 62, 93), 'datadog_checks.vsphere.VSphereCheck', 'VSphereCheck', ({(62, 25, 62, 34): '"""vsphere"""', (62, 36, 62, 76): "{'excluded_host_tags': ['vsphere_host']}", (62, 78, 62, 80): '{}', (62, 82, 62, 92): '[instance]'}, {}), "('vsphere', {'excluded_host_tags': ['vsphere_host']}, {}, [\n instance])", False, 'from datadog_checks.vsphere import VSphereCheck\n'), ((65, 12, 65, 93), 'datadog_checks.vsphere.VSphereCheck', 'VSphereCheck', ({(65, 25, 65, 34): '"""vsphere"""', (65, 36, 65, 76): "{'excluded_host_tags': ['vsphere_host']}", (65, 78, 65, 80): '{}', (65, 82, 65, 92): '[instance]'}, {}), "('vsphere', {'excluded_host_tags': ['vsphere_host']}, {}, [\n instance])", False, 'from datadog_checks.vsphere import VSphereCheck\n'), ((127, 11, 127, 104), 'datadog_checks.vsphere.VSphereCheck._is_excluded', 'VSphereCheck._is_excluded', ({(127, 37, 127, 50): 'excluded_host', (127, 52, 127, 80): "{'name': excluded_host.name}", (127, 82, 127, 97): 'include_regexes', (127, 99, 127, 103): 'None'}, {}), "(excluded_host, {'name': excluded_host.name},\n include_regexes, None)", False, 'from datadog_checks.vsphere import VSphereCheck\n'), ((128, 11, 128, 100), 'datadog_checks.vsphere.VSphereCheck._is_excluded', 'VSphereCheck._is_excluded', ({(128, 37, 128, 48): 'excluded_vm', (128, 50, 128, 76): "{'name': excluded_vm.name}", (128, 78, 128, 93): 'include_regexes', (128, 95, 128, 99): 'None'}, {}), "(excluded_vm, {'name': excluded_vm.name},\n include_regexes, None)", False, 'from datadog_checks.vsphere import VSphereCheck\n'), ((142, 11, 142, 108), 'datadog_checks.vsphere.VSphereCheck._is_excluded', 'VSphereCheck._is_excluded', ({(142, 37, 142, 48): 'included_vm', (142, 50, 142, 69): "{'customValue': []}", (142, 71, 142, 86): 'include_regexes', (142, 88, 142, 107): 'include_only_marked'}, {}), "(included_vm, {'customValue': []}, include_regexes,\n include_only_marked)", False, 'from datadog_checks.vsphere import VSphereCheck\n'), ((371, 18, 371, 29), 'mock.MagicMock', 'MagicMock', ({}, {}), '()', False, 'from mock import MagicMock\n'), ((372, 47, 372, 58), 'mock.MagicMock', 'MagicMock', ({}, {}), '()', False, 'from mock import MagicMock\n'), ((405, 47, 405, 58), 'mock.MagicMock', 'MagicMock', ({}, {}), '()', False, 'from mock import MagicMock\n'), ((422, 29, 422, 40), 'mock.MagicMock', 'MagicMock', ({}, {}), '()', False, 'from mock import MagicMock\n'), ((433, 14, 433, 25), 'mock.MagicMock', 'MagicMock', ({}, {}), '()', False, 'from mock import MagicMock\n'), ((436, 33, 436, 44), 'mock.MagicMock', 'MagicMock', ({}, {}), '()', False, 'from mock import MagicMock\n'), ((461, 18, 461, 29), 'mock.MagicMock', 'MagicMock', ({}, {}), '()', False, 'from mock import MagicMock\n'), ((483, 14, 483, 25), 'mock.MagicMock', 'MagicMock', ({}, {}), '()', False, 'from mock import MagicMock\n'), ((489, 32, 489, 52), 'datadog_checks.vsphere.vsphere.SHORT_ROLLUP.items', 'SHORT_ROLLUP.items', ({}, {}), '()', False, 'from 
datadog_checks.vsphere.vsphere import REFRESH_METRICS_METADATA_INTERVAL, REFRESH_MORLIST_INTERVAL, RESOURCE_TYPE_METRICS, SHORT_ROLLUP\n'), ((511, 24, 511, 35), 'mock.MagicMock', 'MagicMock', ({}, {}), '()', False, 'from mock import MagicMock\n'), ((512, 29, 512, 40), 'mock.MagicMock', 'MagicMock', ({}, {}), '()', False, 'from mock import MagicMock\n'), ((514, 36, 514, 47), 'mock.MagicMock', 'MagicMock', ({}, {}), '()', False, 'from mock import MagicMock\n'), ((515, 18, 515, 29), 'mock.MagicMock', 'MagicMock', ({}, {}), '()', False, 'from mock import MagicMock\n'), ((529, 13, 529, 24), 'mock.MagicMock', 'MagicMock', ({}, {}), '()', False, 'from mock import MagicMock\n'), ((534, 24, 534, 35), 'mock.MagicMock', 'MagicMock', ({}, {}), '()', False, 'from mock import MagicMock\n'), ((536, 29, 536, 40), 'mock.MagicMock', 'MagicMock', ({}, {}), '()', False, 'from mock import MagicMock\n'), ((538, 36, 538, 47), 'mock.MagicMock', 'MagicMock', ({}, {}), '()', False, 'from mock import MagicMock\n'), ((665, 10, 665, 21), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((668, 12, 668, 55), 'datadog_checks.vsphere.VSphereCheck', 'VSphereCheck', ({(668, 25, 668, 34): '"""vsphere"""', (668, 36, 668, 38): '{}', (668, 40, 668, 42): '{}', (668, 44, 668, 54): '[instance]'}, {}), "('vsphere', {}, {}, [instance])", False, 'from datadog_checks.vsphere import VSphereCheck\n'), ((681, 12, 681, 64), 'datadog_checks.vsphere.VSphereCheck', 'VSphereCheck', ({(681, 25, 681, 34): '"""vsphere"""', (681, 36, 681, 47): 'init_config', (681, 49, 681, 51): '{}', (681, 53, 681, 63): '[instance]'}, {}), "('vsphere', init_config, {}, [instance])", False, 'from datadog_checks.vsphere import VSphereCheck\n'), ((692, 10, 692, 27), 'datetime.datetime.utcnow', 'datetime.utcnow', ({}, {}), '()', False, 'from datetime import datetime\n'), ((695, 13, 695, 73), 'pyVmomi.vim.event.DatacenterEventArgument', 'vim.event.DatacenterEventArgument', (), '', False, 'from pyVmomi import vim\n'), ((697, 16, 697, 72), 'pyVmomi.vim.event.AlarmEventArgument', 'vim.event.AlarmEventArgument', (), '', False, 'from pyVmomi import vim\n'), ((698, 13, 698, 72), 'pyVmomi.vim.event.ManagedEntityEventArgument', 'vim.event.ManagedEntityEventArgument', (), '', False, 'from pyVmomi import vim\n'), ((699, 12, 701, 5), 'pyVmomi.vim.event.AlarmStatusChangedEvent', 'vim.event.AlarmStatusChangedEvent', (), '', False, 'from pyVmomi import vim\n'), ((707, 10, 707, 27), 'datetime.datetime.utcnow', 'datetime.utcnow', ({}, {}), '()', False, 'from datetime import datetime\n'), ((709, 13, 709, 45), 'pyVmomi.vim.event.VmEventArgument', 'vim.event.VmEventArgument', (), '', False, 'from pyVmomi import vim\n'), ((711, 15, 711, 67), 'pyVmomi.vim.event.HostEventArgument', 'vim.event.HostEventArgument', (), '', False, 'from pyVmomi import vim\n'), ((713, 20, 713, 77), 'pyVmomi.vim.event.HostEventArgument', 'vim.event.HostEventArgument', (), '', False, 'from pyVmomi import vim\n'), ((715, 13, 715, 73), 'pyVmomi.vim.event.DatacenterEventArgument', 'vim.event.DatacenterEventArgument', (), '', False, 'from pyVmomi import vim\n'), ((717, 18, 717, 83), 'pyVmomi.vim.event.DatacenterEventArgument', 'vim.event.DatacenterEventArgument', (), '', False, 'from pyVmomi import vim\n'), ((719, 13, 719, 71), 'pyVmomi.vim.event.DatastoreEventArgument', 'vim.event.DatastoreEventArgument', (), '', False, 'from pyVmomi import vim\n'), ((721, 18, 721, 81), 'pyVmomi.vim.event.DatastoreEventArgument', 'vim.event.DatastoreEventArgument', (), '', False, 'from pyVmomi import vim\n'), 
((722, 12, 733, 5), 'pyVmomi.vim.event.VmBeingHotMigratedEvent', 'vim.event.VmBeingHotMigratedEvent', (), '', False, 'from pyVmomi import vim\n'), ((31, 9, 31, 38), 'pytest.raises', 'pytest.raises', ({(31, 23, 31, 37): 'BadConfigError'}, {}), '(BadConfigError)', False, 'import pytest\n'), ((33, 8, 33, 51), 'datadog_checks.vsphere.VSphereCheck', 'VSphereCheck', ({(33, 21, 33, 30): '"""vsphere"""', (33, 32, 33, 34): '{}', (33, 36, 33, 38): '{}', (33, 40, 33, 50): "[{'': ''}]"}, {}), "('vsphere', {}, {}, [{'': ''}])", False, 'from datadog_checks.vsphere import VSphereCheck\n'), ((81, 9, 81, 119), 'mock.patch', 'mock.patch', (), '', False, 'import mock\n'), ((84, 17, 84, 28), 'mock.MagicMock', 'MagicMock', ({}, {}), '()', False, 'from mock import MagicMock\n'), ((87, 33, 87, 44), 'mock.MagicMock', 'MagicMock', ({}, {}), '()', False, 'from mock import MagicMock\n'), ((89, 40, 89, 51), 'mock.MagicMock', 'MagicMock', ({}, {}), '()', False, 'from mock import MagicMock\n'), ((120, 15, 120, 108), 'datadog_checks.vsphere.VSphereCheck._is_excluded', 'VSphereCheck._is_excluded', ({(120, 41, 120, 54): 'included_host', (120, 56, 120, 84): "{'name': included_host.name}", (120, 86, 120, 101): 'include_regexes', (120, 103, 120, 107): 'None'}, {}), "(included_host, {'name': included_host.name},\n include_regexes, None)", False, 'from datadog_checks.vsphere import VSphereCheck\n'), ((121, 15, 121, 104), 'datadog_checks.vsphere.VSphereCheck._is_excluded', 'VSphereCheck._is_excluded', ({(121, 41, 121, 52): 'included_vm', (121, 54, 121, 80): "{'name': included_vm.name}", (121, 82, 121, 97): 'include_regexes', (121, 99, 121, 103): 'None'}, {}), "(included_vm, {'name': included_vm.name},\n include_regexes, None)", False, 'from datadog_checks.vsphere import VSphereCheck\n'), ((136, 15, 138, 5), 'datadog_checks.vsphere.VSphereCheck._is_excluded', 'VSphereCheck._is_excluded', ({(137, 8, 137, 19): 'included_vm', (137, 21, 137, 61): "{'customValue': included_vm.customValue}", (137, 63, 137, 78): 'include_regexes', (137, 80, 137, 99): 'include_only_marked'}, {}), "(included_vm, {'customValue': included_vm.\n customValue}, include_regexes, include_only_marked)", False, 'from datadog_checks.vsphere import VSphereCheck\n'), ((168, 9, 168, 119), 'mock.patch', 'mock.patch', (), '', False, 'import mock\n'), ((209, 9, 209, 119), 'mock.patch', 'mock.patch', (), '', False, 'import mock\n'), ((259, 9, 259, 59), 'mock.patch', 'mock.patch', ({(259, 20, 259, 58): '"""datadog_checks.vsphere.vsphere.vmodl"""'}, {}), "('datadog_checks.vsphere.vsphere.vmodl')", False, 'import mock\n'), ((260, 14, 260, 51), 'mock.MagicMock', 'MagicMock', (), '', False, 'from mock import MagicMock\n'), ((261, 17, 261, 53), 'mock.MagicMock', 'MagicMock', (), '', False, 'from mock import MagicMock\n'), ((263, 14, 263, 25), 'mock.MagicMock', 'MagicMock', ({}, {}), '()', False, 'from mock import MagicMock\n'), ((297, 9, 297, 59), 'mock.patch', 'mock.patch', ({(297, 20, 297, 58): '"""datadog_checks.vsphere.vsphere.vmodl"""'}, {}), "('datadog_checks.vsphere.vsphere.vmodl')", False, 'import mock\n'), ((346, 9, 346, 72), 'mock.patch', 'mock.patch', ({(346, 20, 346, 71): '"""datadog_checks.vsphere.VSphereCheck._get_all_objs"""'}, {}), "('datadog_checks.vsphere.VSphereCheck._get_all_objs')", False, 'import mock\n'), ((346, 95, 348, 5), 'mock.patch', 'mock.patch', ({(347, 8, 347, 46): '"""datadog_checks.vsphere.vsphere.vmodl"""'}, {}), "('datadog_checks.vsphere.vsphere.vmodl')", False, 'import mock\n'), ((358, 9, 358, 59), 'mock.patch', 'mock.patch', ({(358, 20, 
358, 58): '"""datadog_checks.vsphere.vsphere.vmodl"""'}, {}), "('datadog_checks.vsphere.vsphere.vmodl')", False, 'import mock\n'), ((381, 9, 381, 59), 'mock.patch', 'mock.patch', ({(381, 20, 381, 58): '"""datadog_checks.vsphere.vsphere.vmodl"""'}, {}), "('datadog_checks.vsphere.vsphere.vmodl')", False, 'import mock\n'), ((407, 9, 407, 59), 'mock.patch', 'mock.patch', ({(407, 20, 407, 58): '"""datadog_checks.vsphere.vsphere.vmodl"""'}, {}), "('datadog_checks.vsphere.vsphere.vmodl')", False, 'import mock\n'), ((415, 9, 415, 59), 'mock.patch', 'mock.patch', ({(415, 20, 415, 58): '"""datadog_checks.vsphere.vsphere.vmodl"""'}, {}), "('datadog_checks.vsphere.vsphere.vmodl')", False, 'import mock\n'), ((495, 9, 495, 59), 'mock.patch', 'mock.patch', ({(495, 20, 495, 58): '"""datadog_checks.vsphere.vsphere.vmodl"""'}, {}), "('datadog_checks.vsphere.vsphere.vmodl')", False, 'import mock\n'), ((497, 41, 497, 52), 'mock.MagicMock', 'MagicMock', ({}, {}), '()', False, 'from mock import MagicMock\n'), ((532, 66, 532, 91), 'mock.MagicMock', 'MagicMock', (), '', False, 'from mock import MagicMock\n'), ((549, 9, 549, 59), 'mock.patch', 'mock.patch', ({(549, 20, 549, 58): '"""datadog_checks.vsphere.vsphere.vmodl"""'}, {}), "('datadog_checks.vsphere.vsphere.vmodl')", False, 'import mock\n'), ((616, 32, 616, 72), 'datadog_checks.vsphere.VSphereCheck', 'VSphereCheck', ({(616, 45, 616, 51): '"""disk"""', (616, 53, 616, 55): '{}', (616, 57, 616, 59): '{}', (616, 61, 616, 71): '[instance]'}, {}), "('disk', {}, {}, [instance])", False, 'from datadog_checks.vsphere import VSphereCheck\n'), ((618, 9, 618, 74), 'mock.patch', 'mock.patch', ({(618, 20, 618, 73): '"""datadog_checks.vsphere.vsphere.connect.SmartConnect"""'}, {}), "('datadog_checks.vsphere.vsphere.connect.SmartConnect')", False, 'import mock\n'), ((632, 17, 632, 28), 'mock.MagicMock', 'MagicMock', ({}, {}), '()', False, 'from mock import MagicMock\n'), ((646, 32, 646, 72), 'datadog_checks.vsphere.VSphereCheck', 'VSphereCheck', ({(646, 45, 646, 51): '"""disk"""', (646, 53, 646, 55): '{}', (646, 57, 646, 59): '{}', (646, 61, 646, 71): '[instance]'}, {}), "('disk', {}, {}, [instance])", False, 'from datadog_checks.vsphere import VSphereCheck\n'), ((647, 9, 647, 59), 'mock.patch', 'mock.patch', ({(647, 20, 647, 58): '"""datadog_checks.vsphere.vsphere.vmodl"""'}, {}), "('datadog_checks.vsphere.vsphere.vmodl')", False, 'import mock\n'), ((660, 9, 660, 38), 'pytest.raises', 'pytest.raises', ({(660, 23, 660, 37): 'BadConfigError'}, {}), '(BadConfigError)', False, 'import pytest\n'), ((686, 9, 686, 50), 'mock.patch', 'mock.patch', (), '', False, 'import mock\n'), ((738, 9, 738, 59), 'mock.patch', 'mock.patch', ({(738, 20, 738, 58): '"""datadog_checks.vsphere.vsphere.vmodl"""'}, {}), "('datadog_checks.vsphere.vsphere.vmodl')", False, 'import mock\n'), ((749, 9, 749, 59), 'mock.patch', 'mock.patch', ({(749, 20, 749, 58): '"""datadog_checks.vsphere.vsphere.vmodl"""'}, {}), "('datadog_checks.vsphere.vsphere.vmodl')", False, 'import mock\n'), ((775, 9, 775, 59), 'mock.patch', 'mock.patch', ({(775, 20, 775, 58): '"""datadog_checks.vsphere.vsphere.vmodl"""'}, {}), "('datadog_checks.vsphere.vsphere.vmodl')", False, 'import mock\n'), ((796, 9, 796, 59), 'mock.patch', 'mock.patch', ({(796, 20, 796, 58): '"""datadog_checks.vsphere.vsphere.vmodl"""'}, {}), "('datadog_checks.vsphere.vsphere.vmodl')", False, 'import mock\n'), ((86, 70, 86, 113), 'mock.MagicMock', 'MagicMock', (), '', False, 'from mock import MagicMock\n'), ((269, 26, 269, 63), 'mock.MagicMock', 'MagicMock', (), 
'', False, 'from mock import MagicMock\n'), ((550, 13, 550, 60), 'mock.patch.object', 'mock.patch.object', ({(550, 31, 550, 38): 'vsphere', (550, 40, 550, 59): '"""set_external_tags"""'}, {}), "(vsphere, 'set_external_tags')", False, 'import mock\n'), ((622, 13, 622, 43), 'pytest.raises', 'pytest.raises', ({(622, 27, 622, 42): 'ConnectionError'}, {}), '(ConnectionError)', False, 'import pytest\n'), ((637, 13, 637, 43), 'pytest.raises', 'pytest.raises', ({(637, 27, 637, 42): 'ConnectionError'}, {}), '(ConnectionError)', False, 'import pytest\n'), ((648, 13, 648, 78), 'mock.patch', 'mock.patch', ({(648, 24, 648, 77): '"""datadog_checks.vsphere.vsphere.connect.SmartConnect"""'}, {}), "('datadog_checks.vsphere.vsphere.connect.SmartConnect')", False, 'import mock\n'), ((510, 83, 510, 94), 'mock.MagicMock', 'MagicMock', ({}, {}), '()', False, 'from mock import MagicMock\n')] |
jp3477/curation | data_steward/constants/validation/email_notification.py | 41f98d57c8273d9963ad6d466a237c99b63c74be | MANDRILL_API_KEY = 'MANDRILL_API_KEY'
UNSET_MANDRILL_API_KEY_MSG = f"Mandrill API key not set in environment variable {MANDRILL_API_KEY}"
CONTACT_LIST_QUERY = """
SELECT *
FROM `{{project}}.{{dataset}}.{{contact_table}}`
"""
EHR_OPERATIONS = 'EHR Ops'
EHR_OPS_ZENDESK = '[email protected]'
DATA_CURATION_LISTSERV = '[email protected]'
NO_REPLY_ADDRESS = '[email protected]'
NO_DATA_STEWARD = 'no data steward'
# HPO contact list table columns
SITE_NAME = 'site_name'
HPO_ID = 'hpo_id'
SITE_POINT_OF_CONTACT = 'site_point_of_contact'
# Mandrill API constants
MAIL_TO = 'mail_to'
EHR_OPS_SITE_URL = 'https://sites.google.com/view/ehrupload'
# Email content
EMAIL_BODY = """
<p style="font-size:115%;">Hi {{ site_name }},</p>
<p style="font-size:115%;">Your submission <b>{{ folder }}</b>
{% if submission_error %}was NOT successfully loaded on {{ timestamp }}.<br>
{% else %}was successfully loaded on {{ timestamp }}.<br>
{% endif %}
Please review the <code>results.html</code> submission report attached to this email{% if submission_error %}<br>
and resolve the errors before making a new submission{% endif %}.<br>
If any of your files have not been successfully uploaded, please run the
<a href="https://github.com/all-of-us/aou-ehr-file-check">local file check</a> before making your submission.<br>
To view the full set of curation reports, please visit the submission folder in your
GCS bucket <a href="{{ submission_folder_url }}">here</a>.<br>
For more information on the reports and how to download them, please refer to our
<a href="{{ ehr_ops_site_url }}">EHR Ops website</a>.</p>
<p style="font-size:115%;">You are receiving this email because you are listed as a point of contact
for HPO Site <em>{{ site_name }}</em>.<br>
If you have additional questions or wish to no longer receive these emails, please reply/send an
email to <a href="mailto:{{ eo_zendesk }}">{{ eo_zendesk }}</a>.</p>
<p style="font-size:115%;">EHR Ops team, DRC<br>
<em>All of Us</em> Research Program<br>
<img src="cid:{{ aou_logo }}"/></p>
"""
AOU_LOGO = 'aou_logo'
AOU_LOGO_PNG = 'all-of-us-logo.png'
| [] |
keshav11/clip | clip/clip.py | f426dee5c3a6885ddeba20d450d85fc71951c5ca | import os
import argparse
from pathlib import Path
CLIP_FILE = os.path.join(Path.home(), '.clip')
TEMP_FILE = '.TEMP_FILE'
def add_text(key, text):
if os.path.exists(CLIP_FILE):
open_mode = 'a'
else:
open_mode = 'w+'
with open(CLIP_FILE, open_mode) as clip_file:
clip_file.write(key + ": " + text + "\n")
def list_texts():
with open(CLIP_FILE, 'r') as clip_file:
for text in clip_file.read().split('\n'):
print(text)
def get_text(key):
with open(CLIP_FILE, 'r') as clip_file:
for text in clip_file.read().split('\n'):
key_val = text.split(':')
if key_val[0].strip() == key:
print(key_val[1].strip(), end='')
def delete_text(key):
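    # Copy every entry except the one being deleted into a temp file, then swap the
    # temp file in as the new clip store.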
exists = False
with open(TEMP_FILE, 'w+') as temp_file:
with open(CLIP_FILE, 'r') as clip_file:
for text in clip_file.read().split('\n'):
if text.strip() == "":
continue
key_val = text.split(':')
if key_val[0].strip() != key:
temp_file.write(text+"\n")
else:
exists = True
if not exists:
print("key:", key, "was not found in the clip store")
try:
os.rename(TEMP_FILE, CLIP_FILE)
except Exception as ex:
os.remove(TEMP_FILE)
print('remove text failed.', ex)
def main():
parser = argparse.ArgumentParser(description='clips and saves texts from the command line')
parser.add_argument('-a', '--add', nargs=2)
parser.add_argument('-g', '--get', nargs=1)
parser.add_argument('-d', '--delete', nargs=1)
parser.add_argument('-l', '--list', action='store_true')
args = parser.parse_args()
if args.add:
key, value = args.add[0], args.add[1]
add_text(key, value)
elif args.list:
list_texts()
elif args.get:
key = args.get[0]
get_text(key)
elif args.delete:
key = args.delete[0]
delete_text(key)
else:
parser.print_usage()
if __name__ == '__main__':
main()
| [((5, 25, 5, 36), 'pathlib.Path.home', 'Path.home', ({}, {}), '()', False, 'from pathlib import Path\n'), ((10, 7, 10, 32), 'os.path.exists', 'os.path.exists', ({(10, 22, 10, 31): 'CLIP_FILE'}, {}), '(CLIP_FILE)', False, 'import os\n'), ((54, 13, 54, 95), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (), '', False, 'import argparse\n'), ((47, 8, 47, 39), 'os.rename', 'os.rename', ({(47, 18, 47, 27): 'TEMP_FILE', (47, 29, 47, 38): 'CLIP_FILE'}, {}), '(TEMP_FILE, CLIP_FILE)', False, 'import os\n'), ((49, 8, 49, 28), 'os.remove', 'os.remove', ({(49, 18, 49, 27): 'TEMP_FILE'}, {}), '(TEMP_FILE)', False, 'import os\n')] |
learsi1911/GAMA_pygmo_v4 | tests/unit/test_nsga2.py | 459807db352dd1c9f9c1e0e322f8c1e9b5abbca0 | from typing import List, Tuple
from gama.genetic_programming.nsga2 import (
NSGAMeta,
fast_non_dominated_sort,
crowding_distance_assignment,
)
def _tuples_to_NSGAMeta(tuples: List[Tuple]) -> List[NSGAMeta]:
""" Converts a list of tuples to NSGAMeta objects. """
# Can't declare it directly in a loop as it does not create a new scope.
def fetch_value(i):
return lambda x: x[i]
metrics = [fetch_value(i) for i in range(len(tuples[0]))]
return [NSGAMeta(t, metrics) for t in tuples]
def test_nsgameta_value_assignment():
pareto = _tuples_to_NSGAMeta([(3, 5), (5, 3), (4, 4)])
three_five, five_three, four_four = pareto
assert three_five.values == (3, 5)
assert five_three.values == (5, 3)
assert four_four.values == (4, 4)
def test_dominates():
pareto = _tuples_to_NSGAMeta([(3, 5), (5, 3), (2, 4)])
three_five, five_three, two_four = pareto
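    # These fitness tuples are treated as "higher is better": (3, 5) and (5, 3) are
    # mutually non-dominating, while (3, 5) dominates (2, 4) in both objectives.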
assert not three_five.dominates(five_three)
assert not five_three.dominates(three_five)
assert three_five.dominates(two_four)
assert not two_four.dominates(three_five)
assert not five_three.dominates(two_four)
assert not two_four.dominates(five_three)
def test_crowding_distance_assignment():
pareto = _tuples_to_NSGAMeta([(3, 5), (5, 3), (4, 4)])
three_five, five_three, four_four = pareto
crowding_distance_assignment(pareto)
assert three_five.distance == float("inf")
assert five_three.distance == float("inf")
assert four_four.distance == 2
def test_crowding_distance_assignment_inf():
pareto = _tuples_to_NSGAMeta([(3, float("inf")), (5, 3), (4, 4)])
three_inf, five_three, four_four = pareto
crowding_distance_assignment(pareto)
assert three_inf.distance == float("inf")
assert five_three.distance == float("inf")
# In our implementation, we ignore 'axis' that contain inf values.
assert four_four.distance == 1
def test_crowd_compare():
pareto = _tuples_to_NSGAMeta([(3, 5), (5, 3), (4, 4), (4.01, 3.99), (4.5, 3.5)])
three_five, five_three, four_four, approx_four_four, half_half = pareto
fast_non_dominated_sort(pareto) # assigns rank
crowding_distance_assignment(pareto) # assigns distance
assert all([three_five.crowd_compare(other) == -1 for other in pareto[2:]])
assert all([five_three.crowd_compare(other) == -1 for other in pareto[2:]])
| [((45, 4, 45, 40), 'gama.genetic_programming.nsga2.crowding_distance_assignment', 'crowding_distance_assignment', ({(45, 33, 45, 39): 'pareto'}, {}), '(pareto)', False, 'from gama.genetic_programming.nsga2 import NSGAMeta, fast_non_dominated_sort, crowding_distance_assignment\n'), ((55, 4, 55, 40), 'gama.genetic_programming.nsga2.crowding_distance_assignment', 'crowding_distance_assignment', ({(55, 33, 55, 39): 'pareto'}, {}), '(pareto)', False, 'from gama.genetic_programming.nsga2 import NSGAMeta, fast_non_dominated_sort, crowding_distance_assignment\n'), ((66, 4, 66, 35), 'gama.genetic_programming.nsga2.fast_non_dominated_sort', 'fast_non_dominated_sort', ({(66, 28, 66, 34): 'pareto'}, {}), '(pareto)', False, 'from gama.genetic_programming.nsga2 import NSGAMeta, fast_non_dominated_sort, crowding_distance_assignment\n'), ((67, 4, 67, 40), 'gama.genetic_programming.nsga2.crowding_distance_assignment', 'crowding_distance_assignment', ({(67, 33, 67, 39): 'pareto'}, {}), '(pareto)', False, 'from gama.genetic_programming.nsga2 import NSGAMeta, fast_non_dominated_sort, crowding_distance_assignment\n'), ((16, 12, 16, 32), 'gama.genetic_programming.nsga2.NSGAMeta', 'NSGAMeta', ({(16, 21, 16, 22): 't', (16, 24, 16, 31): 'metrics'}, {}), '(t, metrics)', False, 'from gama.genetic_programming.nsga2 import NSGAMeta, fast_non_dominated_sort, crowding_distance_assignment\n')] |
crim-ca/stac-ingest | stac_ingest/utils/tds.py | e4cc2a66fee4b86ec238f139135d78215ec91ea4 | # File taken from https://github.com/Ouranosinc/pavics-vdb/blob/master/catalog/tds.py
"""Utility function to parse metadata from a THREDDS Data Server catalog."""
def walk(cat, depth=1):
"""Return a generator walking a THREDDS data catalog for datasets.
Parameters
----------
cat : TDSCatalog
THREDDS catalog.
depth : int
Maximum recursive depth. Setting 0 will return only datasets within the top-level catalog. If None,
depth is set to 1000.
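
    Example
    -------
    Minimal usage sketch (the catalog URL is a placeholder; the catalog object
    is assumed to come from siphon's ``TDSCatalog``)::

        from siphon.catalog import TDSCatalog
        cat = TDSCatalog("https://example.org/thredds/catalog.xml")
        for name, ds in walk(cat, depth=1):
            print(name, ds.access_urls)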
"""
yield from cat.datasets.items()
if depth is None:
depth = 1000
if depth > 0:
for name, ref in cat.catalog_refs.items():
child = ref.follow()
yield from walk(child, depth=depth-1)
def attrs_from_ds(ds):
"""Extract attributes from TDS Dataset."""
url = ds.access_urls["NCML"]
attrs = attrs_from_ncml(url)
attrs["__services__"] = ds.access_urls
return attrs
def attrs_from_ncml(url):
"""Extract attributes from NcML file.
Parameters
----------
url : str
Link to NcML service of THREDDS server for a dataset.
Returns
-------
dict
Global attribute values keyed by facet names, with variable attributes in `__variable__` nested dict, and
additional specialized attributes in `__group__` nested dict.
"""
import lxml.etree
import requests
parser = lxml.etree.XMLParser(encoding='UTF-8')
ns = {"ncml": "http://www.unidata.ucar.edu/namespaces/netcdf/ncml-2.2"}
# Parse XML content - UTF-8 encoded documents need to be read as bytes
xml = requests.get(url).content
doc = lxml.etree.fromstring(xml, parser=parser)
nc = doc.xpath("/ncml:netcdf", namespaces=ns)[0]
# Extract global attributes
out = _attrib_to_dict(nc.xpath("ncml:attribute", namespaces=ns))
# Extract group attributes
gr = {}
for group in nc.xpath("ncml:group", namespaces=ns):
gr[group.attrib["name"]] = _attrib_to_dict(group.xpath("ncml:attribute", namespaces=ns))
# Extract variable attributes
va = {}
for variable in nc.xpath("ncml:variable", namespaces=ns):
if '_CoordinateAxisType' in variable.xpath("ncml:attribute/@name", namespaces=ns):
continue
va[variable.attrib["name"]] = _attrib_to_dict(variable.xpath("ncml:attribute", namespaces=ns))
out["__group__"] = gr
out["__variable__"] = va
return out
def _attrib_to_dict(elems):
"""Convert element attributes to dictionary.
Ignore attributes with names starting with _
"""
hidden_prefix = "_"
out = {}
for e in elems:
a = e.attrib
if a["name"].startswith(hidden_prefix):
continue
out[a["name"]] = a["value"]
return out | [((56, 10, 56, 27), 'requests.get', 'requests.get', ({(56, 23, 56, 26): 'url'}, {}), '(url)', False, 'import requests\n')] |
brunel-physics/mva_scikit | tact/util.py | b0182da89efa466461aaf2cff4387c821df1758b | # -*- coding: utf-8 -*-
"""
Module containing miscellaneous utility functions.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import collections
import itertools
import numpy as np
class BinaryTree(object):
def __init__(self):
self.left = None
self.right = None
self.val = None
def deep_update(d1, d2):
"""
Adds key-value pairs in d2 to d1. Conflicts are resolved in favour of d2.
Recurses into all values in d2 which belong to the collections.Mapping
abstract base class.
Parameters
----------
d1 : collections.Mapping
Base dictionary
d2 : collections.Mapping
Dictionary with updated values
Returns
-------
d1 : collections.Mapping
Updated dictionary
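
    Example
    -------
    ``deep_update({'a': {'x': 1}}, {'a': {'y': 2}, 'b': 3})`` merges nested
    mappings rather than overwriting them, returning
    ``{'a': {'x': 1, 'y': 2}, 'b': 3}``.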
"""
for k, v in d2.iteritems():
if isinstance(v, collections.Mapping):
d1[k] = deep_update(d1.get(k, {}), v)
else:
d1[k] = v
return d1
def nodes(tree):
"""
Return a list of values at every node of a tree.
Parameters
----------
tree : BinaryTree
BinaryTree to extract nodes from.
Returns
-------
nodelist : list
List of values at tree nodes.
"""
nodelist = []
def _get_nodes(tree):
"""
Build up a list of nodes.
Parameters
----------
tree : BinaryTree
BinaryTree to extract nodes from.
Returns
-------
None
"""
nodelist.append(tree.val)
try:
_get_nodes(tree.left)
except AttributeError:
nodelist.append(tree.left)
try:
_get_nodes(tree.right)
except AttributeError:
nodelist.append(tree.right)
_get_nodes(tree)
return nodelist
def maenumerate(marr):
"""
Multidimensional index iterator for masked arrays.
Return an iterator yielding pairs of array coordinates and values, with
masked values skipped.
Parameters
----------
marr : MaskedArray
Input array.
"""
for i, m in itertools.izip(np.ndenumerate(marr), ~marr.mask.ravel()):
if m:
yield i
def corrcoef(x, y=None, rowvar=True, fweights=None, aweights=None):
"""
Return Pearson product-moment correlation coefficients.
This is a copy of the implementation found in numpy, with the removal of
    the deprecated bias and ddof keyword arguments, and the addition of
    the fweights and aweights arguments, which are passed to np.cov.
Parameters
----------
x : array_like
A 1-D or 2-D array containing multiple variables and observations.
Each row of `x` represents a variable, and each column a single
observation of all those variables. Also see `rowvar` below.
y : array_like, optional
An additional set of variables and observations. `y` has the same
shape as `x`.
rowvar : bool, optional
If `rowvar` is True (default), then each row represents a
variable, with observations in the columns. Otherwise, the relationship
is transposed: each column represents a variable, while the rows
contain observations.
fweights : array_like, int, optional
        1-D array of integer frequency weights; the number of times each
observation vector should be repeated.
aweights : array_like, optional
1-D array of observation vector weights. These relative weights are
typically large for observations considered "important" and smaller for
observations considered less "important". If ``ddof=0`` the array of
weights can be used to assign probabilities to observation vectors.
Returns
-------
R : ndarray
The correlation coefficient matrix of the variables.
"""
c = np.cov(x, y, rowvar, fweights=fweights, aweights=aweights)
try:
d = np.diag(c)
except ValueError:
# scalar covariance
# nan if incorrect value (nan, inf, 0), 1 otherwise
return c / c
stddev = np.sqrt(d.real)
c /= stddev[:, None]
c /= stddev[None, :]
# Clip real and imaginary parts to [-1, 1]. This does not guarantee
# abs(a[i,j]) <= 1 for complex arrays, but is the best we can do without
# excessive work.
np.clip(c.real, -1, 1, out=c.real)
if np.iscomplexobj(c):
np.clip(c.imag, -1, 1, out=c.imag)
return c
| [((152, 8, 152, 66), 'numpy.cov', 'np.cov', (), '', True, 'import numpy as np\n'), ((159, 13, 159, 28), 'numpy.sqrt', 'np.sqrt', ({(159, 21, 159, 27): 'd.real'}, {}), '(d.real)', True, 'import numpy as np\n'), ((166, 4, 166, 38), 'numpy.clip', 'np.clip', (), '', True, 'import numpy as np\n'), ((167, 7, 167, 25), 'numpy.iscomplexobj', 'np.iscomplexobj', ({(167, 23, 167, 24): 'c'}, {}), '(c)', True, 'import numpy as np\n'), ((110, 31, 110, 51), 'numpy.ndenumerate', 'np.ndenumerate', ({(110, 46, 110, 50): 'marr'}, {}), '(marr)', True, 'import numpy as np\n'), ((154, 12, 154, 22), 'numpy.diag', 'np.diag', ({(154, 20, 154, 21): 'c'}, {}), '(c)', True, 'import numpy as np\n'), ((168, 8, 168, 42), 'numpy.clip', 'np.clip', (), '', True, 'import numpy as np\n')] |
DavidNKraemer/ams553-final-project | src/stochastic_tour.py | fc23fe5f126a8bd9ea593c0b339883ec71820a05 |
import numpy as np
import random
from collections import namedtuple
def generate_prob_matrix(n):
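    # Build a random row-stochastic transition matrix with a zero diagonal, so a
    # drone never transitions from a site back to the same site.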
matrix = np.random.rand(n, n)
for i in range(n):
matrix[i][i] = 0
for i in range(n):
matrix[i] = (1/np.sum(matrix[i]))*matrix[i]
return matrix
def categorical(p):
return np.random.choice(len(p), 1, p=p)[0]
Drone = namedtuple('Drone', 'speed probability')
Site = namedtuple('Site', 'location')
class System:
def __init__(self, sites, drones):
self.sites = {}
self.drones = {}
n = len(sites)
for i, drone in enumerate(drones):
self.drones[i] = drone
for i, site in enumerate(sites):
self.sites[i] = site
distance = np.zeros([n, n])
for i in range(n):
for j in range(n):
if i < j:
x = np.subtract(sites[i], sites[j])
d = np.linalg.norm(x)
distance[i][j] = d
distance[j][i] = d
self.distance = distance
def get_site(self, site_id):
return self.sites[site_id]
def get_drone(self, drone_id):
return self.drones[drone_id]
def compute_path_distance(self, path):
n = len(path)
d = 0
for i in range(n - 1):
d += self.distance[path[i]][path[i + 1]]
return d
def compute_path_time(self, path, drone_id):
d = self.compute_path_distance(path)
return d/self.get_drone(drone_id).speed
def generate_path_of_length(self, length, drone_id):
path = []
P = self.get_drone(drone_id).probability
num_sites = len(self.sites)
s = categorical([1/num_sites]*num_sites)
path.append(s)
site = s
for i in range(length):
site = categorical(P[site])
path.append(site)
return path
def generate_path(self, s, t, drone_id):
path = [s]
P = self.get_drone(drone_id).probability
site = categorical(P[s])
path.append(site)
while site != t:
site = categorical(P[site])
path.append(site)
return path
@staticmethod
def generate_random_system(n, k):
locations = np.random.rand(n, 2)
sites = []
for i in locations:
sites.append(Site(i))
drones = []
for i in range(k):
speed = abs(random.random())
probability = generate_prob_matrix(n)
drones.append(Drone(speed, probability))
return System(sites, drones)
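# Illustrative usage sketch (sizes are arbitrary): build a random system with
# 5 sites and 2 drones, then sample and measure a random tour for drone 0.
#
#     system = System.generate_random_system(5, 2)
#     tour = system.generate_path_of_length(10, drone_id=0)
#     print(system.compute_path_distance(tour), system.compute_path_time(tour, 0))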
def _compute_arrival_times(system, path, drone_id):
    # Accumulate the drone's travel time edge by edge along the path and record
    # one (drone_id, from_site, to_site, arrival_time) event per hop.
    arrival_times = []
    t = 0
    for i in range(len(path) - 1):
        t += system.compute_path_time(path[i:i+2], drone_id=drone_id)
        arrival_times.append((drone_id, path[i], path[i+1], t))
    return arrival_times
def _generate_arrival_times(system, num_drones, length):
    arrival_times = [[] for _ in range(len(system.sites))]
    events = []
    for i in range(num_drones):
        # Sample a random walk of the requested length for each drone and collect
        # its arrival events.
        path = system.generate_path_of_length(length, i)
        events.extend(_compute_arrival_times(system, path, i))
def get_key(item):
return item[3]
events = sorted(events, key=get_key)
for event in events:
drone_id = event[0]
site_id = event[2]
time = event[3]
arrival_times[site_id].append((drone_id, time))
return arrival_times
def compute_cost(system, n):
    # `n` is taken here to be the random-walk length used for every drone in the system.
    arrival_times = _generate_arrival_times(system, len(system.drones), n)
interarrival_times = [[] for _ in range(len(system.sites))]
for i in range(len(arrival_times)):
arrivals = arrival_times[i]
for j in range(len(arrivals) - 1):
interarrival_times[i].append(arrivals[j+1][1] - arrivals[j][1])
interarrival_avgs = [compute_average(i) for i in interarrival_times]
return max(interarrival_avgs)
def compute_average(data):
return (1/len(data))*sum(data)
| [((23, 8, 23, 48), 'collections.namedtuple', 'namedtuple', ({(23, 19, 23, 26): '"""Drone"""', (23, 28, 23, 47): '"""speed probability"""'}, {}), "('Drone', 'speed probability')", False, 'from collections import namedtuple\n'), ((24, 7, 24, 37), 'collections.namedtuple', 'namedtuple', ({(24, 18, 24, 24): '"""Site"""', (24, 26, 24, 36): '"""location"""'}, {}), "('Site', 'location')", False, 'from collections import namedtuple\n'), ((8, 13, 8, 33), 'numpy.random.rand', 'np.random.rand', ({(8, 28, 8, 29): 'n', (8, 31, 8, 32): 'n'}, {}), '(n, n)', True, 'import numpy as np\n'), ((40, 19, 40, 35), 'numpy.zeros', 'np.zeros', ({(40, 28, 40, 34): '[n, n]'}, {}), '([n, n])', True, 'import numpy as np\n'), ((92, 20, 92, 40), 'numpy.random.rand', 'np.random.rand', ({(92, 35, 92, 36): 'n', (92, 38, 92, 39): '2'}, {}), '(n, 2)', True, 'import numpy as np\n'), ((14, 23, 14, 40), 'numpy.sum', 'np.sum', ({(14, 30, 14, 39): 'matrix[i]'}, {}), '(matrix[i])', True, 'import numpy as np\n'), ((99, 24, 99, 39), 'random.random', 'random.random', ({}, {}), '()', False, 'import random\n'), ((44, 24, 44, 55), 'numpy.subtract', 'np.subtract', ({(44, 36, 44, 44): 'sites[i]', (44, 46, 44, 54): 'sites[j]'}, {}), '(sites[i], sites[j])', True, 'import numpy as np\n'), ((45, 24, 45, 41), 'numpy.linalg.norm', 'np.linalg.norm', ({(45, 39, 45, 40): 'x'}, {}), '(x)', True, 'import numpy as np\n')] |
mcasanova1445/models | orbit/actions/conditional_action_test.py | 37be0fdb4abccca633bb3199a4e6f3f71cd174d9 | # Copyright 2022 The Orbit Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for orbit.actions.conditional_action."""
from orbit import actions
import tensorflow as tf
class ConditionalActionTest(tf.test.TestCase):
def test_conditional_action(self):
# Define a function to raise an AssertionError, since we can't in a lambda.
def raise_assertion(arg):
raise AssertionError(str(arg))
conditional_action = actions.ConditionalAction(
condition=lambda x: x['value'], action=raise_assertion)
conditional_action({'value': False}) # Nothing is raised.
with self.assertRaises(AssertionError) as ctx:
conditional_action({'value': True})
    self.assertEqual(str(ctx.exception), "{'value': True}")
if __name__ == '__main__':
tf.test.main()
| [((39, 2, 39, 16), 'tensorflow.test.main', 'tf.test.main', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((29, 25, 30, 63), 'orbit.actions.ConditionalAction', 'actions.ConditionalAction', (), '', False, 'from orbit import actions\n')] |
phnomcobra/valarie-content | Customizations/Tagging/show_tags.task.py | b1f6242605badd2b0b2e53c4320f5d963b5e0b21 | #!/usr/bin/python
################################################################################
# DOCUMENTS
#
# Justin Dierking
# [email protected]
# 614 692 2050
#
# 04/22/2018 Original Construction
################################################################################
import traceback
import json
class Task:
def __init__(self):
self.output = []
self.status = STATUS_NOT_EXECUTED
def execute(self, cli):
try:
keys = cli.AGTCollections("tags")
self.status = STATUS_SUCCESS
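            # Dump every object in the "tags" collection as pretty-printed JSON.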
for key in keys.find():
#key.set()
self.output.append(json.dumps(key.object, indent = 4))
except Exception:
self.status = STATUS_EXCEPTION
self.output.append(traceback.format_exc())
return self.status | [((28, 35, 28, 69), 'json.dumps', 'json.dumps', (), '', False, 'import json\n'), ((32, 31, 32, 53), 'traceback.format_exc', 'traceback.format_exc', ({}, {}), '()', False, 'import traceback\n')] |
DerThorsten/seglib | examples/python/masked_hist.py | 4655079e390e301dd93e53f5beed6c9737d6df9f | import vigra
import numpy
import pylab
from seglib import cgp2d
from seglib.preprocessing import norm01
import seglib.edge_detectors.pixel as edp
import seglib.region_descriptors.pixel as rdp
from seglib.preprocessing import norm01
from seglib.histogram import jointHistogram,histogram
from seglib.region_descriptors.pixel.sift import denseSift
# change me to your path
img = "img/text.jpg"
img = numpy.squeeze(vigra.readImage(img))#[0:75,0:75,:]
binCount = 30
sigma = 1.5
histImg = numpy.zeros(img.shape[0:2]+(binCount*3,))
imgBig = None
sizes = [3,4,5,8,10,15,20,25,40,100]
scalings = [5,10,15]
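# For every (superpixel size, SLIC scaling) pair: oversegment the image, accumulate a
# colour histogram per 2-cell (region), and project it back to pixels so that histImg
# gathers multi-scale regional evidence (weighted by the superpixel size).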
for size in sizes:
for scaling in scalings:
        size = int(size)
        scaling = float(scaling)
        print size, scaling
        labels, nseg = vigra.analysis.slicSuperpixels(vigra.colors.transform_RGB2Lab(img), scaling, size)
labels = vigra.analysis.labelImage(labels).astype(numpy.uint64)
cgp,tgrid = cgp2d.cgpFromLabels(labels)
if imgBig is None:
imgBig=vigra.sampling.resize(img,cgp.shape)
#cgp2d.visualize(imgBig,cgp=cgp)
print "accumulate cell "
hist = cgp.accumulateCellHistogram(cellType=2,image=img,binCount=binCount,sigma=sigma)
hist = hist.reshape([cgp.numCells(2),-1])
for c in range(histImg.shape[2]):
histImg[:,:,c] += (size)*cgp.featureToImage(cellType=2,features=hist[:,c],ignoreInactive=False,useTopologicalShape=False)
histImg=numpy.require(histImg,dtype=numpy.float32)
histImg=vigra.taggedView(histImg, 'xyc')
histImg = vigra.gaussianSmoothing(histImg,sigma=1.0)
#for c in range(histImg.shape[2]):
# #print c
# pylab.imshow( numpy.swapaxes( norm01(histImg[:,:,c]) ,0,1) )
# pylab.show()
#
# print "hist",hist.shape
imgdt = rdp.deepDetexturize(srcImg=img,img=histImg,nIteration=10,
nCluster=10,reductionAlg='pca',nldEdgeThreshold=10.0,nldScale=10.0,distance=None)#'cityblock')
| [] |
helwete/simple-backup | backup/models.py | c7dd1a08d398f5b4005c187e274e192b2e024f30 | from datetime import date
from django.conf import settings
from django.db import models
# Create your models here.
def user_directory_path(instance, filename):
    # file will be uploaded to MEDIA_ROOT/<username>/<YYYY>/<MM>/<DD>/<filename>
today = date.today()
return '{0}/{2}/{1}'.format(instance.user.username, filename, today.strftime("%Y/%m/%d/"))
class Upload(models.Model):
uploaded_file = models.FileField(null=True, blank=True, upload_to=user_directory_path)
file_name = models.CharField(max_length=255, null=True)
date_uploaded = models.DateField(auto_now_add=True, null=True)
user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE, null=True)
def __str__(self):
return self.uploaded_file.name
| [((9, 12, 9, 24), 'datetime.date.today', 'date.today', ({}, {}), '()', False, 'from datetime import date\n'), ((14, 20, 14, 90), 'django.db.models.FileField', 'models.FileField', (), '', False, 'from django.db import models\n'), ((15, 16, 15, 59), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((16, 20, 16, 66), 'django.db.models.DateField', 'models.DateField', (), '', False, 'from django.db import models\n'), ((17, 11, 17, 91), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import models\n')] |
Jojoxiao/Machine-Learning-for-Beginner-by-Python3 | Kmeans Cluster/Kmeans_Compare.py | 71b91c9cba5803bd78d4d31be6dabb1d3989e968 | #-*- coding:utf-8 -*-
# &Author AnFany
# import the two K-means implementations being compared
import Kmeans_AnFany as K_Af # AnFany
import Kmeans_Sklearn as K_Sk # Sklearn
import matplotlib.pyplot as plt
from pylab import mpl # matplotlib settings so Chinese plot labels display correctly
mpl.rcParams['font.sans-serif'] = ['FangSong'] # use the FangSong font for Chinese characters
mpl.rcParams['axes.unicode_minus'] = False
import numpy as np
# generate a synthetic data set with sklearn
from sklearn.datasets import make_blobs
X, Y = make_blobs(n_samples=600, centers=6, n_features=2)
# scatter plot of the training data
def fig_scatter(exdata, eydata, titl='训练数据散点图', co=['r', 'g', 'k', 'b', 'y', 'm'], marker=['o','^','H','v','d','>']):
typeclass = sorted(list(set(eydata)))
for ii in range(len(typeclass)):
datax = exdata[eydata == typeclass[ii]]
plt.scatter(datax[:, 0], datax[:, -1], c=co[ii], s=50, marker=marker[ii])
plt.title(titl)
#plt.legend(['%d类'%i for i in typeclass], bbox_to_anchor=(1.2, 0.9))
plt.xlabel('特征1')
plt.ylabel('特征2')
# run the two implementations
# AnFany
kresult = K_Af.op_kmeans(X, countcen=6)
# Sklearn
sk = K_Sk.KMeans(init='k-means++', n_clusters=6, n_init=10)
train = sk.fit(X)
result = sk.predict(X)
skru = K_Sk.trans(result)
# scatter plot of the clusters produced by each algorithm
def sca(Xdata, Center, signdict, co=['r', 'g', 'y', 'b', 'c', 'm'], marker=['o','^','H','s','d','*'], titl = 'AnFany 结果'):
du = 1
for jj in signdict:
xdata = Xdata[signdict[jj]]
        plt.scatter(xdata[:, 0], xdata[:, -1], c=co[jj], s=50, marker=marker[jj], label='%d类' % jj) # scatter plot of the samples in cluster jj
for ss in Center:
if du:
            plt.scatter(ss[0], ss[1], c='k', s=100, marker='8', label='类别中心') # plot the cluster centers (with legend label)
du = 0
else:
            plt.scatter(ss[0], ss[1], c='k', s=100, marker='8') # plot the cluster centers
plt.legend(bbox_to_anchor=(1.2, 1))
plt.title(titl)
plt.xlabel('特征1')
plt.ylabel('特征2')
# Euclidean distance between each sample and a cluster center
def dis(sample, center):
cen = np.array([center])
sample = np.array(sample)
if len(sample) != 0:
usb = np.sum((sample - cen) ** 2, axis=1) ** 0.5
return usb
else:
return 0
# total cost of the final clustering: sum of distances from each sample to its assigned center
def Cost(Xdata, typedict):
center = {}
for kk in typedict:
        center[kk] = np.mean(Xdata[typedict[kk]], axis=0) # cluster mean
cio = 0
for cc in typedict:
cio += np.sum(dis(Xdata[typedict[cc]], center[cc]))
return cio
# display the final results
plt.subplot(2, 2, 1)
fig_scatter(X, Y)
plt.subplot(2, 2, 2)
sca(X, kresult[0], kresult[2])
plt.subplot(2, 2, 3)
sca(X, train.cluster_centers_, skru, titl='Sklearn 结果')
plt.subplot(2, 2, 4)
plt.axis('off')
plt.text(0.3, 0.6, 'AnFany 最终的分类成本值为:%.5f'%Cost(X, kresult[2]))
plt.text(0.3, 0.3, 'Sklearn 最终的分类成本值为:%.5f'%Cost(X, skru))
plt.show()
| [((15, 7, 15, 57), 'sklearn.datasets.make_blobs', 'make_blobs', (), '', False, 'from sklearn.datasets import make_blobs\n'), ((31, 10, 31, 39), 'Kmeans_AnFany.op_kmeans', 'K_Af.op_kmeans', (), '', True, 'import Kmeans_AnFany as K_Af\n'), ((35, 5, 35, 59), 'Kmeans_Sklearn.KMeans', 'K_Sk.KMeans', (), '', True, 'import Kmeans_Sklearn as K_Sk\n'), ((39, 7, 39, 25), 'Kmeans_Sklearn.trans', 'K_Sk.trans', ({(39, 18, 39, 24): 'result'}, {}), '(result)', True, 'import Kmeans_Sklearn as K_Sk\n'), ((81, 0, 81, 20), 'matplotlib.pyplot.subplot', 'plt.subplot', ({(81, 12, 81, 13): '(2)', (81, 15, 81, 16): '(2)', (81, 18, 81, 19): '(1)'}, {}), '(2, 2, 1)', True, 'import matplotlib.pyplot as plt\n'), ((84, 0, 84, 20), 'matplotlib.pyplot.subplot', 'plt.subplot', ({(84, 12, 84, 13): '(2)', (84, 15, 84, 16): '(2)', (84, 18, 84, 19): '(2)'}, {}), '(2, 2, 2)', True, 'import matplotlib.pyplot as plt\n'), ((87, 0, 87, 20), 'matplotlib.pyplot.subplot', 'plt.subplot', ({(87, 12, 87, 13): '(2)', (87, 15, 87, 16): '(2)', (87, 18, 87, 19): '(3)'}, {}), '(2, 2, 3)', True, 'import matplotlib.pyplot as plt\n'), ((90, 0, 90, 20), 'matplotlib.pyplot.subplot', 'plt.subplot', ({(90, 12, 90, 13): '(2)', (90, 15, 90, 16): '(2)', (90, 18, 90, 19): '(4)'}, {}), '(2, 2, 4)', True, 'import matplotlib.pyplot as plt\n'), ((91, 0, 91, 15), 'matplotlib.pyplot.axis', 'plt.axis', ({(91, 9, 91, 14): '"""off"""'}, {}), "('off')", True, 'import matplotlib.pyplot as plt\n'), ((95, 0, 95, 10), 'matplotlib.pyplot.show', 'plt.show', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((23, 4, 23, 19), 'matplotlib.pyplot.title', 'plt.title', ({(23, 14, 23, 18): 'titl'}, {}), '(titl)', True, 'import matplotlib.pyplot as plt\n'), ((25, 4, 25, 25), 'matplotlib.pyplot.xlabel', 'plt.xlabel', ({(25, 15, 25, 24): '"""特征1"""'}, {}), "('特征1')", True, 'import matplotlib.pyplot as plt\n'), ((26, 4, 26, 25), 'matplotlib.pyplot.ylabel', 'plt.ylabel', ({(26, 15, 26, 24): '"""特征2"""'}, {}), "('特征2')", True, 'import matplotlib.pyplot as plt\n'), ((56, 4, 56, 39), 'matplotlib.pyplot.legend', 'plt.legend', (), '', True, 'import matplotlib.pyplot as plt\n'), ((57, 4, 57, 19), 'matplotlib.pyplot.title', 'plt.title', ({(57, 14, 57, 18): 'titl'}, {}), '(titl)', True, 'import matplotlib.pyplot as plt\n'), ((58, 4, 58, 25), 'matplotlib.pyplot.xlabel', 'plt.xlabel', ({(58, 15, 58, 24): '"""特征1"""'}, {}), "('特征1')", True, 'import matplotlib.pyplot as plt\n'), ((59, 4, 59, 25), 'matplotlib.pyplot.ylabel', 'plt.ylabel', ({(59, 15, 59, 24): '"""特征2"""'}, {}), "('特征2')", True, 'import matplotlib.pyplot as plt\n'), ((63, 10, 63, 28), 'numpy.array', 'np.array', ({(63, 19, 63, 27): '[center]'}, {}), '([center])', True, 'import numpy as np\n'), ((64, 13, 64, 29), 'numpy.array', 'np.array', ({(64, 22, 64, 28): 'sample'}, {}), '(sample)', True, 'import numpy as np\n'), ((22, 8, 22, 81), 'matplotlib.pyplot.scatter', 'plt.scatter', (), '', True, 'import matplotlib.pyplot as plt\n'), ((48, 8, 48, 101), 'matplotlib.pyplot.scatter', 'plt.scatter', (), '', True, 'import matplotlib.pyplot as plt\n'), ((74, 21, 74, 57), 'numpy.mean', 'np.mean', (), '', True, 'import numpy as np\n'), ((51, 12, 51, 85), 'matplotlib.pyplot.scatter', 'plt.scatter', (), '', True, 'import matplotlib.pyplot as plt\n'), ((54, 12, 54, 63), 'matplotlib.pyplot.scatter', 'plt.scatter', (), '', True, 'import matplotlib.pyplot as plt\n'), ((66, 14, 66, 49), 'numpy.sum', 'np.sum', (), '', True, 'import numpy as np\n')] |
Stayermax/5dof-bartender-robot | control_panel.py | dd04303afd2c252e6f7105e33ba35b01f3915194 | #!/usr/bin/env python
"""
Control panel file
"""
import pddl_solver as pddl
import ik
import rospy
from get_object_position import get_object_position
import time
from constants import *
from spawn_models import reset_model_position, reset_all, spawn_model, spawn_all_models
from delete_models import delete_all, delete_model
def control_panel():
robot = ik.MoveGroupPythonIntefaceTutorial()
# robot.go_to_init_state()
# robot.open_gripper()
bottle = 'bottle_1'
    # simulation
current_bottle_orig_pos = get_object_position(bottle)
# real_world
# current_bottle_orig_pos = Real_poses(bottle)
# current_bottle_orig_pos[-1] += BZS
while(True):
print()
cmd = raw_input("Enter command:\n open, close, init,\n gtb, hover, gtc, move,\n pour, cb, rb, ra,\n pgr, parm, pj,\n setj, att, box,\n del, dela, spawn, exit:\n")
if(cmd == 'open'): # open the gripper
robot.open_gripper()
elif(cmd == 'close'): # close the gripper
goal = float(raw_input("Enter closing goal in range [-0.12; 0]:\n"))
if(goal==""):
goal = -0.075
while(goal > 0 or goal < -0.12):
goal = float(raw_input("Enter closing goal in range [-0.12; 0]:\n"))
robot.close_gripper(goal)
elif(cmd == 'init'): # go to initial pose
robot.go_to_init_state()
elif(cmd == 'gtb'): # go to bottle
x,y,z = current_bottle_orig_pos
h = raw_input("Set z level: ")
if(h == ""):
h = BZS
else:
h = float(h)
robot.go_to_xyz(x, y, z + h)
elif(cmd == 'hover'): # hover over the bottle
x,y,z = current_bottle_orig_pos
robot.go_to_xyz(x, y, BUO)
elif(cmd == 'gtc'): # go to cup
# simulation
x,y,z = get_object_position('cup_1')
# real_world
# pos, angle = Real_world_PourPos[cup]
# x,y,z = pos
robot.go_to_xyz(x, y, CUO)
elif(cmd == 'move'): # go to cup
x,y,z = robot.get_arm_pose()
dir = raw_input("Enter coord: x,y or z:\n")
while(dir not in ['x','y','z']):
dir = raw_input("Enter coord: x,y or z:\n")
step = float(raw_input("Enter step size:\n"))
if(dir == 'x'):
x += step
elif(dir == 'y'):
y += step
elif(dir == 'z'):
z += step
robot.go_to_xyz(x, y, z)
elif(cmd == 'pour'): # turn gripper on pouring angle
robot.rotate_gripper(angle = 1)
rospy.sleep(1.5)
robot.rotate_gripper(angle = 0)
elif(cmd == 'cb'): # change bottle
b_n = int(raw_input("Enter bottle number from 1 to 6\n"))
while(b_n not in [1,2,3,4,5,6]):
b_n = int(raw_input("Enter bottle number from 1 to 6\n"))
bottle = 'bottle_' + str(b_n)
            # simulation
current_bottle_orig_pos = get_object_position(bottle)
# real_world
# current_bottle_orig_pos = Real_poses(bottle)
elif(cmd == 'rb'): # reset bottle position
reset_model_position(bottle)
elif(cmd == 'ra'): # reset all models positions
reset_all()
elif(cmd == 'pgr'): # print gripper postiion
pos = robot.get_gripper_pose()
print("Current gripper coordinates: " + str(pos))
elif(cmd == 'parm'): # print arm postiion
pos = robot.get_arm_pose()
print("Current arm coordinates: " + str(pos))
elif(cmd == 'pj'): # print arm joints
current_joints = robot.get_arm_joints()
print("Current joints poistion: " + str(current_joints))
elif(cmd == 'setj'): # set robot joint angles
joints = robot.get_arm_joints()
# joints[0] = float(raw_input("Enter theta_0")) # We don't want to change the arm direction
t1 = raw_input("Enter theta_1: ")
t2 = raw_input("Enter theta_2: ")
t3 = raw_input("Enter theta_3: ")
if(t1 != ''):
joints[1] = float(t1)
if(t2 != ''):
joints[2] = float(t2)
if(t3 != ''):
joints[3] = float(t3)
joints[4] = 0
robot.set_joints(joints)
elif(cmd == 'att'): # attaches object to the gripper
robot.attach_object(bottle)
attached_objects = robot.scene.get_attached_objects([bottle])
print("Attached objects: " + str(attached_objects))
elif(cmd == 'box'):
robot.add_box()
robot.attach_object('box')
attached_objects = robot.scene.get_attached_objects([bottle])
print("Attached objects: " + str(attached_objects))
elif(cmd == 'del'):
delete_model(bottle)
print("Bottle " + str(bottle.split('_')[1]) + " was deleted")
elif(cmd == 'dela'):
delete_all()
print("All models were deleted")
elif(cmd == 'spawn'):
spawn_model(bottle)
print("Bottle " + str(bottle.split('_')[1]) + " was spawned")
elif(cmd == 'exit'): # exit control panel script
print('Finish performance')
return
else:
print('Wrong command')
if __name__ == '__main__':
control_panel() | [((16, 12, 16, 48), 'ik.MoveGroupPythonIntefaceTutorial', 'ik.MoveGroupPythonIntefaceTutorial', ({}, {}), '()', False, 'import ik\n'), ((22, 30, 22, 57), 'get_object_position.get_object_position', 'get_object_position', ({(22, 50, 22, 56): 'bottle'}, {}), '(bottle)', False, 'from get_object_position import get_object_position\n'), ((54, 20, 54, 48), 'get_object_position.get_object_position', 'get_object_position', ({(54, 40, 54, 47): '"""cup_1"""'}, {}), "('cup_1')", False, 'from get_object_position import get_object_position\n'), ((75, 12, 75, 28), 'rospy.sleep', 'rospy.sleep', ({(75, 24, 75, 27): '(1.5)'}, {}), '(1.5)', False, 'import rospy\n'), ((83, 38, 83, 65), 'get_object_position.get_object_position', 'get_object_position', ({(83, 58, 83, 64): 'bottle'}, {}), '(bottle)', False, 'from get_object_position import get_object_position\n'), ((87, 12, 87, 40), 'spawn_models.reset_model_position', 'reset_model_position', ({(87, 33, 87, 39): 'bottle'}, {}), '(bottle)', False, 'from spawn_models import reset_model_position, reset_all, spawn_model, spawn_all_models\n'), ((89, 12, 89, 23), 'spawn_models.reset_all', 'reset_all', ({}, {}), '()', False, 'from spawn_models import reset_model_position, reset_all, spawn_model, spawn_all_models\n'), ((123, 12, 123, 32), 'delete_models.delete_model', 'delete_model', ({(123, 25, 123, 31): 'bottle'}, {}), '(bottle)', False, 'from delete_models import delete_all, delete_model\n'), ((126, 12, 126, 24), 'delete_models.delete_all', 'delete_all', ({}, {}), '()', False, 'from delete_models import delete_all, delete_model\n'), ((129, 12, 129, 31), 'spawn_models.spawn_model', 'spawn_model', ({(129, 24, 129, 30): 'bottle'}, {}), '(bottle)', False, 'from spawn_models import reset_model_position, reset_all, spawn_model, spawn_all_models\n')] |
archanpatkar/Enigma | Enigma/Enigma.py | dbbc1fda99bf451a0284f051c724ed43915dfe2a | from Enigma.Rotor import Rotor
from Enigma.Reflector import Reflector
from Enigma.Plugboard import Plugboard
class Enigma:
def __init__(self , rotors = [ Rotor(0,"IC") , Rotor(0,"IIC") , Rotor(0,"IIIC") ] , plugboard = Plugboard() , reflector = Reflector("A")):
self.rotors = rotors
        def _bind_next(nxt):
            # bind the next rotor now; a closure over i would see the loop's final value
            return lambda *args: nxt.step()
        for i in range(len(rotors)):
            if i + 1 < len(rotors):
                rotors[i].on("Sidereal", _bind_next(rotors[i + 1]))
self.Plugboard = plugboard;
self.Reflector = reflector;
def encrypt(self,data):
data = data.upper().replace(" ","");
string = "";
for char in data:
string += self.each(char,True);
return string;
def decrypt(self,data):
data = data.upper();
string = "";
for char in data:
string += self.each(char,False);
return string;
def each(self,char,flag):
self.rotors[0].step()
output = self.Plugboard.get(char)
for rotor in self.rotors:
if flag:
output = rotor.scramble(output)
else:
output = rotor.unscramble(output)
output = self.Reflector.get(output)
for rotor in self.rotors[::-1]:
if flag:
output = rotor.scramble(output)
else:
output = rotor.unscramble(output)
return self.Plugboard.get(output);
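# Minimal, hypothetical usage sketch (not part of the original module):
#   machine = Enigma()
#   ciphertext = machine.encrypt("HELLO WORLD")
#   # a second machine built with the same rotor/plugboard/reflector settings
#   # would be used to run decrypt() on the ciphertext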
| [((7, 100, 7, 111), 'Enigma.Plugboard.Plugboard', 'Plugboard', ({}, {}), '()', False, 'from Enigma.Plugboard import Plugboard\n'), ((7, 126, 7, 140), 'Enigma.Reflector.Reflector', 'Reflector', ({(7, 136, 7, 139): '"""A"""'}, {}), "('A')", False, 'from Enigma.Reflector import Reflector\n'), ((7, 35, 7, 48), 'Enigma.Rotor.Rotor', 'Rotor', ({(7, 41, 7, 42): '(0)', (7, 43, 7, 47): '"""IC"""'}, {}), "(0, 'IC')", False, 'from Enigma.Rotor import Rotor\n'), ((7, 51, 7, 65), 'Enigma.Rotor.Rotor', 'Rotor', ({(7, 57, 7, 58): '(0)', (7, 59, 7, 64): '"""IIC"""'}, {}), "(0, 'IIC')", False, 'from Enigma.Rotor import Rotor\n'), ((7, 68, 7, 83), 'Enigma.Rotor.Rotor', 'Rotor', ({(7, 74, 7, 75): '(0)', (7, 76, 7, 82): '"""IIIC"""'}, {}), "(0, 'IIIC')", False, 'from Enigma.Rotor import Rotor\n')] |
xiki-tempula/spack | var/spack/repos/builtin/packages/exiv2/package.py | 9d66c05e93ab8a933fc59915040c0e0c86a4aac4 | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Exiv2(CMakePackage):
"""Exiv2 is a Cross-platform C++ library and a command line utility
to manage image metadata
"""
homepage = "https://www.exiv2.org/"
url = "https://github.com/Exiv2/exiv2/archive/v0.27.2.tar.gz"
version('0.27.2', sha256='3dbcaf01fbc5b98d42f091d1ff0d4b6cd9750dc724de3d9c0d113948570b2934')
depends_on('zlib', type='link')
depends_on('[email protected]:', type='link')
| [] |
JMIdeaMaker/django-magicauth | magicauth/send_token.py | ffca3423c46f8f3d7e49eaf374b33265d4730587 | import math
from django.contrib.auth import get_user_model
from django.contrib.sites.shortcuts import get_current_site
from django.core.mail import send_mail
from django.template import loader
from magicauth import settings as magicauth_settings
from django.conf import settings as django_settings
from magicauth.models import MagicToken
import sendgrid
from sendgrid import SendGridAPIClient
from sendgrid.helpers.mail import Mail
sg = sendgrid.SendGridAPIClient(django_settings.SENDGRID_API_KEY)
class SendTokenMixin(object):
"""
Helper for sending an email containing a link containing the MagicToken.
"""
def create_token(self, user):
token = MagicToken.objects.create(user=user)
return token
def get_user_from_email(self, user_email):
"""
Query the DB for the user corresponding to the email.
- We use get_user_model() instead of User (in case the Django app has customised the User
class)
- We use magicauth_settings.EMAIL_FIELD, which is the name of the field in the user
model. By default "username" but not always.
"""
user_class = get_user_model()
email_field = magicauth_settings.EMAIL_FIELD
field_lookup = {f"{email_field}__iexact": user_email}
user = user_class.objects.get(**field_lookup)
return user
def send_email(self, user, user_email, token, extra_context=None):
email_subject = magicauth_settings.EMAIL_SUBJECT
html_template = magicauth_settings.EMAIL_HTML_TEMPLATE
text_template = magicauth_settings.EMAIL_TEXT_TEMPLATE
from_email = magicauth_settings.FROM_EMAIL
context = {
"token": token,
"user": user,
"site": get_current_site(self.request),
"TOKEN_DURATION_MINUTES": math.floor(magicauth_settings.TOKEN_DURATION_SECONDS / 60),
"TOKEN_DURATION_SECONDS": magicauth_settings.TOKEN_DURATION_SECONDS,
}
if extra_context:
context.update(extra_context)
text_message = loader.render_to_string(text_template, context)
html_message = loader.render_to_string(html_template, context)
mail = Mail(
from_email=(
django_settings.MAGICAUTH_FROM_EMAIL,
django_settings.MAGICAUTH_SENDER
),
to_emails=[user_email],
subject=email_subject,
html_content=html_message
)
sg.send(mail)
def send_token(self, user_email, extra_context=None):
user = self.get_user_from_email(user_email)
token = self.create_token(user)
self.send_email(user, user_email, token, extra_context)
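# Hypothetical call site (added sketch; the address and extra context are made up):
#   self.send_token("[email protected]", extra_context={"next_url": "/dashboard/"})
# looks the user up by email, stores a MagicToken and emails the login link via SendGrid.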
| [((16, 5, 16, 65), 'sendgrid.SendGridAPIClient', 'sendgrid.SendGridAPIClient', ({(16, 32, 16, 64): 'django_settings.SENDGRID_API_KEY'}, {}), '(django_settings.SENDGRID_API_KEY)', False, 'import sendgrid\n'), ((25, 16, 25, 52), 'magicauth.models.MagicToken.objects.create', 'MagicToken.objects.create', (), '', False, 'from magicauth.models import MagicToken\n'), ((36, 21, 36, 37), 'django.contrib.auth.get_user_model', 'get_user_model', ({}, {}), '()', False, 'from django.contrib.auth import get_user_model\n'), ((56, 23, 56, 70), 'django.template.loader.render_to_string', 'loader.render_to_string', ({(56, 47, 56, 60): 'text_template', (56, 62, 56, 69): 'context'}, {}), '(text_template, context)', False, 'from django.template import loader\n'), ((57, 23, 57, 70), 'django.template.loader.render_to_string', 'loader.render_to_string', ({(57, 47, 57, 60): 'html_template', (57, 62, 57, 69): 'context'}, {}), '(html_template, context)', False, 'from django.template import loader\n'), ((59, 15, 67, 9), 'sendgrid.helpers.mail.Mail', 'Mail', (), '', False, 'from sendgrid.helpers.mail import Mail\n'), ((50, 20, 50, 50), 'django.contrib.sites.shortcuts.get_current_site', 'get_current_site', ({(50, 37, 50, 49): 'self.request'}, {}), '(self.request)', False, 'from django.contrib.sites.shortcuts import get_current_site\n'), ((51, 38, 51, 96), 'math.floor', 'math.floor', ({(51, 49, 51, 95): '(magicauth_settings.TOKEN_DURATION_SECONDS / 60)'}, {}), '(magicauth_settings.TOKEN_DURATION_SECONDS / 60)', False, 'import math\n')] |
loriab/qccddb | qcdb/util/paths.py | d9e156ef8b313ac0633211fc6b841f84a3ddde24 | import os
import sys
## {{{ http://code.activestate.com/recipes/52224/ (r1)
def search_file(filename, search_path):
"""Given an os.pathsep divided `search_path`, find first occurrence of
`filename`. Returns full path to file if found or None if unfound.
"""
file_found = False
paths = search_path.split(os.pathsep)
# paths = string.split(search_path, os.pathsep)
for path in paths:
if os.path.exists(os.path.join(path, filename)):
file_found = True
break
if file_found:
return os.path.abspath(os.path.join(path, filename))
else:
return None
## end of http://code.activestate.com/recipes/52224/ }}}
def all_casings(input_string):
"""Function to return a generator of all lettercase permutations
of *input_string*.
"""
if not input_string:
yield ""
else:
first = input_string[:1]
if first.lower() == first.upper():
for sub_casing in all_casings(input_string[1:]):
yield first + sub_casing
else:
for sub_casing in all_casings(input_string[1:]):
yield first.lower() + sub_casing
yield first.upper() + sub_casing
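# Quick illustration (added note): list(all_casings("ab")) yields
# ["ab", "Ab", "aB", "AB"] -- every upper/lower-case spelling of the input.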
def import_ignorecase(module, lenv=None):
"""Function to import *module* in any possible lettercase
permutation. Returns module object if available, None if not.
`lenv` is list (not str) of addl sys.path members to try.
"""
lenv = [] if lenv is None else lenv
with add_path(lenv):
modobj = None
for per in list(all_casings(module)):
try:
modobj = __import__(per)
except ImportError:
pass
else:
break
return modobj
class add_path:
"""https://stackoverflow.com/a/39855753"""
def __init__(self, paths):
# paths must be list
self.paths = paths
def __enter__(self):
for pth in reversed(self.paths):
sys.path.insert(0, pth)
def __exit__(self, exc_type, exc_value, traceback):
for pth in self.paths:
sys.path.remove(pth)
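# Hypothetical example (added sketch; the extra path is made up): look for a module
# under any capitalization while temporarily extending sys.path:
#   modobj = import_ignorecase("yaml", lenv=["/opt/extra/site-packages"])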
| [((15, 26, 15, 54), 'os.path.join', 'os.path.join', ({(15, 39, 15, 43): 'path', (15, 45, 15, 53): 'filename'}, {}), '(path, filename)', False, 'import os\n'), ((19, 31, 19, 59), 'os.path.join', 'os.path.join', ({(19, 44, 19, 48): 'path', (19, 50, 19, 58): 'filename'}, {}), '(path, filename)', False, 'import os\n'), ((75, 12, 75, 35), 'sys.path.insert', 'sys.path.insert', ({(75, 28, 75, 29): '(0)', (75, 31, 75, 34): 'pth'}, {}), '(0, pth)', False, 'import sys\n'), ((79, 12, 79, 32), 'sys.path.remove', 'sys.path.remove', ({(79, 28, 79, 31): 'pth'}, {}), '(pth)', False, 'import sys\n')] |
JiangBowen-master/DeepCTR | tests/models/DCN_test.py | 291ffb0ff3b8322f64bd839f963d5c7a70e6b358 | import pytest
import tensorflow as tf
from deepctr.estimator import DCNEstimator
from deepctr.models import DCN
from ..utils import check_model, get_test_data, SAMPLE_SIZE, get_test_data_estimator, check_estimator, \
Estimator_TEST_TF1
@pytest.mark.parametrize(
'cross_num,hidden_size,sparse_feature_num,cross_parameterization',
[(0, (8,), 2, 'vector'), (1, (), 1, 'vector'), (1, (8,), 3, 'vector'),
(0, (8,), 2, 'matrix'), (1, (), 1, 'matrix'), (1, (8,), 3, 'matrix'),
]
)
def test_DCN(cross_num, hidden_size, sparse_feature_num, cross_parameterization):
model_name = "DCN"
sample_size = SAMPLE_SIZE
x, y, feature_columns = get_test_data(sample_size, sparse_feature_num=sparse_feature_num,
dense_feature_num=sparse_feature_num)
model = DCN(feature_columns, feature_columns, cross_num=cross_num, cross_parameterization=cross_parameterization,
dnn_hidden_units=hidden_size, dnn_dropout=0.5)
check_model(model, model_name, x, y)
@pytest.mark.parametrize(
'cross_num,hidden_size,sparse_feature_num',
[(1, (8,), 3)
]
)
def test_DCNEstimator(cross_num, hidden_size, sparse_feature_num):
if not Estimator_TEST_TF1 and tf.__version__ < "2.2.0":
return
model_name = "DCN"
sample_size = SAMPLE_SIZE
linear_feature_columns, dnn_feature_columns, input_fn = get_test_data_estimator(sample_size,
sparse_feature_num=sparse_feature_num,
dense_feature_num=sparse_feature_num)
model = DCNEstimator(linear_feature_columns, dnn_feature_columns, cross_num=cross_num, dnn_hidden_units=hidden_size,
dnn_dropout=0.5)
check_estimator(model, input_fn)
# def test_DCN_invalid(embedding_size=8, cross_num=0, hidden_size=()):
# feature_dim_dict = {'sparse': [SparseFeat('sparse_1', 2), SparseFeat('sparse_2', 5), SparseFeat('sparse_3', 10)],
# 'dense': [SparseFeat('dense_1', 1), SparseFeat('dense_1', 1), SparseFeat('dense_1', 1)]}
# with pytest.raises(ValueError):
# _ = DCN(None, embedding_size=embedding_size, cross_num=cross_num, dnn_hidden_units=hidden_size, dnn_dropout=0.5)
if __name__ == "__main__":
pass
| [((10, 1, 15, 1), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(11, 4, 11, 69): '"""cross_num,hidden_size,sparse_feature_num,cross_parameterization"""', (12, 4, 14, 6): "[(0, (8,), 2, 'vector'), (1, (), 1, 'vector'), (1, (8,), 3, 'vector'), (0,\n (8,), 2, 'matrix'), (1, (), 1, 'matrix'), (1, (8,), 3, 'matrix')]"}, {}), "(\n 'cross_num,hidden_size,sparse_feature_num,cross_parameterization', [(0,\n (8,), 2, 'vector'), (1, (), 1, 'vector'), (1, (8,), 3, 'vector'), (0, (\n 8,), 2, 'matrix'), (1, (), 1, 'matrix'), (1, (8,), 3, 'matrix')])", False, 'import pytest\n'), ((28, 1, 32, 1), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(29, 4, 29, 46): '"""cross_num,hidden_size,sparse_feature_num"""', (30, 4, 31, 6): '[(1, (8,), 3)]'}, {}), "('cross_num,hidden_size,sparse_feature_num', [(1, (8\n ,), 3)])", False, 'import pytest\n'), ((23, 12, 24, 62), 'deepctr.models.DCN', 'DCN', (), '', False, 'from deepctr.models import DCN\n'), ((42, 12, 43, 41), 'deepctr.estimator.DCNEstimator', 'DCNEstimator', (), '', False, 'from deepctr.estimator import DCNEstimator\n')] |
jacnugent/fv3net | workflows/post_process_run/fv3post/gsutil.py | 84958651bdd17784fdab98f87ad0d65414c03368 | import os
import subprocess
import backoff
class GSUtilResumableUploadException(Exception):
pass
def _decode_to_str_if_bytes(s, encoding="utf-8"):
if isinstance(s, bytes):
return s.decode(encoding)
else:
return s
def authenticate():
try:
credentials = os.environ["GOOGLE_APPLICATION_CREDENTIALS"]
except KeyError:
pass
else:
subprocess.check_call(
["gcloud", "auth", "activate-service-account", "--key-file", credentials]
)
@backoff.on_exception(backoff.expo, GSUtilResumableUploadException, max_tries=3)
def upload_dir(d, dest):
try:
# Pipe stderr to stdout because gsutil logs upload progress there.
subprocess.check_output(
["gsutil", "-m", "rsync", "-r", "-e", d, dest], stderr=subprocess.STDOUT
)
except subprocess.CalledProcessError as e:
output = _decode_to_str_if_bytes(e.output)
if "ResumableUploadException" in output:
raise GSUtilResumableUploadException()
else:
raise e
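# Hypothetical usage (added sketch; the bucket path is made up):
#   authenticate()
#   upload_dir("/tmp/run_output", "gs://my-bucket/run_output")
# The backoff decorator retries the rsync up to 3 times when gsutil reports a
# ResumableUploadException.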
def download_directory(dir_, dest):
os.makedirs(dest, exist_ok=True)
subprocess.check_call(["gsutil", "-m", "rsync", "-r", dir_, dest])
def cp(source, destination):
subprocess.check_call(["gsutil", "cp", source, destination])
| [((29, 1, 29, 80), 'backoff.on_exception', 'backoff.on_exception', (), '', False, 'import backoff\n'), ((45, 4, 45, 36), 'os.makedirs', 'os.makedirs', (), '', False, 'import os\n'), ((46, 4, 46, 70), 'subprocess.check_call', 'subprocess.check_call', ({(46, 26, 46, 69): "['gsutil', '-m', 'rsync', '-r', dir_, dest]"}, {}), "(['gsutil', '-m', 'rsync', '-r', dir_, dest])", False, 'import subprocess\n'), ((50, 4, 50, 64), 'subprocess.check_call', 'subprocess.check_call', ({(50, 26, 50, 63): "['gsutil', 'cp', source, destination]"}, {}), "(['gsutil', 'cp', source, destination])", False, 'import subprocess\n'), ((24, 8, 26, 9), 'subprocess.check_call', 'subprocess.check_call', ({(25, 12, 25, 85): "['gcloud', 'auth', 'activate-service-account', '--key-file', credentials]"}, {}), "(['gcloud', 'auth', 'activate-service-account',\n '--key-file', credentials])", False, 'import subprocess\n'), ((33, 8, 35, 9), 'subprocess.check_output', 'subprocess.check_output', (), '', False, 'import subprocess\n')] |
rpappalax/deploy-tix | deploy_tix/__main__.py | a53c7fa7898b9f0c2f530c8abd8bab322a2eb7bc | import argparse
from deploy_tix.bugzilla_rest_client import BugzillaRESTClient
from deploy_tix.release_notes import ReleaseNotes
from output_helper import OutputHelper
def main(args=None):
parser = argparse.ArgumentParser(
description='Scripts for creating / updating deployment tickets in \
Bugzilla',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument(
'-a', '--application',
help='Example: loop-server',
required=True)
parser.add_argument(
'-B', '--bugzilla-mozilla',
help='Set this switch to post directly to bugzilla.mozilla.org \
(without switch posts to: bugzilla-dev.allizom.org)',
action='store_true',
default=False,
required=False)
subparsers = parser.add_subparsers(help='Ticket action')
# parser for ticket - {create} option
parser_create = \
subparsers.add_parser('NEW', help='Create a NEW deployment ticket.')
parser_create.add_argument(
'-o', '--repo-owner',
help='Example: mozilla-services',
default='mozilla-services',
required=False)
parser_create.add_argument(
'-e', '--environment',
help='Enter: STAGE, PROD',
default='STAGE',
required=False)
parser_create.add_argument(
'-m', '--cc-mail',
help='Example: [email protected] \
NOTE: must be a registered username!',
default='',
required=False)
# parser for ticket - {upate} option
parser_update = subparsers.add_parser(
'UPDATE',
help='UPDATE an existing deployment ticket'
)
parser_update.add_argument(
'-i', '--bug-id',
help='Example: 1234567',
required=False)
parser_update.add_argument(
'-c', '--comment',
help='Enter: <your bug comment>',
required=True)
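    # Example invocations (added sketch; the console entry point name is assumed):
    #   python -m deploy_tix -a loop-server NEW -o mozilla-services -e STAGE
    #   python -m deploy_tix -a loop-server UPDATE -i 1234567 -c "Deployed to STAGE"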
args = vars(parser.parse_args())
application = args['application']
bugzilla_mozilla = args['bugzilla_mozilla']
ticket = BugzillaRESTClient(bugzilla_mozilla)
if all(key in args for key in ['bug_id', 'comment']):
bug_id = args['bug_id']
comment = args['comment']
ticket.bug_update(application, comment, bug_id)
if all(key in args for key in ['repo_owner', 'application', 'environment']): # noqa
repo_owner = args['repo_owner']
environment = args['environment'].lower()
if args['cc_mail']:
cc_mail = args['cc_mail']
else:
cc_mail = ''
status = 'NEW'
output = OutputHelper()
output.log('Create deployment ticket', True, True)
notes = ReleaseNotes(repo_owner, application, environment)
description = notes.get_release_notes()
release_num = notes.last_tag
output.log('Release Notes', True)
output.log(description)
ticket.bug_create(
release_num, application, environment, status, description, cc_mail
)
| [((9, 13, 12, 63), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (), '', False, 'import argparse\n'), ((72, 13, 72, 49), 'deploy_tix.bugzilla_rest_client.BugzillaRESTClient', 'BugzillaRESTClient', ({(72, 32, 72, 48): 'bugzilla_mozilla'}, {}), '(bugzilla_mozilla)', False, 'from deploy_tix.bugzilla_rest_client import BugzillaRESTClient\n'), ((89, 17, 89, 31), 'output_helper.OutputHelper', 'OutputHelper', ({}, {}), '()', False, 'from output_helper import OutputHelper\n'), ((91, 16, 91, 66), 'deploy_tix.release_notes.ReleaseNotes', 'ReleaseNotes', ({(91, 29, 91, 39): 'repo_owner', (91, 41, 91, 52): 'application', (91, 54, 91, 65): 'environment'}, {}), '(repo_owner, application, environment)', False, 'from deploy_tix.release_notes import ReleaseNotes\n')] |
lebarsfa/vpython-wx | site-packages/visual/examples/drape.py | 38df062e5532b79f632f4f2a1abae86754c264a9 | from visual import *
print("""
Click to place spheres under falling string.
Right button drag or Ctrl-drag to rotate view.
Middle button drag or Alt-drag to zoom in or out.
On a two-button mouse, middle is left + right.
""")
# David Scherer
scene.title = "Drape"
restlength = 0.02
m = 0.010 * restlength
g = 9.8
dt = 0.002
k = 3
damp = (1-0)**dt
nspheres = 3
floor = 0
# Create the stringy thing:
band = curve( x = arange(-1,1,restlength),
y = 1,
radius = 0.02
)
band.p = band.pos * 0
scene.range = 1.5
scene.autoscale = 0
# Let the user position obstacles:
spheres = []
for i in range(nspheres):
s = sphere( pos = scene.mouse.getclick().pos, #(i*0.6 - 0.7,0.5 + i*0.1,0),
radius = 0.25,
color = (abs(sin(i)),cos(i)**2,(i%10)/10.0) )
spheres.append( s )
while True:
rate(1.0 / dt)
if scene.mouse.clicked:
i = len(spheres)
s = sphere( pos = scene.mouse.getclick().pos,
radius = 0.25,
color = (abs(sin(i)),cos(i)**2,(i%10)/10.0) )
spheres.append( s )
if floor:
below = less(band.pos[:,1],-1)
band.p[:,1] = where( below, 0, band.p[:,1] )
band.pos[:,1] = where( below, -1, band.pos[:,1] )
# need a more physical way to make 'damped springs' than this!
band.p = band.p * damp
#band.p[0] = 0 # nail down left endpoint
#band.p[-1] = 0 # nail down right endpoint
band.pos = band.pos + band.p/m*dt
#gravity
band.p[:,1] = band.p[:,1] - m * g * dt
# force[n] is the force on point n from point n+1 (to the right):
length = (band.pos[1:] - band.pos[:-1])
dist = sqrt(sum(length*length,-1))
force = k * ( dist - restlength )
force = length/dist[:,newaxis] * force[:,newaxis]
band.p[:-1] = band.p[:-1] + force*dt
band.p[1:] = band.p[1:] - force*dt
# color based on "stretch": blue -> white -> red
c = clip( dist/restlength * 0.5, 0, 2 )
# blue (compressed) -> white (relaxed) -> red (tension)
band.red[1:] = where( less(c,1), c, 1 )
band.green[1:] = where( less(c,1), c, 2-c )
band.blue[1:] = where( less(c,1), 1, 2-c )
for s in spheres:
dist = mag( band.pos - s.pos )[:,newaxis]
inside = less( dist, s.radius )
if sometrue(inside):
R = ( band.pos - s.pos ) / dist
surface = s.pos + (s.radius)*R
band.pos = surface*inside + band.pos*(1-inside)
pdotR = sum(asarray(band.p)*asarray(R),-1)
band.p = band.p - R*pdotR[:,newaxis]*inside
| [] |
dlee992/sdc | sdc/utilities/sdc_typing_utils.py | 1ebf55c00ef38dfbd401a70b3945e352a5a38b87 | # *****************************************************************************
# Copyright (c) 2020, Intel Corporation All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# *****************************************************************************
"""
| This file contains SDC utility functions related to typing compilation phase
"""
import numpy
import numba
import sdc
from numba import types
from numba.core.errors import TypingError
from numba.np import numpy_support
from sdc.datatypes.indexes import *
from sdc.str_arr_type import string_array_type, StringArrayType
from sdc.datatypes.categorical.types import Categorical
sdc_old_index_types = (types.Array, StringArrayType, )
sdc_pandas_index_types = (
EmptyIndexType,
PositionalIndexType,
RangeIndexType,
Int64IndexType,
MultiIndexType,
) + sdc_old_index_types
sdc_indexes_range_like = (
PositionalIndexType,
RangeIndexType,
)
# TO-DO: support caching of data allocated for range indexes at request for .values
sdc_indexes_wo_values_cache = (
EmptyIndexType,
PositionalIndexType,
RangeIndexType,
)
sdc_pandas_df_column_types = (
types.Array,
StringArrayType,
Categorical,
)
class TypeChecker:
"""
Validate object type and raise TypingError if the type is invalid, e.g.:
Method nsmallest(). The object n
given: bool
expected: int
"""
msg_template = '{} The object {}\n given: {}\n expected: {}'
def __init__(self, func_name):
"""
Parameters
----------
func_name: :obj:`str`
name of the function where types checking
"""
self.func_name = func_name
def raise_exc(self, data, expected_types, name=''):
"""
Raise exception with unified message
Parameters
----------
data: :obj:`any`
real type of the data
expected_types: :obj:`str`
expected types inserting directly to the exception
name: :obj:`str`
name of the parameter
"""
msg = self.msg_template.format(self.func_name, name, data, expected_types)
raise TypingError(msg)
def check(self, data, accepted_type, name=''):
"""
Check data type belongs to specified type
Parameters
----------
data: :obj:`any`
real type of the data
accepted_type: :obj:`type`
accepted type
name: :obj:`str`
name of the parameter
"""
if not isinstance(data, accepted_type):
self.raise_exc(data, accepted_type.__name__, name=name)
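# Hypothetical usage (added sketch), mirroring the error format in the docstring above:
#   ty_checker = TypeChecker('Method nsmallest().')
#   ty_checker.check(n, types.Integer, name='n')  # raises TypingError when n is not an Integer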
class SDCLimitation(Exception):
"""Exception to be raised in case of SDC limitation"""
pass
def kwsparams2list(params):
"""Convert parameters dict to a list of string of a format 'key=value'"""
return ['{}={}'.format(k, v) for k, v in params.items()]
def sigparams2list(param_names, defaults):
"""Creates a list of strings of a format 'key=value' from parameter names and default values"""
return [(f'{param}' if param not in defaults else f'{param}={defaults[param]}') for param in param_names]
def has_literal_value(var, value):
"""Used during typing to check that variable var is a Numba literal value equal to value"""
if not isinstance(var, types.Literal):
return False
if value is None:
return isinstance(var, types.NoneType) or var.literal_value is value
elif isinstance(value, type(bool)):
return var.literal_value is value
else:
return var.literal_value == value
def has_python_value(var, value):
"""Used during typing to check that variable var was resolved as Python type and has specific value"""
if not isinstance(var, type(value)):
return False
if value is None or isinstance(value, type(bool)):
return var is value
else:
return var == value
def is_default(var, value):
return has_literal_value(var, value) or has_python_value(var, value) or isinstance(var, types.Omitted)
def check_is_numeric_array(type_var):
"""Used during typing to check that type_var is a numeric numpy arrays"""
return check_is_array_of_dtype(type_var, types.Number)
def check_index_is_numeric(ty_series):
"""Used during typing to check that series has numeric index"""
return isinstance(ty_series.index.dtype, types.Number)
def check_types_comparable(ty_left, ty_right):
"""Used during typing to check that specified types can be compared"""
if hasattr(ty_left, 'dtype'):
ty_left = ty_left.dtype
if hasattr(ty_right, 'dtype'):
ty_right = ty_right.dtype
# add the rest of supported types here
if isinstance(ty_left, types.Number):
return isinstance(ty_right, types.Number)
if isinstance(ty_left, types.UnicodeType):
return isinstance(ty_right, types.UnicodeType)
if isinstance(ty_left, types.Boolean):
return isinstance(ty_right, types.Boolean)
if isinstance(ty_left, (types.Tuple, types.UniTuple)):
# FIXME: just for now to unblock compilation
return ty_left == ty_right
return False
def check_arrays_comparable(ty_left, ty_right):
"""Used during typing to check that underlying arrays of specified types can be compared"""
return ((ty_left == string_array_type and ty_right == string_array_type)
or (check_is_numeric_array(ty_left) and check_is_numeric_array(ty_right)))
def check_is_array_of_dtype(type_var, dtype):
"""Used during typing to check that type_var is a numeric numpy array of specific dtype"""
return isinstance(type_var, types.Array) and isinstance(type_var.dtype, dtype)
def find_common_dtype_from_numpy_dtypes(array_types, scalar_types):
"""Used to find common numba dtype for a sequences of numba dtypes each representing some numpy dtype"""
np_array_dtypes = [numpy_support.as_dtype(dtype) for dtype in array_types]
np_scalar_dtypes = [numpy_support.as_dtype(dtype) for dtype in scalar_types]
np_common_dtype = numpy.find_common_type(np_array_dtypes, np_scalar_dtypes)
numba_common_dtype = numpy_support.from_dtype(np_common_dtype)
return numba_common_dtype
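# Illustration (added note, not in the original file): for array dtypes
# [types.int64, types.float32] and no scalar types, numpy promotes the pair to
# float64, so the function returns the corresponding numba float64 type.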
def find_index_common_dtype(left, right):
"""Used to find common dtype for indexes of two series and verify if index dtypes are equal"""
left_index_dtype = left.dtype
right_index_dtype = right.dtype
index_dtypes_match = left_index_dtype == right_index_dtype
if not index_dtypes_match:
numba_index_common_dtype = find_common_dtype_from_numpy_dtypes(
[left_index_dtype, right_index_dtype], [])
else:
numba_index_common_dtype = left_index_dtype
return index_dtypes_match, numba_index_common_dtype
def gen_impl_generator(codegen, impl_name):
"""Generate generator of an implementation"""
def _df_impl_generator(*args, **kwargs):
func_text, global_vars = codegen(*args, **kwargs)
loc_vars = {}
exec(func_text, global_vars, loc_vars)
_impl = loc_vars[impl_name]
return _impl
return _df_impl_generator
def check_signed_integer(ty):
return isinstance(ty, types.Integer) and ty.signed
def _check_dtype_param_type(dtype):
""" Returns True is dtype is a valid type for dtype parameter and False otherwise.
Used in RangeIndex ctor and other methods that take dtype parameter. """
valid_dtype_types = (types.NoneType, types.Omitted, types.UnicodeType, types.NumberClass)
return isinstance(dtype, valid_dtype_types) or dtype is None
| [((215, 22, 215, 79), 'numpy.find_common_type', 'numpy.find_common_type', ({(215, 45, 215, 60): 'np_array_dtypes', (215, 62, 215, 78): 'np_scalar_dtypes'}, {}), '(np_array_dtypes, np_scalar_dtypes)', False, 'import numpy\n'), ((216, 25, 216, 66), 'numba.np.numpy_support.from_dtype', 'numpy_support.from_dtype', ({(216, 50, 216, 65): 'np_common_dtype'}, {}), '(np_common_dtype)', False, 'from numba.np import numpy_support\n'), ((104, 14, 104, 30), 'numba.core.errors.TypingError', 'TypingError', ({(104, 26, 104, 29): 'msg'}, {}), '(msg)', False, 'from numba.core.errors import TypingError\n'), ((213, 23, 213, 52), 'numba.np.numpy_support.as_dtype', 'numpy_support.as_dtype', ({(213, 46, 213, 51): 'dtype'}, {}), '(dtype)', False, 'from numba.np import numpy_support\n'), ((214, 24, 214, 53), 'numba.np.numpy_support.as_dtype', 'numpy_support.as_dtype', ({(214, 47, 214, 52): 'dtype'}, {}), '(dtype)', False, 'from numba.np import numpy_support\n')] |
PROxZIMA/Competitive-Coding | Hackerrank/Contests/Project Euler/euler010.py | ba6b365ea130b6fcaa15c5537b530ed363bab793 | from math import sqrt
# Naive method: loop over every number up to N and test it for primality, adding the primes to the sum. Each primality test costs O(√n), so the full scan is roughly O(n·√n). Time of execution ~ 8 sec for n = 1000000
def prime(n):
yield 2
yield 3
for p in range(5, n+1, 2):
if p % 3 == 0:
continue
else:
for i in range (5, int(sqrt(p)) + 1, 6):
if p % i == 0 or p % (i + 2) == 0:
break
else:
yield p
s = set(prime(1000000))
for _ in range(int(input())):
n = int(input())
print(sum(i for i in s if i <= n))
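# Sanity check (added note): for n = 10 the primes are 2, 3, 5 and 7, so both the
# naive scan above and the sieve below should print 17.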
# Sieve implementation: Time complexity of O(n*log(log(n))). Time of execution ~ 2sec for n = 1000000
limit = 1000000
sieve = [0] + [1, 0] * 500000
sieve[0], sieve[1], sieve[2] = 0, 0, 2
p = 3
while p <= limit:
if sieve[p]:
sieve[p] = sieve[p-1] + p
for i in range(p*p, limit+1, p):
sieve[i] = 0
else:
sieve[p] = sieve[p-1]
sieve[p+1] = sieve[p]
p += 2
for _ in range(int(input())):
print(sieve[int(input())])
| [((12, 35, 12, 42), 'math.sqrt', 'sqrt', ({(12, 40, 12, 41): 'p'}, {}), '(p)', False, 'from math import sqrt\n')] |
ckamtsikis/cmssw | DQM/L1TMonitorClient/python/L1EmulatorErrorFlagClient_cfi.py | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | import FWCore.ParameterSet.Config as cms
from DQMServices.Core.DQMEDHarvester import DQMEDHarvester
l1EmulatorErrorFlagClient = DQMEDHarvester("L1EmulatorErrorFlagClient",
#
# for each L1 system, give:
# - SystemLabel: system label
# - HwValLabel: system label as used in hardware validation package
# (the package producing the ErrorFlag histogram)
# - SystemMask: system mask: if 1, the system is masked in the summary plot
# - SystemFolder: the folder where the ErrorFlag histogram is looked for
#
# the position in the parameter set gives, in reverse order, the position in the reportSummaryMap
# in the emulator column (left column)
L1Systems = cms.VPSet(
cms.PSet(
SystemLabel = cms.string("ECAL"),
HwValLabel = cms.string("ETP"),
SystemMask = cms.uint32(1),
SystemFolder = cms.string("")
),
cms.PSet(
SystemLabel = cms.string("HCAL"),
HwValLabel = cms.string("HTP"),
SystemMask = cms.uint32(1),
SystemFolder = cms.string("")
),
cms.PSet(
SystemLabel = cms.string("RCT"),
HwValLabel = cms.string("RCT"),
SystemMask = cms.uint32(0),
SystemFolder = cms.string("")
),
cms.PSet(
SystemLabel = cms.string("Stage1Layer2"),
HwValLabel = cms.string("Stage1Layer2"),
SystemMask = cms.uint32(0),
SystemFolder = cms.string("")
),
cms.PSet(
SystemLabel = cms.string("DTTF"),
HwValLabel = cms.string("DTF"),
SystemMask = cms.uint32(0),
SystemFolder = cms.string("")
),
cms.PSet(
SystemLabel = cms.string("DTTPG"),
HwValLabel = cms.string("DTP"),
SystemMask = cms.uint32(1),
SystemFolder = cms.string("")
),
cms.PSet(
SystemLabel = cms.string("CSCTF"),
HwValLabel = cms.string("CTF"),
SystemMask = cms.uint32(1),
SystemFolder = cms.string("")
),
cms.PSet(
SystemLabel = cms.string("CSCTPG"),
HwValLabel = cms.string("CTP"),
SystemMask = cms.uint32(1),
SystemFolder = cms.string("")
),
cms.PSet(
SystemLabel = cms.string("RPC"),
HwValLabel = cms.string("RPC"),
SystemMask = cms.uint32(0),
SystemFolder = cms.string("")
),
cms.PSet(
SystemLabel = cms.string("GMT"),
HwValLabel = cms.string("GMT"),
SystemMask = cms.uint32(0),
SystemFolder = cms.string("")
),
cms.PSet(
SystemLabel = cms.string("GT"),
HwValLabel = cms.string("GT"),
SystemMask = cms.uint32(1),
SystemFolder = cms.string("L1TEMU/Stage1GTexpert")
)
)
)
| [((17, 38, 17, 56), 'FWCore.ParameterSet.Config.string', 'cms.string', ({(17, 49, 17, 55): '"""ECAL"""'}, {}), "('ECAL')", True, 'import FWCore.ParameterSet.Config as cms\n'), ((18, 37, 18, 54), 'FWCore.ParameterSet.Config.string', 'cms.string', ({(18, 48, 18, 53): '"""ETP"""'}, {}), "('ETP')", True, 'import FWCore.ParameterSet.Config as cms\n'), ((19, 38, 19, 51), 'FWCore.ParameterSet.Config.uint32', 'cms.uint32', ({(19, 49, 19, 50): '1'}, {}), '(1)', True, 'import FWCore.ParameterSet.Config as cms\n'), ((20, 39, 20, 53), 'FWCore.ParameterSet.Config.string', 'cms.string', ({(20, 50, 20, 52): '""""""'}, {}), "('')", True, 'import FWCore.ParameterSet.Config as cms\n'), ((23, 38, 23, 56), 'FWCore.ParameterSet.Config.string', 'cms.string', ({(23, 49, 23, 55): '"""HCAL"""'}, {}), "('HCAL')", True, 'import FWCore.ParameterSet.Config as cms\n'), ((24, 37, 24, 54), 'FWCore.ParameterSet.Config.string', 'cms.string', ({(24, 48, 24, 53): '"""HTP"""'}, {}), "('HTP')", True, 'import FWCore.ParameterSet.Config as cms\n'), ((25, 38, 25, 51), 'FWCore.ParameterSet.Config.uint32', 'cms.uint32', ({(25, 49, 25, 50): '1'}, {}), '(1)', True, 'import FWCore.ParameterSet.Config as cms\n'), ((26, 39, 26, 53), 'FWCore.ParameterSet.Config.string', 'cms.string', ({(26, 50, 26, 52): '""""""'}, {}), "('')", True, 'import FWCore.ParameterSet.Config as cms\n'), ((29, 38, 29, 55), 'FWCore.ParameterSet.Config.string', 'cms.string', ({(29, 49, 29, 54): '"""RCT"""'}, {}), "('RCT')", True, 'import FWCore.ParameterSet.Config as cms\n'), ((30, 37, 30, 54), 'FWCore.ParameterSet.Config.string', 'cms.string', ({(30, 48, 30, 53): '"""RCT"""'}, {}), "('RCT')", True, 'import FWCore.ParameterSet.Config as cms\n'), ((31, 38, 31, 51), 'FWCore.ParameterSet.Config.uint32', 'cms.uint32', ({(31, 49, 31, 50): '0'}, {}), '(0)', True, 'import FWCore.ParameterSet.Config as cms\n'), ((32, 39, 32, 53), 'FWCore.ParameterSet.Config.string', 'cms.string', ({(32, 50, 32, 52): '""""""'}, {}), "('')", True, 'import FWCore.ParameterSet.Config as cms\n'), ((35, 38, 35, 64), 'FWCore.ParameterSet.Config.string', 'cms.string', ({(35, 49, 35, 63): '"""Stage1Layer2"""'}, {}), "('Stage1Layer2')", True, 'import FWCore.ParameterSet.Config as cms\n'), ((36, 37, 36, 63), 'FWCore.ParameterSet.Config.string', 'cms.string', ({(36, 48, 36, 62): '"""Stage1Layer2"""'}, {}), "('Stage1Layer2')", True, 'import FWCore.ParameterSet.Config as cms\n'), ((37, 38, 37, 51), 'FWCore.ParameterSet.Config.uint32', 'cms.uint32', ({(37, 49, 37, 50): '0'}, {}), '(0)', True, 'import FWCore.ParameterSet.Config as cms\n'), ((38, 39, 38, 53), 'FWCore.ParameterSet.Config.string', 'cms.string', ({(38, 50, 38, 52): '""""""'}, {}), "('')", True, 'import FWCore.ParameterSet.Config as cms\n'), ((41, 38, 41, 56), 'FWCore.ParameterSet.Config.string', 'cms.string', ({(41, 49, 41, 55): '"""DTTF"""'}, {}), "('DTTF')", True, 'import FWCore.ParameterSet.Config as cms\n'), ((42, 37, 42, 54), 'FWCore.ParameterSet.Config.string', 'cms.string', ({(42, 48, 42, 53): '"""DTF"""'}, {}), "('DTF')", True, 'import FWCore.ParameterSet.Config as cms\n'), ((43, 38, 43, 51), 'FWCore.ParameterSet.Config.uint32', 'cms.uint32', ({(43, 49, 43, 50): '0'}, {}), '(0)', True, 'import FWCore.ParameterSet.Config as cms\n'), ((44, 39, 44, 53), 'FWCore.ParameterSet.Config.string', 'cms.string', ({(44, 50, 44, 52): '""""""'}, {}), "('')", True, 'import FWCore.ParameterSet.Config as cms\n'), ((47, 38, 47, 57), 'FWCore.ParameterSet.Config.string', 'cms.string', ({(47, 49, 47, 56): '"""DTTPG"""'}, {}), "('DTTPG')", True, 'import 
FWCore.ParameterSet.Config as cms\n'), ((48, 37, 48, 54), 'FWCore.ParameterSet.Config.string', 'cms.string', ({(48, 48, 48, 53): '"""DTP"""'}, {}), "('DTP')", True, 'import FWCore.ParameterSet.Config as cms\n'), ((49, 38, 49, 51), 'FWCore.ParameterSet.Config.uint32', 'cms.uint32', ({(49, 49, 49, 50): '1'}, {}), '(1)', True, 'import FWCore.ParameterSet.Config as cms\n'), ((50, 39, 50, 53), 'FWCore.ParameterSet.Config.string', 'cms.string', ({(50, 50, 50, 52): '""""""'}, {}), "('')", True, 'import FWCore.ParameterSet.Config as cms\n'), ((53, 38, 53, 57), 'FWCore.ParameterSet.Config.string', 'cms.string', ({(53, 49, 53, 56): '"""CSCTF"""'}, {}), "('CSCTF')", True, 'import FWCore.ParameterSet.Config as cms\n'), ((54, 37, 54, 54), 'FWCore.ParameterSet.Config.string', 'cms.string', ({(54, 48, 54, 53): '"""CTF"""'}, {}), "('CTF')", True, 'import FWCore.ParameterSet.Config as cms\n'), ((55, 38, 55, 51), 'FWCore.ParameterSet.Config.uint32', 'cms.uint32', ({(55, 49, 55, 50): '1'}, {}), '(1)', True, 'import FWCore.ParameterSet.Config as cms\n'), ((56, 39, 56, 53), 'FWCore.ParameterSet.Config.string', 'cms.string', ({(56, 50, 56, 52): '""""""'}, {}), "('')", True, 'import FWCore.ParameterSet.Config as cms\n'), ((59, 38, 59, 58), 'FWCore.ParameterSet.Config.string', 'cms.string', ({(59, 49, 59, 57): '"""CSCTPG"""'}, {}), "('CSCTPG')", True, 'import FWCore.ParameterSet.Config as cms\n'), ((60, 37, 60, 54), 'FWCore.ParameterSet.Config.string', 'cms.string', ({(60, 48, 60, 53): '"""CTP"""'}, {}), "('CTP')", True, 'import FWCore.ParameterSet.Config as cms\n'), ((61, 38, 61, 51), 'FWCore.ParameterSet.Config.uint32', 'cms.uint32', ({(61, 49, 61, 50): '1'}, {}), '(1)', True, 'import FWCore.ParameterSet.Config as cms\n'), ((62, 39, 62, 53), 'FWCore.ParameterSet.Config.string', 'cms.string', ({(62, 50, 62, 52): '""""""'}, {}), "('')", True, 'import FWCore.ParameterSet.Config as cms\n'), ((65, 38, 65, 55), 'FWCore.ParameterSet.Config.string', 'cms.string', ({(65, 49, 65, 54): '"""RPC"""'}, {}), "('RPC')", True, 'import FWCore.ParameterSet.Config as cms\n'), ((66, 37, 66, 54), 'FWCore.ParameterSet.Config.string', 'cms.string', ({(66, 48, 66, 53): '"""RPC"""'}, {}), "('RPC')", True, 'import FWCore.ParameterSet.Config as cms\n'), ((67, 38, 67, 51), 'FWCore.ParameterSet.Config.uint32', 'cms.uint32', ({(67, 49, 67, 50): '0'}, {}), '(0)', True, 'import FWCore.ParameterSet.Config as cms\n'), ((68, 39, 68, 53), 'FWCore.ParameterSet.Config.string', 'cms.string', ({(68, 50, 68, 52): '""""""'}, {}), "('')", True, 'import FWCore.ParameterSet.Config as cms\n'), ((71, 38, 71, 55), 'FWCore.ParameterSet.Config.string', 'cms.string', ({(71, 49, 71, 54): '"""GMT"""'}, {}), "('GMT')", True, 'import FWCore.ParameterSet.Config as cms\n'), ((72, 37, 72, 54), 'FWCore.ParameterSet.Config.string', 'cms.string', ({(72, 48, 72, 53): '"""GMT"""'}, {}), "('GMT')", True, 'import FWCore.ParameterSet.Config as cms\n'), ((73, 38, 73, 51), 'FWCore.ParameterSet.Config.uint32', 'cms.uint32', ({(73, 49, 73, 50): '0'}, {}), '(0)', True, 'import FWCore.ParameterSet.Config as cms\n'), ((74, 39, 74, 53), 'FWCore.ParameterSet.Config.string', 'cms.string', ({(74, 50, 74, 52): '""""""'}, {}), "('')", True, 'import FWCore.ParameterSet.Config as cms\n'), ((77, 38, 77, 54), 'FWCore.ParameterSet.Config.string', 'cms.string', ({(77, 49, 77, 53): '"""GT"""'}, {}), "('GT')", True, 'import FWCore.ParameterSet.Config as cms\n'), ((78, 37, 78, 53), 'FWCore.ParameterSet.Config.string', 'cms.string', ({(78, 48, 78, 52): '"""GT"""'}, {}), "('GT')", True, 'import 
FWCore.ParameterSet.Config as cms\n'), ((79, 38, 79, 51), 'FWCore.ParameterSet.Config.uint32', 'cms.uint32', ({(79, 49, 79, 50): '1'}, {}), '(1)', True, 'import FWCore.ParameterSet.Config as cms\n'), ((80, 39, 80, 74), 'FWCore.ParameterSet.Config.string', 'cms.string', ({(80, 50, 80, 73): '"""L1TEMU/Stage1GTexpert"""'}, {}), "('L1TEMU/Stage1GTexpert')", True, 'import FWCore.ParameterSet.Config as cms\n')] |
jzwang43/codalab-worksheets | codalab/model/tables.py | b1d4c6cc4b72f4dfa35a15f876e2d0ce9a03d28d | """
The SQLAlchemy table objects for the CodaLab bundle system tables.
"""
# TODO: Replace String and Text columns with Unicode and UnicodeText as appropriate
# This way, SQLAlchemy will automatically perform conversions to and from UTF-8
# encoding, or use appropriate database engine-specific data types for Unicode
# data. Currently, only worksheet.title uses the Unicode column type.
from sqlalchemy import Column, ForeignKey, Index, MetaData, Table, UniqueConstraint
from sqlalchemy.types import (
BigInteger,
Boolean,
DateTime,
Enum,
Float,
Integer,
LargeBinary,
String,
Text,
Unicode,
)
from sqlalchemy.sql.schema import ForeignKeyConstraint
db_metadata = MetaData()
bundle = Table(
'bundle',
db_metadata,
Column(
'id',
BigInteger().with_variant(Integer, "sqlite"),
primary_key=True,
nullable=False,
autoincrement=True,
),
Column('uuid', String(63), nullable=False),
Column('bundle_type', String(63), nullable=False),
# The command will be NULL except for run bundles.
Column('command', Text, nullable=True),
# The data_hash will be NULL if the bundle's value is still being computed.
Column('data_hash', String(63), nullable=True),
Column('state', String(63), nullable=False),
Column('owner_id', String(255), nullable=True),
Column('is_anonymous', Boolean, nullable=False, default=False),
UniqueConstraint('uuid', name='uix_1'),
Index('bundle_data_hash_index', 'data_hash'),
Index('state_index', 'state'), # Needed for the bundle manager.
)
# Includes things like name, description, etc.
bundle_metadata = Table(
'bundle_metadata',
db_metadata,
Column(
'id',
BigInteger().with_variant(Integer, "sqlite"),
primary_key=True,
nullable=False,
autoincrement=True,
),
Column('bundle_uuid', String(63), ForeignKey(bundle.c.uuid), nullable=False),
Column('metadata_key', String(63), nullable=False),
Column('metadata_value', Text, nullable=False),
Index('metadata_kv_index', 'metadata_key', 'metadata_value', mysql_length=63),
)
# For each child_uuid, we have: key = child_path, target = (parent_uuid, parent_path)
bundle_dependency = Table(
'bundle_dependency',
db_metadata,
Column(
'id',
BigInteger().with_variant(Integer, "sqlite"),
primary_key=True,
nullable=False,
autoincrement=True,
),
Column('child_uuid', String(63), ForeignKey(bundle.c.uuid), nullable=False),
Column('child_path', Text, nullable=False),
# Deliberately omit ForeignKey(bundle.c.uuid), because bundles can have
# dependencies to bundles not (yet) in the system.
Column('parent_uuid', String(63), nullable=False),
Column('parent_path', Text, nullable=False),
)
# The worksheet table does not have many columns now, but it will eventually
# include columns for owner, group, permissions, etc.
worksheet = Table(
'worksheet',
db_metadata,
Column(
'id',
BigInteger().with_variant(Integer, "sqlite"),
primary_key=True,
nullable=False,
autoincrement=True,
),
Column('uuid', String(63), nullable=False),
Column('name', String(255), nullable=False),
Column('owner_id', String(255), nullable=True),
Column(
'title', Unicode(255), nullable=True
), # Short human-readable description of the worksheet
Column(
'frozen', DateTime, nullable=True
), # When the worksheet was frozen (forever immutable) if it is.
Column('is_anonymous', Boolean, nullable=False, default=False),
Column(
'date_created', DateTime
), # When the worksheet was created; Set to null if the worksheet created before v0.5.31; Set to current timestamp by default
Column(
'date_last_modified', DateTime
), # When the worksheet was last modified; Set to null if the worksheet created before v0.5.31; Set to current_timestamp by default
UniqueConstraint('uuid', name='uix_1'),
Index('worksheet_name_index', 'name'),
Index('worksheet_owner_index', 'owner_id'),
)
worksheet_item = Table(
'worksheet_item',
db_metadata,
Column(
'id',
BigInteger().with_variant(Integer, "sqlite"),
primary_key=True,
nullable=False,
autoincrement=True,
),
Column('worksheet_uuid', String(63), ForeignKey(worksheet.c.uuid), nullable=False),
# A worksheet item is either:
# - type = bundle (bundle_uuid != null)
# - type = worksheet (subworksheet_uuid != null)
# - type = markup (value != null)
# - type = directive (value != null)
# Deliberately omit ForeignKey(bundle.c.uuid), because worksheets can contain
# bundles and worksheets not (yet) in the system.
Column('bundle_uuid', String(63), nullable=True),
Column('subworksheet_uuid', String(63), nullable=True),
Column('value', Text, nullable=False), # TODO: make this nullable
Column('type', String(20), nullable=False),
Column('sort_key', Integer, nullable=True),
Index('worksheet_item_worksheet_uuid_index', 'worksheet_uuid'),
Index('worksheet_item_bundle_uuid_index', 'bundle_uuid'),
Index('worksheet_item_subworksheet_uuid_index', 'subworksheet_uuid'),
)
# Worksheet tags
worksheet_tag = Table(
'worksheet_tag',
db_metadata,
Column(
'id',
BigInteger().with_variant(Integer, "sqlite"),
primary_key=True,
nullable=False,
autoincrement=True,
),
Column('worksheet_uuid', String(63), ForeignKey(worksheet.c.uuid), nullable=False),
Column('tag', String(63), nullable=False),
Index('worksheet_tag_worksheet_uuid_index', 'worksheet_uuid'),
Index('worksheet_tag_tag_index', 'tag'),
)
group = Table(
'group',
db_metadata,
Column(
'id',
BigInteger().with_variant(Integer, "sqlite"),
primary_key=True,
nullable=False,
autoincrement=True,
),
Column('uuid', String(63), nullable=False),
Column('name', String(255), nullable=False),
Column('user_defined', Boolean),
Column('owner_id', String(255), nullable=True),
UniqueConstraint('uuid', name='uix_1'),
Index('group_name_index', 'name'),
Index('group_owner_id_index', 'owner_id'),
)
user_group = Table(
'user_group',
db_metadata,
Column(
'id',
BigInteger().with_variant(Integer, "sqlite"),
primary_key=True,
nullable=False,
autoincrement=True,
),
Column('group_uuid', String(63), ForeignKey(group.c.uuid), nullable=False),
Column('user_id', String(63), ForeignKey("user.user_id"), nullable=False),
# Whether a user is able to modify this group.
Column('is_admin', Boolean),
Index('group_uuid_index', 'group_uuid'),
Index('user_id_index', 'user_id'),
)
# Permissions for bundles
group_bundle_permission = Table(
'group_bundle_permission',
db_metadata,
Column(
'id',
BigInteger().with_variant(Integer, "sqlite"),
primary_key=True,
nullable=False,
autoincrement=True,
),
Column('group_uuid', String(63), ForeignKey(group.c.uuid), nullable=False),
# Reference to a bundle
Column('object_uuid', String(63), ForeignKey(bundle.c.uuid), nullable=False),
# Permissions encoded as integer (see below)
Column('permission', Integer, nullable=False),
)
# Permissions for worksheets
group_object_permission = Table(
'group_object_permission',
db_metadata,
Column(
'id',
BigInteger().with_variant(Integer, "sqlite"),
primary_key=True,
nullable=False,
autoincrement=True,
),
Column('group_uuid', String(63), ForeignKey(group.c.uuid), nullable=False),
# Reference to a worksheet object
Column('object_uuid', String(63), ForeignKey(worksheet.c.uuid), nullable=False),
# Permissions encoded as integer (see below)
Column('permission', Integer, nullable=False),
)
# A permission value is one of the following: none (0), read (1), or all (2).
GROUP_OBJECT_PERMISSION_NONE = 0x00
GROUP_OBJECT_PERMISSION_READ = 0x01
GROUP_OBJECT_PERMISSION_ALL = 0x02
# A notifications value is one of the following:
NOTIFICATIONS_NONE = 0x00 # Receive no notifications
NOTIFICATIONS_IMPORTANT = 0x01 # Receive only important notifications
NOTIFICATIONS_GENERAL = 0x02 # Receive general notifications (new features)
# Store information about users.
user = Table(
'user',
db_metadata,
Column(
'id',
BigInteger().with_variant(Integer, "sqlite"),
primary_key=True,
nullable=False,
autoincrement=True,
),
# Basic information
Column('user_id', String(63), nullable=False),
Column('user_name', String(63), nullable=False, unique=True),
Column(
'email', String(254), nullable=False, unique=True
), # Length of 254 to be compliant with RFC3696/5321
Column(
'notifications', Integer, nullable=False, default=NOTIFICATIONS_GENERAL
), # Which emails user wants to receive
Column('last_login', DateTime), # Null if user has never logged in
Column(
'is_active', Boolean, nullable=False, default=True
), # Set to False instead of deleting users to maintain foreign key integrity
Column('first_name', String(30, convert_unicode=True)),
Column('last_name', String(30, convert_unicode=True)),
Column('date_joined', DateTime, nullable=False),
Column('has_access', Boolean, default=False, nullable=True),
Column('is_verified', Boolean, nullable=False, default=False),
Column('is_superuser', Boolean, nullable=False, default=False),
Column('password', String(128), nullable=False),
# Additional information
Column('affiliation', String(255, convert_unicode=True), nullable=True),
Column('url', String(255, convert_unicode=True), nullable=True),
# Quotas
Column('time_quota', Float, nullable=False), # Number of seconds allowed
Column('parallel_run_quota', Integer, nullable=False), # Number of parallel jobs allowed
Column('time_used', Float, nullable=False), # Number of seconds already used
Column('disk_quota', Float, nullable=False), # Number of bytes allowed
Column('disk_used', Float, nullable=False), # Number of bytes already used
Index('user_user_id_index', 'user_id'),
Index('user_user_name_index', 'user_name'),
UniqueConstraint('user_id', name='uix_1'),
)
# Stores (email) verification keys
user_verification = Table(
'user_verification',
db_metadata,
Column(
'id',
BigInteger().with_variant(Integer, "sqlite"),
primary_key=True,
nullable=False,
autoincrement=True,
),
Column('user_id', String(63), ForeignKey(user.c.user_id), nullable=False),
Column('date_created', DateTime, nullable=False),
Column('date_sent', DateTime, nullable=True),
Column('key', String(64), nullable=False),
)
# Stores password reset codes
user_reset_code = Table(
'user_reset_code',
db_metadata,
Column(
'id',
BigInteger().with_variant(Integer, "sqlite"),
primary_key=True,
nullable=False,
autoincrement=True,
),
Column('user_id', String(63), ForeignKey(user.c.user_id), nullable=False),
Column('date_created', DateTime, nullable=False),
Column('code', String(64), nullable=False),
)
# OAuth2 Tables
oauth2_client = Table(
'oauth2_client',
db_metadata,
Column(
'id',
BigInteger().with_variant(Integer, "sqlite"),
primary_key=True,
nullable=False,
autoincrement=True,
),
Column('client_id', String(63), nullable=False),
Column('name', String(63), nullable=True),
Column('secret', String(255), nullable=True),
Column('user_id', String(63), ForeignKey(user.c.user_id), nullable=True),
Column(
'grant_type',
Enum("authorization_code", "password", "client_credentials", "refresh_token"),
nullable=False,
),
Column('response_type', Enum("code", "token"), nullable=False),
Column('scopes', Text, nullable=False), # comma-separated list of allowed scopes
Column('redirect_uris', Text, nullable=False), # comma-separated list of allowed redirect URIs
UniqueConstraint('client_id', name='uix_1'),
)
oauth2_token = Table(
'oauth2_token',
db_metadata,
Column(
'id',
BigInteger().with_variant(Integer, "sqlite"),
primary_key=True,
nullable=False,
autoincrement=True,
),
Column('client_id', String(63), ForeignKey(oauth2_client.c.client_id), nullable=False),
Column('user_id', String(63), ForeignKey(user.c.user_id), nullable=False),
Column('scopes', Text, nullable=False),
Column('access_token', String(255), unique=True),
Column('refresh_token', String(255), unique=True),
Column('expires', DateTime, nullable=False),
)
oauth2_auth_code = Table(
'oauth2_auth_code',
db_metadata,
Column(
'id',
BigInteger().with_variant(Integer, "sqlite"),
primary_key=True,
nullable=False,
autoincrement=True,
),
Column('client_id', String(63), ForeignKey(oauth2_client.c.client_id), nullable=False),
Column('user_id', String(63), ForeignKey(user.c.user_id), nullable=False),
Column('scopes', Text, nullable=False),
Column('code', String(100), nullable=False),
Column('expires', DateTime, nullable=False),
Column('redirect_uri', String(255), nullable=False),
)
# Store information about users' questions or feedback.
chat = Table(
'chat',
db_metadata,
Column(
'id',
BigInteger().with_variant(Integer, "sqlite"),
primary_key=True,
nullable=False,
autoincrement=True,
), # Primary key
Column('time', DateTime, nullable=False), # When did the user send this query?
Column('sender_user_id', String(63), nullable=True), # Who sent it?
Column('recipient_user_id', String(63), nullable=True), # Who received it?
Column('message', Text, nullable=False), # What's the content of the chat?
Column(
'worksheet_uuid', String(63), nullable=True
), # What is the id of the worksheet that the sender is on?
Column(
'bundle_uuid', String(63), nullable=True
), # What is the id of the bundle that the sender is on?
)
# Store information about workers.
worker = Table(
'worker',
db_metadata,
Column('user_id', String(63), ForeignKey(user.c.user_id), primary_key=True, nullable=False),
Column('worker_id', String(127), primary_key=True, nullable=False),
Column('group_uuid', String(63), ForeignKey(group.c.uuid), nullable=True),
Column('tag', Text, nullable=True), # Tag that allows for scheduling runs on specific workers.
Column('cpus', Integer, nullable=False), # Number of CPUs on worker.
Column('gpus', Integer, nullable=False), # Number of GPUs on worker.
Column('memory_bytes', BigInteger, nullable=False), # Total memory of worker.
Column('free_disk_bytes', BigInteger, nullable=True), # Available disk space on worker.
Column(
'checkin_time', DateTime, nullable=False
), # When the worker last checked in with the bundle service.
Column('socket_id', Integer, nullable=False), # Socket ID worker listens for messages on.
Column(
'shared_file_system', Boolean, nullable=False
), # Whether the worker and the server have a shared filesystem.
Column(
'tag_exclusive', Boolean, nullable=False
), # Whether worker runs bundles if and only if they match tags.
Column(
'exit_after_num_runs', Integer, nullable=False
), # Number of jobs allowed to run on worker.
Column('is_terminating', Boolean, nullable=False),
)
# Store information about all sockets currently allocated to each worker.
worker_socket = Table(
'worker_socket',
db_metadata,
Column('user_id', String(63), ForeignKey(user.c.user_id), nullable=False),
Column('worker_id', String(127), nullable=False),
# No foreign key constraint on the worker table so that we can create a socket
# for the worker before adding the worker to the worker table.
Column('socket_id', Integer, primary_key=True, nullable=False),
)
# Store information about the bundles currently running on each worker.
worker_run = Table(
'worker_run',
db_metadata,
Column('user_id', String(63), ForeignKey(user.c.user_id), nullable=False),
Column('worker_id', String(127), nullable=False),
ForeignKeyConstraint(['user_id', 'worker_id'], ['worker.user_id', 'worker.worker_id']),
Column('run_uuid', String(63), ForeignKey(bundle.c.uuid), nullable=False),
Index('uuid_index', 'run_uuid'),
)
# Store information about the dependencies available on each worker.
worker_dependency = Table(
'worker_dependency',
db_metadata,
Column('user_id', String(63), ForeignKey(user.c.user_id), primary_key=True, nullable=False),
Column('worker_id', String(127), primary_key=True, nullable=False),
ForeignKeyConstraint(['user_id', 'worker_id'], ['worker.user_id', 'worker.worker_id']),
# Serialized list of dependencies for the user/worker combination.
# See WorkerModel for the serialization method.
Column('dependencies', LargeBinary, nullable=False),
)
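# --- Illustrative usage sketch (added for clarity; not part of the original schema module) ---
# All Table objects above are registered on ``db_metadata``, so the full schema can be
# emitted against an engine in a single call. The in-memory SQLite URL below is an
# assumption chosen to keep the demo self-contained, and it presumes a SQLAlchemy
# release that still accepts the column arguments used above (e.g. the 1.3 series,
# where ``convert_unicode`` is accepted).
if __name__ == '__main__':
    from sqlalchemy import create_engine
    _demo_engine = create_engine('sqlite://')  # in-memory database (assumption)
    db_metadata.create_all(_demo_engine)  # issues CREATE TABLE for every table defined above
    print(sorted(db_metadata.tables))  # table names, e.g. ['bundle', 'bundle_dependency', ...]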
hansthienpondt/ansible-networking-collections | grpc/plugins/connection/gnmi.py | 278c88fceac297693a31df3cb54c942284823fbd | # (c) 2020 Nokia
#
# Licensed under the BSD 3 Clause license
# SPDX-License-Identifier: BSD-3-Clause
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = """
---
author:
- "Hans Thienpondt (@HansThienpondt)"
- "Sven Wisotzky (@wisotzky)"
connection: gnmi
short_description: Provides a persistent gRPC connection for gNMI API service
description:
- This gRPC plugin provides methods to interact with the gNMI service.
- OpenConfig gNMI specification
https://github.com/openconfig/reference/blob/master/rpc/gnmi/gnmi-specification.md
- gNMI API
https://raw.githubusercontent.com/openconfig/gnmi/master/proto/gnmi/gnmi.proto
- This connection plugin provides a persistent communication channel to
remote devices using gRPC including the underlying transport (TLS).
  - The plugin binds to the gNMI gRPC service. It provides wrappers for gNMI
requests (Capabilities, Get, Set, Subscribe)
requirements:
- grpcio
- protobuf
options:
host:
description:
- Target host FQDN or IP address to establish gRPC connection.
default: inventory_hostname
vars:
- name: ansible_host
port:
type: int
description:
- Specifies the port on the remote device that listens for connections
        when establishing the gRPC connection. If None, only the C(host) part
will be used.
ini:
- section: defaults
key: remote_port
env:
- name: ANSIBLE_REMOTE_PORT
vars:
- name: ansible_port
remote_user:
description:
- The username used to authenticate to the remote device when the gRPC
connection is first established. If the remote_user is not specified,
the connection will use the username of the logged in user.
- Can be configured from the CLI via the C(--user) or C(-u) options.
ini:
- section: defaults
key: remote_user
env:
- name: ANSIBLE_REMOTE_USER
vars:
- name: ansible_user
password:
description:
- Configures the user password used to authenticate to the remote device
when first establishing the gRPC connection.
vars:
- name: ansible_password
- name: ansible_ssh_pass
private_key_file:
description:
- The PEM encoded private key file used to authenticate to the
        remote device when first establishing the gRPC connection.
ini:
- section: grpc_connection
key: private_key_file
env:
- name: ANSIBLE_PRIVATE_KEY_FILE
vars:
- name: ansible_private_key_file
root_certificates_file:
description:
      - The PEM encoded root certificate file used to create an SSL-enabled
        channel. If the value is None, it reads the root certificates from
a default location chosen by gRPC at runtime.
ini:
- section: grpc_connection
key: root_certificates_file
env:
- name: ANSIBLE_ROOT_CERTIFICATES_FILE
vars:
- name: ansible_root_certificates_file
certificate_chain_file:
description:
      - The PEM encoded certificate chain file used to create an SSL-enabled
channel. If the value is None, no certificate chain is used.
ini:
- section: grpc_connection
key: certificate_chain_file
env:
- name: ANSIBLE_CERTIFICATE_CHAIN_FILE
vars:
- name: ansible_certificate_chain_file
certificate_path:
description:
- Folder to search for certificate and key files
ini:
- section: grpc_connection
key: certificate_path
env:
- name: ANSIBLE_CERTIFICATE_PATH
vars:
- name: ansible_certificate_path
gnmi_encoding:
description:
- Encoding used for gNMI communication
- Must be either JSON or JSON_IETF
- If not provided, will run CapabilityRequest for auto-detection
ini:
- section: grpc_connection
key: gnmi_encoding
env:
- name: ANSIBLE_GNMI_ENCODING
vars:
- name: ansible_gnmi_encoding
grpc_channel_options:
description:
- Key/Value pairs (dict) to define gRPC channel options to be used
- gRPC reference
U(https://grpc.github.io/grpc/core/group__grpc__arg__keys.html)
- Provide the I(ssl_target_name_override) option to override the TLS
        subject or subjectAltName (only when secure connections are
        used). The option must be provided when the FQDN or IPv4
        address that is used to connect to the device differs from the
        subject name that is provided in the host certificate. This is
        needed because TLS validates the hostname or IP address to avoid
man-in-the-middle attacks.
vars:
- name: ansible_grpc_channel_options
grpc_environment:
description:
- Key/Value pairs (dict) to define environment settings specific to gRPC
- The standard mechanism to provide/set the environment in Ansible
cannot be used, because those environment settings are not passed to
the client process that establishes the gRPC connection.
      - Set C(GRPC_VERBOSITY) and C(GRPC_TRACE) to set up gRPC logging. Need to
        add code for log forwarding of gRPC-related log messages to the
persistent messages log (see below).
- Set C(HTTPS_PROXY) to specify your proxy settings (if needed).
- Set C(GRPC_SSL_CIPHER_SUITES) in case the default TLS ciphers do not match
what is offered by the gRPC server.
vars:
- name: ansible_grpc_environment
persistent_connect_timeout:
type: int
description:
- Configures, in seconds, the amount of time to wait when trying to
initially establish a persistent connection. If this value expires
before the connection to the remote device is completed, the connection
will fail.
default: 5
ini:
- section: persistent_connection
key: connect_timeout
env:
- name: ANSIBLE_PERSISTENT_CONNECT_TIMEOUT
vars:
- name: ansible_connect_timeout
persistent_command_timeout:
type: int
description:
- Configures the default timeout value (in seconds) when awaiting a
        response after issuing a call to an RPC. If the RPC does not return
        before the timeout is exceeded, an error is generated and the connection
is closed.
default: 300
ini:
- section: persistent_connection
key: command_timeout
env:
- name: ANSIBLE_PERSISTENT_COMMAND_TIMEOUT
vars:
- name: ansible_command_timeout
persistent_log_messages:
type: boolean
description:
      - This flag enables logging of the command executed and the response
        received from the target device in the Ansible log file. For this
        option to work, the 'log_path' Ansible configuration option must be
        set to a file path with write access.
      - Be sure to fully understand the security implications of enabling this
        option, as it could create a security vulnerability by logging
        sensitive information in the log file.
default: False
ini:
- section: persistent_connection
key: log_messages
env:
- name: ANSIBLE_PERSISTENT_LOG_MESSAGES
vars:
- name: ansible_persistent_log_messages
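    # Illustrative ansible.cfg excerpt (assumed layout) combining the ini
    # sections documented above; the values shown are the documented defaults:
    #   [persistent_connection]
    #   connect_timeout = 5
    #   command_timeout = 300
    #   log_messages = False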
"""
import os
import re
import json
import base64
import datetime
try:
import grpc
HAS_GRPC = True
except ImportError:
HAS_GRPC = False
try:
from google import protobuf
HAS_PROTOBUF = True
except ImportError:
HAS_PROTOBUF = False
from ansible.errors import AnsibleConnectionFailure, AnsibleError
from ansible.plugins.connection import NetworkConnectionBase
from ansible.plugins.connection import ensure_connect
from google.protobuf import json_format
from ansible_collections.nokia.grpc.plugins.connection.pb import gnmi_pb2
from ansible.module_utils._text import to_text
class Connection(NetworkConnectionBase):
"""
Connection plugin for gRPC
To use gRPC connections in Ansible one (or more) sub-plugin(s) for the
required gRPC service(s) must be loaded. To load gRPC sub-plugins use the
method `register_service()` with the name of the sub-plugin to be
registered.
After loading the sub-plugin, Ansible modules can call methods provided by
that sub-plugin. There is a wrapper available that consumes the attribute
name {sub-plugin name}__{method name} to call a specific method of that
sub-plugin.
"""
transport = "nokia.grpc.gnmi"
has_pipelining = True
def __init__(self, play_context, new_stdin, *args, **kwargs):
super(Connection, self).__init__(
play_context, new_stdin, *args, **kwargs
)
self._task_uuid = to_text(kwargs.get("task_uuid", ""))
if not HAS_PROTOBUF:
raise AnsibleError(
"protobuf is required to use gRPC connection type. " +
"Please run 'pip install protobuf'"
)
if not HAS_GRPC:
raise AnsibleError(
"grpcio is required to use gRPC connection type. " +
"Please run 'pip install grpcio'"
)
self._connected = False
def readFile(self, optionName):
"""
Reads a binary certificate/key file
Parameters:
optionName(str): used to read filename from options
Returns:
File content
Raises:
            AnsibleConnectionFailure: file does not exist or cannot be read
"""
path = self.get_option('certificate_path')
if not path:
path = '/etc/ssl:/etc/ssl/certs:/etc/ca-certificates'
filename = self.get_option(optionName)
if filename:
if filename.startswith('~'):
filename = os.path.expanduser(filename)
if not filename.startswith('/'):
for entry in path.split(':'):
if os.path.isfile(os.path.join(entry, filename)):
filename = os.path.join(entry, filename)
break
if os.path.isfile(filename):
try:
with open(filename, 'rb') as f:
return f.read()
except Exception as exc:
raise AnsibleConnectionFailure(
'Failed to read cert/keys file %s: %s' % (filename, exc)
)
else:
raise AnsibleConnectionFailure(
'Cert/keys file %s does not exist' % filename
)
return None
def _connect(self):
"""
Establish gRPC connection to remote node and create gNMI stub.
This method will establish the persistent gRPC connection, if not
already done. After this, the gNMI stub will be created. To get
        visibility about the gNMI capabilities of the remote device, a gNMI
        CapabilityRequest will be sent and the result will be persisted.
Parameters:
None
Returns:
None
"""
if self.connected:
            self.queue_message('v', 'gRPC connection to host %s already exists' % self._target)
return
grpcEnv = self.get_option('grpc_environment') or {}
if not isinstance(grpcEnv, dict):
raise AnsibleConnectionFailure("grpc_environment must be a dict")
for key in grpcEnv:
if grpcEnv[key]:
os.environ[key] = str(grpcEnv[key])
else:
try:
del os.environ[key]
except KeyError:
                    # no such setting in current environment, but that's ok
pass
self._login_credentials = [
('username', self.get_option('remote_user')),
('password', self.get_option('password'))
]
host = self.get_option('host')
port = self.get_option('port')
self._target = host if port is None else '%s:%d' % (host, port)
self._timeout = self.get_option('persistent_command_timeout')
certs = {}
certs['root_certificates'] = self.readFile('root_certificates_file')
certs['certificate_chain'] = self.readFile('certificate_chain_file')
certs['private_key'] = self.readFile('private_key_file')
options = self.get_option('grpc_channel_options')
if options:
if not isinstance(options, dict):
raise AnsibleConnectionFailure("grpc_channel_options must be a dict")
options = options.items()
if certs['root_certificates'] or certs['private_key'] or certs['certificate_chain']:
self.queue_message('v', 'Starting secure gRPC connection')
creds = grpc.ssl_channel_credentials(**certs)
self._channel = grpc.secure_channel(self._target, creds, options=options)
else:
self.queue_message('v', 'Starting insecure gRPC connection')
self._channel = grpc.insecure_channel(self._target, options=options)
self.queue_message('v', "gRPC connection established for user %s to %s" %
(self.get_option('remote_user'), self._target))
self.queue_message('v', 'Creating gNMI stub')
self._stub = gnmi_pb2.gNMIStub(self._channel)
self._encoding = self.get_option('gnmi_encoding')
if not self._encoding:
self.queue_message('v', 'Run CapabilityRequest()')
request = gnmi_pb2.CapabilityRequest()
response = self._stub.Capabilities(request, metadata=self._login_credentials)
self.queue_message('v', 'CapabilityRequest() succeeded')
self._gnmiVersion = response.gNMI_version
self._yangModels = response.supported_models
if gnmi_pb2.Encoding.Value('JSON_IETF') in response.supported_encodings:
self._encoding = 'JSON_IETF'
elif gnmi_pb2.Encoding.Value('JSON') in response.supported_encodings:
self._encoding = 'JSON'
else:
raise AnsibleConnectionFailure("No compatible supported encoding found (JSON or JSON_IETF)")
else:
if self._encoding not in ['JSON_IETF', 'JSON']:
raise AnsibleConnectionFailure("Incompatible encoding '%s' requested (JSON or JSON_IETF)" % self._encoding)
self._encoding_value = gnmi_pb2.Encoding.Value(self._encoding)
self._connected = True
        self.queue_message('v', 'gRPC/gNMI connection has been established successfully')
def close(self):
"""
Closes the active gRPC connection to the target host
Parameters:
None
Returns:
None
"""
if self._connected:
self.queue_message('v', "Closing gRPC connection to target host")
self._channel.close()
super(Connection, self).close()
# -----------------------------------------------------------------------
def _encodeXpath(self, xpath='/'):
"""
Encodes XPATH to dict representation that allows conversion to gnmi_pb.Path object
Parameters:
xpath (str): path string using XPATH syntax
Returns:
(dict): path dict using gnmi_pb2.Path structure for easy conversion
"""
mypath = []
xpath = xpath.strip('\t\n\r /')
if xpath:
path_elements = re.split('''/(?=(?:[^\[\]]|\[[^\[\]]+\])*$)''', xpath)
for e in path_elements:
entry = {'name': e.split("[", 1)[0]}
eKeys = re.findall('\[(.*?)\]', e)
dKeys = dict(x.split('=', 1) for x in eKeys)
if dKeys:
entry['key'] = dKeys
mypath.append(entry)
return {'elem': mypath}
return {}
def _decodeXpath(self, path):
"""
Decodes XPATH from dict representation converted from gnmi_pb.Path object
Parameters:
path (dict): decoded gnmi_pb2.Path object
Returns:
(str): path string using XPATH syntax
"""
result = []
if 'elem' not in path:
return ""
for elem in path['elem']:
tmp = elem['name']
if 'key' in elem:
for k, v in elem['key'].items():
tmp += "[%s=%s]" % (k, v)
result.append(tmp)
return '/'.join(result)
def _encodeVal(self, data):
"""
Encodes value to dict representation that allows conversion to gnmi_pb.TypedValue object
Parameters:
data (ANY): data to be encoded as gnmi_pb.TypedValue object
Returns:
(dict): dict using gnmi_pb.TypedValue structure for easy conversion
"""
value = base64.b64encode(json.dumps(data).encode())
if self._encoding == 'JSON_IETF':
return {'jsonIetfVal': value}
else:
return {'jsonVal': value}
def _decodeVal(self, val):
"""
Decodes value from dict representation converted from gnmi_pb.TypedValue object
Parameters:
val (dict): decoded gnmi_pb.TypedValue object
Returns:
(ANY): extracted data
"""
if 'jsonIetfVal' in val:
return json.loads(base64.b64decode(val['jsonIetfVal']))
elif 'jsonVal' in val:
return json.loads(base64.b64decode(val['jsonVal']))
else:
raise AnsibleConnectionFailure("Ansible gNMI plugin does not support encoding for value: %s" % json.dumps(val))
def _dictToList(self, aDict):
for key in aDict.keys():
if key.startswith('___'):
aDict[key[3:]] = [self._dictToList(val) if isinstance(val, dict) else val for val in aDict[key].values()]
del aDict[key]
else:
if isinstance(aDict[key], dict):
aDict[key] = self._dictToList(aDict[key])
return aDict
def _mergeToSingleDict(self, rawData):
result = {}
for entry in rawData:
if 'syncResponse' in entry and entry['syncResponse']:
# Ignore: SyncResponse is sent after initial update
break
elif 'update' not in entry:
# Ignore: entry without updates
break
elif 'timestamp' not in entry:
# Subscribe response, enter update context
entry = entry['update']
else:
# Get response, keep context
pass
prfx = result
if ('prefix' in entry) and ('elem' in entry['prefix']):
prfx_elements = entry['prefix']['elem']
else:
prfx_elements = []
for elem in prfx_elements:
eleName = elem['name']
if 'key' in elem:
eleKey = json.dumps(elem['key'])
eleName = '___'+eleName
# Path Element has key => must be list()
if eleName in prfx:
# Path Element exists => Change Context
prfx = prfx[eleName]
if eleKey not in prfx:
# List entry does not exist => Create
prfx[eleKey] = elem['key']
prfx = prfx[eleKey]
else:
# Path Element does not exist => Create
prfx[eleName] = {}
prfx = prfx[eleName]
prfx[eleKey] = elem['key']
prfx = prfx[eleKey]
else:
                    # Path Element has no key => must be dict()
if eleName in prfx:
# Path Element exists => Change Context
prfx = prfx[eleName]
else:
# Path Element does not exist => Create
prfx[eleName] = {}
prfx = prfx[eleName]
for _upd in entry['update']:
if 'val' not in _upd:
# requested path without content (no value) => skip
continue
elif ('path' in _upd) and ('elem' in _upd['path']):
path_elements = _upd['path']['elem']
cPath = prfx
elif prfx_elements:
path_elements = prfx_elements
cPath = result
else:
                    # No path at all, replace the object tree with the value
result = self._decodeVal(_upd['val'])
prfx = result
continue
# If path_elements has more than just a single entry,
# we need to create/navigate to the specified subcontext
for elem in path_elements[:-1]:
eleName = elem['name']
if 'key' in elem:
eleKey = json.dumps(elem['key'])
eleName = '___'+eleName
# Path Element has key => must be list()
if eleName in cPath:
# Path Element exists => Change Context
cPath = cPath[eleName]
if eleKey not in cPath:
# List entry does not exist => Create
cPath[eleKey] = elem['key']
cPath = cPath[eleKey]
else:
# Path Element does not exist => Create
cPath[eleName] = {}
cPath = cPath[eleName]
cPath[eleKey] = elem['key']
cPath = cPath[eleKey]
else:
                        # Path Element has no key => must be dict()
if eleName in cPath:
# Path Element exists => Change Context
cPath = cPath[eleName]
else:
# Path Element does not exist => Create
cPath[eleName] = {}
cPath = cPath[eleName]
# The last entry of path_elements is the leaf element
# that needs to be created/updated
leaf_elem = path_elements[-1]
if 'key' in leaf_elem:
eleKey = json.dumps(leaf_elem['key'])
eleName = '___'+leaf_elem['name']
if eleName not in cPath:
cPath[eleName] = {}
cPath = cPath[eleName]
cPath[eleKey] = self._decodeVal(_upd['val'])
else:
cPath[leaf_elem['name']] = self._decodeVal(_upd['val'])
return self._dictToList(result)
def _simplifyUpdates(self, rawData):
for msg in rawData:
entry = json_format.MessageToDict(msg)
if 'syncResponse' in entry:
# Ignore: SyncResponse is sent after initial update
pass
elif 'update' in entry:
result = {}
update = entry['update']
if 'prefix' in update:
result['prefix'] = '/'+self._decodeXpath(update['prefix'])
if 'timestamp' in update:
result['timestamp'] = datetime.datetime.fromtimestamp(float(update['timestamp'])/1000000000).isoformat()
if 'update' in update:
result['values'] = {self._decodeXpath(u['path']): self._decodeVal(u['val']) for u in update['update']}
yield result
else:
# Ignore: Invalid message format
pass
# -----------------------------------------------------------------------
@ensure_connect
def gnmiCapabilities(self):
"""
Executes a gNMI Capabilities request
Parameters:
None
Returns:
str: gNMI capabilities converted into JSON format
"""
request = gnmi_pb2.CapabilityRequest()
auth = self._login_credentials
try:
response = self._stub.Capabilities(request, metadata=auth)
except grpc.RpcError as e:
raise AnsibleConnectionFailure("%s" % e)
return json_format.MessageToJson(response)
@ensure_connect
def gnmiGet(self, *args, **kwargs):
"""
Executes a gNMI Get request
Encoding that is used for data serialization is automatically determined
based on the remote device capabilities. This gNMI plugin has implemented
        support for JSON_IETF (preferred) and JSON (fallback).
Parameters:
type (str): Type of data that is requested: ALL, CONFIG, STATE
prefix (str): Path prefix that is added to all paths (XPATH syntax)
            path (list): List of paths (str) to be captured
Returns:
str: GetResponse message converted into JSON format
"""
# Remove all input parameters from kwargs that are not set
input = dict(filter(lambda x: x[1], kwargs.items()))
# Adjust input parameters to match specification for gNMI SetRequest
if 'prefix' in input:
input['prefix'] = self._encodeXpath(input['prefix'])
if 'path' in input:
input['path'] = [self._encodeXpath(path) for path in input['path']]
if 'type' in input:
input['type'] = input['type'].upper()
input['encoding'] = self._encoding_value
request = json_format.ParseDict(input, gnmi_pb2.GetRequest())
auth = self._login_credentials
try:
response = self._stub.Get(request, metadata=auth)
except grpc.RpcError as e:
raise AnsibleConnectionFailure("%s" % e)
output = self._mergeToSingleDict(json_format.MessageToDict(response)['notification'])
return json.dumps(output, indent=4).encode()
@ensure_connect
def gnmiSet(self, *args, **kwargs):
"""
Executes a gNMI Set request
Encoding that is used for data serialization is automatically determined
based on the remote device capabilities. This gNMI plugin has implemented
        support for JSON_IETF (preferred) and JSON (fallback).
Parameters:
prefix (str): Path prefix that is added to all paths (XPATH syntax)
update (list): Path/Value pairs to be updated
replace (list): Path/Value pairs to be replaced
delete (list): Paths (str) to be deleted
Returns:
str: SetResponse message converted into JSON format
"""
# Remove all input parameters from kwargs that are not set
input = dict(filter(lambda x: x[1], kwargs.items()))
# Backup options are not to be used in gNMI SetRequest
if 'backup' in input:
del input['backup']
if 'backup_options' in input:
del input['backup_options']
# Adjust input parameters to match specification for gNMI SetRequest
if 'prefix' in input:
input['prefix'] = self._encodeXpath(input['prefix'])
if 'delete' in input:
input['delete'] = [self._encodeXpath(entry) for entry in input['delete']]
if 'update' in input:
for entry in input['update']:
entry['path'] = self._encodeXpath(entry['path'])
entry['val'] = self._encodeVal(entry['val'])
if 'replace' in input:
for entry in input['replace']:
entry['path'] = self._encodeXpath(entry['path'])
entry['val'] = self._encodeVal(entry['val'])
request = json_format.ParseDict(input, gnmi_pb2.SetRequest())
auth = self._login_credentials
try:
response = self._stub.Set(request, metadata=auth)
except grpc.RpcError as e:
raise AnsibleConnectionFailure("%s" % e)
output = json_format.MessageToDict(response)
output['timestamp'] = datetime.datetime.fromtimestamp(float(output['timestamp'])/1000000000).isoformat()
if 'prefix' in output:
output['prefix'] = self._decodeXpath(output['prefix'])
for item in output['response']:
item['path'] = self._decodeXpath(item['path'])
return json.dumps(output, indent=4).encode()
@ensure_connect
def gnmiSubscribe(self, *args, **kwargs):
"""
Executes a gNMI Subscribe request
Encoding that is used for data serialization is automatically determined
based on the remote device capabilities. This gNMI plugin has implemented
        support for JSON_IETF (preferred) and JSON (fallback).
Parameters:
prefix (str): Path prefix that is added to all paths (XPATH syntax)
mode (str): Mode of subscription (STREAM, ONCE)
subscription (list of dict): Subscription specification (path, interval, submode)
duration (int): timeout, to stop receiving
qos (int): DSCP marking that is used
updates_only (bool): Send only updates to initial state
allow_aggregation (bool): Aggregate elements marked as eligible for aggregation
Returns:
str: Updates received converted into JSON format
"""
# Remove all input parameters from kwargs that are not set
input = dict(filter(lambda x: x[1], kwargs.items()))
# Adjust input parameters to match specification for gNMI SubscribeRequest
if 'mode' in input:
input['mode'] = input['mode'].upper()
input['encoding'] = self._encoding_value
if 'prefix' in input:
input['prefix'] = self._encodeXpath(input['prefix'])
if 'subscription' in input:
for item in input['subscription']:
item['path'] = self._encodeXpath(item['path'])
# Extract duration from input attributes
if 'duration' in input:
duration = input['duration']
del input['duration']
else:
duration = 20
request = json_format.ParseDict({'subscribe': input}, gnmi_pb2.SubscribeRequest())
auth = self._login_credentials
try:
output = []
responses = self._stub.Subscribe(iter([request]), duration, metadata=auth)
if input['mode'] == 'ONCE':
responses = [json_format.MessageToDict(response) for response in responses]
output = self._mergeToSingleDict(responses)
else:
for update in self._simplifyUpdates(responses):
output.append(update)
except grpc.RpcError as e:
if e.code() == grpc.StatusCode.DEADLINE_EXCEEDED:
if input['mode'] == 'ONCE':
raise AnsibleConnectionFailure("gNMI ONCE Subscription timed out")
else:
# RPC timed out, which is okay
pass
else:
raise AnsibleConnectionFailure("%s" % e)
return json.dumps(output, indent=4).encode()
| [((376, 21, 376, 53), 'ansible_collections.nokia.grpc.plugins.connection.pb.gnmi_pb2.gNMIStub', 'gnmi_pb2.gNMIStub', ({(376, 39, 376, 52): 'self._channel'}, {}), '(self._channel)', False, 'from ansible_collections.nokia.grpc.plugins.connection.pb import gnmi_pb2\n'), ((398, 31, 398, 70), 'ansible_collections.nokia.grpc.plugins.connection.pb.gnmi_pb2.Encoding.Value', 'gnmi_pb2.Encoding.Value', ({(398, 55, 398, 69): 'self._encoding'}, {}), '(self._encoding)', False, 'from ansible_collections.nokia.grpc.plugins.connection.pb import gnmi_pb2\n'), ((655, 18, 655, 46), 'ansible_collections.nokia.grpc.plugins.connection.pb.gnmi_pb2.CapabilityRequest', 'gnmi_pb2.CapabilityRequest', ({}, {}), '()', False, 'from ansible_collections.nokia.grpc.plugins.connection.pb import gnmi_pb2\n'), ((662, 15, 662, 50), 'google.protobuf.json_format.MessageToJson', 'json_format.MessageToJson', ({(662, 41, 662, 49): 'response'}, {}), '(response)', False, 'from google.protobuf import json_format\n'), ((756, 17, 756, 52), 'google.protobuf.json_format.MessageToDict', 'json_format.MessageToDict', ({(756, 43, 756, 51): 'response'}, {}), '(response)', False, 'from google.protobuf import json_format\n'), ((257, 18, 260, 13), 'ansible.errors.AnsibleError', 'AnsibleError', ({(258, 16, 259, 51): '(\'protobuf is required to use gRPC connection type. \' +\n "Please run \'pip install protobuf\'")'}, {}), '(\'protobuf is required to use gRPC connection type. \' +\n "Please run \'pip install protobuf\'")', False, 'from ansible.errors import AnsibleConnectionFailure, AnsibleError\n'), ((262, 18, 265, 13), 'ansible.errors.AnsibleError', 'AnsibleError', ({(263, 16, 264, 49): '(\'grpcio is required to use gRPC connection type. \' +\n "Please run \'pip install grpcio\'")'}, {}), '(\'grpcio is required to use gRPC connection type. 
\' +\n "Please run \'pip install grpcio\'")', False, 'from ansible.errors import AnsibleConnectionFailure, AnsibleError\n'), ((295, 15, 295, 39), 'os.path.isfile', 'os.path.isfile', ({(295, 30, 295, 38): 'filename'}, {}), '(filename)', False, 'import os\n'), ((331, 18, 331, 77), 'ansible.errors.AnsibleConnectionFailure', 'AnsibleConnectionFailure', ({(331, 43, 331, 76): '"""grpc_environment must be a dict"""'}, {}), "('grpc_environment must be a dict')", False, 'from ansible.errors import AnsibleConnectionFailure, AnsibleError\n'), ((366, 20, 366, 57), 'grpc.ssl_channel_credentials', 'grpc.ssl_channel_credentials', ({}, {}), '(**certs)', False, 'import grpc\n'), ((367, 28, 367, 85), 'grpc.secure_channel', 'grpc.secure_channel', (), '', False, 'import grpc\n'), ((370, 28, 370, 80), 'grpc.insecure_channel', 'grpc.insecure_channel', (), '', False, 'import grpc\n'), ((381, 22, 381, 50), 'ansible_collections.nokia.grpc.plugins.connection.pb.gnmi_pb2.CapabilityRequest', 'gnmi_pb2.CapabilityRequest', ({}, {}), '()', False, 'from ansible_collections.nokia.grpc.plugins.connection.pb import gnmi_pb2\n'), ((434, 28, 434, 82), 're.split', 're.split', ({(434, 37, 434, 74): '"""/(?=(?:[^\\\\[\\\\]]|\\\\[[^\\\\[\\\\]]+\\\\])*$)"""', (434, 76, 434, 81): 'xpath'}, {}), "('/(?=(?:[^\\\\[\\\\]]|\\\\[[^\\\\[\\\\]]+\\\\])*$)', xpath)", False, 'import re\n'), ((625, 20, 625, 50), 'google.protobuf.json_format.MessageToDict', 'json_format.MessageToDict', ({(625, 46, 625, 49): 'msg'}, {}), '(msg)', False, 'from google.protobuf import json_format\n'), ((693, 47, 693, 68), 'ansible_collections.nokia.grpc.plugins.connection.pb.gnmi_pb2.GetRequest', 'gnmi_pb2.GetRequest', ({}, {}), '()', False, 'from ansible_collections.nokia.grpc.plugins.connection.pb import gnmi_pb2\n'), ((748, 47, 748, 68), 'ansible_collections.nokia.grpc.plugins.connection.pb.gnmi_pb2.SetRequest', 'gnmi_pb2.SetRequest', ({}, {}), '()', False, 'from ansible_collections.nokia.grpc.plugins.connection.pb import gnmi_pb2\n'), ((807, 62, 807, 89), 'ansible_collections.nokia.grpc.plugins.connection.pb.gnmi_pb2.SubscribeRequest', 'gnmi_pb2.SubscribeRequest', ({}, {}), '()', False, 'from ansible_collections.nokia.grpc.plugins.connection.pb import gnmi_pb2\n'), ((289, 27, 289, 55), 'os.path.expanduser', 'os.path.expanduser', ({(289, 46, 289, 54): 'filename'}, {}), '(filename)', False, 'import os\n'), ((304, 22, 306, 21), 'ansible.errors.AnsibleConnectionFailure', 'AnsibleConnectionFailure', ({(305, 24, 305, 69): "('Cert/keys file %s does not exist' % filename)"}, {}), "('Cert/keys file %s does not exist' % filename)", False, 'from ansible.errors import AnsibleConnectionFailure, AnsibleError\n'), ((361, 22, 361, 85), 'ansible.errors.AnsibleConnectionFailure', 'AnsibleConnectionFailure', ({(361, 47, 361, 84): '"""grpc_channel_options must be a dict"""'}, {}), "('grpc_channel_options must be a dict')", False, 'from ansible.errors import AnsibleConnectionFailure, AnsibleError\n'), ((388, 15, 388, 51), 'ansible_collections.nokia.grpc.plugins.connection.pb.gnmi_pb2.Encoding.Value', 'gnmi_pb2.Encoding.Value', ({(388, 39, 388, 50): '"""JSON_IETF"""'}, {}), "('JSON_IETF')", False, 'from ansible_collections.nokia.grpc.plugins.connection.pb import gnmi_pb2\n'), ((396, 22, 396, 123), 'ansible.errors.AnsibleConnectionFailure', 'AnsibleConnectionFailure', ({(396, 47, 396, 122): '("Incompatible encoding \'%s\' requested (JSON or JSON_IETF)" % self._encoding)'}, {}), '(\n "Incompatible encoding \'%s\' requested (JSON or JSON_IETF)" % self._encoding\n )', False, 'from 
ansible.errors import AnsibleConnectionFailure, AnsibleError\n'), ((437, 24, 437, 50), 're.findall', 're.findall', ({(437, 35, 437, 46): '"""\\\\[(.*?)\\\\]"""', (437, 48, 437, 49): 'e'}, {}), "('\\\\[(.*?)\\\\]', e)", False, 'import re\n'), ((493, 30, 493, 66), 'base64.b64decode', 'base64.b64decode', ({(493, 47, 493, 65): "val['jsonIetfVal']"}, {}), "(val['jsonIetfVal'])", False, 'import base64\n'), ((661, 18, 661, 52), 'ansible.errors.AnsibleConnectionFailure', 'AnsibleConnectionFailure', ({(661, 43, 661, 51): "('%s' % e)"}, {}), "('%s' % e)", False, 'from ansible.errors import AnsibleConnectionFailure, AnsibleError\n'), ((699, 18, 699, 52), 'ansible.errors.AnsibleConnectionFailure', 'AnsibleConnectionFailure', ({(699, 43, 699, 51): "('%s' % e)"}, {}), "('%s' % e)", False, 'from ansible.errors import AnsibleConnectionFailure, AnsibleError\n'), ((701, 41, 701, 76), 'google.protobuf.json_format.MessageToDict', 'json_format.MessageToDict', ({(701, 67, 701, 75): 'response'}, {}), '(response)', False, 'from google.protobuf import json_format\n'), ((702, 15, 702, 43), 'json.dumps', 'json.dumps', (), '', False, 'import json\n'), ((754, 18, 754, 52), 'ansible.errors.AnsibleConnectionFailure', 'AnsibleConnectionFailure', ({(754, 43, 754, 51): "('%s' % e)"}, {}), "('%s' % e)", False, 'from ansible.errors import AnsibleConnectionFailure, AnsibleError\n'), ((763, 15, 763, 43), 'json.dumps', 'json.dumps', (), '', False, 'import json\n'), ((831, 15, 831, 43), 'json.dumps', 'json.dumps', (), '', False, 'import json\n'), ((390, 17, 390, 48), 'ansible_collections.nokia.grpc.plugins.connection.pb.gnmi_pb2.Encoding.Value', 'gnmi_pb2.Encoding.Value', ({(390, 41, 390, 47): '"""JSON"""'}, {}), "('JSON')", False, 'from ansible_collections.nokia.grpc.plugins.connection.pb import gnmi_pb2\n'), ((393, 22, 393, 108), 'ansible.errors.AnsibleConnectionFailure', 'AnsibleConnectionFailure', ({(393, 47, 393, 107): '"""No compatible supported encoding found (JSON or JSON_IETF)"""'}, {}), "(\n 'No compatible supported encoding found (JSON or JSON_IETF)')", False, 'from ansible.errors import AnsibleConnectionFailure, AnsibleError\n'), ((476, 33, 476, 49), 'json.dumps', 'json.dumps', ({(476, 44, 476, 48): 'data'}, {}), '(data)', False, 'import json\n'), ((495, 30, 495, 62), 'base64.b64decode', 'base64.b64decode', ({(495, 47, 495, 61): "val['jsonVal']"}, {}), "(val['jsonVal'])", False, 'import base64\n'), ((535, 29, 535, 52), 'json.dumps', 'json.dumps', ({(535, 40, 535, 51): "elem['key']"}, {}), "(elem['key'])", False, 'import json\n'), ((612, 29, 612, 57), 'json.dumps', 'json.dumps', ({(612, 40, 612, 56): "leaf_elem['key']"}, {}), "(leaf_elem['key'])", False, 'import json\n'), ((815, 29, 815, 64), 'google.protobuf.json_format.MessageToDict', 'json_format.MessageToDict', ({(815, 55, 815, 63): 'response'}, {}), '(response)', False, 'from google.protobuf import json_format\n'), ((829, 22, 829, 56), 'ansible.errors.AnsibleConnectionFailure', 'AnsibleConnectionFailure', ({(829, 47, 829, 55): "('%s' % e)"}, {}), "('%s' % e)", False, 'from ansible.errors import AnsibleConnectionFailure, AnsibleError\n'), ((292, 38, 292, 67), 'os.path.join', 'os.path.join', ({(292, 51, 292, 56): 'entry', (292, 58, 292, 66): 'filename'}, {}), '(entry, filename)', False, 'import os\n'), ((293, 35, 293, 64), 'os.path.join', 'os.path.join', ({(293, 48, 293, 53): 'entry', (293, 55, 293, 63): 'filename'}, {}), '(entry, filename)', False, 'import os\n'), ((300, 26, 302, 21), 'ansible.errors.AnsibleConnectionFailure', 'AnsibleConnectionFailure', ({(301, 
24, 301, 80): "('Failed to read cert/keys file %s: %s' % (filename, exc))"}, {}), "('Failed to read cert/keys file %s: %s' % (filename,\n exc))", False, 'from ansible.errors import AnsibleConnectionFailure, AnsibleError\n'), ((497, 107, 497, 122), 'json.dumps', 'json.dumps', ({(497, 118, 497, 121): 'val'}, {}), '(val)', False, 'import json\n'), ((582, 33, 582, 56), 'json.dumps', 'json.dumps', ({(582, 44, 582, 55): "elem['key']"}, {}), "(elem['key'])", False, 'import json\n'), ((824, 26, 824, 86), 'ansible.errors.AnsibleConnectionFailure', 'AnsibleConnectionFailure', ({(824, 51, 824, 85): '"""gNMI ONCE Subscription timed out"""'}, {}), "('gNMI ONCE Subscription timed out')", False, 'from ansible.errors import AnsibleConnectionFailure, AnsibleError\n')] |
tbeckham/eutester | testcases/cloud_admin/services_up_test.py | 1440187150ce284bd87147e71ac7f0fda194b4d9 | #!/usr/bin/python
# Software License Agreement (BSD License)
#
# Copyright (c) 2009-2011, Eucalyptus Systems, Inc.
# All rights reserved.
#
# Redistribution and use of this software in source and binary forms, with or
# without modification, are permitted provided that the following conditions
# are met:
#
# Redistributions of source code must retain the above
# copyright notice, this list of conditions and the
# following disclaimer.
#
# Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other
# materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Author: clarkmatthew
import eucaops
from eutester.eutestcase import EutesterTestCase
import time
class MyTestCase(EutesterTestCase):
def __init__(self, config_file=None, password=None):
self.setuptestcase()
self.setup_parser()
self.parser.add_argument("--timeout", default=600)
self.get_args()
def clean_method(self):
self.debug('No clean_method defined for this test')
pass
def wait_for_services_operational(self, timeout=None):
"""
Definition:
Test attempts to query the state of a subset of core services. The test will continue to poll the system
until it finds an ENABLED instance of each service. In the HA case it will wait for an ENABLED and DISABLED
instance of each.
"""
timeout= timeout or self.args.timeout
last_err = ""
elapsed = 0
start = time.time()
self.tester = None
while (not self.tester and elapsed < timeout):
elapsed = int(time.time() - start)
self.status('Attempting to create tester object. Elapsed:' + str(elapsed))
try:
self.tester = eucaops.Eucaops(config_file=self.args.config_file, password=self.args.password)
except Exception, e:
tb = eucaops.Eucaops.get_traceback()
last_err = str(tb) + "\n" + str(e)
print 'Services not up because of: ' + last_err + '\n'
if not self.tester:
raise Exception(str(last_err) + 'Could not create tester object after elapsed:' + str(elapsed))
timeout = timeout - elapsed
self.status('starting wait for all services operational, timeout:' + str(timeout))
self.tester.service_manager.wait_for_all_services_operational(timeout)
self.status('All services are up')
self.tester.service_manager.print_services_list()
if __name__ == "__main__":
testcase = MyTestCase()
### Use the list of tests passed from config/command line to determine what subset of tests to run
### or use a predefined list "VolumeTagging", "InstanceTagging", "SnapshotTagging", "ImageTagging"
list = testcase.args.tests or ["wait_for_services_operational"]
### Convert test suite methods to EutesterUnitTest objects
unit_list = [ ]
for test in list:
unit_list.append( testcase.create_testunit_by_name(test) )
### Run the EutesterUnitTest objects, dont worry about clean on exit until we need it for this method
result = testcase.run_test_case_list(unit_list,clean_on_exit=False)
exit(result)
| [] |
jules552/ProjetISN | intValues.py | 20da3572b59af25a166022bc2f5b25d46add2650 | MAP = 1
SPEED = 1.5
VELOCITYRESET = 6
WIDTH = 1280
HEIGHT = 720
X = WIDTH / 2 - 50
Y = HEIGHT / 2 - 50
MOUSER = 325
TICKRATES = 120
nfc = False
raspberry = False | [] |
while1618/DailyCodingProblem | April/Apr_25_2019/builder.py | 187909f78281828da543439646cdf52d64c2bd0c | # This problem was asked by Facebook.
#
# A builder is looking to build a row of N houses that can be of K different colors.
# He has a goal of minimizing cost while ensuring that no two neighboring houses are of the same color.
#
# Given an N by K matrix where the nth row and kth column represents the cost to build the nth house with kth color,
# return the minimum cost which achieves this goal.
| [] |
pfnet-research/bayesgrad | experiments/delaney/plot.py | 5db613391777b20b7a367c274804f0b736991b0a | import argparse
import numpy as np
import os
import sys
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
from saliency.visualizer.smiles_visualizer import SmilesVisualizer
def visualize(dir_path):
parent_dir = os.path.dirname(dir_path)
saliency_vanilla = np.load(os.path.join(dir_path, "saliency_vanilla.npy"))
saliency_smooth = np.load(os.path.join(dir_path, "saliency_smooth.npy"))
saliency_bayes = np.load(os.path.join(dir_path, "saliency_bayes.npy"))
visualizer = SmilesVisualizer()
os.makedirs(os.path.join(parent_dir, "result_vanilla"), exist_ok=True)
os.makedirs(os.path.join(parent_dir, "result_smooth"), exist_ok=True)
os.makedirs(os.path.join(parent_dir, "result_bayes"), exist_ok=True)
test_idx = np.load(os.path.join(dir_path, "test_idx.npy"))
answer = np.load(os.path.join(dir_path, "answer.npy"))
output = np.load(os.path.join(dir_path, "output.npy"))
smiles_all = np.load(os.path.join(parent_dir, "smiles.npy"))
def calc_range(saliency):
vmax = float('-inf')
vmin = float('inf')
for v in saliency:
vmax = max(vmax, np.max(v))
vmin = min(vmin, np.min(v))
return vmin, vmax
v_range_vanilla = calc_range(saliency_vanilla)
v_range_smooth = calc_range(saliency_smooth)
v_range_bayes = calc_range(saliency_bayes)
def get_scaler(v_range):
def scaler(saliency_):
saliency = np.copy(saliency_)
minv, maxv = v_range
if maxv == minv:
saliency = np.zeros_like(saliency)
else:
pos = saliency >= 0.0
saliency[pos] = saliency[pos]/maxv
nega = saliency < 0.0
saliency[nega] = saliency[nega]/(np.abs(minv))
return saliency
return scaler
scaler_vanilla = get_scaler(v_range_vanilla)
scaler_smooth = get_scaler(v_range_smooth)
scaler_bayes = get_scaler(v_range_bayes)
def color(x):
if x > 0:
# Red for positive value
return 1., 1. - x, 1. - x
else:
# Blue for negative value
x *= -1
return 1. - x, 1. - x, 1.
for i, id in enumerate(test_idx):
smiles = smiles_all[id]
out = output[i]
ans = answer[i]
# legend = "t:{}, p:{}".format(ans, out)
legend = ''
ext = '.png' # '.svg'
# visualizer.visualize(
# saliency_vanilla[id], smiles, save_filepath=os.path.join(parent_dir, "result_vanilla", str(id) + ext),
# visualize_ratio=1.0, legend=legend, scaler=scaler_vanilla, color_fn=color)
# visualizer.visualize(
# saliency_smooth[id], smiles, save_filepath=os.path.join(parent_dir, "result_smooth", str(id) + ext),
# visualize_ratio=1.0, legend=legend, scaler=scaler_smooth, color_fn=color)
visualizer.visualize(
saliency_bayes[id], smiles, save_filepath=os.path.join(parent_dir, "result_bayes", str(id) + ext),
visualize_ratio=1.0, legend=legend, scaler=scaler_bayes, color_fn=color)
def plot_result(prediction, answer, save_filepath='result.png'):
plt.scatter(prediction, answer, marker='.')
plt.plot([-100, 100], [-100, 100], c='r')
max_v = max(np.max(prediction), np.max(answer))
min_v = min(np.min(prediction), np.min(answer))
plt.xlim([min_v-0.1, max_v+0.1])
plt.xlabel("prediction")
plt.ylim([min_v-0.1, max_v+0.1])
plt.ylabel("ground truth")
plt.savefig(save_filepath)
plt.close()
def main():
parser = argparse.ArgumentParser(
description='Regression with own dataset.')
parser.add_argument('--dirpath', '-d', type=str, default='./results/M_30_3_32_32')
args = parser.parse_args()
path = args.dirpath
n_split = 5
output = []
answer = []
for i in range(n_split):
suffix = str(i) + "-" + str(n_split)
output.append(np.load(os.path.join(path, suffix, "output.npy")))
answer.append(np.load(os.path.join(path, suffix, "answer.npy")))
output = np.concatenate(output)
answer = np.concatenate(answer)
plot_result(output, answer, save_filepath=os.path.join(path, "result.png"))
for i in range(n_split):
suffix = str(i) + "-" + str(n_split)
print(suffix)
visualize(os.path.join(path, suffix))
if __name__ == '__main__':
main()
| [((8, 0, 8, 21), 'matplotlib.use', 'matplotlib.use', ({(8, 15, 8, 20): '"""agg"""'}, {}), "('agg')", False, 'import matplotlib\n'), ((16, 17, 16, 42), 'os.path.dirname', 'os.path.dirname', ({(16, 33, 16, 41): 'dir_path'}, {}), '(dir_path)', False, 'import os\n'), ((21, 17, 21, 35), 'saliency.visualizer.smiles_visualizer.SmilesVisualizer', 'SmilesVisualizer', ({}, {}), '()', False, 'from saliency.visualizer.smiles_visualizer import SmilesVisualizer\n'), ((90, 4, 90, 47), 'matplotlib.pyplot.scatter', 'plt.scatter', (), '', True, 'import matplotlib.pyplot as plt\n'), ((91, 4, 91, 45), 'matplotlib.pyplot.plot', 'plt.plot', (), '', True, 'import matplotlib.pyplot as plt\n'), ((94, 4, 94, 36), 'matplotlib.pyplot.xlim', 'plt.xlim', ({(94, 13, 94, 35): '[min_v - 0.1, max_v + 0.1]'}, {}), '([min_v - 0.1, max_v + 0.1])', True, 'import matplotlib.pyplot as plt\n'), ((95, 4, 95, 28), 'matplotlib.pyplot.xlabel', 'plt.xlabel', ({(95, 15, 95, 27): '"""prediction"""'}, {}), "('prediction')", True, 'import matplotlib.pyplot as plt\n'), ((96, 4, 96, 36), 'matplotlib.pyplot.ylim', 'plt.ylim', ({(96, 13, 96, 35): '[min_v - 0.1, max_v + 0.1]'}, {}), '([min_v - 0.1, max_v + 0.1])', True, 'import matplotlib.pyplot as plt\n'), ((97, 4, 97, 30), 'matplotlib.pyplot.ylabel', 'plt.ylabel', ({(97, 15, 97, 29): '"""ground truth"""'}, {}), "('ground truth')", True, 'import matplotlib.pyplot as plt\n'), ((98, 4, 98, 30), 'matplotlib.pyplot.savefig', 'plt.savefig', ({(98, 16, 98, 29): 'save_filepath'}, {}), '(save_filepath)', True, 'import matplotlib.pyplot as plt\n'), ((99, 4, 99, 15), 'matplotlib.pyplot.close', 'plt.close', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((103, 13, 104, 51), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (), '', False, 'import argparse\n'), ((115, 13, 115, 35), 'numpy.concatenate', 'np.concatenate', ({(115, 28, 115, 34): 'output'}, {}), '(output)', True, 'import numpy as np\n'), ((116, 13, 116, 35), 'numpy.concatenate', 'np.concatenate', ({(116, 28, 116, 34): 'answer'}, {}), '(answer)', True, 'import numpy as np\n'), ((17, 31, 17, 77), 'os.path.join', 'os.path.join', ({(17, 44, 17, 52): 'dir_path', (17, 54, 17, 76): '"""saliency_vanilla.npy"""'}, {}), "(dir_path, 'saliency_vanilla.npy')", False, 'import os\n'), ((18, 30, 18, 75), 'os.path.join', 'os.path.join', ({(18, 43, 18, 51): 'dir_path', (18, 53, 18, 74): '"""saliency_smooth.npy"""'}, {}), "(dir_path, 'saliency_smooth.npy')", False, 'import os\n'), ((19, 29, 19, 73), 'os.path.join', 'os.path.join', ({(19, 42, 19, 50): 'dir_path', (19, 52, 19, 72): '"""saliency_bayes.npy"""'}, {}), "(dir_path, 'saliency_bayes.npy')", False, 'import os\n'), ((22, 16, 22, 58), 'os.path.join', 'os.path.join', ({(22, 29, 22, 39): 'parent_dir', (22, 41, 22, 57): '"""result_vanilla"""'}, {}), "(parent_dir, 'result_vanilla')", False, 'import os\n'), ((23, 16, 23, 57), 'os.path.join', 'os.path.join', ({(23, 29, 23, 39): 'parent_dir', (23, 41, 23, 56): '"""result_smooth"""'}, {}), "(parent_dir, 'result_smooth')", False, 'import os\n'), ((24, 16, 24, 56), 'os.path.join', 'os.path.join', ({(24, 29, 24, 39): 'parent_dir', (24, 41, 24, 55): '"""result_bayes"""'}, {}), "(parent_dir, 'result_bayes')", False, 'import os\n'), ((26, 23, 26, 61), 'os.path.join', 'os.path.join', ({(26, 36, 26, 44): 'dir_path', (26, 46, 26, 60): '"""test_idx.npy"""'}, {}), "(dir_path, 'test_idx.npy')", False, 'import os\n'), ((27, 21, 27, 57), 'os.path.join', 'os.path.join', ({(27, 34, 27, 42): 'dir_path', (27, 44, 27, 56): '"""answer.npy"""'}, {}), "(dir_path, 
'answer.npy')", False, 'import os\n'), ((28, 21, 28, 57), 'os.path.join', 'os.path.join', ({(28, 34, 28, 42): 'dir_path', (28, 44, 28, 56): '"""output.npy"""'}, {}), "(dir_path, 'output.npy')", False, 'import os\n'), ((30, 25, 30, 63), 'os.path.join', 'os.path.join', ({(30, 38, 30, 48): 'parent_dir', (30, 50, 30, 62): '"""smiles.npy"""'}, {}), "(parent_dir, 'smiles.npy')", False, 'import os\n'), ((92, 16, 92, 34), 'numpy.max', 'np.max', ({(92, 23, 92, 33): 'prediction'}, {}), '(prediction)', True, 'import numpy as np\n'), ((92, 36, 92, 50), 'numpy.max', 'np.max', ({(92, 43, 92, 49): 'answer'}, {}), '(answer)', True, 'import numpy as np\n'), ((93, 16, 93, 34), 'numpy.min', 'np.min', ({(93, 23, 93, 33): 'prediction'}, {}), '(prediction)', True, 'import numpy as np\n'), ((93, 36, 93, 50), 'numpy.min', 'np.min', ({(93, 43, 93, 49): 'answer'}, {}), '(answer)', True, 'import numpy as np\n'), ((46, 23, 46, 41), 'numpy.copy', 'np.copy', ({(46, 31, 46, 40): 'saliency_'}, {}), '(saliency_)', True, 'import numpy as np\n'), ((118, 46, 118, 78), 'os.path.join', 'os.path.join', ({(118, 59, 118, 63): 'path', (118, 65, 118, 77): '"""result.png"""'}, {}), "(path, 'result.png')", False, 'import os\n'), ((122, 18, 122, 44), 'os.path.join', 'os.path.join', ({(122, 31, 122, 35): 'path', (122, 37, 122, 43): 'suffix'}, {}), '(path, suffix)', False, 'import os\n'), ((11, 64, 11, 89), 'os.path.abspath', 'os.path.abspath', ({(11, 80, 11, 88): '__file__'}, {}), '(__file__)', False, 'import os\n'), ((36, 29, 36, 38), 'numpy.max', 'np.max', ({(36, 36, 36, 37): 'v'}, {}), '(v)', True, 'import numpy as np\n'), ((37, 29, 37, 38), 'numpy.min', 'np.min', ({(37, 36, 37, 37): 'v'}, {}), '(v)', True, 'import numpy as np\n'), ((49, 27, 49, 50), 'numpy.zeros_like', 'np.zeros_like', ({(49, 41, 49, 49): 'saliency'}, {}), '(saliency)', True, 'import numpy as np\n'), ((113, 30, 113, 70), 'os.path.join', 'os.path.join', ({(113, 43, 113, 47): 'path', (113, 49, 113, 55): 'suffix', (113, 57, 113, 69): '"""output.npy"""'}, {}), "(path, suffix, 'output.npy')", False, 'import os\n'), ((114, 30, 114, 70), 'os.path.join', 'os.path.join', ({(114, 43, 114, 47): 'path', (114, 49, 114, 55): 'suffix', (114, 57, 114, 69): '"""answer.npy"""'}, {}), "(path, suffix, 'answer.npy')", False, 'import os\n'), ((54, 49, 54, 61), 'numpy.abs', 'np.abs', ({(54, 56, 54, 60): 'minv'}, {}), '(minv)', True, 'import numpy as np\n')] |
btybug/main.albumbugs | public/js/tinymice/plugins/bootstrap/jquery-file-tree/connectors/jqueryFileTree.py | 2343466bae7ee3d8941abc4c9684667cccc3e103 | #
# jQuery File Tree
# Python/Django connector script
# By Martin Skou
#
import os
import urllib
from django.http import HttpResponse  # missing import: dirlist() returns an HttpResponse
def dirlist(request):
r=['<ul class="jqueryFileTree" style="display: none;">']
try:
r=['<ul class="jqueryFileTree" style="display: none;">']
d=urllib.unquote(request.POST.get('dir','c:\\temp'))
for f in os.listdir(d):
ff=os.path.join(d,f)
if os.path.isdir(ff):
r.append('<li class="directory collapsed"><a href="#" rel="%s/">%s</a></li>' % (ff,f))
else:
e=os.path.splitext(f)[1][1:] # get .ext and remove dot
r.append('<li class="file ext_%s"><a href="#" rel="%s">%s</a></li>' % (e,ff,f))
r.append('</ul>')
except Exception,e:
r.append('Could not load directory: %s' % str(e))
r.append('</ul>')
return HttpResponse(''.join(r))
| [] |
harvineet/gpytorch | gpytorch/lazy/chol_lazy_tensor.py | 8aa8f1a4298ef61cfea9c4d11c75576a84ffcc3e | #!/usr/bin/env python3
import torch
from .lazy_tensor import LazyTensor
from .root_lazy_tensor import RootLazyTensor
from .. import settings
class CholLazyTensor(RootLazyTensor):
def __init__(self, chol):
if isinstance(chol, LazyTensor): # Probably is an instance of NonLazyTensor
chol = chol.evaluate()
# Check that we have a lower triangular matrix
if settings.debug.on():
mask = torch.ones(chol.shape[-2:], dtype=chol.dtype, device=chol.device).triu_(1)
if torch.max(chol.mul(mask)).item() > 1e-3 and torch.equal(chol, chol):
raise RuntimeError("CholLazyVaraiable should take a lower-triangular matrix in the constructor.")
# Run super constructor
super(CholLazyTensor, self).__init__(chol)
@property
def _chol(self):
if not hasattr(self, "_chol_memo"):
self._chol_memo = self.root.evaluate()
return self._chol_memo
@property
def _chol_diag(self):
if not hasattr(self, "_chol_diag_memo"):
self._chol_diag_memo = self._chol.diagonal(dim1=-2, dim2=-1).clone()
return self._chol_diag_memo
def inv_quad_logdet(self, inv_quad_rhs=None, logdet=False, reduce_inv_quad=True):
inv_quad_term = None
logdet_term = None
if inv_quad_rhs is not None:
inv_quad_term, _ = super(CholLazyTensor, self).inv_quad_logdet(
inv_quad_rhs, logdet=False, reduce_inv_quad=reduce_inv_quad
)
if logdet:
logdet_term = self._chol_diag.pow(2).log().sum(-1)
return inv_quad_term, logdet_term
| [((18, 59, 18, 82), 'torch.equal', 'torch.equal', ({(18, 71, 18, 75): 'chol', (18, 77, 18, 81): 'chol'}, {}), '(chol, chol)', False, 'import torch\n'), ((17, 19, 17, 84), 'torch.ones', 'torch.ones', (), '', False, 'import torch\n')] |
ksmit799/POTCO-PS | pirates/audio/AmbientManagerBase.py | 520d38935ae8df4b452c733a82c94dddac01e275 | # File: A (Python 2.4)
from pandac.PandaModules import AudioSound
from direct.directnotify import DirectNotifyGlobal
from direct.interval.IntervalGlobal import LerpFunc, Sequence
from direct.showbase.DirectObject import DirectObject
class AmbientSound:
notify = DirectNotifyGlobal.directNotify.newCategory('AmbientSound')
def __init__(self, path, masterAmbientVolume, loop = True, isMusic = False):
self.isMusic = isMusic
if self.isMusic:
self.sfx = loader.loadMusic(path)
else:
self.sfx = loader.loadSfx(path)
self.path = path
self.loop = loop
self.setLoop(loop)
self.setVolume(0)
self.masterAmbientVolume = masterAmbientVolume
self.reloadAttempt = 0
self.curPriority = 0
self.duration = 0
self.finalVolume = 0
self.startVolume = 0
self.activeInterval = None
def unload(self):
if self.activeInterval:
self.activeInterval.finish()
del self.activeInterval
self.sfx.stop()
del self.sfx
def play(self):
self.sfx.play()
def getVolume(self):
return self.sfx.getVolume()
def setVolume(self, vol):
self.sfx.setVolume(vol)
def getLoop(self):
return self.sfx.getLoop()
def setLoop(self, loop):
self.sfx.setLoop(loop)
def set3dAttributes(self, *args):
self.sfx.set3dAttributes(*args)
def requestChangeVolume(self, duration, finalVolume, priority):
if priority < self.curPriority:
return None
self.curPriority = priority
if not self.sfx.getActive():
if self.reloadAttempt < 1:
self.reloadAttempt += 1
if self.isMusic:
self.sfx = loader.loadMusic(self.path)
else:
self.sfx = loader.loadSfx(self.path)
if self.sfx:
self.sfx.setLoop(self.loop)
self.duration = duration
self.startVolume = self.getVolume()
self.finalVolume = finalVolume
if self.activeInterval:
self.activeInterval.pause()
del self.activeInterval
self.activeInterval = Sequence(LerpFunc(self.changeVolumeTask, fromData = self.startVolume, toData = self.finalVolume, duration = self.duration))
self.activeInterval.start()
def changeMasterAmbientVolume(self, newMasterAmbientVolume):
if not self.masterAmbientVolume == newMasterAmbientVolume:
self.masterAmbientVolume = newMasterAmbientVolume
if self.activeInterval and self.activeInterval.isPlaying():
pass
elif self.sfx.status() == 2:
newVol = float(self.finalVolume) * self.masterAmbientVolume
self.sfx.setVolume(newVol)
def changeVolumeTask(self, t):
curVolume = t * self.masterAmbientVolume
self.sfx.setVolume(curVolume)
if not hasattr(self, 'reportCounter'):
self.reportCounter = 0
self.reportCounter += 1
if self.reportCounter % 10 == 0:
pass
if curVolume > 0 and self.sfx.status() == 1:
self.sfx.play()
if curVolume <= 0 and self.sfx.status() == 2:
self.sfx.stop()
self.curPriority = 0
class AmbientManagerBase(DirectObject):
notify = DirectNotifyGlobal.directNotify.newCategory('AmbientManagerBase')
def __init__(self):
self.ambientDict = { }
self.masterAmbientVolume = 1.0
def load(self, name, path, looping = True, isMusic = False):
retval = False
if self.ambientDict.has_key(name):
if self.ambientDict[name].path == path:
self.notify.warning('ambient name=%s path=%s already loaded' % (name, path))
else:
self.notify.warning('ambient name %s is already bound to %s' % self.ambientDict[name].path)
else:
newAmbient = AmbientSound(path, self.masterAmbientVolume, looping, isMusic)
self.ambientDict[name] = newAmbient
def unload(self, name):
if self.ambientDict.has_key(name):
self.ambientDict[name].unload()
del self.ambientDict[name]
else:
self.notify.warning('music: %s not in ambientDict' % name)
def requestFadeIn(self, name, duration = 5, finalVolume = 1.0, priority = 0):
self.requestChangeVolume(name, duration, finalVolume, priority)
def requestFadeOut(self, name, duration = 5, finalVolume = 0.0, priority = 0):
self.requestChangeVolume(name, duration, finalVolume, priority)
def requestChangeVolume(self, name, duration, finalVolume, priority = 0):
if self.ambientDict.has_key(name):
self.ambientDict[name].requestChangeVolume(duration, finalVolume, priority)
def delete(self):
for name in self.ambientDict.keys():
self.ambientDict[name].unload()
self.ambientDict = { }
def silence(self):
for name in self.ambientDict.keys():
self.ambientDict[name].requestChangeVolume(0.0, 0.0, priority = 1)
def changeMasterAmbientVolume(self, newMasterAmbientVolume):
if not newMasterAmbientVolume == self.masterAmbientVolume:
self.masterAmbientVolume = newMasterAmbientVolume
for name in self.ambientDict.keys():
self.ambientDict[name].changeMasterAmbientVolume(self.masterAmbientVolume)
| [((9, 13, 9, 72), 'direct.directnotify.DirectNotifyGlobal.directNotify.newCategory', 'DirectNotifyGlobal.directNotify.newCategory', ({(9, 57, 9, 71): '"""AmbientSound"""'}, {}), "('AmbientSound')", False, 'from direct.directnotify import DirectNotifyGlobal\n'), ((124, 13, 124, 78), 'direct.directnotify.DirectNotifyGlobal.directNotify.newCategory', 'DirectNotifyGlobal.directNotify.newCategory', ({(124, 57, 124, 77): '"""AmbientManagerBase"""'}, {}), "('AmbientManagerBase')", False, 'from direct.directnotify import DirectNotifyGlobal\n'), ((87, 39, 87, 152), 'direct.interval.IntervalGlobal.LerpFunc', 'LerpFunc', (), '', False, 'from direct.interval.IntervalGlobal import LerpFunc, Sequence\n')] |
jmgc/pyston | test/tests/import_test.py | 9f672c1bbb75710ac17dd3d9107da05c8e9e8e8f | import import_target
print import_target.x
import import_target
import_target.foo()
c = import_target.C()
print import_target.import_nested_target.y
import_target.import_nested_target.bar()
d = import_target.import_nested_target.D()
print "testing importfrom:"
from import_target import x as z
print z
import_nested_target = 15
from import_nested_target import y
print "This should still be 15:",import_nested_target
import import_nested_target
print import_nested_target.__name__
print import_nested_target.y
import_target.import_nested_target.y = import_nested_target.y + 1
print import_nested_target.y
print z
print y
print __name__
print __import__("import_target") is import_target
import sys
import _multiprocessing
del _multiprocessing
del sys.modules["_multiprocessing"]
import _multiprocessing
import time
del time
del sys.modules["time"]
import time
print time.sleep(0)
| [] |
HEXRD/hexrdgui | hexrd/ui/matrix_editor.py | d92915463f237e0521b5830655ae73bc5bcd9f80 | import numpy as np
from PySide2.QtCore import QSignalBlocker, Signal
from PySide2.QtWidgets import QGridLayout, QWidget
from hexrd.ui.scientificspinbox import ScientificDoubleSpinBox
DEFAULT_ENABLED_STYLE_SHEET = 'background-color: white'
DEFAULT_DISABLED_STYLE_SHEET = 'background-color: #F0F0F0'
INVALID_MATRIX_STYLE_SHEET = 'background-color: red'
class MatrixEditor(QWidget):
data_modified = Signal()
def __init__(self, data, parent=None):
super().__init__(parent)
self._data = data
# If this is not None, then only the elements present in the
# list (as (i, j) items) will be enabled.
self._enabled_elements = None
# If this is set, it will be called every time the data updates
# to apply equality constraints.
self._apply_constraints_func = None
# Whether or not the matrix is currently invalid
self.matrix_invalid = False
# Reason the matrix is currently invalid
self.matrix_invalid_reason = ''
self.setLayout(QGridLayout())
self.add_spin_boxes()
self.update_gui()
def add_spin_boxes(self):
layout = self.layout()
for i in range(self.rows):
for j in range(self.cols):
sb = self.create_spin_box()
layout.addWidget(sb, i, j)
def create_spin_box(self):
sb = ScientificDoubleSpinBox()
sb.setKeyboardTracking(False)
sb.valueChanged.connect(self.element_modified)
return sb
def element_modified(self):
self.update_data()
@property
def data(self):
return self._data
@data.setter
def data(self, v):
if not np.array_equal(self._data, v):
if self._data.shape != v.shape:
msg = (f'Shape {v.shape} does not match original shape '
f'{self._data.shape}')
raise AttributeError(msg)
self._data = v
self.reset_disabled_values()
self.update_gui()
@property
def rows(self):
return self.data.shape[0]
@property
def cols(self):
return self.data.shape[1]
def update_data(self):
self.data[:] = self.gui_data
self.apply_constraints()
self.data_modified.emit()
def update_gui(self):
self.gui_data = self.data
@property
def gui_data(self):
row_range = range(self.rows)
col_range = range(self.cols)
return [[self.gui_value(i, j) for j in col_range] for i in row_range]
@gui_data.setter
def gui_data(self, v):
blockers = [QSignalBlocker(w) for w in self.all_widgets] # noqa: F841
for i in range(self.rows):
for j in range(self.cols):
self.set_gui_value(i, j, v[i][j])
@property
def all_widgets(self):
row_range = range(self.rows)
col_range = range(self.cols)
return [self.widget(i, j) for j in col_range for i in row_range]
@property
def enabled_widgets(self):
widgets = []
for i in range(self.rows):
for j in range(self.cols):
if (i, j) in self.enabled_elements:
widgets.append(self.widget(i, j))
return widgets
def widget(self, row, col):
return self.layout().itemAtPosition(row, col).widget()
def gui_value(self, row, col):
return self.widget(row, col).value()
def set_gui_value(self, row, col, val):
self.widget(row, col).setValue(val)
def set_matrix_invalid(self, s):
self.matrix_invalid = True
self.matrix_invalid_reason = s
self.update_tooltips()
self.update_enable_states()
def set_matrix_valid(self):
self.matrix_invalid = False
self.matrix_invalid_reason = ''
self.update_tooltips()
self.update_enable_states()
def update_tooltips(self):
if self.matrix_invalid:
tooltip = self.matrix_invalid_reason
else:
tooltip = ''
for w in self.enabled_widgets:
w.setToolTip(tooltip)
def update_enable_states(self):
enable_all = self.enabled_elements is None
for i in range(self.rows):
for j in range(self.cols):
w = self.widget(i, j)
enable = enable_all or (i, j) in self.enabled_elements
w.setEnabled(enable)
enabled_str = 'enabled' if enable else 'disabled'
style_sheet = getattr(self, f'{enabled_str}_style_sheet')
w.setStyleSheet(style_sheet)
def reset_disabled_values(self):
# Resets all disabled values to zero, then applies constraints
for i in range(self.rows):
for j in range(self.cols):
if not self.widget(i, j).isEnabled():
self.data[i, j] = 0.0
self.apply_constraints()
self.update_gui()
@property
def enabled_style_sheet(self):
if self.matrix_invalid:
return INVALID_MATRIX_STYLE_SHEET
return DEFAULT_ENABLED_STYLE_SHEET
@property
def disabled_style_sheet(self):
return DEFAULT_DISABLED_STYLE_SHEET
@property
def enabled_elements(self):
return self._enabled_elements
@enabled_elements.setter
def enabled_elements(self, v):
if self._enabled_elements != v:
self._enabled_elements = v
self.update_enable_states()
self.reset_disabled_values()
@property
def apply_constraints_func(self):
return self._apply_constraints_func
@apply_constraints_func.setter
def apply_constraints_func(self, v):
if self._apply_constraints_func != v:
self._apply_constraints_func = v
self.apply_constraints()
def apply_constraints(self):
if (func := self.apply_constraints_func) is None:
return
func(self.data)
self.update_gui()
if __name__ == '__main__':
import sys
from PySide2.QtWidgets import QApplication, QDialog, QVBoxLayout
if len(sys.argv) < 2:
sys.exit('Usage: <script> <matrix_size>')
rows, cols = [int(x) for x in sys.argv[1].split('x')]
data = np.ones((rows, cols))
app = QApplication(sys.argv)
dialog = QDialog()
layout = QVBoxLayout()
dialog.setLayout(layout)
editor = MatrixEditor(data)
layout.addWidget(editor)
# def constraints(x):
# x[2][2] = x[1][1]
# editor.enabled_elements = [(1, 1), (3, 4)]
# editor.apply_constraints_func = constraints
def on_data_modified():
print(f'Data modified: {editor.data}')
editor.data_modified.connect(on_data_modified)
dialog.finished.connect(app.quit)
dialog.show()
app.exec_()
| [((16, 20, 16, 28), 'PySide2.QtCore.Signal', 'Signal', ({}, {}), '()', False, 'from PySide2.QtCore import QSignalBlocker, Signal\n'), ((219, 11, 219, 32), 'numpy.ones', 'np.ones', ({(219, 19, 219, 31): '(rows, cols)'}, {}), '((rows, cols))', True, 'import numpy as np\n'), ((221, 10, 221, 32), 'PySide2.QtWidgets.QApplication', 'QApplication', ({(221, 23, 221, 31): 'sys.argv'}, {}), '(sys.argv)', False, 'from PySide2.QtWidgets import QApplication, QDialog, QVBoxLayout\n'), ((222, 13, 222, 22), 'PySide2.QtWidgets.QDialog', 'QDialog', ({}, {}), '()', False, 'from PySide2.QtWidgets import QApplication, QDialog, QVBoxLayout\n'), ((223, 13, 223, 26), 'PySide2.QtWidgets.QVBoxLayout', 'QVBoxLayout', ({}, {}), '()', False, 'from PySide2.QtWidgets import QApplication, QDialog, QVBoxLayout\n'), ((49, 13, 49, 38), 'hexrd.ui.scientificspinbox.ScientificDoubleSpinBox', 'ScientificDoubleSpinBox', ({}, {}), '()', False, 'from hexrd.ui.scientificspinbox import ScientificDoubleSpinBox\n'), ((216, 8, 216, 49), 'sys.exit', 'sys.exit', ({(216, 17, 216, 48): '"""Usage: <script> <matrix_size>"""'}, {}), "('Usage: <script> <matrix_size>')", False, 'import sys\n'), ((37, 23, 37, 36), 'PySide2.QtWidgets.QGridLayout', 'QGridLayout', ({}, {}), '()', False, 'from PySide2.QtWidgets import QGridLayout, QWidget\n'), ((63, 15, 63, 44), 'numpy.array_equal', 'np.array_equal', ({(63, 30, 63, 40): 'self._data', (63, 42, 63, 43): 'v'}, {}), '(self._data, v)', True, 'import numpy as np\n'), ((97, 20, 97, 37), 'PySide2.QtCore.QSignalBlocker', 'QSignalBlocker', ({(97, 35, 97, 36): 'w'}, {}), '(w)', False, 'from PySide2.QtCore import QSignalBlocker, Signal\n')] |
harshp8l/deep-learning-lang-detection | data/train/python/990aa6cbf16ed34f5030609c03ab43c0f0ed8c2aurls.py | 2a54293181c1c2b1a2b840ddee4d4d80177efb33 | from django.conf.urls.defaults import *
urlpatterns = patterns('pytorque.views',
(r'^$', 'central_dispatch_view'),
(r'^browse$', 'central_dispatch_view'),
(r'^monitor$', 'central_dispatch_view'),
(r'^submit$', 'central_dispatch_view'),
(r'^stat$', 'central_dispatch_view'),
(r'^login/$', 'login'),
(r'^logout/$', 'logout'),
# (r'^$', 'central_dispatch_view'),
(r'^user/(?P<username>\w{0,50})/$', 'index'),
(r'^user/(?P<username>\w{0,50})/browse$', 'browse'),
# (r'^user/(?P<username>\w{0,50})/monitor', 'monitor'),
# (r'^user/(?P<username>\w{0,50})/submit', 'submit'),
# (r'^user/(?P<username>\w{0,50})/stat', 'stat'),
)
| [] |
yedivanseven/CheckerPy | checkerpy/types/all/typedtuple.py | 04612086d25fecdd0b20ca0a050db8620c437b0e | from typing import Tuple, Union, Any, Sequence
from collections import deque, defaultdict, OrderedDict
from ...validators.one import JustLen
from ...functional.mixins import CompositionClassMixin
from ..one import Just
dict_keys = type({}.keys())
odict_keys = type(OrderedDict({}).keys())
dict_values = type({}.values())
odict_values = type(OrderedDict({}).values())
dict_items = type({}.items())
odict_items = type(OrderedDict({}).items())
NAMED_TYPES = (frozenset, slice, range,
deque, defaultdict, OrderedDict,
dict_keys, dict_values, dict_items,
odict_keys, odict_values, odict_items)
TypesT = Union[type, Sequence[type]]
class TypedTuple(CompositionClassMixin):
"""Checks for different type(s) of each element in a defined-length tuple.
Parameters
----------
value : tuple
The tuple to check the length and element types of.
name : str, optional
The name of the tuple to check the length and the element type(s) of.
Defaults to None.
types : tuple(type), tuple(tuple(type))
Tuple of the length to check for with either one type for each element
of `value` or a tuple of types for each element of `value`. Use the
ellipsis literal ... to skip type checking of the tuple element at
that position.
Returns
-------
tuple
The tuple passed in.
Methods
-------
o(callable) : CompositionOf
Daisy-chains the tuple length and type checker to another `callable`,
returning the functional composition of both. The argument `types` is
        passed through to the `TypedTuple` checker when calling the
composition.
Raises
------
WrongTypeError
If `value` is not a tuple or if any of its elements do not have (one
of) the permitted type(s).
LenError
If the tuple passed in does not have the same length as `types` or
if the type specification does not have a meaningful length.
TypeError
If `types` is not a tuple or any of its elements are not of type type.
See Also
--------
All, JustLen, CompositionOf
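    Examples
    --------
    Illustrative only; the call simply follows the Parameters and Returns above:
    >>> TypedTuple(('a', 1), 'pair', types=(str, int))
    ('a', 1)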
"""
def __new__(cls, value: tuple, name=None, *, types=(), **kwargs) -> tuple:
cls.__name = str(name) if name is not None else ''
cls.__string = cls.__name or str(value)
types, length = cls.__valid(types)
value = JustLen.JustTuple(value, name=name, length=length)
for index, element in enumerate(value):
if not cls.__is_or_contains_ellipsis(types[index]):
element_name = f'element {index} in tuple {cls.__string}'
_ = Just(types[index])(element, name=element_name)
return value
@classmethod
def __valid(cls, types: Sequence[TypesT]) -> Tuple[TypesT, int]:
if type(types) not in (tuple, list, deque):
message = cls.__wrong_type_message_for(types)
raise TypeError(message)
return types, len(types)
@staticmethod
def __wrong_type_message_for(types: Any) -> str:
type_name = type(types).__name__
if isinstance(types, NAMED_TYPES):
of_type = type_name
else:
of_type = f'{type_name} like {types}'
return f'Type of types argument must be tuple, not {of_type}!'
@staticmethod
def __is_or_contains_ellipsis(types: TypesT) -> bool:
is_ellipsis = types is ...
try:
contains_ellipsis = ... in types
except TypeError:
contains_ellipsis = False
return is_ellipsis or contains_ellipsis
| [((8, 18, 8, 33), 'collections.OrderedDict', 'OrderedDict', ({(8, 30, 8, 32): '{}'}, {}), '({})', False, 'from collections import deque, defaultdict, OrderedDict\n'), ((10, 20, 10, 35), 'collections.OrderedDict', 'OrderedDict', ({(10, 32, 10, 34): '{}'}, {}), '({})', False, 'from collections import deque, defaultdict, OrderedDict\n'), ((12, 19, 12, 34), 'collections.OrderedDict', 'OrderedDict', ({(12, 31, 12, 33): '{}'}, {}), '({})', False, 'from collections import deque, defaultdict, OrderedDict\n')] |
iswenhao/Panda-Sandbox | data/analyzer/linux/lib/common/abstracts.py | a04069d404cb4326ff459e703f14625dc45759ed | # Copyright (C) 2014-2016 Cuckoo Foundation.
# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org
# See the file 'docs/LICENSE' for copying permission.
from lib.api.process import Process
from lib.exceptions.exceptions import CuckooPackageError
class Package(object):
"""Base abstract analysis package."""
PATHS = []
def __init__(self, options={}):
"""@param options: options dict."""
self.options = options
self.pids = []
def set_pids(self, pids):
"""Update list of monitored PIDs in the package context.
@param pids: list of pids.
"""
self.pids = pids
def start(self):
"""Run analysis package.
@raise NotImplementedError: this method is abstract.
"""
raise NotImplementedError
def check(self):
"""Check."""
return True
def execute(self, cmd):
"""Start an executable for analysis.
        @param cmd: command to execute (executable path and its arguments)
@return: process pid
"""
p = Process()
if not p.execute(cmd):
raise CuckooPackageError("Unable to execute the initial process, "
"analysis aborted.")
return p.pid
def package_files(self):
"""A list of files to upload to host.
The list should be a list of tuples (<path on guest>, <name of file in package_files folder>).
(package_files is a folder that will be created in analysis folder).
"""
return None
def finish(self):
"""Finish run.
If specified to do so, this method dumps the memory of
all running processes.
"""
if self.options.get("procmemdump"):
for pid in self.pids:
p = Process(pid=pid)
p.dump_memory()
return True
def get_pids(self):
return []
class Auxiliary(object):
priority = 0
def get_pids(self):
return []
| [((39, 12, 39, 21), 'lib.api.process.Process', 'Process', ({}, {}), '()', False, 'from lib.api.process import Process\n'), ((41, 18, 42, 57), 'lib.exceptions.exceptions.CuckooPackageError', 'CuckooPackageError', ({(41, 37, 42, 56): '"""Unable to execute the initial process, analysis aborted."""'}, {}), "('Unable to execute the initial process, analysis aborted.')", False, 'from lib.exceptions.exceptions import CuckooPackageError\n'), ((60, 20, 60, 36), 'lib.api.process.Process', 'Process', (), '', False, 'from lib.api.process import Process\n')] |
Raspeanut/rdmo | rdmo/options/apps.py | 9f785010a499c372a2f8368ccf76d2ea4150adcb | from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class OptionsConfig(AppConfig):
name = 'rdmo.options'
verbose_name = _('Options')
| [((7, 19, 7, 31), 'django.utils.translation.ugettext_lazy', '_', ({(7, 21, 7, 30): '"""Options"""'}, {}), "('Options')", True, 'from django.utils.translation import ugettext_lazy as _\n')] |
sirodoht/mal | main/admin.py | 82295e1b6a03cd9a7ee1357ca3f5be7a26d0ffe9 | from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from main import models
class Admin(UserAdmin):
list_display = ("id", "username", "email", "date_joined", "last_login")
admin.site.register(models.User, Admin)
class DocumentAdmin(admin.ModelAdmin):
list_display = ("id", "title")
admin.site.register(models.Document, DocumentAdmin)
| [((11, 0, 11, 39), 'django.contrib.admin.site.register', 'admin.site.register', ({(11, 20, 11, 31): 'models.User', (11, 33, 11, 38): 'Admin'}, {}), '(models.User, Admin)', False, 'from django.contrib import admin\n'), ((18, 0, 18, 51), 'django.contrib.admin.site.register', 'admin.site.register', ({(18, 20, 18, 35): 'models.Document', (18, 37, 18, 50): 'DocumentAdmin'}, {}), '(models.Document, DocumentAdmin)', False, 'from django.contrib import admin\n')] |
QualiSystems/cloudshell-cli | cloudshell/cli/configurator.py | 9a38ff37e91e7798511e860603f5a8a79b782472 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
from abc import ABCMeta, abstractmethod
from collections import defaultdict
from cloudshell.cli.factory.session_factory import (
CloudInfoAccessKeySessionFactory,
GenericSessionFactory,
SessionFactory,
)
from cloudshell.cli.service.cli import CLI
from cloudshell.cli.session.ssh_session import SSHSession
from cloudshell.cli.session.telnet_session import TelnetSession
ABC = ABCMeta("ABC", (object,), {"__slots__": ()})
if sys.version_info >= (3, 0):
from functools import lru_cache
else:
from functools32 import lru_cache
class CLIServiceConfigurator(object):
REGISTERED_SESSIONS = (CloudInfoAccessKeySessionFactory(SSHSession), TelnetSession)
"""Using factories instead of """
def __init__(
self,
resource_config,
logger,
cli=None,
registered_sessions=None,
reservation_context=None,
):
"""Initialize CLI service configurator.
:param cloudshell.shell.standards.resource_config_generic_models.GenericCLIConfig resource_config: # noqa: E501
:param logging.Logger logger:
:param cloudshell.cli.service.cli.CLI cli:
:param registered_sessions: Session types and order
:param cloudshell.shell.core.driver_context.ReservationContextDetails reservation_context:
"""
self._cli = cli or CLI()
self._resource_config = resource_config
self._logger = logger
self._registered_sessions = registered_sessions or self.REGISTERED_SESSIONS
self._reservation_context = reservation_context
@property
def _cli_type(self):
"""Connection type property [ssh|telnet|console|auto]."""
return self._resource_config.cli_connection_type
@property
@lru_cache()
def _session_dict(self):
session_dict = defaultdict(list)
for sess in self._registered_sessions:
session_dict[sess.SESSION_TYPE.lower()].append(sess)
return session_dict
def initialize_session(self, session):
if not isinstance(session, SessionFactory):
session = GenericSessionFactory(session)
return session.init_session(
self._resource_config, self._logger, self._reservation_context
)
def _defined_sessions(self):
return [
self.initialize_session(sess)
for sess in self._session_dict.get(
self._cli_type.lower(), self._registered_sessions
)
]
def get_cli_service(self, command_mode):
"""Use cli.get_session to open CLI connection and switch into required mode.
:param CommandMode command_mode: operation mode, can be
default_mode/enable_mode/config_mode/etc.
:return: created session in provided mode
:rtype: cloudshell.cli.service.session_pool_context_manager.SessionPoolContextManager # noqa: E501
"""
return self._cli.get_session(
self._defined_sessions(), command_mode, self._logger
)
class AbstractModeConfigurator(ABC, CLIServiceConfigurator):
"""Used by shells to run enable/config command."""
@property
@abstractmethod
def enable_mode(self):
pass
@property
@abstractmethod
def config_mode(self):
pass
def enable_mode_service(self):
return self.get_cli_service(self.enable_mode)
def config_mode_service(self):
return self.get_cli_service(self.config_mode)
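# Illustrative usage sketch (resource_config, logger and command_mode are assumed to
# be supplied by the calling shell driver; they are not defined in this module):
#     configurator = CLIServiceConfigurator(resource_config, logger)
#     with configurator.get_cli_service(command_mode) as cli_service:
#         ...  # run commands in the selected mode via the returned session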
| [((16, 6, 16, 50), 'abc.ABCMeta', 'ABCMeta', ({(16, 14, 16, 19): '"""ABC"""', (16, 21, 16, 30): '(object,)', (16, 32, 16, 49): "{'__slots__': ()}"}, {}), "('ABC', (object,), {'__slots__': ()})", False, 'from abc import ABCMeta, abstractmethod\n'), ((56, 5, 56, 16), 'functools32.lru_cache', 'lru_cache', ({}, {}), '()', False, 'from functools32 import lru_cache\n'), ((25, 27, 25, 71), 'cloudshell.cli.factory.session_factory.CloudInfoAccessKeySessionFactory', 'CloudInfoAccessKeySessionFactory', ({(25, 60, 25, 70): 'SSHSession'}, {}), '(SSHSession)', False, 'from cloudshell.cli.factory.session_factory import CloudInfoAccessKeySessionFactory, GenericSessionFactory, SessionFactory\n'), ((58, 23, 58, 40), 'collections.defaultdict', 'defaultdict', ({(58, 35, 58, 39): 'list'}, {}), '(list)', False, 'from collections import defaultdict\n'), ((44, 27, 44, 32), 'cloudshell.cli.service.cli.CLI', 'CLI', ({}, {}), '()', False, 'from cloudshell.cli.service.cli import CLI\n'), ((65, 22, 65, 52), 'cloudshell.cli.factory.session_factory.GenericSessionFactory', 'GenericSessionFactory', ({(65, 44, 65, 51): 'session'}, {}), '(session)', False, 'from cloudshell.cli.factory.session_factory import CloudInfoAccessKeySessionFactory, GenericSessionFactory, SessionFactory\n')] |
quynhanh-ngx/pytago | examples/ingenerator.py | de976ad8d85702ae665e97978bc4a75d282c857f | def main():
n = 111
gen = (n * 7 for x in range(10))
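    # `777 in gen` lazily consumes the generator until a match; since n * 7 == 777,
    # the very first element matches and "Yes!" is printed.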
if 777 in gen:
print("Yes!")
if __name__ == '__main__':
main()
| [] |
amittkSharma/scs_predictive_maintenance | source/packages/scs-pm-server/src/python-server/app.py | 105a218b47d81d02f7e799287bd1e9279db452ce | import json
import logging
import joblib
import pandas as pd
from flask import Flask, jsonify, request
from flask_cors import CORS, cross_origin
app = Flask(__name__)
CORS(app)
@app.route("/api/machinePrediction", methods=['GET'])
def home():
incomingMachineId = request.args.get('machineId')
modelPath = request.args.get('modelPath')
column_names = request.args.get('columnNames')
data_points = request.args.get('dataPoints')
app.logger.info('Received machine id is %s', incomingMachineId)
app.logger.info('Model path is %s', modelPath)
json_object = json.loads(data_points)
pairs = json_object.items()
vitals_value = []
for key, value in pairs:
vitals_value.append(value)
modelObj = joblib.load(modelPath)
data = [vitals_value]
df = pd.DataFrame(data=data, columns = column_names)
modelPrediction = modelObj.predict(df)
app.logger.info('Model prediction is: %s', modelPrediction)
return jsonify(modelPrediction[0])
if __name__ == "__main__":
app.run(debug=True)
# To start the server
# python3 app.py
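# Example request (illustrative; parameter values and their encoding are assumptions):
#   GET /api/machinePrediction?machineId=<id>&modelPath=<path to .joblib model>
#       &columnNames=<feature column names>&dataPoints=<JSON object of vital values>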
| [((9, 6, 9, 21), 'flask.Flask', 'Flask', ({(9, 12, 9, 20): '__name__'}, {}), '(__name__)', False, 'from flask import Flask, jsonify, request\n'), ((10, 0, 10, 9), 'flask_cors.CORS', 'CORS', ({(10, 5, 10, 8): 'app'}, {}), '(app)', False, 'from flask_cors import CORS, cross_origin\n'), ((14, 24, 14, 53), 'flask.request.args.get', 'request.args.get', ({(14, 41, 14, 52): '"""machineId"""'}, {}), "('machineId')", False, 'from flask import Flask, jsonify, request\n'), ((15, 16, 15, 45), 'flask.request.args.get', 'request.args.get', ({(15, 33, 15, 44): '"""modelPath"""'}, {}), "('modelPath')", False, 'from flask import Flask, jsonify, request\n'), ((16, 19, 16, 50), 'flask.request.args.get', 'request.args.get', ({(16, 36, 16, 49): '"""columnNames"""'}, {}), "('columnNames')", False, 'from flask import Flask, jsonify, request\n'), ((17, 18, 17, 48), 'flask.request.args.get', 'request.args.get', ({(17, 35, 17, 47): '"""dataPoints"""'}, {}), "('dataPoints')", False, 'from flask import Flask, jsonify, request\n'), ((22, 18, 22, 41), 'json.loads', 'json.loads', ({(22, 29, 22, 40): 'data_points'}, {}), '(data_points)', False, 'import json\n'), ((29, 15, 29, 37), 'joblib.load', 'joblib.load', ({(29, 27, 29, 36): 'modelPath'}, {}), '(modelPath)', False, 'import joblib\n'), ((32, 9, 32, 56), 'pandas.DataFrame', 'pd.DataFrame', (), '', True, 'import pandas as pd\n'), ((37, 11, 37, 38), 'flask.jsonify', 'jsonify', ({(37, 19, 37, 37): 'modelPrediction[0]'}, {}), '(modelPrediction[0])', False, 'from flask import Flask, jsonify, request\n')] |
ess-dmsc/nexus-constructor | tests/test_remove_from_dependee_chain.py | ae0026c48f8f2d4d88d3ff00e45cb6591983853b | import pytest
from PySide2.QtGui import QVector3D
from nexus_constructor.model.component import Component
from nexus_constructor.model.dataset import Dataset
from nexus_constructor.model.instrument import Instrument
from nexus_constructor.model.value_type import ValueTypes
values = Dataset(
name="scalar_value",
type=ValueTypes.DOUBLE,
size=[1],
values=90.0,
parent_node=None,
)
@pytest.fixture
def instrument():
return Instrument(parent_node=None)
def test_remove_from_beginning_1(instrument):
component1 = Component("component1", instrument)
rot = component1.add_rotation(
name="rotation1",
axis=QVector3D(1.0, 0.0, 0.0),
angle=values.values,
values=values,
)
component1.depends_on = rot
assert len(rot.dependents) == 1
rot.remove_from_dependee_chain()
assert component1.depends_on is None
def test_remove_from_beginning_2(instrument):
component1 = Component("component1", instrument)
rot1 = component1.add_rotation(
name="rotation1",
axis=QVector3D(1.0, 0.0, 0.0),
angle=values.values,
values=values,
)
rot2 = component1.add_rotation(
name="rotation2",
axis=QVector3D(1.0, 0.0, 0.0),
angle=values.values,
values=values,
)
component1.depends_on = rot1
rot1.depends_on = rot2
assert len(rot2.dependents) == 1
rot1.remove_from_dependee_chain()
assert len(rot2.dependents) == 1
assert rot2.dependents[0] == component1
assert component1.depends_on == rot2
def test_remove_from_beginning_3(instrument):
component1 = Component("component1", instrument)
component2 = Component("component2", instrument)
rot1 = component1.add_rotation(
name="rotation1",
axis=QVector3D(1.0, 0.0, 0.0),
angle=values.values,
values=values,
)
rot2 = component2.add_rotation(
name="rotation2",
axis=QVector3D(1.0, 0.0, 0.0),
angle=values.values,
values=values,
)
component1.depends_on = rot1
component2.depends_on = rot2
rot1.depends_on = rot2
assert len(rot2.dependents) == 2
rot1.remove_from_dependee_chain()
assert len(rot2.dependents) == 2
assert component2 in rot2.dependents
assert component1 in rot2.dependents
assert component1.depends_on == rot2
assert component1.transforms.link.linked_component == component2
def test_remove_from_middle():
component1 = Component("component1", instrument)
component2 = Component("component2", instrument)
component3 = Component("component3", instrument)
rot1 = component1.add_rotation(
name="rotation1",
axis=QVector3D(1.0, 0.0, 0.0),
angle=values.values,
values=values,
)
rot2 = component2.add_rotation(
name="rotation2",
axis=QVector3D(1.0, 0.0, 0.0),
angle=values.values,
values=values,
)
rot3 = component3.add_rotation(
name="rotation3",
axis=QVector3D(1.0, 0.0, 0.0),
angle=values.values,
values=values,
)
component1.depends_on = rot1
component2.depends_on = rot2
component3.depends_on = rot3
component1.transforms.link.linked_component = component2
component2.transforms.link.linked_component = component3
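    # Removing the middle transform should splice rot2 out of the chain, so rot1
    # (and component1's link) depend directly on rot3 afterwards.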
rot2.remove_from_dependee_chain()
assert rot1.depends_on == rot3
assert component1.transforms.link.linked_component == component3
assert rot1 in rot3.dependents
assert component3 in rot3.dependents
def test_remove_from_end():
component1 = Component("component1", instrument)
rot1 = component1.add_rotation(
name="rotation1",
axis=QVector3D(1.0, 0.0, 0.0),
angle=values.values,
values=values,
)
rot2 = component1.add_rotation(
name="rotation2",
axis=QVector3D(1.0, 0.0, 0.0),
angle=values.values,
values=values,
depends_on=rot1,
)
rot3 = component1.add_rotation(
name="rotation3",
axis=QVector3D(1.0, 0.0, 0.0),
angle=values.values,
values=values,
depends_on=rot2,
)
component1.depends_on = rot3
rot1.remove_from_dependee_chain()
assert rot1.depends_on is None
assert not rot1.dependents
assert component1.depends_on == rot3
assert rot2.dependents[0] == rot3
assert len(component1.transforms) == 2
| [((9, 9, 15, 1), 'nexus_constructor.model.dataset.Dataset', 'Dataset', (), '', False, 'from nexus_constructor.model.dataset import Dataset\n'), ((20, 11, 20, 39), 'nexus_constructor.model.instrument.Instrument', 'Instrument', (), '', False, 'from nexus_constructor.model.instrument import Instrument\n'), ((24, 17, 24, 52), 'nexus_constructor.model.component.Component', 'Component', ({(24, 27, 24, 39): '"""component1"""', (24, 41, 24, 51): 'instrument'}, {}), "('component1', instrument)", False, 'from nexus_constructor.model.component import Component\n'), ((38, 17, 38, 52), 'nexus_constructor.model.component.Component', 'Component', ({(38, 27, 38, 39): '"""component1"""', (38, 41, 38, 51): 'instrument'}, {}), "('component1', instrument)", False, 'from nexus_constructor.model.component import Component\n'), ((61, 17, 61, 52), 'nexus_constructor.model.component.Component', 'Component', ({(61, 27, 61, 39): '"""component1"""', (61, 41, 61, 51): 'instrument'}, {}), "('component1', instrument)", False, 'from nexus_constructor.model.component import Component\n'), ((62, 17, 62, 52), 'nexus_constructor.model.component.Component', 'Component', ({(62, 27, 62, 39): '"""component2"""', (62, 41, 62, 51): 'instrument'}, {}), "('component2', instrument)", False, 'from nexus_constructor.model.component import Component\n'), ((88, 17, 88, 52), 'nexus_constructor.model.component.Component', 'Component', ({(88, 27, 88, 39): '"""component1"""', (88, 41, 88, 51): 'instrument'}, {}), "('component1', instrument)", False, 'from nexus_constructor.model.component import Component\n'), ((89, 17, 89, 52), 'nexus_constructor.model.component.Component', 'Component', ({(89, 27, 89, 39): '"""component2"""', (89, 41, 89, 51): 'instrument'}, {}), "('component2', instrument)", False, 'from nexus_constructor.model.component import Component\n'), ((90, 17, 90, 52), 'nexus_constructor.model.component.Component', 'Component', ({(90, 27, 90, 39): '"""component3"""', (90, 41, 90, 51): 'instrument'}, {}), "('component3', instrument)", False, 'from nexus_constructor.model.component import Component\n'), ((123, 17, 123, 52), 'nexus_constructor.model.component.Component', 'Component', ({(123, 27, 123, 39): '"""component1"""', (123, 41, 123, 51): 'instrument'}, {}), "('component1', instrument)", False, 'from nexus_constructor.model.component import Component\n'), ((27, 13, 27, 37), 'PySide2.QtGui.QVector3D', 'QVector3D', ({(27, 23, 27, 26): '1.0', (27, 28, 27, 31): '0.0', (27, 33, 27, 36): '0.0'}, {}), '(1.0, 0.0, 0.0)', False, 'from PySide2.QtGui import QVector3D\n'), ((41, 13, 41, 37), 'PySide2.QtGui.QVector3D', 'QVector3D', ({(41, 23, 41, 26): '1.0', (41, 28, 41, 31): '0.0', (41, 33, 41, 36): '0.0'}, {}), '(1.0, 0.0, 0.0)', False, 'from PySide2.QtGui import QVector3D\n'), ((47, 13, 47, 37), 'PySide2.QtGui.QVector3D', 'QVector3D', ({(47, 23, 47, 26): '1.0', (47, 28, 47, 31): '0.0', (47, 33, 47, 36): '0.0'}, {}), '(1.0, 0.0, 0.0)', False, 'from PySide2.QtGui import QVector3D\n'), ((65, 13, 65, 37), 'PySide2.QtGui.QVector3D', 'QVector3D', ({(65, 23, 65, 26): '1.0', (65, 28, 65, 31): '0.0', (65, 33, 65, 36): '0.0'}, {}), '(1.0, 0.0, 0.0)', False, 'from PySide2.QtGui import QVector3D\n'), ((71, 13, 71, 37), 'PySide2.QtGui.QVector3D', 'QVector3D', ({(71, 23, 71, 26): '1.0', (71, 28, 71, 31): '0.0', (71, 33, 71, 36): '0.0'}, {}), '(1.0, 0.0, 0.0)', False, 'from PySide2.QtGui import QVector3D\n'), ((93, 13, 93, 37), 'PySide2.QtGui.QVector3D', 'QVector3D', ({(93, 23, 93, 26): '1.0', (93, 28, 93, 31): '0.0', (93, 33, 93, 36): '0.0'}, {}), 
'(1.0, 0.0, 0.0)', False, 'from PySide2.QtGui import QVector3D\n'), ((99, 13, 99, 37), 'PySide2.QtGui.QVector3D', 'QVector3D', ({(99, 23, 99, 26): '1.0', (99, 28, 99, 31): '0.0', (99, 33, 99, 36): '0.0'}, {}), '(1.0, 0.0, 0.0)', False, 'from PySide2.QtGui import QVector3D\n'), ((105, 13, 105, 37), 'PySide2.QtGui.QVector3D', 'QVector3D', ({(105, 23, 105, 26): '1.0', (105, 28, 105, 31): '0.0', (105, 33, 105, 36): '0.0'}, {}), '(1.0, 0.0, 0.0)', False, 'from PySide2.QtGui import QVector3D\n'), ((126, 13, 126, 37), 'PySide2.QtGui.QVector3D', 'QVector3D', ({(126, 23, 126, 26): '1.0', (126, 28, 126, 31): '0.0', (126, 33, 126, 36): '0.0'}, {}), '(1.0, 0.0, 0.0)', False, 'from PySide2.QtGui import QVector3D\n'), ((132, 13, 132, 37), 'PySide2.QtGui.QVector3D', 'QVector3D', ({(132, 23, 132, 26): '1.0', (132, 28, 132, 31): '0.0', (132, 33, 132, 36): '0.0'}, {}), '(1.0, 0.0, 0.0)', False, 'from PySide2.QtGui import QVector3D\n'), ((139, 13, 139, 37), 'PySide2.QtGui.QVector3D', 'QVector3D', ({(139, 23, 139, 26): '1.0', (139, 28, 139, 31): '0.0', (139, 33, 139, 36): '0.0'}, {}), '(1.0, 0.0, 0.0)', False, 'from PySide2.QtGui import QVector3D\n')] |
molspace/FastMVS_experiments | fastmvsnet/train1.py | b897015d77600687ca2addf99bb6a6f0de524e5f | #!/usr/bin/env python
import argparse
import os.path as osp
import logging
import time
import sys
sys.path.insert(0, osp.dirname(__file__) + '/..')
import torch
import torch.nn as nn
from fastmvsnet.config import load_cfg_from_file
from fastmvsnet.utils.io import mkdir
from fastmvsnet.utils.logger import setup_logger
from fastmvsnet.utils.torch_utils import set_random_seed
from fastmvsnet.model1 import build_pointmvsnet as build_model
from fastmvsnet.solver import build_optimizer, build_scheduler
from fastmvsnet.utils.checkpoint import Checkpointer
from fastmvsnet.dataset1 import build_data_loader
from fastmvsnet.utils.tensorboard_logger import TensorboardLogger
from fastmvsnet.utils.metric_logger import MetricLogger
from fastmvsnet.utils.file_logger import file_logger
def parse_args():
parser = argparse.ArgumentParser(description="PyTorch Fast-MVSNet Training")
parser.add_argument(
"--cfg",
dest="config_file",
default="",
metavar="FILE",
help="path to config file",
type=str,
)
parser.add_argument(
"opts",
help="Modify config options using the command-line",
default=None,
nargs=argparse.REMAINDER,
)
args = parser.parse_args()
return args
def train_model(model,
loss_fn,
metric_fn,
image_scales,
inter_scales,
isFlow,
data_loader,
optimizer,
curr_epoch,
tensorboard_logger,
log_period=1,
output_dir="",
):
logger = logging.getLogger("fastmvsnet.train")
meters = MetricLogger(delimiter=" ")
model.train()
end = time.time()
total_iteration = data_loader.__len__()
path_list = []
for iteration, data_batch in enumerate(data_loader):
data_time = time.time() - end
curr_ref_img_path = data_batch["ref_img_path"]
path_list.extend(curr_ref_img_path)
data_batch = {k: v.cuda(non_blocking=True) for k, v in data_batch.items() if isinstance(v, torch.Tensor)}
preds = model(data_batch, image_scales, inter_scales, isFlow)
optimizer.zero_grad()
loss_dict = loss_fn(preds, data_batch, isFlow)
metric_dict = metric_fn(preds, data_batch, isFlow)
losses = sum(loss_dict.values())
#print("LOSS DICT", loss_dict['coarse_loss'])
#print("LOSSES", loss_dict.values())
meters.update(loss=losses, **loss_dict, **metric_dict)
losses.backward()
# print(poop)
optimizer.step()
batch_time = time.time() - end
end = time.time()
meters.update(time=batch_time, data=data_time)
if iteration % log_period == 0:
logger.info(
meters.delimiter.join(
[
"EPOCH: {epoch:2d}",
"iter: {iter:4d}",
"{meters}",
"lr: {lr:.2e}",
"max mem: {memory:.0f}",
]
).format(
epoch=curr_epoch,
iter=iteration,
meters=str(meters),
lr=optimizer.param_groups[0]["lr"],
memory=torch.cuda.max_memory_allocated() / (1024.0 ** 2),
)
)
tensorboard_logger.add_scalars(loss_dict, curr_epoch * total_iteration + iteration, prefix="train")
tensorboard_logger.add_scalars(metric_dict, curr_epoch * total_iteration + iteration, prefix="train")
if iteration % (100 * log_period) == 0:
file_logger(data_batch, preds, curr_epoch * total_iteration + iteration, output_dir, prefix="train")
return meters
def validate_model(model,
loss_fn,
metric_fn,
image_scales,
inter_scales,
isFlow,
data_loader,
curr_epoch,
tensorboard_logger,
log_period=1,
output_dir="",
):
logger = logging.getLogger("fastmvsnet.validate")
meters = MetricLogger(delimiter=" ")
model.train()
end = time.time()
total_iteration = data_loader.__len__()
with torch.no_grad():
for iteration, data_batch in enumerate(data_loader):
data_time = time.time() - end
curr_ref_img_path = data_batch["ref_img_path"]
data_batch = {k: v.cuda(non_blocking=True) for k, v in data_batch.items() if isinstance(v, torch.Tensor)}
preds = model(data_batch, image_scales, inter_scales, isFlow)
loss_dict = loss_fn(preds, data_batch, isFlow)
metric_dict = metric_fn(preds, data_batch, isFlow)
losses = sum(loss_dict.values())
meters.update(loss=losses, **loss_dict, **metric_dict)
batch_time = time.time() - end
end = time.time()
meters.update(time=batch_time, data=data_time)
if iteration % log_period == 0:
logger.info(
meters.delimiter.join(
[
"EPOCH: {epoch:2d}",
"iter: {iter:4d}",
"{meters}",
]
).format(
epoch=curr_epoch,
iter=iteration,
meters=str(meters),
)
)
tensorboard_logger.add_scalars(meters.meters, curr_epoch * total_iteration + iteration, prefix="valid")
if iteration % (100 * log_period) == 0:
file_logger(data_batch, preds, curr_epoch * total_iteration + iteration, output_dir, prefix="valid")
return meters
def train(cfg, output_dir=""):
logger = logging.getLogger("fastmvsnet.trainer")
# build model
set_random_seed(cfg.RNG_SEED)
model, loss_fn, metric_fn = build_model(cfg)
logger.info("Build model:\n{}".format(str(model)))
model = nn.DataParallel(model).cuda()
# build optimizer
optimizer = build_optimizer(cfg, model)
# build lr scheduler
scheduler = build_scheduler(cfg, optimizer)
# build checkpointer
checkpointer = Checkpointer(model,
optimizer=optimizer,
scheduler=scheduler,
save_dir=output_dir,
logger=logger)
checkpoint_data = checkpointer.load(cfg.MODEL.WEIGHT, resume=cfg.AUTO_RESUME)
ckpt_period = cfg.TRAIN.CHECKPOINT_PERIOD
# build data loader
train_data_loader = build_data_loader(cfg, mode="train")
val_period = cfg.TRAIN.VAL_PERIOD
val_data_loader = build_data_loader(cfg, mode="val") if val_period > 0 else None
# build tensorboard logger (optionally by comment)
tensorboard_logger = TensorboardLogger(output_dir)
# train
max_epoch = cfg.SCHEDULER.MAX_EPOCH
start_epoch = checkpoint_data.get("epoch", 0)
best_metric_name = "best_{}".format(cfg.TRAIN.VAL_METRIC)
best_metric = checkpoint_data.get(best_metric_name, None)
logger.info("Start training from epoch {}".format(start_epoch))
for epoch in range(start_epoch, max_epoch):
cur_epoch = epoch + 1
scheduler.step()
start_time = time.time()
train_meters = train_model(model,
loss_fn,
metric_fn,
image_scales=cfg.MODEL.TRAIN.IMG_SCALES,
inter_scales=cfg.MODEL.TRAIN.INTER_SCALES,
isFlow=(cur_epoch > cfg.SCHEDULER.INIT_EPOCH),
data_loader=train_data_loader,
optimizer=optimizer,
curr_epoch=epoch,
tensorboard_logger=tensorboard_logger,
log_period=cfg.TRAIN.LOG_PERIOD,
output_dir=output_dir,
)
epoch_time = time.time() - start_time
logger.info("Epoch[{}]-Train {} total_time: {:.2f}s".format(
cur_epoch, train_meters.summary_str, epoch_time))
# checkpoint
if cur_epoch % ckpt_period == 0 or cur_epoch == max_epoch:
checkpoint_data["epoch"] = cur_epoch
checkpoint_data[best_metric_name] = best_metric
checkpointer.save("model_{:03d}".format(cur_epoch), **checkpoint_data)
# validate
if val_period < 1:
continue
if cur_epoch % val_period == 0 or cur_epoch == max_epoch:
val_meters = validate_model(model,
loss_fn,
metric_fn,
image_scales=cfg.MODEL.VAL.IMG_SCALES,
inter_scales=cfg.MODEL.VAL.INTER_SCALES,
isFlow=(cur_epoch > cfg.SCHEDULER.INIT_EPOCH),
data_loader=val_data_loader,
curr_epoch=epoch,
tensorboard_logger=tensorboard_logger,
log_period=cfg.TEST.LOG_PERIOD,
output_dir=output_dir,
)
logger.info("Epoch[{}]-Val {}".format(cur_epoch, val_meters.summary_str))
# best validation
cur_metric = val_meters.meters[cfg.TRAIN.VAL_METRIC].global_avg
if best_metric is None or cur_metric > best_metric:
best_metric = cur_metric
checkpoint_data["epoch"] = cur_epoch
checkpoint_data[best_metric_name] = best_metric
checkpointer.save("model_best", **checkpoint_data)
logger.info("Best val-{} = {}".format(cfg.TRAIN.VAL_METRIC, best_metric))
return model
def main():
args = parse_args()
num_gpus = torch.cuda.device_count()
cfg = load_cfg_from_file(args.config_file)
cfg.merge_from_list(args.opts)
cfg.freeze()
output_dir = cfg.OUTPUT_DIR
if output_dir:
config_path = osp.splitext(args.config_file)[0]
config_path = config_path.replace("configs", "outputs1")
output_dir = output_dir.replace('@', config_path)
mkdir(output_dir)
logger = setup_logger("fastmvsnet", output_dir, prefix="train")
logger.info("Using {} GPUs".format(num_gpus))
logger.info(args)
logger.info("Loaded configuration file {}".format(args.config_file))
logger.info("Running with config:\n{}".format(cfg))
train(cfg, output_dir)
if __name__ == "__main__":
main()
| [((26, 13, 26, 80), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (), '', False, 'import argparse\n'), ((59, 13, 59, 50), 'logging.getLogger', 'logging.getLogger', ({(59, 31, 59, 49): '"""fastmvsnet.train"""'}, {}), "('fastmvsnet.train')", False, 'import logging\n'), ((60, 13, 60, 41), 'fastmvsnet.utils.metric_logger.MetricLogger', 'MetricLogger', (), '', False, 'from fastmvsnet.utils.metric_logger import MetricLogger\n'), ((62, 10, 62, 21), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((130, 13, 130, 53), 'logging.getLogger', 'logging.getLogger', ({(130, 31, 130, 52): '"""fastmvsnet.validate"""'}, {}), "('fastmvsnet.validate')", False, 'import logging\n'), ((131, 13, 131, 41), 'fastmvsnet.utils.metric_logger.MetricLogger', 'MetricLogger', (), '', False, 'from fastmvsnet.utils.metric_logger import MetricLogger\n'), ((133, 10, 133, 21), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((174, 13, 174, 52), 'logging.getLogger', 'logging.getLogger', ({(174, 31, 174, 51): '"""fastmvsnet.trainer"""'}, {}), "('fastmvsnet.trainer')", False, 'import logging\n'), ((177, 4, 177, 33), 'fastmvsnet.utils.torch_utils.set_random_seed', 'set_random_seed', ({(177, 20, 177, 32): 'cfg.RNG_SEED'}, {}), '(cfg.RNG_SEED)', False, 'from fastmvsnet.utils.torch_utils import set_random_seed\n'), ((178, 32, 178, 48), 'fastmvsnet.model1.build_pointmvsnet', 'build_model', ({(178, 44, 178, 47): 'cfg'}, {}), '(cfg)', True, 'from fastmvsnet.model1 import build_pointmvsnet as build_model\n'), ((183, 16, 183, 43), 'fastmvsnet.solver.build_optimizer', 'build_optimizer', ({(183, 32, 183, 35): 'cfg', (183, 37, 183, 42): 'model'}, {}), '(cfg, model)', False, 'from fastmvsnet.solver import build_optimizer, build_scheduler\n'), ((186, 16, 186, 47), 'fastmvsnet.solver.build_scheduler', 'build_scheduler', ({(186, 32, 186, 35): 'cfg', (186, 37, 186, 46): 'optimizer'}, {}), '(cfg, optimizer)', False, 'from fastmvsnet.solver import build_optimizer, build_scheduler\n'), ((189, 19, 193, 46), 'fastmvsnet.utils.checkpoint.Checkpointer', 'Checkpointer', (), '', False, 'from fastmvsnet.utils.checkpoint import Checkpointer\n'), ((199, 24, 199, 60), 'fastmvsnet.dataset1.build_data_loader', 'build_data_loader', (), '', False, 'from fastmvsnet.dataset1 import build_data_loader\n'), ((204, 25, 204, 54), 'fastmvsnet.utils.tensorboard_logger.TensorboardLogger', 'TensorboardLogger', ({(204, 43, 204, 53): 'output_dir'}, {}), '(output_dir)', False, 'from fastmvsnet.utils.tensorboard_logger import TensorboardLogger\n'), ((272, 15, 272, 40), 'torch.cuda.device_count', 'torch.cuda.device_count', ({}, {}), '()', False, 'import torch\n'), ((274, 10, 274, 46), 'fastmvsnet.config.load_cfg_from_file', 'load_cfg_from_file', ({(274, 29, 274, 45): 'args.config_file'}, {}), '(args.config_file)', False, 'from fastmvsnet.config import load_cfg_from_file\n'), ((285, 13, 285, 67), 'fastmvsnet.utils.logger.setup_logger', 'setup_logger', (), '', False, 'from fastmvsnet.utils.logger import setup_logger\n'), ((7, 19, 7, 40), 'os.path.dirname', 'osp.dirname', ({(7, 31, 7, 39): '__file__'}, {}), '(__file__)', True, 'import os.path as osp\n'), ((88, 14, 88, 25), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((135, 9, 135, 24), 'torch.no_grad', 'torch.no_grad', ({}, {}), '()', False, 'import torch\n'), ((201, 22, 201, 56), 'fastmvsnet.dataset1.build_data_loader', 'build_data_loader', (), '', False, 'from fastmvsnet.dataset1 import build_data_loader\n'), ((215, 21, 215, 32), 'time.time', 'time.time', ({}, 
{}), '()', False, 'import time\n'), ((283, 8, 283, 25), 'fastmvsnet.utils.io.mkdir', 'mkdir', ({(283, 14, 283, 24): 'output_dir'}, {}), '(output_dir)', False, 'from fastmvsnet.utils.io import mkdir\n'), ((68, 20, 68, 31), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((87, 21, 87, 32), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((113, 12, 113, 112), 'fastmvsnet.utils.file_logger.file_logger', 'file_logger', (), '', False, 'from fastmvsnet.utils.file_logger import file_logger\n'), ((148, 18, 148, 29), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((180, 12, 180, 34), 'torch.nn.DataParallel', 'nn.DataParallel', ({(180, 28, 180, 33): 'model'}, {}), '(model)', True, 'import torch.nn as nn\n'), ((229, 21, 229, 32), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((280, 22, 280, 52), 'os.path.splitext', 'osp.splitext', ({(280, 35, 280, 51): 'args.config_file'}, {}), '(args.config_file)', True, 'import os.path as osp\n'), ((137, 24, 137, 35), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((147, 25, 147, 36), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((168, 16, 168, 116), 'fastmvsnet.utils.file_logger.file_logger', 'file_logger', (), '', False, 'from fastmvsnet.utils.file_logger import file_logger\n'), ((106, 27, 106, 60), 'torch.cuda.max_memory_allocated', 'torch.cuda.max_memory_allocated', ({}, {}), '()', False, 'import torch\n')] |
fossabot/unifacisa-visao-computacional | modulo2/3-detectores/3.2-detector/models.py | 14aef22a3e7fe10ee820d31ce12ad21a3cad7b0b | # Basic structure for Machine Learning and Deep Learning projects
# By Adriano Santos.
from torch import nn, relu
import torch.nn.functional as F
import torch.optim as optim
import torch
from torchvision import models
class ResNet(nn.Module):
def __init__(self, saida, pretreinado=True):
super(ResNet, self).__init__()
resnet = models.resnet34(pretrained=pretreinado)
layers = list(resnet.children())[:8]
self.features1 = nn.Sequential(*layers[:6])
self.features2 = nn.Sequential(*layers[6:])
self.classificador = nn.Sequential(nn.BatchNorm1d(512), nn.Linear(512, saida))
def forward(self, x):
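        # ResNet-34 backbone (children 0-5, then 6-7), ReLU, global average pool,
        # flatten, then the BatchNorm + Linear classification head.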
x = self.features1(x)
x = self.features2(x)
x = F.relu(x)
x = nn.AdaptiveAvgPool2d((1,1))(x)
x = x.view(x.shape[0], -1)
return self.classificador(x) | [((15, 17, 15, 56), 'torchvision.models.resnet34', 'models.resnet34', (), '', False, 'from torchvision import models\n'), ((18, 25, 18, 51), 'torch.nn.Sequential', 'nn.Sequential', ({(18, 39, 18, 50): '*layers[:6]'}, {}), '(*layers[:6])', False, 'from torch import nn, relu\n'), ((19, 25, 19, 51), 'torch.nn.Sequential', 'nn.Sequential', ({(19, 39, 19, 50): '*layers[6:]'}, {}), '(*layers[6:])', False, 'from torch import nn, relu\n'), ((26, 12, 26, 21), 'torch.nn.functional.relu', 'F.relu', ({(26, 19, 26, 20): 'x'}, {}), '(x)', True, 'import torch.nn.functional as F\n'), ((21, 43, 21, 62), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', ({(21, 58, 21, 61): '512'}, {}), '(512)', False, 'from torch import nn, relu\n'), ((21, 64, 21, 85), 'torch.nn.Linear', 'nn.Linear', ({(21, 74, 21, 77): '512', (21, 79, 21, 84): 'saida'}, {}), '(512, saida)', False, 'from torch import nn, relu\n'), ((27, 12, 27, 39), 'torch.nn.AdaptiveAvgPool2d', 'nn.AdaptiveAvgPool2d', ({(27, 33, 27, 38): '(1, 1)'}, {}), '((1, 1))', False, 'from torch import nn, relu\n')] |
sbrodeur/evert | python/setup.py | c7005ba29576145ab650144f9b9230eaf7bec460 | #!/usr/bin/env python
# Copyright (c) 2017, Simon Brodeur
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
# OF SUCH DAMAGE.
"""
setup.py file for installing Python bindings using SWIG
"""
from distutils.core import setup, Extension
evert_module = Extension('_evert',
define_macros = [('MAJOR_VERSION', '1'),
('MINOR_VERSION', '0')],
include_dirs = ['../include'],
sources=['../src/elBeam.cpp',
'../src/elBSP.cpp',
'../src/elGLUT.cpp',
'../src/elListener.cpp',
'../src/elOrientedPoint.cpp',
'../src/elPathSolution.cpp',
'../src/elPolygon.cpp',
'../src/elRay.cpp',
'../src/elRoom.cpp',
'../src/elSource.cpp',
'../src/elTimer.cpp',
'../src/elVector.cpp',
'../src/elViewer.cpp',
'evert.i'],
libraries = ['GL', 'GLU', 'glut'],
library_dirs = [],
language='c++',
swig_opts=['-c++', '-I../include'],
#extra_compile_args=['-std=c++11'],
)
setup (name = 'evert',
version = '1.0',
author = "Samuli Laine",
description = """Accelerated beam tracing algorithm""",
ext_modules = [evert_module],
py_modules = ["evert"],
)
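# Typical build/install (illustrative; assumes SWIG and the GL/GLU/GLUT development libraries are present):
#   python setup.py build_ext --inplace
#   python setup.py install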
| [((37, 15, 60, 25), 'distutils.core.Extension', 'Extension', (), '', False, 'from distutils.core import setup, Extension\n'), ((62, 0, 68, 8), 'distutils.core.setup', 'setup', (), '', False, 'from distutils.core import setup, Extension\n')] |
kodo-pp/somegame-but-not-that-one | somegame/fps_osd.py | 6252d34b84fe7c83ada9e699df17688c50dd7596 | import pygame
from loguru import logger
from somegame.osd import OSD
class FpsOSD(OSD):
def __init__(self, game):
super().__init__(game)
logger.info('Loading font')
self.font = pygame.font.Font(pygame.font.get_default_font(), 32)
def draw(self, surface):
fps = self.game.get_average_fps()
fps_text = '<unknown>' if fps is None else '{:.1f}'.format(fps)
tmp_surf = self.font.render('{} FPS'.format(fps_text), True, (255, 255, 255))
surface.blit(tmp_surf, (0, 0))
| [((10, 8, 10, 35), 'loguru.logger.info', 'logger.info', ({(10, 20, 10, 34): '"""Loading font"""'}, {}), "('Loading font')", False, 'from loguru import logger\n'), ((11, 37, 11, 67), 'pygame.font.get_default_font', 'pygame.font.get_default_font', ({}, {}), '()', False, 'import pygame\n')] |
sgwhat/BigDL | python/chronos/test/bigdl/chronos/data/experimental/test_xshardstsdataset.py | 25b402666fbb26b0bc18fc8100e9a00469844778 | #
# Copyright 2016 The BigDL Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import pytest
import numpy as np
import pandas as pd
import random
import os
from unittest import TestCase
from bigdl.chronos.data import TSDataset
from bigdl.chronos.data.experimental import XShardsTSDataset
from bigdl.orca.data.pandas import read_csv
from bigdl.orca.common import init_orca_context, stop_orca_context, OrcaContext
from pandas.testing import assert_frame_equal
from numpy.testing import assert_array_almost_equal
def generate_spark_df():
init_orca_context(cores=8)
sc = OrcaContext.get_spark_context()
rdd = sc.range(0, 100)
from pyspark.ml.linalg import DenseVector
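    # 100 rows: a 1-element random "feature" vector, a random binary "id", and the row index as "date"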
df = rdd.map(lambda x: (DenseVector(np.random.randn(1, ).astype(np.float)),
int(np.random.randint(0, 2, size=())),
int(x))).toDF(["feature", "id", "date"])
return df
def get_ugly_ts_df():
data = np.random.random_sample((100, 5))
mask = np.random.random_sample((100, 5))
newmask = mask.copy()
mask[newmask >= 0.4] = 2
mask[newmask < 0.4] = 1
mask[newmask < 0.2] = 0
data[mask == 0] = None
data[mask == 1] = np.nan
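    # roughly 20% of the entries are now set to None and another ~20% to NaN, so imputation has gaps to fill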
df = pd.DataFrame(data, columns=['a', 'b', 'c', 'd', 'e'])
df['a'][0] = np.nan # make sure column 'a' has a N/A
df["datetime"] = pd.date_range('1/1/2019', periods=100)
df.loc[50:100, "datetime"] = pd.date_range('1/1/2019', periods=50)
df["id"] = np.array(['00']*50 + ['01']*50)
return df
class TestXShardsTSDataset(TestCase):
def setUp(self):
self.resource_path = os.path.join(os.path.split(__file__)[0], "../../resources/")
def tearDown(self):
pass
@classmethod
def tearDownClass(cls):
# stop possible active_spark_context
from pyspark import SparkContext
from bigdl.orca.ray import OrcaRayContext
if SparkContext._active_spark_context is not None:
print("Stopping spark_orca context")
sc = SparkContext.getOrCreate()
if sc.getConf().get("spark.master").startswith("spark://"):
from bigdl.dllib.nncontext import stop_spark_standalone
stop_spark_standalone()
sc.stop()
def test_xshardstsdataset_initialization(self):
shards_single = read_csv(os.path.join(self.resource_path, "single.csv"))
tsdata = XShardsTSDataset.from_xshards(shards_single, dt_col="datetime", target_col="value",
extra_feature_col=["extra feature"], id_col="id")
assert tsdata._id_list == [0]
assert tsdata.feature_col == ["extra feature"]
assert tsdata.target_col == ["value"]
assert tsdata.dt_col == "datetime"
assert tsdata.shards.num_partitions() == 1
tsdata = XShardsTSDataset.from_xshards(shards_single, dt_col="datetime",
target_col=["value"],
extra_feature_col="extra feature", id_col="id")
assert tsdata._id_list == [0]
assert tsdata.feature_col == ["extra feature"]
assert tsdata.target_col == ["value"]
assert tsdata.dt_col == "datetime"
assert tsdata.shards.num_partitions() == 1
tsdata = XShardsTSDataset.from_xshards(shards_single, dt_col="datetime",
target_col=["value"],
extra_feature_col="extra feature")
assert tsdata._id_list == ["0"]
assert tsdata.feature_col == ["extra feature"]
assert tsdata.target_col == ["value"]
assert tsdata.dt_col == "datetime"
assert tsdata.shards.num_partitions() == 1
def test_xshardstsdataset_initialization_multiple(self):
shards_multiple = read_csv(os.path.join(self.resource_path, "multiple.csv"))
# legal input
tsdata = XShardsTSDataset.from_xshards(shards_multiple, dt_col="datetime",
target_col="value",
extra_feature_col=["extra feature"], id_col="id")
assert tsdata._id_list == [0, 1]
assert tsdata.feature_col == ["extra feature"]
assert tsdata.target_col == ["value"]
assert tsdata.dt_col == "datetime"
assert tsdata.shards.num_partitions() == 2
tsdata = XShardsTSDataset.from_xshards(shards_multiple, dt_col="datetime",
target_col=["value"],
extra_feature_col="extra feature", id_col="id")
assert tsdata._id_list == [0, 1]
assert tsdata.feature_col == ["extra feature"]
assert tsdata.target_col == ["value"]
assert tsdata.dt_col == "datetime"
assert tsdata.shards.num_partitions() == 2
tsdata = XShardsTSDataset.from_xshards(shards_multiple, dt_col="datetime",
target_col=["value"],
extra_feature_col="extra feature")
assert tsdata._id_list == ['0']
assert tsdata.feature_col == ["extra feature"]
assert tsdata.target_col == ["value"]
assert tsdata.dt_col == "datetime"
assert tsdata.shards.num_partitions() == 1
def test_xshardstsdataset_split(self):
shards_multiple = read_csv(os.path.join(self.resource_path, "multiple.csv"))
# only train and test
tsdata_train, tsdata_valid, tsdata_test =\
XShardsTSDataset.from_xshards(shards_multiple, dt_col="datetime", target_col="value",
extra_feature_col=["extra feature"], id_col="id",
with_split=True, val_ratio=0, test_ratio=0.1)
# standard split with all three sets
tsdata_train, tsdata_valid, tsdata_test =\
XShardsTSDataset.from_xshards(shards_multiple, dt_col="datetime", target_col="value",
extra_feature_col=["extra feature"], id_col="id",
with_split=True, val_ratio=0.1, test_ratio=0.1,
largest_look_back=5, largest_horizon=2)
tsdata_train.feature_col.append("new extra feature")
assert len(tsdata_train.feature_col) == 2
assert len(tsdata_valid.feature_col) == 1
assert len(tsdata_test.feature_col) == 1
tsdata_train.target_col[0] = "new value"
assert tsdata_train.target_col[0] == "new value"
assert tsdata_valid.target_col[0] != "new value"
assert tsdata_test.target_col[0] != "new value"
def test_xshardstsdataset_roll_multiple_id(self):
shards_multiple = read_csv(os.path.join(self.resource_path, "multiple.csv"))
horizon = random.randint(1, 10)
lookback = random.randint(1, 20)
tsdata = XShardsTSDataset.from_xshards(shards_multiple, dt_col="datetime",
target_col="value",
extra_feature_col=["extra feature"], id_col="id")
with pytest.raises(RuntimeError):
tsdata.to_xshards()
# roll train
tsdata.roll(lookback=lookback, horizon=horizon)
shards_numpy = tsdata.to_xshards()
collected_numpy = shards_numpy.collect() # collect and valid
x = np.concatenate([collected_numpy[i]['x'] for i in range(len(collected_numpy))], axis=0)
y = np.concatenate([collected_numpy[i]['y'] for i in range(len(collected_numpy))], axis=0)
assert x.shape == ((50-lookback-horizon+1)*2, lookback, 2)
assert y.shape == ((50-lookback-horizon+1)*2, horizon, 1)
tsdata.roll(lookback=lookback, horizon=horizon,
feature_col=["extra feature"], target_col="value")
shards_numpy = tsdata.to_xshards()
collected_numpy = shards_numpy.collect() # collect and valid
x = np.concatenate([collected_numpy[i]['x'] for i in range(len(collected_numpy))], axis=0)
y = np.concatenate([collected_numpy[i]['y'] for i in range(len(collected_numpy))], axis=0)
assert x.shape == ((50-lookback-horizon+1)*2, lookback, 2)
assert y.shape == ((50-lookback-horizon+1)*2, horizon, 1)
tsdata.roll(lookback=lookback, horizon=horizon,
feature_col=[], target_col="value")
shards_numpy = tsdata.to_xshards()
collected_numpy = shards_numpy.collect() # collect and valid
x = np.concatenate([collected_numpy[i]['x'] for i in range(len(collected_numpy))], axis=0)
y = np.concatenate([collected_numpy[i]['y'] for i in range(len(collected_numpy))], axis=0)
assert x.shape == ((50-lookback-horizon+1)*2, lookback, 1)
assert y.shape == ((50-lookback-horizon+1)*2, horizon, 1)
# roll test
horizon = 0
lookback = random.randint(1, 20)
tsdata.roll(lookback=lookback, horizon=horizon)
shards_numpy = tsdata.to_xshards()
collected_numpy = shards_numpy.collect() # collect and valid
x = np.concatenate([collected_numpy[i]['x'] for i in range(len(collected_numpy))], axis=0)
assert x.shape == ((50-lookback-horizon+1)*2, lookback, 2)
def test_xshardstsdataset_impute(self):
from tempfile import TemporaryDirectory
tmp_df = get_ugly_ts_df()
with TemporaryDirectory() as tmpdir:
file_name = os.path.join(tmpdir, 'impute.csv')
tmp_df.to_csv(file_name, index=False)
shards_tmp = read_csv(file_name)
for val in ["last", "const", "linear"]:
tsdata = XShardsTSDataset.from_xshards(shards_tmp,
dt_col="datetime", target_col="e",
extra_feature_col=["a", "b", "c", "d"], id_col="id")
tsdata.impute(mode=val)
collected_df = tsdata.shards.collect()
collected_df = pd.concat(collected_df, axis=0)
assert collected_df.isna().sum().sum() == 0
assert len(collected_df) == 100
def test_xshardstsdataset_sparkdf(self):
df = generate_spark_df()
# with id
tsdata = XShardsTSDataset.from_sparkdf(df, dt_col="date",
target_col="feature",
id_col="id")
tsdata.roll(lookback=4, horizon=2)
data = tsdata.to_xshards().collect()
assert data[0]['x'].shape[1] == 4
assert data[0]['x'].shape[2] == 1
assert data[0]['y'].shape[1] == 2
assert data[0]['y'].shape[2] == 1
assert tsdata.shards.num_partitions() == 2
# with only 1 id
tsdata = XShardsTSDataset.from_sparkdf(df, dt_col="date",
target_col="feature")
tsdata.roll(lookback=4, horizon=2)
data = tsdata.to_xshards().collect()
assert data[0]['x'].shape[1] == 4
assert data[0]['x'].shape[2] == 1
assert data[0]['y'].shape[1] == 2
assert data[0]['y'].shape[2] == 1
assert tsdata.shards.num_partitions() == 1
| [((34, 4, 34, 30), 'bigdl.orca.common.init_orca_context', 'init_orca_context', (), '', False, 'from bigdl.orca.common import init_orca_context, stop_orca_context, OrcaContext\n'), ((35, 9, 35, 40), 'bigdl.orca.common.OrcaContext.get_spark_context', 'OrcaContext.get_spark_context', ({}, {}), '()', False, 'from bigdl.orca.common import init_orca_context, stop_orca_context, OrcaContext\n'), ((44, 11, 44, 44), 'numpy.random.random_sample', 'np.random.random_sample', ({(44, 35, 44, 43): '(100, 5)'}, {}), '((100, 5))', True, 'import numpy as np\n'), ((45, 11, 45, 44), 'numpy.random.random_sample', 'np.random.random_sample', ({(45, 35, 45, 43): '(100, 5)'}, {}), '((100, 5))', True, 'import numpy as np\n'), ((52, 9, 52, 62), 'pandas.DataFrame', 'pd.DataFrame', (), '', True, 'import pandas as pd\n'), ((54, 21, 54, 59), 'pandas.date_range', 'pd.date_range', (), '', True, 'import pandas as pd\n'), ((55, 33, 55, 70), 'pandas.date_range', 'pd.date_range', (), '', True, 'import pandas as pd\n'), ((56, 15, 56, 46), 'numpy.array', 'np.array', ({(56, 24, 56, 45): "['00'] * 50 + ['01'] * 50"}, {}), "(['00'] * 50 + ['01'] * 50)", True, 'import numpy as np\n'), ((82, 17, 83, 96), 'bigdl.chronos.data.experimental.XShardsTSDataset.from_xshards', 'XShardsTSDataset.from_xshards', (), '', False, 'from bigdl.chronos.data.experimental import XShardsTSDataset\n'), ((90, 17, 92, 94), 'bigdl.chronos.data.experimental.XShardsTSDataset.from_xshards', 'XShardsTSDataset.from_xshards', (), '', False, 'from bigdl.chronos.data.experimental import XShardsTSDataset\n'), ((99, 17, 101, 81), 'bigdl.chronos.data.experimental.XShardsTSDataset.from_xshards', 'XShardsTSDataset.from_xshards', (), '', False, 'from bigdl.chronos.data.experimental import XShardsTSDataset\n'), ((111, 17, 113, 96), 'bigdl.chronos.data.experimental.XShardsTSDataset.from_xshards', 'XShardsTSDataset.from_xshards', (), '', False, 'from bigdl.chronos.data.experimental import XShardsTSDataset\n'), ((120, 17, 122, 94), 'bigdl.chronos.data.experimental.XShardsTSDataset.from_xshards', 'XShardsTSDataset.from_xshards', (), '', False, 'from bigdl.chronos.data.experimental import XShardsTSDataset\n'), ((129, 17, 131, 81), 'bigdl.chronos.data.experimental.XShardsTSDataset.from_xshards', 'XShardsTSDataset.from_xshards', (), '', False, 'from bigdl.chronos.data.experimental import XShardsTSDataset\n'), ((142, 12, 144, 87), 'bigdl.chronos.data.experimental.XShardsTSDataset.from_xshards', 'XShardsTSDataset.from_xshards', (), '', False, 'from bigdl.chronos.data.experimental import XShardsTSDataset\n'), ((147, 12, 150, 81), 'bigdl.chronos.data.experimental.XShardsTSDataset.from_xshards', 'XShardsTSDataset.from_xshards', (), '', False, 'from bigdl.chronos.data.experimental import XShardsTSDataset\n'), ((164, 18, 164, 39), 'random.randint', 'random.randint', ({(164, 33, 164, 34): '1', (164, 36, 164, 38): '10'}, {}), '(1, 10)', False, 'import random\n'), ((165, 19, 165, 40), 'random.randint', 'random.randint', ({(165, 34, 165, 35): '1', (165, 37, 165, 39): '20'}, {}), '(1, 20)', False, 'import random\n'), ((167, 17, 169, 96), 'bigdl.chronos.data.experimental.XShardsTSDataset.from_xshards', 'XShardsTSDataset.from_xshards', (), '', False, 'from bigdl.chronos.data.experimental import XShardsTSDataset\n'), ((203, 19, 203, 40), 'random.randint', 'random.randint', ({(203, 34, 203, 35): '1', (203, 37, 203, 39): '20'}, {}), '(1, 20)', False, 'import random\n'), ((234, 17, 236, 59), 'bigdl.chronos.data.experimental.XShardsTSDataset.from_sparkdf', 'XShardsTSDataset.from_sparkdf', (), '', 
False, 'from bigdl.chronos.data.experimental import XShardsTSDataset\n'), ((246, 17, 247, 68), 'bigdl.chronos.data.experimental.XShardsTSDataset.from_sparkdf', 'XShardsTSDataset.from_sparkdf', (), '', False, 'from bigdl.chronos.data.experimental import XShardsTSDataset\n'), ((74, 17, 74, 43), 'pyspark.SparkContext.getOrCreate', 'SparkContext.getOrCreate', ({}, {}), '()', False, 'from pyspark import SparkContext\n'), ((81, 33, 81, 79), 'os.path.join', 'os.path.join', ({(81, 46, 81, 64): 'self.resource_path', (81, 66, 81, 78): '"""single.csv"""'}, {}), "(self.resource_path, 'single.csv')", False, 'import os\n'), ((109, 35, 109, 83), 'os.path.join', 'os.path.join', ({(109, 48, 109, 66): 'self.resource_path', (109, 68, 109, 82): '"""multiple.csv"""'}, {}), "(self.resource_path, 'multiple.csv')", False, 'import os\n'), ((139, 35, 139, 83), 'os.path.join', 'os.path.join', ({(139, 48, 139, 66): 'self.resource_path', (139, 68, 139, 82): '"""multiple.csv"""'}, {}), "(self.resource_path, 'multiple.csv')", False, 'import os\n'), ((163, 35, 163, 83), 'os.path.join', 'os.path.join', ({(163, 48, 163, 66): 'self.resource_path', (163, 68, 163, 82): '"""multiple.csv"""'}, {}), "(self.resource_path, 'multiple.csv')", False, 'import os\n'), ((171, 13, 171, 40), 'pytest.raises', 'pytest.raises', ({(171, 27, 171, 39): 'RuntimeError'}, {}), '(RuntimeError)', False, 'import pytest\n'), ((214, 13, 214, 33), 'tempfile.TemporaryDirectory', 'TemporaryDirectory', ({}, {}), '()', False, 'from tempfile import TemporaryDirectory\n'), ((215, 24, 215, 58), 'os.path.join', 'os.path.join', ({(215, 37, 215, 43): 'tmpdir', (215, 45, 215, 57): '"""impute.csv"""'}, {}), "(tmpdir, 'impute.csv')", False, 'import os\n'), ((217, 25, 217, 44), 'bigdl.orca.data.pandas.read_csv', 'read_csv', ({(217, 34, 217, 43): 'file_name'}, {}), '(file_name)', False, 'from bigdl.orca.data.pandas import read_csv\n'), ((62, 42, 62, 65), 'os.path.split', 'os.path.split', ({(62, 56, 62, 64): '__file__'}, {}), '(__file__)', False, 'import os\n'), ((77, 16, 77, 39), 'bigdl.dllib.nncontext.stop_spark_standalone', 'stop_spark_standalone', ({}, {}), '()', False, 'from bigdl.dllib.nncontext import stop_spark_standalone\n'), ((220, 25, 222, 96), 'bigdl.chronos.data.experimental.XShardsTSDataset.from_xshards', 'XShardsTSDataset.from_xshards', (), '', False, 'from bigdl.chronos.data.experimental import XShardsTSDataset\n'), ((225, 31, 225, 62), 'pandas.concat', 'pd.concat', (), '', True, 'import pandas as pd\n'), ((39, 32, 39, 64), 'numpy.random.randint', 'np.random.randint', (), '', True, 'import numpy as np\n'), ((38, 40, 38, 60), 'numpy.random.randn', 'np.random.randn', ({(38, 56, 38, 57): '1'}, {}), '(1)', True, 'import numpy as np\n')] |
WXSD-Sales/ZoomToWebex | zoom_functions.py | 16cc663620e2ef2904b0e2857d709aee96b78eb7 | import json
import tornado.gen
import traceback
from base64 import b64encode
from tornado.httpclient import AsyncHTTPClient, HTTPRequest, HTTPError
from settings import Settings
from mongo_db_controller import ZoomUserDB
@tornado.gen.coroutine
def zoomRefresh(zoom_user):
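    """Exchange the stored refresh token for a new Zoom access token via the
    OAuth token endpoint, persist the refreshed credentials with ZoomUserDB,
    and drop the stored user (returning None) if Zoom answers with a 401."""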
url = "https://zoom.us/oauth/token"
payload = "grant_type=refresh_token&"
payload += "refresh_token={0}".format(zoom_user.get('refresh_token'))
#we need to base 64 encode it
    #and then decode it to ascii as python 3 stores it as a byte string
userAndPass = b64encode("{0}:{1}".format(Settings.zoom_client_id, Settings.zoom_client_secret).encode()).decode("ascii")
headers = {
'authorization': 'Basic {0}'.format(userAndPass),
'content-type': "application/x-www-form-urlencoded"
}
request = HTTPRequest(url, method="POST", headers=headers, body=payload)
http_client = AsyncHTTPClient()
print(zoom_user)
print('making zoomRefresh')
print(payload)
try:
response = yield http_client.fetch(request)
resp = json.loads(response.body.decode("utf-8"))
print("zoomRefresh /access_token Response: {0}".format(resp))
zoom_user = ZoomUserDB.db.insert_user(zoom_user['person_id'], resp['access_token'], resp['expires_in'], resp['refresh_token'], "zoom")
print('new zoom_user:{0}'.format(zoom_user))
except HTTPError as he:
print('zoomRefresh HTTPError:')
print(he.code)
print(he.response.body)
if he.code == 401:
ZoomUserDB.db.delete_user(zoom_user['person_id'], "zoom")
zoom_user = None
raise tornado.gen.Return(zoom_user)
@tornado.gen.coroutine
def zoomGET(endpoint_url, zoom_user):
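    """GET an endpoint of the Zoom v2 REST API with the user's bearer token.
    On a 401 the token is refreshed and the call retried; returns a
    (decoded JSON response or None, possibly updated zoom_user) tuple."""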
url = "https://api.zoom.us/v2{0}".format(endpoint_url)
headers = {"Authorization":"Bearer {0}".format(zoom_user.get('token'))}
request = HTTPRequest(url, method="GET", headers=headers)
http_client = AsyncHTTPClient()
response = None
try:
response = yield http_client.fetch(request)
body = response.body.decode('utf-8')
response = json.loads(body)
except HTTPError as he:
if he.code == 401:
print('token may be expired, attempting refresh')
zoom_user = yield zoomRefresh(zoom_user)
if zoom_user:
response, zoom_user = yield zoomGET(endpoint_url, zoom_user)
else:
try:
print(he.response.body)
except Exception as e:
pass
traceback.print_exc()
raise tornado.gen.Return((response, zoom_user))
| [((23, 14, 23, 76), 'tornado.httpclient.HTTPRequest', 'HTTPRequest', (), '', False, 'from tornado.httpclient import AsyncHTTPClient, HTTPRequest, HTTPError\n'), ((24, 18, 24, 35), 'tornado.httpclient.AsyncHTTPClient', 'AsyncHTTPClient', ({}, {}), '()', False, 'from tornado.httpclient import AsyncHTTPClient, HTTPRequest, HTTPError\n'), ((47, 14, 47, 61), 'tornado.httpclient.HTTPRequest', 'HTTPRequest', (), '', False, 'from tornado.httpclient import AsyncHTTPClient, HTTPRequest, HTTPError\n'), ((48, 18, 48, 35), 'tornado.httpclient.AsyncHTTPClient', 'AsyncHTTPClient', ({}, {}), '()', False, 'from tornado.httpclient import AsyncHTTPClient, HTTPRequest, HTTPError\n'), ((32, 20, 32, 142), 'mongo_db_controller.ZoomUserDB.db.insert_user', 'ZoomUserDB.db.insert_user', ({(32, 46, 32, 68): "zoom_user['person_id']", (32, 70, 32, 90): "resp['access_token']", (32, 92, 32, 110): "resp['expires_in']", (32, 112, 32, 133): "resp['refresh_token']", (32, 135, 32, 141): '"""zoom"""'}, {}), "(zoom_user['person_id'], resp['access_token'],\n resp['expires_in'], resp['refresh_token'], 'zoom')", False, 'from mongo_db_controller import ZoomUserDB\n'), ((53, 19, 53, 35), 'json.loads', 'json.loads', ({(53, 30, 53, 34): 'body'}, {}), '(body)', False, 'import json\n'), ((39, 12, 39, 69), 'mongo_db_controller.ZoomUserDB.db.delete_user', 'ZoomUserDB.db.delete_user', ({(39, 38, 39, 60): "zoom_user['person_id']", (39, 62, 39, 68): '"""zoom"""'}, {}), "(zoom_user['person_id'], 'zoom')", False, 'from mongo_db_controller import ZoomUserDB\n'), ((65, 12, 65, 33), 'traceback.print_exc', 'traceback.print_exc', ({}, {}), '()', False, 'import traceback\n')] |
gmuraru/CrypTen | crypten/mpc/__init__.py | e39a7aaf65436706321fe4e3fc055308c78b6b92 | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import os
from crypten.mpc import primitives # noqa: F401
from crypten.mpc import provider  # noqa: F401
from .context import run_multiprocess
from .mpc import MPCTensor
from .ptype import ptype
__all__ = ["MPCTensor", "primitives", "provider", "ptype", "run_multiprocess"]
# the different private type attributes of an mpc encrypted tensor
arithmetic = ptype.arithmetic
binary = ptype.binary
# Set provider
__SUPPORTED_PROVIDERS = {
"TFP": provider.TrustedFirstParty,
"TTP": provider.TrustedThirdParty,
"HE": provider.HomomorphicProvider,
}
__default_provider = __SUPPORTED_PROVIDERS[
os.environ.get("CRYPTEN_PROVIDER_NAME", "TFP")
]
def set_default_provider(new_default_provider):
global __default_provider
assert_msg = "Provider %s is not supported" % new_default_provider
if isinstance(new_default_provider, str):
assert new_default_provider in __SUPPORTED_PROVIDERS.keys(), assert_msg
else:
assert new_default_provider in __SUPPORTED_PROVIDERS.values(), assert_msg
__default_provider = new_default_provider
os.environ["CRYPTEN_PROVIDER_NAME"] = new_default_provider.NAME
def get_default_provider():
return __default_provider
def ttp_required():
return __default_provider == provider.TrustedThirdParty
| [((31, 4, 31, 50), 'os.environ.get', 'os.environ.get', ({(31, 19, 31, 42): '"""CRYPTEN_PROVIDER_NAME"""', (31, 44, 31, 49): '"""TFP"""'}, {}), "('CRYPTEN_PROVIDER_NAME', 'TFP')", False, 'import os\n')] |
lahosken/pants | contrib/python/src/python/pants/contrib/python/checks/tasks/checkstyle/pyflakes.py | 1b0340987c9b2eab9411416803c75b80736716e4 | # coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from pyflakes.checker import Checker as FlakesChecker
from pants.contrib.python.checks.tasks.checkstyle.common import CheckstylePlugin, Nit
class FlakeError(Nit):
# TODO(wickman) There is overlap between this and Flake8 -- consider integrating
# checkstyle plug-ins into the PEP8 tool directly so that this can be inherited
# by flake8.
# Code reference is here: http://flake8.readthedocs.org/en/latest/warnings.html
CLASS_ERRORS = {
'DuplicateArgument': 'F831',
'ImportShadowedByLoopVar': 'F402',
'ImportStarUsed': 'F403',
'LateFutureImport': 'F404',
'Redefined': 'F810',
'RedefinedInListComp': 'F812',
'RedefinedWhileUnused': 'F811',
'UndefinedExport': 'F822',
'UndefinedLocal': 'F823',
'UndefinedName': 'F821',
'UnusedImport': 'F401',
'UnusedVariable': 'F841',
}
def __init__(self, python_file, flake_message):
line_range = python_file.line_range(flake_message.lineno)
super(FlakeError, self).__init__(
self.get_error_code(flake_message),
Nit.ERROR,
python_file.filename,
flake_message.message % flake_message.message_args,
line_range,
python_file.lines[line_range])
@classmethod
def get_error_code(cls, message):
return cls.CLASS_ERRORS.get(message.__class__.__name__, 'F999')
class PyflakesChecker(CheckstylePlugin):
"""Detect common coding errors via the pyflakes package."""
def nits(self):
checker = FlakesChecker(self.python_file.tree, self.python_file.filename)
for message in sorted(checker.messages, key=lambda msg: msg.lineno):
if FlakeError.get_error_code(message) not in self.options.ignore:
yield FlakeError(self.python_file, message)
| [((52, 14, 52, 77), 'pyflakes.checker.Checker', 'FlakesChecker', ({(52, 28, 52, 49): 'self.python_file.tree', (52, 51, 52, 76): 'self.python_file.filename'}, {}), '(self.python_file.tree, self.python_file.filename)', True, 'from pyflakes.checker import Checker as FlakesChecker\n')] |
boyombo/pharmrep | pharmrep/forum/models.py | 2293ceb235dec949c58fa40d1ee43fce172e0ceb | from django.db import models
from django.contrib.auth.models import User
from django.contrib import admin
from django.utils.translation import ugettext_lazy as _
class Forum(models.Model):
title = models.CharField(max_length=60)
description = models.TextField(blank=True, default='')
updated = models.DateTimeField(auto_now=True)
    created = models.DateTimeField(auto_now_add=True)
creator = models.ForeignKey(User, blank=True, null=True)
def __unicode__(self):
return self.title
def num_posts(self):
return sum([t.num_posts() for t in self.topic_set.all()])
def last_post(self):
if self.topic_set.count():
last = None
for t in self.topic_set.all():
l = t.last_post()
if l:
if not last: last = l
elif l.created > last.created: last = l
return last
class Topic(models.Model):
title = models.CharField(max_length=60)
description = models.TextField(max_length=10000, blank=True, null=True)
forum = models.ForeignKey(Forum)
    created = models.DateTimeField(auto_now_add=True)
creator = models.ForeignKey(User, blank=True, null=True)
updated = models.DateTimeField(auto_now=True)
closed = models.BooleanField(blank=True, default=False)
def num_posts(self):
return self.post_set.count()
def num_replies(self):
return max(0, self.post_set.count() - 1)
def last_post(self):
if self.post_set.count():
            return self.post_set.order_by("-created")[0]  # most recent post
def __unicode__(self):
return unicode(self.creator) + " - " + self.title
class Post(models.Model):
title = models.CharField(max_length=60)
created = models.DateTimeField(auto_now_add=True)
creator = models.ForeignKey(User, blank=True, null=True)
updated = models.DateTimeField(auto_now=True)
topic = models.ForeignKey(Topic)
body = models.TextField(max_length=10000)
user_ip = models.GenericIPAddressField(blank=True, null=True)
def __unicode__(self):
return u"%s - %s - %s" % (self.creator, self.topic, self.title)
def short(self):
return u"%s - %s\n%s" % (self.creator, self.title, self.created.strftime("%b %d, %I:%M %p"))
short.allow_tags = True
class ProfaneWord(models.Model):
word = models.CharField(max_length=60)
def __unicode__(self):
return self.word
| [((7, 12, 7, 43), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((8, 18, 8, 58), 'django.db.models.TextField', 'models.TextField', (), '', False, 'from django.db import models\n'), ((9, 14, 9, 49), 'django.db.models.DateTimeField', 'models.DateTimeField', (), '', False, 'from django.db import models\n'), ((10, 14, 10, 49), 'django.db.models.DateTimeField', 'models.DateTimeField', (), '', False, 'from django.db import models\n'), ((11, 14, 11, 60), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import models\n'), ((30, 12, 30, 43), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((31, 18, 31, 75), 'django.db.models.TextField', 'models.TextField', (), '', False, 'from django.db import models\n'), ((32, 12, 32, 36), 'django.db.models.ForeignKey', 'models.ForeignKey', ({(32, 30, 32, 35): 'Forum'}, {}), '(Forum)', False, 'from django.db import models\n'), ((33, 14, 33, 49), 'django.db.models.DateTimeField', 'models.DateTimeField', (), '', False, 'from django.db import models\n'), ((34, 14, 34, 60), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import models\n'), ((35, 14, 35, 49), 'django.db.models.DateTimeField', 'models.DateTimeField', (), '', False, 'from django.db import models\n'), ((36, 13, 36, 59), 'django.db.models.BooleanField', 'models.BooleanField', (), '', False, 'from django.db import models\n'), ((52, 12, 52, 43), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((53, 14, 53, 53), 'django.db.models.DateTimeField', 'models.DateTimeField', (), '', False, 'from django.db import models\n'), ((54, 14, 54, 60), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import models\n'), ((55, 14, 55, 49), 'django.db.models.DateTimeField', 'models.DateTimeField', (), '', False, 'from django.db import models\n'), ((56, 12, 56, 36), 'django.db.models.ForeignKey', 'models.ForeignKey', ({(56, 30, 56, 35): 'Topic'}, {}), '(Topic)', False, 'from django.db import models\n'), ((57, 11, 57, 45), 'django.db.models.TextField', 'models.TextField', (), '', False, 'from django.db import models\n'), ((58, 14, 58, 65), 'django.db.models.GenericIPAddressField', 'models.GenericIPAddressField', (), '', False, 'from django.db import models\n'), ((70, 11, 70, 42), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n')] |
jinnerbichler/home-automflashion | iri-node/fabfile.py | f93442712322ab819651f453437c11f685640e83 | import time
from fabric.api import run, env, task, put, cd, local, sudo
env.use_ssh_config = True
env.hosts = ['iota_node']
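# Fabric tasks for managing a private IOTA tangle on the remote 'iota_node'
# host: they upload the docker-compose project to /srv/private-tangle and
# start, stop, inspect or reset the IRI, coordinator and explorer services.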
@task(default=True)
def iri():
run('mkdir -p /srv/private-tangle/')
with cd('/srv/private-tangle'):
put('.', '.')
run('docker-compose --project-name private-tangle pull')
run('docker-compose --project-name private-tangle up -d --force-recreate iri')
@task
def tools():
with cd('/srv/private-tangle'):
put('.', '.')
run('docker-compose --project-name private-tangle pull')
run('docker-compose --project-name private-tangle up -d --no-deps --force-recreate coordinator explorer')
run('docker-compose --project-name private-tangle logs -f --tail 100 coordinator explorer')
@task
def stop():
with cd('/srv/private-tangle'):
run('docker-compose --project-name private-tangle stop')
@task
def stop_coord():
with cd('/srv/private-tangle'):
run('docker-compose --project-name private-tangle stop coordinator')
@task
def down():
with cd('/srv/private-tangle'):
run('docker-compose --project-name private-tangle down -v')
@task
def logs():
with cd('/srv/private-tangle'):
run('docker-compose --project-name private-tangle logs -f --tail 100')
@task
def logs_coord():
with cd('/srv/private-tangle'):
run('docker-compose --project-name private-tangle logs -f --tail 100 coordinator')
@task
def logs_all():
with cd('/srv/private-tangle'):
run('docker-compose logs -f')
@task
def reset():
# stop services and delete database
down()
time.sleep(1)
run('rm -rf /srv/private-tangle/testnet_db/')
# restart all services
iri()
time.sleep(5)
tools()
| [((8, 1, 8, 19), 'fabric.api.task', 'task', (), '', False, 'from fabric.api import run, env, task, put, cd, local, sudo\n'), ((10, 4, 10, 40), 'fabric.api.run', 'run', ({(10, 8, 10, 39): '"""mkdir -p /srv/private-tangle/"""'}, {}), "('mkdir -p /srv/private-tangle/')", False, 'from fabric.api import run, env, task, put, cd, local, sudo\n'), ((65, 4, 65, 17), 'time.sleep', 'time.sleep', ({(65, 15, 65, 16): '(1)'}, {}), '(1)', False, 'import time\n'), ((66, 4, 66, 49), 'fabric.api.run', 'run', ({(66, 8, 66, 48): '"""rm -rf /srv/private-tangle/testnet_db/"""'}, {}), "('rm -rf /srv/private-tangle/testnet_db/')", False, 'from fabric.api import run, env, task, put, cd, local, sudo\n'), ((70, 4, 70, 17), 'time.sleep', 'time.sleep', ({(70, 15, 70, 16): '(5)'}, {}), '(5)', False, 'import time\n'), ((11, 9, 11, 34), 'fabric.api.cd', 'cd', ({(11, 12, 11, 33): '"""/srv/private-tangle"""'}, {}), "('/srv/private-tangle')", False, 'from fabric.api import run, env, task, put, cd, local, sudo\n'), ((12, 8, 12, 21), 'fabric.api.put', 'put', ({(12, 12, 12, 15): '"""."""', (12, 17, 12, 20): '"""."""'}, {}), "('.', '.')", False, 'from fabric.api import run, env, task, put, cd, local, sudo\n'), ((13, 8, 13, 64), 'fabric.api.run', 'run', ({(13, 12, 13, 63): '"""docker-compose --project-name private-tangle pull"""'}, {}), "('docker-compose --project-name private-tangle pull')", False, 'from fabric.api import run, env, task, put, cd, local, sudo\n'), ((14, 8, 14, 86), 'fabric.api.run', 'run', ({(14, 12, 14, 85): '"""docker-compose --project-name private-tangle up -d --force-recreate iri"""'}, {}), "('docker-compose --project-name private-tangle up -d --force-recreate iri')", False, 'from fabric.api import run, env, task, put, cd, local, sudo\n'), ((19, 9, 19, 34), 'fabric.api.cd', 'cd', ({(19, 12, 19, 33): '"""/srv/private-tangle"""'}, {}), "('/srv/private-tangle')", False, 'from fabric.api import run, env, task, put, cd, local, sudo\n'), ((20, 8, 20, 21), 'fabric.api.put', 'put', ({(20, 12, 20, 15): '"""."""', (20, 17, 20, 20): '"""."""'}, {}), "('.', '.')", False, 'from fabric.api import run, env, task, put, cd, local, sudo\n'), ((21, 8, 21, 64), 'fabric.api.run', 'run', ({(21, 12, 21, 63): '"""docker-compose --project-name private-tangle pull"""'}, {}), "('docker-compose --project-name private-tangle pull')", False, 'from fabric.api import run, env, task, put, cd, local, sudo\n'), ((22, 8, 22, 113), 'fabric.api.run', 'run', ({(22, 12, 22, 112): '"""docker-compose --project-name private-tangle up -d --no-deps --force-recreate coordinator explorer"""'}, {}), "('docker-compose --project-name private-tangle up -d --no-deps --force-recreate coordinator explorer'\n )", False, 'from fabric.api import run, env, task, put, cd, local, sudo\n'), ((23, 8, 23, 99), 'fabric.api.run', 'run', ({(23, 12, 23, 98): '"""docker-compose --project-name private-tangle logs -f --tail 100 coordinator explorer"""'}, {}), "('docker-compose --project-name private-tangle logs -f --tail 100 coordinator explorer'\n )", False, 'from fabric.api import run, env, task, put, cd, local, sudo\n'), ((28, 9, 28, 34), 'fabric.api.cd', 'cd', ({(28, 12, 28, 33): '"""/srv/private-tangle"""'}, {}), "('/srv/private-tangle')", False, 'from fabric.api import run, env, task, put, cd, local, sudo\n'), ((29, 8, 29, 64), 'fabric.api.run', 'run', ({(29, 12, 29, 63): '"""docker-compose --project-name private-tangle stop"""'}, {}), "('docker-compose --project-name private-tangle stop')", False, 'from fabric.api import run, env, task, put, cd, local, sudo\n'), ((33, 
9, 33, 34), 'fabric.api.cd', 'cd', ({(33, 12, 33, 33): '"""/srv/private-tangle"""'}, {}), "('/srv/private-tangle')", False, 'from fabric.api import run, env, task, put, cd, local, sudo\n'), ((34, 8, 34, 76), 'fabric.api.run', 'run', ({(34, 12, 34, 75): '"""docker-compose --project-name private-tangle stop coordinator"""'}, {}), "('docker-compose --project-name private-tangle stop coordinator')", False, 'from fabric.api import run, env, task, put, cd, local, sudo\n'), ((39, 9, 39, 34), 'fabric.api.cd', 'cd', ({(39, 12, 39, 33): '"""/srv/private-tangle"""'}, {}), "('/srv/private-tangle')", False, 'from fabric.api import run, env, task, put, cd, local, sudo\n'), ((40, 8, 40, 67), 'fabric.api.run', 'run', ({(40, 12, 40, 66): '"""docker-compose --project-name private-tangle down -v"""'}, {}), "('docker-compose --project-name private-tangle down -v')", False, 'from fabric.api import run, env, task, put, cd, local, sudo\n'), ((45, 9, 45, 34), 'fabric.api.cd', 'cd', ({(45, 12, 45, 33): '"""/srv/private-tangle"""'}, {}), "('/srv/private-tangle')", False, 'from fabric.api import run, env, task, put, cd, local, sudo\n'), ((46, 8, 46, 78), 'fabric.api.run', 'run', ({(46, 12, 46, 77): '"""docker-compose --project-name private-tangle logs -f --tail 100"""'}, {}), "('docker-compose --project-name private-tangle logs -f --tail 100')", False, 'from fabric.api import run, env, task, put, cd, local, sudo\n'), ((51, 9, 51, 34), 'fabric.api.cd', 'cd', ({(51, 12, 51, 33): '"""/srv/private-tangle"""'}, {}), "('/srv/private-tangle')", False, 'from fabric.api import run, env, task, put, cd, local, sudo\n'), ((52, 8, 52, 90), 'fabric.api.run', 'run', ({(52, 12, 52, 89): '"""docker-compose --project-name private-tangle logs -f --tail 100 coordinator"""'}, {}), "('docker-compose --project-name private-tangle logs -f --tail 100 coordinator'\n )", False, 'from fabric.api import run, env, task, put, cd, local, sudo\n'), ((57, 9, 57, 34), 'fabric.api.cd', 'cd', ({(57, 12, 57, 33): '"""/srv/private-tangle"""'}, {}), "('/srv/private-tangle')", False, 'from fabric.api import run, env, task, put, cd, local, sudo\n'), ((58, 8, 58, 37), 'fabric.api.run', 'run', ({(58, 12, 58, 36): '"""docker-compose logs -f"""'}, {}), "('docker-compose logs -f')", False, 'from fabric.api import run, env, task, put, cd, local, sudo\n')] |
ptorresmanque/MachineLearning_v2.0 | features.py | 795e47b9cfc68f4e0fefb700d43af6c59e2f1d73 | import sqlite3
from random import randint, choice
import numpy as np
conn = sqlite3.connect('ej.db')
c = conn.cursor()
# GETTING MAXIMUM, MINIMUM AND AVERAGE SIZES #
c.execute('SELECT MAX(alto) FROM features')
resultado = c.fetchone()
if resultado:
altoMax = resultado[0]
c.execute('SELECT MIN(alto) FROM features')
resultado = c.fetchone()
if resultado:
altoMin = resultado[0]
altoProm = abs((altoMax + altoMin) / 2)
#print altoMax , altoProm , altoMin
arrAlto = [altoMax , altoProm , altoMin]
c.execute('SELECT MAX(ancho) FROM features')
resultado = c.fetchone()
if resultado:
anchoMax = resultado[0]
c.execute('SELECT MIN(ancho) FROM features')
resultado = c.fetchone()
if resultado:
anchoMin = resultado[0]
anchoProm = abs((anchoMax + anchoMin) / 2)
anchoMaxProm = abs((anchoMax + anchoProm) / 2)
anchoMinProm = abs((anchoMin + anchoProm) / 2)
arrAncho = [anchoMax, anchoMaxProm, anchoProm, anchoMinProm, anchoMin]
#### CREATING NEGATIVE CLASSES
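# For every (height, width) prototype, draw 10 negative samples whose height
# and width fall outside the 0.5x-1.5x band around that prototype, and insert
# them into the 'features' table with clase=0 (the negative class).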
for i in range(0,3):
for j in range(0,5):
for _ in range(10):
negAncho = arrAncho[j]
negAlto = arrAlto[i]
rand_alto_max = int(negAlto * 1.5)
rand_alto_min = int(negAlto * 0.5)
r3 = rand_alto_max * 2
rand_ancho_max = int(negAncho*1.5)
rand_ancho_min = int(negAncho*0.5)
r33 = rand_ancho_max * 2
f1 = choice([np.random.randint(1, rand_alto_min), np.random.randint(rand_alto_max, r3)])
f2 = choice([np.random.randint(1, rand_ancho_min), np.random.randint(rand_ancho_max, r33)])
c.execute("insert into features (ancho, alto, area, clase) values (?, ?, ?, ?)",
(f2, f1, f2*f1, 0))
conn.commit()
conn.close() | [((6, 7, 6, 31), 'sqlite3.connect', 'sqlite3.connect', ({(6, 23, 6, 30): '"""ej.db"""'}, {}), "('ej.db')", False, 'import sqlite3\n'), ((61, 25, 61, 60), 'numpy.random.randint', 'np.random.randint', ({(61, 43, 61, 44): '1', (61, 46, 61, 59): 'rand_alto_min'}, {}), '(1, rand_alto_min)', True, 'import numpy as np\n'), ((61, 62, 61, 98), 'numpy.random.randint', 'np.random.randint', ({(61, 80, 61, 93): 'rand_alto_max', (61, 95, 61, 97): 'r3'}, {}), '(rand_alto_max, r3)', True, 'import numpy as np\n'), ((62, 25, 62, 61), 'numpy.random.randint', 'np.random.randint', ({(62, 43, 62, 44): '1', (62, 46, 62, 60): 'rand_ancho_min'}, {}), '(1, rand_ancho_min)', True, 'import numpy as np\n'), ((62, 63, 62, 101), 'numpy.random.randint', 'np.random.randint', ({(62, 81, 62, 95): 'rand_ancho_max', (62, 97, 62, 100): 'r33'}, {}), '(rand_ancho_max, r33)', True, 'import numpy as np\n')] |
achilleas-k/brian2 | dev/ideas/cython/playing_around.py | 906563b6b1321585b082f79f74f1b4ab386347ec | from pylab import *
import cython
import time, timeit
from brian2.codegen.runtime.cython_rt.modified_inline import modified_cython_inline
import numpy
from scipy import weave
import numexpr
import theano
from theano import tensor as tt
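# Benchmark playground: the same sinusoidally driven exponential membrane
# update is implemented with Cython inline, plain Python, NumPy, numexpr,
# weave and Theano; the variants are checked for agreement and then timed.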
tau = 20 * 0.001
N = 1000000
b = 1.2 # constant current mean, the modulation varies
freq = 10.0
t = 0.0
dt = 0.0001
_array_neurongroup_a = a = linspace(.05, 0.75, N)
_array_neurongroup_v = v = rand(N)
ns = {'_array_neurongroup_a': a, '_array_neurongroup_v': v,
'_N': N,
'dt': dt, 't': t, 'tau': tau, 'b': b, 'freq': freq,# 'sin': numpy.sin,
'pi': pi,
}
code = '''
cdef int _idx
cdef int _vectorisation_idx
cdef int N = <int>_N
cdef double a, v, _v
#cdef double [:] _cy_array_neurongroup_a = _array_neurongroup_a
#cdef double [:] _cy_array_neurongroup_v = _array_neurongroup_v
cdef double* _cy_array_neurongroup_a = &(_array_neurongroup_a[0])
cdef double* _cy_array_neurongroup_v = &(_array_neurongroup_v[0])
for _idx in range(N):
_vectorisation_idx = _idx
a = _cy_array_neurongroup_a[_idx]
v = _cy_array_neurongroup_v[_idx]
_v = a*sin(2.0*freq*pi*t) + b + v*exp(-dt/tau) + (-a*sin(2.0*freq*pi*t) - b)*exp(-dt/tau)
#_v = a*b+0.0001*sin(v)
#_v = a*b+0.0001*v
v = _v
_cy_array_neurongroup_v[_idx] = v
'''
def timefunc_cython_inline():
cython.inline(code, locals=ns)
f_mod, f_arg_list = modified_cython_inline(code, locals=ns, globals={})
def timefunc_cython_modified_inline():
f_mod.__invoke(*f_arg_list)
#modified_cython_inline(code, locals=ns)
def timefunc_python():
for _idx in xrange(N):
_vectorisation_idx = _idx
a = _array_neurongroup_a[_idx]
v = _array_neurongroup_v[_idx]
_v = a*sin(2.0*freq*pi*t) + b + v*exp(-dt/tau) + (-a*sin(2.0*freq*pi*t) - b)*exp(-dt/tau)
v = _v
_array_neurongroup_v[_idx] = v
def timefunc_numpy():
_v = a*sin(2.0*freq*pi*t) + b + v*exp(-dt/tau) + (-a*sin(2.0*freq*pi*t) - b)*exp(-dt/tau)
v[:] = _v
def timefunc_numpy_smart():
_sin_term = sin(2.0*freq*pi*t)
_exp_term = exp(-dt/tau)
_a_term = (_sin_term-_sin_term*_exp_term)
_v = v
_v *= _exp_term
_v += a*_a_term
_v += -b*_exp_term + b
def timefunc_numpy_blocked():
ext = exp(-dt/tau)
sit = sin(2.0*freq*pi*t)
bs = 20000
for i in xrange(0, N, bs):
ab = a[i:i+bs]
vb = v[i:i+bs]
absit = ab*sit + b
vb *= ext
vb += absit
vb -= absit*ext
def timefunc_numexpr():
v[:] = numexpr.evaluate('a*sin(2.0*freq*pi*t) + b + v*exp(-dt/tau) + (-a*sin(2.0*freq*pi*t) - b)*exp(-dt/tau)')
def timefunc_numexpr_smart():
_sin_term = sin(2.0*freq*pi*t)
_exp_term = exp(-dt/tau)
_a_term = (_sin_term-_sin_term*_exp_term)
_const_term = -b*_exp_term + b
#v[:] = numexpr.evaluate('a*_a_term+v*_exp_term+_const_term')
numexpr.evaluate('a*_a_term+v*_exp_term+_const_term', out=v)
def timefunc_weave(*args):
code = '''
// %s
int N = _N;
for(int _idx=0; _idx<N; _idx++)
{
double a = _array_neurongroup_a[_idx];
double v = _array_neurongroup_v[_idx];
double _v = a*sin(2.0*freq*pi*t) + b + v*exp(-dt/tau) + (-a*sin(2.0*freq*pi*t) - b)*exp(-dt/tau);
v = _v;
_array_neurongroup_v[_idx] = v;
}
''' % str(args)
weave.inline(code, ns.keys(), ns, compiler='gcc', extra_compile_args=list(args))
def timefunc_weave_slow():
timefunc_weave('-O3', '-march=native')
def timefunc_weave_fast():
timefunc_weave('-O3', '-march=native', '-ffast-math')
def get_theano_func():
a = tt.dvector('a')
v = tt.dvector('v')
freq = tt.dscalar('freq')
t = tt.dscalar('t')
dt = tt.dscalar('dt')
tau = tt.dscalar('tau')
return theano.function([a, v, freq, t, dt, tau],
a*tt.sin(2.0*freq*pi*t) + b + v*tt.exp(-dt/tau) + (-a*tt.sin(2.0*freq*pi*t) - b)*tt.exp(-dt/tau))
# return theano.function([a, v],
# a*tt.sin(2.0*freq*pi*t) + b + v*tt.exp(-dt/tau) + (-a*tt.sin(2.0*freq*pi*t) - b)*tt.exp(-dt/tau))
theano.config.gcc.cxxflags = '-O3 -ffast-math'
theano_func = get_theano_func()
#print theano.pp(theano_func.maker.fgraph.outputs[0])
#print
#theano.printing.debugprint(theano_func.maker.fgraph.outputs[0])
#theano.printing.pydotprint(theano_func, 'func.png')
#exit()
def timefunc_theano():
v[:] = theano_func(a, v, freq, t, dt, tau)
def dotimeit(f):
v[:] = 1
f()
print '%s: %.2f' % (f.__name__.replace('timefunc_', ''),
timeit.timeit(f.__name__+'()', setup='from __main__ import '+f.__name__, number=100))
def check_values(f):
v[:] = 1
v[:5] = linspace(0, 1, 5)
f()
print '%s: %s' % (f.__name__.replace('timefunc_', ''), v[:5])
if __name__=='__main__':
funcs = [#timefunc_cython_inline,
timefunc_cython_modified_inline,
timefunc_numpy,
timefunc_numpy_smart,
timefunc_numpy_blocked,
timefunc_numexpr,
timefunc_numexpr_smart,
timefunc_weave_slow,
timefunc_weave_fast,
timefunc_theano,
]
if 1:
print 'Values'
print '======'
for f in funcs:
check_values(f)
print
if 1:
print 'Times'
print '====='
for f in funcs:
dotimeit(f)
| [] |
lordmahyar/az-iranian-bank-gateways | azbankgateways/views/__init__.py | e9eb7101f2b91318847d63d783c22c4a8d430ba3 | from .banks import callback_view, go_to_bank_gateway
from .samples import sample_payment_view, sample_result_view
| [] |
PowerDNS/exabgp | dev/unittest/update.py | bbf69f25853e10432fbe588b5bc2f8d9f1e5dda2 | #!/usr/bin/env python
# encoding: utf-8
"""
update.py
Created by Thomas Mangin on 2009-09-06.
Copyright (c) 2009-2013 Exa Networks. All rights reserved.
"""
import unittest
from exabgp.configuration.environment import environment
env = environment.setup('')
from exabgp.bgp.message.update.update import *
from exabgp.bgp.message.update.attribute.community import to_Community
from exabgp.bgp.message.update.attribute.community import Community, Communities
class TestData (unittest.TestCase):
def test_2_prefix (self):
self.assertEqual(str(to_NLRI('10.0.0.0','24')),'10.0.0.0/24')
def test_6_prefix (self):
self.assertEqual(to_NLRI('1.2.3.4','0').pack(),''.join([chr(c) for c in [0,]]))
def test_7_prefix (self):
self.assertEqual(to_NLRI('1.2.3.4','8').pack(),''.join([chr(c) for c in [8,1,]]))
def test_8_prefix (self):
self.assertEqual(to_NLRI('1.2.3.4','16').pack(),''.join([chr(c) for c in [16,1,2]]))
def test_9_prefix (self):
self.assertEqual(to_NLRI('1.2.3.4','24').pack(),''.join([chr(c) for c in [24,1,2,3]]))
def test_10_prefix (self):
self.assertEqual(to_NLRI('1.2.3.4','32').pack(),''.join([chr(c) for c in [32,1,2,3,4]]))
def test_1_community (self):
self.assertEqual(Community(256),256)
def test_2_community (self):
self.assertEqual(to_Community('0x100'),256)
def test_3_community (self):
self.assertEqual(to_Community('1:1'),65537)
def test_4_community (self):
communities = Communities()
community = to_Community('1:1')
communities.add(community)
self.assertEqual(communities.pack(),''.join([chr(c) for c in [0xc0,0x08,0x04,0x00,0x01,0x00,0x01]]))
def test_1_ipv4 (self):
header = ''.join([chr(c) for c in [0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x0, 0x22, 0x2]])
message = ''.join([chr(c) for c in [0x0, 0x0, 0x0, 0xb, 0x40, 0x1, 0x1, 0x0, 0x40, 0x2, 0x4, 0x2, 0x1, 0xfd, 0xe8, 0x18, 0xa, 0x0, 0x1]])
update = new_Update(message)
self.assertEqual(str(update.nlri[0]),'10.0.1.0/24')
def test_1_ipv6_1 (self):
header = ''.join([chr(c) for c in [0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x0, 0x47, 0x2]])
message = ''.join([chr(c) for c in [0x0, 0x0, 0x0, 0x30, 0x40, 0x1, 0x1, 0x0, 0x50, 0x2, 0x0, 0x4, 0x2, 0x1, 0xff, 0xfe, 0x80, 0x4, 0x4, 0x0, 0x0, 0x0, 0x0, 0x80, 0xe, 0x1a, 0x0, 0x2, 0x1, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x20, 0x12, 0x34, 0x56, 0x78]])
update = to_Update([],[to_NLRI('1234:5678::',32)])
self.assertEqual(str(update.nlri[0]),'1234:5678::/32')
def test_1_ipv6_2 (self):
route = RouteIP('1234:5678::',64)
route.next_hop = '8765:4321::1'
announced = route.announce(1,1)
message = announced[19:]
update = new_Update(message)
print update.nlri
print update.withdraw
print update.attributes[MPRNLRI.ID][0]
# def test_2_ipv4_broken (self):
# header = ''.join([chr(c) for c in h])
# message = ''.join([chr(c) for c in m])
# message = ''.join([chr(c) for c in [0x0, 0x0, 0x0, 0xf, 0x40, 0x1, 0x1, 0x0, 0x40, 0x2, 0x4, 0x2, 0x1, 0xfd, 0xe8, 0x0, 0x0, 0x0, 0x0, 0x18, 0xa, 0x0, 0x1]])
# update = new_Update(message)
if __name__ == '__main__':
unittest.main()
| [] |
WJ-Lai/NightFusion | nuscenes/eval/detection/evaluate.py | 1555692eceb6b85127d21cd43e6fc780b7f91ffd | # nuScenes dev-kit.
# Code written by Holger Caesar & Oscar Beijbom, 2018.
# Licensed under the Creative Commons [see licence.txt]
import argparse
import json
import os
import random
import time
from typing import Tuple, Dict, Any
import numpy as np
from nuscenes import NuScenes
from nuscenes.eval.detection.algo import accumulate, calc_ap, calc_tp
from nuscenes.eval.detection.config import config_factory
from nuscenes.eval.detection.constants import TP_METRICS
from nuscenes.eval.detection.data_classes import DetectionConfig, MetricDataList, DetectionMetrics, EvalBoxes
from nuscenes.eval.detection.loaders import load_prediction, load_gt, add_center_dist, filter_eval_boxes
from nuscenes.eval.detection.render import summary_plot, class_pr_curve, class_tp_curve, dist_pr_curve, visualize_sample
class NuScenesEval:
"""
This is the official nuScenes detection evaluation code.
Results are written to the provided output_dir.
nuScenes uses the following metrics:
- Mean Average Precision (mAP): Uses center-distance as matching criterion; averaged over distance thresholds.
- True Positive (TP) metrics: Average of translation, velocity, scale, orientation and attribute errors.
- nuScenes Detection Score (NDS): The weighted sum of the above.
    Here is an overview of the functions in this class:
    - init: Loads GT annotations and predictions stored in JSON format and filters the boxes.
- run: Performs evaluation and dumps the metric data to disk.
- render: Renders various plots and dumps to disk.
We assume that:
    - Every sample_token is given in the results, although there may be no predictions for that sample.
Please see https://github.com/nutonomy/nuscenes-devkit for more details.
"""
def __init__(self,
nusc: NuScenes,
config: DetectionConfig,
result_path: str,
eval_set: str,
output_dir: str = None,
verbose: bool = True):
"""
Initialize a NuScenesEval object.
:param nusc: A NuScenes object.
:param config: A DetectionConfig object.
:param result_path: Path of the nuScenes JSON result file.
:param eval_set: The dataset split to evaluate on, e.g. train or val.
:param output_dir: Folder to save plots and results to.
:param verbose: Whether to print to stdout.
"""
self.nusc = nusc
self.result_path = result_path
self.eval_set = eval_set
self.output_dir = output_dir
self.verbose = verbose
self.cfg = config
# Make dirs.
self.plot_dir = os.path.join(self.output_dir, 'plots')
if not os.path.isdir(self.output_dir):
os.makedirs(self.output_dir)
if not os.path.isdir(self.plot_dir):
os.makedirs(self.plot_dir)
# Load data.
self.pred_boxes, self.meta = load_prediction(self.result_path, self.cfg.max_boxes_per_sample, verbose=verbose)
self.gt_boxes = load_gt(self.nusc, self.eval_set, verbose=verbose)
assert set(self.pred_boxes.sample_tokens) == set(self.gt_boxes.sample_tokens), \
"Samples in split doesn't match samples in predictions."
# Add center distances.
self.pred_boxes = add_center_dist(nusc, self.pred_boxes)
self.gt_boxes = add_center_dist(nusc, self.gt_boxes)
# Filter boxes (distance, points per box, etc.).
if verbose:
print('Filtering predictions')
self.pred_boxes = filter_eval_boxes(nusc, self.pred_boxes, self.cfg.class_range, verbose=verbose)
if verbose:
print('Filtering ground truth annotations')
self.gt_boxes = filter_eval_boxes(nusc, self.gt_boxes, self.cfg.class_range, verbose=verbose)
self.sample_tokens = self.gt_boxes.sample_tokens
def evaluate(self) -> Tuple[DetectionMetrics, MetricDataList]:
"""
Performs the actual evaluation.
:return: A tuple of high-level and the raw metric data.
"""
start_time = time.time()
# -----------------------------------
# Step 1: Accumulate metric data for all classes and distance thresholds.
# -----------------------------------
if self.verbose:
print('Accumulating metric data')
metric_data_list = MetricDataList()
for class_name in self.cfg.class_names:
for dist_th in self.cfg.dist_ths:
md = accumulate(self.gt_boxes, self.pred_boxes, class_name, self.cfg.dist_fcn, dist_th)
metric_data_list.set(class_name, dist_th, md)
# -----------------------------------
# Step 2: Calculate metrics from the data.
# -----------------------------------
if self.verbose:
print('Calculating metrics')
metrics = DetectionMetrics(self.cfg)
for class_name in self.cfg.class_names:
for dist_th in self.cfg.dist_ths:
metric_data = metric_data_list[(class_name, dist_th)]
ap = calc_ap(metric_data, self.cfg.min_recall, self.cfg.min_precision)
metrics.add_label_ap(class_name, dist_th, ap)
for metric_name in TP_METRICS:
metric_data = metric_data_list[(class_name, self.cfg.dist_th_tp)]
if class_name in ['traffic_cone'] and metric_name in ['attr_err', 'vel_err', 'orient_err']:
tp = np.nan
elif class_name in ['barrier'] and metric_name in ['attr_err', 'vel_err']:
tp = np.nan
else:
tp = calc_tp(metric_data, self.cfg.min_recall, metric_name)
metrics.add_label_tp(class_name, metric_name, tp)
metrics.add_runtime(time.time() - start_time)
return metrics, metric_data_list
def render(self, metrics: DetectionMetrics, md_list: MetricDataList) -> None:
"""
Renders various PR and TP curves.
:param metrics: DetectionMetrics instance.
:param md_list: MetricDataList instance.
"""
def savepath(name):
return os.path.join(self.plot_dir, name + '.pdf')
summary_plot(md_list, metrics, min_precision=self.cfg.min_precision, min_recall=self.cfg.min_recall,
dist_th_tp=self.cfg.dist_th_tp, savepath=savepath('summary'))
for detection_name in self.cfg.class_names:
class_pr_curve(md_list, metrics, detection_name, self.cfg.min_precision, self.cfg.min_recall,
savepath=savepath(detection_name + '_pr'))
class_tp_curve(md_list, metrics, detection_name, self.cfg.min_recall, self.cfg.dist_th_tp,
savepath=savepath(detection_name + '_tp'))
for dist_th in self.cfg.dist_ths:
dist_pr_curve(md_list, metrics, dist_th, self.cfg.min_precision, self.cfg.min_recall,
savepath=savepath('dist_pr_' + str(dist_th)))
def main(self,
plot_examples: int = 0,
render_curves: bool = True) -> Dict[str, Any]:
"""
Main function that loads the evaluation code, visualizes samples, runs the evaluation and renders stat plots.
:param plot_examples: How many example visualizations to write to disk.
:param render_curves: Whether to render PR and TP curves to disk.
:return: A dict that stores the high-level metrics and meta data.
"""
if plot_examples > 0:
# Select a random but fixed subset to plot.
random.seed(43)
sample_tokens = list(self.sample_tokens)
random.shuffle(sample_tokens)
sample_tokens = sample_tokens[:plot_examples]
# Visualize samples.
example_dir = os.path.join(self.output_dir, 'examples')
if not os.path.isdir(example_dir):
os.mkdir(example_dir)
for sample_token in sample_tokens:
visualize_sample(self.nusc,
sample_token,
self.gt_boxes if self.eval_set != 'test' else EvalBoxes(),
# Don't render test GT.
self.pred_boxes,
eval_range=max(self.cfg.class_range.values()),
savepath=os.path.join(example_dir, '{}.png'.format(sample_token)))
# Run evaluation.
metrics, metric_data_list = self.evaluate()
# Render PR and TP curves.
if render_curves:
self.render(metrics, metric_data_list)
# Dump the metric data, meta and metrics to disk.
if self.verbose:
print('Saving metrics to: %s' % self.output_dir)
metrics_summary = metrics.serialize()
metrics_summary['meta'] = self.meta.copy()
with open(os.path.join(self.output_dir, 'metrics_summary.json'), 'w') as f:
json.dump(metrics_summary, f, indent=2)
with open(os.path.join(self.output_dir, 'metrics_details.json'), 'w') as f:
json.dump(metric_data_list.serialize(), f, indent=2)
# Print high-level metrics.
print('mAP: %.4f' % (metrics_summary['mean_ap']))
err_name_mapping = {
'trans_err': 'mATE',
'scale_err': 'mASE',
'orient_err': 'mAOE',
'vel_err': 'mAVE',
'attr_err': 'mAAE'
}
for tp_name, tp_val in metrics_summary['tp_errors'].items():
print('%s: %.4f' % (err_name_mapping[tp_name], tp_val))
print('NDS: %.4f' % (metrics_summary['nd_score']))
print('Eval time: %.1fs' % metrics_summary['eval_time'])
return metrics_summary
if __name__ == "__main__":
# Settings.
parser = argparse.ArgumentParser(description='Evaluate nuScenes result submission.',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('result_path', type=str, help='The submission as a JSON file.')
parser.add_argument('--output_dir', type=str, default='~/nuscenes-metrics',
help='Folder to store result metrics, graphs and example visualizations.')
parser.add_argument('--eval_set', type=str, default='val',
help='Which dataset split to evaluate on, train, val or test.')
parser.add_argument('--dataroot', type=str, default='/data/sets/nuscenes',
help='Default nuScenes data directory.')
parser.add_argument('--version', type=str, default='v1.0-trainval',
help='Which version of the nuScenes dataset to evaluate on, e.g. v1.0-trainval.')
parser.add_argument('--config_name', type=str, default='cvpr_2019',
help='Name of the configuration to use for evaluation, e.g. cvpr_2019.')
parser.add_argument('--plot_examples', type=int, default=10,
help='How many example visualizations to write to disk.')
parser.add_argument('--render_curves', type=int, default=1,
help='Whether to render PR and TP curves to disk.')
parser.add_argument('--verbose', type=int, default=1,
help='Whether to print to stdout.')
args = parser.parse_args()
result_path_ = os.path.expanduser(args.result_path)
output_dir_ = os.path.expanduser(args.output_dir)
eval_set_ = args.eval_set
dataroot_ = args.dataroot
version_ = args.version
config_name_ = args.config_name
plot_examples_ = args.plot_examples
render_curves_ = bool(args.render_curves)
verbose_ = bool(args.verbose)
cfg_ = config_factory(config_name_)
nusc_ = NuScenes(version=version_, verbose=verbose_, dataroot=dataroot_)
nusc_eval = NuScenesEval(nusc_, config=cfg_, result_path=result_path_, eval_set=eval_set_,
output_dir=output_dir_, verbose=verbose_)
nusc_eval.main(plot_examples=plot_examples_, render_curves=render_curves_)
| [((230, 13, 231, 92), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (), '', False, 'import argparse\n'), ((251, 19, 251, 55), 'os.path.expanduser', 'os.path.expanduser', ({(251, 38, 251, 54): 'args.result_path'}, {}), '(args.result_path)', False, 'import os\n'), ((252, 18, 252, 53), 'os.path.expanduser', 'os.path.expanduser', ({(252, 37, 252, 52): 'args.output_dir'}, {}), '(args.output_dir)', False, 'import os\n'), ((261, 11, 261, 39), 'nuscenes.eval.detection.config.config_factory', 'config_factory', ({(261, 26, 261, 38): 'config_name_'}, {}), '(config_name_)', False, 'from nuscenes.eval.detection.config import config_factory\n'), ((262, 12, 262, 76), 'nuscenes.NuScenes', 'NuScenes', (), '', False, 'from nuscenes import NuScenes\n'), ((67, 24, 67, 62), 'os.path.join', 'os.path.join', ({(67, 37, 67, 52): 'self.output_dir', (67, 54, 67, 61): '"""plots"""'}, {}), "(self.output_dir, 'plots')", False, 'import os\n'), ((74, 37, 74, 118), 'nuscenes.eval.detection.loaders.load_prediction', 'load_prediction', (), '', False, 'from nuscenes.eval.detection.loaders import load_prediction, load_gt, add_center_dist, filter_eval_boxes\n'), ((75, 24, 75, 74), 'nuscenes.eval.detection.loaders.load_gt', 'load_gt', (), '', False, 'from nuscenes.eval.detection.loaders import load_prediction, load_gt, add_center_dist, filter_eval_boxes\n'), ((81, 26, 81, 64), 'nuscenes.eval.detection.loaders.add_center_dist', 'add_center_dist', ({(81, 42, 81, 46): 'nusc', (81, 48, 81, 63): 'self.pred_boxes'}, {}), '(nusc, self.pred_boxes)', False, 'from nuscenes.eval.detection.loaders import load_prediction, load_gt, add_center_dist, filter_eval_boxes\n'), ((82, 24, 82, 60), 'nuscenes.eval.detection.loaders.add_center_dist', 'add_center_dist', ({(82, 40, 82, 44): 'nusc', (82, 46, 82, 59): 'self.gt_boxes'}, {}), '(nusc, self.gt_boxes)', False, 'from nuscenes.eval.detection.loaders import load_prediction, load_gt, add_center_dist, filter_eval_boxes\n'), ((87, 26, 87, 105), 'nuscenes.eval.detection.loaders.filter_eval_boxes', 'filter_eval_boxes', (), '', False, 'from nuscenes.eval.detection.loaders import load_prediction, load_gt, add_center_dist, filter_eval_boxes\n'), ((90, 24, 90, 101), 'nuscenes.eval.detection.loaders.filter_eval_boxes', 'filter_eval_boxes', (), '', False, 'from nuscenes.eval.detection.loaders import load_prediction, load_gt, add_center_dist, filter_eval_boxes\n'), ((100, 21, 100, 32), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((107, 27, 107, 43), 'nuscenes.eval.detection.data_classes.MetricDataList', 'MetricDataList', ({}, {}), '()', False, 'from nuscenes.eval.detection.data_classes import DetectionConfig, MetricDataList, DetectionMetrics, EvalBoxes\n'), ((118, 18, 118, 44), 'nuscenes.eval.detection.data_classes.DetectionMetrics', 'DetectionMetrics', ({(118, 35, 118, 43): 'self.cfg'}, {}), '(self.cfg)', False, 'from nuscenes.eval.detection.data_classes import DetectionConfig, MetricDataList, DetectionMetrics, EvalBoxes\n'), ((68, 15, 68, 45), 'os.path.isdir', 'os.path.isdir', ({(68, 29, 68, 44): 'self.output_dir'}, {}), '(self.output_dir)', False, 'import os\n'), ((69, 12, 69, 40), 'os.makedirs', 'os.makedirs', ({(69, 24, 69, 39): 'self.output_dir'}, {}), '(self.output_dir)', False, 'import os\n'), ((70, 15, 70, 43), 'os.path.isdir', 'os.path.isdir', ({(70, 29, 70, 42): 'self.plot_dir'}, {}), '(self.plot_dir)', False, 'import os\n'), ((71, 12, 71, 38), 'os.makedirs', 'os.makedirs', ({(71, 24, 71, 37): 'self.plot_dir'}, {}), '(self.plot_dir)', False, 'import os\n'), ((147, 
19, 147, 61), 'os.path.join', 'os.path.join', ({(147, 32, 147, 45): 'self.plot_dir', (147, 47, 147, 60): "(name + '.pdf')"}, {}), "(self.plot_dir, name + '.pdf')", False, 'import os\n'), ((175, 12, 175, 27), 'random.seed', 'random.seed', ({(175, 24, 175, 26): '(43)'}, {}), '(43)', False, 'import random\n'), ((177, 12, 177, 41), 'random.shuffle', 'random.shuffle', ({(177, 27, 177, 40): 'sample_tokens'}, {}), '(sample_tokens)', False, 'import random\n'), ((181, 26, 181, 67), 'os.path.join', 'os.path.join', ({(181, 39, 181, 54): 'self.output_dir', (181, 56, 181, 66): '"""examples"""'}, {}), "(self.output_dir, 'examples')", False, 'import os\n'), ((206, 12, 206, 51), 'json.dump', 'json.dump', (), '', False, 'import json\n'), ((110, 21, 110, 103), 'nuscenes.eval.detection.algo.accumulate', 'accumulate', ({(110, 32, 110, 45): 'self.gt_boxes', (110, 47, 110, 62): 'self.pred_boxes', (110, 64, 110, 74): 'class_name', (110, 76, 110, 93): 'self.cfg.dist_fcn', (110, 95, 110, 102): 'dist_th'}, {}), '(self.gt_boxes, self.pred_boxes, class_name, self.cfg.dist_fcn,\n dist_th)', False, 'from nuscenes.eval.detection.algo import accumulate, calc_ap, calc_tp\n'), ((122, 21, 122, 86), 'nuscenes.eval.detection.algo.calc_ap', 'calc_ap', ({(122, 29, 122, 40): 'metric_data', (122, 42, 122, 61): 'self.cfg.min_recall', (122, 63, 122, 85): 'self.cfg.min_precision'}, {}), '(metric_data, self.cfg.min_recall, self.cfg.min_precision)', False, 'from nuscenes.eval.detection.algo import accumulate, calc_ap, calc_tp\n'), ((135, 28, 135, 39), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((182, 19, 182, 45), 'os.path.isdir', 'os.path.isdir', ({(182, 33, 182, 44): 'example_dir'}, {}), '(example_dir)', False, 'import os\n'), ((183, 16, 183, 37), 'os.mkdir', 'os.mkdir', ({(183, 25, 183, 36): 'example_dir'}, {}), '(example_dir)', False, 'import os\n'), ((205, 18, 205, 71), 'os.path.join', 'os.path.join', ({(205, 31, 205, 46): 'self.output_dir', (205, 48, 205, 70): '"""metrics_summary.json"""'}, {}), "(self.output_dir, 'metrics_summary.json')", False, 'import os\n'), ((207, 18, 207, 71), 'os.path.join', 'os.path.join', ({(207, 31, 207, 46): 'self.output_dir', (207, 48, 207, 70): '"""metrics_details.json"""'}, {}), "(self.output_dir, 'metrics_details.json')", False, 'import os\n'), ((132, 25, 132, 79), 'nuscenes.eval.detection.algo.calc_tp', 'calc_tp', ({(132, 33, 132, 44): 'metric_data', (132, 46, 132, 65): 'self.cfg.min_recall', (132, 67, 132, 78): 'metric_name'}, {}), '(metric_data, self.cfg.min_recall, metric_name)', False, 'from nuscenes.eval.detection.algo import accumulate, calc_ap, calc_tp\n'), ((187, 79, 187, 90), 'nuscenes.eval.detection.data_classes.EvalBoxes', 'EvalBoxes', ({}, {}), '()', False, 'from nuscenes.eval.detection.data_classes import DetectionConfig, MetricDataList, DetectionMetrics, EvalBoxes\n')] |
aberent/api-client | tests/get_problem_atcoder.py | 845e5f1daa02cc7fee5a65234a24bb59a7b71083 | import unittest
from onlinejudge_api.main import main
class DownloadAtCoderTest(unittest.TestCase):
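    """Tests for `get-problem` against AtCoder, covering old- and new-style
    problem pages, empty sample outputs, unusual markup, and error cases."""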
def test_icpc2013spring_a(self):
"""This problem contains both words `Input` and `Output` for the headings for sample outputs.
"""
url = 'http://jag2013spring.contest.atcoder.jp/tasks/icpc2013spring_a'
expected = {
"status": "ok",
"messages": [],
"result": {
"url": "https://atcoder.jp/contests/jag2013spring/tasks/icpc2013spring_a",
"tests": [{
"input": "2 2\n2 \n1 >= 3\n2 <= 5\n2\n1 >= 4\n2 >= 3\n",
"output": "Yes\n"
}, {
"input": "2 2\n2 \n1 >= 5\n2 >= 5\n2\n1 <= 4\n2 <= 3\n",
"output": "Yes\n"
}, {
"input": "2 2\n2 \n1 >= 3\n2 <= 3\n2\n1 <= 2\n2 >= 5\n",
"output": "No\n"
}, {
"input": "1 2\n2\n1 <= 10\n1 >= 15\n",
"output": "No\n"
}, {
"input": "5 5\n3\n2 <= 1\n3 <= 1\n4 <= 1\n4\n2 >= 2\n3 <= 1\n4 <= 1\n5 <= 1\n3\n3 >= 2\n4 <= 1\n5 <= 1\n2\n4 >= 2\n5 <= 1\n1\n5 >= 2 \n",
"output": "Yes\n"
}],
"name": "Everlasting Zero",
"context": {
"contest": {
"name": "Japan Alumni Group Spring Contest 2013",
"url": "https://atcoder.jp/contests/jag2013spring"
},
"alphabet": "A"
},
"memoryLimit": 128,
"timeLimit": 5000
},
}
actual = main(['get-problem', url], debug=True)
self.assertEqual(expected, actual)
def test_arc035_a(self):
"""This problem uses <code> tags in the descriptoin text in the sample section.
"""
url = 'http://arc035.contest.atcoder.jp/tasks/arc035_a'
expected = {
"status": "ok",
"messages": [],
"result": {
"url": "https://atcoder.jp/contests/arc035/tasks/arc035_a",
"tests": [{
"input": "ab*\n",
"output": "YES\n"
}, {
"input": "abc\n",
"output": "NO\n"
}, {
"input": "a*bc*\n",
"output": "YES\n"
}, {
"input": "***\n",
"output": "YES\n"
}],
"name": "\u9ad8\u6a4b\u304f\u3093\u3068\u56de\u6587",
"context": {
"contest": {
"name": "AtCoder Regular Contest 035",
"url": "https://atcoder.jp/contests/arc035"
},
"alphabet": "A"
},
"memoryLimit": 256,
"timeLimit": 2000
},
}
actual = main(['get-problem', url], debug=True)
self.assertEqual(expected, actual)
def test_abc114_c(self):
"""This tests a problem which uses a new-style format HTML.
"""
url = 'https://atcoder.jp/contests/abc114/tasks/abc114_c'
expected = {
"status": "ok",
"messages": [],
"result": {
"url": "https://atcoder.jp/contests/abc114/tasks/abc114_c",
"tests": [{
"input": "575\n",
"output": "4\n"
}, {
"input": "3600\n",
"output": "13\n"
}, {
"input": "999999999\n",
"output": "26484\n"
}],
"name": "755",
"context": {
"contest": {
"name": "AtCoder Beginner Contest 114",
"url": "https://atcoder.jp/contests/abc114"
},
"alphabet": "C"
},
"memoryLimit": 1024,
"timeLimit": 2000
},
}
actual = main(['get-problem', url], debug=True)
self.assertEqual(expected, actual)
def test_call_download_atcoder_abc003_4(self):
"""This tests a problem which uses an old-style format HTML.
"""
url = 'https://atcoder.jp/contests/abc003/tasks/abc003_4'
expected = {
"status": "ok",
"messages": [],
"result": {
"url": "https://atcoder.jp/contests/abc003/tasks/abc003_4",
"tests": [{
"input": "3 2\n2 2\n2 2\n",
"output": "12\n"
}, {
"input": "4 5\n3 1\n3 0\n",
"output": "10\n"
}, {
"input": "23 18\n15 13\n100 95\n",
"output": "364527243\n"
}, {
"input": "30 30\n24 22\n145 132\n",
"output": "976668549\n"
}],
"name": "AtCoder\u793e\u306e\u51ac",
"context": {
"contest": {
"name": "AtCoder Beginner Contest 003",
"url": "https://atcoder.jp/contests/abc003"
},
"alphabet": "D"
},
"memoryLimit": 64,
"timeLimit": 2000
},
}
actual = main(['get-problem', url], debug=True)
self.assertEqual(expected, actual)
def test_agc036_b(self):
"""In this problem, a sample output is empty.
"""
url = 'https://atcoder.jp/contests/agc036/tasks/agc036_b'
expected = {
"status": "ok",
"messages": [],
"result": {
"url": "https://atcoder.jp/contests/agc036/tasks/agc036_b",
"tests": [{
"input": "3 2\n1 2 3\n",
"output": "2 3\n"
}, {
"input": "5 10\n1 2 3 2 3\n",
"output": "3\n"
}, {
"input": "6 1000000000000\n1 1 2 2 3 3\n",
"output": "\n"
}, {
"input": "11 97\n3 1 4 1 5 9 2 6 5 3 5\n",
"output": "9 2 6\n"
}],
"name": "Do Not Duplicate",
"context": {
"contest": {
"name": "AtCoder Grand Contest 036",
"url": "https://atcoder.jp/contests/agc036"
},
"alphabet": "B"
},
"memoryLimit": 1024,
"timeLimit": 2000
},
}
actual = main(['get-problem', url], debug=True)
self.assertEqual(expected, actual)
def test_tenka1_2014_qualA_e(self):
"""This problem uses an unusual HTML markup.
.. seealso::
https://github.com/kmyk/online-judge-tools/issues/618
"""
url = 'https://atcoder.jp/contests/tenka1-2014-quala/tasks/tenka1_2014_qualA_e'
expected = {
"status": "ok",
"messages": [],
"result": {
"url": "https://atcoder.jp/contests/tenka1-2014-quala/tasks/tenka1_2014_qualA_e",
"tests": [{
"input": "5 3\nAAB\nABB\nCDE\nFFH\nGHH\n2\n1 1\n2 3\n",
"output": "15\n7\n"
}, {
"input": "2 2\nAB\nBA\n2\n1 1\n2 1\n",
"output": "2\n2\n"
}, {
"input": "5 5\nAABAA\nACDEA\nAFGHA\nAIJKA\nAAAAA\n1\n3 1\n",
"output": "25\n"
}],
"name": "\u30d1\u30ba\u30eb\u306e\u79fb\u52d5",
"context": {
"contest": {
"name": "\u5929\u4e0b\u4e00\u30d7\u30ed\u30b0\u30e9\u30de\u30fc\u30b3\u30f3\u30c6\u30b9\u30c82014\u4e88\u9078A",
"url": "https://atcoder.jp/contests/tenka1-2014-quala"
},
"alphabet": "E"
},
"memoryLimit": 256,
"timeLimit": 5000
},
}
actual = main(['get-problem', url], debug=True)
self.assertEqual(expected, actual)
def test_non_existing_problem(self):
"""This tests an non-existing problem.
"""
url = 'http://abc001.contest.atcoder.jp/tasks/abc001_100'
expected = {
"status": "error",
"messages": ["requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://atcoder.jp/contests/abc001/tasks/abc001_100"],
"result": None,
}
actual = main(['get-problem', url], debug=True)
self.assertEqual(expected, actual)
def test_impossible_problem(self):
"""This tests a problem impossible to parse sample cases.
"""
url = 'https://chokudai001.contest.atcoder.jp/tasks/chokudai_001_a'
expected = {
"status": "error",
"messages": ["onlinejudge.type.SampleParseError: failed to parse samples"],
"result": None,
}
actual = main(['get-problem', url], debug=True)
self.assertEqual(expected, actual)
| [((45, 17, 45, 55), 'onlinejudge_api.main.main', 'main', (), '', False, 'from onlinejudge_api.main import main\n'), ((83, 17, 83, 55), 'onlinejudge_api.main.main', 'main', (), '', False, 'from onlinejudge_api.main import main\n'), ((118, 17, 118, 55), 'onlinejudge_api.main.main', 'main', (), '', False, 'from onlinejudge_api.main import main\n'), ((156, 17, 156, 55), 'onlinejudge_api.main.main', 'main', (), '', False, 'from onlinejudge_api.main import main\n'), ((194, 17, 194, 55), 'onlinejudge_api.main.main', 'main', (), '', False, 'from onlinejudge_api.main import main\n'), ((232, 17, 232, 55), 'onlinejudge_api.main.main', 'main', (), '', False, 'from onlinejudge_api.main import main\n'), ((245, 17, 245, 55), 'onlinejudge_api.main.main', 'main', (), '', False, 'from onlinejudge_api.main import main\n'), ((258, 17, 258, 55), 'onlinejudge_api.main.main', 'main', (), '', False, 'from onlinejudge_api.main import main\n')] |
UMCollab/ODM | odm/libexec/odm_tenant.py | 95da49939dbcd54318a58a132aa76725fd9c0b5f | #!/usr/bin/env python3
# This file is part of ODM and distributed under the terms of the
# MIT license. See COPYING.
import json
import sys
import odm.cli
def main():
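    """Run the requested tenant-level listing (users, sites or groups)
    and print the result as JSON, exiting non-zero on an unknown action."""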
cli = odm.cli.CLI(['action'])
client = cli.client
if cli.args.action == 'list-users':
print(json.dumps(client.list_users(), indent=2))
elif cli.args.action == 'list-sites':
print(json.dumps(client.list_sites(), indent=2))
elif cli.args.action == 'list-groups':
print(json.dumps(client.list_groups(), indent=2))
else:
print('Unsupported action {}'.format(cli.args.action), file=sys.stderr)
sys.exit(1)
if __name__ == '__main__':
main()
| [((27, 8, 27, 19), 'sys.exit', 'sys.exit', ({(27, 17, 27, 18): '(1)'}, {}), '(1)', False, 'import sys\n')] |
quaresmajose/tools-python | tests/test_tag_value_parser.py | 53c917a1a2491a373efa23e4ef8570b5e863fabc | # Copyright (c) 2014 Ahmed H. Ismail
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from unittest import TestCase
import spdx
from spdx.parsers.tagvalue import Parser
from spdx.parsers.lexers.tagvalue import Lexer
from spdx.parsers.tagvaluebuilders import Builder
from spdx.parsers.loggers import StandardLogger
from spdx.version import Version
class TestLexer(TestCase):
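    """Exercises the tag/value Lexer on representative SPDX 2.1 snippets
    (document info, external refs, creation info, reviews and packages)."""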
maxDiff = None
def setUp(self):
self.l = Lexer()
self.l.build()
def test_document(self):
data = '''
SPDXVersion: SPDX-2.1
# Comment.
DataLicense: CC0-1.0
DocumentName: Sample_Document-V2.1
SPDXID: SPDXRef-DOCUMENT
DocumentNamespace: https://spdx.org/spdxdocs/spdx-example-444504E0-4F89-41D3-9A0C-0305E82C3301
DocumentComment: <text>This is a sample spreadsheet</text>
'''
self.l.input(data)
self.token_assert_helper(self.l.token(), 'DOC_VERSION', 'SPDXVersion', 2)
self.token_assert_helper(self.l.token(), 'LINE', 'SPDX-2.1', 2)
self.token_assert_helper(self.l.token(), 'DOC_LICENSE', 'DataLicense', 4)
self.token_assert_helper(self.l.token(), 'LINE', 'CC0-1.0', 4)
self.token_assert_helper(self.l.token(), 'DOC_NAME', 'DocumentName', 5)
self.token_assert_helper(self.l.token(), 'LINE', 'Sample_Document-V2.1',
5)
self.token_assert_helper(self.l.token(), 'SPDX_ID', 'SPDXID', 6)
self.token_assert_helper(self.l.token(), 'LINE', 'SPDXRef-DOCUMENT', 6)
self.token_assert_helper(self.l.token(), 'DOC_NAMESPACE',
'DocumentNamespace', 7)
self.token_assert_helper(self.l.token(), 'LINE',
'https://spdx.org/spdxdocs/spdx-example-444504E0-4F89-41D3-9A0C-0305E82C3301',
7)
self.token_assert_helper(self.l.token(), 'DOC_COMMENT', 'DocumentComment', 8)
self.token_assert_helper(self.l.token(), 'TEXT', '<text>This is a sample spreadsheet</text>', 8)
def test_external_document_references(self):
data = '''
ExternalDocumentRef:DocumentRef-spdx-tool-2.1 http://spdx.org/spdxdocs/spdx-tools-v2.1-3F2504E0-4F89-41D3-9A0C-0305E82C3301 SHA1: d6a770ba38583ed4bb4525bd96e50461655d2759
'''
self.l.input(data)
self.token_assert_helper(self.l.token(), 'EXT_DOC_REF',
'ExternalDocumentRef', 2)
self.token_assert_helper(self.l.token(), 'DOC_REF_ID',
'DocumentRef-spdx-tool-2.1', 2)
self.token_assert_helper(self.l.token(), 'DOC_URI',
'http://spdx.org/spdxdocs/spdx-tools-v2.1-3F25'
'04E0-4F89-41D3-9A0C-0305E82C3301', 2)
self.token_assert_helper(self.l.token(), 'EXT_DOC_REF_CHKSUM',
'SHA1: '
'd6a770ba38583ed4bb4525bd96e50461655d2759', 2)
def test_creation_info(self):
data = '''
## Creation Information
Creator: Person: Gary O'Neall
Creator: Organization: Source Auditor Inc.
Creator: Tool: SourceAuditor-V1.2
Created: 2010-02-03T00:00:00Z
CreatorComment: <text>This is an example of an SPDX
spreadsheet format</text>
'''
self.l.input(data)
self.token_assert_helper(self.l.token(), 'CREATOR', 'Creator', 3)
self.token_assert_helper(self.l.token(), 'PERSON_VALUE', "Person: Gary O'Neall", 3)
self.token_assert_helper(self.l.token(), 'CREATOR', 'Creator', 4)
self.token_assert_helper(self.l.token(), 'ORG_VALUE', 'Organization: Source Auditor Inc.', 4)
self.token_assert_helper(self.l.token(), 'CREATOR', 'Creator', 5)
self.token_assert_helper(self.l.token(), 'TOOL_VALUE', 'Tool: SourceAuditor-V1.2', 5)
self.token_assert_helper(self.l.token(), 'CREATED', 'Created', 6)
self.token_assert_helper(self.l.token(), 'DATE', '2010-02-03T00:00:00Z', 6)
def test_review_info(self):
data = '''
Reviewer: Person: Joe Reviewer
ReviewDate: 2010-02-10T00:00:00Z
ReviewComment: <text>This is just an example.
Some of the non-standard licenses look like they are actually
BSD 3 clause licenses</text>
'''
self.l.input(data)
self.token_assert_helper(self.l.token(), 'REVIEWER', 'Reviewer', 2)
self.token_assert_helper(self.l.token(), 'PERSON_VALUE', "Person: Joe Reviewer", 2)
self.token_assert_helper(self.l.token(), 'REVIEW_DATE', 'ReviewDate', 3)
self.token_assert_helper(self.l.token(), 'DATE', '2010-02-10T00:00:00Z', 3)
self.token_assert_helper(self.l.token(), 'REVIEW_COMMENT', 'ReviewComment', 4)
self.token_assert_helper(self.l.token(), 'TEXT', '''<text>This is just an example.
Some of the non-standard licenses look like they are actually
BSD 3 clause licenses</text>''', 4)
def test_pacakage(self):
data = '''
SPDXID: SPDXRef-Package
FilesAnalyzed: False
PackageChecksum: SHA1: 2fd4e1c67a2d28fced849ee1bb76e7391b93eb12
PackageVerificationCode: 4e3211c67a2d28fced849ee1bb76e7391b93feba (SpdxTranslatorSpdx.rdf, SpdxTranslatorSpdx.txt)
ExternalRef: SECURITY cpe23Type cpe:2.3:a:pivotal_software:spring_framework:4.1.0:*:*:*:*:*:*:
ExternalRefComment: <text>Some comment about the package.</text>
'''
self.l.input(data)
self.token_assert_helper(self.l.token(), 'SPDX_ID', 'SPDXID', 2)
self.token_assert_helper(self.l.token(), 'LINE', 'SPDXRef-Package', 2)
self.token_assert_helper(self.l.token(), 'PKG_FILES_ANALYZED', 'FilesAnalyzed', 3)
self.token_assert_helper(self.l.token(), 'LINE', 'False', 3)
self.token_assert_helper(self.l.token(), 'PKG_CHKSUM', 'PackageChecksum', 4)
self.token_assert_helper(self.l.token(), 'CHKSUM', 'SHA1: 2fd4e1c67a2d28fced849ee1bb76e7391b93eb12', 4)
self.token_assert_helper(self.l.token(), 'PKG_VERF_CODE', 'PackageVerificationCode', 5)
self.token_assert_helper(self.l.token(), 'LINE', '4e3211c67a2d28fced849ee1bb76e7391b93feba (SpdxTranslatorSpdx.rdf, SpdxTranslatorSpdx.txt)', 5)
self.token_assert_helper(self.l.token(), 'PKG_EXT_REF', 'ExternalRef', 6)
self.token_assert_helper(self.l.token(), 'LINE', 'SECURITY cpe23Type cpe:2.3:a:pivotal_software:spring_framework:4.1.0:*:*:*:*:*:*:', 6)
self.token_assert_helper(self.l.token(), 'PKG_EXT_REF_COMMENT', 'ExternalRefComment', 7)
self.token_assert_helper(self.l.token(), 'TEXT', '<text>Some comment about the package.</text>', 7)
def test_unknown_tag(self):
data = '''
SomeUnknownTag: SomeUnknownValue
'''
self.l.input(data)
self.token_assert_helper(self.l.token(), 'UNKNOWN_TAG', 'SomeUnknownTag', 2)
self.token_assert_helper(self.l.token(), 'LINE', 'SomeUnknownValue', 2)
def test_snippet(self):
data = '''
SnippetSPDXID: SPDXRef-Snippet
SnippetLicenseComments: <text>Some lic comment.</text>
SnippetCopyrightText: <text>Some cr text.</text>
SnippetComment: <text>Some snippet comment.</text>
SnippetName: from linux kernel
SnippetFromFileSPDXID: SPDXRef-DoapSource
SnippetLicenseConcluded: Apache-2.0
LicenseInfoInSnippet: Apache-2.0
'''
self.l.input(data)
self.token_assert_helper(self.l.token(), 'SNIPPET_SPDX_ID', 'SnippetSPDXID', 2)
self.token_assert_helper(self.l.token(), 'LINE', 'SPDXRef-Snippet', 2)
self.token_assert_helper(self.l.token(), 'SNIPPET_LICS_COMMENT', 'SnippetLicenseComments', 3)
self.token_assert_helper(self.l.token(), 'TEXT', '<text>Some lic comment.</text>', 3)
self.token_assert_helper(self.l.token(), 'SNIPPET_CR_TEXT', 'SnippetCopyrightText', 4)
self.token_assert_helper(self.l.token(), 'TEXT', '<text>Some cr text.</text>', 4)
self.token_assert_helper(self.l.token(), 'SNIPPET_COMMENT', 'SnippetComment', 5)
self.token_assert_helper(self.l.token(), 'TEXT', '<text>Some snippet comment.</text>', 5)
self.token_assert_helper(self.l.token(), 'SNIPPET_NAME', 'SnippetName', 6)
self.token_assert_helper(self.l.token(), 'LINE', 'from linux kernel', 6)
self.token_assert_helper(self.l.token(), 'SNIPPET_FILE_SPDXID',
'SnippetFromFileSPDXID', 7)
self.token_assert_helper(self.l.token(), 'LINE', 'SPDXRef-DoapSource', 7)
self.token_assert_helper(self.l.token(), 'SNIPPET_LICS_CONC',
'SnippetLicenseConcluded', 8)
self.token_assert_helper(self.l.token(), 'LINE', 'Apache-2.0', 8)
self.token_assert_helper(self.l.token(), 'SNIPPET_LICS_INFO',
'LicenseInfoInSnippet', 9)
self.token_assert_helper(self.l.token(), 'LINE', 'Apache-2.0', 9)
def token_assert_helper(self, token, ttype, value, line):
assert token.type == ttype
assert token.value == value
assert token.lineno == line
class TestParser(TestCase):
maxDiff = None
document_str = '\n'.join([
'SPDXVersion: SPDX-2.1',
'DataLicense: CC0-1.0',
'DocumentName: Sample_Document-V2.1',
'SPDXID: SPDXRef-DOCUMENT',
'DocumentComment: <text>Sample Comment</text>',
'DocumentNamespace: https://spdx.org/spdxdocs/spdx-example-444504E0-4F89-41D3-9A0C-0305E82C3301'
])
creation_str = '\n'.join([
'Creator: Person: Bob ([email protected])',
'Creator: Organization: Acme.',
'Created: 2010-02-03T00:00:00Z',
'CreatorComment: <text>Sample Comment</text>'
])
review_str = '\n'.join([
'Reviewer: Person: Bob the Reviewer',
'ReviewDate: 2010-02-10T00:00:00Z',
'ReviewComment: <text>Bob was Here.</text>',
'Reviewer: Person: Alice the Reviewer',
'ReviewDate: 2011-02-10T00:00:00Z',
'ReviewComment: <text>Alice was also here.</text>'
])
package_str = '\n'.join([
'PackageName: Test',
'SPDXID: SPDXRef-Package',
'PackageVersion: Version 0.9.2',
'PackageDownloadLocation: http://example.com/test',
'FilesAnalyzed: True',
'PackageSummary: <text>Test package</text>',
'PackageSourceInfo: <text>Version 1.0 of test</text>',
'PackageFileName: test-1.0.zip',
'PackageSupplier: Organization:ACME',
'PackageOriginator: Organization:ACME',
'PackageChecksum: SHA1: 2fd4e1c67a2d28fced849ee1bb76e7391b93eb12',
'PackageVerificationCode: 4e3211c67a2d28fced849ee1bb76e7391b93feba (something.rdf, something.txt)',
'PackageDescription: <text>A package.</text>',
'PackageComment: <text>Comment on the package.</text>',
'PackageCopyrightText: <text> Copyright 2014 Acme Inc.</text>',
'PackageLicenseDeclared: Apache-2.0',
'PackageLicenseConcluded: (LicenseRef-2.0 and Apache-2.0)',
'PackageLicenseInfoFromFiles: Apache-1.0',
'PackageLicenseInfoFromFiles: Apache-2.0',
'PackageLicenseComments: <text>License Comments</text>',
'ExternalRef: SECURITY cpe23Type cpe:2.3:a:pivotal_software:spring_framework:4.1.0:*:*:*:*:*:*:',
'ExternalRefComment: <text>Some comment about the package.</text>'
])
file_str = '\n'.join([
'FileName: testfile.java',
'SPDXID: SPDXRef-File',
'FileType: SOURCE',
'FileChecksum: SHA1: 2fd4e1c67a2d28fced849ee1bb76e7391b93eb12',
'LicenseConcluded: Apache-2.0',
'LicenseInfoInFile: Apache-2.0',
'FileCopyrightText: <text>Copyright 2014 Acme Inc.</text>',
'ArtifactOfProjectName: AcmeTest',
'ArtifactOfProjectHomePage: http://www.acme.org/',
'ArtifactOfProjectURI: http://www.acme.org/',
'FileComment: <text>Very long file</text>'
])
unknown_tag_str = 'SomeUnknownTag: SomeUnknownValue'
snippet_str = '\n'.join([
'SnippetSPDXID: SPDXRef-Snippet',
'SnippetLicenseComments: <text>Some lic comment.</text>',
'SnippetCopyrightText: <text> Copyright 2008-2010 John Smith </text>',
'SnippetComment: <text>Some snippet comment.</text>',
'SnippetName: from linux kernel',
'SnippetFromFileSPDXID: SPDXRef-DoapSource',
'SnippetLicenseConcluded: Apache-2.0',
'LicenseInfoInSnippet: Apache-2.0',
])
complete_str = '{0}\n{1}\n{2}\n{3}\n{4}\n{5}'.format(document_str, creation_str, review_str, package_str, file_str, snippet_str)
def setUp(self):
self.p = Parser(Builder(), StandardLogger())
self.p.build()
def test_doc(self):
document, error = self.p.parse(self.complete_str)
assert document is not None
assert not error
assert document.version == Version(major=2, minor=1)
assert document.data_license.identifier == 'CC0-1.0'
assert document.name == 'Sample_Document-V2.1'
assert document.spdx_id == 'SPDXRef-DOCUMENT'
assert document.comment == 'Sample Comment'
assert document.namespace == 'https://spdx.org/spdxdocs/spdx-example-444504E0-4F89-41D3-9A0C-0305E82C3301'
def test_creation_info(self):
document, error = self.p.parse(self.complete_str)
assert document is not None
assert not error
assert len(document.creation_info.creators) == 2
assert document.creation_info.comment == 'Sample Comment'
assert (document.creation_info.created_iso_format == '2010-02-03T00:00:00Z')
def test_review(self):
document, error = self.p.parse(self.complete_str)
assert document is not None
assert not error
assert len(document.reviews) == 2
def test_package(self):
document, error = self.p.parse(self.complete_str)
assert document is not None
assert not error
assert document.package.name == 'Test'
assert document.package.spdx_id == 'SPDXRef-Package'
assert document.package.version == 'Version 0.9.2'
assert len(document.package.licenses_from_files) == 2
assert (document.package.conc_lics.identifier == 'LicenseRef-2.0 AND Apache-2.0')
assert document.package.files_analyzed == True
assert document.package.comment == 'Comment on the package.'
assert document.package.pkg_ext_refs[-1].category == 'SECURITY'
assert document.package.pkg_ext_refs[-1].pkg_ext_ref_type == 'cpe23Type'
assert document.package.pkg_ext_refs[-1].locator == 'cpe:2.3:a:pivotal_software:spring_framework:4.1.0:*:*:*:*:*:*:'
assert document.package.pkg_ext_refs[-1].comment == 'Some comment about the package.'
def test_file(self):
document, error = self.p.parse(self.complete_str)
assert document is not None
assert not error
assert len(document.package.files) == 1
spdx_file = document.package.files[0]
assert spdx_file.name == 'testfile.java'
assert spdx_file.spdx_id == 'SPDXRef-File'
assert spdx_file.type == spdx.file.FileType.SOURCE
assert len(spdx_file.artifact_of_project_name) == 1
assert len(spdx_file.artifact_of_project_home) == 1
assert len(spdx_file.artifact_of_project_uri) == 1
def test_unknown_tag(self):
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
saved_out = sys.stdout
sys.stdout = StringIO()
document, error = self.p.parse(self.unknown_tag_str)
self.assertEqual(sys.stdout.getvalue(), 'Found unknown tag : SomeUnknownTag at line: 1\n')
sys.stdout = saved_out
assert error
assert document is not None
def test_snippet(self):
document, error = self.p.parse(self.complete_str)
assert document is not None
assert not error
assert len(document.snippet) == 1
assert document.snippet[-1].spdx_id == 'SPDXRef-Snippet'
assert document.snippet[-1].name == 'from linux kernel'
assert document.snippet[-1].comment == 'Some snippet comment.'
assert document.snippet[-1].copyright == ' Copyright 2008-2010 John Smith '
assert document.snippet[-1].license_comment == 'Some lic comment.'
assert document.snippet[-1].snip_from_file_spdxid == 'SPDXRef-DoapSource'
assert document.snippet[-1].conc_lics.identifier == 'Apache-2.0'
assert document.snippet[-1].licenses_in_snippet[-1].identifier == 'Apache-2.0'
| [((27, 17, 27, 24), 'spdx.parsers.lexers.tagvalue.Lexer', 'Lexer', ({}, {}), '()', False, 'from spdx.parsers.lexers.tagvalue import Lexer\n'), ((330, 21, 330, 31), 'io.StringIO', 'StringIO', ({}, {}), '()', False, 'from io import StringIO\n'), ((265, 24, 265, 33), 'spdx.parsers.tagvaluebuilders.Builder', 'Builder', ({}, {}), '()', False, 'from spdx.parsers.tagvaluebuilders import Builder\n'), ((265, 35, 265, 51), 'spdx.parsers.loggers.StandardLogger', 'StandardLogger', ({}, {}), '()', False, 'from spdx.parsers.loggers import StandardLogger\n'), ((272, 35, 272, 60), 'spdx.version.Version', 'Version', (), '', False, 'from spdx.version import Version\n'), ((332, 25, 332, 46), 'sys.stdout.getvalue', 'sys.stdout.getvalue', ({}, {}), '()', False, 'import sys\n')] |
DT-was-an-ET/fanshim-python-pwm | mount_drives.py | dd3e6e29251000946e34d80704c040b5bcad7f8e | # Standard library imports
from subprocess import call as subprocess_call
from utility import fileexists
from time import sleep as time_sleep
from datetime import datetime
mount_try = 1
not_yet = True
done = False
start_time = datetime.now()
if fileexists("/home/rpi4-sftp/usb/drive_present.txt"):
when_usba = 0
else:
when_usba = -1
if fileexists("/home/duck-sftp/usb/drive_present.txt"):
when_usbb = 0
else:
when_usbb = -1
if fileexists("/home/pi/mycloud/drive_present.txt"):
when_mycloud = 0
else:
when_mycloud = -1
while (mount_try < 30) and not_yet:
try:
usba_mounted = fileexists("/home/rpi4-sftp/usb/drive_present.txt")
usbb_mounted = fileexists("/home/duck-sftp/usb/drive_present.txt")
mycloud_mounted = fileexists("/home/pi/mycloud/drive_present.txt")
if not(usba_mounted and usbb_mounted and mycloud_mounted):
print("Something Needs mounting this is try number: ", mount_try)
subprocess_call(["sudo", "mount", "-a"])
mount_try += 1
usba_mounted_after = fileexists("/home/rpi4-sftp/usb/drive_present.txt")
usbb_mounted_after = fileexists("/home/duck-sftp/usb/drive_present.txt")
mycloud_mounted_after = fileexists("/home/pi/mycloud/drive_present.txt")
if not(usba_mounted) and usba_mounted_after:
when_usba = round((datetime.now() - start_time).total_seconds(),2)
if not(usbb_mounted) and usbb_mounted_after:
when_usbb = round((datetime.now() - start_time).total_seconds(),2)
if not(mycloud_mounted) and mycloud_mounted_after:
when_mycloud = round((datetime.now() - start_time).total_seconds(),2)
if usba_mounted_after and usbb_mounted_after and mycloud_mounted_after:
print("Success at :",when_usba,when_usbb,when_mycloud, " secs from start")
not_yet = False
done = True
except:
print("Count: ", count," error")
time_sleep(1)
if done:
print("Great!")
else:
print("Failed to do all or drive_present.txt file not present; Times :",when_usba,when_usbb,when_mycloud)
while True:
time_sleep(20000)
| [((10, 13, 10, 27), 'datetime.datetime.now', 'datetime.now', ({}, {}), '()', False, 'from datetime import datetime\n'), ((11, 3, 11, 54), 'utility.fileexists', 'fileexists', ({(11, 14, 11, 53): '"""/home/rpi4-sftp/usb/drive_present.txt"""'}, {}), "('/home/rpi4-sftp/usb/drive_present.txt')", False, 'from utility import fileexists\n'), ((15, 3, 15, 54), 'utility.fileexists', 'fileexists', ({(15, 14, 15, 53): '"""/home/duck-sftp/usb/drive_present.txt"""'}, {}), "('/home/duck-sftp/usb/drive_present.txt')", False, 'from utility import fileexists\n'), ((19, 3, 19, 51), 'utility.fileexists', 'fileexists', ({(19, 14, 19, 50): '"""/home/pi/mycloud/drive_present.txt"""'}, {}), "('/home/pi/mycloud/drive_present.txt')", False, 'from utility import fileexists\n'), ((48, 1, 48, 14), 'time.sleep', 'time_sleep', ({(48, 12, 48, 13): '(1)'}, {}), '(1)', True, 'from time import sleep as time_sleep\n'), ((55, 1, 55, 18), 'time.sleep', 'time_sleep', ({(55, 12, 55, 17): '(20000)'}, {}), '(20000)', True, 'from time import sleep as time_sleep\n'), ((26, 17, 26, 68), 'utility.fileexists', 'fileexists', ({(26, 28, 26, 67): '"""/home/rpi4-sftp/usb/drive_present.txt"""'}, {}), "('/home/rpi4-sftp/usb/drive_present.txt')", False, 'from utility import fileexists\n'), ((27, 17, 27, 68), 'utility.fileexists', 'fileexists', ({(27, 28, 27, 67): '"""/home/duck-sftp/usb/drive_present.txt"""'}, {}), "('/home/duck-sftp/usb/drive_present.txt')", False, 'from utility import fileexists\n'), ((28, 20, 28, 68), 'utility.fileexists', 'fileexists', ({(28, 31, 28, 67): '"""/home/pi/mycloud/drive_present.txt"""'}, {}), "('/home/pi/mycloud/drive_present.txt')", False, 'from utility import fileexists\n'), ((33, 23, 33, 74), 'utility.fileexists', 'fileexists', ({(33, 34, 33, 73): '"""/home/rpi4-sftp/usb/drive_present.txt"""'}, {}), "('/home/rpi4-sftp/usb/drive_present.txt')", False, 'from utility import fileexists\n'), ((34, 23, 34, 74), 'utility.fileexists', 'fileexists', ({(34, 34, 34, 73): '"""/home/duck-sftp/usb/drive_present.txt"""'}, {}), "('/home/duck-sftp/usb/drive_present.txt')", False, 'from utility import fileexists\n'), ((35, 26, 35, 74), 'utility.fileexists', 'fileexists', ({(35, 37, 35, 73): '"""/home/pi/mycloud/drive_present.txt"""'}, {}), "('/home/pi/mycloud/drive_present.txt')", False, 'from utility import fileexists\n'), ((31, 3, 31, 43), 'subprocess.call', 'subprocess_call', ({(31, 19, 31, 42): "['sudo', 'mount', '-a']"}, {}), "(['sudo', 'mount', '-a'])", True, 'from subprocess import call as subprocess_call\n'), ((37, 22, 37, 36), 'datetime.datetime.now', 'datetime.now', ({}, {}), '()', False, 'from datetime import datetime\n'), ((39, 22, 39, 36), 'datetime.datetime.now', 'datetime.now', ({}, {}), '()', False, 'from datetime import datetime\n'), ((41, 25, 41, 39), 'datetime.datetime.now', 'datetime.now', ({}, {}), '()', False, 'from datetime import datetime\n')] |
Kshitij-Kumar-Singh-Chauhan/docon | home/views.py | bff0547e7bbd030e027217a2ca7800a8da529b56 | from django.http.response import HttpResponse
from django.shortcuts import render
from django.shortcuts import redirect, render
from cryptography.fernet import Fernet
from .models import Book, UserDetails
from .models import Contact
from .models import Book
from .models import Report
from .models import Diagnostic
from datetime import datetime
# Create your views here.
def homePage(request):
if(request.method == 'POST'):
email = request.POST.get('email')
password = request.POST.get('password')
try:
object = UserDetails.objects.get(email = email)
key1 = object.key
key1=key1[2:-1]
key1 = bytes(key1,'utf-8')
f = Fernet(key1)
truepassword = object.password
truepassword = truepassword[2:-1]
truepassword = bytes(truepassword,'utf-8')
truepassword = f.decrypt(truepassword).decode('utf-8')
except:
object = None
if(object==None):
context = {
'message': "Email Does Not Exist"
}
return render(request,"login.html",context)
elif(password == truepassword):
if object.profession == "PATIENT":
object1=UserDetails.objects.filter(profession="DOCTOR")
# name=(object.name)
# appointment(request,email,name)
context1={
'message':'Welcome '+object.name,
'mail' : object.email,
'doctors':object1
}
return render(request,"index.html",context1)
else:
context2={
'message':'Welcome '+object.name,
'mail' : object.email
}
return render(request,"dindex.html",context2)
else:
return redirect("/")
else:
return render(request,"login.html",{})
def signUpPage(request):
if(request.method == 'POST'):
name = request.POST.get('name')
email = request.POST.get('email')
password = request.POST.get('password')
passwordVerif = request.POST.get('passwordVerif')
profession = request.POST.get('user')
data = request.POST.get('data')
if(email ==''):
context = {
'message': "Please enter Email ID"
}
return render(request,"signup.html",context)
elif(password == passwordVerif):
key = Fernet.generate_key()
f = Fernet(key)
password = bytes(password,'utf-8')
token = f.encrypt(password)
key = str(key)
print(key)
UserDetails.objects.create(email=email, name=name , password=token, key = key, profession=profession, data=data)
return redirect("/")
else:
context = {
'message': "Password doesn't match"
}
return render(request,"signup.html",context)
else:
return render(request,"signup.html",{})
# def index(request):
# context={ 'alpha': 'This is sent'}
# if request.method=='POST':
# pass
# else: return render(request, 'index.html',context)
#HttpResponse('This is homepage')
def about(request):
return render(request, 'about.html')
def services(request):
return render(request, 'services.html')
def contact(request):
if request.method == "POST":
email = request.POST.get('email')
name = request.POST.get('name')
phone = request.POST.get('phone')
address = request.POST.get('address')
contact = Contact(email=email , name=name, phone=phone,address=address,date=datetime.today())
contact.save()
# messages.success(request, 'Your message has been sent !')
return render(request,"contact.html")
def book(request):
if request.method == "POST":
email = request.POST.get('email')
name = request.POST.get('name')
phone = request.POST.get('phone')
address = request.POST.get('address')
book = Book(email=email , name=name, phone=phone,problem=address,date=datetime.today())
book.save()
return render(request,"book.html")
def report(request):
if request.method == "POST":
email = request.POST.get('email')
name = request.POST.get('name')
phone = request.POST.get('phone')
message = request.POST.get('message')
report = Report(email=email , name=name, phone=phone, message=message, date=datetime.today())
report.save()
return render(request,"report.html")
def diag(request):
if request.method == "POST":
email = request.POST.get('email')
name = request.POST.get('name')
phone = request.POST.get('phone')
tests = request.POST.get('drop1')
tests = str(tests)
if(email ==''):
context = {
'message': "Please enter Email ID"
}
return render(request,"diag.html",context)
else:
diag = Diagnostic(email=email , name=name, phone=phone, tests=tests, date=datetime.today())
diag.save()
# messages.success(request, 'Your message has been sent !')
return render(request,"diag.html")
# def appointment(request,email,name):
# if request.method == "POST":
# problem = request.POST.get('problem')
# book = Appoint(problem=problem, email=email, name=name)
# book.save()
# return render(request,"index.html") | [((112, 11, 112, 40), 'django.shortcuts.render', 'render', ({(112, 18, 112, 25): 'request', (112, 27, 112, 39): '"""about.html"""'}, {}), "(request, 'about.html')", False, 'from django.shortcuts import redirect, render\n'), ((114, 11, 114, 43), 'django.shortcuts.render', 'render', ({(114, 18, 114, 25): 'request', (114, 27, 114, 42): '"""services.html"""'}, {}), "(request, 'services.html')", False, 'from django.shortcuts import redirect, render\n'), ((124, 11, 124, 41), 'django.shortcuts.render', 'render', ({(124, 18, 124, 25): 'request', (124, 26, 124, 40): '"""contact.html"""'}, {}), "(request, 'contact.html')", False, 'from django.shortcuts import redirect, render\n'), ((134, 11, 134, 38), 'django.shortcuts.render', 'render', ({(134, 18, 134, 25): 'request', (134, 26, 134, 37): '"""book.html"""'}, {}), "(request, 'book.html')", False, 'from django.shortcuts import redirect, render\n'), ((144, 11, 144, 40), 'django.shortcuts.render', 'render', ({(144, 18, 144, 25): 'request', (144, 26, 144, 39): '"""report.html"""'}, {}), "(request, 'report.html')", False, 'from django.shortcuts import redirect, render\n'), ((162, 11, 162, 38), 'django.shortcuts.render', 'render', ({(162, 18, 162, 25): 'request', (162, 26, 162, 37): '"""diag.html"""'}, {}), "(request, 'diag.html')", False, 'from django.shortcuts import redirect, render\n'), ((65, 15, 65, 46), 'django.shortcuts.render', 'render', ({(65, 22, 65, 29): 'request', (65, 30, 65, 42): '"""login.html"""', (65, 43, 65, 45): '{}'}, {}), "(request, 'login.html', {})", False, 'from django.shortcuts import redirect, render\n'), ((100, 15, 100, 47), 'django.shortcuts.render', 'render', ({(100, 22, 100, 29): 'request', (100, 30, 100, 43): '"""signup.html"""', (100, 44, 100, 46): '{}'}, {}), "(request, 'signup.html', {})", False, 'from django.shortcuts import redirect, render\n'), ((27, 16, 27, 28), 'cryptography.fernet.Fernet', 'Fernet', ({(27, 23, 27, 27): 'key1'}, {}), '(key1)', False, 'from cryptography.fernet import Fernet\n'), ((39, 19, 39, 55), 'django.shortcuts.render', 'render', ({(39, 26, 39, 33): 'request', (39, 34, 39, 46): '"""login.html"""', (39, 47, 39, 54): 'context'}, {}), "(request, 'login.html', context)", False, 'from django.shortcuts import redirect, render\n'), ((81, 19, 81, 56), 'django.shortcuts.render', 'render', ({(81, 26, 81, 33): 'request', (81, 34, 81, 47): '"""signup.html"""', (81, 48, 81, 55): 'context'}, {}), "(request, 'signup.html', context)", False, 'from django.shortcuts import redirect, render\n'), ((157, 19, 157, 54), 'django.shortcuts.render', 'render', ({(157, 26, 157, 33): 'request', (157, 34, 157, 45): '"""diag.html"""', (157, 46, 157, 53): 'context'}, {}), "(request, 'diag.html', context)", False, 'from django.shortcuts import redirect, render\n'), ((62, 19, 62, 32), 'django.shortcuts.redirect', 'redirect', ({(62, 28, 62, 31): '"""/"""'}, {}), "('/')", False, 'from django.shortcuts import redirect, render\n'), ((84, 18, 84, 39), 'cryptography.fernet.Fernet.generate_key', 'Fernet.generate_key', ({}, {}), '()', False, 'from cryptography.fernet import Fernet\n'), ((85, 16, 85, 27), 'cryptography.fernet.Fernet', 'Fernet', ({(85, 23, 85, 26): 'key'}, {}), '(key)', False, 'from cryptography.fernet import Fernet\n'), ((91, 19, 91, 32), 'django.shortcuts.redirect', 'redirect', ({(91, 28, 91, 31): '"""/"""'}, {}), "('/')", False, 'from django.shortcuts import redirect, render\n'), ((97, 19, 97, 56), 'django.shortcuts.render', 'render', ({(97, 26, 97, 33): 'request', (97, 34, 97, 
47): '"""signup.html"""', (97, 48, 97, 55): 'context'}, {}), "(request, 'signup.html', context)", False, 'from django.shortcuts import redirect, render\n'), ((121, 84, 121, 100), 'datetime.datetime.today', 'datetime.today', ({}, {}), '()', False, 'from datetime import datetime\n'), ((132, 78, 132, 94), 'datetime.datetime.today', 'datetime.today', ({}, {}), '()', False, 'from datetime import datetime\n'), ((142, 84, 142, 100), 'datetime.datetime.today', 'datetime.today', ({}, {}), '()', False, 'from datetime import datetime\n'), ((52, 23, 52, 60), 'django.shortcuts.render', 'render', ({(52, 30, 52, 37): 'request', (52, 38, 52, 50): '"""index.html"""', (52, 51, 52, 59): 'context1'}, {}), "(request, 'index.html', context1)", False, 'from django.shortcuts import redirect, render\n'), ((59, 23, 59, 61), 'django.shortcuts.render', 'render', ({(59, 30, 59, 37): 'request', (59, 38, 59, 51): '"""dindex.html"""', (59, 52, 59, 60): 'context2'}, {}), "(request, 'dindex.html', context2)", False, 'from django.shortcuts import redirect, render\n'), ((159, 86, 159, 102), 'datetime.datetime.today', 'datetime.today', ({}, {}), '()', False, 'from datetime import datetime\n')] |
kube-HPC/python-wrapper.hkube | hkube_python_wrapper/storage/base_storage_manager.py | 74713d9fea6689c116ade7d67b7ab67373a79d3b |
class BaseStorageManager(object):
def __init__(self, adpter):
self.adapter = adpter
def put(self, options):
try:
return self.adapter.put(options)
except Exception:
raise Exception('Failed to write data to storage')
def get(self, options):
try:
data = self.adapter.get(options)
return data
except Exception as e:
raise Exception('Failed to read data from storage' + str(e))
def list(self, options):
try:
return self.adapter.list(options)
except Exception:
raise Exception('Failed to list storage data')
def listPrefix(self, options):
try:
return self.adapter.listPrefix(options)
except Exception:
raise Exception('Failed to listPrefix storage data')
def delete(self, options):
try:
self.adapter.delete(options)
except Exception:
raise Exception('Failed to delete storage data')
| [] |
bigmlcom/django_compressor | compressor/tests/templatetags.py | 66dfda503633018275fdb64ad46ef80dc9a3901d | from __future__ import with_statement
import os
import sys
from mock import Mock
from django.template import Template, Context, TemplateSyntaxError
from django.test import TestCase
from compressor.conf import settings
from compressor.signals import post_compress
from compressor.tests.base import css_tag, test_dir
def render(template_string, context_dict=None):
"""
A shortcut for testing template output.
"""
if context_dict is None:
context_dict = {}
c = Context(context_dict)
t = Template(template_string)
return t.render(c).strip()
class TemplatetagTestCase(TestCase):
def setUp(self):
self.old_enabled = settings.COMPRESS_ENABLED
settings.COMPRESS_ENABLED = True
self.context = {'MEDIA_URL': settings.COMPRESS_URL}
def tearDown(self):
settings.COMPRESS_ENABLED = self.old_enabled
def test_empty_tag(self):
template = u"""{% load compress %}{% compress js %}{% block js %}
{% endblock %}{% endcompress %}"""
self.assertEqual(u'', render(template, self.context))
def test_css_tag(self):
template = u"""{% load compress %}{% compress css %}
<link rel="stylesheet" href="{{ MEDIA_URL }}css/one.css" type="text/css">
<style type="text/css">p { border:5px solid green;}</style>
<link rel="stylesheet" href="{{ MEDIA_URL }}css/two.css" type="text/css">
{% endcompress %}"""
out = css_tag("/media/CACHE/css/e41ba2cc6982.css")
self.assertEqual(out, render(template, self.context))
def test_uppercase_rel(self):
template = u"""{% load compress %}{% compress css %}
<link rel="StyleSheet" href="{{ MEDIA_URL }}css/one.css" type="text/css">
<style type="text/css">p { border:5px solid green;}</style>
<link rel="StyleSheet" href="{{ MEDIA_URL }}css/two.css" type="text/css">
{% endcompress %}"""
out = css_tag("/media/CACHE/css/e41ba2cc6982.css")
self.assertEqual(out, render(template, self.context))
def test_nonascii_css_tag(self):
template = u"""{% load compress %}{% compress css %}
<link rel="stylesheet" href="{{ MEDIA_URL }}css/nonasc.css" type="text/css">
<style type="text/css">p { border:5px solid green;}</style>
{% endcompress %}
"""
out = css_tag("/media/CACHE/css/799f6defe43c.css")
self.assertEqual(out, render(template, self.context))
def test_js_tag(self):
template = u"""{% load compress %}{% compress js %}
<script src="{{ MEDIA_URL }}js/one.js" type="text/javascript"></script>
<script type="text/javascript">obj.value = "value";</script>
{% endcompress %}
"""
out = u'<script type="text/javascript" src="/media/CACHE/js/066cd253eada.js"></script>'
self.assertEqual(out, render(template, self.context))
def test_nonascii_js_tag(self):
template = u"""{% load compress %}{% compress js %}
<script src="{{ MEDIA_URL }}js/nonasc.js" type="text/javascript"></script>
<script type="text/javascript">var test_value = "\u2014";</script>
{% endcompress %}
"""
out = u'<script type="text/javascript" src="/media/CACHE/js/e214fe629b28.js"></script>'
self.assertEqual(out, render(template, self.context))
def test_nonascii_latin1_js_tag(self):
template = u"""{% load compress %}{% compress js %}
<script src="{{ MEDIA_URL }}js/nonasc-latin1.js" type="text/javascript" charset="latin-1"></script>
<script type="text/javascript">var test_value = "\u2014";</script>
{% endcompress %}
"""
out = u'<script type="text/javascript" src="/media/CACHE/js/be9e078b5ca7.js"></script>'
self.assertEqual(out, render(template, self.context))
def test_compress_tag_with_illegal_arguments(self):
template = u"""{% load compress %}{% compress pony %}
<script type="pony/application">unicorn</script>
{% endcompress %}"""
self.assertRaises(TemplateSyntaxError, render, template, {})
def test_debug_toggle(self):
template = u"""{% load compress %}{% compress js %}
<script src="{{ MEDIA_URL }}js/one.js" type="text/javascript"></script>
<script type="text/javascript">obj.value = "value";</script>
{% endcompress %}
"""
class MockDebugRequest(object):
GET = {settings.COMPRESS_DEBUG_TOGGLE: 'true'}
context = dict(self.context, request=MockDebugRequest())
out = u"""<script src="/media/js/one.js" type="text/javascript"></script>
<script type="text/javascript">obj.value = "value";</script>"""
self.assertEqual(out, render(template, context))
def test_named_compress_tag(self):
template = u"""{% load compress %}{% compress js inline foo %}
<script type="text/javascript">obj.value = "value";</script>
{% endcompress %}
"""
def listener(sender, **kwargs):
pass
callback = Mock(wraps=listener)
post_compress.connect(callback)
render(template)
args, kwargs = callback.call_args
context = kwargs['context']
self.assertEqual('foo', context['compressed']['name'])
class PrecompilerTemplatetagTestCase(TestCase):
def setUp(self):
self.old_enabled = settings.COMPRESS_ENABLED
self.old_precompilers = settings.COMPRESS_PRECOMPILERS
precompiler = os.path.join(test_dir, 'precompiler.py')
python = sys.executable
settings.COMPRESS_ENABLED = True
settings.COMPRESS_PRECOMPILERS = (
('text/coffeescript', '%s %s' % (python, precompiler)),
)
self.context = {'MEDIA_URL': settings.COMPRESS_URL}
def tearDown(self):
settings.COMPRESS_ENABLED = self.old_enabled
settings.COMPRESS_PRECOMPILERS = self.old_precompilers
def test_compress_coffeescript_tag(self):
template = u"""{% load compress %}{% compress js %}
<script type="text/coffeescript"># this is a comment.</script>
{% endcompress %}"""
out = script(src="/media/CACHE/js/e920d58f166d.js")
self.assertEqual(out, render(template, self.context))
def test_compress_coffeescript_tag_and_javascript_tag(self):
template = u"""{% load compress %}{% compress js %}
<script type="text/coffeescript"># this is a comment.</script>
<script type="text/javascript"># this too is a comment.</script>
{% endcompress %}"""
out = script(src="/media/CACHE/js/ef6b32a54575.js")
self.assertEqual(out, render(template, self.context))
def test_coffeescript_and_js_tag_with_compress_enabled_equals_false(self):
self.old_enabled = settings.COMPRESS_ENABLED
settings.COMPRESS_ENABLED = False
try:
template = u"""{% load compress %}{% compress js %}
<script type="text/coffeescript"># this is a comment.</script>
<script type="text/javascript"># this too is a comment.</script>
{% endcompress %}"""
out = (script('# this is a comment.\n') + '\n' +
script('# this too is a comment.'))
self.assertEqual(out, render(template, self.context))
finally:
settings.COMPRESS_ENABLED = self.old_enabled
def test_compress_coffeescript_tag_compress_enabled_is_false(self):
self.old_enabled = settings.COMPRESS_ENABLED
settings.COMPRESS_ENABLED = False
try:
template = u"""{% load compress %}{% compress js %}
<script type="text/coffeescript"># this is a comment.</script>
{% endcompress %}"""
out = script("# this is a comment.\n")
self.assertEqual(out, render(template, self.context))
finally:
settings.COMPRESS_ENABLED = self.old_enabled
def test_compress_coffeescript_file_tag_compress_enabled_is_false(self):
self.old_enabled = settings.COMPRESS_ENABLED
settings.COMPRESS_ENABLED = False
try:
template = u"""
{% load compress %}{% compress js %}
<script type="text/coffeescript" src="{{ MEDIA_URL }}js/one.coffee">
</script>
{% endcompress %}"""
out = script(src="/media/CACHE/js/one.95cfb869eead.js")
self.assertEqual(out, render(template, self.context))
finally:
settings.COMPRESS_ENABLED = self.old_enabled
def test_multiple_file_order_conserved(self):
self.old_enabled = settings.COMPRESS_ENABLED
settings.COMPRESS_ENABLED = False
try:
template = u"""
{% load compress %}{% compress js %}
<script type="text/coffeescript" src="{{ MEDIA_URL }}js/one.coffee">
</script>
<script src="{{ MEDIA_URL }}js/one.js"></script>
<script type="text/coffeescript" src="{{ MEDIA_URL }}js/one.js">
</script>
{% endcompress %}"""
out = '\n'.join([
script(src="/media/CACHE/js/one.95cfb869eead.js"),
script(scripttype="", src="/media/js/one.js"),
script(src="/media/CACHE/js/one.81a2cd965815.js"),])
self.assertEqual(out, render(template, self.context))
finally:
settings.COMPRESS_ENABLED = self.old_enabled
def script(content="", src="", scripttype="text/javascript"):
"""
returns a unicode text html script element.
>>> script('#this is a comment', scripttype="text/applescript")
'<script type="text/applescript">#this is a comment</script>'
"""
out_script = u'<script '
if scripttype:
out_script += u'type="%s" ' % scripttype
if src:
out_script += u'src="%s" ' % src
return out_script[:-1] + u'>%s</script>' % content
| [((22, 8, 22, 29), 'django.template.Context', 'Context', ({(22, 16, 22, 28): 'context_dict'}, {}), '(context_dict)', False, 'from django.template import Template, Context, TemplateSyntaxError\n'), ((23, 8, 23, 33), 'django.template.Template', 'Template', ({(23, 17, 23, 32): 'template_string'}, {}), '(template_string)', False, 'from django.template import Template, Context, TemplateSyntaxError\n'), ((47, 14, 47, 58), 'compressor.tests.base.css_tag', 'css_tag', ({(47, 22, 47, 57): '"""/media/CACHE/css/e41ba2cc6982.css"""'}, {}), "('/media/CACHE/css/e41ba2cc6982.css')", False, 'from compressor.tests.base import css_tag, test_dir\n'), ((56, 14, 56, 58), 'compressor.tests.base.css_tag', 'css_tag', ({(56, 22, 56, 57): '"""/media/CACHE/css/e41ba2cc6982.css"""'}, {}), "('/media/CACHE/css/e41ba2cc6982.css')", False, 'from compressor.tests.base import css_tag, test_dir\n'), ((65, 14, 65, 58), 'compressor.tests.base.css_tag', 'css_tag', ({(65, 22, 65, 57): '"""/media/CACHE/css/799f6defe43c.css"""'}, {}), "('/media/CACHE/css/799f6defe43c.css')", False, 'from compressor.tests.base import css_tag, test_dir\n'), ((121, 19, 121, 39), 'mock.Mock', 'Mock', (), '', False, 'from mock import Mock\n'), ((122, 8, 122, 39), 'compressor.signals.post_compress.connect', 'post_compress.connect', ({(122, 30, 122, 38): 'callback'}, {}), '(callback)', False, 'from compressor.signals import post_compress\n'), ((134, 22, 134, 62), 'os.path.join', 'os.path.join', ({(134, 35, 134, 43): 'test_dir', (134, 45, 134, 61): '"""precompiler.py"""'}, {}), "(test_dir, 'precompiler.py')", False, 'import os\n')] |
Ruide/angr-dev | cle/cle/backends/relocations/generic.py | 964dc80c758e25c698c2cbcc454ef5954c5fa0a0 | from ...address_translator import AT
from ...errors import CLEOperationError
from . import Relocation
import struct
import logging
l = logging.getLogger('cle.relocations.generic')
class GenericAbsoluteReloc(Relocation):
@property
def value(self):
return self.resolvedby.rebased_addr
class GenericAbsoluteAddendReloc(Relocation):
@property
def value(self):
return self.resolvedby.rebased_addr + self.addend
class GenericPCRelativeAddendReloc(Relocation):
@property
def value(self):
return self.resolvedby.rebased_addr + self.addend - self.rebased_addr
class GenericJumpslotReloc(Relocation):
@property
def value(self):
if self.is_rela:
return self.resolvedby.rebased_addr + self.addend
else:
return self.resolvedby.rebased_addr
class GenericRelativeReloc(Relocation):
@property
def value(self):
return self.owner_obj.mapped_base + self.addend
def resolve_symbol(self, solist, bypass_compatibility=False):
self.resolve(None)
return True
class GenericCopyReloc(Relocation):
@property
def value(self):
return self.resolvedby.owner_obj.memory.read_addr_at(self.resolvedby.relative_addr)
class MipsGlobalReloc(GenericAbsoluteReloc):
pass
class MipsLocalReloc(Relocation):
def relocate(self, solist, bypass_compatibility=False): # pylint: disable=unused-argument
if self.owner_obj.mapped_base == 0:
self.resolve(None)
return True # don't touch local relocations on the main bin
delta = self.owner_obj.mapped_base - self.owner_obj._dynamic['DT_MIPS_BASE_ADDRESS']
if delta == 0:
self.resolve(None)
return True
val = self.owner_obj.memory.read_addr_at(self.relative_addr)
newval = val + delta
self.owner_obj.memory.write_addr_at(self.relative_addr, newval)
self.resolve(None)
return True
class RelocTruncate32Mixin(object):
"""
A mix-in class for relocations that cover a 32-bit field regardless of the architecture's address word length.
"""
# If True, 32-bit truncated value must equal to its original when zero-extended
check_zero_extend = False
# If True, 32-bit truncated value must equal to its original when sign-extended
check_sign_extend = False
def relocate(self, solist, bypass_compatibility=False): # pylint: disable=unused-argument
if not self.resolve_symbol(solist):
return False
arch_bits = self.owner_obj.arch.bits
assert arch_bits >= 32 # 16-bit makes no sense here
val = self.value % (2**arch_bits) # we must truncate it to native range first
if (self.check_zero_extend and val >> 32 != 0 or
self.check_sign_extend and val >> 32 != ((1 << (arch_bits - 32)) - 1)
if ((val >> 31) & 1) == 1 else 0):
raise CLEOperationError("relocation truncated to fit: %s; consider making"
" relevant addresses fit in the 32-bit address space." % self.__class__.__name__)
by = struct.pack(self.owner_obj.arch.struct_fmt(32), val % (2**32))
self.owner_obj.memory.write_bytes(self.dest_addr, by)
| [((7, 4, 7, 48), 'logging.getLogger', 'logging.getLogger', ({(7, 22, 7, 47): '"""cle.relocations.generic"""'}, {}), "('cle.relocations.generic')", False, 'import logging\n')] |
charlescayno/automation | codes/Lib/site-packages/openpyxl/writer/tests/test_style.py | a4a34d87f372d49fd69740ad3ca46ae19bf2612d | # Copyright (c) 2010-2014 openpyxl
import pytest
from openpyxl.styles.borders import Border, Side
from openpyxl.styles.fills import GradientFill
from openpyxl.styles.colors import Color
from openpyxl.writer.styles import StyleWriter
from openpyxl.tests.helper import get_xml, compare_xml
class DummyWorkbook:
style_properties = []
def test_write_gradient_fill():
fill = GradientFill(degree=90, stop=[Color(theme=0), Color(theme=4)])
writer = StyleWriter(DummyWorkbook())
writer._write_gradient_fill(writer._root, fill)
xml = get_xml(writer._root)
expected = """<?xml version="1.0" ?>
<styleSheet xmlns="http://schemas.openxmlformats.org/spreadsheetml/2006/main">
<gradientFill degree="90" type="linear">
<stop position="0">
<color theme="0"/>
</stop>
<stop position="1">
<color theme="4"/>
</stop>
</gradientFill>
</styleSheet>
"""
diff = compare_xml(xml, expected)
assert diff is None, diff
def test_write_borders():
borders = Border()
writer = StyleWriter(DummyWorkbook())
writer._write_border(writer._root, borders)
xml = get_xml(writer._root)
expected = """<?xml version="1.0"?>
<styleSheet xmlns="http://schemas.openxmlformats.org/spreadsheetml/2006/main">
<border>
<left/>
<right/>
<top/>
<bottom/>
<diagonal/>
</border>
</styleSheet>
"""
diff = compare_xml(xml, expected)
assert diff is None, diff
| [((21, 10, 21, 31), 'openpyxl.tests.helper.get_xml', 'get_xml', ({(21, 18, 21, 30): 'writer._root'}, {}), '(writer._root)', False, 'from openpyxl.tests.helper import get_xml, compare_xml\n'), ((34, 11, 34, 37), 'openpyxl.tests.helper.compare_xml', 'compare_xml', ({(34, 23, 34, 26): 'xml', (34, 28, 34, 36): 'expected'}, {}), '(xml, expected)', False, 'from openpyxl.tests.helper import get_xml, compare_xml\n'), ((39, 14, 39, 22), 'openpyxl.styles.borders.Border', 'Border', ({}, {}), '()', False, 'from openpyxl.styles.borders import Border, Side\n'), ((42, 10, 42, 31), 'openpyxl.tests.helper.get_xml', 'get_xml', ({(42, 18, 42, 30): 'writer._root'}, {}), '(writer._root)', False, 'from openpyxl.tests.helper import get_xml, compare_xml\n'), ((54, 11, 54, 37), 'openpyxl.tests.helper.compare_xml', 'compare_xml', ({(54, 23, 54, 26): 'xml', (54, 28, 54, 36): 'expected'}, {}), '(xml, expected)', False, 'from openpyxl.tests.helper import get_xml, compare_xml\n'), ((18, 41, 18, 55), 'openpyxl.styles.colors.Color', 'Color', (), '', False, 'from openpyxl.styles.colors import Color\n'), ((18, 57, 18, 71), 'openpyxl.styles.colors.Color', 'Color', (), '', False, 'from openpyxl.styles.colors import Color\n')] |
rschwiebert/RingApp | ringapp/migrations/0009_auto_20150116_1759.py | 35675b3dd81728d71b7dc70071be3185d7f99bf4 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('ringapp', '0008_auto_20150116_1755'),
]
operations = [
migrations.AlterModelTable(
name='invariance',
table='invariance',
),
migrations.AlterModelTable(
name='invarianttype',
table='invariant_types',
),
]
| [((14, 8, 17, 9), 'django.db.migrations.AlterModelTable', 'migrations.AlterModelTable', (), '', False, 'from django.db import models, migrations\n'), ((18, 8, 21, 9), 'django.db.migrations.AlterModelTable', 'migrations.AlterModelTable', (), '', False, 'from django.db import models, migrations\n')] |
MalloyPower/parsing-python | front-end/testsuite-python-lib/Python-3.1/Lib/json/tests/test_dump.py | b2bca5eed07ea2af7a2001cd4f63becdfb0570be | from unittest import TestCase
from io import StringIO
import json
class TestDump(TestCase):
def test_dump(self):
sio = StringIO()
json.dump({}, sio)
self.assertEquals(sio.getvalue(), '{}')
def test_dumps(self):
self.assertEquals(json.dumps({}), '{}')
def test_encode_truefalse(self):
self.assertEquals(json.dumps(
{True: False, False: True}, sort_keys=True),
'{"false": true, "true": false}')
self.assertEquals(json.dumps(
{2: 3.0, 4.0: 5, False: 1, 6: True}, sort_keys=True),
'{"false": 1, "2": 3.0, "4.0": 5, "6": true}')
| [((8, 14, 8, 24), 'io.StringIO', 'StringIO', ({}, {}), '()', False, 'from io import StringIO\n'), ((9, 8, 9, 26), 'json.dump', 'json.dump', ({(9, 18, 9, 20): '{}', (9, 22, 9, 25): 'sio'}, {}), '({}, sio)', False, 'import json\n'), ((13, 26, 13, 40), 'json.dumps', 'json.dumps', ({(13, 37, 13, 39): '{}'}, {}), '({})', False, 'import json\n'), ((16, 26, 17, 60), 'json.dumps', 'json.dumps', (), '', False, 'import json\n'), ((19, 26, 20, 68), 'json.dumps', 'json.dumps', (), '', False, 'import json\n')] |
visit-dav/vis | src/resources/clients/python_client/visitstate.py | c08bc6e538ecd7d30ddc6399ec3022b9e062127e | import sys
class RPCType(object):
CloseRPC = 0
DetachRPC = 1
AddWindowRPC = 2
DeleteWindowRPC = 3
SetWindowLayoutRPC = 4
SetActiveWindowRPC = 5
ClearWindowRPC = 6
ClearAllWindowsRPC = 7
OpenDatabaseRPC = 8
CloseDatabaseRPC = 9
ActivateDatabaseRPC = 10
CheckForNewStatesRPC = 11
CreateDatabaseCorrelationRPC = 12
AlterDatabaseCorrelationRPC = 13
DeleteDatabaseCorrelationRPC = 14
ReOpenDatabaseRPC = 15
ReplaceDatabaseRPC = 16
OverlayDatabaseRPC = 17
OpenComputeEngineRPC = 18
CloseComputeEngineRPC = 19
AnimationSetNFramesRPC = 20
AnimationPlayRPC = 21
AnimationReversePlayRPC = 22
AnimationStopRPC = 23
TimeSliderNextStateRPC = 24
TimeSliderPreviousStateRPC = 25
SetTimeSliderStateRPC = 26
SetActiveTimeSliderRPC = 27
AddPlotRPC = 28
SetPlotFrameRangeRPC = 29
DeletePlotKeyframeRPC = 30
MovePlotKeyframeRPC = 31
DeleteActivePlotsRPC = 32
HideActivePlotsRPC = 33
DrawPlotsRPC = 34
DisableRedrawRPC = 35
RedrawRPC = 36
SetActivePlotsRPC = 37
ChangeActivePlotsVarRPC = 38
AddOperatorRPC = 39
AddInitializedOperatorRPC = 40
PromoteOperatorRPC = 41
DemoteOperatorRPC = 42
RemoveOperatorRPC = 43
RemoveLastOperatorRPC = 44
RemoveAllOperatorsRPC = 45
SaveWindowRPC = 46
SetDefaultPlotOptionsRPC = 47
SetPlotOptionsRPC = 48
SetDefaultOperatorOptionsRPC = 49
SetOperatorOptionsRPC = 50
WriteConfigFileRPC = 51
ConnectToMetaDataServerRPC = 52
IconifyAllWindowsRPC = 53
DeIconifyAllWindowsRPC = 54
ShowAllWindowsRPC = 55
HideAllWindowsRPC = 56
UpdateColorTableRPC = 57
SetAnnotationAttributesRPC = 58
SetDefaultAnnotationAttributesRPC = 59
ResetAnnotationAttributesRPC = 60
SetKeyframeAttributesRPC = 61
SetPlotSILRestrictionRPC = 62
SetViewAxisArrayRPC = 63
SetViewCurveRPC = 64
SetView2DRPC = 65
SetView3DRPC = 66
ResetPlotOptionsRPC = 67
ResetOperatorOptionsRPC = 68
SetAppearanceRPC = 69
ProcessExpressionsRPC = 70
SetLightListRPC = 71
SetDefaultLightListRPC = 72
ResetLightListRPC = 73
SetAnimationAttributesRPC = 74
SetWindowAreaRPC = 75
PrintWindowRPC = 76
ResetViewRPC = 77
RecenterViewRPC = 78
ToggleAllowPopupRPC = 79
ToggleMaintainViewModeRPC = 80
ToggleBoundingBoxModeRPC = 81
ToggleCameraViewModeRPC = 82
TogglePerspectiveViewRPC = 83
ToggleSpinModeRPC = 84
ToggleLockTimeRPC = 85
ToggleLockToolsRPC = 86
ToggleLockViewModeRPC = 87
ToggleFullFrameRPC = 88
UndoViewRPC = 89
RedoViewRPC = 90
InvertBackgroundRPC = 91
ClearPickPointsRPC = 92
SetWindowModeRPC = 93
EnableToolRPC = 94
SetToolUpdateModeRPC = 95
CopyViewToWindowRPC = 96
CopyLightingToWindowRPC = 97
CopyAnnotationsToWindowRPC = 98
CopyPlotsToWindowRPC = 99
ClearCacheRPC = 100
ClearCacheForAllEnginesRPC = 101
SetViewExtentsTypeRPC = 102
ClearRefLinesRPC = 103
SetRenderingAttributesRPC = 104
QueryRPC = 105
CloneWindowRPC = 106
SetMaterialAttributesRPC = 107
SetDefaultMaterialAttributesRPC = 108
ResetMaterialAttributesRPC = 109
SetPlotDatabaseStateRPC = 110
DeletePlotDatabaseKeyframeRPC = 111
MovePlotDatabaseKeyframeRPC = 112
ClearViewKeyframesRPC = 113
DeleteViewKeyframeRPC = 114
MoveViewKeyframeRPC = 115
SetViewKeyframeRPC = 116
OpenMDServerRPC = 117
EnableToolbarRPC = 118
HideToolbarsRPC = 119
HideToolbarsForAllWindowsRPC = 120
ShowToolbarsRPC = 121
ShowToolbarsForAllWindowsRPC = 122
SetToolbarIconSizeRPC = 123
SaveViewRPC = 124
SetGlobalLineoutAttributesRPC = 125
SetPickAttributesRPC = 126
ExportColorTableRPC = 127
ExportEntireStateRPC = 128
ImportEntireStateRPC = 129
ImportEntireStateWithDifferentSourcesRPC = 130
ResetPickAttributesRPC = 131
AddAnnotationObjectRPC = 132
HideActiveAnnotationObjectsRPC = 133
DeleteActiveAnnotationObjectsRPC = 134
RaiseActiveAnnotationObjectsRPC = 135
LowerActiveAnnotationObjectsRPC = 136
SetAnnotationObjectOptionsRPC = 137
SetDefaultAnnotationObjectListRPC = 138
ResetAnnotationObjectListRPC = 139
ResetPickLetterRPC = 140
SetDefaultPickAttributesRPC = 141
ChooseCenterOfRotationRPC = 142
SetCenterOfRotationRPC = 143
SetQueryOverTimeAttributesRPC = 144
SetDefaultQueryOverTimeAttributesRPC = 145
ResetQueryOverTimeAttributesRPC = 146
ResetLineoutColorRPC = 147
SetInteractorAttributesRPC = 148
SetDefaultInteractorAttributesRPC = 149
ResetInteractorAttributesRPC = 150
GetProcInfoRPC = 151
SendSimulationCommandRPC = 152
UpdateDBPluginInfoRPC = 153
ExportDBRPC = 154
SetTryHarderCyclesTimesRPC = 155
OpenClientRPC = 156
OpenGUIClientRPC = 157
OpenCLIClientRPC = 158
SuppressQueryOutputRPC = 159
SetQueryFloatFormatRPC = 160
SetMeshManagementAttributesRPC = 161
SetDefaultMeshManagementAttributesRPC = 162
ResetMeshManagementAttributesRPC = 163
ResizeWindowRPC = 164
MoveWindowRPC = 165
MoveAndResizeWindowRPC = 166
SetStateLoggingRPC = 167
ConstructDataBinningRPC = 168
RequestMetaDataRPC = 169
SetTreatAllDBsAsTimeVaryingRPC = 170
SetCreateMeshQualityExpressionsRPC = 171
SetCreateTimeDerivativeExpressionsRPC = 172
SetCreateVectorMagnitudeExpressionsRPC = 173
CopyActivePlotsRPC = 174
SetPlotFollowsTimeRPC = 175
TurnOffAllLocksRPC = 176
SetDefaultFileOpenOptionsRPC = 177
SetSuppressMessagesRPC = 178
ApplyNamedSelectionRPC = 179
CreateNamedSelectionRPC = 180
DeleteNamedSelectionRPC = 181
LoadNamedSelectionRPC = 182
SaveNamedSelectionRPC = 183
SetNamedSelectionAutoApplyRPC = 184
UpdateNamedSelectionRPC = 185
InitializeNamedSelectionVariablesRPC = 186
MenuQuitRPC = 187
SetPlotDescriptionRPC = 188
MovePlotOrderTowardFirstRPC = 189
MovePlotOrderTowardLastRPC = 190
SetPlotOrderToFirstRPC = 191
SetPlotOrderToLastRPC = 192
RenamePickLabelRPC = 193
GetQueryParametersRPC = 194
DDTConnectRPC = 195
DDTFocusRPC = 196
ReleaseToDDTRPC = 197
MaxRPC = 198
| [] |
zhangyiming07/QT4C | tests/__init__.py | 2d8d60efe0a4ad78a2618c5beeb0c456a63da067 | # -*- coding: utf-8 -*-
#
# Tencent is pleased to support the open source community by making QT4C available.
# Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
# QT4C is licensed under the BSD 3-Clause License, except for the third-party components listed below.
# A copy of the BSD 3-Clause License is included in this file.
#
'''Unit tests
'''
import unittest
import os
import sys
test_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, os.path.dirname(test_dir))
def main():
runner = unittest.TextTestRunner(verbosity=10 + sys.argv.count('-v'))
suite = unittest.TestLoader().discover(test_dir, pattern='test_*.py')
raise SystemExit(not runner.run(suite).wasSuccessful())
if __name__ == '__main__':
main()
| [((16, 27, 16, 52), 'os.path.abspath', 'os.path.abspath', ({(16, 43, 16, 51): '__file__'}, {}), '(__file__)', False, 'import os\n'), ((17, 19, 17, 44), 'os.path.dirname', 'os.path.dirname', ({(17, 35, 17, 43): 'test_dir'}, {}), '(test_dir)', False, 'import os\n'), ((21, 12, 21, 33), 'unittest.TestLoader', 'unittest.TestLoader', ({}, {}), '()', False, 'import unittest\n'), ((20, 52, 20, 72), 'sys.argv.count', 'sys.argv.count', ({(20, 67, 20, 71): '"""-v"""'}, {}), "('-v')", False, 'import sys\n')] |