index
int64 0
10k
| blob_id
stringlengths 40
40
| step-1
stringlengths 13
984k
| step-2
stringlengths 6
1.23M
⌀ | step-3
stringlengths 15
1.34M
⌀ | step-4
stringlengths 30
1.34M
⌀ | step-5
stringlengths 64
1.2M
⌀ | step-ids
sequencelengths 1
5
|
---|---|---|---|---|---|---|---|
2,000 | 514a3fc312d36e6f9b601ede7f7a3940c138d39a | <mask token>
class Contact(models.Model):
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
def __unicode__(self):
return self.name
class Tag(models.Model):
contact = models.ForeignKey(Contact)
name = models.CharField(max_length=50)
def __unicode__(self):
return self.name
| <mask token>
class Test(models.Model):
<mask token>
def __unicode__(self):
return self.name
class Contact(models.Model):
GENDER_TYPES = ('M', u'男'), ('F', u'女'), ('X', u'不告诉你')
name = models.CharField(u'姓名', max_length=20)
age = models.IntegerField(u'年龄', default=0)
gender = models.CharField(u'性别', max_length=1, null=False, blank=False,
choices=GENDER_TYPES, default='X')
email = models.EmailField()
tele = models.CharField(u'电话', max_length=20)
address = models.CharField(u'地址', max_length=200)
postcode = models.CharField(u'邮政编码', max_length=6)
notes = models.CharField(u'备注', max_length=200)
def __unicode__(self):
return self.name
class Tag(models.Model):
contact = models.ForeignKey(Contact)
name = models.CharField(max_length=50)
def __unicode__(self):
return self.name
| <mask token>
class Test(models.Model):
name = models.CharField(max_length=20)
def __unicode__(self):
return self.name
class Contact(models.Model):
GENDER_TYPES = ('M', u'男'), ('F', u'女'), ('X', u'不告诉你')
name = models.CharField(u'姓名', max_length=20)
age = models.IntegerField(u'年龄', default=0)
gender = models.CharField(u'性别', max_length=1, null=False, blank=False,
choices=GENDER_TYPES, default='X')
email = models.EmailField()
tele = models.CharField(u'电话', max_length=20)
address = models.CharField(u'地址', max_length=200)
postcode = models.CharField(u'邮政编码', max_length=6)
notes = models.CharField(u'备注', max_length=200)
def __unicode__(self):
return self.name
class Tag(models.Model):
contact = models.ForeignKey(Contact)
name = models.CharField(max_length=50)
def __unicode__(self):
return self.name
| from django.db import models
class Test(models.Model):
name = models.CharField(max_length=20)
def __unicode__(self):
return self.name
class Contact(models.Model):
GENDER_TYPES = ('M', u'男'), ('F', u'女'), ('X', u'不告诉你')
name = models.CharField(u'姓名', max_length=20)
age = models.IntegerField(u'年龄', default=0)
gender = models.CharField(u'性别', max_length=1, null=False, blank=False,
choices=GENDER_TYPES, default='X')
email = models.EmailField()
tele = models.CharField(u'电话', max_length=20)
address = models.CharField(u'地址', max_length=200)
postcode = models.CharField(u'邮政编码', max_length=6)
notes = models.CharField(u'备注', max_length=200)
def __unicode__(self):
return self.name
class Tag(models.Model):
contact = models.ForeignKey(Contact)
name = models.CharField(max_length=50)
def __unicode__(self):
return self.name
| # -*- coding: utf-8 -*-
from django.db import models
# Create your models here.
class Test(models.Model):
    # Minimal sanity-check model with a single short name field.
    name = models.CharField(max_length=20)

    def __unicode__(self):
        # Python 2-style display string used by the Django admin.
        return self.name
class Contact(models.Model):
    # Address-book entry.  Verbose field names are given in Chinese;
    # attribute names stay ASCII.
    # Gender choices: male / female / "won't tell" (the default).
    GENDER_TYPES = (
        ('M', u'男'),
        ('F', u'女'),
        ('X', u'不告诉你'),
    )
    name = models.CharField(u'姓名', max_length=20)
    age = models.IntegerField(u'年龄', default=0)
    gender = models.CharField(u'性别', max_length=1, null=False, blank=False, choices=GENDER_TYPES, default='X')
    email = models.EmailField()
    tele = models.CharField(u'电话', max_length=20)
    address = models.CharField(u'地址', max_length=200)
    postcode = models.CharField(u'邮政编码', max_length=6)
    notes = models.CharField(u'备注', max_length=200)

    def __unicode__(self):
        # Contacts are listed by name.
        return self.name
class Tag(models.Model):
    # Free-form label attached to one Contact (a contact may have many tags).
    contact = models.ForeignKey(Contact)
    name = models.CharField(max_length=50)

    def __unicode__(self):
        return self.name
| [
5,
8,
9,
10,
11
] |
2,001 | c7147741784b37b42200869002d4df5ddc900675 | <mask token>
| <mask token>
def parse_args():
"""
Parse input arguments.
:return:
"""
parser = argparse.ArgumentParser(description='以图搜图API测试')
parser.add_argument('--ak', dest='access_key', help=
'access_key for qiniu account', type=str)
parser.add_argument('--sk', dest='secret_key', help=
'secret_key for qiniu account', type=str)
parser.add_argument('--in', dest='json_file', help='json file', type=str)
return parser.parse_args()
<mask token>
| <mask token>
def parse_args():
"""
Parse input arguments.
:return:
"""
parser = argparse.ArgumentParser(description='以图搜图API测试')
parser.add_argument('--ak', dest='access_key', help=
'access_key for qiniu account', type=str)
parser.add_argument('--sk', dest='secret_key', help=
'secret_key for qiniu account', type=str)
parser.add_argument('--in', dest='json_file', help='json file', type=str)
return parser.parse_args()
if __name__ == '__main__':
args = parse_args()
file = open(args.json_file, 'r')
res = []
a = 0
for line in file.readlines():
dic = json.loads(line)
img_url = dic['url']
t = {'url': img_url, 'true': 0, 'simialr_uri': []}
if not 'error' in dic.keys():
a += 1
im_num = img_url.split('.')[-2].split('/')[-1]
print(im_num)
for i in dic['result']:
uri = []
print(i['uri'].split('/')[4])
if i['uri'].split('/')[4].split('__')[0
] == 'eval' and im_num in i['uri'].split('/')[4].split('-'
)[0]:
t['simialr_uri'].append(i)
t['true'] += 1
res.append(t)
r = 0
for i in range(a):
r += res[i]['true']
correct = r / (float(a) * 15)
print('The top-5 correct percentage is %f' % correct)
| import json
import argparse
def parse_args():
    """Build the CLI parser and parse sys.argv.

    Returns:
        argparse.Namespace with ``access_key``, ``secret_key`` and
        ``json_file`` attributes (all optional strings).
    """
    parser = argparse.ArgumentParser(description='以图搜图API测试')
    # Each flag maps onto an attribute of the returned namespace via `dest`.
    for flag, dest, help_text in (
        ('--ak', 'access_key', 'access_key for qiniu account'),
        ('--sk', 'secret_key', 'secret_key for qiniu account'),
        ('--in', 'json_file', 'json file'),
    ):
        parser.add_argument(flag, dest=dest, help=help_text, type=str)
    return parser.parse_args()
if __name__ == '__main__':
    # Evaluate image-search results: for every query line in the input JSON
    # file, count how many returned URIs come from the same source image,
    # then report the overall top-5 hit percentage.
    args = parse_args()
    file = open(args.json_file, 'r')
    res = []
    # a counts the queries that came back without an 'error' field.
    a = 0
    for line in file.readlines():
        dic = json.loads(line)
        img_url = dic['url']
        t = {'url': img_url, 'true': 0, 'simialr_uri': []}
        if not 'error' in dic.keys():
            a += 1
            # Image id = file name (last path segment before the extension).
            im_num = img_url.split('.')[-2].split('/')[-1]
            print(im_num)
            for i in dic['result']:
                uri = []
                print(i['uri'].split('/')[4])
                # Count a hit when the result comes from the 'eval' set and
                # shares the query image's id prefix.
                if i['uri'].split('/')[4].split('__')[0
                    ] == 'eval' and im_num in i['uri'].split('/')[4].split('-'
                    )[0]:
                    t['simialr_uri'].append(i)
                    t['true'] += 1
            res.append(t)
    r = 0
    for i in range(a):
        r += res[i]['true']
    # Normalised by 15 — presumably the number of results returned per
    # query by the API; confirm against the service configuration.
    correct = r / (float(a) * 15)
    print('The top-5 correct percentage is %f' % correct)
| # -*- coding: utf-8 -*-
import json
import argparse
def parse_args():
"""
Parse input arguments.
:return:
"""
parser = argparse.ArgumentParser(description='以图搜图API测试')
parser.add_argument('--ak', dest='access_key', help='access_key for qiniu account',
type=str)
parser.add_argument('--sk', dest='secret_key', help='secret_key for qiniu account',
type=str)
parser.add_argument('--in', dest='json_file', help='json file',
type=str)
return parser.parse_args()
if __name__ == '__main__':
args = parse_args()
file = open(args.json_file,'r')
res = []
a = 0
for line in file.readlines():
dic = json.loads(line)
img_url = dic["url"]
t = {"url": img_url, "true":0, "simialr_uri":[]}
if not "error" in dic.keys():
a += 1
#im_num = img_url.split('.')[-2].split('/')[-1].lstrip('image_group_test_')
im_num = img_url.split('.')[-2].split('/')[-1]#.lstrip('image_group_test_')
print(im_num)
for i in dic["result"]:
uri = []
#print((i["uri"].split('/'))[4].split('__')[0]=="eval",(i["uri"].split('/'))[4].split('-')[0])
print((i["uri"].split('/'))[4])
if ((i["uri"].split('/'))[4].split('__')[0]=="eval") and (im_num in (i["uri"].split('/'))[4].split('-')[0]):
t["simialr_uri"].append(i)
t["true"] += 1
res.append(t)
r = 0
for i in range(a):
r += res[i]["true"]
correct = r/(float(a)*15)
print ("The top-5 correct percentage is %f" % correct)
| [
0,
1,
2,
3,
4
] |
2,002 | 043dd97d4d4ade29536a83c3557a34db3a4cb0f9 | <mask token>
| <mask token>
for x in range(1, 100000):
b = a * x
print(x, '*', a, '=', b)
if b > 100:
break
# Print the multiplication table of a user-chosen number until the
# product first exceeds 100.
# NOTE(review): for a <= 0 the product never exceeds 100, so the loop
# runs through all 99999 iterations.
a = int(input('Choose a number: '))
for x in range(1, 100000):
    b = a * x
    print(x, '*', a, '=', b)
    if b > 100:
        # Stop after the first product above 100.
        break
| a=int(input("Choose a number: "))
for x in range(1,100000):
b=a*x;
print(x, '*', a,'=',b)
if b>100:
break
| null | [
0,
1,
2,
3
] |
2,003 | 93150eb1c6746e2b1967eb5305fa526ae36968fd | <mask token>
| <mask token>
def write_csv(filename, train_acc, test_acc, train_loss, test_loss,
train_error, test_error, epoch):
if epoch == 0:
with open(filename, 'w') as f:
f.write(
'train_acc,test_acc,train_loss, test_loss, train_error, test_error\n'
)
f.write('{0},{1},{2},{3},{4},{5}\n'.format(train_acc[-1],
test_acc[-1], train_loss[-1], test_loss[-1], train_error[-1
], test_error[-1]))
else:
with open(filename, 'a') as f:
f.write('{0},{1},{2},{3},{4},{5}\n'.format(train_acc[-1],
test_acc[-1], train_loss[-1], test_loss[-1], train_error[-1
], test_error[-1]))
| <mask token>
def plots(epochs, train_acc, test_acc, train_loss, test_loss, train_error,
test_error, filename):
plt.style.use('bmh')
fig = plt.figure(figsize=(8, 6))
plt.plot(epochs, train_acc, 'r', epochs, test_acc, 'g')
plt.title('model accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['train_acc', 'test_acc'], loc='upper left')
fig.savefig(filename + '_accuracy.png')
fig = plt.figure(figsize=(8, 6))
plt.plot(epochs, train_loss, 'r', epochs, test_loss, 'g')
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train_loss', 'test_loss'], loc='upper left')
fig.savefig(filename + '_loss.png')
fig = plt.figure(figsize=(8, 6))
plt.plot(epochs, train_error, 'r', epochs, test_error, 'g')
plt.title('model error rate')
plt.ylabel('error rate')
plt.xlabel('epoch')
plt.legend(['train_error', 'test_error'], loc='upper left')
fig.savefig(filename + '_error.png')
plt.close('all')
def write_csv(filename, train_acc, test_acc, train_loss, test_loss,
train_error, test_error, epoch):
if epoch == 0:
with open(filename, 'w') as f:
f.write(
'train_acc,test_acc,train_loss, test_loss, train_error, test_error\n'
)
f.write('{0},{1},{2},{3},{4},{5}\n'.format(train_acc[-1],
test_acc[-1], train_loss[-1], test_loss[-1], train_error[-1
], test_error[-1]))
else:
with open(filename, 'a') as f:
f.write('{0},{1},{2},{3},{4},{5}\n'.format(train_acc[-1],
test_acc[-1], train_loss[-1], test_loss[-1], train_error[-1
], test_error[-1]))
| import matplotlib.pyplot as plt
import matplotlib
import numpy as np
from PIL import Image
from scipy.misc import imsave, imread
def plots(epochs, train_acc, test_acc, train_loss, test_loss, train_error,
    test_error, filename):
    """Save accuracy, loss and error-rate training curves as three PNGs.

    Each figure plots the train series in red and the test series in
    green against epoch number.  Output files are named
    <filename>_accuracy.png, <filename>_loss.png and <filename>_error.png.
    """
    plt.style.use('bmh')
    fig = plt.figure(figsize=(8, 6))
    plt.plot(epochs, train_acc, 'r', epochs, test_acc, 'g')
    plt.title('model accuracy')
    plt.ylabel('accuracy')
    plt.xlabel('epoch')
    plt.legend(['train_acc', 'test_acc'], loc='upper left')
    fig.savefig(filename + '_accuracy.png')
    fig = plt.figure(figsize=(8, 6))
    plt.plot(epochs, train_loss, 'r', epochs, test_loss, 'g')
    plt.title('model loss')
    plt.ylabel('loss')
    plt.xlabel('epoch')
    plt.legend(['train_loss', 'test_loss'], loc='upper left')
    fig.savefig(filename + '_loss.png')
    fig = plt.figure(figsize=(8, 6))
    plt.plot(epochs, train_error, 'r', epochs, test_error, 'g')
    plt.title('model error rate')
    plt.ylabel('error rate')
    plt.xlabel('epoch')
    plt.legend(['train_error', 'test_error'], loc='upper left')
    fig.savefig(filename + '_error.png')
    # Close every figure so repeated calls do not leak matplotlib state.
    plt.close('all')
def write_csv(filename, train_acc, test_acc, train_loss, test_loss,
    train_error, test_error, epoch):
    """Append the latest value of each metric series to a CSV file.

    On epoch 0 the file is created (truncated) and a header row is
    written before the first data row; on later epochs the row is
    simply appended to the existing file.
    """
    row = '{0},{1},{2},{3},{4},{5}\n'.format(train_acc[-1], test_acc[-1],
        train_loss[-1], test_loss[-1], train_error[-1], test_error[-1])
    mode = 'w' if epoch == 0 else 'a'
    with open(filename, mode) as f:
        if epoch == 0:
            # Fresh file: emit the column names first.
            f.write(
                'train_acc,test_acc,train_loss, test_loss, train_error, test_error\n'
                )
        f.write(row)
| import matplotlib.pyplot as plt
import matplotlib
import numpy as np
from PIL import Image
from scipy.misc import imsave, imread
def plots(epochs, train_acc, test_acc, train_loss, test_loss, train_error, test_error,filename):
plt.style.use('bmh')
fig=plt.figure(figsize=(8,6))
plt.plot(epochs,train_acc, 'r', epochs,test_acc, 'g')
plt.title('model accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['train_acc', 'test_acc'], loc='upper left')
fig.savefig(filename + '_accuracy.png')
fig=plt.figure(figsize=(8,6))
plt.plot(epochs,train_loss, 'r', epochs,test_loss, 'g')
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train_loss', 'test_loss'], loc='upper left')
fig.savefig(filename + '_loss.png')
fig=plt.figure(figsize=(8,6))
plt.plot(epochs,train_error, 'r', epochs,test_error, 'g')
plt.title('model error rate')
plt.ylabel('error rate')
plt.xlabel('epoch')
plt.legend(['train_error', 'test_error'], loc='upper left')
fig.savefig(filename + '_error.png')
plt.close('all')
def write_csv(filename, train_acc,test_acc,train_loss,test_loss,train_error,test_error,epoch):
if epoch==0:
with open(filename, 'w') as f:
f.write('train_acc,test_acc,train_loss, test_loss, train_error, test_error\n')
f.write('{0},{1},{2},{3},{4},{5}\n'.format(train_acc[-1],\
test_acc[-1],\
train_loss[-1],\
test_loss[-1],\
train_error[-1],\
test_error[-1]))
else:
with open(filename, 'a') as f:
f.write('{0},{1},{2},{3},{4},{5}\n'.format(train_acc[-1],\
test_acc[-1],\
train_loss[-1],\
test_loss[-1],\
train_error[-1],\
test_error[-1]))
| [
0,
1,
2,
3,
4
] |
2,004 | bab6b9a0178da119f753deb6c626dd5c41db2bdd | <mask token>
def dfs(i):
if temp[i]:
return
temp[i] = True
if i in odd:
for j in graph[i]:
even.add(j)
dfs(j)
else:
for j in graph[i]:
odd.add(j)
dfs(j)
<mask token>
| <mask token>
sys.setrecursionlimit(1000000)
<mask token>
for _ in range(n - 1):
a, b = map(int, input().split())
graph[a - 1].add(b - 1)
graph[b - 1].add(a - 1)
def dfs(i):
if temp[i]:
return
temp[i] = True
if i in odd:
for j in graph[i]:
even.add(j)
dfs(j)
else:
for j in graph[i]:
odd.add(j)
dfs(j)
<mask token>
odd.add(0)
dfs(0)
<mask token>
for i in range(q):
c, d = map(int, input().split())
if c - 1 in odd and d - 1 in odd or c - 1 in even and d - 1 in even:
ans.append('Town')
else:
ans.append('Road')
for i in ans:
print(i)
| <mask token>
sys.setrecursionlimit(1000000)
n, q = map(int, input().split())
graph = [set([]) for _ in range(n)]
for _ in range(n - 1):
a, b = map(int, input().split())
graph[a - 1].add(b - 1)
graph[b - 1].add(a - 1)
def dfs(i):
if temp[i]:
return
temp[i] = True
if i in odd:
for j in graph[i]:
even.add(j)
dfs(j)
else:
for j in graph[i]:
odd.add(j)
dfs(j)
temp = [False] * n
odd = set([])
even = set([])
odd.add(0)
dfs(0)
ans = []
for i in range(q):
c, d = map(int, input().split())
if c - 1 in odd and d - 1 in odd or c - 1 in even and d - 1 in even:
ans.append('Town')
else:
ans.append('Road')
for i in ans:
print(i)
| import sys
# Tree two-colouring: adjacent towns get opposite parities, so a query
# pair with the same colour meets at a Town, otherwise on a Road.
sys.setrecursionlimit(1000000)
n, q = map(int, input().split())
# Adjacency sets for the n towns (0-based indices).
graph = [set([]) for _ in range(n)]
for _ in range(n - 1):
    a, b = map(int, input().split())
    graph[a - 1].add(b - 1)
    graph[b - 1].add(a - 1)


def dfs(i):
    # Colour the component reachable from i: neighbours of an 'odd' node
    # go to 'even' and vice versa.  Uses the module-level temp/odd/even/
    # graph state; correctness relies on the input being a tree.
    if temp[i]:
        return
    temp[i] = True
    if i in odd:
        for j in graph[i]:
            even.add(j)
            dfs(j)
    else:
        for j in graph[i]:
            odd.add(j)
            dfs(j)


temp = [False] * n
odd = set([])
even = set([])
# Root the colouring at town 0.
odd.add(0)
dfs(0)
ans = []
for i in range(q):
    c, d = map(int, input().split())
    # Same parity -> the walkers meet at a town; different -> on a road.
    if c - 1 in odd and d - 1 in odd or c - 1 in even and d - 1 in even:
        ans.append('Town')
    else:
        ans.append('Road')
for i in ans:
    print(i)
| import sys
sys.setrecursionlimit(1000000)
n, q = map(int, input().split())
graph = [set([]) for _ in range(n)]
for _ in range(n - 1):
a, b = map(int, input().split())
graph[a - 1].add(b - 1)
graph[b - 1].add(a - 1)
def dfs(i):
if temp[i]:
return
temp[i] = True
if i in odd:
for j in graph[i]:
even.add(j)
dfs(j)
else:
for j in graph[i]:
odd.add(j)
dfs(j)
temp = [False] * n
odd = set([])
even = set([])
odd.add(0)
dfs(0)
ans = []
for i in range(q):
c, d = map(int, input().split())
if (c - 1 in odd and d - 1 in odd) or (c - 1 in even and d - 1 in even):
ans.append("Town")
else:
ans.append("Road")
for i in ans:
print(i)
| [
1,
2,
3,
4,
5
] |
2,005 | 402acaa263ee620fbd9bf7d271dce2e5de4eeae0 | #!/usr/bin/env python
from svg_ros.srv import *
import rospy
from std_msgs.msg import String
from geometry_msgs.msg import Twist
from math import *
import roslib
from nav_msgs.msg import Odometry
#Global variables
base_distance_x0=0
base_distance_y0=0
base_angle_0=0
base_distance_x1=0
base_distance_y1=0
base_angle_1=0
flag=0
def update_value(msg):
    # Odometry subscriber callback: cache the latest base pose in globals
    # so move_robot can compute the distance travelled.
    global base_distance_x1
    global base_distance_y1
    global base_angle_1
    base_distance_x1=msg.pose.pose.position.x
    base_distance_y1=msg.pose.pose.position.y
    # Only the quaternion w component is stored; heading is later shown
    # as acos(w)*2 - valid only while rotation stays about z (confirm).
    base_angle_1=msg.pose.pose.orientation.w
    print "x: "+str(base_distance_x1)
    print "y: "+str(base_distance_y1)
    print "Ang: "+str( acos(base_angle_1)*2 )
    #print msg.pose.pose
def move_robot(req):
print req.param
global flag
flag=0
global base_distance_x1
global base_distance_y1
global base_angle_1
global base_distance_x0
global base_distance_y0
global base_angle_0
r = rospy.Rate(10)
angle=req.param.split()
angle=(float)(angle[2])
distance=req.param.split()
distance=(float)(distance[1])
# print angle
# print distance
angle=(int)((angle*57.2958)/.9)
distance=(int)(distance/.04)
# print angle
# print distance
cmd_vel = rospy.Publisher('cmd_vel_mux/input/navi', Twist, queue_size=10)
move_cmd = Twist()
if distance<0:
move_cmd.linear.x =-0.2
else:
move_cmd.linear.x =0.2#0.4
turn_cmd = Twist()
turn_cmd.linear.x = 0
r = rospy.Rate(100)
if angle<0:
turn_cmd.angular.z =radians(-90)
angle*=-1
else :
turn_cmd.angular.z =radians(90)
if angle!=0:
rospy.loginfo("Turning")
for x in range(0,angle):
cmd_vel.publish(turn_cmd)
r.sleep()
turn_cmd.angular.z =0
cmd_vel.publish(move_cmd)
base_distance_x0=base_distance_x1
base_distance_y0=base_distance_y1
base_angle_0=base_angle_1
r = rospy.Rate(5)
rospy.loginfo("Going Straight")
if distance<=0:
for x in range(0,abs(distance)): #Resolucion .02m
cmd_vel.publish(move_cmd)
r.sleep()
flag=0
elif distance<=.15:
for x in range(0,abs(distance)): #Resolucion .02m
cmd_vel.publish(move_cmd)
r.sleep()
flag=0
else:
print 'siiiiiiiiiiiiiiiiiiiisisisissi'
flag=1
if distance<=0:
move_cmd.linear.x =0
cmd_vel.publish(move_cmd)
rospy.loginfo("Finished")
print 'Distancia: '
print ( ((base_distance_x1-base_distance_x0)**2) + ((base_distance_y1-base_distance_y0)**2) )**1/2
#print 'Angulo: '
#print base_angle_1
return req.param
def mv_turtle():
    # Start the helper node, hand odometry messages to update_value, and
    # spin until shutdown.
    rospy.init_node('AUX')
    rospy.Subscriber('odom',Odometry,update_value)
    print "Ready to move turtle bot."
    # NOTE(review): this Rate object is created but never used.
    rospy.Rate(2)
    rospy.spin()

if __name__ == '__main__':
    try:
        mv_turtle()
    except rospy.ROSInterruptException:
        pass
0
] |
2,006 | 3f7dddcfde9d33f30f00156fc41700da2692afc3 | name_list =[ ]
# Collect 32 names from the user, storing each in name_list and echoing
# it back, then print the full list.
for _ in range(1, 33):
    name = input("请输入要加入列表的名字:")
    # Bug fix: append the entered name itself; the original appended the
    # literal string "name" on every iteration.
    name_list.append(name)
    print(name)
# Bug fix: the original `print(list_ name)` was a syntax error; print the
# accumulated list instead.
print(name_list)
| null | null | null | null | [
0
] |
2,007 | 0003d104a4dcd5a5b2357016cbc0317738c2cd3c | <mask token>
class Sprite(Widget):
def __init__(self, x, y, w, h, image=None, callback=None, **kw):
"""Sprite widget
"""
Widget.__init__(self, x, y, w, h, **kw)
if image:
self.image = pygame.image.load(image).convert()
else:
self.image = None
self.callback = callback
def draw(self):
surface = pygame.Surface((self.w, self.h), pygame.SRCALPHA)
if self.image:
my_img = pygame.transform.smoothscale(self.image, (self.w, self.h))
surface.blit(my_img, (0, 0))
self.parent.surface.blit(surface, (self.x, self.y))
<mask token>
<mask token>
| <mask token>
class Sprite(Widget):
def __init__(self, x, y, w, h, image=None, callback=None, **kw):
"""Sprite widget
"""
Widget.__init__(self, x, y, w, h, **kw)
if image:
self.image = pygame.image.load(image).convert()
else:
self.image = None
self.callback = callback
def draw(self):
surface = pygame.Surface((self.w, self.h), pygame.SRCALPHA)
if self.image:
my_img = pygame.transform.smoothscale(self.image, (self.w, self.h))
surface.blit(my_img, (0, 0))
self.parent.surface.blit(surface, (self.x, self.y))
<mask token>
def update(self):
if self.refresh:
self.refresh = False
self.draw()
return True
| <mask token>
class Sprite(Widget):
def __init__(self, x, y, w, h, image=None, callback=None, **kw):
"""Sprite widget
"""
Widget.__init__(self, x, y, w, h, **kw)
if image:
self.image = pygame.image.load(image).convert()
else:
self.image = None
self.callback = callback
def draw(self):
surface = pygame.Surface((self.w, self.h), pygame.SRCALPHA)
if self.image:
my_img = pygame.transform.smoothscale(self.image, (self.w, self.h))
surface.blit(my_img, (0, 0))
self.parent.surface.blit(surface, (self.x, self.y))
def touched(self, position):
if self.toggle:
self.state = not self.state
if self.callback:
self.refresh = self.callback(self)
else:
self.refresh = True
if self.refresh:
self.parent.update()
def update(self):
if self.refresh:
self.refresh = False
self.draw()
return True
| import pygame
import time
import math
from pygame.locals import *
from pygux.widgets.widget import Widget, hlBox
from pygux.colours import Colours
class Sprite(Widget):
    """Image widget: draws an optional scaled bitmap and fires an
    optional callback when touched."""

    def __init__(self, x, y, w, h, image=None, callback=None, **kw):
        """Sprite widget

        x, y, w, h: position and size in parent coordinates.
        image: optional path to an image file (loaded via pygame).
        callback: optional callable invoked from touched(); its return
        value decides whether the widget is redrawn.
        """
        Widget.__init__(self, x, y, w, h, **kw)
        if image:
            self.image = pygame.image.load(image).convert()
        else:
            self.image = None
        self.callback = callback

    def draw(self):
        # Render onto a transparent scratch surface, then blit it onto
        # the parent surface at this widget's position.
        surface = pygame.Surface((self.w, self.h), pygame.SRCALPHA)
        if self.image:
            # Scale the source image to the widget's current size.
            my_img = pygame.transform.smoothscale(self.image, (self.w, self.h))
            surface.blit(my_img, (0, 0))
        self.parent.surface.blit(surface, (self.x, self.y))

    def touched(self, position):
        # Touch handler; `position` is currently unused.
        # toggle/state/refresh are presumably inherited from Widget -
        # confirm in the pygux Widget base class.
        if self.toggle:
            self.state = not self.state
        if self.callback:
            self.refresh = self.callback(self)
        else:
            self.refresh = True
        if self.refresh:
            self.parent.update()

    def update(self):
        # Redraw once if a refresh was requested; returns True when a
        # redraw happened (implicitly None otherwise).
        if self.refresh:
            self.refresh = False
            self.draw()
            return True
| null | [
3,
4,
5,
6
] |
2,008 | b048319a2ed182e70aa7f8a736ff02953577ec39 | <mask token>
| <mask token>
def run():
day = datetime.strptime('2017-10', '%Y-%m')
next_day = datetime.strptime('2017-11', '%Y-%m')
last_day = datetime.strptime('2018-11', '%Y-%m')
monthes = get_month()
result_keyword = {}
result_count = {}
dict_total = {}
idx = 1
while day < last_day:
keyword_caches = AnalyticsCacheSearchKeywordDay.objects.filter(
theday__gte=day, theday__lt=next_day)
date = str(day.year) + '-' + str(day.month)
result_keyword[date] = []
result_count[date] = []
dict_month = {}
for keyword in keyword_caches:
word = keyword.keyword.replace(' ', '')
if dict_total.get(word) is None:
dict_total[word] = 0
if dict_month.get(word) is None:
dict_month[word] = 0
dict_total[word] += keyword.total_count
dict_month[word] += keyword.total_count
sort_ids = sorted(dict_month, key=lambda x: dict_month[x], reverse=True
)
cnt = 0
for id in sort_ids:
if cnt > 99:
break
result_keyword[date].append(id)
result_count[date].append(dict_month[id])
cnt += 1
day = datetime.strptime(monthes[idx], '%Y-%m')
next_day = datetime.strptime(monthes[idx + 1], '%Y-%m')
idx += 1
sorted_ids = sorted(dict_total, key=lambda x: dict_total[x], reverse=True)
total_rank_keyword = []
total_rank_count = []
for id in sorted_ids:
total_rank_keyword.append(id)
total_rank_count.append(dict_total[id])
with open('result.txt', 'w') as f:
monthes = get_month()
for month in monthes:
if month == '2018-11' or month == '2018-12':
continue
print(month, file=f, end='\t')
print(' ', file=f, end='\t')
print('합산TOP100', file=f, end='\n')
for rank in range(0, 100):
for month in monthes:
if month == '2018-11' or month == '2018-12':
continue
if result_keyword.get(month) is None:
print(' ', file=f, end='\t')
print(' ', file=f, end='\t')
continue
if len(result_keyword[month]) < rank + 1:
print(' ', file=f, end='\t')
print(' ', file=f, end='\t')
continue
print(result_keyword[month][rank], file=f, end='\t')
print(result_count[month][rank], file=f, end='\t')
print(total_rank_keyword[rank], file=f, end='\t')
print(total_rank_count[rank], file=f, end='\n')
| <mask token>
def get_month():
return ['2017-10', '2017-11', '2017-12', '2018-1', '2018-2', '2018-3',
'2018-4', '2018-5', '2018-6', '2018-7', '2018-8', '2018-9',
'2018-10', '2018-11', '2018-12']
def run():
day = datetime.strptime('2017-10', '%Y-%m')
next_day = datetime.strptime('2017-11', '%Y-%m')
last_day = datetime.strptime('2018-11', '%Y-%m')
monthes = get_month()
result_keyword = {}
result_count = {}
dict_total = {}
idx = 1
while day < last_day:
keyword_caches = AnalyticsCacheSearchKeywordDay.objects.filter(
theday__gte=day, theday__lt=next_day)
date = str(day.year) + '-' + str(day.month)
result_keyword[date] = []
result_count[date] = []
dict_month = {}
for keyword in keyword_caches:
word = keyword.keyword.replace(' ', '')
if dict_total.get(word) is None:
dict_total[word] = 0
if dict_month.get(word) is None:
dict_month[word] = 0
dict_total[word] += keyword.total_count
dict_month[word] += keyword.total_count
sort_ids = sorted(dict_month, key=lambda x: dict_month[x], reverse=True
)
cnt = 0
for id in sort_ids:
if cnt > 99:
break
result_keyword[date].append(id)
result_count[date].append(dict_month[id])
cnt += 1
day = datetime.strptime(monthes[idx], '%Y-%m')
next_day = datetime.strptime(monthes[idx + 1], '%Y-%m')
idx += 1
sorted_ids = sorted(dict_total, key=lambda x: dict_total[x], reverse=True)
total_rank_keyword = []
total_rank_count = []
for id in sorted_ids:
total_rank_keyword.append(id)
total_rank_count.append(dict_total[id])
with open('result.txt', 'w') as f:
monthes = get_month()
for month in monthes:
if month == '2018-11' or month == '2018-12':
continue
print(month, file=f, end='\t')
print(' ', file=f, end='\t')
print('합산TOP100', file=f, end='\n')
for rank in range(0, 100):
for month in monthes:
if month == '2018-11' or month == '2018-12':
continue
if result_keyword.get(month) is None:
print(' ', file=f, end='\t')
print(' ', file=f, end='\t')
continue
if len(result_keyword[month]) < rank + 1:
print(' ', file=f, end='\t')
print(' ', file=f, end='\t')
continue
print(result_keyword[month][rank], file=f, end='\t')
print(result_count[month][rank], file=f, end='\t')
print(total_rank_keyword[rank], file=f, end='\t')
print(total_rank_count[rank], file=f, end='\n')
| from core.models import AnalyticsCacheSearchKeywordDay
from datetime import datetime, timedelta
def get_month():
    """Return the ordered 'YYYY-M' month labels covered by the report:
    October 2017 through December 2018, without zero-padded months."""
    months = ['2017-10', '2017-11', '2017-12']
    months.extend('2018-%d' % m for m in range(1, 13))
    return months
def run():
    """Build monthly top-100 search-keyword rankings (Oct 2017 - Oct 2018)
    plus an all-period ranking, and dump them as a tab-separated table
    into result.txt."""
    day = datetime.strptime('2017-10', '%Y-%m')
    next_day = datetime.strptime('2017-11', '%Y-%m')
    last_day = datetime.strptime('2018-11', '%Y-%m')
    monthes = get_month()
    # Per-month ranked keywords and their counts, keyed by 'YYYY-M'.
    result_keyword = {}
    result_count = {}
    # keyword -> count accumulated over the whole period.
    dict_total = {}
    idx = 1
    while day < last_day:
        keyword_caches = AnalyticsCacheSearchKeywordDay.objects.filter(
            theday__gte=day, theday__lt=next_day)
        # Month key without zero padding, e.g. '2018-3'.
        date = str(day.year) + '-' + str(day.month)
        result_keyword[date] = []
        result_count[date] = []
        dict_month = {}
        for keyword in keyword_caches:
            # Normalise keywords by stripping inner spaces before counting.
            word = keyword.keyword.replace(' ', '')
            if dict_total.get(word) is None:
                dict_total[word] = 0
            if dict_month.get(word) is None:
                dict_month[word] = 0
            dict_total[word] += keyword.total_count
            dict_month[word] += keyword.total_count
        # Rank this month's keywords by count, keeping the top 100.
        sort_ids = sorted(dict_month, key=lambda x: dict_month[x], reverse=True
            )
        cnt = 0
        for id in sort_ids:
            if cnt > 99:
                break
            result_keyword[date].append(id)
            result_count[date].append(dict_month[id])
            cnt += 1
        # Advance the [day, next_day) window to the next calendar month.
        day = datetime.strptime(monthes[idx], '%Y-%m')
        next_day = datetime.strptime(monthes[idx + 1], '%Y-%m')
        idx += 1
    # Overall ranking across the whole period.
    sorted_ids = sorted(dict_total, key=lambda x: dict_total[x], reverse=True)
    total_rank_keyword = []
    total_rank_count = []
    for id in sorted_ids:
        total_rank_keyword.append(id)
        total_rank_count.append(dict_total[id])
    with open('result.txt', 'w') as f:
        monthes = get_month()
        # Header row: a keyword/count column pair per month (2018-11/12
        # excluded), then the combined-top-100 column.
        for month in monthes:
            if month == '2018-11' or month == '2018-12':
                continue
            print(month, file=f, end='\t')
            print(' ', file=f, end='\t')
        print('합산TOP100', file=f, end='\n')
        for rank in range(0, 100):
            for month in monthes:
                if month == '2018-11' or month == '2018-12':
                    continue
                if result_keyword.get(month) is None:
                    print(' ', file=f, end='\t')
                    print(' ', file=f, end='\t')
                    continue
                if len(result_keyword[month]) < rank + 1:
                    # Month has fewer than rank+1 keywords: pad the cells.
                    print(' ', file=f, end='\t')
                    print(' ', file=f, end='\t')
                    continue
                print(result_keyword[month][rank], file=f, end='\t')
                print(result_count[month][rank], file=f, end='\t')
            print(total_rank_keyword[rank], file=f, end='\t')
            print(total_rank_count[rank], file=f, end='\n')
| from core.models import AnalyticsCacheSearchKeywordDay
from datetime import datetime, timedelta
def get_month():
return ["2017-10","2017-11","2017-12","2018-1","2018-2","2018-3","2018-4","2018-5","2018-6","2018-7","2018-8","2018-9","2018-10","2018-11", "2018-12"]
def run():
day = datetime.strptime("2017-10", "%Y-%m")
next_day = datetime.strptime("2017-11", "%Y-%m")
last_day = datetime.strptime("2018-11", "%Y-%m")
monthes = get_month()
result_keyword = {}
result_count = {}
dict_total = {}
idx = 1
while day < last_day:
keyword_caches = AnalyticsCacheSearchKeywordDay.objects.filter(theday__gte=day, theday__lt=next_day)
date = str(day.year) + "-" + str(day.month)
result_keyword[date] = []
result_count[date] = []
dict_month = {}
for keyword in keyword_caches:
word = keyword.keyword.replace(" ", "")
if dict_total.get(word) is None:
dict_total[word] = 0
if dict_month.get(word) is None:
dict_month[word] = 0
dict_total[word] += keyword.total_count
dict_month[word] += keyword.total_count
sort_ids = sorted(dict_month, key=lambda x:dict_month[x], reverse=True)
cnt = 0
for id in sort_ids:
if cnt > 99:
break
result_keyword[date].append(id)
result_count[date].append(dict_month[id])
cnt+=1
day = datetime.strptime(monthes[idx], "%Y-%m")
next_day = datetime.strptime(monthes[idx+1], "%Y-%m")
idx+=1
sorted_ids = sorted(dict_total, key=lambda x: dict_total[x], reverse=True)
total_rank_keyword = []
total_rank_count = []
for id in sorted_ids:
total_rank_keyword.append(id)
total_rank_count.append(dict_total[id])
with open("result.txt", "w") as f:
monthes = get_month()
for month in monthes:
if month == "2018-11" or month == "2018-12":
continue
print(month, file=f, end='\t')
print(" ", file=f, end='\t')
print("합산TOP100", file=f, end='\n')
for rank in range(0,100):
for month in monthes:
if month == "2018-11" or month == "2018-12":
continue
if result_keyword.get(month) is None:
print(" ", file=f, end='\t')
print(" ", file=f, end='\t')
continue
if len(result_keyword[month]) < rank+1:
print(" ", file=f, end='\t')
print(" ", file=f, end='\t')
continue
print(result_keyword[month][rank], file=f, end='\t')
print(result_count[month][rank], file=f, end='\t')
print(total_rank_keyword[rank], file=f, end='\t')
print(total_rank_count[rank], file=f, end='\n') | [
0,
1,
2,
3,
4
] |
2,009 | 94439ffe3303f5efe15562f26d693e1e7a8115df | import math #h=g^x
# Baby-step giant-step: solve h = g^x (mod p) for x, with p prime.
h = input("h: ")
g = input("g: ")
p = input("p: ")
m = int(math.ceil(math.sqrt(p)))
# Giant-step table of g^(j*m) mod p for j = 0..m-1.
# Bug fix: the entries must be reduced mod p (three-argument pow);
# without the modulus the membership test below almost never matches.
aj = [0] * m
for j in range(m):
    aj[j] = pow(g, j * m, p)
# Baby steps: peel one factor of g off h per iteration using g^-1 mod p,
# computed via Fermat's little theorem (requires p prime).
g_inv = pow(g, p - 2, p)
gamma = h
for i in range(m):
    if gamma in aj:
        # gamma == g^(j*m) means h == g^(j*m + i).
        j = aj.index(gamma)
        print((j * m) + i)
        break
    gamma = (gamma * g_inv) % p
| null | null | null | null | [
0
] |
2,010 | ffd7aef2e72e64ac5b9f85b9d12845479187d89b | <mask token>
class ClientInline(admin.StackedInline):
<mask token>
<mask token>
<mask token>
class ClientAdmin(admin.ModelAdmin):
inlines = [ClientInline]
<mask token>
| <mask token>
class ClientInline(admin.StackedInline):
model = Adress
can_delete = False
extra = 1
class ClientAdmin(admin.ModelAdmin):
inlines = [ClientInline]
<mask token>
| <mask token>
class ClientInline(admin.StackedInline):
model = Adress
can_delete = False
extra = 1
class ClientAdmin(admin.ModelAdmin):
inlines = [ClientInline]
admin.site.register(Client, ClientAdmin)
| from django.contrib import admin
from .models import Client, Adress
class ClientInline(admin.StackedInline):
    # Edit a client's addresses inline on the client's admin change page.
    model = Adress
    can_delete = False
    # Show one extra blank address form.
    extra = 1
class ClientAdmin(admin.ModelAdmin):
inlines = [ClientInline]
admin.site.register(Client, ClientAdmin)
| from django.contrib import admin
from .models import Client, Adress
# Register your models here.
class ClientInline(admin.StackedInline):
model = Adress
can_delete = False
extra = 1
class ClientAdmin(admin.ModelAdmin):
inlines = [ClientInline]
admin.site.register(Client, ClientAdmin) | [
3,
4,
5,
6,
7
] |
2,011 | 033d1b39dd3ebaa81c8c6c52386909acf076ef47 | <mask token>
| <mask token>
if media >= 6:
print('Parabéns!! Você foi aprovado.')
else:
print('Que pena!! Você foi reprovado.')
| <mask token>
nota1 = float(input('Digite sua primeira nota: '))
nota2 = float(input('Digite sua segunda nota: '))
nota3 = float(input('Digite sua terceira nota: '))
media = (nota1 + nota2 + nota3) / 3
if media >= 6:
print('Parabéns!! Você foi aprovado.')
else:
print('Que pena!! Você foi reprovado.')
| """
Faça um algoritmo que solicita ao usuário as notas de três provas. Calcule a média aritmética e
informe se o aluno foi Aprovado ou Reprovado (o aluno é considerado aprovado com a média igual ou superior a 6).
"""
nota1 = float(input("Digite sua primeira nota: "))
nota2 = float(input("Digite sua segunda nota: "))
nota3 = float(input("Digite sua terceira nota: "))
media = (nota1 + nota2 + nota3)/3
if media >= 6:
print("Parabéns!! Você foi aprovado.")
else:
print("Que pena!! Você foi reprovado.")
| null | [
0,
1,
2,
3
] |
2,012 | c6554ff18c23a61d3694e73b808f44c96f9a19c4 | <mask token>
| class Solution:
<mask token>
| class Solution:
def countBits(self, num: int) ->List[int]:
total = []
for i in range(num + 1):
counter = bin(i).count('1')
total.append(counter)
return total
| class Solution:
def countBits(self, num: int) -> List[int]:
total = []
for i in range(num + 1):
counter = bin(i).count('1')
# for j in bin(i):
# if j == '1':
# counter += 1
total.append(counter)
return total
# bin(i).count('1') is the easy way to do it with built in functions
# for loop to search each char in the returned string is slower
| null | [
0,
1,
2,
3
] |
2,013 | ae88418ccfdaa4b357a2491f6450dbcda55b1c21 | <mask token>
class TestPortfolioIdDelResponsePortfolioItemProductMedia(unittest.TestCase):
<mask token>
def setUp(self):
pass
def tearDown(self):
pass
def testPortfolioIdDelResponsePortfolioItemProductMedia(self):
"""Test PortfolioIdDelResponsePortfolioItemProductMedia"""
pass
<mask token>
| <mask token>
class TestPortfolioIdDelResponsePortfolioItemProductMedia(unittest.TestCase):
"""PortfolioIdDelResponsePortfolioItemProductMedia unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testPortfolioIdDelResponsePortfolioItemProductMedia(self):
"""Test PortfolioIdDelResponsePortfolioItemProductMedia"""
pass
<mask token>
| <mask token>
class TestPortfolioIdDelResponsePortfolioItemProductMedia(unittest.TestCase):
"""PortfolioIdDelResponsePortfolioItemProductMedia unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testPortfolioIdDelResponsePortfolioItemProductMedia(self):
"""Test PortfolioIdDelResponsePortfolioItemProductMedia"""
pass
if __name__ == '__main__':
unittest.main()
| <mask token>
from __future__ import absolute_import
import unittest
import io_stockx
from io_stockx.models.portfolio_id_del_response_portfolio_item_product_media import PortfolioIdDelResponsePortfolioItemProductMedia
from io_stockx.rest import ApiException
class TestPortfolioIdDelResponsePortfolioItemProductMedia(unittest.TestCase):
"""PortfolioIdDelResponsePortfolioItemProductMedia unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testPortfolioIdDelResponsePortfolioItemProductMedia(self):
"""Test PortfolioIdDelResponsePortfolioItemProductMedia"""
pass
if __name__ == '__main__':
unittest.main()
| # coding: utf-8
"""
StockX API
PRERELEASE API - Subject to change before release. Provides access to StockX's public services, allowing end users to query for product and order information. # noqa: E501
OpenAPI spec version: 1.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import io_stockx
from io_stockx.models.portfolio_id_del_response_portfolio_item_product_media import PortfolioIdDelResponsePortfolioItemProductMedia # noqa: E501
from io_stockx.rest import ApiException
class TestPortfolioIdDelResponsePortfolioItemProductMedia(unittest.TestCase):
"""PortfolioIdDelResponsePortfolioItemProductMedia unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testPortfolioIdDelResponsePortfolioItemProductMedia(self):
"""Test PortfolioIdDelResponsePortfolioItemProductMedia"""
# FIXME: construct object with mandatory attributes with example values
# model = io_stockx.models.portfolio_id_del_response_portfolio_item_product_media.PortfolioIdDelResponsePortfolioItemProductMedia() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
4,
5,
6,
7,
8
] |
2,014 | 7b459aad399a31f61b8686e1919b38d5538924b8 | <mask token>
| <mask token>
def test_readme_escaping() ->None:
"""Ensure the demo matches expected."""
assert main() == '<div><span>Escaping</span></div>'
| <mask token>
from . import main
def test_readme_escaping() ->None:
"""Ensure the demo matches expected."""
assert main() == '<div><span>Escaping</span></div>'
| """Test an example."""
from . import main
def test_readme_escaping() -> None:
"""Ensure the demo matches expected."""
assert main() == "<div><span>Escaping</span></div>"
| null | [
0,
1,
2,
3
] |
2,015 | 0778b25363d50e699edf48b92f1104ab57c03172 | <mask token>
| def multiply(value):
return value * 5
<mask token>
| def multiply(value):
return value * 5
if __name__ == '__main__':
a = [0, 1, 2, 3, 4, 5]
new_empty_list = []
print(a)
for item in a:
b = multiply(item)
new_empty_list.append(b)
print(b)
print(new_empty_list)
| ######################################################################
#
# Write something here to recognize your own file
#
# Copyright: MIT License
#
######################################################################
def multiply(value):
return value * 5
if __name__ == "__main__":
a = [0, 1, 2, 3, 4, 5]
new_empty_list = []
print(a)
for item in a:
b = multiply(item)
new_empty_list.append(b)
print(b)
print(new_empty_list) | null | [
0,
1,
2,
3
] |
2,016 | 95015c467dd6371f575fb5535fe652a914650ef1 | <mask token>
def compute_accuracy(model, good, bad):
train_arrays = numpy.zeros((25000, 400))
train_labels = numpy.zeros(25000)
classifier = LogisticRegression()
for i in range(25000 / 2):
prefix_train_pos = 'good_' + str(i)
prefix_train_neg = 'bad_' + str(i)
pos_review = model.docvecs[prefix_train_pos]
neg_review = model.docvecs[prefix_train_neg]
train_arrays[i] = pos_review
train_labels[i] = 1
train_arrays[25000 / 2 + i] = neg_review
train_labels[25000 / 2 + i] = 0
classifier.fit(train_arrays, train_labels)
test_arrays_good = numpy.zeros((12500, 400))
test_ratings_good = numpy.zeros(12500)
test_labels_good = numpy.zeros(12500)
test_arrays_bad = numpy.zeros((12500, 400))
test_ratings_bad = numpy.zeros(12500)
test_labels_bad = numpy.zeros(12500)
test_arrays = numpy.zeros((25000, 400))
test_rating = numpy.zeros(25000)
test_labels = numpy.zeros(25000)
good_correct = 0
good_total = 0
bad_correct = 0
bad_total = 0
for i, review in enumerate(good):
test_arrays[i] = model.infer_vector(review[0])
test_labels[i] = 1
if classifier.predict([test_arrays[i]]) == 1:
good_correct += 1
for i, review in enumerate(bad):
test_arrays[i + 12500] = model.infer_vector(review[0])
test_labels[i + 12500] = 0
if classifier.predict([test_arrays[i + 12500]]) == 0:
bad_correct += 1
accuracy = classifier.score(test_arrays, test_labels) * 100
print('Classifier reports a {}% accuracy'.format(accuracy))
print('{} Good correctly identified'.format(good_correct))
print('{} Bad correctly identified'.format(bad_correct))
<mask token>
| <mask token>
def compute_accuracy(model, good, bad):
train_arrays = numpy.zeros((25000, 400))
train_labels = numpy.zeros(25000)
classifier = LogisticRegression()
for i in range(25000 / 2):
prefix_train_pos = 'good_' + str(i)
prefix_train_neg = 'bad_' + str(i)
pos_review = model.docvecs[prefix_train_pos]
neg_review = model.docvecs[prefix_train_neg]
train_arrays[i] = pos_review
train_labels[i] = 1
train_arrays[25000 / 2 + i] = neg_review
train_labels[25000 / 2 + i] = 0
classifier.fit(train_arrays, train_labels)
test_arrays_good = numpy.zeros((12500, 400))
test_ratings_good = numpy.zeros(12500)
test_labels_good = numpy.zeros(12500)
test_arrays_bad = numpy.zeros((12500, 400))
test_ratings_bad = numpy.zeros(12500)
test_labels_bad = numpy.zeros(12500)
test_arrays = numpy.zeros((25000, 400))
test_rating = numpy.zeros(25000)
test_labels = numpy.zeros(25000)
good_correct = 0
good_total = 0
bad_correct = 0
bad_total = 0
for i, review in enumerate(good):
test_arrays[i] = model.infer_vector(review[0])
test_labels[i] = 1
if classifier.predict([test_arrays[i]]) == 1:
good_correct += 1
for i, review in enumerate(bad):
test_arrays[i + 12500] = model.infer_vector(review[0])
test_labels[i + 12500] = 0
if classifier.predict([test_arrays[i + 12500]]) == 0:
bad_correct += 1
accuracy = classifier.score(test_arrays, test_labels) * 100
print('Classifier reports a {}% accuracy'.format(accuracy))
print('{} Good correctly identified'.format(good_correct))
print('{} Bad correctly identified'.format(bad_correct))
<mask token>
compute_accuracy(yelp_model, yelp_sources_good, yelp_sources_bad)
| <mask token>
dirname = os.path.dirname(__file__)
def compute_accuracy(model, good, bad):
train_arrays = numpy.zeros((25000, 400))
train_labels = numpy.zeros(25000)
classifier = LogisticRegression()
for i in range(25000 / 2):
prefix_train_pos = 'good_' + str(i)
prefix_train_neg = 'bad_' + str(i)
pos_review = model.docvecs[prefix_train_pos]
neg_review = model.docvecs[prefix_train_neg]
train_arrays[i] = pos_review
train_labels[i] = 1
train_arrays[25000 / 2 + i] = neg_review
train_labels[25000 / 2 + i] = 0
classifier.fit(train_arrays, train_labels)
test_arrays_good = numpy.zeros((12500, 400))
test_ratings_good = numpy.zeros(12500)
test_labels_good = numpy.zeros(12500)
test_arrays_bad = numpy.zeros((12500, 400))
test_ratings_bad = numpy.zeros(12500)
test_labels_bad = numpy.zeros(12500)
test_arrays = numpy.zeros((25000, 400))
test_rating = numpy.zeros(25000)
test_labels = numpy.zeros(25000)
good_correct = 0
good_total = 0
bad_correct = 0
bad_total = 0
for i, review in enumerate(good):
test_arrays[i] = model.infer_vector(review[0])
test_labels[i] = 1
if classifier.predict([test_arrays[i]]) == 1:
good_correct += 1
for i, review in enumerate(bad):
test_arrays[i + 12500] = model.infer_vector(review[0])
test_labels[i + 12500] = 0
if classifier.predict([test_arrays[i + 12500]]) == 0:
bad_correct += 1
accuracy = classifier.score(test_arrays, test_labels) * 100
print('Classifier reports a {}% accuracy'.format(accuracy))
print('{} Good correctly identified'.format(good_correct))
print('{} Bad correctly identified'.format(bad_correct))
yelp_model = Doc2Vec.load(os.path.join(dirname, 'models/yelp_model.d2v'))
yelp_sources_good = YelpLabeledLineSentence(os.path.join(dirname,
'../data/review.json'), 'good', 12500)
yelp_sources_bad = YelpLabeledLineSentence(os.path.join(dirname,
'../data/review.json'), 'bad', 12500)
compute_accuracy(yelp_model, yelp_sources_good, yelp_sources_bad)
| from gensim import utils
from gensim.models.doc2vec import LabeledSentence
from gensim.models import Doc2Vec
from matplotlib import pyplot as plt
from sklearn.manifold import TSNE
from sklearn.feature_extraction.text import CountVectorizer
from random import shuffle
from sklearn.linear_model import LogisticRegression
from yelp_labeled_line_sentence import YelpLabeledLineSentence
from imdb_labeled_line_sentence import IMDBLabeledLineSentence
from sklearn.linear_model import SGDClassifier
import numpy
import json
import time
import os
import sys
import csv
dirname = os.path.dirname(__file__)
def compute_accuracy(model, good, bad):
train_arrays = numpy.zeros((25000, 400))
train_labels = numpy.zeros(25000)
classifier = LogisticRegression()
for i in range(25000 / 2):
prefix_train_pos = 'good_' + str(i)
prefix_train_neg = 'bad_' + str(i)
pos_review = model.docvecs[prefix_train_pos]
neg_review = model.docvecs[prefix_train_neg]
train_arrays[i] = pos_review
train_labels[i] = 1
train_arrays[25000 / 2 + i] = neg_review
train_labels[25000 / 2 + i] = 0
classifier.fit(train_arrays, train_labels)
test_arrays_good = numpy.zeros((12500, 400))
test_ratings_good = numpy.zeros(12500)
test_labels_good = numpy.zeros(12500)
test_arrays_bad = numpy.zeros((12500, 400))
test_ratings_bad = numpy.zeros(12500)
test_labels_bad = numpy.zeros(12500)
test_arrays = numpy.zeros((25000, 400))
test_rating = numpy.zeros(25000)
test_labels = numpy.zeros(25000)
good_correct = 0
good_total = 0
bad_correct = 0
bad_total = 0
for i, review in enumerate(good):
test_arrays[i] = model.infer_vector(review[0])
test_labels[i] = 1
if classifier.predict([test_arrays[i]]) == 1:
good_correct += 1
for i, review in enumerate(bad):
test_arrays[i + 12500] = model.infer_vector(review[0])
test_labels[i + 12500] = 0
if classifier.predict([test_arrays[i + 12500]]) == 0:
bad_correct += 1
accuracy = classifier.score(test_arrays, test_labels) * 100
print('Classifier reports a {}% accuracy'.format(accuracy))
print('{} Good correctly identified'.format(good_correct))
print('{} Bad correctly identified'.format(bad_correct))
yelp_model = Doc2Vec.load(os.path.join(dirname, 'models/yelp_model.d2v'))
yelp_sources_good = YelpLabeledLineSentence(os.path.join(dirname,
'../data/review.json'), 'good', 12500)
yelp_sources_bad = YelpLabeledLineSentence(os.path.join(dirname,
'../data/review.json'), 'bad', 12500)
compute_accuracy(yelp_model, yelp_sources_good, yelp_sources_bad)
| # ARGS:
# 1: total train reviews
# 2: number of iterations (for csv output)
# 3: size of vector
# 4: good/bad sizes
# import dependencies
from gensim import utils
from gensim.models.doc2vec import LabeledSentence
from gensim.models import Doc2Vec
from matplotlib import pyplot as plt
from sklearn.manifold import TSNE
from sklearn.feature_extraction.text import CountVectorizer
from random import shuffle
from sklearn.linear_model import LogisticRegression
from yelp_labeled_line_sentence import YelpLabeledLineSentence
from imdb_labeled_line_sentence import IMDBLabeledLineSentence
from sklearn.linear_model import SGDClassifier
import numpy
import json
import time
import os
import sys
import csv
dirname = os.path.dirname(__file__)
def compute_accuracy(model, good, bad):
# load our doc2vec model that we trained
# take our train reviews from the model, and put them in array, good reviews first, bad reviews second half of array
train_arrays = numpy.zeros((25000, 400))
train_labels = numpy.zeros(25000)
# create a logistic regression classifier
classifier = LogisticRegression()
# take our train reviews from the model, and put them in array, good reviews first, bad reviews second half of array
for i in range((25000/2)):
prefix_train_pos = 'good_' + str(i)
prefix_train_neg = 'bad_' + str(i)
pos_review = model.docvecs[prefix_train_pos]
neg_review = model.docvecs[prefix_train_neg]
train_arrays[i] = pos_review
train_labels[i] = 1
train_arrays[(25000/2) + i] = neg_review
train_labels[(25000/2) + i] = 0
classifier.fit(train_arrays, train_labels)
# take our test reviews from the model, and put them in array, good reviews first, bad reviews second half of array
# for each review, we'll infer the review's vector against our model
test_arrays_good = numpy.zeros((12500, 400))
test_ratings_good = numpy.zeros(12500)
test_labels_good = numpy.zeros(12500)
test_arrays_bad = numpy.zeros((12500, 400))
test_ratings_bad = numpy.zeros(12500)
test_labels_bad = numpy.zeros(12500)
test_arrays = numpy.zeros((25000, 400))
test_rating = numpy.zeros(25000)
test_labels = numpy.zeros(25000)
good_correct = 0
good_total = 0
bad_correct = 0
bad_total = 0
for i, review in enumerate(good):
test_arrays[i] = model.infer_vector(review[0])
test_labels[i] = 1
if(classifier.predict([test_arrays[i]]) == 1):
good_correct += 1
# test_ratings_good[i] = review[1][2]
for i, review in enumerate(bad):
test_arrays[i + 12500] = model.infer_vector(review[0])
test_labels[i + 12500] = 0
if(classifier.predict([test_arrays[i + 12500]]) == 0):
bad_correct += 1
# test_ratings_bad[i] = review[1][2]
# print the accuracy of our classifier
# accuracy=classifier.score(test_arrays_good, test_labels_good) * 100
# print("Classifier reports a {}% accuracy for good reviews".format(accuracy))
#
# accuracy=classifier.score(test_arrays_bad, test_labels_bad) * 100
# print("Classifier reports a {}% accuracy for bad reviews".format(accuracy))
#
accuracy=classifier.score(test_arrays, test_labels) * 100
print("Classifier reports a {}% accuracy".format(accuracy))
print("{} Good correctly identified".format(good_correct))
print("{} Bad correctly identified".format(bad_correct))
# for dim in range(1, int(sys.argv[3])):
# # plot probability of review being good vs feature vector value
# plt.scatter(test_arrays_good[:,dim], classifier.predict_proba(test_arrays_good)[:,1], color='green')
# plt.scatter(test_arrays_bad[:,dim], classifier.predict_proba(test_arrays_bad)[:,1], color='red')
#
# plt.ylabel('Probability of Review Being Good')
# plt.xlabel('dim={}'.format(dim))
# plt.show()
# # reduce the n-dimensional feature vector to n=1 using t-SNE
# tsne = TSNE(n_components=1)
# test_arrays_tsne_good = tsne.fit_transform(test_arrays_good)
# test_arrays_tsne_bad = tsne.fit_transform(test_arrays_bad)
#
# # plot probability of review being good vs feature vector value
# plt.scatter(test_arrays_tsne_good, classifier.predict_proba(test_arrays_good)[:,1], color='green')
# plt.scatter(test_arrays_tsne_bad, classifier.predict_proba(test_arrays_bad)[:,1], color='red')
#
# plt.ylabel('Probability of Review Being Good')
# plt.xlabel('t-SNE reduced feature vector (dim=1)')
# plt.show()
# # reduce the n-dimensional feature vector to n=1 using t-SNE
# tsne = TSNE(n_components=2)
# test_arrays_tsne_good = tsne.fit_transform(test_arrays_good)
# test_arrays_tsne_bad = tsne.fit_transform(test_arrays_bad)
#
# # plot feature vectors against each other
# plt.scatter(test_arrays_tsne_good[:,0], test_arrays_tsne_good[:,1], color='green')
# plt.scatter(test_arrays_tsne_bad[:,0], test_arrays_tsne_bad[:,1], color='red')
#
# plt.ylabel('x1')
# plt.xlabel('x2')
# plt.show()
yelp_model = Doc2Vec.load(os.path.join(dirname,'models/yelp_model.d2v'))
# imdb_model = Doc2Vec.load(os.path.join(dirname,'models/imdb_model.d2v'))
# create an array of LabeledLineSentences for previously unseen
# good and bad reviews
# this does some basic formatting of the text as well to make it more
# digestible by gensim and sklearn
yelp_sources_good = YelpLabeledLineSentence(os.path.join(dirname, '../data/review.json'), 'good', 12500)
yelp_sources_bad = YelpLabeledLineSentence(os.path.join(dirname, '../data/review.json'), 'bad', 12500)
# imdb_sources_good = IMDBLabeledLineSentence({os.path.join(dirname, '../data/aclImdb/test/pos'):'good'})
# imdb_sources_bad = IMDBLabeledLineSentence({os.path.join(dirname, '../data/aclImdb/test/neg'):'bad'})
compute_accuracy(yelp_model, yelp_sources_good, yelp_sources_bad)
# compute_accuracy(imdb_model, imdb_sources_good, imdb_sources_bad)
| [
1,
2,
3,
4,
5
] |
2,017 | 6cfda09f360aaa560011b91db8316e5e3889eea1 | <mask token>
| def cnt():
s1 = input('enter a string :').strip()
count = 0
countu = 0
for i in s1:
if i.islower():
count += 1
elif i.isupper():
countu += 1
else:
pass
print('THE NUMBER OF UPPER CASES ARE :', countu)
print('THE NUMBER OF LOWER CASSES ARE: ', count)
cnt()
| #CALCULATE NUMBER OF UPPER AND LOWER CASES
def cnt():
s1=input("enter a string :").strip()
count=0
countu=0
for i in s1:
if(i.islower()):
count+=1
elif(i.isupper()):
countu+=1
else:
pass
print("THE NUMBER OF UPPER CASES ARE :",countu)
print("THE NUMBER OF LOWER CASSES ARE: ",count)
cnt()
| null | null | [
0,
1,
2
] |
2,018 | cb50a5352b0ad7b04dee9393c50da54fdf507376 | <mask token>
def str2int(strtime: str):
hh, mm, ss = strtime.split(':')
return 3600 * int(hh) + 60 * int(mm) + int(ss)
def int2str(inttime: int):
hh = inttime // 3600
mm = inttime % 3600 // 60
ss = inttime % 60
return str(hh).zfill(2) + ':' + str(mm).zfill(2) + ':' + str(ss).zfill(2)
<mask token>
| <mask token>
def solution(play_time, adv_time, logs):
"""
Strategy :
adv_start_time을 log start time 부터 < 995959 - adv time
sliding window
Step 1.
String time -> integer time
Step 2. pseudo code : Two pointer algorithm
max time = 0
return max time
"""
MAX = str2int(play_time)
max_view = 0
ans_time = 0
adv_time = str2int(adv_time)
logs = [[str2int(log.split('-')[0]), str2int(log.split('-')[1])] for
log in logs]
view_list = [0] * (MAX + 1)
for start_time, end_time in logs:
view_list[start_time] += 1
view_list[end_time] -= 1
for i in range(1, MAX + 1):
view_list[i] = view_list[i] + view_list[i - 1]
for i in range(1, MAX + 1):
view_list[i] = view_list[i] + view_list[i - 1]
for start_time in range(MAX - adv_time + 1):
end_time = start_time + adv_time
temp_view = view_list[end_time] - view_list[start_time]
if temp_view > max_view:
max_view = temp_view
ans_time = start_time
if ans_time != 0:
ans_time += 1
return int2str(ans_time)
def str2int(strtime: str):
hh, mm, ss = strtime.split(':')
return 3600 * int(hh) + 60 * int(mm) + int(ss)
def int2str(inttime: int):
hh = inttime // 3600
mm = inttime % 3600 // 60
ss = inttime % 60
return str(hh).zfill(2) + ':' + str(mm).zfill(2) + ':' + str(ss).zfill(2)
<mask token>
| <mask token>
def solution(play_time, adv_time, logs):
"""
Strategy :
adv_start_time을 log start time 부터 < 995959 - adv time
sliding window
Step 1.
String time -> integer time
Step 2. pseudo code : Two pointer algorithm
max time = 0
return max time
"""
MAX = str2int(play_time)
max_view = 0
ans_time = 0
adv_time = str2int(adv_time)
logs = [[str2int(log.split('-')[0]), str2int(log.split('-')[1])] for
log in logs]
view_list = [0] * (MAX + 1)
for start_time, end_time in logs:
view_list[start_time] += 1
view_list[end_time] -= 1
for i in range(1, MAX + 1):
view_list[i] = view_list[i] + view_list[i - 1]
for i in range(1, MAX + 1):
view_list[i] = view_list[i] + view_list[i - 1]
for start_time in range(MAX - adv_time + 1):
end_time = start_time + adv_time
temp_view = view_list[end_time] - view_list[start_time]
if temp_view > max_view:
max_view = temp_view
ans_time = start_time
if ans_time != 0:
ans_time += 1
return int2str(ans_time)
def str2int(strtime: str):
hh, mm, ss = strtime.split(':')
return 3600 * int(hh) + 60 * int(mm) + int(ss)
def int2str(inttime: int):
hh = inttime // 3600
mm = inttime % 3600 // 60
ss = inttime % 60
return str(hh).zfill(2) + ':' + str(mm).zfill(2) + ':' + str(ss).zfill(2)
if __name__ == '__main__':
play_time = '02:03:55'
adv_time = '00:14:15'
logs = ['01:20:15-01:45:14', '00:25:50-00:48:29', '00:40:31-01:00:00',
'01:37:44-02:02:30', '01:30:59-01:53:29']
result = '01:30:59'
print(solution(play_time, adv_time, logs))
print(result)
play_time = '99:59:59'
adv_time = '25:00:00'
logs = ['69:59:59-89:59:59', '01:00:00-21:00:00', '79:59:59-99:59:59',
'11:00:00-31:00:00']
result = '01:00:00'
print(solution(play_time, adv_time, logs))
print(result)
play_time = '50:00:00'
adv_time = '50:00:00'
logs = ['15:36:51-38:21:49', '10:14:18-15:36:51', '38:21:49-42:51:45']
result = '00:00:00'
print(solution(play_time, adv_time, logs))
print(result)
| from collections import deque
def solution(play_time, adv_time, logs):
"""
Strategy :
adv_start_time을 log start time 부터 < 995959 - adv time
sliding window
Step 1.
String time -> integer time
Step 2. pseudo code : Two pointer algorithm
max time = 0
return max time
"""
MAX = str2int(play_time)
max_view = 0
ans_time = 0
adv_time = str2int(adv_time)
logs = [[str2int(log.split('-')[0]), str2int(log.split('-')[1])] for
log in logs]
view_list = [0] * (MAX + 1)
for start_time, end_time in logs:
view_list[start_time] += 1
view_list[end_time] -= 1
for i in range(1, MAX + 1):
view_list[i] = view_list[i] + view_list[i - 1]
for i in range(1, MAX + 1):
view_list[i] = view_list[i] + view_list[i - 1]
for start_time in range(MAX - adv_time + 1):
end_time = start_time + adv_time
temp_view = view_list[end_time] - view_list[start_time]
if temp_view > max_view:
max_view = temp_view
ans_time = start_time
if ans_time != 0:
ans_time += 1
return int2str(ans_time)
def str2int(strtime: str):
hh, mm, ss = strtime.split(':')
return 3600 * int(hh) + 60 * int(mm) + int(ss)
def int2str(inttime: int):
hh = inttime // 3600
mm = inttime % 3600 // 60
ss = inttime % 60
return str(hh).zfill(2) + ':' + str(mm).zfill(2) + ':' + str(ss).zfill(2)
if __name__ == '__main__':
play_time = '02:03:55'
adv_time = '00:14:15'
logs = ['01:20:15-01:45:14', '00:25:50-00:48:29', '00:40:31-01:00:00',
'01:37:44-02:02:30', '01:30:59-01:53:29']
result = '01:30:59'
print(solution(play_time, adv_time, logs))
print(result)
play_time = '99:59:59'
adv_time = '25:00:00'
logs = ['69:59:59-89:59:59', '01:00:00-21:00:00', '79:59:59-99:59:59',
'11:00:00-31:00:00']
result = '01:00:00'
print(solution(play_time, adv_time, logs))
print(result)
play_time = '50:00:00'
adv_time = '50:00:00'
logs = ['15:36:51-38:21:49', '10:14:18-15:36:51', '38:21:49-42:51:45']
result = '00:00:00'
print(solution(play_time, adv_time, logs))
print(result)
| from collections import deque
def solution(play_time, adv_time, logs):
'''
Strategy :
adv_start_time을 log start time 부터 < 995959 - adv time
sliding window
Step 1.
String time -> integer time
Step 2. pseudo code : Two pointer algorithm
max time = 0
return max time
'''
## Step 1.
MAX = str2int(play_time)
max_view = 0
ans_time = 0
adv_time = str2int(adv_time)
logs = [[str2int(log.split("-")[0]),str2int(log.split("-")[1])] for log in logs]
view_list = [0] * (MAX+1)
## Step 2.
## 도함수
for start_time,end_time in logs:
view_list[start_time] += 1
view_list[end_time] -= 1
## 함수
for i in range(1,MAX+1):
view_list[i] = view_list[i]+view_list[i-1]
## 누적 합
for i in range(1,MAX+1):
view_list[i] = view_list[i]+view_list[i-1]
for start_time in range(MAX-adv_time+1):
## start time 0,1,2,... MAX-adv_time
## end time adv_time, ... MAX
end_time = start_time + adv_time
temp_view = view_list[end_time] - view_list[start_time]
if temp_view > max_view:
max_view = temp_view
ans_time = start_time
if ans_time != 0:
ans_time += 1
return int2str(ans_time)
def str2int(strtime:str):
hh,mm,ss = strtime.split(":")
return 3600*int(hh)+60*int(mm)+int(ss)
def int2str(inttime:int):
hh = inttime//3600
mm = (inttime%3600)//60
ss = inttime%60
return str(hh).zfill(2)+":"+str(mm).zfill(2)+":"+str(ss).zfill(2)
if __name__ == "__main__":
play_time = "02:03:55"
adv_time = "00:14:15"
logs = ["01:20:15-01:45:14", "00:25:50-00:48:29", "00:40:31-01:00:00", "01:37:44-02:02:30", "01:30:59-01:53:29"]
result = "01:30:59"
print(solution(play_time, adv_time, logs))
print(result)
play_time = "99:59:59"
adv_time = "25:00:00"
logs = ["69:59:59-89:59:59", "01:00:00-21:00:00", "79:59:59-99:59:59", "11:00:00-31:00:00"]
result = "01:00:00"
print(solution(play_time, adv_time, logs))
print(result)
play_time = "50:00:00"
adv_time = "50:00:00"
logs = ["15:36:51-38:21:49", "10:14:18-15:36:51", "38:21:49-42:51:45"]
result = "00:00:00"
print(solution(play_time, adv_time, logs))
print(result) | [
2,
3,
4,
5,
6
] |
2,019 | 1c5655563d05498f016fb2d41a07331b9e8de5e8 | <mask token>
class SchematicsPlugin(BasePlugin):
<mask token>
def __init__(self, schema_name_resolver=None):
super().__init__()
self.schema_name_resolver = schema_name_resolver or resolver
self.spec = None
self.openapi_version = None
self.openapi = None
def init_spec(self, spec):
super().init_spec(spec)
self.spec = spec
self.openapi_version = spec.openapi_version
self.openapi = OpenAPIConverter(openapi_version=spec.
openapi_version, schema_name_resolver=self.schema_name_resolver,
spec=spec)
def resolve_parameters(self, parameters):
resolved = []
for parameter in parameters:
if isinstance(parameter, dict) and not isinstance(parameter.get
('schema', {}), dict):
schema_instance = resolve_schema_instance(parameter['schema'])
if 'in' in parameter:
del parameter['schema']
resolved += self.openapi.schema2parameters(schema_instance,
default_in=parameter.pop('in'), **parameter)
continue
self.resolve_schema(parameter)
resolved.append(parameter)
return resolved
def resolve_schema_in_request_body(self, request_body):
"""Function to resolve a schema in a requestBody object - modifies then
response dict to convert Marshmallow Schema object or class into dict
"""
content = request_body['content']
for content_type in content:
schema = content[content_type]['schema']
content[content_type]['schema'] = self.openapi.resolve_schema_dict(
schema)
def resolve_schema(self, data):
"""Function to resolve a schema in a parameter or response - modifies the
corresponding dict to convert Marshmallow Schema object or class into dict
:param APISpec spec: `APISpec` containing refs.
:param dict|str data: either a parameter or response dictionary that may
contain a schema, or a reference provided as string
"""
if not isinstance(data, dict):
return
if 'schema' in data:
data['schema'] = self.openapi.resolve_schema_dict(data['schema'])
if self.openapi_version.major >= 3:
if 'content' in data:
for content_type in data['content']:
schema = data['content'][content_type]['schema']
data['content'][content_type]['schema'
] = self.openapi.resolve_schema_dict(schema)
<mask token>
<mask token>
<mask token>
def response_helper(self, response, **kwargs):
"""Response component helper that allows using a marshmallow
:class:`Schema <marshmallow.Schema>` in response definition.
:param dict parameter: response fields. May contain a marshmallow
Schema class or instance.
"""
self.resolve_schema(response)
if 'headers' in response:
for header in response['headers'].values():
self.resolve_schema(header)
return response
def operation_helper(self, operations, **kwargs):
for operation in operations.values():
if not isinstance(operation, dict):
continue
if 'parameters' in operation:
operation['parameters'] = self.resolve_parameters(operation
['parameters'])
if self.openapi_version.major >= 3:
if 'requestBody' in operation:
self.resolve_schema_in_request_body(operation[
'requestBody'])
for response in operation.get('responses', {}).values():
self.resolve_schema(response)
if 'headers' in response:
for header in response['headers'].values():
self.resolve_schema(header)
def warn_if_schema_already_in_spec(self, schema_key):
"""Method to warn the user if the schema has already been added to the
spec.
"""
if schema_key in self.openapi.refs:
warnings.warn(
'{} has already been added to the spec. Adding it twice may cause references to not resolve properly.'
.format(schema_key[0]), UserWarning)
| <mask token>
class SchematicsPlugin(BasePlugin):
<mask token>
def __init__(self, schema_name_resolver=None):
super().__init__()
self.schema_name_resolver = schema_name_resolver or resolver
self.spec = None
self.openapi_version = None
self.openapi = None
def init_spec(self, spec):
super().init_spec(spec)
self.spec = spec
self.openapi_version = spec.openapi_version
self.openapi = OpenAPIConverter(openapi_version=spec.
openapi_version, schema_name_resolver=self.schema_name_resolver,
spec=spec)
def resolve_parameters(self, parameters):
resolved = []
for parameter in parameters:
if isinstance(parameter, dict) and not isinstance(parameter.get
('schema', {}), dict):
schema_instance = resolve_schema_instance(parameter['schema'])
if 'in' in parameter:
del parameter['schema']
resolved += self.openapi.schema2parameters(schema_instance,
default_in=parameter.pop('in'), **parameter)
continue
self.resolve_schema(parameter)
resolved.append(parameter)
return resolved
def resolve_schema_in_request_body(self, request_body):
"""Function to resolve a schema in a requestBody object - modifies then
response dict to convert Marshmallow Schema object or class into dict
"""
content = request_body['content']
for content_type in content:
schema = content[content_type]['schema']
content[content_type]['schema'] = self.openapi.resolve_schema_dict(
schema)
def resolve_schema(self, data):
"""Function to resolve a schema in a parameter or response - modifies the
corresponding dict to convert Marshmallow Schema object or class into dict
:param APISpec spec: `APISpec` containing refs.
:param dict|str data: either a parameter or response dictionary that may
contain a schema, or a reference provided as string
"""
if not isinstance(data, dict):
return
if 'schema' in data:
data['schema'] = self.openapi.resolve_schema_dict(data['schema'])
if self.openapi_version.major >= 3:
if 'content' in data:
for content_type in data['content']:
schema = data['content'][content_type]['schema']
data['content'][content_type]['schema'
] = self.openapi.resolve_schema_dict(schema)
def map_to_openapi_type(self, *args):
"""Decorator to set mapping for custom fields.
``*args`` can be:
- a pair of the form ``(type, format)``
- a core marshmallow field type (in which case we reuse that type's mapping)
Examples: ::
@ma_plugin.map_to_openapi_type('string', 'uuid')
class MyCustomField(Integer):
# ...
@ma_plugin.map_to_openapi_type(Integer) # will map to ('integer', 'int32')
class MyCustomFieldThatsKindaLikeAnInteger(Integer):
# ...
"""
return self.openapi.map_to_openapi_type(*args)
def schema_helper(self, name, _, schema=None, **kwargs):
"""Definition helper that allows using a marshmallow
:class:`Schema <marshmallow.Schema>` to provide OpenAPI
metadata.
:param type|Schema schema: A marshmallow Schema class or instance.
"""
if schema is None:
return None
schema_instance = resolve_schema_instance(schema)
schema_key = make_schema_key(schema_instance)
self.warn_if_schema_already_in_spec(schema_key)
self.openapi.refs[schema_key] = name
json_schema = self.openapi.schema2jsonschema(schema_instance)
return json_schema
def parameter_helper(self, parameter, **kwargs):
"""Parameter component helper that allows using a marshmallow
:class:`Schema <marshmallow.Schema>` in parameter definition.
:param dict parameter: parameter fields. May contain a marshmallow
Schema class or instance.
"""
self.resolve_schema(parameter)
return parameter
def response_helper(self, response, **kwargs):
"""Response component helper that allows using a marshmallow
:class:`Schema <marshmallow.Schema>` in response definition.
:param dict parameter: response fields. May contain a marshmallow
Schema class or instance.
"""
self.resolve_schema(response)
if 'headers' in response:
for header in response['headers'].values():
self.resolve_schema(header)
return response
def operation_helper(self, operations, **kwargs):
for operation in operations.values():
if not isinstance(operation, dict):
continue
if 'parameters' in operation:
operation['parameters'] = self.resolve_parameters(operation
['parameters'])
if self.openapi_version.major >= 3:
if 'requestBody' in operation:
self.resolve_schema_in_request_body(operation[
'requestBody'])
for response in operation.get('responses', {}).values():
self.resolve_schema(response)
if 'headers' in response:
for header in response['headers'].values():
self.resolve_schema(header)
def warn_if_schema_already_in_spec(self, schema_key):
"""Method to warn the user if the schema has already been added to the
spec.
"""
if schema_key in self.openapi.refs:
warnings.warn(
'{} has already been added to the spec. Adding it twice may cause references to not resolve properly.'
.format(schema_key[0]), UserWarning)
| <mask token>
class SchematicsPlugin(BasePlugin):
"""APISpec plugin handling schematics models
:param callable schema_name_resolver: Callable to generate the schema definition name.
Receives the `Schema` class and returns the name to be used in refs within
the generated spec. When working with circular referencing this function
must must not return `None` for schemas in a circular reference chain.
Example: ::
def schema_name_resolver(schema):
return schema.__name__
"""
def __init__(self, schema_name_resolver=None):
super().__init__()
self.schema_name_resolver = schema_name_resolver or resolver
self.spec = None
self.openapi_version = None
self.openapi = None
def init_spec(self, spec):
super().init_spec(spec)
self.spec = spec
self.openapi_version = spec.openapi_version
self.openapi = OpenAPIConverter(openapi_version=spec.
openapi_version, schema_name_resolver=self.schema_name_resolver,
spec=spec)
def resolve_parameters(self, parameters):
resolved = []
for parameter in parameters:
if isinstance(parameter, dict) and not isinstance(parameter.get
('schema', {}), dict):
schema_instance = resolve_schema_instance(parameter['schema'])
if 'in' in parameter:
del parameter['schema']
resolved += self.openapi.schema2parameters(schema_instance,
default_in=parameter.pop('in'), **parameter)
continue
self.resolve_schema(parameter)
resolved.append(parameter)
return resolved
def resolve_schema_in_request_body(self, request_body):
"""Function to resolve a schema in a requestBody object - modifies then
response dict to convert Marshmallow Schema object or class into dict
"""
content = request_body['content']
for content_type in content:
schema = content[content_type]['schema']
content[content_type]['schema'] = self.openapi.resolve_schema_dict(
schema)
def resolve_schema(self, data):
"""Function to resolve a schema in a parameter or response - modifies the
corresponding dict to convert Marshmallow Schema object or class into dict
:param APISpec spec: `APISpec` containing refs.
:param dict|str data: either a parameter or response dictionary that may
contain a schema, or a reference provided as string
"""
if not isinstance(data, dict):
return
if 'schema' in data:
data['schema'] = self.openapi.resolve_schema_dict(data['schema'])
if self.openapi_version.major >= 3:
if 'content' in data:
for content_type in data['content']:
schema = data['content'][content_type]['schema']
data['content'][content_type]['schema'
] = self.openapi.resolve_schema_dict(schema)
def map_to_openapi_type(self, *args):
"""Decorator to set mapping for custom fields.
``*args`` can be:
- a pair of the form ``(type, format)``
- a core marshmallow field type (in which case we reuse that type's mapping)
Examples: ::
@ma_plugin.map_to_openapi_type('string', 'uuid')
class MyCustomField(Integer):
# ...
@ma_plugin.map_to_openapi_type(Integer) # will map to ('integer', 'int32')
class MyCustomFieldThatsKindaLikeAnInteger(Integer):
# ...
"""
return self.openapi.map_to_openapi_type(*args)
def schema_helper(self, name, _, schema=None, **kwargs):
"""Definition helper that allows using a marshmallow
:class:`Schema <marshmallow.Schema>` to provide OpenAPI
metadata.
:param type|Schema schema: A marshmallow Schema class or instance.
"""
if schema is None:
return None
schema_instance = resolve_schema_instance(schema)
schema_key = make_schema_key(schema_instance)
self.warn_if_schema_already_in_spec(schema_key)
self.openapi.refs[schema_key] = name
json_schema = self.openapi.schema2jsonschema(schema_instance)
return json_schema
def parameter_helper(self, parameter, **kwargs):
"""Parameter component helper that allows using a marshmallow
:class:`Schema <marshmallow.Schema>` in parameter definition.
:param dict parameter: parameter fields. May contain a marshmallow
Schema class or instance.
"""
self.resolve_schema(parameter)
return parameter
def response_helper(self, response, **kwargs):
"""Response component helper that allows using a marshmallow
:class:`Schema <marshmallow.Schema>` in response definition.
:param dict parameter: response fields. May contain a marshmallow
Schema class or instance.
"""
self.resolve_schema(response)
if 'headers' in response:
for header in response['headers'].values():
self.resolve_schema(header)
return response
def operation_helper(self, operations, **kwargs):
for operation in operations.values():
if not isinstance(operation, dict):
continue
if 'parameters' in operation:
operation['parameters'] = self.resolve_parameters(operation
['parameters'])
if self.openapi_version.major >= 3:
if 'requestBody' in operation:
self.resolve_schema_in_request_body(operation[
'requestBody'])
for response in operation.get('responses', {}).values():
self.resolve_schema(response)
if 'headers' in response:
for header in response['headers'].values():
self.resolve_schema(header)
def warn_if_schema_already_in_spec(self, schema_key):
"""Method to warn the user if the schema has already been added to the
spec.
"""
if schema_key in self.openapi.refs:
warnings.warn(
'{} has already been added to the spec. Adding it twice may cause references to not resolve properly.'
.format(schema_key[0]), UserWarning)
| <mask token>
def resolver(schema):
"""Default implementation of a schema name resolver function
"""
name = schema.__name__
if name.endswith('Schema'):
return name[:-6] or name
return name
class SchematicsPlugin(BasePlugin):
"""APISpec plugin handling schematics models
:param callable schema_name_resolver: Callable to generate the schema definition name.
Receives the `Schema` class and returns the name to be used in refs within
the generated spec. When working with circular referencing this function
must must not return `None` for schemas in a circular reference chain.
Example: ::
def schema_name_resolver(schema):
return schema.__name__
"""
def __init__(self, schema_name_resolver=None):
super().__init__()
self.schema_name_resolver = schema_name_resolver or resolver
self.spec = None
self.openapi_version = None
self.openapi = None
def init_spec(self, spec):
super().init_spec(spec)
self.spec = spec
self.openapi_version = spec.openapi_version
self.openapi = OpenAPIConverter(openapi_version=spec.
openapi_version, schema_name_resolver=self.schema_name_resolver,
spec=spec)
def resolve_parameters(self, parameters):
resolved = []
for parameter in parameters:
if isinstance(parameter, dict) and not isinstance(parameter.get
('schema', {}), dict):
schema_instance = resolve_schema_instance(parameter['schema'])
if 'in' in parameter:
del parameter['schema']
resolved += self.openapi.schema2parameters(schema_instance,
default_in=parameter.pop('in'), **parameter)
continue
self.resolve_schema(parameter)
resolved.append(parameter)
return resolved
def resolve_schema_in_request_body(self, request_body):
"""Function to resolve a schema in a requestBody object - modifies then
response dict to convert Marshmallow Schema object or class into dict
"""
content = request_body['content']
for content_type in content:
schema = content[content_type]['schema']
content[content_type]['schema'] = self.openapi.resolve_schema_dict(
schema)
def resolve_schema(self, data):
"""Function to resolve a schema in a parameter or response - modifies the
corresponding dict to convert Marshmallow Schema object or class into dict
:param APISpec spec: `APISpec` containing refs.
:param dict|str data: either a parameter or response dictionary that may
contain a schema, or a reference provided as string
"""
if not isinstance(data, dict):
return
if 'schema' in data:
data['schema'] = self.openapi.resolve_schema_dict(data['schema'])
if self.openapi_version.major >= 3:
if 'content' in data:
for content_type in data['content']:
schema = data['content'][content_type]['schema']
data['content'][content_type]['schema'
] = self.openapi.resolve_schema_dict(schema)
def map_to_openapi_type(self, *args):
"""Decorator to set mapping for custom fields.
``*args`` can be:
- a pair of the form ``(type, format)``
- a core marshmallow field type (in which case we reuse that type's mapping)
Examples: ::
@ma_plugin.map_to_openapi_type('string', 'uuid')
class MyCustomField(Integer):
# ...
@ma_plugin.map_to_openapi_type(Integer) # will map to ('integer', 'int32')
class MyCustomFieldThatsKindaLikeAnInteger(Integer):
# ...
"""
return self.openapi.map_to_openapi_type(*args)
def schema_helper(self, name, _, schema=None, **kwargs):
"""Definition helper that allows using a marshmallow
:class:`Schema <marshmallow.Schema>` to provide OpenAPI
metadata.
:param type|Schema schema: A marshmallow Schema class or instance.
"""
if schema is None:
return None
schema_instance = resolve_schema_instance(schema)
schema_key = make_schema_key(schema_instance)
self.warn_if_schema_already_in_spec(schema_key)
self.openapi.refs[schema_key] = name
json_schema = self.openapi.schema2jsonschema(schema_instance)
return json_schema
def parameter_helper(self, parameter, **kwargs):
"""Parameter component helper that allows using a marshmallow
:class:`Schema <marshmallow.Schema>` in parameter definition.
:param dict parameter: parameter fields. May contain a marshmallow
Schema class or instance.
"""
self.resolve_schema(parameter)
return parameter
def response_helper(self, response, **kwargs):
"""Response component helper that allows using a marshmallow
:class:`Schema <marshmallow.Schema>` in response definition.
:param dict parameter: response fields. May contain a marshmallow
Schema class or instance.
"""
self.resolve_schema(response)
if 'headers' in response:
for header in response['headers'].values():
self.resolve_schema(header)
return response
def operation_helper(self, operations, **kwargs):
for operation in operations.values():
if not isinstance(operation, dict):
continue
if 'parameters' in operation:
operation['parameters'] = self.resolve_parameters(operation
['parameters'])
if self.openapi_version.major >= 3:
if 'requestBody' in operation:
self.resolve_schema_in_request_body(operation[
'requestBody'])
for response in operation.get('responses', {}).values():
self.resolve_schema(response)
if 'headers' in response:
for header in response['headers'].values():
self.resolve_schema(header)
def warn_if_schema_already_in_spec(self, schema_key):
"""Method to warn the user if the schema has already been added to the
spec.
"""
if schema_key in self.openapi.refs:
warnings.warn(
'{} has already been added to the spec. Adding it twice may cause references to not resolve properly.'
.format(schema_key[0]), UserWarning)
| # -*- coding: utf-8 -*-
"""
openapi.schematics
~~~~~~~~~~~~~~~~~~
Schematics plugin for apispec based on ext.MarshmallowPlugin
"""
import warnings
from apispec import BasePlugin
from .common import resolve_schema_instance, make_schema_key
from .openapi import OpenAPIConverter
def resolver(schema):
"""Default implementation of a schema name resolver function
"""
name = schema.__name__
if name.endswith("Schema"):
return name[:-6] or name
return name
class SchematicsPlugin(BasePlugin):
"""APISpec plugin handling schematics models
:param callable schema_name_resolver: Callable to generate the schema definition name.
Receives the `Schema` class and returns the name to be used in refs within
the generated spec. When working with circular referencing this function
must must not return `None` for schemas in a circular reference chain.
Example: ::
def schema_name_resolver(schema):
return schema.__name__
"""
def __init__(self, schema_name_resolver=None):
super().__init__()
self.schema_name_resolver = schema_name_resolver or resolver
self.spec = None
self.openapi_version = None
self.openapi = None
def init_spec(self, spec):
super().init_spec(spec)
self.spec = spec
self.openapi_version = spec.openapi_version
self.openapi = OpenAPIConverter(
openapi_version=spec.openapi_version,
schema_name_resolver=self.schema_name_resolver,
spec=spec,
)
def resolve_parameters(self, parameters):
resolved = []
for parameter in parameters:
if isinstance(parameter, dict) and not isinstance(
parameter.get("schema", {}), dict
):
schema_instance = resolve_schema_instance(parameter["schema"])
if "in" in parameter:
del parameter["schema"]
resolved += self.openapi.schema2parameters(
schema_instance, default_in=parameter.pop("in"), **parameter
)
continue
self.resolve_schema(parameter)
resolved.append(parameter)
return resolved
def resolve_schema_in_request_body(self, request_body):
"""Function to resolve a schema in a requestBody object - modifies then
response dict to convert Marshmallow Schema object or class into dict
"""
content = request_body["content"]
for content_type in content:
schema = content[content_type]["schema"]
content[content_type]["schema"] = self.openapi.resolve_schema_dict(schema)
def resolve_schema(self, data):
"""Function to resolve a schema in a parameter or response - modifies the
corresponding dict to convert Marshmallow Schema object or class into dict
:param APISpec spec: `APISpec` containing refs.
:param dict|str data: either a parameter or response dictionary that may
contain a schema, or a reference provided as string
"""
if not isinstance(data, dict):
return
# OAS 2 component or OAS 3 header
if "schema" in data:
data["schema"] = self.openapi.resolve_schema_dict(data["schema"])
# OAS 3 component except header
if self.openapi_version.major >= 3:
if "content" in data:
for content_type in data["content"]:
schema = data["content"][content_type]["schema"]
data["content"][content_type][
"schema"
] = self.openapi.resolve_schema_dict(schema)
def map_to_openapi_type(self, *args):
"""Decorator to set mapping for custom fields.
``*args`` can be:
- a pair of the form ``(type, format)``
- a core marshmallow field type (in which case we reuse that type's mapping)
Examples: ::
@ma_plugin.map_to_openapi_type('string', 'uuid')
class MyCustomField(Integer):
# ...
@ma_plugin.map_to_openapi_type(Integer) # will map to ('integer', 'int32')
class MyCustomFieldThatsKindaLikeAnInteger(Integer):
# ...
"""
return self.openapi.map_to_openapi_type(*args)
def schema_helper(self, name, _, schema=None, **kwargs):
"""Definition helper that allows using a marshmallow
:class:`Schema <marshmallow.Schema>` to provide OpenAPI
metadata.
:param type|Schema schema: A marshmallow Schema class or instance.
"""
if schema is None:
return None
schema_instance = resolve_schema_instance(schema)
schema_key = make_schema_key(schema_instance)
self.warn_if_schema_already_in_spec(schema_key)
self.openapi.refs[schema_key] = name
json_schema = self.openapi.schema2jsonschema(schema_instance)
return json_schema
def parameter_helper(self, parameter, **kwargs):
"""Parameter component helper that allows using a marshmallow
:class:`Schema <marshmallow.Schema>` in parameter definition.
:param dict parameter: parameter fields. May contain a marshmallow
Schema class or instance.
"""
# In OpenAPIv3, this only works when using the complex form using "content"
self.resolve_schema(parameter)
return parameter
def response_helper(self, response, **kwargs):
"""Response component helper that allows using a marshmallow
:class:`Schema <marshmallow.Schema>` in response definition.
:param dict parameter: response fields. May contain a marshmallow
Schema class or instance.
"""
self.resolve_schema(response)
if "headers" in response:
for header in response["headers"].values():
self.resolve_schema(header)
return response
def operation_helper(self, operations, **kwargs):
for operation in operations.values():
if not isinstance(operation, dict):
continue
if "parameters" in operation:
operation["parameters"] = self.resolve_parameters(
operation["parameters"]
)
if self.openapi_version.major >= 3:
if "requestBody" in operation:
self.resolve_schema_in_request_body(operation["requestBody"])
for response in operation.get("responses", {}).values():
self.resolve_schema(response)
if "headers" in response:
for header in response["headers"].values():
self.resolve_schema(header)
def warn_if_schema_already_in_spec(self, schema_key):
"""Method to warn the user if the schema has already been added to the
spec.
"""
if schema_key in self.openapi.refs:
warnings.warn(
"{} has already been added to the spec. Adding it twice may "
"cause references to not resolve properly.".format(schema_key[0]),
UserWarning,
)
| [
9,
12,
13,
14,
16
] |
2,020 | 9ec1cca08fac2fd976c1f596f7d340befc4eb339 | class Solution:
<mask token>
<mask token>
<mask token>
<mask token>
| class Solution:
def searchInsert(self, nums, target: int):
n = len(nums)
left = 0
right = n - 1
while left <= right:
mid = (left + right) // 2
if nums[mid] == target:
return mid
if nums[mid] < target:
left = mid + 1
else:
right = mid - 1
return left
<mask token>
def searchInsert02(self, nums, target: int):
size = len(nums)
if size == 0:
return 0
if nums[-1] < target:
return size
left = 0
right = size - 1
while left < right:
mid = left + (right - left) // 2
if nums[mid] < target:
left = mid + 1
else:
right = mid
return left
<mask token>
| class Solution:
def searchInsert(self, nums, target: int):
n = len(nums)
left = 0
right = n - 1
while left <= right:
mid = (left + right) // 2
if nums[mid] == target:
return mid
if nums[mid] < target:
left = mid + 1
else:
right = mid - 1
return left
def searchInsert01(self, nums, target: int):
size = len(nums)
if size == 0:
return 0
left = 0
right = size
while left < right:
mid = left + (right - left) // 2
if nums[mid] < target:
left = mid + 1
else:
right = mid
return left
def searchInsert02(self, nums, target: int):
size = len(nums)
if size == 0:
return 0
if nums[-1] < target:
return size
left = 0
right = size - 1
while left < right:
mid = left + (right - left) // 2
if nums[mid] < target:
left = mid + 1
else:
right = mid
return left
<mask token>
| class Solution:
def searchInsert(self, nums, target: int):
n = len(nums)
left = 0
right = n - 1
while left <= right:
mid = (left + right) // 2
if nums[mid] == target:
return mid
if nums[mid] < target:
left = mid + 1
else:
right = mid - 1
return left
def searchInsert01(self, nums, target: int):
size = len(nums)
if size == 0:
return 0
left = 0
right = size
while left < right:
mid = left + (right - left) // 2
if nums[mid] < target:
left = mid + 1
else:
right = mid
return left
def searchInsert02(self, nums, target: int):
size = len(nums)
if size == 0:
return 0
if nums[-1] < target:
return size
left = 0
right = size - 1
while left < right:
mid = left + (right - left) // 2
if nums[mid] < target:
left = mid + 1
else:
right = mid
return left
if __name__ == '__main__':
nums = [1, 3, 5, 6]
target = 7
s = Solution()
print(s.searchInsert01(nums, target))
| # coding:utf-8
class Solution:
def searchInsert(self, nums, target: int):
n = len(nums)
left = 0
right = n - 1
# 返回大于等于target的第一个索引则用left,否则用right
while left <= right:
mid = (left + right) // 2
if nums[mid] == target:
return mid
if nums[mid] < target:
left = mid + 1
else:
right = mid - 1
return left
# 如果写两个分支
def searchInsert01(self, nums, target: int):
size = len(nums)
if size == 0:
return 0
left = 0
right = size
while left < right:
mid = left + (right - left) // 2
# 此处中位数小于目标值则排除掉,否则得包含中位数
if nums[mid] < target:
left = mid + 1
else: # >=
right = mid
return left
# 如果写两个分支
# 范围为[0,size-1]
def searchInsert02(self, nums, target: int):
size = len(nums)
if size == 0:
return 0
if nums[-1] < target:
return size
left = 0
right = size - 1
while left < right:
mid = left + (right - left) // 2
if nums[mid] < target:
left = mid + 1
else: # >=
right = mid
return left
if __name__ == '__main__':
nums = [1, 3, 5, 6]
target = 7
s = Solution()
# print(s.searchInsert(nums, target))
print(s.searchInsert01(nums, target))
| [
1,
3,
4,
5,
6
] |
2,021 | 15c6841052882406d7c7b6cd05c0186c6a4a5924 | <mask token>
| <mask token>
def solution(tickets):
routes = defaultdict(list)
for t in tickets:
routes[t[0]].append(t[1])
for r in routes:
routes[r].sort(reverse=True)
stack = ['ICN']
path = []
while stack:
top = stack[-1]
if top in routes and routes[top]:
stack.append(routes[top].pop())
else:
path.append(stack.pop())
return path[::-1]
<mask token>
| <mask token>
def solution(tickets):
routes = defaultdict(list)
for t in tickets:
routes[t[0]].append(t[1])
for r in routes:
routes[r].sort(reverse=True)
stack = ['ICN']
path = []
while stack:
top = stack[-1]
if top in routes and routes[top]:
stack.append(routes[top].pop())
else:
path.append(stack.pop())
return path[::-1]
print(soluiont([['ICN', 'BOO'], ['ICN', 'COO'], ['COO', 'ICN']]))
print(solution([['ICN', 'SFO'], ['ICN', 'ATL'], ['SFO', 'ATL'], ['ATL',
'ICN'], ['ATL', 'SFO']]))
| from collections import defaultdict
def solution(tickets):
routes = defaultdict(list)
for t in tickets:
routes[t[0]].append(t[1])
for r in routes:
routes[r].sort(reverse=True)
stack = ['ICN']
path = []
while stack:
top = stack[-1]
if top in routes and routes[top]:
stack.append(routes[top].pop())
else:
path.append(stack.pop())
return path[::-1]
print(soluiont([['ICN', 'BOO'], ['ICN', 'COO'], ['COO', 'ICN']]))
print(solution([['ICN', 'SFO'], ['ICN', 'ATL'], ['SFO', 'ATL'], ['ATL',
'ICN'], ['ATL', 'SFO']]))
| from collections import defaultdict
def solution(tickets):
# 출발지가 키, 목적지가 value 인 딕셔너리 생성
routes = defaultdict(list)
for t in tickets:
routes[t[0]].append(t[1])
# 알파벳 빠른순으로 정렬해야함으로 reverse=True
for r in routes:
routes[r].sort(reverse=True)
# 시작 위치 ICN
stack = ['ICN']
# 리턴 변수
path = []
while stack:
# 현제 갈수 있는곳 찾기
top = stack[-1]
if top in routes and routes[top]:
stack.append(routes[top].pop())
# route 가 비지 않았는데 route[top]가 비어있다는것은 마지막 공항이라는 뜻
else:
path.append(stack.pop())
# 마지막 공항을 찾기위해 path를 마지막에 역순 정렬렬
return path[::-1]
print(soluiont([["ICN","BOO"],["ICN","COO"],["COO","ICN"]]))
print(solution([["ICN", "SFO"], ["ICN", "ATL"], ["SFO", "ATL"], ["ATL", "ICN"], ["ATL","SFO"]])) | [
0,
1,
2,
3,
4
] |
2,022 | 1fff681363c4c91c47c2818681a3f2f125dd8c83 | <mask token>
def leialetra():
"""
=>Função para validar letras.
parm=msg: Recebe dados to tipo string sendo Ss ou Nn.
return: String de valor S.
"""
while True:
try:
msg = str(input('Deseja fazer uma pergunta? [s/n] ')).upper()[0]
except KeyboardInterrupt:
print('O usuário desistiu de inserir um dado!')
break
except IndexError:
print('ERRO, nada digitado!')
else:
if msg not in 'SsNn' or msg in ' ':
print('ERRO, DADO INVALIDO!')
elif msg in 'Nn':
print('Volte sempre, Obrigado!')
break
else:
return 'S'
break
def facapergunta():
msg = str(input('Faça sua pergunta: ')).upper().strip().replace(' ', '')
if msg.isnumeric():
return 'N'
else:
return 'L'
<mask token>
| <mask token>
def leialetra():
"""
=>Função para validar letras.
parm=msg: Recebe dados to tipo string sendo Ss ou Nn.
return: String de valor S.
"""
while True:
try:
msg = str(input('Deseja fazer uma pergunta? [s/n] ')).upper()[0]
except KeyboardInterrupt:
print('O usuário desistiu de inserir um dado!')
break
except IndexError:
print('ERRO, nada digitado!')
else:
if msg not in 'SsNn' or msg in ' ':
print('ERRO, DADO INVALIDO!')
elif msg in 'Nn':
print('Volte sempre, Obrigado!')
break
else:
return 'S'
break
def facapergunta():
msg = str(input('Faça sua pergunta: ')).upper().strip().replace(' ', '')
if msg.isnumeric():
return 'N'
else:
return 'L'
<mask token>
while True:
aleatorio = randint(0, 9)
escolha = leialetra()
if escolha == 'S':
pergunta = facapergunta()
if pergunta == 'L':
print(resposta[aleatorio])
else:
break
| <mask token>
def leialetra():
"""
=>Função para validar letras.
parm=msg: Recebe dados to tipo string sendo Ss ou Nn.
return: String de valor S.
"""
while True:
try:
msg = str(input('Deseja fazer uma pergunta? [s/n] ')).upper()[0]
except KeyboardInterrupt:
print('O usuário desistiu de inserir um dado!')
break
except IndexError:
print('ERRO, nada digitado!')
else:
if msg not in 'SsNn' or msg in ' ':
print('ERRO, DADO INVALIDO!')
elif msg in 'Nn':
print('Volte sempre, Obrigado!')
break
else:
return 'S'
break
def facapergunta():
msg = str(input('Faça sua pergunta: ')).upper().strip().replace(' ', '')
if msg.isnumeric():
return 'N'
else:
return 'L'
resposta = ['Sim', 'Não', 'Talvez', 'Por que não?', 'Vá', 'Não sei',
'Pode ser', 'Talvez sim', 'Talvez não', 'Tenha Fé']
while True:
aleatorio = randint(0, 9)
escolha = leialetra()
if escolha == 'S':
pergunta = facapergunta()
if pergunta == 'L':
print(resposta[aleatorio])
else:
break
| from random import randint
def leialetra():
"""
=>Função para validar letras.
parm=msg: Recebe dados to tipo string sendo Ss ou Nn.
return: String de valor S.
"""
while True:
try:
msg = str(input('Deseja fazer uma pergunta? [s/n] ')).upper()[0]
except KeyboardInterrupt:
print('O usuário desistiu de inserir um dado!')
break
except IndexError:
print('ERRO, nada digitado!')
else:
if msg not in 'SsNn' or msg in ' ':
print('ERRO, DADO INVALIDO!')
elif msg in 'Nn':
print('Volte sempre, Obrigado!')
break
else:
return 'S'
break
def facapergunta():
msg = str(input('Faça sua pergunta: ')).upper().strip().replace(' ', '')
if msg.isnumeric():
return 'N'
else:
return 'L'
resposta = ['Sim', 'Não', 'Talvez', 'Por que não?', 'Vá', 'Não sei',
'Pode ser', 'Talvez sim', 'Talvez não', 'Tenha Fé']
while True:
aleatorio = randint(0, 9)
escolha = leialetra()
if escolha == 'S':
pergunta = facapergunta()
if pergunta == 'L':
print(resposta[aleatorio])
else:
break
| from random import randint
#funções
def leialetra():
'''
=>Função para validar letras.
parm=msg: Recebe dados to tipo string sendo Ss ou Nn.
return: String de valor S.
'''
while True:
try:
msg = str(input('Deseja fazer uma pergunta? [s/n] ')).upper()[0]
except KeyboardInterrupt:
print('O usuário desistiu de inserir um dado!')
break
except IndexError:
print('ERRO, nada digitado!')
else:
if msg not in 'SsNn' or msg in ' ':
print('ERRO, DADO INVALIDO!')
else:
if msg in 'Nn':
print('Volte sempre, Obrigado!')
break
else:
return 'S'
break
def facapergunta():
msg = str(input('Faça sua pergunta: ')).upper().strip().replace(' ','')
if msg.isnumeric():
return 'N'
else:
return 'L'
#programa principal
resposta = ["Sim", "Não", "Talvez", "Por que não?", "Vá", "Não sei", "Pode ser", "Talvez sim", "Talvez não", "Tenha Fé"]
while True:
aleatorio = randint(0, 9)
escolha = leialetra()
if escolha == 'S':
pergunta = facapergunta()
if pergunta == 'L':
print(resposta[aleatorio])
else:
break
| [
2,
3,
4,
5,
6
] |
2,023 | 805b64a7bd727a88081a6ead574fff9b1542070f | <mask token>
| <mask token>
for eachfile in os.listdir(desktop_directory):
if os.path.isfile(desktop_directory + eachfile):
fileName, fileExtension = os.path.splitext(eachfile)
if all(fileExtension != e for e in exclude_these):
ext = fileExtension[1:]
if not os.path.exists(destination_folder + ext):
os.mkdir(destination_folder + ext)
os.rename(desktop_directory + eachfile, destination_folder +
ext + '/' + eachfile)
| desktop_directory = '/home/vineeth/Desktop/'
destination_folder = '/home/vineeth/Documents/'
exclude_these = ['.desktop', '.exe', '.lnk']
<mask token>
for eachfile in os.listdir(desktop_directory):
if os.path.isfile(desktop_directory + eachfile):
fileName, fileExtension = os.path.splitext(eachfile)
if all(fileExtension != e for e in exclude_these):
ext = fileExtension[1:]
if not os.path.exists(destination_folder + ext):
os.mkdir(destination_folder + ext)
os.rename(desktop_directory + eachfile, destination_folder +
ext + '/' + eachfile)
| desktop_directory = '/home/vineeth/Desktop/'
destination_folder = '/home/vineeth/Documents/'
exclude_these = ['.desktop', '.exe', '.lnk']
import os
for eachfile in os.listdir(desktop_directory):
if os.path.isfile(desktop_directory + eachfile):
fileName, fileExtension = os.path.splitext(eachfile)
if all(fileExtension != e for e in exclude_these):
ext = fileExtension[1:]
if not os.path.exists(destination_folder + ext):
os.mkdir(destination_folder + ext)
os.rename(desktop_directory + eachfile, destination_folder +
ext + '/' + eachfile)
| #This program sorts the files on Desktop on the basis of file extension and move them in separate folders in Documents folder.
desktop_directory="/home/vineeth/Desktop/" #LINUX
destination_folder="/home/vineeth/Documents/" #LINUX
#desktop_directory="C:/Users/VINEETH/Desktop/" #Windows
#destination_folder="C:/Users/VINEETH/Documents/" #Windows
exclude_these = ['.desktop','.exe','.lnk']
import os
for eachfile in os.listdir(desktop_directory):
if os.path.isfile(desktop_directory+eachfile):
fileName, fileExtension = os.path.splitext(eachfile)
if(all(fileExtension!=e for e in exclude_these)):
ext=fileExtension[1:]
if not os.path.exists(destination_folder+ext):
os.mkdir(destination_folder+ext)
os.rename(desktop_directory+eachfile,destination_folder+ext+"/"+eachfile)
| [
0,
1,
2,
3,
4
] |
2,024 | e4a60008ca7d61d825b59e6202b40c6be02841cd | <mask token>
| print('Hello World!')
print('2nd Test')
<mask token>
print(d)
print(d['a'])
<mask token>
random.seed(30)
<mask token>
print(r)
<mask token>
np.random.seed
for i in range(20):
newArray = list(set(np.random.random_integers(0, 10, size=6)))[:3]
print(newArray)
| print('Hello World!')
print('2nd Test')
d = dict()
d['a'] = dict()
d['a']['b'] = 5
d['a']['c'] = 6
d['x'] = dict()
d['x']['y'] = 10
print(d)
print(d['a'])
<mask token>
random.seed(30)
r = random.randrange(0, 5)
print(r)
<mask token>
np.random.seed
for i in range(20):
newArray = list(set(np.random.random_integers(0, 10, size=6)))[:3]
print(newArray)
| print('Hello World!')
print('2nd Test')
d = dict()
d['a'] = dict()
d['a']['b'] = 5
d['a']['c'] = 6
d['x'] = dict()
d['x']['y'] = 10
print(d)
print(d['a'])
import random
random.seed(30)
r = random.randrange(0, 5)
print(r)
import numpy as np
np.random.seed
for i in range(20):
newArray = list(set(np.random.random_integers(0, 10, size=6)))[:3]
print(newArray)
| print('Hello World!')
print('2nd Test')
d = dict()
d['a'] = dict()
d['a']['b'] = 5
d['a']['c'] = 6
d['x'] = dict()
d['x']['y'] = 10
print(d)
print(d['a'])
import random
random.seed(30)
r = random.randrange(0,5)
print(r)
import numpy as np
np.random.seed
for i in range(20):
newArray = list(set(np.random.random_integers(0, 10, size=(6))))[:3]
print(newArray) | [
0,
1,
2,
3,
4
] |
2,025 | 2faf39f8d12197e20948b2bf4288b7ee406f5b86 | <mask token>
class FunctionPygameCircle(FunctionExample):
def __init__(self, data_len, width=500, height=500, dot_size=5):
self.angle = 2 * math.pi / data_len
self.width = width
self.height = height
self.dot_size = dot_size
<mask token>
<mask token>
<mask token>
def reset(self):
pygame.display.flip()
self.screen.fill([0, 0, 0])
def close(self):
pygame.quit()
| <mask token>
class FunctionPygameCircle(FunctionExample):
def __init__(self, data_len, width=500, height=500, dot_size=5):
self.angle = 2 * math.pi / data_len
self.width = width
self.height = height
self.dot_size = dot_size
<mask token>
def run(self, data):
return pygame.draw.circle(self.screen, [150, 0, 150], [int(self.
width / 2 - math.cos(self.angle * data) * (self.width / 2 -
self.dot_size)), int(self.height / 2 - math.sin(self.angle *
data) * (self.height / 2 - self.dot_size))], self.dot_size)
def run_no_return(self, data):
pygame.draw.circle(self.screen, [150, 0, 150], [int(self.width / 2 -
math.cos(self.angle * data) * (self.width / 2 - self.dot_size)),
int(self.height / 2 - math.sin(self.angle * data) * (self.
height / 2 - self.dot_size))], self.dot_size)
def reset(self):
pygame.display.flip()
self.screen.fill([0, 0, 0])
def close(self):
pygame.quit()
| <mask token>
class FunctionPygameCircle(FunctionExample):
def __init__(self, data_len, width=500, height=500, dot_size=5):
self.angle = 2 * math.pi / data_len
self.width = width
self.height = height
self.dot_size = dot_size
def setup(self):
pygame.init()
self.screen = pygame.display.set_mode([self.width, self.height])
pygame.key.set_repeat(100, 50)
self.screen.fill([0, 0, 0])
def run(self, data):
return pygame.draw.circle(self.screen, [150, 0, 150], [int(self.
width / 2 - math.cos(self.angle * data) * (self.width / 2 -
self.dot_size)), int(self.height / 2 - math.sin(self.angle *
data) * (self.height / 2 - self.dot_size))], self.dot_size)
def run_no_return(self, data):
pygame.draw.circle(self.screen, [150, 0, 150], [int(self.width / 2 -
math.cos(self.angle * data) * (self.width / 2 - self.dot_size)),
int(self.height / 2 - math.sin(self.angle * data) * (self.
height / 2 - self.dot_size))], self.dot_size)
def reset(self):
pygame.display.flip()
self.screen.fill([0, 0, 0])
def close(self):
pygame.quit()
| import math
import pygame
from TestingFunctions.FunctionExample import FunctionExample
class FunctionPygameCircle(FunctionExample):
def __init__(self, data_len, width=500, height=500, dot_size=5):
self.angle = 2 * math.pi / data_len
self.width = width
self.height = height
self.dot_size = dot_size
def setup(self):
pygame.init()
self.screen = pygame.display.set_mode([self.width, self.height])
pygame.key.set_repeat(100, 50)
self.screen.fill([0, 0, 0])
def run(self, data):
return pygame.draw.circle(self.screen, [150, 0, 150], [int(self.
width / 2 - math.cos(self.angle * data) * (self.width / 2 -
self.dot_size)), int(self.height / 2 - math.sin(self.angle *
data) * (self.height / 2 - self.dot_size))], self.dot_size)
def run_no_return(self, data):
pygame.draw.circle(self.screen, [150, 0, 150], [int(self.width / 2 -
math.cos(self.angle * data) * (self.width / 2 - self.dot_size)),
int(self.height / 2 - math.sin(self.angle * data) * (self.
height / 2 - self.dot_size))], self.dot_size)
def reset(self):
pygame.display.flip()
self.screen.fill([0, 0, 0])
def close(self):
pygame.quit()
| import math
import pygame
from TestingFunctions.FunctionExample import FunctionExample
class FunctionPygameCircle(FunctionExample):
def __init__(self, data_len, width=500, height=500, dot_size=5):
self.angle = (2 * math.pi) / (data_len)
self.width = width
self.height = height
self.dot_size = dot_size
def setup(self):
pygame.init()
self.screen = pygame.display.set_mode([self.width, self.height])
pygame.key.set_repeat(100, 50)
self.screen.fill([0, 0, 0])
def run(self, data):
return pygame.draw.circle(self.screen, [150, 0, 150],
[int(self.width / 2 - math.cos(self.angle * data) * (self.width / 2 - self.dot_size)),
int(self.height / 2 - math.sin(self.angle * data) * (
self.height / 2 - self.dot_size))],
self.dot_size)
def run_no_return(self, data):
pygame.draw.circle(self.screen, [150, 0, 150],
[int(self.width / 2 - math.cos(self.angle * data) * (self.width / 2 - self.dot_size)),
int(self.height / 2 - math.sin(self.angle * data) * (self.height / 2 - self.dot_size))],
self.dot_size)
def reset(self):
pygame.display.flip()
self.screen.fill([0, 0, 0])
def close(self):
pygame.quit()
| [
4,
6,
7,
8,
9
] |
2,026 | 9833af7f5f740e18cbd4d16f59474b4bacaf070c | <mask token>
| <mask token>
print(response.status_code)
print(response.apparent_encoding)
<mask token>
for music in list_music:
print(music['name'])
print('所属专辑:' + music['album']['name'])
print('歌曲时长:' + str(music['interval']) + '秒')
print('歌曲播放链接:https://y.qq.com/n/yqq/song/' + music['mid'] + '.html\n\n')
| <mask token>
headers = {'User-Agent':
'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.65 Safari/535.11'
}
client_url = (
'https://c.y.qq.com/soso/fcgi-bin/client_search_cp?ct=24&qqmusic_ver=1298&new_json=1&remoteplace=txt.yqq.song&searchid=69467462525912938&t=0&aggr=1&cr=1&catZhida=1&lossless=0&flag_qc=0&p=1&n=10&w=%E5%91%A8%E6%9D%B0%E4%BC%A6&g_tk=490628805&loginUin=757585105&hostUin=0&format=json&inCharset=utf8&outCharset=utf-8¬ice=0&platform=yqq.json&needNewCode=0'
)
response = requests.get(url=client_url, headers=headers)
print(response.status_code)
print(response.apparent_encoding)
response.encoding = response.apparent_encoding
json_response = response.json()
list_music = json_response['data']['song']['list']
for music in list_music:
print(music['name'])
print('所属专辑:' + music['album']['name'])
print('歌曲时长:' + str(music['interval']) + '秒')
print('歌曲播放链接:https://y.qq.com/n/yqq/song/' + music['mid'] + '.html\n\n')
| import requests
from bs4 import BeautifulSoup
headers = {'User-Agent':
'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.65 Safari/535.11'
}
client_url = (
'https://c.y.qq.com/soso/fcgi-bin/client_search_cp?ct=24&qqmusic_ver=1298&new_json=1&remoteplace=txt.yqq.song&searchid=69467462525912938&t=0&aggr=1&cr=1&catZhida=1&lossless=0&flag_qc=0&p=1&n=10&w=%E5%91%A8%E6%9D%B0%E4%BC%A6&g_tk=490628805&loginUin=757585105&hostUin=0&format=json&inCharset=utf8&outCharset=utf-8¬ice=0&platform=yqq.json&needNewCode=0'
)
response = requests.get(url=client_url, headers=headers)
print(response.status_code)
print(response.apparent_encoding)
response.encoding = response.apparent_encoding
json_response = response.json()
list_music = json_response['data']['song']['list']
for music in list_music:
print(music['name'])
print('所属专辑:' + music['album']['name'])
print('歌曲时长:' + str(music['interval']) + '秒')
print('歌曲播放链接:https://y.qq.com/n/yqq/song/' + music['mid'] + '.html\n\n')
| # !/Library/Frameworks/Python.framework/Versions/3.7/bin/python3
# -*- coding:utf-8 -*-
# @Author : Jiazhixiang
import requests
from bs4 import BeautifulSoup
headers = {
"User-Agent": "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.65 Safari/535.11"
}
# start_url = "https://y.qq.com/portal/search.html#page=1&searchid=1&remoteplace=txt.yqq.top&t=song&w=%E5%91%A8%E6%9D%B0%E4%BC%A6"
client_url = "https://c.y.qq.com/soso/fcgi-bin/client_search_cp?ct=24&qqmusic_ver=1298&new_json=1&remoteplace=txt.yqq.song&searchid=69467462525912938&t=0&aggr=1&cr=1&catZhida=1&lossless=0&flag_qc=0&p=1&n=10&w=%E5%91%A8%E6%9D%B0%E4%BC%A6&g_tk=490628805&loginUin=757585105&hostUin=0&format=json&inCharset=utf8&outCharset=utf-8¬ice=0&platform=yqq.json&needNewCode=0"
response = requests.get(url=client_url, headers=headers)
print(response.status_code)
print(response.apparent_encoding)
response.encoding = response.apparent_encoding
# response = response.text
# print(type(response))
json_response = response.json()
# print(json_response)
# print(type(json_response))
list_music = json_response['data']['song']['list']
for music in list_music:
print(music['name'])
print("所属专辑:" + music['album']['name'])
print("歌曲时长:" + str(music['interval']) + "秒")
# https: // y.qq.com / n / yqq / song / 001qvvgF38HVc4.html
print("歌曲播放链接:https://y.qq.com/n/yqq/song/" + music['mid'] + ".html\n\n")
| [
0,
1,
2,
3,
4
] |
2,027 | ed02cbf3ebef307d6209004e1e388312bfda0b50 | <mask token>
class cRandomString:
@staticmethod
def RandomTitle(name):
platform = ['PS4', 'XBOX', 'PC', 'NS', 'IOS']
random.shuffle(platform)
platform = '/'.join(platform)
firstWord = ['Cool', 'Hot', 'New', '2018', 'Gift', '*Cool*',
'*Hot*', '*New*', '$Cool$', '$Hot$', '$New$']
firstWord = random.choice(firstWord)
title = firstWord + ' 🤑 FREE Fortnite XXXX SKIN ' + platform
title = title.replace('XXXX', name)
return title
<mask token>
@staticmethod
def RandomTag(name):
tag_temp = (
'XXXX, XXXX fortnite, XXXX free, XXXX skin,fortnite XXXX skin free, how to get the XXXX skin, iPhone XXXX free skins, iPad XXXX free skins'
)
tag_final = tag_temp.replace('XXXX', name)
return tag_final
<mask token>
| <mask token>
class cRandomString:
@staticmethod
def RandomTitle(name):
platform = ['PS4', 'XBOX', 'PC', 'NS', 'IOS']
random.shuffle(platform)
platform = '/'.join(platform)
firstWord = ['Cool', 'Hot', 'New', '2018', 'Gift', '*Cool*',
'*Hot*', '*New*', '$Cool$', '$Hot$', '$New$']
firstWord = random.choice(firstWord)
title = firstWord + ' 🤑 FREE Fortnite XXXX SKIN ' + platform
title = title.replace('XXXX', name)
return title
@staticmethod
def RandomDescription(name):
platform = ['PS4', 'Xbox One', 'PC', 'Nintendo Switch', 'IOS']
random.shuffle(platform)
platform = ', '.join(platform)
description_temp = (
"""Hey Guys!
In today's video I will show you how to get the XXXX skin for free in fortnite!
This is working on xbox, ps4, ios, pc and nintendo switch!
This method is 100% free and working as of 2018.
This is the best way to get a fortnite XXXX skin for free key code!
This is a working and legal method!
How To Get FREE SKINS In Fortnite: Battle Royale! [{0}]"""
.format(platform))
description_final = description_temp.replace('XXXX', name)
return description_final
@staticmethod
def RandomTag(name):
tag_temp = (
'XXXX, XXXX fortnite, XXXX free, XXXX skin,fortnite XXXX skin free, how to get the XXXX skin, iPhone XXXX free skins, iPad XXXX free skins'
)
tag_final = tag_temp.replace('XXXX', name)
return tag_final
<mask token>
| <mask token>
reload(sys)
sys.setdefaultencoding('utf-8')
class cRandomString:
@staticmethod
def RandomTitle(name):
platform = ['PS4', 'XBOX', 'PC', 'NS', 'IOS']
random.shuffle(platform)
platform = '/'.join(platform)
firstWord = ['Cool', 'Hot', 'New', '2018', 'Gift', '*Cool*',
'*Hot*', '*New*', '$Cool$', '$Hot$', '$New$']
firstWord = random.choice(firstWord)
title = firstWord + ' 🤑 FREE Fortnite XXXX SKIN ' + platform
title = title.replace('XXXX', name)
return title
@staticmethod
def RandomDescription(name):
platform = ['PS4', 'Xbox One', 'PC', 'Nintendo Switch', 'IOS']
random.shuffle(platform)
platform = ', '.join(platform)
description_temp = (
"""Hey Guys!
In today's video I will show you how to get the XXXX skin for free in fortnite!
This is working on xbox, ps4, ios, pc and nintendo switch!
This method is 100% free and working as of 2018.
This is the best way to get a fortnite XXXX skin for free key code!
This is a working and legal method!
How To Get FREE SKINS In Fortnite: Battle Royale! [{0}]"""
.format(platform))
description_final = description_temp.replace('XXXX', name)
return description_final
@staticmethod
def RandomTag(name):
tag_temp = (
'XXXX, XXXX fortnite, XXXX free, XXXX skin,fortnite XXXX skin free, how to get the XXXX skin, iPhone XXXX free skins, iPad XXXX free skins'
)
tag_final = tag_temp.replace('XXXX', name)
return tag_final
if __name__ == '__main__':
cRandomString.RandomDescription('123')
| import random
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
class cRandomString:
@staticmethod
def RandomTitle(name):
platform = ['PS4', 'XBOX', 'PC', 'NS', 'IOS']
random.shuffle(platform)
platform = '/'.join(platform)
firstWord = ['Cool', 'Hot', 'New', '2018', 'Gift', '*Cool*',
'*Hot*', '*New*', '$Cool$', '$Hot$', '$New$']
firstWord = random.choice(firstWord)
title = firstWord + ' 🤑 FREE Fortnite XXXX SKIN ' + platform
title = title.replace('XXXX', name)
return title
@staticmethod
def RandomDescription(name):
platform = ['PS4', 'Xbox One', 'PC', 'Nintendo Switch', 'IOS']
random.shuffle(platform)
platform = ', '.join(platform)
description_temp = (
"""Hey Guys!
In today's video I will show you how to get the XXXX skin for free in fortnite!
This is working on xbox, ps4, ios, pc and nintendo switch!
This method is 100% free and working as of 2018.
This is the best way to get a fortnite XXXX skin for free key code!
This is a working and legal method!
How To Get FREE SKINS In Fortnite: Battle Royale! [{0}]"""
.format(platform))
description_final = description_temp.replace('XXXX', name)
return description_final
@staticmethod
def RandomTag(name):
tag_temp = (
'XXXX, XXXX fortnite, XXXX free, XXXX skin,fortnite XXXX skin free, how to get the XXXX skin, iPhone XXXX free skins, iPad XXXX free skins'
)
tag_final = tag_temp.replace('XXXX', name)
return tag_final
if __name__ == '__main__':
cRandomString.RandomDescription('123')
| # -*- coding:utf-8 -*-
# author:Kyseng
# file: cRandomString.py
# time: 2018/11/8 11:41 PM
# functhion:
import random
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
class cRandomString():
@staticmethod
def RandomTitle(name):
# name = name.decode('utf8')
# print name
platform = ["PS4", "XBOX", "PC", "NS", "IOS"]
random.shuffle(platform)
platform = "/".join(platform)
firstWord = ['Cool', 'Hot', 'New', '2018', 'Gift', '*Cool*', '*Hot*', '*New*', '$Cool$', '$Hot$', '$New$']
firstWord = random.choice(firstWord)
title = firstWord + ' 🤑 FREE Fortnite XXXX SKIN ' + platform
title = title.replace('XXXX', name)
return title
@staticmethod
def RandomDescription(name):
platform = ["PS4", "Xbox One", "PC", "Nintendo Switch", "IOS"]
random.shuffle(platform)
platform = ", ".join(platform)
description_temp = "Hey Guys!\n\nIn today's video I will show you how to get the XXXX skin for free in fortnite!\n\nThis is working on xbox, ps4, ios, pc and nintendo switch!\n\nThis method is 100% free and working as of 2018.\n\nThis is the best way to get a fortnite XXXX skin for free key code! \n\nThis is a working and legal method!\n\nHow To Get FREE SKINS In Fortnite: Battle Royale! [{0}]".format(platform)
description_final = description_temp.replace('XXXX', name)
return description_final
@staticmethod
def RandomTag(name):
tag_temp = "XXXX, XXXX fortnite, XXXX free, XXXX skin,fortnite XXXX skin free, how to get the XXXX skin, iPhone XXXX free skins, iPad XXXX free skins"
tag_final = tag_temp.replace('XXXX', name)
return tag_final
if __name__ == "__main__":
cRandomString.RandomDescription("123") | [
3,
4,
5,
6,
7
] |
2,028 | 434ec7791345ad869d8ce86aa1cdc08344203171 | <mask token>
class Variable:
def __init__(self, type_str: str, name: str):
self.original_type = type_str
self.__map_variable_type(type_str)
self.name = name
def __str__(self):
return 'VariableDto name=' + self.name + ' type=' + str(self.type.name)
def __map_variable_type(self, variable_type):
switcher = {'uint8_t': (VariableType.uint8, 1), 'int8_t': (
VariableType.int8, 1), 'uint16_t': (VariableType.uint16, 2),
'int16_t': (VariableType.int16, 2), 'uint32_t': (VariableType.
uint32, 4), 'int32_t': (VariableType.int32, 4), 'float': (
VariableType.float, 4), 'double': (VariableType.double, 4),
'bool': (VariableType.bool, 1)}
self.type, self.size = switcher.get(variable_type, (VariableType.
custom, None))
| <mask token>
class VariableType(Enum):
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
class Variable:
def __init__(self, type_str: str, name: str):
self.original_type = type_str
self.__map_variable_type(type_str)
self.name = name
def __str__(self):
return 'VariableDto name=' + self.name + ' type=' + str(self.type.name)
def __map_variable_type(self, variable_type):
switcher = {'uint8_t': (VariableType.uint8, 1), 'int8_t': (
VariableType.int8, 1), 'uint16_t': (VariableType.uint16, 2),
'int16_t': (VariableType.int16, 2), 'uint32_t': (VariableType.
uint32, 4), 'int32_t': (VariableType.int32, 4), 'float': (
VariableType.float, 4), 'double': (VariableType.double, 4),
'bool': (VariableType.bool, 1)}
self.type, self.size = switcher.get(variable_type, (VariableType.
custom, None))
| <mask token>
class VariableType(Enum):
uint8 = 'uint8',
int8 = 'int8'
uint16 = 'uint16'
int16 = 'int16'
uint32 = 'uint32'
int32 = 'int32'
float = 'float'
double = 'double'
bool = 'bool'
custom = 'custom'
class Variable:
def __init__(self, type_str: str, name: str):
self.original_type = type_str
self.__map_variable_type(type_str)
self.name = name
def __str__(self):
return 'VariableDto name=' + self.name + ' type=' + str(self.type.name)
def __map_variable_type(self, variable_type):
switcher = {'uint8_t': (VariableType.uint8, 1), 'int8_t': (
VariableType.int8, 1), 'uint16_t': (VariableType.uint16, 2),
'int16_t': (VariableType.int16, 2), 'uint32_t': (VariableType.
uint32, 4), 'int32_t': (VariableType.int32, 4), 'float': (
VariableType.float, 4), 'double': (VariableType.double, 4),
'bool': (VariableType.bool, 1)}
self.type, self.size = switcher.get(variable_type, (VariableType.
custom, None))
| from enum import Enum
class VariableType(Enum):
uint8 = 'uint8',
int8 = 'int8'
uint16 = 'uint16'
int16 = 'int16'
uint32 = 'uint32'
int32 = 'int32'
float = 'float'
double = 'double'
bool = 'bool'
custom = 'custom'
class Variable:
def __init__(self, type_str: str, name: str):
self.original_type = type_str
self.__map_variable_type(type_str)
self.name = name
def __str__(self):
return 'VariableDto name=' + self.name + ' type=' + str(self.type.name)
def __map_variable_type(self, variable_type):
switcher = {'uint8_t': (VariableType.uint8, 1), 'int8_t': (
VariableType.int8, 1), 'uint16_t': (VariableType.uint16, 2),
'int16_t': (VariableType.int16, 2), 'uint32_t': (VariableType.
uint32, 4), 'int32_t': (VariableType.int32, 4), 'float': (
VariableType.float, 4), 'double': (VariableType.double, 4),
'bool': (VariableType.bool, 1)}
self.type, self.size = switcher.get(variable_type, (VariableType.
custom, None))
| from enum import Enum
class VariableType(Enum):
uint8 = "uint8",
int8 = "int8"
uint16 = "uint16"
int16 = "int16"
uint32 = "uint32"
int32 = "int32"
float = "float"
double = "double"
bool = "bool"
custom = "custom"
class Variable:
def __init__(self, type_str: str, name: str):
self.original_type = type_str
self.__map_variable_type(type_str)
self.name = name
def __str__(self):
return "VariableDto name=" + self.name + " type=" + str(self.type.name)
def __map_variable_type(self, variable_type):
# TODO add support for short, int, etc.
switcher = {
"uint8_t": (VariableType.uint8, 1),
"int8_t": (VariableType.int8, 1),
"uint16_t": (VariableType.uint16, 2),
"int16_t": (VariableType.int16, 2),
"uint32_t": (VariableType.uint32, 4),
"int32_t": (VariableType.int32, 4),
"float": (VariableType.float, 4),
"double": (VariableType.double, 4),
"bool": (VariableType.bool, 1)
}
self.type, self.size = switcher.get(variable_type, (VariableType.custom, None))
| [
4,
5,
6,
7,
8
] |
2,029 | f5ca2fb2ce8bcb7a67abe3123d4c50949e9c2f2f | # encoding: utf-8
# module Revit.GeometryConversion calls itself GeometryConversion
# from RevitNodes,Version=1.2.1.3083,Culture=neutral,PublicKeyToken=null
# by generator 1.145
# no doc
# no imports
# no functions
# classes
class CurveUtils(object):
# no doc
@staticmethod
def CurvesAreSimilar(a,b):
"""
CurvesAreSimilar(a: Curve,b: Curve) -> bool
This method uses basic checks to compare curves for similarity.
It
starts by comparing the curves' end points. Curves which have similar
end points but different directions will not be regarded as similar,
because directionality is important in Revit for other purposes.
Depending on the curve type,other comparisons are then performed.
a: The first curve.
b: The second curve.
Returns: Returns true if the curves are similar within Tolerance,and
false if they are not.
"""
pass
@staticmethod
def GetPlaneFromCurve(c,planarOnly):
""" GetPlaneFromCurve(c: Curve,planarOnly: bool) -> Plane """
pass
@staticmethod
def IsLineLike(crv):
""" IsLineLike(crv: Curve) -> bool """
pass
@staticmethod
def PointArraysAreSame(pnts1,pnts2):
""" PointArraysAreSame(pnts1: ReferencePointArray,pnts2: ReferencePointArray) -> bool """
pass
@staticmethod
def ReferencePointsAreSame(pnt1,pnt2):
""" ReferencePointsAreSame(pnt1: ReferencePoint,pnt2: ReferencePoint) -> bool """
pass
Tolerance=9.9999999999999995e-07
__all__=[
'CurvesAreSimilar',
'GetPlaneFromCurve',
'IsLineLike',
'PointArraysAreSame',
'ReferencePointsAreSame',
'Tolerance',
]
class DynamoToRevitBRep(object):
# no doc
@staticmethod
def ToRevitType(*__args):
"""
ToRevitType(surf: Surface,performHostUnitConversion: bool,materialId: ElementId) -> GeometryObject
this method attempts to construct a BRep from a surface.
ToRevitType(sol: Solid,performHostUnitConversion: bool,materialId: ElementId) -> GeometryObject
this method attempts to construct a BRep from a closed solid.
"""
pass
__all__=[
'ToRevitType',
]
class GeometryObjectConverter(object):
# no doc
@staticmethod
def Convert(geom,reference,transform):
"""
Convert(geom: GeometryObject,reference: Reference,transform: CoordinateSystem) -> object
Convert a GeometryObject to an applicable ProtoGeometry type.
Returns: A Geometry type. Null if there's no suitable conversion.
"""
pass
@staticmethod
def ConvertToMany(solid,reference,transform):
"""
ConvertToMany(solid: Solid,reference: Reference,transform: CoordinateSystem) -> IEnumerable[object]
Get the edges and faces from the solid and convert them
"""
pass
@staticmethod
def InternalConvert(geom):
""" InternalConvert(geom: PolyLine) -> PolyCurve """
pass
__all__=[
'Convert',
'ConvertToMany',
'InternalConvert',
]
class GeometryPrimitiveConverter(object):
# no doc
@staticmethod
def GetPerpendicular(*__args):
"""
GetPerpendicular(vector: Vector) -> Vector
GetPerpendicular(xyz: XYZ) -> XYZ
"""
pass
@staticmethod
def ToCoordinateSystem(t,convertUnits):
""" ToCoordinateSystem(t: Transform,convertUnits: bool) -> CoordinateSystem """
pass
@staticmethod
def ToDegrees(degrees):
""" ToDegrees(degrees: float) -> float """
pass
@staticmethod
def ToDoubleArray(list):
""" ToDoubleArray(list: Array[float]) -> DoubleArray """
pass
@staticmethod
def ToPlane(plane,convertUnits):
"""
ToPlane(plane: Plane,convertUnits: bool) -> Plane
ToPlane(plane: Plane,convertUnits: bool) -> Plane
"""
pass
@staticmethod
def ToPoint(xyz,convertUnits):
""" ToPoint(xyz: XYZ,convertUnits: bool) -> Point """
pass
@staticmethod
def ToPoints(list,convertUnits):
""" ToPoints(list: List[XYZ],convertUnits: bool) -> List[Point] """
pass
@staticmethod
def ToProtoType(*__args):
"""
ToProtoType(uv: UV) -> UV
ToProtoType(point: Point,convertUnits: bool) -> Point
ToProtoType(xyz: BoundingBoxXYZ,convertUnits: bool) -> BoundingBox
"""
pass
@staticmethod
def ToRadians(degrees):
""" ToRadians(degrees: float) -> float """
pass
@staticmethod
def ToRevitBoundingBox(cs,minPoint,maxPoint,convertUnits):
""" ToRevitBoundingBox(cs: CoordinateSystem,minPoint: Point,maxPoint: Point,convertUnits: bool) -> BoundingBoxXYZ """
pass
@staticmethod
def ToRevitType(*__args):
"""
ToRevitType(vec: Vector,convertUnits: bool) -> XYZ
ToRevitType(pt: Point,convertUnits: bool) -> XYZ
ToRevitType(bb: BoundingBox,convertUnits: bool) -> BoundingBoxXYZ
"""
pass
@staticmethod
def ToTransform(cs,convertUnits):
""" ToTransform(cs: CoordinateSystem,convertUnits: bool) -> Transform """
pass
@staticmethod
def ToVector(xyz,convertUnits):
""" ToVector(xyz: XYZ,convertUnits: bool) -> Vector """
pass
@staticmethod
def ToXyz(*__args):
"""
ToXyz(vec: Vector,convertUnits: bool) -> XYZ
ToXyz(pt: Point,convertUnits: bool) -> XYZ
"""
pass
@staticmethod
def ToXyzs(list,convertUnits):
"""
ToXyzs(list: Array[Vector],convertUnits: bool) -> Array[XYZ]
ToXyzs(list: Array[Point],convertUnits: bool) -> Array[XYZ]
ToXyzs(list: List[Point],convertUnits: bool) -> List[XYZ]
"""
pass
__all__=[
'GetPerpendicular',
'ToCoordinateSystem',
'ToDegrees',
'ToDoubleArray',
'ToPlane',
'ToPoint',
'ToPoints',
'ToProtoType',
'ToRadians',
'ToRevitBoundingBox',
'ToRevitType',
'ToTransform',
'ToVector',
'ToXyz',
'ToXyzs',
]
class NurbsUtils(object):
# no doc
@staticmethod
def ElevateBezierDegree(crv,finalDegree):
"""
ElevateBezierDegree(crv: NurbsCurve,finalDegree: int) -> NurbsCurve
Elevate the degree of a Bezier curve (represented in NURBS form) to a given
degree
without changing the shape
crv: The curve
finalDegree: The requested degree
"""
pass
__all__=[
'ElevateBezierDegree',
]
class PolygonContainment(object):
# no doc
@staticmethod
def AdjustDelta(delta,vertex,next_vertex,p):
""" AdjustDelta(delta: int,vertex: UV,next_vertex: UV,p: UV) -> int """
pass
@staticmethod
def GetQuadrant(vertex,p):
"""
GetQuadrant(vertex: UV,p: UV) -> int
Determine the quadrant of a polygon vertex
relative to the test
point.
"""
pass
@staticmethod
def GetXIntercept(p,q,y):
"""
GetXIntercept(p: UV,q: UV,y: float) -> float
Determine the X intercept of a polygon edge
with a horizontal
line at the Y value of the
test point.
"""
pass
@staticmethod
def PolygonContains(polygon,point):
""" PolygonContains(polygon: List[UV],point: UV) -> bool """
pass
__all__=[
'AdjustDelta',
'GetQuadrant',
'GetXIntercept',
'PolygonContains',
]
class ProtoToRevitCurve(object):
# no doc
@staticmethod
def ToRevitType(*__args):
"""
ToRevitType(pcrv: PolyCurve,performHostUnitConversion: bool) -> CurveLoop
ToRevitType(crv: Curve,performHostUnitConversion: bool) -> Curve
"""
pass
__all__=[
'ToRevitType',
]
class ProtoToRevitMesh(object):
# no doc
@staticmethod
def CreateBoundingBoxMeshForErrors(minPoint,maxPoint,performHostUnitConversion):
"""
CreateBoundingBoxMeshForErrors(minPoint: Point,maxPoint: Point,performHostUnitConversion: bool) -> IList[GeometryObject]
This is to create a bounding box mesh for geometries which have errors during
the tessellating process
"""
pass
@staticmethod
def ToRevitType(*__args):
"""
ToRevitType(mesh: Mesh,target: TessellatedShapeBuilderTarget,fallback: TessellatedShapeBuilderFallback,MaterialId: ElementId,performHostUnitConversion: bool) -> IList[GeometryObject]
ToRevitType(solid: Solid,target: TessellatedShapeBuilderTarget,fallback: TessellatedShapeBuilderFallback,MaterialId: ElementId,performHostUnitConversion: bool) -> IList[GeometryObject]
ToRevitType(srf: Surface,target: TessellatedShapeBuilderTarget,fallback: TessellatedShapeBuilderFallback,MaterialId: ElementId,performHostUnitConversion: bool) -> IList[GeometryObject]
"""
pass
__all__=[
'CreateBoundingBoxMeshForErrors',
'ToRevitType',
]
class RevitToProtoCurve(object):
# no doc
@staticmethod
def ToProtoType(*__args):
"""
ToProtoType(geom: PolyLine,performHostUnitConversion: bool) -> PolyCurve
ToProtoType(revitCurves: CurveArray,performHostUnitConversion: bool) -> PolyCurve
ToProtoType(revitCurve: Curve,performHostUnitConversion: bool,referenceOverride: Reference) -> Curve
"""
pass
__all__=[
'ToProtoType',
]
class RevitToProtoFace(object):
# no doc
@staticmethod
def ToProtoType(revitFace,performHostUnitConversion,referenceOverride):
""" ToProtoType(revitFace: Face,performHostUnitConversion: bool,referenceOverride: Reference) -> IEnumerable[Surface] """
pass
__all__=[
'ToProtoType',
]
class RevitToProtoMesh(object):
# no doc
@staticmethod
def ToProtoType(*__args):
"""
ToProtoType(meshArray: IEnumerable[Mesh],performHostUnitConversion: bool) -> Array[Mesh]
ToProtoType(mesh: Mesh,performHostUnitConversion: bool) -> Mesh
"""
pass
__all__=[
'ToProtoType',
]
class RevitToProtoSolid(object):
# no doc
@staticmethod
def ToProtoType(solid,performHostUnitConversion):
""" ToProtoType(solid: Solid,performHostUnitConversion: bool) -> Solid """
pass
__all__=[
'ToProtoType',
]
class SurfaceExtractor(object):
"""
This class is required to extract the underlying surface representation from a Revit Face.
All Face types are supported.
"""
@staticmethod
def ExtractSurface(face,edgeLoops):
"""
ExtractSurface(face: HermiteFace,edgeLoops: IEnumerable[PolyCurve]) -> Surface
ExtractSurface(face: RevolvedFace,edgeLoops: IEnumerable[PolyCurve]) -> Surface
ExtractSurface(face: RuledFace,edgeLoops: IEnumerable[PolyCurve]) -> Surface
ExtractSurface(face: PlanarFace,edgeLoops: IEnumerable[PolyCurve]) -> Surface
ExtractSurface(face: CylindricalFace,edgeLoops: IEnumerable[PolyCurve]) -> Surface
ExtractSurface(face: ConicalFace,edgeLoops: IEnumerable[PolyCurve]) -> Surface
"""
pass
__all__=[
'ExtractSurface',
]
class UnitConverter(object):
# no doc
@staticmethod
def ConvertToDynamoUnits(geometry):
# Error generating skeleton for function ConvertToDynamoUnits: Method must be called on a Type for which Type.IsGenericParameter is false.
@staticmethod
def ConvertToHostUnits(geometry):
# Error generating skeleton for function ConvertToHostUnits: Method must be called on a Type for which Type.IsGenericParameter is false.
@staticmethod
def DynamoToHostFactor(unitType):
""" DynamoToHostFactor(unitType: UnitType) -> float """
pass
@staticmethod
def HostToDynamoFactor(unitType):
""" HostToDynamoFactor(unitType: UnitType) -> float """
pass
@staticmethod
def InDynamoUnits(geometry):
# Error generating skeleton for function InDynamoUnits: Method must be called on a Type for which Type.IsGenericParameter is false.
@staticmethod
def InHostUnits(geometry):
# Error generating skeleton for function InHostUnits: Method must be called on a Type for which Type.IsGenericParameter is false.
__all__=[
'ConvertToDynamoUnits',
'ConvertToHostUnits',
'DynamoToHostFactor',
'HostToDynamoFactor',
'InDynamoUnits',
'InHostUnits',
]
| null | null | null | null | [
0
] |
2,030 | ae4d12ff88cf08b2e19b212c80549adc0a0d47dc | <mask token>
| class Solution:
<mask token>
| class Solution:
def sortArrayByParity(self, A: List[int]) ->List[int]:
l = []
r = []
for x in A:
if x % 2 == 0:
l.append(x)
else:
r.append(x)
ans = l + r
return ans
| #给定一个非负整数数组 A,返回一个数组,在该数组中, A 的所有偶数元素之后跟着所有奇数元素。你可以返回满足此条件的任何数组作为答案
class Solution:
def sortArrayByParity(self, A: List[int]) -> List[int]:
l=[]
r=[]
for x in A:
if(x%2==0):
l.append(x)
else:
r.append(x)
ans=l+r
return ans
| null | [
0,
1,
2,
3
] |
2,031 | c295d769b85943a6ca89f9d213e79b78129a6ce9 | <mask token>
@app.route('/api/v1.0/stations')
def stations():
"""return a json list of stations from the dataset."""
stationquery = session.query(Station.station).all()
stationlist = list(np.ravel(stationquery))
return jsonify(stationlist)
<mask token>
@app.route('/api/v1.0/tobs/<startDate>/<endDate>')
def getTempObs(startDate, endDate):
"""Return the date and temperateure for 2017"""
results = session.query(Measurement.tobs).filter(Measurement.date >=
startDate).filter(Measurement.date <= endDate).all()
all_names = list(np.ravel(results))
return jsonify(all_names)
@app.route('/api/v1.0/<startDate>/<endDate>')
@app.route('/api/v1.0/<startDate>')
def getTempStats(startDate, endDate='2018-31-12'):
"""Return temperature stats"""
if endDate == '2018-31-12':
results = session.query(func.min(Measurement.tobs), func.avg(
Measurement.tobs), func.max(Measurement.tobs)).filter(
Measurement.date >= startDate).all()
else:
results = session.query(func.min(Measurement.tobs), func.avg(
Measurement.tobs), func.max(Measurement.tobs)).filter(
Measurement.date >= startDate).filter(Measurement.date <= endDate
).all()
all_names = list(np.ravel(results))
return jsonify(all_names)
<mask token>
| <mask token>
@app.route('/')
def welcome():
return jsonify({'Title': 'Welcome to hawaii weather info app',
'description':
'This api gives you the information about Hawaii stations, precipitation and temperature in a daterange'
, 'endpoints': ['/api/v1.0/precipitation', '/api/v1.0/stations',
'/api/v1.0/tobs', '/api/v1.0/<start>', '/api/v1.0/<start>/<end>']})
@app.route('/api/v1.0/precipitation')
def prcp():
prev_year = dt.date.today() - dt.timedelta(days=365)
prcp_each_day = session.query(Measurement.date, func.sum(Measurement.prcp)
).filter(Measurement.date >= prev_year).group_by(Measurement.date
).order_by(Measurement.date).all()
prcp_dict = dict(prcp_each_day)
return jsonify(prcp_dict)
@app.route('/api/v1.0/stations')
def stations():
"""return a json list of stations from the dataset."""
stationquery = session.query(Station.station).all()
stationlist = list(np.ravel(stationquery))
return jsonify(stationlist)
@app.route('/api/v1.0/tobs')
def tobs():
"""Return a json list of Temperature Observations (tobs) for the previous year"""
prev_year = dt.date.today() - dt.timedelta(days=365)
tobsquery = session.query(Measurement.tobs).filter(Measurement.date >=
prev_year).all()
tobslist = list(np.ravel(tobsquery))
return jsonify(tobslist)
@app.errorhandler(404)
def page_not_found(e):
return (
'<h2> 404: Page Not Found </h2>Please enter a date in database range: <b>2010-01-01</b> to <b>2017-08-23</b>'
, 404)
@app.route('/api/v1.0/<start>', methods=['GET'])
def tobsinfo_start(start):
try:
if start:
sel = [func.min(Measurement.tobs), func.avg(Measurement.tobs),
func.max(Measurement.tobs)]
calcs = session.query(*sel).filter(func.strftime('%Y-%m-%d',
Measurement.date) >= start).one()
return (
f'<h2> Temperature(F) informtion from {start} </h2>Minimum temp: {calcs[0]}<br>Average temp: {round(calcs[1], 2)}<br>Maximum temp: {round(calcs[2], 2)}<br>'
)
except:
abort(404)
@app.route('/api/v1.0/tobs/<startDate>/<endDate>')
def getTempObs(startDate, endDate):
"""Return the date and temperateure for 2017"""
results = session.query(Measurement.tobs).filter(Measurement.date >=
startDate).filter(Measurement.date <= endDate).all()
all_names = list(np.ravel(results))
return jsonify(all_names)
@app.route('/api/v1.0/<startDate>/<endDate>')
@app.route('/api/v1.0/<startDate>')
def getTempStats(startDate, endDate='2018-31-12'):
"""Return temperature stats"""
if endDate == '2018-31-12':
results = session.query(func.min(Measurement.tobs), func.avg(
Measurement.tobs), func.max(Measurement.tobs)).filter(
Measurement.date >= startDate).all()
else:
results = session.query(func.min(Measurement.tobs), func.avg(
Measurement.tobs), func.max(Measurement.tobs)).filter(
Measurement.date >= startDate).filter(Measurement.date <= endDate
).all()
all_names = list(np.ravel(results))
return jsonify(all_names)
<mask token>
| <mask token>
Base.prepare(engine, reflect=True)
<mask token>
@app.route('/')
def welcome():
return jsonify({'Title': 'Welcome to hawaii weather info app',
'description':
'This api gives you the information about Hawaii stations, precipitation and temperature in a daterange'
, 'endpoints': ['/api/v1.0/precipitation', '/api/v1.0/stations',
'/api/v1.0/tobs', '/api/v1.0/<start>', '/api/v1.0/<start>/<end>']})
@app.route('/api/v1.0/precipitation')
def prcp():
prev_year = dt.date.today() - dt.timedelta(days=365)
prcp_each_day = session.query(Measurement.date, func.sum(Measurement.prcp)
).filter(Measurement.date >= prev_year).group_by(Measurement.date
).order_by(Measurement.date).all()
prcp_dict = dict(prcp_each_day)
return jsonify(prcp_dict)
@app.route('/api/v1.0/stations')
def stations():
"""return a json list of stations from the dataset."""
stationquery = session.query(Station.station).all()
stationlist = list(np.ravel(stationquery))
return jsonify(stationlist)
@app.route('/api/v1.0/tobs')
def tobs():
"""Return a json list of Temperature Observations (tobs) for the previous year"""
prev_year = dt.date.today() - dt.timedelta(days=365)
tobsquery = session.query(Measurement.tobs).filter(Measurement.date >=
prev_year).all()
tobslist = list(np.ravel(tobsquery))
return jsonify(tobslist)
@app.errorhandler(404)
def page_not_found(e):
return (
'<h2> 404: Page Not Found </h2>Please enter a date in database range: <b>2010-01-01</b> to <b>2017-08-23</b>'
, 404)
@app.route('/api/v1.0/<start>', methods=['GET'])
def tobsinfo_start(start):
try:
if start:
sel = [func.min(Measurement.tobs), func.avg(Measurement.tobs),
func.max(Measurement.tobs)]
calcs = session.query(*sel).filter(func.strftime('%Y-%m-%d',
Measurement.date) >= start).one()
return (
f'<h2> Temperature(F) informtion from {start} </h2>Minimum temp: {calcs[0]}<br>Average temp: {round(calcs[1], 2)}<br>Maximum temp: {round(calcs[2], 2)}<br>'
)
except:
abort(404)
@app.route('/api/v1.0/tobs/<startDate>/<endDate>')
def getTempObs(startDate, endDate):
"""Return the date and temperateure for 2017"""
results = session.query(Measurement.tobs).filter(Measurement.date >=
startDate).filter(Measurement.date <= endDate).all()
all_names = list(np.ravel(results))
return jsonify(all_names)
@app.route('/api/v1.0/<startDate>/<endDate>')
@app.route('/api/v1.0/<startDate>')
def getTempStats(startDate, endDate='2018-31-12'):
"""Return temperature stats"""
if endDate == '2018-31-12':
results = session.query(func.min(Measurement.tobs), func.avg(
Measurement.tobs), func.max(Measurement.tobs)).filter(
Measurement.date >= startDate).all()
else:
results = session.query(func.min(Measurement.tobs), func.avg(
Measurement.tobs), func.max(Measurement.tobs)).filter(
Measurement.date >= startDate).filter(Measurement.date <= endDate
).all()
all_names = list(np.ravel(results))
return jsonify(all_names)
if __name__ == '__main__':
app.run(debug=True)
| <mask token>
engine = create_engine('sqlite:///hawaii.sqlite')
Base = automap_base()
Base.prepare(engine, reflect=True)
Measurement = Base.classes.measurement
Station = Base.classes.station
session = Session(engine)
app = Flask(__name__)
@app.route('/')
def welcome():
return jsonify({'Title': 'Welcome to hawaii weather info app',
'description':
'This api gives you the information about Hawaii stations, precipitation and temperature in a daterange'
, 'endpoints': ['/api/v1.0/precipitation', '/api/v1.0/stations',
'/api/v1.0/tobs', '/api/v1.0/<start>', '/api/v1.0/<start>/<end>']})
@app.route('/api/v1.0/precipitation')
def prcp():
prev_year = dt.date.today() - dt.timedelta(days=365)
prcp_each_day = session.query(Measurement.date, func.sum(Measurement.prcp)
).filter(Measurement.date >= prev_year).group_by(Measurement.date
).order_by(Measurement.date).all()
prcp_dict = dict(prcp_each_day)
return jsonify(prcp_dict)
@app.route('/api/v1.0/stations')
def stations():
"""return a json list of stations from the dataset."""
stationquery = session.query(Station.station).all()
stationlist = list(np.ravel(stationquery))
return jsonify(stationlist)
@app.route('/api/v1.0/tobs')
def tobs():
"""Return a json list of Temperature Observations (tobs) for the previous year"""
prev_year = dt.date.today() - dt.timedelta(days=365)
tobsquery = session.query(Measurement.tobs).filter(Measurement.date >=
prev_year).all()
tobslist = list(np.ravel(tobsquery))
return jsonify(tobslist)
@app.errorhandler(404)
def page_not_found(e):
return (
'<h2> 404: Page Not Found </h2>Please enter a date in database range: <b>2010-01-01</b> to <b>2017-08-23</b>'
, 404)
@app.route('/api/v1.0/<start>', methods=['GET'])
def tobsinfo_start(start):
try:
if start:
sel = [func.min(Measurement.tobs), func.avg(Measurement.tobs),
func.max(Measurement.tobs)]
calcs = session.query(*sel).filter(func.strftime('%Y-%m-%d',
Measurement.date) >= start).one()
return (
f'<h2> Temperature(F) informtion from {start} </h2>Minimum temp: {calcs[0]}<br>Average temp: {round(calcs[1], 2)}<br>Maximum temp: {round(calcs[2], 2)}<br>'
)
except:
abort(404)
@app.route('/api/v1.0/tobs/<startDate>/<endDate>')
def getTempObs(startDate, endDate):
"""Return the date and temperateure for 2017"""
results = session.query(Measurement.tobs).filter(Measurement.date >=
startDate).filter(Measurement.date <= endDate).all()
all_names = list(np.ravel(results))
return jsonify(all_names)
@app.route('/api/v1.0/<startDate>/<endDate>')
@app.route('/api/v1.0/<startDate>')
def getTempStats(startDate, endDate='2018-31-12'):
"""Return temperature stats"""
if endDate == '2018-31-12':
results = session.query(func.min(Measurement.tobs), func.avg(
Measurement.tobs), func.max(Measurement.tobs)).filter(
Measurement.date >= startDate).all()
else:
results = session.query(func.min(Measurement.tobs), func.avg(
Measurement.tobs), func.max(Measurement.tobs)).filter(
Measurement.date >= startDate).filter(Measurement.date <= endDate
).all()
all_names = list(np.ravel(results))
return jsonify(all_names)
if __name__ == '__main__':
app.run(debug=True)
| import datetime as dt
import numpy as np
import pandas as pd
import sqlalchemy
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
from sqlalchemy import create_engine, func
from flask import Flask, jsonify, render_template, abort
#creating engine to connect with hawaii sqlite database
engine = create_engine("sqlite:///hawaii.sqlite")
#using automap to load orm automatically
Base = automap_base()
# reflecting the tables from orm classes
Base.prepare(engine, reflect = True)
Measurement = Base.classes.measurement
Station = Base.classes.station
# print(Base.classes.keys())
#initiating session
session = Session(engine)
# initiating flask api
app = Flask(__name__)
@app.route('/')
def welcome():
return jsonify({"Title": "Welcome to hawaii weather info app",
"description": "This api gives you the information about Hawaii stations, precipitation and temperature in a daterange",
"endpoints":["/api/v1.0/precipitation",
"/api/v1.0/stations",
"/api/v1.0/tobs",
"/api/v1.0/<start>",
"/api/v1.0/<start>/<end>"]})
@app.route("/api/v1.0/precipitation")
def prcp():
prev_year = dt.date.today() - dt.timedelta(days=365)
# date_string = prev_year.strftime("%Y-%m-%d")
prcp_each_day = session.query(Measurement.date,func.sum(Measurement.prcp)).filter(Measurement.date >= prev_year).group_by(Measurement.date).order_by(Measurement.date).all()
prcp_dict = dict(prcp_each_day)
return jsonify(prcp_dict)
@app.route('/api/v1.0/stations')
def stations():
"""return a json list of stations from the dataset."""
stationquery = session.query(Station.station).all()
stationlist = list(np.ravel(stationquery))
return jsonify(stationlist)
@app.route('/api/v1.0/tobs')
def tobs():
"""Return a json list of Temperature Observations (tobs) for the previous year"""
prev_year = dt.date.today() - dt.timedelta(days=365)
# date_string = prev_year.strftime("%Y-%m-%d")
tobsquery = session.query(Measurement.tobs).filter(Measurement.date >= prev_year).all()
tobslist = list(np.ravel(tobsquery))
return jsonify(tobslist)
#executing the error handler page using 404 abort
@app.errorhandler(404)
def page_not_found(e):
return ("<h2> 404: Page Not Found </h2>"
"Please enter a date in database range: <b>2010-01-01</b> to <b>2017-08-23</b>"),404
@app.route('/api/v1.0/<start>', methods=["GET"])
# Return a json list of the minimum temperature, the average temperature, and the max temperature for a given start or start-end range.
# When given the start only, calculate TMIN, TAVG, and TMAX for all dates greater than and equal to the start date.
# When given the start and the end date, calculate the TMIN, TAVG, and TMAX for dates between the start and end date inclusive.
def tobsinfo_start(start):
# daterange = [date for dates in session.query(Measurement.date).all()]
try:
if start:# in daterange:
# start = func.strftime('%Y-%m-%d', 'start')
sel = [func.min(Measurement.tobs), func.avg(Measurement.tobs), func.max(Measurement.tobs)]
calcs = session.query(*sel).filter(func.strftime('%Y-%m-%d',Measurement.date) >= start).one()
return (
f"<h2> Temperature(F) informtion from {start} </h2>"
f"Minimum temp: {calcs[0]}<br>"
f"Average temp: {round(calcs[1],2)}<br>"
f"Maximum temp: {round(calcs[2],2)}<br>"
)
except:
abort(404)
@app.route("/api/v1.0/tobs/<startDate>/<endDate>")
def getTempObs(startDate,endDate):
"""Return the date and temperateure for 2017"""
# Query all the date and the temperature details
results = session.query(Measurement.tobs). filter(Measurement.date >= startDate).filter(Measurement.date <= endDate).all()
# Convert list of tuples into normal list
all_names = list(np.ravel(results))
return jsonify(all_names)
# 12. Get the temperature stats for given date
@app.route("/api/v1.0/<startDate>/<endDate>")
@app.route("/api/v1.0/<startDate>")
def getTempStats(startDate,endDate='2018-31-12'):
"""Return temperature stats"""
#If end date is not given
if endDate == '2018-31-12':
results = session.query(func.min(Measurement.tobs), func.avg(Measurement.tobs), func.max(Measurement.tobs)).\
filter(Measurement.date >= startDate).all()
else:
# Query all the date and the temperature details og
results = session.query(func.min(Measurement.tobs), func.avg(Measurement.tobs), func.max(Measurement.tobs)).\
filter(Measurement.date >= startDate).filter(Measurement.date <= endDate).all()
# Convert list of tuples into normal list
all_names = list(np.ravel(results))
return jsonify(all_names)
if __name__ == '__main__':
app.run(debug=True)
| [
3,
8,
9,
10,
12
] |
2,032 | 55fc197eebc4e06466e0fc0458957d0460602eef | <mask token>
def test_forgotten_initialized_on_protected():
blueprint = Blueprint('Test')
@blueprint.get('/protected')
@protected()
def protected_hello_world(request):
return json({'message': 'hello world'})
@blueprint.route('/scoped')
@scoped('something')
async def scoped_endpoint(request):
return json({'scoped': True})
app = Sanic('sanic-jwt-test')
sanicjwt = Initialize(blueprint, app=app, authenticate=lambda x: True)
app.blueprint(blueprint, url_prefix='/test')
_, response = app.test_client.post('/test/auth', json={'username':
'user1', 'password': 'abcxyz'})
access_token = response.json.get(sanicjwt.config.access_token_name(), None)
_, response = app.test_client.get('/test/protected', headers={
'Authorization': 'Bearer {}'.format(access_token)})
assert response.status == 500
assert response.json.get('exception') == 'SanicJWTException'
_, response = app.test_client.get('/test/scoped', headers={
'Authorization': 'Bearer {}'.format(access_token)})
assert response.status == 500
assert response.json.get('exception') == 'SanicJWTException'
def test_option_method_on_protected(app):
sanic_app, sanic_jwt = app
@sanic_app.route('/protected/options', methods=['OPTIONS'])
@sanic_jwt.protected()
async def my_protected_options(request):
return text('', status=204)
_, response = sanic_app.test_client.options('/protected/options')
assert response.status == 204
<mask token>
def test_inject_user_on_instance(app_with_retrieve_user):
sanic_app, sanic_jwt = app_with_retrieve_user
_, response = sanic_app.test_client.post('/auth', json={'username':
'user1', 'password': 'abcxyz'})
sanic_app.router.reset()
@sanic_app.route('/protected/user')
@sanic_jwt.inject_user()
@sanic_jwt.protected()
async def my_protected_user(request, user):
return json({'user_id': user.user_id})
access_token = response.json.get(sanic_jwt.config.access_token_name(), None
)
_, response = sanic_app.test_client.get('/auth/me', headers={
'Authorization': 'Bearer {}'.format(access_token)})
assert response.json.get('me').get('user_id') == 1
_, response = sanic_app.test_client.get('/protected/user', headers={
'Authorization': 'Bearer {}'.format(access_token)})
assert response.status == 200
assert response.json.get('user_id') == 1
<mask token>
def test_inject_user_with_auth_mode_off(app_with_retrieve_user):
async def retrieve_user(request, payload, *args, **kwargs):
return {'user_id': 123}
microservice_app = Sanic('sanic-jwt-test')
microservice_sanic_jwt = Initialize(microservice_app, auth_mode=False,
retrieve_user=retrieve_user)
@microservice_app.route('/protected/user')
@microservice_sanic_jwt.inject_user()
@microservice_sanic_jwt.protected()
async def my_protected_user(request, user):
return json({'user_id': user.get('user_id')})
sanic_app, sanic_jwt = app_with_retrieve_user
_, response = sanic_app.test_client.post('/auth', json={'username':
'user1', 'password': 'abcxyz'})
access_token = response.json.get(sanic_jwt.config.access_token_name(), None
)
_, response = microservice_app.test_client.get('/protected/user',
headers={'Authorization': 'Bearer {}'.format(access_token)})
assert response.status == 200
assert response.json.get('user_id') == 123
_, response = microservice_app.test_client.get('/protected/user')
assert response.status == 401
def test_redirect_without_url(app):
sanic_app, sanic_jwt = app
@sanic_app.route('/index.html')
def index(request):
return html('<html><body>Home</body></html>')
@sanic_app.route('/protected/static')
@sanic_jwt.protected(redirect_on_fail=True)
async def my_protected_static(request):
return text('', status=200)
request, response = sanic_app.test_client.get('/protected/static')
assert response.status == 200
assert response.body == b'<html><body>Home</body></html>'
assert response.history
assert response.history[0].status_code == 302
<mask token>
def test_redirect_with_configured_url():
sanic_app = Sanic('sanic-jwt-test')
sanic_jwt = Initialize(sanic_app, auth_mode=False, login_redirect_url=
'/unprotected')
@sanic_app.route('/protected/static')
@sanic_jwt.protected(redirect_on_fail=True)
async def my_protected_static(request):
return text('', status=200)
@sanic_app.route('/unprotected')
async def my_unprotected_goto(request):
return text('unprotected content', status=200)
_, response = sanic_app.test_client.get('/protected/static')
assert response.status == 200 and response.text == 'unprotected content'
<mask token>
| <mask token>
def test_forgotten_initialized_on_protected():
blueprint = Blueprint('Test')
@blueprint.get('/protected')
@protected()
def protected_hello_world(request):
return json({'message': 'hello world'})
@blueprint.route('/scoped')
@scoped('something')
async def scoped_endpoint(request):
return json({'scoped': True})
app = Sanic('sanic-jwt-test')
sanicjwt = Initialize(blueprint, app=app, authenticate=lambda x: True)
app.blueprint(blueprint, url_prefix='/test')
_, response = app.test_client.post('/test/auth', json={'username':
'user1', 'password': 'abcxyz'})
access_token = response.json.get(sanicjwt.config.access_token_name(), None)
_, response = app.test_client.get('/test/protected', headers={
'Authorization': 'Bearer {}'.format(access_token)})
assert response.status == 500
assert response.json.get('exception') == 'SanicJWTException'
_, response = app.test_client.get('/test/scoped', headers={
'Authorization': 'Bearer {}'.format(access_token)})
assert response.status == 500
assert response.json.get('exception') == 'SanicJWTException'
def test_option_method_on_protected(app):
sanic_app, sanic_jwt = app
@sanic_app.route('/protected/options', methods=['OPTIONS'])
@sanic_jwt.protected()
async def my_protected_options(request):
return text('', status=204)
_, response = sanic_app.test_client.options('/protected/options')
assert response.status == 204
<mask token>
def test_inject_user_on_instance(app_with_retrieve_user):
sanic_app, sanic_jwt = app_with_retrieve_user
_, response = sanic_app.test_client.post('/auth', json={'username':
'user1', 'password': 'abcxyz'})
sanic_app.router.reset()
@sanic_app.route('/protected/user')
@sanic_jwt.inject_user()
@sanic_jwt.protected()
async def my_protected_user(request, user):
return json({'user_id': user.user_id})
access_token = response.json.get(sanic_jwt.config.access_token_name(), None
)
_, response = sanic_app.test_client.get('/auth/me', headers={
'Authorization': 'Bearer {}'.format(access_token)})
assert response.json.get('me').get('user_id') == 1
_, response = sanic_app.test_client.get('/protected/user', headers={
'Authorization': 'Bearer {}'.format(access_token)})
assert response.status == 200
assert response.json.get('user_id') == 1
<mask token>
def test_inject_user_with_auth_mode_off(app_with_retrieve_user):
async def retrieve_user(request, payload, *args, **kwargs):
return {'user_id': 123}
microservice_app = Sanic('sanic-jwt-test')
microservice_sanic_jwt = Initialize(microservice_app, auth_mode=False,
retrieve_user=retrieve_user)
@microservice_app.route('/protected/user')
@microservice_sanic_jwt.inject_user()
@microservice_sanic_jwt.protected()
async def my_protected_user(request, user):
return json({'user_id': user.get('user_id')})
sanic_app, sanic_jwt = app_with_retrieve_user
_, response = sanic_app.test_client.post('/auth', json={'username':
'user1', 'password': 'abcxyz'})
access_token = response.json.get(sanic_jwt.config.access_token_name(), None
)
_, response = microservice_app.test_client.get('/protected/user',
headers={'Authorization': 'Bearer {}'.format(access_token)})
assert response.status == 200
assert response.json.get('user_id') == 123
_, response = microservice_app.test_client.get('/protected/user')
assert response.status == 401
def test_redirect_without_url(app):
sanic_app, sanic_jwt = app
@sanic_app.route('/index.html')
def index(request):
return html('<html><body>Home</body></html>')
@sanic_app.route('/protected/static')
@sanic_jwt.protected(redirect_on_fail=True)
async def my_protected_static(request):
return text('', status=200)
request, response = sanic_app.test_client.get('/protected/static')
assert response.status == 200
assert response.body == b'<html><body>Home</body></html>'
assert response.history
assert response.history[0].status_code == 302
<mask token>
def test_redirect_with_configured_url():
sanic_app = Sanic('sanic-jwt-test')
sanic_jwt = Initialize(sanic_app, auth_mode=False, login_redirect_url=
'/unprotected')
@sanic_app.route('/protected/static')
@sanic_jwt.protected(redirect_on_fail=True)
async def my_protected_static(request):
return text('', status=200)
@sanic_app.route('/unprotected')
async def my_unprotected_goto(request):
return text('unprotected content', status=200)
_, response = sanic_app.test_client.get('/protected/static')
assert response.status == 200 and response.text == 'unprotected content'
def test_authenticated_redirect(app_with_retrieve_user):
sanic_app, sanic_jwt = app_with_retrieve_user
_, response = sanic_app.test_client.post('/auth', json={'username':
'user1', 'password': 'abcxyz'})
sanic_app.router.reset()
@sanic_app.route('/protected/static')
@sanic_jwt.protected(redirect_on_fail=True)
async def my_protected_static(request):
return text('protected content', status=200)
@sanic_app.route('/unprotected')
async def my_unprotected_goto(request):
return text('unprotected content', status=200)
access_token = response.json.get(sanic_jwt.config.access_token_name(), None
)
_, response = sanic_app.test_client.get('/protected/static', headers={
'Authorization': 'Bearer {}'.format(access_token)})
assert response.status == 200 and response.text == 'protected content'
| <mask token>
def test_forgotten_initialized_on_protected():
blueprint = Blueprint('Test')
@blueprint.get('/protected')
@protected()
def protected_hello_world(request):
return json({'message': 'hello world'})
@blueprint.route('/scoped')
@scoped('something')
async def scoped_endpoint(request):
return json({'scoped': True})
app = Sanic('sanic-jwt-test')
sanicjwt = Initialize(blueprint, app=app, authenticate=lambda x: True)
app.blueprint(blueprint, url_prefix='/test')
_, response = app.test_client.post('/test/auth', json={'username':
'user1', 'password': 'abcxyz'})
access_token = response.json.get(sanicjwt.config.access_token_name(), None)
_, response = app.test_client.get('/test/protected', headers={
'Authorization': 'Bearer {}'.format(access_token)})
assert response.status == 500
assert response.json.get('exception') == 'SanicJWTException'
_, response = app.test_client.get('/test/scoped', headers={
'Authorization': 'Bearer {}'.format(access_token)})
assert response.status == 500
assert response.json.get('exception') == 'SanicJWTException'
def test_option_method_on_protected(app):
sanic_app, sanic_jwt = app
@sanic_app.route('/protected/options', methods=['OPTIONS'])
@sanic_jwt.protected()
async def my_protected_options(request):
return text('', status=204)
_, response = sanic_app.test_client.options('/protected/options')
assert response.status == 204
def test_inject_user_regular(app_with_retrieve_user):
sanic_app, sanic_jwt = app_with_retrieve_user
_, response = sanic_app.test_client.post('/auth', json={'username':
'user1', 'password': 'abcxyz'})
sanic_app.router.reset()
@sanic_app.route('/protected/user')
@inject_user()
@protected()
async def my_protected_user(request, user):
return json({'user_id': user.user_id})
access_token = response.json.get(sanic_jwt.config.access_token_name(), None
)
_, response = sanic_app.test_client.get('/auth/me', headers={
'Authorization': 'Bearer {}'.format(access_token)})
assert response.json.get('me').get('user_id') == 1
_, response = sanic_app.test_client.get('/protected/user', headers={
'Authorization': 'Bearer {}'.format(access_token)})
assert response.status == 200
assert response.json.get('user_id') == 1
def test_inject_user_on_instance(app_with_retrieve_user):
sanic_app, sanic_jwt = app_with_retrieve_user
_, response = sanic_app.test_client.post('/auth', json={'username':
'user1', 'password': 'abcxyz'})
sanic_app.router.reset()
@sanic_app.route('/protected/user')
@sanic_jwt.inject_user()
@sanic_jwt.protected()
async def my_protected_user(request, user):
return json({'user_id': user.user_id})
access_token = response.json.get(sanic_jwt.config.access_token_name(), None
)
_, response = sanic_app.test_client.get('/auth/me', headers={
'Authorization': 'Bearer {}'.format(access_token)})
assert response.json.get('me').get('user_id') == 1
_, response = sanic_app.test_client.get('/protected/user', headers={
'Authorization': 'Bearer {}'.format(access_token)})
assert response.status == 200
assert response.json.get('user_id') == 1
<mask token>
def test_inject_user_with_auth_mode_off(app_with_retrieve_user):
async def retrieve_user(request, payload, *args, **kwargs):
return {'user_id': 123}
microservice_app = Sanic('sanic-jwt-test')
microservice_sanic_jwt = Initialize(microservice_app, auth_mode=False,
retrieve_user=retrieve_user)
@microservice_app.route('/protected/user')
@microservice_sanic_jwt.inject_user()
@microservice_sanic_jwt.protected()
async def my_protected_user(request, user):
return json({'user_id': user.get('user_id')})
sanic_app, sanic_jwt = app_with_retrieve_user
_, response = sanic_app.test_client.post('/auth', json={'username':
'user1', 'password': 'abcxyz'})
access_token = response.json.get(sanic_jwt.config.access_token_name(), None
)
_, response = microservice_app.test_client.get('/protected/user',
headers={'Authorization': 'Bearer {}'.format(access_token)})
assert response.status == 200
assert response.json.get('user_id') == 123
_, response = microservice_app.test_client.get('/protected/user')
assert response.status == 401
def test_redirect_without_url(app):
sanic_app, sanic_jwt = app
@sanic_app.route('/index.html')
def index(request):
return html('<html><body>Home</body></html>')
@sanic_app.route('/protected/static')
@sanic_jwt.protected(redirect_on_fail=True)
async def my_protected_static(request):
return text('', status=200)
request, response = sanic_app.test_client.get('/protected/static')
assert response.status == 200
assert response.body == b'<html><body>Home</body></html>'
assert response.history
assert response.history[0].status_code == 302
<mask token>
def test_redirect_with_configured_url():
sanic_app = Sanic('sanic-jwt-test')
sanic_jwt = Initialize(sanic_app, auth_mode=False, login_redirect_url=
'/unprotected')
@sanic_app.route('/protected/static')
@sanic_jwt.protected(redirect_on_fail=True)
async def my_protected_static(request):
return text('', status=200)
@sanic_app.route('/unprotected')
async def my_unprotected_goto(request):
return text('unprotected content', status=200)
_, response = sanic_app.test_client.get('/protected/static')
assert response.status == 200 and response.text == 'unprotected content'
def test_authenticated_redirect(app_with_retrieve_user):
sanic_app, sanic_jwt = app_with_retrieve_user
_, response = sanic_app.test_client.post('/auth', json={'username':
'user1', 'password': 'abcxyz'})
sanic_app.router.reset()
@sanic_app.route('/protected/static')
@sanic_jwt.protected(redirect_on_fail=True)
async def my_protected_static(request):
return text('protected content', status=200)
@sanic_app.route('/unprotected')
async def my_unprotected_goto(request):
return text('unprotected content', status=200)
access_token = response.json.get(sanic_jwt.config.access_token_name(), None
)
_, response = sanic_app.test_client.get('/protected/static', headers={
'Authorization': 'Bearer {}'.format(access_token)})
assert response.status == 200 and response.text == 'protected content'
| <mask token>
def test_forgotten_initialized_on_protected():
blueprint = Blueprint('Test')
@blueprint.get('/protected')
@protected()
def protected_hello_world(request):
return json({'message': 'hello world'})
@blueprint.route('/scoped')
@scoped('something')
async def scoped_endpoint(request):
return json({'scoped': True})
app = Sanic('sanic-jwt-test')
sanicjwt = Initialize(blueprint, app=app, authenticate=lambda x: True)
app.blueprint(blueprint, url_prefix='/test')
_, response = app.test_client.post('/test/auth', json={'username':
'user1', 'password': 'abcxyz'})
access_token = response.json.get(sanicjwt.config.access_token_name(), None)
_, response = app.test_client.get('/test/protected', headers={
'Authorization': 'Bearer {}'.format(access_token)})
assert response.status == 500
assert response.json.get('exception') == 'SanicJWTException'
_, response = app.test_client.get('/test/scoped', headers={
'Authorization': 'Bearer {}'.format(access_token)})
assert response.status == 500
assert response.json.get('exception') == 'SanicJWTException'
def test_option_method_on_protected(app):
sanic_app, sanic_jwt = app
@sanic_app.route('/protected/options', methods=['OPTIONS'])
@sanic_jwt.protected()
async def my_protected_options(request):
return text('', status=204)
_, response = sanic_app.test_client.options('/protected/options')
assert response.status == 204
def test_inject_user_regular(app_with_retrieve_user):
sanic_app, sanic_jwt = app_with_retrieve_user
_, response = sanic_app.test_client.post('/auth', json={'username':
'user1', 'password': 'abcxyz'})
sanic_app.router.reset()
@sanic_app.route('/protected/user')
@inject_user()
@protected()
async def my_protected_user(request, user):
return json({'user_id': user.user_id})
access_token = response.json.get(sanic_jwt.config.access_token_name(), None
)
_, response = sanic_app.test_client.get('/auth/me', headers={
'Authorization': 'Bearer {}'.format(access_token)})
assert response.json.get('me').get('user_id') == 1
_, response = sanic_app.test_client.get('/protected/user', headers={
'Authorization': 'Bearer {}'.format(access_token)})
assert response.status == 200
assert response.json.get('user_id') == 1
def test_inject_user_on_instance(app_with_retrieve_user):
sanic_app, sanic_jwt = app_with_retrieve_user
_, response = sanic_app.test_client.post('/auth', json={'username':
'user1', 'password': 'abcxyz'})
sanic_app.router.reset()
@sanic_app.route('/protected/user')
@sanic_jwt.inject_user()
@sanic_jwt.protected()
async def my_protected_user(request, user):
return json({'user_id': user.user_id})
access_token = response.json.get(sanic_jwt.config.access_token_name(), None
)
_, response = sanic_app.test_client.get('/auth/me', headers={
'Authorization': 'Bearer {}'.format(access_token)})
assert response.json.get('me').get('user_id') == 1
_, response = sanic_app.test_client.get('/protected/user', headers={
'Authorization': 'Bearer {}'.format(access_token)})
assert response.status == 200
assert response.json.get('user_id') == 1
def test_inject_user_on_instance_bp(app_with_retrieve_user):
sanic_app, sanic_jwt = app_with_retrieve_user
_, response = sanic_app.test_client.post('/auth', json={'username':
'user1', 'password': 'abcxyz'})
sanic_app.router.reset()
@sanic_app.route('/protected/user')
@sanic_jwt.inject_user()
@sanic_jwt.protected()
async def my_protected_user(request, user):
return json({'user_id': user.user_id})
access_token = response.json.get(sanic_jwt.config.access_token_name(), None
)
_, response = sanic_app.test_client.get('/auth/me', headers={
'Authorization': 'Bearer {}'.format(access_token)})
assert response.json.get('me').get('user_id') == 1
_, response = sanic_app.test_client.get('/protected/user', headers={
'Authorization': 'Bearer {}'.format(access_token)})
assert response.status == 200
assert response.json.get('user_id') == 1
def test_inject_user_on_instance_non_async(app_with_retrieve_user):
sanic_app, sanic_jwt = app_with_retrieve_user
_, response = sanic_app.test_client.post('/auth', json={'username':
'user1', 'password': 'abcxyz'})
sanic_app.router.reset()
@sanic_app.route('/protected/user')
@sanic_jwt.inject_user()
@sanic_jwt.protected()
def my_protected_user(request, user):
return json({'user_id': user.user_id})
access_token = response.json.get(sanic_jwt.config.access_token_name(), None
)
_, response = sanic_app.test_client.get('/auth/me', headers={
'Authorization': 'Bearer {}'.format(access_token)})
assert response.json.get('me').get('user_id') == 1
_, response = sanic_app.test_client.get('/protected/user', headers={
'Authorization': 'Bearer {}'.format(access_token)})
assert response.status == 200
assert response.json.get('user_id') == 1
def test_inject_user_with_auth_mode_off(app_with_retrieve_user):
async def retrieve_user(request, payload, *args, **kwargs):
return {'user_id': 123}
microservice_app = Sanic('sanic-jwt-test')
microservice_sanic_jwt = Initialize(microservice_app, auth_mode=False,
retrieve_user=retrieve_user)
@microservice_app.route('/protected/user')
@microservice_sanic_jwt.inject_user()
@microservice_sanic_jwt.protected()
async def my_protected_user(request, user):
return json({'user_id': user.get('user_id')})
sanic_app, sanic_jwt = app_with_retrieve_user
_, response = sanic_app.test_client.post('/auth', json={'username':
'user1', 'password': 'abcxyz'})
access_token = response.json.get(sanic_jwt.config.access_token_name(), None
)
_, response = microservice_app.test_client.get('/protected/user',
headers={'Authorization': 'Bearer {}'.format(access_token)})
assert response.status == 200
assert response.json.get('user_id') == 123
_, response = microservice_app.test_client.get('/protected/user')
assert response.status == 401
def test_redirect_without_url(app):
sanic_app, sanic_jwt = app
@sanic_app.route('/index.html')
def index(request):
return html('<html><body>Home</body></html>')
@sanic_app.route('/protected/static')
@sanic_jwt.protected(redirect_on_fail=True)
async def my_protected_static(request):
return text('', status=200)
request, response = sanic_app.test_client.get('/protected/static')
assert response.status == 200
assert response.body == b'<html><body>Home</body></html>'
assert response.history
assert response.history[0].status_code == 302
def test_redirect_with_decorator_url(app):
sanic_app, sanic_jwt = app
@sanic_app.route('/protected/static')
@sanic_jwt.protected(redirect_on_fail=True, redirect_url='/unprotected')
async def my_protected_static(request):
return text('', status=200)
@sanic_app.route('/unprotected')
async def my_unprotected_goto(request):
return text('unprotected content', status=200)
_, response = sanic_app.test_client.get('/protected/static')
assert response.status == 200 and response.text == 'unprotected content'
def test_redirect_with_configured_url():
sanic_app = Sanic('sanic-jwt-test')
sanic_jwt = Initialize(sanic_app, auth_mode=False, login_redirect_url=
'/unprotected')
@sanic_app.route('/protected/static')
@sanic_jwt.protected(redirect_on_fail=True)
async def my_protected_static(request):
return text('', status=200)
@sanic_app.route('/unprotected')
async def my_unprotected_goto(request):
return text('unprotected content', status=200)
_, response = sanic_app.test_client.get('/protected/static')
assert response.status == 200 and response.text == 'unprotected content'
def test_authenticated_redirect(app_with_retrieve_user):
sanic_app, sanic_jwt = app_with_retrieve_user
_, response = sanic_app.test_client.post('/auth', json={'username':
'user1', 'password': 'abcxyz'})
sanic_app.router.reset()
@sanic_app.route('/protected/static')
@sanic_jwt.protected(redirect_on_fail=True)
async def my_protected_static(request):
return text('protected content', status=200)
@sanic_app.route('/unprotected')
async def my_unprotected_goto(request):
return text('unprotected content', status=200)
access_token = response.json.get(sanic_jwt.config.access_token_name(), None
)
_, response = sanic_app.test_client.get('/protected/static', headers={
'Authorization': 'Bearer {}'.format(access_token)})
assert response.status == 200 and response.text == 'protected content'
| from sanic import Sanic
from sanic.blueprints import Blueprint
from sanic.response import html, json, text
from sanic_jwt import Initialize
from sanic_jwt.decorators import inject_user, protected, scoped
def test_forgotten_initialized_on_protected():
blueprint = Blueprint("Test")
@blueprint.get("/protected")
@protected()
def protected_hello_world(request):
return json({"message": "hello world"})
@blueprint.route("/scoped")
@scoped("something")
async def scoped_endpoint(request):
return json({"scoped": True})
app = Sanic("sanic-jwt-test")
sanicjwt = Initialize(blueprint, app=app, authenticate=lambda x: True)
app.blueprint(blueprint, url_prefix="/test")
_, response = app.test_client.post(
"/test/auth", json={"username": "user1", "password": "abcxyz"}
)
access_token = response.json.get(sanicjwt.config.access_token_name(), None)
_, response = app.test_client.get(
"/test/protected",
headers={"Authorization": "Bearer {}".format(access_token)},
)
assert response.status == 500
assert response.json.get("exception") == "SanicJWTException"
_, response = app.test_client.get(
"/test/scoped",
headers={"Authorization": "Bearer {}".format(access_token)},
)
assert response.status == 500
assert response.json.get("exception") == "SanicJWTException"
def test_option_method_on_protected(app):
sanic_app, sanic_jwt = app
@sanic_app.route("/protected/options", methods=["OPTIONS"])
@sanic_jwt.protected()
async def my_protected_options(request):
return text("", status=204)
_, response = sanic_app.test_client.options("/protected/options")
assert response.status == 204
def test_inject_user_regular(app_with_retrieve_user):
sanic_app, sanic_jwt = app_with_retrieve_user
_, response = sanic_app.test_client.post(
"/auth", json={"username": "user1", "password": "abcxyz"}
)
sanic_app.router.reset()
@sanic_app.route("/protected/user")
@inject_user()
@protected()
async def my_protected_user(request, user):
return json({"user_id": user.user_id})
access_token = response.json.get(
sanic_jwt.config.access_token_name(), None
)
_, response = sanic_app.test_client.get(
"/auth/me", headers={"Authorization": "Bearer {}".format(access_token)}
)
assert response.json.get("me").get("user_id") == 1
_, response = sanic_app.test_client.get(
"/protected/user",
headers={"Authorization": "Bearer {}".format(access_token)},
)
assert response.status == 200
assert response.json.get("user_id") == 1
def test_inject_user_on_instance(app_with_retrieve_user):
sanic_app, sanic_jwt = app_with_retrieve_user
_, response = sanic_app.test_client.post(
"/auth", json={"username": "user1", "password": "abcxyz"}
)
sanic_app.router.reset()
@sanic_app.route("/protected/user")
@sanic_jwt.inject_user()
@sanic_jwt.protected()
async def my_protected_user(request, user):
return json({"user_id": user.user_id})
access_token = response.json.get(
sanic_jwt.config.access_token_name(), None
)
_, response = sanic_app.test_client.get(
"/auth/me", headers={"Authorization": "Bearer {}".format(access_token)}
)
assert response.json.get("me").get("user_id") == 1
_, response = sanic_app.test_client.get(
"/protected/user",
headers={"Authorization": "Bearer {}".format(access_token)},
)
assert response.status == 200
assert response.json.get("user_id") == 1
def test_inject_user_on_instance_bp(app_with_retrieve_user):
sanic_app, sanic_jwt = app_with_retrieve_user
_, response = sanic_app.test_client.post(
"/auth", json={"username": "user1", "password": "abcxyz"}
)
sanic_app.router.reset()
@sanic_app.route("/protected/user")
@sanic_jwt.inject_user()
@sanic_jwt.protected()
async def my_protected_user(request, user):
return json({"user_id": user.user_id})
access_token = response.json.get(
sanic_jwt.config.access_token_name(), None
)
_, response = sanic_app.test_client.get(
"/auth/me", headers={"Authorization": "Bearer {}".format(access_token)}
)
assert response.json.get("me").get("user_id") == 1
_, response = sanic_app.test_client.get(
"/protected/user",
headers={"Authorization": "Bearer {}".format(access_token)},
)
assert response.status == 200
assert response.json.get("user_id") == 1
def test_inject_user_on_instance_non_async(app_with_retrieve_user):
sanic_app, sanic_jwt = app_with_retrieve_user
_, response = sanic_app.test_client.post(
"/auth", json={"username": "user1", "password": "abcxyz"}
)
sanic_app.router.reset()
@sanic_app.route("/protected/user")
@sanic_jwt.inject_user()
@sanic_jwt.protected()
def my_protected_user(request, user):
return json({"user_id": user.user_id})
access_token = response.json.get(
sanic_jwt.config.access_token_name(), None
)
_, response = sanic_app.test_client.get(
"/auth/me", headers={"Authorization": "Bearer {}".format(access_token)}
)
assert response.json.get("me").get("user_id") == 1
_, response = sanic_app.test_client.get(
"/protected/user",
headers={"Authorization": "Bearer {}".format(access_token)},
)
assert response.status == 200
assert response.json.get("user_id") == 1
def test_inject_user_with_auth_mode_off(app_with_retrieve_user):
async def retrieve_user(request, payload, *args, **kwargs):
return {"user_id": 123}
microservice_app = Sanic("sanic-jwt-test")
microservice_sanic_jwt = Initialize(
microservice_app, auth_mode=False, retrieve_user=retrieve_user
)
@microservice_app.route("/protected/user")
@microservice_sanic_jwt.inject_user()
@microservice_sanic_jwt.protected()
async def my_protected_user(request, user):
return json({"user_id": user.get("user_id")})
sanic_app, sanic_jwt = app_with_retrieve_user
_, response = sanic_app.test_client.post(
"/auth", json={"username": "user1", "password": "abcxyz"}
)
access_token = response.json.get(
sanic_jwt.config.access_token_name(), None
)
_, response = microservice_app.test_client.get(
"/protected/user",
headers={"Authorization": "Bearer {}".format(access_token)},
)
assert response.status == 200
assert response.json.get("user_id") == 123
_, response = microservice_app.test_client.get("/protected/user")
assert response.status == 401
def test_redirect_without_url(app):
sanic_app, sanic_jwt = app
@sanic_app.route("/index.html")
def index(request):
return html("<html><body>Home</body></html>")
@sanic_app.route("/protected/static")
@sanic_jwt.protected(redirect_on_fail=True)
async def my_protected_static(request):
return text("", status=200)
request, response = sanic_app.test_client.get("/protected/static")
assert response.status == 200
assert response.body == b"<html><body>Home</body></html>"
assert response.history
assert response.history[0].status_code == 302
def test_redirect_with_decorator_url(app):
sanic_app, sanic_jwt = app
@sanic_app.route("/protected/static")
@sanic_jwt.protected(redirect_on_fail=True, redirect_url="/unprotected")
async def my_protected_static(request):
return text("", status=200)
@sanic_app.route("/unprotected")
async def my_unprotected_goto(request):
return text("unprotected content", status=200)
_, response = sanic_app.test_client.get("/protected/static")
assert response.status == 200 and response.text == "unprotected content"
def test_redirect_with_configured_url():
sanic_app = Sanic("sanic-jwt-test")
sanic_jwt = Initialize(
sanic_app, auth_mode=False, login_redirect_url="/unprotected"
)
@sanic_app.route("/protected/static")
@sanic_jwt.protected(redirect_on_fail=True)
async def my_protected_static(request):
return text("", status=200)
@sanic_app.route("/unprotected")
async def my_unprotected_goto(request):
return text("unprotected content", status=200)
_, response = sanic_app.test_client.get("/protected/static")
assert response.status == 200 and response.text == "unprotected content"
def test_authenticated_redirect(app_with_retrieve_user):
sanic_app, sanic_jwt = app_with_retrieve_user
_, response = sanic_app.test_client.post(
"/auth", json={"username": "user1", "password": "abcxyz"}
)
sanic_app.router.reset()
@sanic_app.route("/protected/static")
@sanic_jwt.protected(redirect_on_fail=True)
async def my_protected_static(request):
return text("protected content", status=200)
@sanic_app.route("/unprotected")
async def my_unprotected_goto(request):
return text("unprotected content", status=200)
access_token = response.json.get(
sanic_jwt.config.access_token_name(), None
)
_, response = sanic_app.test_client.get(
"/protected/static",
headers={"Authorization": "Bearer {}".format(access_token)},
)
assert response.status == 200 and response.text == "protected content"
| [
6,
7,
8,
11,
13
] |
2,033 | e5b5874f060bdf93ac4fadaf556aa4182619d077 | <mask token>
| <mask token>
try:
conn = pymysql.connect(host='127.0.0.1', port=3306, user='root', passwd
='root', db='test')
cur = conn.cursor()
cur.execute('SELECT user_id, user_name FROM cap_user')
row_count = cur.rowcount
for r in cur.fetchall():
print('userId is %s, userName is %s' % r)
except Exception as e:
print(e)
finally:
if cur is not None:
cur.close()
if conn is not None:
conn.close()
| <mask token>
conn = None
cur = None
try:
conn = pymysql.connect(host='127.0.0.1', port=3306, user='root', passwd
='root', db='test')
cur = conn.cursor()
cur.execute('SELECT user_id, user_name FROM cap_user')
row_count = cur.rowcount
for r in cur.fetchall():
print('userId is %s, userName is %s' % r)
except Exception as e:
print(e)
finally:
if cur is not None:
cur.close()
if conn is not None:
conn.close()
| import pymysql
conn = None
cur = None
try:
conn = pymysql.connect(host='127.0.0.1', port=3306, user='root', passwd
='root', db='test')
cur = conn.cursor()
cur.execute('SELECT user_id, user_name FROM cap_user')
row_count = cur.rowcount
for r in cur.fetchall():
print('userId is %s, userName is %s' % r)
except Exception as e:
print(e)
finally:
if cur is not None:
cur.close()
if conn is not None:
conn.close()
| import pymysql
conn = None
cur = None
try:
conn = pymysql.connect(host='127.0.0.1', port=3306, user='root', passwd='root', db='test')
cur = conn.cursor()
cur.execute("SELECT user_id, user_name FROM cap_user")
row_count = cur.rowcount
# row_number = cur.rownumber
for r in cur.fetchall():
print("userId is %s, userName is %s" % r)
except Exception as e:
print(e)
finally:
if cur is not None:
cur.close()
if conn is not None:
conn.close()
| [
0,
1,
2,
3,
4
] |
2,034 | aed09a3c04f284fa0b8844a47c5bc9d1621a9b5f | <mask token>
| <mask token>
file_handle.write(contents)
file_handle.close()
print(contents)
| fileName = str(input(
'Please write the name of the file you would like to open: '))
file_handle = open(fileName, 'w')
contents = str(input('Please write the content you would like to save.'))
file_handle.write(contents)
file_handle.close()
print(contents)
| fileName = str(input("Please write the name of the file you would like to open: "))
file_handle = open(fileName, "w")
contents = str(input("Please write the content you would like to save."))
file_handle.write(contents)
file_handle.close()
print(contents) | null | [
0,
1,
2,
3
] |
2,035 | 83e231480c618d290089340c642313bbba4f1070 | <mask token>
| <mask token>
with open('movies.csv', 'w') as file:
csv_writer = writer(file)
csv_writer.writerow(['Name', 'Year'])
csv_writer.writerow(['Ratchasan', 2018])
csv_writer.writerow(['Vadachennai', 2018])
csv_writer.writerow(['Naran', 2007])
| from csv import writer
with open('movies.csv', 'w') as file:
csv_writer = writer(file)
csv_writer.writerow(['Name', 'Year'])
csv_writer.writerow(['Ratchasan', 2018])
csv_writer.writerow(['Vadachennai', 2018])
csv_writer.writerow(['Naran', 2007])
| from csv import writer
with open("movies.csv","w") as file:
csv_writer=writer(file)
csv_writer.writerow(['Name','Year'])
csv_writer.writerow(['Ratchasan',2018])
csv_writer.writerow(['Vadachennai',2018])
csv_writer.writerow(['Naran',2007])
| null | [
0,
1,
2,
3
] |
2,036 | 6e98dfd758700c57ddbb17624472ce2c23cbee6a | # Name: CreateDatabase.py
# Description: Connects to a point in time in the geodatabase in
# PostgreSQL using database authentication.
# Import system modules
import arcpy
import os
arcpy.env.workspace="Database Connections"
if arcpy.Exists ("Prueba6.sde")==False:
arcpy.CreateDatabaseConnection_management("Database Connections",
"Prueba6.sde",
"SQL_SERVER",
"192.168.200.250",
"DATABASE_AUTH",
"sde",
"$deDEs4Rr0lLo",
"#",
"sprueba",
"#",
"#",
"#",
"#")
#arcpy.ListUsers(conection_sde)
#print arcpy.ListFeatureClasses()
prueba = "Prueba6.sde"
desc= arcpy.Describe("Prueba6.sde")
print desc.name
arcpy.env.workspace = r"Database Connections/Prueba6.sde"
desc= arcpy.Describe("sprueba.DBO.base_limites_nacionales")
print desc.name
#datasets=arcpy.ListDatasets()
arcpy.env.workspace = r"Database Connections/Prueba6.sde/sprueba.DBO.base_limites_nacionales"
desc= arcpy.Describe("sprueba.DBO.departamentos")
print desc.name
#arcpy.AddField_management("sprueba.DBO.departamentos","limites_buffer","TEXT","10")
#arcpy.Buffer_analysis("sprueba.DBO.departamentos","sprueba.DBO.departamentos_buffer",'10 miles')
inFeatures = "sprueba.DBO.departamentos"
fieldName1 = "xCentroid"
fieldName2 = "yCentroid"
fieldPrecision = 18
fieldScale = 11
# Add fields
arcpy.AddField_management(inFeatures, fieldName1, "DOUBLE",
fieldPrecision, fieldScale)
arcpy.AddField_management(inFeatures, fieldName2, "DOUBLE",
fieldPrecision, fieldScale)
# Calculate centroid
arcpy.CalculateField_management(inFeatures, fieldName1,
"!SHAPE.CENTROID.X!",
"PYTHON_9.3")
arcpy.CalculateField_management(inFeatures, fieldName2,
"!SHAPE.CENTROID.Y!",
"PYTHON_9.3")
| null | null | null | null | [
0
] |
2,037 | 6415b08795975698e8e2019cafb82561b35f8e71 | <mask token>
| from __future__ import absolute_import
from . import utils
from . import bert_model
from . import transformer
from .utils import *
from .bert_model import *
from .transformer import *
| null | null | null | [
0,
1
] |
2,038 | b616b907eb67fff97d57ee2b0d3ab8e01d154956 | import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), '../tools'))
import files
import genetics
def main(argv):
S = files.read_lines(argv[0])
S_rc = [genetics.dna_complement(s) for s in S]
S_u = set(S + S_rc)
B_k = []
for s in S_u:
B_k.append((s[:-1], s[1:]))
print '\n'.join('(%s, %s)' % b for b in sorted(B_k))
if __name__ == "__main__":
main(sys.argv[1:])
| null | null | null | null | [
0
] |
2,039 | 6de9fffd91d2f7602f7c681253211077704ba8c4 | <mask token>
class Product(models.Model):
title = models.CharField(max_length=32)
description = models.TextField(max_length=360)
price = models.IntegerField()
image = models.CharField(max_length=255, null=True)
brand = models.ForeignKey(Brand, on_delete=models.CASCADE)
user = models.ForeignKey(User, null=True, on_delete=models.SET_NULL)
def no_of_ratings(self):
ratings = Rating.objects.filter(product=self)
return len(ratings)
def avg_rating(self):
sum = 0
ratings = Rating.objects.filter(product=self)
for rating in ratings:
sum += rating.stars
if len(ratings) > 0:
return sum / len(ratings)
else:
return 0
class Rating(models.Model):
product = models.ForeignKey(Product, on_delete=models.CASCADE)
user = models.ForeignKey(User, on_delete=models.CASCADE)
stars = models.IntegerField(validators=[MinValueValidator(1),
MaxValueValidator(5)])
class Meta:
unique_together = 'user', 'product'
index_together = 'user', 'product'
| <mask token>
class Brand(models.Model):
<mask token>
<mask token>
<mask token>
class Product(models.Model):
title = models.CharField(max_length=32)
description = models.TextField(max_length=360)
price = models.IntegerField()
image = models.CharField(max_length=255, null=True)
brand = models.ForeignKey(Brand, on_delete=models.CASCADE)
user = models.ForeignKey(User, null=True, on_delete=models.SET_NULL)
def no_of_ratings(self):
ratings = Rating.objects.filter(product=self)
return len(ratings)
def avg_rating(self):
sum = 0
ratings = Rating.objects.filter(product=self)
for rating in ratings:
sum += rating.stars
if len(ratings) > 0:
return sum / len(ratings)
else:
return 0
class Rating(models.Model):
product = models.ForeignKey(Product, on_delete=models.CASCADE)
user = models.ForeignKey(User, on_delete=models.CASCADE)
stars = models.IntegerField(validators=[MinValueValidator(1),
MaxValueValidator(5)])
class Meta:
unique_together = 'user', 'product'
index_together = 'user', 'product'
| <mask token>
class Person(models.Model):
<mask token>
<mask token>
<mask token>
class Brand(models.Model):
name = models.CharField(max_length=255)
coverImage = models.CharField(max_length=360)
logo = models.CharField(max_length=360)
class Product(models.Model):
title = models.CharField(max_length=32)
description = models.TextField(max_length=360)
price = models.IntegerField()
image = models.CharField(max_length=255, null=True)
brand = models.ForeignKey(Brand, on_delete=models.CASCADE)
user = models.ForeignKey(User, null=True, on_delete=models.SET_NULL)
def no_of_ratings(self):
ratings = Rating.objects.filter(product=self)
return len(ratings)
def avg_rating(self):
sum = 0
ratings = Rating.objects.filter(product=self)
for rating in ratings:
sum += rating.stars
if len(ratings) > 0:
return sum / len(ratings)
else:
return 0
class Rating(models.Model):
product = models.ForeignKey(Product, on_delete=models.CASCADE)
user = models.ForeignKey(User, on_delete=models.CASCADE)
stars = models.IntegerField(validators=[MinValueValidator(1),
MaxValueValidator(5)])
class Meta:
unique_together = 'user', 'product'
index_together = 'user', 'product'
| <mask token>
class Person(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE,
related_name='person')
age = models.PositiveSmallIntegerField()
bio = models.CharField(max_length=255)
class Brand(models.Model):
name = models.CharField(max_length=255)
coverImage = models.CharField(max_length=360)
logo = models.CharField(max_length=360)
class Product(models.Model):
title = models.CharField(max_length=32)
description = models.TextField(max_length=360)
price = models.IntegerField()
image = models.CharField(max_length=255, null=True)
brand = models.ForeignKey(Brand, on_delete=models.CASCADE)
user = models.ForeignKey(User, null=True, on_delete=models.SET_NULL)
def no_of_ratings(self):
ratings = Rating.objects.filter(product=self)
return len(ratings)
def avg_rating(self):
sum = 0
ratings = Rating.objects.filter(product=self)
for rating in ratings:
sum += rating.stars
if len(ratings) > 0:
return sum / len(ratings)
else:
return 0
class Rating(models.Model):
product = models.ForeignKey(Product, on_delete=models.CASCADE)
user = models.ForeignKey(User, on_delete=models.CASCADE)
stars = models.IntegerField(validators=[MinValueValidator(1),
MaxValueValidator(5)])
class Meta:
unique_together = 'user', 'product'
index_together = 'user', 'product'
| from django.db import models
from django.contrib.auth.models import User
from django.core.validators import MaxValueValidator, MinValueValidator
class Person(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE, related_name='person')
age = models.PositiveSmallIntegerField()
bio = models.CharField(max_length=255)
class Brand(models.Model):
name = models.CharField(max_length=255)
coverImage = models.CharField(max_length=360)
logo = models.CharField(max_length=360)
class Product(models.Model):
title = models.CharField(max_length=32)
description = models.TextField(max_length=360)
price = models.IntegerField()
image = models.CharField(max_length=255, null=True)
brand = models.ForeignKey(Brand, on_delete=models.CASCADE)
user = models.ForeignKey(User, null=True, on_delete=models.SET_NULL)
def no_of_ratings(self):
ratings = Rating.objects.filter(product=self)
return len(ratings)
def avg_rating(self):
sum = 0
ratings = Rating.objects.filter(product=self)
for rating in ratings:
sum += rating.stars
if len(ratings) > 0:
return sum / len(ratings)
else:
return 0
class Rating(models.Model):
product = models.ForeignKey(Product, on_delete=models.CASCADE)
user = models.ForeignKey(User, on_delete=models.CASCADE)
stars = models.IntegerField(validators=[MinValueValidator(1), MaxValueValidator(5)])
class Meta:
unique_together = (('user', 'product'))
index_together = (('user', 'product'))
| [
6,
7,
9,
10,
12
] |
2,040 | b90fb1e657d4c7e186a7b889eee586527bec4413 | <mask token>
| <mask token>
class Migration(migrations.Migration):
<mask token>
<mask token>
| <mask token>
class Migration(migrations.Migration):
dependencies = [('crm', '0003_auto_20190802_2211')]
operations = [migrations.AlterModelOptions(name='customerinfo', options
={'verbose_name': '客户信息', 'verbose_name_plural': '客户信息'})]
| from django.db import migrations
class Migration(migrations.Migration):
dependencies = [('crm', '0003_auto_20190802_2211')]
operations = [migrations.AlterModelOptions(name='customerinfo', options
={'verbose_name': '客户信息', 'verbose_name_plural': '客户信息'})]
| # Generated by Django 2.1.5 on 2019-08-03 23:15
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('crm', '0003_auto_20190802_2211'),
]
operations = [
migrations.AlterModelOptions(
name='customerinfo',
options={'verbose_name': '客户信息', 'verbose_name_plural': '客户信息'},
),
]
| [
0,
1,
2,
3,
4
] |
2,041 | b0062dde448c450131f578a2afe130ca663f0902 | <mask token>
| <mask token>
def eval_loop():
line = input('Please enter a sting')
while True:
if line == 'done':
break
else:
output = eval(line)
print(output)
line = input('Please enter a sting')
<mask token>
| <mask token>
def eval_loop():
line = input('Please enter a sting')
while True:
if line == 'done':
break
else:
output = eval(line)
print(output)
line = input('Please enter a sting')
eval_loop()
| from math import *
def eval_loop():
line = input('Please enter a sting')
while True:
if line == 'done':
break
else:
output = eval(line)
print(output)
line = input('Please enter a sting')
eval_loop()
| null | [
0,
1,
2,
3
] |
2,042 | c2201a281ccd0833b0d7d2219d97ce3175fb012b | <mask token>
| <mask token>
for num in numbers_ascii:
numbers_total += num
<mask token>
for i in numbers_total:
i = int(i)
cool_threshold *= i
print(f'Cool threshold: {cool_threshold}')
<mask token>
for j in valid_emojis:
sum_ch = 0
for ch in j:
if ch == '*' or ch == ':':
continue
sum_ch += ord(ch)
if sum_ch > cool_threshold:
cool_emoji.append(j)
print(f'{len(valid_emojis)} emojis found in the text. The cool ones are:')
print(*cool_emoji, sep='\n')
| <mask token>
pattern1 = '[:]{2}[A-Z][a-z]{2,}[:]{2}|[\\*]{2}[a-zA-Z]{3,}[\\*]{2}'
pattern2 = '([0-9]+)'
data = input()
valid_emojis = re.findall(pattern1, data)
numbers_ascii = re.findall(pattern2, data)
numbers_total = ''
for num in numbers_ascii:
numbers_total += num
cool_threshold = 1
for i in numbers_total:
i = int(i)
cool_threshold *= i
print(f'Cool threshold: {cool_threshold}')
cool_emoji = []
for j in valid_emojis:
sum_ch = 0
for ch in j:
if ch == '*' or ch == ':':
continue
sum_ch += ord(ch)
if sum_ch > cool_threshold:
cool_emoji.append(j)
print(f'{len(valid_emojis)} emojis found in the text. The cool ones are:')
print(*cool_emoji, sep='\n')
| import re
pattern1 = '[:]{2}[A-Z][a-z]{2,}[:]{2}|[\\*]{2}[a-zA-Z]{3,}[\\*]{2}'
pattern2 = '([0-9]+)'
data = input()
valid_emojis = re.findall(pattern1, data)
numbers_ascii = re.findall(pattern2, data)
numbers_total = ''
for num in numbers_ascii:
numbers_total += num
cool_threshold = 1
for i in numbers_total:
i = int(i)
cool_threshold *= i
print(f'Cool threshold: {cool_threshold}')
cool_emoji = []
for j in valid_emojis:
sum_ch = 0
for ch in j:
if ch == '*' or ch == ':':
continue
sum_ch += ord(ch)
if sum_ch > cool_threshold:
cool_emoji.append(j)
print(f'{len(valid_emojis)} emojis found in the text. The cool ones are:')
print(*cool_emoji, sep='\n')
| import re
pattern1 = r"[:]{2}[A-Z][a-z]{2,}[:]{2}|[\*]{2}[a-zA-Z]{3,}[\*]{2}"
pattern2 = r"([0-9]+)"
data = input()
valid_emojis = re.findall(pattern1, data)
numbers_ascii = re.findall(pattern2, data)
numbers_total = ""
for num in numbers_ascii:
numbers_total += num
cool_threshold = 1
for i in numbers_total:
i = int(i)
cool_threshold *= i
print(f"Cool threshold: {cool_threshold}")
cool_emoji = []
for j in valid_emojis:
sum_ch = 0
for ch in j:
if ch == "*" or ch == ":":
continue
sum_ch += ord(ch)
if sum_ch > cool_threshold:
cool_emoji.append(j)
print(f"{len(valid_emojis)} emojis found in the text. The cool ones are:")
print(*cool_emoji,sep='\n')
| [
0,
1,
2,
3,
4
] |
2,043 | 020a41e7d3cc3f5adf3a38a6852dac6037595372 | <mask token>
| <mask token>
if no == rev:
print(f'{no}--->{rev} Input is a palindrome')
else:
print(f'{no}--->{rev} Input is not a palindrome')
| no = int(input('Enter a number: '))
no = str(no)
rev = no[::-1]
if no == rev:
print(f'{no}--->{rev} Input is a palindrome')
else:
print(f'{no}--->{rev} Input is not a palindrome')
| no = int(input("Enter a number: "))
no = str(no)
rev = no[::-1]
if no==rev:
print(f"{no}--->{rev} Input is a palindrome")
else:
print(f"{no}--->{rev} Input is not a palindrome")
| null | [
0,
1,
2,
3
] |
2,044 | 1338d6578a94338c6e75acc025ddddd14097ee10 | #!/usr/bin python
import socket
import json
import threading
import sys
from db_util import DBUtil
from cryptoLib import AesCtr,Hmac
class Client(threading.Thread):
def __init__(self, (client_conn, client_addr), sema):
threading.Thread.__init__(self)
self.client_conn = client_conn
self.client_addr = client_addr
self.size = 4096
self.len_of_mac = 12
self.sema = sema
def run(self):
while True:
dataRaw = None
try:
dataRaw = self.client_conn.recv(self.size)
iv,dataEnc,dataHmac=dataRaw.split("nonce")
dataAuth=verHmac(dataEnc,dataHmac)
if not dataAuth:
continue
else:
dataChecked=decrypt(dataEnc,iv)
except socket.error, e:
print(e.message)
if dataRaw is not None:
try:
data = json.loads(dataChecked)
print("Received : " + str(data))
dbutil = DBUtil()
self.sema.acquire()
dbutil.update_database(data)
self.sema.release()
except ValueError:
continue
self.client_conn.close()
break
def verHmac(dataHmac,dataEnc):
hmObj1=Hmac(dataEnc)
l=hmObj1.verifyHmac(dataHmac)
return l
def decrypt(dataEnc,iv):
e2=AesCtr()
unEnc=e2.decryptData(enc,iv)
class Receiver:
def __init__(self,port):
self.host ="127.0.0.1"
#why not "127.0.0.1"
self.port = port
self.threads = list()
self.udp_sock = None
self.semaphore = threading.Semaphore(1)
def get_ip_address(self):
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(("8.8.8.8", 80))
return s.getsockname()[0]
def create_socket(self):
try:
self.udp_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self.udp_sock.bind((self.host, self.port))
#self.udp_sock.listen(2)
except socket.error:
if self.udp_sock:
self.udp_sock.close()
print("Failure to open socket")
sys.exit(1)
def run(self):
self.create_socket()
while True:
client = Client(self.udp_sock.accept(), self.semaphore)
client.start()
self.threads.append(client)
def main():
receiver = Receiver(49999)
receiver.run()
if __name__ == '__main__':
main()
| null | null | null | null | [
0
] |
2,045 | bdf3cb1830021b10d6c8966b3341fd9297d9a371 | <mask token>
| <mask token>
[1.5780628845471506e-10, -1.411490597458207e-12, -2.483949940281473e-13,
5.026488748046414e-11, -1.6612576871621329e-10, -1.6989844545344268e-15,
8.109443782655016e-16, 2.404048022255995e-05, -1.9859378185800262e-06,
-1.6176901999289427e-08, 9.489903548622118e-10, 102704594939.3429,
145011267381.10236]
[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13,
4.6265959327559024e-11, -1.669670220497726e-10, -1.803564324024427e-15,
9.085513864943156e-16, 2.3963751617497686e-05, -1.9517021060346726e-06,
-1.7031696163247858e-08, 9.514461873186105e-10, 165879895673.90985,
148817892429.6303]
[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13,
4.6265959327559024e-11, -1.669670220497726e-10, -1.7924226413310876e-15,
9.085513864943156e-16, 2.3963751617497686e-05, -1.9517021060346726e-06,
-1.68878771600575e-08, 9.514461873186105e-10, 117267023779.58536,
138194745977.8172]
[6.483959591091273e-10, -1.5516831882387681e-12, -2.490649104258458e-13,
5.026488748046414e-11, -1.669670220497726e-10, -1.6989844545344268e-15,
8.109443782655016e-16, 2.3963751617497686e-05, -1.9859378185800262e-06,
-1.6176901999289427e-08, 9.514461873186105e-10, 81279986793.6045,
148499957167.59894]
[6.525636151737385e-10, -1.3197261044307544e-12, -2.4458923117817936e-13,
4.6265959327559024e-11, -1.6585443429963996e-10, -1.802849923078712e-15,
9.085513864943156e-16, 2.3963751617497686e-05, -1.9517021060346726e-06,
-1.68878771600575e-08, 9.514461873186105e-10, 121168243931.69568,
138376625633.08905]
[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13,
4.59768924730343e-11, -1.6588127033784183e-10, -1.7924226413310876e-15,
9.085513864943156e-16, 2.3963751617497686e-05, -1.9859378185800262e-06,
-1.6176901999289427e-08, 9.503282761551985e-10, 127284942067.54468,
147143586736.12967]
[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13,
4.6265959327559024e-11, -1.669670220497726e-10, -1.803564324024427e-15,
8.4683341745183045e-16, 2.3963751617497686e-05, -1.9517021060346726e-06,
-1.7031696163247858e-08, 9.514461873186105e-10, 165879895673.90985,
148817892429.6303]
[6.483959591091273e-10, -1.5516831882387681e-12, -2.477506624442777e-13,
5.026488748046414e-11, -1.669670220497726e-10, -1.7924226413310876e-15,
8.070333012129768e-16, 2.4138485475672502e-05, -1.9859378185800262e-06,
-1.6108027319186075e-08, 9.514461873186105e-10, 78167992157.7952,
149819556305.94864]
[2.8389500911155237e-10, -1.3179669217824132e-12, -2.1290409882195637e-13,
5.0376537605765665e-11, -1.7763084077799175e-10, -
1.8081388431942655e-15, 8.940150894056582e-16, 2.501288034169883e-05, -
2.04721003e-06, -1.5842532923181598e-08, 9.632771875757591e-10,
108694336300.90585, 154375559012.27695]
[3.603083193105678e-11, -1.3197261044307544e-12, -2.213785963757499e-13,
4.581086934703742e-11, -1.6681614728164575e-10, -1.803564324024427e-15,
8.4683341745183045e-16, 2.4065016435368993e-05, -2.0711260096490455e-06,
-1.7031696163247858e-08, 1.0052651438176042e-09, 98921398930.67514,
195080915978.15582]
[-2.0926038768787875e-10, -1.4706748741606338e-12, -2.3988654320236774e-13,
4.877026722101481e-11, -1.4519789238682426e-10, -1.8284483886533772e-15,
8.688144408462996e-16, 2.7398930354457147e-05, -1.8015495121292713e-06,
-1.818410294118833e-08, 8.90965422552221e-10, 100727388654.51337,
143318140783.98648]
[-2.0926038768787875e-10, -1.4706748741606338e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.450370910345386e-10, -1.9257301298903336e-15,
8.688144408462996e-16, 2.7370809361932293e-05, -1.8015495121292713e-06,
-1.818410294118833e-08, 8.935114691513575e-10, 112772825510.86789,
160453198244.84198]
[-8.304227478096081e-10, -1.500986356346536e-12, -1.9531413192683389e-13,
4.764041880667976e-11, -1.8918518378579712e-10, -1.9257301298903336e-15,
8.688144408462996e-16, 2.7122228639393258e-05, -1.8099079507631247e-06,
-1.8203397437532012e-08, 8.935114691513575e-10, 177535436392.6114,
109895891048.79645]
[-2.0926038768787875e-10, -1.6406892521440393e-12, -1.9531413192683389e-13,
4.85603371945204e-11, -1.450370910345386e-10, -1.9257301298903336e-15,
8.688144408462996e-16, 2.7370809361932293e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 8.935114691513575e-10, 150364957402.63327,
122880053749.32047]
[-8.223802918909379e-10, -1.4625176901480844e-12, -2.703868659848318e-13,
4.852404641399239e-11, -1.896863627503491e-10, -1.9257301298903336e-15,
8.688144408462996e-16, 2.697391208672331e-05, -1.7223534426462784e-06,
-1.7212440323693525e-08, 8.377481199786938e-10, 199237170018.58218,
130994741061.18477]
[-2.1118416643089627e-10, -1.459747004615292e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4471230416768517e-10, -1.9257301298903336e-15,
8.688144408462996e-16, 2.7267797101210102e-05, -1.8015495121292713e-06,
-1.818410294118833e-08, 8.935114691513575e-10, 120611068648.22205,
148716985588.15564]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,
8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.081976758127089e-10, 190052435274.9098,
101545825010.15762]
[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4446129047664535e-10, -1.8210829282495652e-15,
8.731899868495941e-16, 2.4857867004975476e-05, -1.8015495121292713e-06,
-1.8287117187317536e-08, 9.081976758127089e-10, 195239394048.3779,
101879284463.33914]
[-8.372413642600907e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,
8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.8287117187317536e-08, 9.087619653117874e-10, 178582869424.88885,
102270797763.39908]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.937673308636816e-13,
4.852404641399239e-11, -1.432701673757514e-10, -1.8210829282495652e-15,
8.765174154706532e-16, 2.4703687041471573e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 171732970643.1874,
106305215455.77405]
[-8.304227478096081e-10, -1.500986356346536e-12, -1.9531413192683389e-13,
4.7704075824842225e-11, -1.8975666267494283e-10, -
1.9099300746589145e-15, 8.757096667187756e-16, 2.7122228639393258e-05,
-1.809239966469619e-06, -1.8203397437532012e-08, 8.935114691513575e-10,
166731944707.48343, 109962566902.69849]
[-2.0926038768787875e-10, -1.3235354562894133e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.5027518840822802e-10, -1.9355556139972827e-15,
8.69779310515605e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -
1.830053261436748e-08, 9.113315958572542e-10, 198705325524.15018,
111850971687.16727]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.858844276736905e-11, -1.5027518840822802e-10, -1.9257301298903336e-15,
8.765174154706532e-16, 2.507247127369048e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.134614417430693e-10, 152877011534.3794,
128488226222.4665]
[-8.325113652893972e-10, -1.647013760811586e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.8226533446456543e-15,
8.718221314640016e-16, 2.471871023322042e-05, -1.788813296914756e-06, -
1.836034443165441e-08, 9.148927620445716e-10, 115664967416.85544,
172987399752.44284]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,
8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.035879148460716e-10, 195862055252.448,
98829512345.71223]
[-8.372802930516975e-10, -1.647013760811586e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15,
8.765346456450067e-16, 2.4957985197946978e-05, -1.799557982850986e-06,
-1.836034443165441e-08, 9.081976758127089e-10, 191606485390.66824,
100937635343.36494]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15,
8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 197397120635.11142,
101220474756.5564]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.924272863609467e-13,
4.852404641399239e-11, -1.4730851235460287e-10, -1.8195538935082505e-15,
8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.081976758127089e-10, 189380748451.24603,
101440046940.62292]
[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4663924630161214e-10, -1.815921924023075e-15,
8.688144408462996e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.8287117187317536e-08, 9.081976758127089e-10, 179897941081.52283,
101479475091.5385]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,
8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.081976758127089e-10, 186125019263.05353,
101522685052.87083]
[-8.372413642600907e-10, -1.647013760811586e-12, -1.9531413192683389e-13,
4.826770959894538e-11, -1.4675478300173032e-10, -1.815921924023075e-15,
8.675713932751666e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.8287117187317536e-08, 9.087619653117874e-10, 176424094355.21158,
102059630396.96977]
[-8.32774857282967e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.475667375214216e-10, -1.8210829282495652e-15,
8.765174154706532e-16, 2.4703687041471573e-05, -1.7921694947468313e-06,
-1.836034443165441e-08, 9.080472327376693e-10, 190619161162.84558,
102134941196.42899]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13,
4.835930442286039e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,
8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.8287117187317536e-08, 9.087619653117874e-10, 178582869424.89273,
102270797763.3992]
[-8.372413642600907e-10, -1.3359785407261977e-12, -1.9482957217087468e-13,
4.831070029448083e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,
8.688144408462996e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.8287117187317536e-08, 9.087619653117874e-10, 178582869424.89435,
102270797763.39929]
[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4446129047664535e-10, -1.8304219886094965e-15,
8.765174154706532e-16, 2.4857867004975476e-05, -1.8015495121292713e-06,
-1.8287117187317536e-08, 9.087619653117874e-10, 191644867011.30374,
102518032445.5969]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13,
4.82400894161232e-11, -1.4446129047664535e-10, -1.8228595048374295e-15,
8.751158883884222e-16, 2.506841119647095e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.081976758127089e-10, 172947032775.99432,
102577021916.3392]
[-2.103367158359051e-10, -1.3359785407261977e-12, -1.9376482536341035e-13,
4.852404641399239e-11, -1.432701673757514e-10, -1.8210829282495652e-15,
8.765174154706532e-16, 2.4703687041471573e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 171732970643.1874,
106305215455.77405]
[-8.372413642600907e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,
8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.8161784527844478e-08, 9.087619653117874e-10, 144963603428.97382,
112061347287.60056]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,
8.765174154706532e-16, 2.5026084747023036e-05, -1.7900208911755532e-06,
-1.830053261436748e-08, 9.087619653117874e-10, 125853468889.92097,
136457449593.06062]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.937673308636816e-13,
4.852404641399239e-11, -1.432701673757514e-10, -1.8210829282495652e-15,
8.765174154706532e-16, 2.4703687041471573e-05, -1.776082515662521e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 126137991779.33096,
160562679389.67618]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,
8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.035879148460716e-10, 195862055252.448,
98829512345.71223]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15,
8.763652695826297e-16, 2.4957985197946978e-05, -1.799557982850986e-06,
-1.836034443165441e-08, 9.081976758127089e-10, 186222924740.70007,
100125948657.42978]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13,
4.855683396544643e-11, -1.4675478300173032e-10, -1.815921924023075e-15,
8.83613368865103e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.081976758127089e-10, 183895104728.34744,
101215117638.35565]
[-2.0926038768787875e-10, -1.3382357152930057e-12, -1.9531413192683389e-13,
4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15,
8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 197397120635.11142,
101220474756.5564]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.432701673757514e-10, -1.8130493256774034e-15,
8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 197397120635.11664,
101220474756.55742]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.924272863609467e-13,
4.852404641399239e-11, -1.476291648179518e-10, -1.8195538935082505e-15,
8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.081976758127089e-10, 189380748451.4617,
101440046940.6675]
[-2.0969974314689316e-10, -1.647013760811586e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4663924630161214e-10, -1.815921924023075e-15,
8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.8287117187317536e-08, 9.081976758127089e-10, 179897941081.52283,
101479475091.5385]
[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13,
4.8730627003901226e-11, -1.4675478300173032e-10, -1.815921924023075e-15,
8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.8287117187317536e-08, 9.081976758127089e-10, 179897941081.58997,
101479475091.5439]
[-2.0926038768787875e-10, -1.6370065196284276e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4663924630161214e-10, -1.8210829282495652e-15,
8.725909439109588e-16, 2.5149586855224063e-05, -1.8040587516026417e-06,
-1.830053261436748e-08, 9.081976758127089e-10, 174674218067.03134,
101707557509.25955]
[-2.0780704759852712e-10, -1.3359785407261977e-12, -1.928247479392491e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15,
8.815489945689696e-16, 2.492800478197597e-05, -1.799557982850986e-06, -
1.830053261436748e-08, 9.081976758127089e-10, 177564736843.2668,
101910116331.42278]
[-2.0926038768787875e-10, -1.3481496678499343e-12, -1.9612804716494087e-13,
4.869384519400452e-11, -1.4625361988654996e-10, -1.816149350524488e-15,
8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.087619653117874e-10, 176677319245.07892,
101942928295.47075]
[-8.324503936172223e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4535167828811644e-10, -1.799249889019179e-15,
8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.063398319687734e-10, 161710635101.41095,
104790698646.6004]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.8168585276282465e-11, -1.4675478300173032e-10, -
1.8210829282495652e-15, 8.725909439109588e-16, 2.4957985197946978e-05,
-1.8015495121292713e-06, -1.830053261436748e-08, 9.102513898455556e-10,
160649925757.17908, 106424978687.80653]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.869384519400452e-11, -1.4675478300173032e-10, -1.799249889019179e-15,
8.765174154706532e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.067222192179334e-10, 157509126624.7564,
106648081137.30634]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.924272863609467e-13,
4.87567764690249e-11, -1.473869541008466e-10, -1.8210829282495652e-15,
8.797810044472039e-16, 2.5128697145423343e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.089655956213592e-10, 156027014786.34595,
106784848298.00577]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.8130493256774034e-15,
8.758120054489215e-16, 2.489589641570383e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 9.120599461707459e-10, 159857940983.01962,
106918161793.97298]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9347415380665696e-13,
4.85631967683728e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,
8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.836417410231251e-08, 9.134390375783151e-10, 142628527511.76648,
117274357359.96004]
[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9567576322418712e-13,
4.852404641399239e-11, -1.4663924630161214e-10, -1.815921924023075e-15,
8.688144408462996e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.8287117187317536e-08, 9.120365536291957e-10, 136801158565.52109,
118996909122.33968]
[-2.0926038768787875e-10, -1.3468298773490566e-12, -1.924272863609467e-13,
4.852404641399239e-11, -1.4730851235460287e-10, -1.8210829282495652e-15,
8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.13148553316506e-10, 131221998343.07083,
125656067768.88814]
[-8.372802930516975e-10, -1.6610460978653825e-12, -1.9391155389121011e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,
8.765346456450067e-16, 2.500200335107093e-05, -1.777109321965829e-06, -
1.836034443165441e-08, 9.081976758127089e-10, 107442969837.9951,
191438895729.71088]
[-8.373514643167848e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,
8.705169785374419e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.035879148460716e-10, 195862055252.448,
98829512345.71223]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4659424506650604e-10, -1.7864100157215748e-15,
8.706272486016714e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 185690352687.11697,
99223644222.007]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15,
8.839563844754409e-16, 2.4957985197946978e-05, -1.799557982850986e-06,
-1.836034443165441e-08, 9.081976758127089e-10, 186222924740.70007,
100125948657.42978]
[-8.29844666406642e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.849645416672899e-11, -1.4675478300173032e-10, -1.803543054789903e-15,
8.714032924475303e-16, 2.492800478197597e-05, -1.799557982850986e-06, -
1.836034443165441e-08, 9.081976758127089e-10, 190148608462.3534,
100180028793.61896]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15,
8.765174154706532e-16, 2.5177177276929545e-05, -1.7997194394724915e-06,
-1.850709631603352e-08, 9.087619653117874e-10, 199924589208.46686,
100223589650.82378]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9654069739659012e-13,
4.855683396544643e-11, -1.461461940090847e-10, -1.803543054789903e-15,
8.763652695826297e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.081976758127089e-10, 178626169889.2221,
100558408593.70113]
[-8.332310924150067e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.8877585360256924e-11, -1.4675478300173032e-10, -
1.8130493256774034e-15, 8.763652695826297e-16, 2.4957985197946978e-05,
-1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10,
193351738763.71564, 100949387586.23102]
[-8.372802930516975e-10, -1.343853363763315e-12, -1.9192642832280474e-13,
4.852404641399239e-11, -1.446871529700577e-10, -1.8130493256774034e-15,
8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 197397120636.1133,
101220474756.86967]
[-2.081071620571536e-10, -1.3430194729908366e-12, -1.9531413192683389e-13,
4.8687777307168814e-11, -1.432701673757514e-10, -1.8195538935082505e-15,
8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.081976758127089e-10, 189380748448.52612,
101440046940.05927]
[-8.372802930516975e-10, -1.3382357152930057e-12, -1.9531413192683389e-13,
4.869384519400452e-11, -1.432701673757514e-10, -1.815921924023075e-15,
8.834544584685654e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 198690577754.9655,
101467426817.57397]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.924272863609467e-13,
4.8327983670281894e-11, -1.4675478300173032e-10, -
1.8258864221284576e-15, 8.83613368865103e-16, 2.492800478197597e-05, -
1.8015495121292713e-06, -1.8304452912365864e-08, 9.081976758127089e-10,
193392923341.53983, 101900620617.14302]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9719420123154376e-13,
4.861133464689211e-11, -1.483232636118454e-10, -1.8195538935082505e-15,
8.765174154706532e-16, 2.492800478197597e-05, -1.7966453439138136e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 174954502194.04602,
103131734300.077]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.814072294943091e-11, -1.437983579446461e-10, -1.8130493256774034e-15,
8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 9.107645094765291e-10, 171249412831.2997,
103180541968.40872]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.476291648179518e-10, -1.7906363569860738e-15,
8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.8221372696029056e-08, 9.081976758127089e-10, 154981149327.29538,
103805616436.34537]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.855683396544643e-11, -1.432701673757514e-10, -1.825643030416898e-15,
8.83613368865103e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -
1.81828896229741e-08, 9.081976758127089e-10, 158250536108.31226,
106843736334.12831]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9439448414369486e-13,
4.855683396544643e-11, -1.4675478300173032e-10, -1.8130493256774034e-15,
8.765174154706532e-16, 2.5187119035976227e-05, -1.797858272312416e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 148433419780.93826,
110030788135.34956]
[-8.372802930516975e-10, -1.3382357152930057e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.432701673757514e-10, -1.799249889019179e-15,
8.765174154706532e-16, 2.4802576523291093e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 152744383578.88885,
111006224451.55664]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15,
8.83613368865103e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.8140174569754755e-08, 9.081976758127089e-10, 140660582328.68314,
113087422800.04585]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15,
8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.081976758127089e-10, 148227079557.4723,
115101067854.69138]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,
8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,
-1.830053261436748e-08, 9.081976758127089e-10, 129686832886.01216,
126984206927.84627]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.869384519400452e-11, -1.4592095499147362e-10, -1.7864100157215748e-15,
8.706272486016714e-16, 2.5177177276929545e-05, -1.7997194394724915e-06,
-1.850709631603352e-08, 9.087619653117874e-10, 188127979624.47858,
98138013390.26245]
[-8.373514643167848e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.8139505305916955e-11, -1.4675478300173032e-10, -1.799249889019179e-15,
8.783887938075847e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.035879148460716e-10, 195862055252.45816,
98829512345.71414]
[-8.379785124926609e-10, -1.3292316984383345e-12, -1.955394873972143e-13,
4.852404641399239e-11, -1.4779126633130978e-10, -1.799249889019179e-15,
8.775397316555329e-16, 2.5049204386853816e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.035879148460716e-10, 183972070969.05157,
98891303611.42876]
[-8.373750609204521e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.869384519400452e-11, -1.4659424506650604e-10, -1.7864100157215748e-15,
8.706272486016714e-16, 2.492800478197597e-05, -1.7997194394724915e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 176341783374.723,
99638222233.03885]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4696825367906723e-10, -1.799249889019179e-15,
8.705169785374419e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 187303786818.71506,
99962477826.90034]
[-8.29844666406642e-10, -1.3259182588069894e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15,
8.839563844754409e-16, 2.492800478197597e-05, -1.799557982850986e-06, -
1.836034443165441e-08, 9.081976758127089e-10, 190148608462.3526,
100180028793.6191]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15,
8.839563844754409e-16, 2.4907384876305387e-05, -1.799557982850986e-06,
-1.836034443165441e-08, 9.081976758127089e-10, 192885903228.52237,
100290100926.3771]
[-8.372802930516975e-10, -1.340114474894997e-12, -1.9475632661250835e-13,
4.852404641399239e-11, -1.4659424506650604e-10, -1.803543054789903e-15,
8.839563844754409e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 193159834117.98853,
100447140164.3877]
[-8.45347775440883e-10, -1.3359785407261977e-12, -1.9409478257397567e-13,
4.852404641399239e-11, -1.463585775827913e-10, -1.812045689500589e-15,
8.706272486016714e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 192907161589.0385,
100872818268.9527]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.8130493256774034e-15,
8.705169785374419e-16, 2.4957985197946978e-05, -1.7997194394724915e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 183710210581.81177,
101076246798.6337]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15,
8.765174154706532e-16, 2.542150809952725e-05, -1.7997194394724915e-06,
-1.850709631603352e-08, 9.087619653117874e-10, 168715457724.7375,
101683114493.3993]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.849645416672899e-11, -1.432701673757514e-10, -1.803543054789903e-15,
8.765174154706532e-16, 2.5177177276929545e-05, -1.7997194394724915e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 153789626574.96255,
105699410466.83022]
[-8.372802930516975e-10, -1.3398025228100945e-12, -1.9531413192683389e-13,
4.855683396544643e-11, -1.4675478300173032e-10, -1.803543054789903e-15,
8.714032924475303e-16, 2.4957985197946978e-05, -1.793948394990656e-06,
-1.836034443165441e-08, 9.081976758127089e-10, 159560429502.34207,
105861289429.36061]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.869384519400452e-11, -1.432701673757514e-10, -1.7864100157215748e-15,
8.765174154706532e-16, 2.5177177276929545e-05, -1.7997194394724915e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 147461834890.53723,
106068644665.40553]
[-8.372802930516975e-10, -1.3292316984383345e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4760843266911815e-10, -1.7864100157215748e-15,
8.706272486016714e-16, 2.492800478197597e-05, -1.7933608637070708e-06,
-1.836034443165441e-08, 9.087979750822277e-10, 147793960453.4741,
109638154986.2024]
[-8.29844666406642e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.8434260838579935e-11, -1.4561659265574012e-10, -1.819718397269023e-15,
8.775397316555329e-16, 2.4948775411850268e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.081976758127089e-10, 150492287670.62976,
114344342719.97507]
[-8.406587076953522e-10, -1.318355348076889e-12, -1.9519777560623135e-13,
4.855683396544643e-11, -1.4760843266911815e-10, -1.815921924023075e-15,
8.839563844754409e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.081976758127089e-10, 148227079557.78632,
115101067854.31332]
[-8.389236670603421e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,
8.717072130867646e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 137339476236.27339,
120797794814.05704]
[-8.373514643167848e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,
8.705169785374419e-16, 2.492800478197597e-05, -1.786297491730252e-06, -
1.836034443165441e-08, 9.087619653117874e-10, 128365631923.39072,
133721716481.47603]
[-8.361552586353477e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,
8.705169785374419e-16, 2.483403849637781e-05, -1.783565701728919e-06, -
1.836034443165441e-08, 9.095300241628919e-10, 123047993752.2489,
147005409641.27127]
[-9.129396902499863e-10, -1.290047843436073e-12, -2.702634930634393e-13,
4.58556551164694e-11, -1.8724359625458014e-10, -2.1792166675464865e-15,
9.365717147446797e-16, 1.8994698205972217e-05, -1.8050933870374392e-06,
-1.3360134446642706e-08, 8.693561802236366e-10, 169675879824.58978,
156722470654.13324]
[6.303262263534727e-10, -1.2096663849982051e-12, -2.5988950272728827e-13,
4.701662665204773e-11, -1.4934765549498044e-10, -2.0495920936053975e-15,
8.502785255135087e-16, 1.8814769194136882e-05, -1.8050933870374392e-06,
-1.3247752346374906e-08, 8.693561802236366e-10, 108072398467.48868,
167972224844.19583]
[6.303262263534727e-10, -1.2096663849982051e-12, -2.5988950272728827e-13,
4.701662665204773e-11, -1.4986345441105813e-10, -2.0495920936053975e-15,
8.502785255135087e-16, 1.8814769194136882e-05, -1.8050933870374392e-06,
-1.3247752346374906e-08, 8.693561802236366e-10, 108072398467.75635,
167972224843.92523]
[-9.212545260772544e-10, -1.290047843436073e-12, -1.8356995493902235e-13,
4.58556551164694e-11, -1.8724359625458014e-10, -2.1913589342035502e-15,
9.365717147446797e-16, 1.9540146753875297e-05, -1.8050933870374392e-06,
-1.3360134446642706e-08, 8.693561802236366e-10, 117723326371.03189,
192873830899.82352]
[6.303262263534727e-10, -1.290047843436073e-12, -2.5988950272728827e-13,
4.58556551164694e-11, -1.4986345441105813e-10, -2.1913589342035502e-15,
8.502785255135087e-16, 1.8814769194136882e-05, -1.8050933870374392e-06,
-1.3247752346374906e-08, 8.693561802236366e-10, 164354464752.25952,
160840990423.46024]
[6.354744988103506e-10, -1.2096663849982051e-12, -1.830526663998671e-13,
4.6589669053151376e-11, -1.4986345441105813e-10, -
2.0495920936053975e-15, 8.502785255135087e-16, 1.894858193847651e-05, -
1.8050933870374392e-06, -1.3247752346374906e-08, 8.693561802236366e-10,
96467208837.94556, 179586543004.98117]
[-9.212545260772544e-10, -1.290047843436073e-12, -1.8356995493902235e-13,
4.58556551164694e-11, -1.8580228849463816e-10, -2.1913589342035502e-15,
9.365717147446797e-16, 1.9540146753875297e-05, -1.8218396850604304e-06,
-1.3360134446642706e-08, 8.759216763039946e-10, 117765020064.66293,
187118262382.8758]
[-9.129396902499863e-10, -1.3004166005044262e-12, -1.8356995493902235e-13,
4.58556551164694e-11, -1.8724359625458014e-10, -2.1913589342035502e-15,
9.365717147446797e-16, 1.962681376929987e-05, -1.8050933870374392e-06,
-1.3418860642065019e-08, 8.693561802236366e-10, 122674650037.46736,
187415567631.77402]
[-9.212545260772544e-10, -1.2799153483071088e-12, -1.8213920664100724e-13,
4.58556551164694e-11, -1.8724359625458014e-10, -2.1913589342035502e-15,
9.365717147446797e-16, 1.9540146753875297e-05, -1.8050933870374392e-06,
-1.3360134446642706e-08, 8.693561802236366e-10, 117723326371.03189,
192873830899.82352]
[-9.212545260772544e-10, -1.290047843436073e-12, -1.8356995493902235e-13,
4.6154548476823616e-11, -1.8724359625458014e-10, -
2.1913589342035502e-15, 9.358479354640953e-16, 1.9540146753875297e-05,
-1.8050933870374392e-06, -1.3360134446642706e-08, 8.693561802236366e-10,
117723326371.02731, 192873830899.82806]
[2.2152115305769157e-10, -1.6907719215642795e-12, -2.5108769063589337e-13,
4.9793760275117476e-11, -2.0780774158604122e-10, -
2.1593626664102876e-15, 8.836470142939426e-16, 2.0200374650352852e-05,
-1.7639524821935923e-06, -1.5013783998899997e-08, 8.77876424822685e-10,
170388218306.66492, 168925348515.4128]
[2.2152115305769157e-10, -1.6907719215642795e-12, -2.1051647732787472e-13,
4.9793760275117476e-11, -2.0780774158604122e-10, -
2.1790706433018085e-15, 8.836470142939426e-16, 2.0343533479720338e-05,
-1.7639524821935923e-06, -1.5091093694835327e-08, 8.771058818345121e-10,
191821821495.1242, 158798904598.69617]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -2.1593626664102876e-15,
8.836470142939426e-16, 2.0217203662255432e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.771058818345121e-10, 177069079234.4985,
163375067226.8736]
[2.213664545134999e-10, -1.2059133330572482e-12, -2.5108769063589337e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -2.1593626664102876e-15,
8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,
-1.508245699810314e-08, 8.771058818345121e-10, 197879714583.27084,
152444791757.7255]
[0.0, -1.223723210207519e-12, -2.1051647732787472e-13,
4.971358693780409e-11, -1.7352085678160897e-10, -2.165433707987142e-15,
7.304553415989529e-16, 2.0047355685146273e-05, -1.7657604268720381e-06,
-1.4977385439375226e-08, 8.771058818345121e-10, 197945074606.02325,
153164597685.87036]
[2.2152115305769157e-10, -1.1984578022968498e-12, -2.5108769063589337e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15,
7.430575474541962e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,
-1.5202351660972107e-08, 8.771058818345121e-10, 111986329581.05826,
155849166742.8801]
[2.2133713135172913e-10, -1.2059133330572482e-12, -2.5107145183244764e-13,
5.011120217163613e-11, -1.724660990140153e-10, -2.1790706433018085e-15,
8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.771058818345121e-10, 187269085984.5673,
161472427331.15216]
[0.0, -1.223723210207519e-12, -2.094909506024221e-13, 5.011120217163613e-11,
-1.7677981323511262e-10, -2.145058695065051e-15, 7.430575474541962e-16,
2.0053347897812537e-05, -1.7639524821935923e-06, -
1.4682044872577598e-08, 8.728626586100963e-10, 152433850624.54852,
175966043507.07343]
[0.0, -1.223723210207519e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -2.1790706433018085e-15,
7.430575474541962e-16, 1.9918519209106862e-05, -1.7685796144533914e-06,
-1.4682044872577598e-08, 8.771058818345121e-10, 153535961138.3572,
184829802626.36642]
[2.2152115305769157e-10, -1.200937983572784e-12, -2.1065990049856794e-13,
5.011120217163613e-11, -1.7420072583381303e-10, -1.8426407940693324e-15,
7.454251311051652e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,
-1.508245699810314e-08, 8.771058818345121e-10, 92670242378.77588,
189416231139.84406]
[0.0, -1.2207456906260254e-12, -2.1065990049856794e-13,
4.9793760275117476e-11, -2.0772853669541976e-10, -
1.8426407940693324e-15, 7.430575474541962e-16, 1.9867416915370552e-05,
-1.7639524821935923e-06, -1.5091093694835327e-08, 8.728626586100963e-10,
160631139543.06137, 122019730569.7476]
[2.2152115305769157e-10, -1.1984578022968498e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7677981323511262e-10, -1.857281675942834e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.5202351660972107e-08, 8.771058818345121e-10, 153487531028.94116,
128597452665.91768]
[0.0, -1.2031098015567e-12, -2.5161591646068603e-13, 5.011120217163613e-11,
-1.7849498396021264e-10, -1.82610373802557e-15, 7.430575474541962e-16,
1.981538293869461e-05, -1.7639524821935923e-06, -1.5202351660972107e-08,
8.771058818345121e-10, 142632578694.80914, 130195065921.46504]
[2.2152115305769157e-10, -1.2003583976149596e-12, -2.5108769063589337e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15,
7.430575474541962e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,
-1.517941226634992e-08, 8.771058818345121e-10, 107861636975.64659,
161449199082.99103]
[0.0, -1.223723210207519e-12, -2.094909506024221e-13, 5.011120217163613e-11,
-1.7677981323511262e-10, -1.857281675942834e-15, 7.430575474541962e-16,
1.981538293869461e-05, -1.769936435419886e-06, -1.4682044872577598e-08,
8.728626586100963e-10, 100156348461.68698, 161778485371.36353]
[0.0, -1.1984578022968498e-12, -2.1065990049856794e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15,
7.430575474541962e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,
-1.5091093694835327e-08, 8.760544278271184e-10, 100072993312.46272,
171303112707.4717]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13,
4.9793760275117476e-11, -1.7352085678160897e-10, -
1.8261648304268637e-15, 8.836470142939426e-16, 2.0343533479720338e-05,
-1.7639524821935923e-06, -1.5202351660972107e-08, 8.771058818345121e-10,
97245352689.07887, 174341101475.58182]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
4.9675085987122204e-11, -1.7558160485557454e-10, -
1.8426407940693324e-15, 8.836470142939426e-16, 2.022642042947946e-05, -
1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10,
92503635735.71886, 182996786041.40976]
[0.0, -1.223723210207519e-12, -2.094909506024221e-13, 5.011120217163613e-11,
-1.7677981323511262e-10, -2.1612081417375267e-15, 7.470344646267989e-16,
2.0053347897812537e-05, -1.7639524821935923e-06, -
1.4645406166689473e-08, 8.730660207999707e-10, 148185335900.70355,
185221791801.95062]
[2.2111462065028517e-10, -1.2207456906260254e-12, -2.1065990049856794e-13,
5.056589741460715e-11, -1.7420072583381303e-10, -1.8426407940693324e-15,
7.454251311051652e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,
-1.508245699810314e-08, 8.771058818345121e-10, 92670242378.76936,
189416231139.85312]
[2.2152115305769157e-10, -1.2207456906260254e-12, -2.1065990049856794e-13,
5.011120217163613e-11, -1.7420072583381303e-10, -1.8276902524925885e-15,
8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,
-1.5091093694835327e-08, 8.771058818345121e-10, 90666406593.2125,
190153350507.14474]
[2.2152115305769157e-10, -1.2049195466583994e-12, -2.1065990049856794e-13,
4.98075339514226e-11, -1.7558160485557454e-10, -1.8426407940693324e-15,
7.454251311051652e-16, 2.0095046248399238e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.771058818345121e-10, 89706134652.28279,
197738317572.1617]
[0.0, -1.2031098015567e-12, -2.1065990049856794e-13, 5.0102593857564815e-11,
-1.7352085678160897e-10, -1.819039898810471e-15, 7.460417812765263e-16,
2.0200374650352852e-05, -1.7758673160173464e-06, -
1.5202351660972107e-08, 8.760544278271184e-10, 160476853944.9334,
119035825863.27417]
[2.2152115305769157e-10, -1.2031098015567e-12, -2.5161591646068603e-13,
4.9793760275117476e-11, -1.7849498396021264e-10, -1.82610373802557e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.5344868185414675e-08, 8.771058818345121e-10, 180743589801.84604,
120144468135.82727]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
4.947687927376915e-11, -1.7558160485557454e-10, -1.8426407940693324e-15,
8.836470142939426e-16, 2.04140411384885e-05, -1.7639524821935923e-06, -
1.5078308038358913e-08, 8.683463468773267e-10, 146622662638.346,
120359956158.03543]
[0.0, -1.1984578022968498e-12, -2.094909506024221e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.857281675942834e-15,
7.430575474541962e-16, 2.0200374650352852e-05, -1.7813149517985466e-06,
-1.5091093694835327e-08, 8.760544278271184e-10, 171477577754.58575,
120995758664.39177]
[2.2152115305769157e-10, -1.1984578022968498e-12, -2.5108769063589337e-13,
4.9967768219433575e-11, -1.7352085678160897e-10, -
1.8426407940693324e-15, 7.430575474541962e-16, 2.0200374650352852e-05,
-1.7639524821935923e-06, -1.5091093694835327e-08, 8.703632209100975e-10,
151029089477.88403, 121221447183.73479]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,
7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,
-1.520980077906525e-08, 8.721578527250325e-10, 139696562348.4149,
123962248783.03809]
[2.233355889138985e-10, -1.2031098015567e-12, -2.5108769063589337e-13,
5.011120217163613e-11, -1.7849498396021264e-10, -1.8426407940693324e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.5202351660972107e-08, 8.771058818345121e-10, 148301377250.4212,
129257349906.46594]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,
7.448076765658434e-16, 2.0200374650352852e-05, -1.7728642137544318e-06,
-1.517941226634992e-08, 8.771058818345121e-10, 131981382341.97574,
129372470770.49553]
[0.0, -1.2031098015567e-12, -2.088572649745598e-13, 5.011120217163613e-11,
-1.7849498396021264e-10, -1.82610373802557e-15, 8.836470142939426e-16,
1.981538293869461e-05, -1.7639524821935923e-06, -1.5202351660972107e-08,
8.771058818345121e-10, 142632578694.80914, 130195065921.46504]
[-5.2595470648843136e-09, -1.2003583976149596e-12, -2.5161591646068603e-13,
5.011120217163613e-11, -1.7461898455625076e-10, -1.8426407940693324e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.517941226634992e-08, 8.771058818345121e-10, 142718091682.67987,
132029509845.4832]
[2.2257852388875064e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
4.9793760275117476e-11, -1.7380412465809723e-10, -1.841021101878205e-15,
8.836470142939426e-16, 2.022642042947946e-05, -1.7639524821935923e-06,
-1.5202351660972107e-08, 8.750599822793858e-10, 126150709659.35735,
137741348069.72827]
[0.0, -1.2344709098355012e-12, -2.090479539659853e-13,
5.011120217163613e-11, -1.7849498396021264e-10, -1.857281675942834e-15,
7.485411998460075e-16, 1.981538293869461e-05, -1.769936435419886e-06, -
1.4682044872577598e-08, 8.711551918674385e-10, 114088676894.18327,
143862344272.2216]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.750599822793858e-10, 119621740814.33159,
143868003797.30536]
[2.2152115305769157e-10, -1.2003583976149596e-12, -2.088572649745598e-13,
4.995108013618423e-11, -1.7207960562590789e-10, -1.8426407940693324e-15,
8.836470142939426e-16, 2.015341505664753e-05, -1.7639524821935923e-06,
-1.5202351660972107e-08, 8.771058818345121e-10, 115848531243.76457,
151496866956.06183]
[7.878840270455085e-09, -1.2071709641632366e-12, -2.088572649745598e-13,
5.022894055850661e-11, -1.7352085678160897e-10, -1.8610445297760222e-15,
7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,
-1.5202351660972107e-08, 8.760544278271184e-10, 113456911424.16617,
154679332976.7693]
[0.0, -1.2031098015567e-12, -2.5161591646068603e-13, 5.011120217163613e-11,
-1.7352085678160897e-10, -1.82610373802557e-15, 7.430575474541962e-16,
1.983133919352831e-05, -1.7639524821935923e-06, -1.500055802123721e-08,
8.760544278271184e-10, 107979663117.77498, 158587944243.3901]
[2.2152115305769157e-10, -1.2003583976149596e-12, -2.5108769063589337e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15,
7.451496753853957e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,
-1.517941226634992e-08, 8.771058818345121e-10, 107861636975.64659,
161449199082.99103]
[2.1977210438689425e-10, -1.2003583976149596e-12, -2.5108769063589337e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15,
8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,
-1.517941226634992e-08, 8.771058818345121e-10, 107861636975.64659,
161449199082.99103]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.099781497267347e-13,
4.9793760275117476e-11, -1.7558160485557454e-10, -
1.8426407940693324e-15, 8.836470142939426e-16, 2.0299458575301996e-05,
-1.756844278469525e-06, -1.5202351660972107e-08, 8.750599822793858e-10,
101036412554.48618, 178952195751.12357]
[0.0, -1.2071709641632366e-12, -2.088572649745598e-13,
4.9793760275117476e-11, -1.7352085678160897e-10, -
1.8426407940693324e-15, 8.836470142939426e-16, 2.0200374650352852e-05,
-1.7587739009571313e-06, -1.5202351660972107e-08, 8.768692858683927e-10,
101115281125.52821, 181312381109.07834]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
4.9675085987122204e-11, -1.7558160485557454e-10, -
1.8426407940693324e-15, 8.836470142939426e-16, 2.022642042947946e-05, -
1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10,
92503635735.71886, 182996786041.40976]
[2.2295275331941093e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
4.9675085987122204e-11, -1.7558160485557454e-10, -
1.8426407940693324e-15, 8.836470142939426e-16, 2.022642042947946e-05, -
1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10,
92503635735.71886, 182996786041.40976]
[0.0, -1.223723210207519e-12, -2.1065990049856794e-13,
5.011120217163613e-11, -1.7707453284878416e-10, -1.866210682668369e-15,
7.430575474541962e-16, 1.9722774245768875e-05, -1.769936435419886e-06,
-1.4682044872577598e-08, 8.760544278271184e-10, 88317753591.74515,
193403737351.61066]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.5161591646068603e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,
7.430575474541962e-16, 2.0343533479720338e-05, -1.7493239251088378e-06,
-1.5085870105283375e-08, 8.701394499644777e-10, 90763281590.1167,
199093039398.6542]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.857281675942834e-15,
7.387655049943961e-16, 1.981538293869461e-05, -1.769936435419886e-06, -
1.4563889985865401e-08, 8.644597543611974e-10, 157634872361.7637,
120593643708.66519]
[2.2257852388875064e-10, -1.2070230966272908e-12, -2.1051647732787472e-13,
5.027931250826744e-11, -1.755220169767042e-10, -1.810973414699955e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.5202351660972107e-08, 8.750599822793858e-10, 159354716917.0895,
121269083493.68436]
[0.0, -1.2031098015567e-12, -2.090479539659853e-13, 5.011120217163613e-11,
-1.7352085678160897e-10, -1.8577367523496564e-15, 7.430575474541962e-16,
1.9814643005749893e-05, -1.7639524821935923e-06, -1.500055802123721e-08,
8.711551918674385e-10, 168378423128.42877, 121439949900.90005]
[2.198369754018213e-10, -1.2071709641632366e-12, -2.088572649745598e-13,
5.011120217163613e-11, -1.7513929529124395e-10, -1.82610373802557e-15,
7.448076765658434e-16, 2.0042195789951223e-05, -1.7728642137544318e-06,
-1.5013783998899997e-08, 8.734593739302048e-10, 147068576327.25705,
122027384226.92]
[2.2257852388875064e-10, -1.2059133330572482e-12, -2.090479539659853e-13,
4.9793760275117476e-11, -1.7849498396021264e-10, -1.841021101878205e-15,
7.556782953802372e-16, 2.022642042947946e-05, -1.769936435419886e-06, -
1.5202351660972107e-08, 8.750599822793858e-10, 149871632956.7388,
122750625888.09634]
[2.2152115305769157e-10, -1.2344709098355012e-12, -2.1013781830316155e-13,
5.011120217163613e-11, -1.7343044399460855e-10, -1.857281675942834e-15,
7.430575474541962e-16, 2.0343113714890682e-05, -1.7639524821935923e-06,
-1.520980077906525e-08, 8.721578527250325e-10, 151082881535.07886,
122935226427.98189]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,
7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,
-1.520980077906525e-08, 8.721578527250325e-10, 139696562348.4149,
123962248783.03809]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7380412465809723e-10, -1.82610373802557e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.735477478457909e-10, 133427418313.38545,
131702579310.68652]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.116126459765591e-13,
5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.517941226634992e-08, 8.771058818345121e-10, 137250169853.3863,
133211383937.09729]
[-9.575357968769427e-09, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7558160485557454e-10, -1.842789515995345e-15,
7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.750599822793858e-10, 123172560507.99263,
143105235055.608]
[2.2282051950271776e-10, -1.2030336482043862e-12, -2.1171136727356646e-13,
5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.750599822793858e-10, 119639757591.69511,
143860615432.91934]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.0388416851351e-11, -1.7478774930028702e-10, -1.82610373802557e-15,
7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.750599822793858e-10, 118202331336.15999,
145092770865.8836]
[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.021867485100539e-11, -1.7558160485557454e-10, -1.82610373802557e-15,
7.503695295044637e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.760544278271184e-10, 110377805870.9487,
155477031697.76462]
[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7281503437685213e-10, -1.82610373802557e-15,
8.836470142939426e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.500055802123721e-08, 8.760544278271184e-10, 107979663117.63412,
158587944243.89005]
[0.0, -1.2031098015567e-12, -2.522559178506789e-13, 5.003845283040925e-11,
-1.7352085678160897e-10, -1.82610373802557e-15, 7.430575474541962e-16,
1.9950498914670327e-05, -1.7639524821935923e-06, -1.500055802123721e-08,
8.760544278271184e-10, 99132279868.34593, 171185572417.85907]
[2.2257852388875064e-10, -1.2031098015567e-12, -2.5161591646068603e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.82610373802557e-15,
8.811799226535086e-16, 2.022642042947946e-05, -1.7639524821935923e-06,
-1.508244156181531e-08, 8.760544278271184e-10, 93130287119.72461,
180430143233.58368]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.088572649745598e-13,
5.011120217163613e-11, -1.7558160485557454e-10, -1.8265258253512156e-15,
7.430575474541962e-16, 2.0240988631290876e-05, -1.7728642137544318e-06,
-1.5013783998899997e-08, 8.784555835692595e-10, 86927194519.4496,
183449646874.34637]
[7.863427642383715e-09, -1.2031098015567e-12, -2.5161591646068603e-13,
4.9793760275117476e-11, -1.7380412465809723e-10, -1.82610373802557e-15,
7.430575474541962e-16, 2.022642042947946e-05, -1.7639524821935923e-06,
-1.500055802123721e-08, 8.750599822793858e-10, 87084714365.5935,
191076754457.2524]
[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7849498396021264e-10, -1.857281675942834e-15,
7.485411998460075e-16, 1.9750639916729973e-05, -1.769936435419886e-06,
-1.5013783998899997e-08, 8.825388912755251e-10, 96474604776.96465,
194275355409.06598]
[0.0, -1.2031098015567e-12, -2.5161591646068603e-13, 4.9793760275117476e-11,
-1.7380412465809723e-10, -1.82610373802557e-15, 7.430575474541962e-16,
2.022642042947946e-05, -1.7639524821935923e-06, -1.503739318330452e-08,
8.760544278271184e-10, 86984982238.58047, 194967876303.00238]
[1.5200576895768509e-09, -1.2059133330572482e-12, -2.0752021923147355e-13,
5.011120217163613e-11, -1.7849498396021264e-10, -1.82610373802557e-15,
7.479116563110691e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.4682044872577598e-08, 8.724478065416361e-10, 82147238279.93182,
198112832281.90573]
[2.223825616669009e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7326944854292794e-10, -1.8261648304268637e-15,
7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,
-1.534155691698868e-08, 8.721578527250325e-10, 175522473614.0067,
115813093887.0164]
[2.2296631466270538e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.0388416851351e-11, -1.7478774930028702e-10, -1.82610373802557e-15,
7.430575474541962e-16, 2.0431066002844864e-05, -1.7780476812466564e-06,
-1.5013783998899997e-08, 8.717160979795123e-10, 146919548917.9041,
118508631814.89664]
[2.2152115305769157e-10, -1.2131115225525171e-12, -2.088572649745598e-13,
5.011120217163613e-11, -1.7478774930028702e-10, -1.82610373802557e-15,
7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.529126273308479e-08, 8.750599822793858e-10, 189141514324.11395,
119478476003.54858]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1171136727356646e-13,
5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.515944456372276e-08, 8.735477478457909e-10, 171393648132.89902,
119746195767.88297]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.0388416851351e-11, -1.7478774930028702e-10, -1.82610373802557e-15,
7.503695295044637e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.680779846505464e-10, 198413310387.34686,
120002114057.9749]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,
7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,
-1.520980077906525e-08, 8.721578527250325e-10, 139696562348.4149,
123962248783.03809]
[2.2152115305769157e-10, -1.1981340041661674e-12, -2.0952905567462806e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,
7.397318554179349e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.721578527250325e-10, 146191133033.73245,
124495463707.0261]
[2.220169404817274e-10, -1.2059133330572482e-12, -2.0840667223230766e-13,
5.0388416851351e-11, -1.7352085678160897e-10, -1.82610373802557e-15,
7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.535159731564839e-08, 8.794413360449789e-10, 153568856127.85236,
127226107362.62663]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7380412465809723e-10, -1.82610373802557e-15,
7.476241521935537e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.504298228349246e-08, 8.735477478457909e-10, 140382068840.41766,
128048566261.66084]
[-9.575357968769427e-09, -1.2140137633227375e-12, -2.088572649745598e-13,
5.011120217163613e-11, -1.747166095423015e-10, -1.842789515995345e-15,
7.430575474541962e-16, 2.0343533479720338e-05, -1.761484506217259e-06,
-1.520980077906525e-08, 8.721578527250325e-10, 135600496522.7375,
129146670219.88675]
[-9.575357968769427e-09, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15,
7.449634745732176e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.735477478457909e-10, 131821303340.10287,
132556338910.10567]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.0382265280257245e-11, -1.743336316696023e-10, -1.813766783798406e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.735477478457909e-10, 129406444985.873,
132653030892.18918]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7380412465809723e-10, -1.82610373802557e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7480334166671461e-06,
-1.520980077906525e-08, 8.721578527250325e-10, 133865099427.32999,
140436120253.29218]
[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7558160485557454e-10, -1.842789515995345e-15,
7.503695295044637e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.750599822793858e-10, 123172560507.99377,
143105235055.60883]
[-9.575357968769427e-09, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.750599822793858e-10, 119639757591.69417,
143860615432.91846]
[2.2282051950271776e-10, -1.2059133330572482e-12, -2.1171136727356646e-13,
5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.750599822793858e-10, 119621740814.33159,
143868003797.30536]
[-9.575357968769427e-09, -1.2028279049571785e-12, -2.1051647732787472e-13,
5.039644867967898e-11, -1.7558160485557454e-10, -1.842789515995345e-15,
7.430575474541962e-16, 1.9863936167468564e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.749223081325664e-10, 121395913545.80966,
144269444777.14786]
[2.2282051950271776e-10, -1.2030336482043862e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15,
7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.750599822793858e-10, 118220156709.2957,
145085114899.6645]
[2.2282051950271776e-10, -1.2030336482043862e-12, -2.1171136727356646e-13,
5.011120217163613e-11, -1.7471650977559177e-10, -1.8261648304268637e-15,
7.416691902768309e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.750599822793858e-10, 118220156709.04602,
145085114900.12366]
[2.2082942462171206e-10, -1.2071709641632366e-12, -2.0913778067377877e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,
7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,
-1.5074975460776788e-08, 8.721578527250325e-10, 109968109293.02217,
145590447784.79443]
[2.22213071071529e-10, -1.2059133330572482e-12, -2.1085309656936224e-13,
5.021867485100539e-11, -1.7558160485557454e-10, -1.8261648304268637e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.760267738096764e-10, 111899934222.58044,
153694065180.84283]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.0866854154642685e-13,
5.011120217163613e-11, -1.766361848796505e-10, -1.8339694239958517e-15,
7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.760544278271184e-10, 112511385038.11157,
154263245256.49524]
[3.868816176815073e-09, -1.2030336482043862e-12, -2.1171136727356646e-13,
5.021867485100539e-11, -1.7558160485557454e-10, -1.82610373802557e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.4920809345224143e-08, 8.750599822793858e-10, 102250033424.31876,
164710456294.5225]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7478774930028702e-10, -1.82610373802557e-15,
7.452586179271996e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,
-1.4975512206722303e-08, 8.721578527250325e-10, 92516509687.73035,
170174200265.44513]
| one = [7.236287049225701e-06, -1.445911565527231e-12, -
1.7498772740084537e-13, 5.109944355076077e-11, -2.5430545472048434e-10,
-1.1709514644876058e-15, 3.210132219509301e-16, 2.502027767038304e-05,
-1.975229899156637e-06, -1.4769695480936238e-08, 8.945619840357268e-10,
135323228000.64511, 130464457208.5385]
two = [6.101651991514008e-06, -1.2764740103418866e-12, -
1.9703439809858206e-13, 4.396430723625485e-11, -7.256876412950873e-11,
-1.0739249647595844e-15, 3.658727722774004e-16, 2.9622074287767617e-05,
-1.9615179204309246e-06, -1.518516920005905e-08, 8.601004856702239e-10,
194360719320.3122, 75684271432.82758]
three = [6.4442734160126695e-06, -1.2463732938819767e-12, -
1.7912928652160854e-13, 3.990379556815055e-11, -7.256876412950873e-11,
-1.128505986956859e-15, 3.855466000081844e-16, 2.7105518268805634e-05,
-1.918022677712299e-06, -1.648586510957147e-08, 8.952907812465134e-10,
40874176708.45886, 129961018217.7445]
four = [5.591985036569838e-06, -1.5732644861037622e-12, -
1.2586540738798186e-13, 5.508993685740796e-11, -2.345347836605763e-10,
-2.1583737575101563e-15, 3.315525502908504e-16, 2.240369111953624e-05,
-1.8808495402864136e-06, -1.5154818034574072e-08, 9.134128217572173e-10,
95538034865.65512, 192689393537.75766]
five = [5.9877501684316964e-06, -1.4725222964411265e-12, -
2.0184675219747084e-13, 4.503520441436847e-11, -2.195719309752964e-10,
-1.1996862422718706e-15, 3.172649531291829e-16, 2.235294071412983e-05,
-1.7673862518012629e-06, -1.593810591566234e-08, 8.495479067416047e-10,
172629547544.72174, 121012464101.10771]
six = [6.525636151737385e-10, -1.5516831882387681e-12, -
1.7065883936338436e-13, 4.6265959327559024e-11, -2.669670220497726e-10,
-1.0739249647595844e-15, 9.085513864943156e-16, 2.5963751617497687e-05,
-1.9757021060346727e-06, -1.5031696163247857e-08, 8.945619840357268e-10,
99871865434.22476, 123933224114.80229]
first1_gen = [[6.417695307686038e-06, -1.2416886913890308e-12, -
1.791907685050265e-13, 3.983180616117193e-11, -7.243488055496258e-11, -
1.1211433897576025e-15, 3.855466000081844e-16, 2.7255618460061466e-05,
-1.917823676019374e-06, -1.6515339421288782e-08, 9.011563904603084e-10,
37866240406.859344, 251532289608.81], [5.974092884160685e-06, -
1.4591405170404072e-12, -2.0184675219747084e-13, 4.3821744446480515e-11,
-7.22093644433135e-11, -1.0712173220027044e-15, 3.65758224365464e-16,
2.235294071412983e-05, -1.763797302814154e-06, -1.6059311052756668e-08,
8.601004856702239e-10, 50907349656.8246, 117645129547.73723], [
7.171513003462397e-06, -1.4334443716578728e-12, -1.749514610735409e-13,
5.509823004788858e-11, -2.5310572250093563e-10, -1.1729621402736547e-15,
3.321162280251396e-16, 2.4812886502853343e-05, -1.964119169077712e-06,
-1.4799846596325615e-08, 8.965548334484032e-10, 85071583311.774,
128667385131.30013], [7.3000149385339486e-06, -1.4508582334938624e-12,
-1.7446896418754742e-13, 5.109944355076077e-11, -2.5448794058714256e-10,
-1.1658376910672744e-15, 3.1827015830354867e-16, 2.502027767038304e-05,
-1.9664311146400523e-06, -1.4730561693079958e-08, 8.945619840357268e-10,
88113858040.47986, 127558862768.52084], [5.581899283069486e-06, -
1.5683042319109065e-12, -1.2586540738798186e-13, 5.535493146365402e-11,
-2.359264703422783e-10, -2.1583737575101563e-15, 3.2921934547988314e-16,
2.2287538734129395e-05, -1.8740196054647742e-06, -
1.5117323048065992e-08, 9.114608510796109e-10, 90926368846.81926,
202187413440.1054], [7.283321725975412e-06, -1.4356567410151954e-12, -
1.7340660013452496e-13, 5.090884822547887e-11, -2.5483963758954753e-10,
-1.139281753854116e-15, 3.1970242364315826e-16, 2.7105518268805634e-05,
-1.963160298901409e-06, -1.4681586301228543e-08, 8.916460477308206e-10,
142505061534.36484, 476063714570.38367], [5.591985036569838e-06, -
1.582675728169255e-12, -1.7359285477580936e-13, 5.508993685740796e-11,
-2.5320893657294154e-10, -2.1583737575101563e-15, 3.210132219509301e-16,
2.511654073479438e-05, -1.965555797894771e-06, -1.5140087108671845e-08,
9.214909160927855e-10, 154168790181.56195, 151975095946.00134], [
6.4442734160126695e-06, -1.5732644861037622e-12, -
1.8036634758606428e-13, 5.508993685740796e-11, -7.27534017567909e-11, -
2.1583737575101563e-15, 3.306758579127667e-16, 2.2271668826613973e-05,
-1.8701423073554431e-06, -1.501078224172373e-08, 8.952907812465134e-10,
267883353895.00665, 158759045786.36343], [6.460391520361948e-06, -
1.2647094709156108e-12, -1.7971415732486973e-13, 4.396430723625485e-11,
-7.247266456377939e-11, -1.1373744765683215e-15, 3.658727722774004e-16,
2.7105518268805634e-05, -1.9663482803776534e-06, -
1.6397993463300374e-08, 8.923803313149724e-10, 349965962553.9084,
297837273933.3269], [5.6272383047081095e-06, -1.5732644861037622e-12, -
1.2571170147507106e-13, 5.534697362808701e-11, -2.3610413258218975e-10,
-1.1709514644876058e-15, 3.2295817320330796e-16, 2.2314117324425535e-05,
-1.8663649176622442e-06, -1.4769695480936238e-08, 9.134128217572173e-10,
393807734620.02893, 1450122303072.2456], [6.437914022666636e-06, -
1.2546731037733632e-12, -1.7844406460041829e-13, 5.488975389250315e-11,
-7.259445338393382e-11, -2.1597092009682793e-15, 3.3041861616205316e-16,
2.240369111953624e-05, -1.876360375320595e-06, -1.648586510957147e-08,
9.134128217572173e-10, 630890128752.3734, 431834854178.85406], [
6.046575120541287e-06, -1.2764740103418866e-12, -1.746683186012092e-13,
5.109944355076077e-11, -2.520608616913497e-10, -1.0704525109919603e-15,
3.6772692838424905e-16, 2.971296945414015e-05, -1.951293357817624e-06,
-1.4769695480936238e-08, 8.939102135383639e-10, 871857905030.9667,
2328286443290.7437], [6.051000675950963e-06, -1.2846825520511646e-12, -
1.268060597488819e-13, 5.490952472465525e-11, -2.3244121922778247e-10,
-2.1424540029363198e-15, 3.673980081076506e-16, 2.961326937497751e-05,
-1.895367635724618e-06, -1.5034205062876655e-08, 9.16195585945909e-10,
1374938673042.5493, 4524615824537.332], [5.6149092148265474e-06, -
1.4639678768975506e-12, -1.253161090730697e-13, 4.481233479664715e-11,
-2.335516269047763e-10, -2.1416544930348844e-15, 3.3108330528832777e-16,
2.22837679272578e-05, -1.8681878215606722e-06, -1.528899727808779e-08,
8.573199342562181e-10, 1914602582873.603, 2013877892656.268], [
6.101651991514008e-06, -1.5833077943313046e-12, -1.9703439809858206e-13,
5.500949944067544e-11, -7.256876412950873e-11, -1.0739249647595844e-15,
3.658727722774004e-16, 2.970517711660123e-05, -1.8738366196528042e-06,
-1.522166132952199e-08, 9.123763139194573e-10, 3105022967535.493,
7589715261899.736], [7.169307360099383e-06, -1.475336624504327e-12, -
2.0167346748799746e-13, 4.53859215469466e-11, -2.1795530264429259e-10,
-1.209364174087727e-15, 3.179525403817121e-16, 2.248948490803903e-05, -
1.9732992714201345e-06, -1.4769695480936238e-08, 8.472670825115021e-10,
3105580314530.341, 4622017117439.275]]
second1_gen = [[6.473615077297489e-06, -1.2416886913890308e-12, -
1.7473505716030156e-13, 3.966285637236728e-11, -7.243488055496258e-11,
-1.1645955168783485e-15, 3.1918479761370934e-16, 2.7255618460061466e-05,
-1.912188850787629e-06, -1.6430064111592607e-08, 8.970550453733459e-10,
35685411688.23251, 231044368946.34586], [6.393923513974502e-06, -
1.2418411778899226e-12, -1.7798884315456173e-13, 3.983180616117193e-11,
-7.243742739542879e-11, -1.128236668058653e-15, 3.855466000081844e-16,
2.7200371659468664e-05, -1.9285560276423494e-06, -1.636514926725132e-08,
9.071692193685023e-10, 57865021002.9106, 360571654391.1672], [
7.230454358781939e-06, -1.423600316370741e-12, -1.7526876652912844e-13,
5.484412599476033e-11, -7.222102668803471e-11, -1.1795054510279537e-15,
3.642469974043324e-16, 2.4721354631465055e-05, -1.7738362153245365e-06,
-1.6042437181983083e-08, 8.601004856702239e-10, 60788722272.11295,
440230270157.01904], [6.435449388867622e-06, -1.2416886913890308e-12, -
1.807074860305897e-13, 5.4624696474782334e-11, -7.299561923303083e-11,
-1.1155657493946243e-15, 3.855466000081844e-16, 2.4639345261867096e-05,
-1.92912357850029e-06, -1.4800406168095671e-08, 9.011563904603084e-10,
90541420172.20418, 503189560104.03455], [6.417695307686038e-06, -
1.2339817339229541e-12, -1.7924803979756243e-13, 5.5902899343682586e-11,
-7.217875877484109e-11, -1.120826019773443e-15, 3.8364837768074985e-16,
2.2074405673546407e-05, -1.904212437644655e-06, -1.509791791618086e-08,
8.960324081400173e-10, 91138056935.866, 156256693553.4698], [
7.235432436183002e-06, -1.444519147741974e-12, -1.7273464723057338e-13,
5.517809418856912e-11, -2.5310572250093563e-10, -1.1658376910672744e-15,
3.3048095015500005e-16, 2.4812886502853343e-05, -1.964119169077712e-06,
-1.4777953862585708e-08, 8.945619840357268e-10, 98015149423.40909,
125389712442.99564], [6.382295596647026e-06, -1.5683042319109065e-12, -
1.271182130914441e-13, 3.9709881372590666e-11, -2.3411267641257417e-10,
-1.1298867172210502e-15, 3.273827033054119e-16, 2.71828464025051e-05, -
1.86879521538149e-06, -1.6615697675064263e-08, 8.938783145101195e-10,
108132988244.55444, 600937075323.7117], [7.3000149385339486e-06, -
1.4649443926376347e-12, -1.740251215699652e-13, 5.5040821609381877e-11,
-2.5448794058714256e-10, -1.1729621402736547e-15, 3.321162280251396e-16,
2.492985953688089e-05, -1.95260325957056e-06, -1.4879723555310096e-08,
8.886352647229086e-10, 118040637271.1665, 119637343045.177], [
5.595995170722691e-06, -1.5775800984465949e-12, -1.2531378473105398e-13,
5.5737478708430025e-11, -2.359264703422783e-10, -2.141274549861917e-15,
3.2670998922499434e-16, 2.2375793269713536e-05, -1.8912926681237391e-06,
-1.5244852134327217e-08, 9.114608510796109e-10, 193706809398.06177,
145429438824.56485], [6.417695307686038e-06, -1.2390179448049186e-12, -
2.0184675219747084e-13, 3.996761820973954e-11, -7.30077645678233e-11, -
1.0733818300903034e-15, 3.6521589033170274e-16, 2.7380751148035565e-05,
-1.901967051200766e-06, -1.6531476837456585e-08, 8.659462633971021e-10,
291714681643.4888, 219358626907.00577], [7.269087955666727e-06, -
1.4398732474157131e-12, -1.745771866624504e-13, 5.5370858680922966e-11,
-2.5212090845365535e-10, -1.1547640084684547e-15,
3.1826570991307717e-16, 2.4799848604697875e-05, -1.9802449310363633e-06,
-1.4932011828861567e-08, 8.916225586049855e-10, 291814703950.912,
265497905413.09335], [5.9575073045674184e-06, -1.4591405170404072e-12,
-1.7515686156504634e-13, 5.071091939607585e-11, -7.251972289899038e-11,
-1.172163868062928e-15, 3.2003450301868095e-16, 2.236559796692659e-05,
-1.964000257622103e-06, -1.461000086726312e-08, 8.924031273079037e-10,
441351014961.37744, 513124822279.29816], [7.118156558728498e-06, -
1.4213484509322684e-12, -1.7594919642528414e-13, 5.502275447498347e-11,
-2.359264703422783e-10, -2.146866081339977e-15, 3.3020925008057705e-16,
2.48800717576552e-05, -1.8740196054647742e-06, -1.4681760148497176e-08,
9.194043116452982e-10, 480601682287.2741, 2166349399584.3464], [
6.435379358296727e-06, -1.449279705541305e-12, -1.791907685050265e-13,
4.013727926643595e-11, -2.561628978573389e-10, -1.1658376910672744e-15,
3.1916771926698506e-16, 2.706170262409588e-05, -1.9747493962051268e-06,
-1.6529378614728517e-08, 8.945619840357268e-10, 480690251628.6576,
455217335045.56067], [7.273965294010602e-06, -1.4508582334938624e-12, -
1.2640181562203036e-13, 5.1256890020829106e-11, -2.347526011960417e-10,
-1.1573810914157072e-15, 3.313802025100971e-16, 2.5248996663846427e-05,
-1.8890715225154116e-06, -1.4830513494585048e-08, 9.024560997678787e-10,
513022508534.7746, 1741282758378.8208], [7.171513003462397e-06, -
1.4334443716578728e-12, -1.258745292341622e-13, 5.562080442549079e-11,
-2.5310572250093563e-10, -2.177369178159867e-15, 3.269368594462498e-16,
2.5052523082312023e-05, -1.9593459141604013e-06, -
1.4665768665138152e-08, 8.920318373308913e-10, 559251400205.1976,
313686240874.89294]]
third1_gen = [[6.428534934734018e-06, -1.2348251959432863e-12, -
1.767418187059626e-13, 3.954772029523348e-11, -7.292041892016764e-11, -
1.1216042005993232e-15, 3.8462974452187554e-16, 2.732021800880368e-05,
-1.912188850787629e-06, -1.6465861899672315e-08, 8.953663972360121e-10,
35914970214.05617, 208658422545.5101], [6.449609175276781e-06, -
1.2355212093166627e-12, -1.7892996139776768e-13, 3.978108705811362e-11,
-7.260470610345522e-11, -1.128236668058653e-15, 3.8262320992212617e-16,
2.699492740612888e-05, -1.9285560276423494e-06, -1.6459368248390354e-08,
9.071692193685023e-10, 37667755025.66565, 260591174431.75333], [
6.393923513974502e-06, -1.2329510175057565e-12, -1.7878217157136278e-13,
4.009121098742944e-11, -7.243742739542879e-11, -1.119215448440791e-15,
3.855466000081844e-16, 2.7170577516281446e-05, -1.946180426984478e-06,
-1.6356719885598995e-08, 9.071692193685023e-10, 41822657912.61174,
187148082730.9518], [6.393923513974502e-06, -1.2418411778899226e-12, -
1.7764720872488035e-13, 5.5839617178535e-11, -7.217875877484109e-11, -
1.1285205693786809e-15, 3.8241419562917457e-16, 2.727322263242888e-05,
-1.9285560276423494e-06, -1.6299569164241514e-08, 8.954758973117168e-10,
45658359101.85514, 143455126000.2526], [6.412748625088242e-06, -
1.2418411778899226e-12, -1.7788474362949836e-13, 3.98996561577576e-11,
-7.290920324596793e-11, -1.1258830930124426e-15, 3.8322709394594156e-16,
2.6978084672522227e-05, -1.9285560276423494e-06, -
1.6212095851483947e-08, 9.06465374180439e-10, 61888825971.955795,
378668457219.4866], [7.2950079161541e-06, -1.423600316370741e-12, -
1.8067111524974517e-13, 5.467528933636526e-11, -7.269174548770519e-11,
-1.1131382577055909e-15, 3.642469974043324e-16, 2.442302310111588e-05,
-1.9365154780516644e-06, -1.4736235919210341e-08, 9.02573445716291e-10,
72168008768.07632, 429565720321.34186], [7.277641363649251e-06, -
1.4186237292635021e-12, -1.7672076654522444e-13, 5.4875348972838477e-11,
-7.250728822785179e-11, -1.1805107762756462e-15, 3.880180132520679e-16,
2.7230117388865188e-05, -1.79140018540739e-06, -1.6042437181983083e-08,
8.524740779894739e-10, 144497176198.74966, 733034177617.006], [
6.435449388867622e-06, -1.2375432988348708e-12, -1.8114977137612309e-13,
3.9353291584632385e-11, -7.306938943468394e-11, -1.1645955168783485e-15,
3.887993677152085e-16, 2.4432920122355823e-05, -1.927081007099796e-06,
-1.644170413651962e-08, 9.09149545755435e-10, 151124978488.96066,
169172823395.74277], [7.278147471012389e-06, -1.4279386093057266e-12, -
1.7683419692117291e-13, 5.493758019518918e-11, -7.289146026177328e-11,
-1.1733747472097884e-15, 3.675691109659462e-16, 2.4721354631465055e-05,
-1.7638896999117907e-06, -1.588988736168235e-08, 8.632841256471107e-10,
202474467398.45615, 922092113586.5779], [7.177079530800026e-06, -
1.234976832476029e-12, -1.7526876652912844e-13, 5.534254133122458e-11,
-7.205830797649949e-11, -1.120826019773443e-15, 3.8364837768074985e-16,
2.2258192147086412e-05, -1.7878127478583311e-06, -1.620023857736605e-08,
8.601004856702239e-10, 213869103072.6637, 175609972725.89545], [
6.350923506939188e-06, -1.2525603780194753e-12, -1.7993410193080307e-13,
5.465765498048408e-11, -7.243742739542879e-11, -1.1188147125437704e-15,
3.855466000081844e-16, 2.47790541156232e-05, -1.9163436765125797e-06, -
1.4800406168095671e-08, 9.043461740243768e-10, 224990894591.97565,
940216435276.2135], [6.375685299492019e-06, -1.2470011129066444e-12, -
1.7556981763399573e-13, 5.482994274294271e-11, -7.247391358991481e-11,
-1.1737410455893592e-15, 3.8256427214483946e-16, 2.4747394888572957e-05,
-1.921085601798487e-06, -1.655011267092608e-08, 9.011563904603084e-10,
242139334921.33466, 239644754200.97003], [6.474178960026375e-06, -
1.436844524248817e-12, -1.766513283684079e-13, 3.940038642964773e-11, -
7.181977887130175e-11, -1.1548751736666541e-15, 3.1745148598988346e-16,
2.707077658308786e-05, -1.92536072773705e-06, -1.6138736645669917e-08,
8.669699125562364e-10, 435950975348.6226, 363915964843.3034], [
6.393923513974502e-06, -1.4269415936091027e-12, -1.7684911527276688e-13,
5.480211712359269e-11, -7.243742739542879e-11, -1.1795054510279537e-15,
3.8683254669914693e-16, 2.7200371659468664e-05, -1.925930700762681e-06,
-1.643396668485197e-08, 8.601004856702239e-10, 840789439847.5613,
886246867017.2574], [6.5292806963971566e-06, -1.2521788644307235e-12, -
1.752024719240228e-13, 5.432423395298522e-11, -7.243160061946103e-11, -
1.1728842336075722e-15, 3.642469974043324e-16, 2.4721354631465055e-05,
-1.9201275577069358e-06, -1.6042437181983083e-08, 8.613978338195112e-10,
1220087240914.9465, 1538404370735.8923], [7.222746286095911e-06, -
1.4287928653696903e-12, -1.7798884315456173e-13, 5.47608522234827e-11,
-7.177949793819456e-11, -1.1234835849356116e-15, 3.638627899273496e-16,
2.4725904181789833e-05, -1.7849753358990938e-06, -
1.6004659818379623e-08, 9.095587982641099e-10, 1457214324700.6113,
3971854766728.4727]]
[1.5780628845471506e-10, -1.411490597458207e-12, -2.483949940281473e-13,
5.026488748046414e-11, -1.6612576871621329e-10, -1.6989844545344268e-15,
8.109443782655016e-16, 2.404048022255995e-05, -1.9859378185800262e-06,
-1.6176901999289427e-08, 9.489903548622118e-10, 102704594939.3429,
145011267381.10236]
[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13,
4.6265959327559024e-11, -1.669670220497726e-10, -1.803564324024427e-15,
9.085513864943156e-16, 2.3963751617497686e-05, -1.9517021060346726e-06,
-1.7031696163247858e-08, 9.514461873186105e-10, 165879895673.90985,
148817892429.6303]
[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13,
4.6265959327559024e-11, -1.669670220497726e-10, -1.7924226413310876e-15,
9.085513864943156e-16, 2.3963751617497686e-05, -1.9517021060346726e-06,
-1.68878771600575e-08, 9.514461873186105e-10, 117267023779.58536,
138194745977.8172]
[6.483959591091273e-10, -1.5516831882387681e-12, -2.490649104258458e-13,
5.026488748046414e-11, -1.669670220497726e-10, -1.6989844545344268e-15,
8.109443782655016e-16, 2.3963751617497686e-05, -1.9859378185800262e-06,
-1.6176901999289427e-08, 9.514461873186105e-10, 81279986793.6045,
148499957167.59894]
[6.525636151737385e-10, -1.3197261044307544e-12, -2.4458923117817936e-13,
4.6265959327559024e-11, -1.6585443429963996e-10, -1.802849923078712e-15,
9.085513864943156e-16, 2.3963751617497686e-05, -1.9517021060346726e-06,
-1.68878771600575e-08, 9.514461873186105e-10, 121168243931.69568,
138376625633.08905]
[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13,
4.59768924730343e-11, -1.6588127033784183e-10, -1.7924226413310876e-15,
9.085513864943156e-16, 2.3963751617497686e-05, -1.9859378185800262e-06,
-1.6176901999289427e-08, 9.503282761551985e-10, 127284942067.54468,
147143586736.12967]
[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13,
4.6265959327559024e-11, -1.669670220497726e-10, -1.803564324024427e-15,
8.4683341745183045e-16, 2.3963751617497686e-05, -1.9517021060346726e-06,
-1.7031696163247858e-08, 9.514461873186105e-10, 165879895673.90985,
148817892429.6303]
[6.483959591091273e-10, -1.5516831882387681e-12, -2.477506624442777e-13,
5.026488748046414e-11, -1.669670220497726e-10, -1.7924226413310876e-15,
8.070333012129768e-16, 2.4138485475672502e-05, -1.9859378185800262e-06,
-1.6108027319186075e-08, 9.514461873186105e-10, 78167992157.7952,
149819556305.94864]
[2.8389500911155237e-10, -1.3179669217824132e-12, -2.1290409882195637e-13,
5.0376537605765665e-11, -1.7763084077799175e-10, -
1.8081388431942655e-15, 8.940150894056582e-16, 2.501288034169883e-05, -
2.04721003e-06, -1.5842532923181598e-08, 9.632771875757591e-10,
108694336300.90585, 154375559012.27695]
[3.603083193105678e-11, -1.3197261044307544e-12, -2.213785963757499e-13,
4.581086934703742e-11, -1.6681614728164575e-10, -1.803564324024427e-15,
8.4683341745183045e-16, 2.4065016435368993e-05, -2.0711260096490455e-06,
-1.7031696163247858e-08, 1.0052651438176042e-09, 98921398930.67514,
195080915978.15582]
[-2.0926038768787875e-10, -1.4706748741606338e-12, -2.3988654320236774e-13,
4.877026722101481e-11, -1.4519789238682426e-10, -1.8284483886533772e-15,
8.688144408462996e-16, 2.7398930354457147e-05, -1.8015495121292713e-06,
-1.818410294118833e-08, 8.90965422552221e-10, 100727388654.51337,
143318140783.98648]
[-2.0926038768787875e-10, -1.4706748741606338e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.450370910345386e-10, -1.9257301298903336e-15,
8.688144408462996e-16, 2.7370809361932293e-05, -1.8015495121292713e-06,
-1.818410294118833e-08, 8.935114691513575e-10, 112772825510.86789,
160453198244.84198]
[-8.304227478096081e-10, -1.500986356346536e-12, -1.9531413192683389e-13,
4.764041880667976e-11, -1.8918518378579712e-10, -1.9257301298903336e-15,
8.688144408462996e-16, 2.7122228639393258e-05, -1.8099079507631247e-06,
-1.8203397437532012e-08, 8.935114691513575e-10, 177535436392.6114,
109895891048.79645]
[-2.0926038768787875e-10, -1.6406892521440393e-12, -1.9531413192683389e-13,
4.85603371945204e-11, -1.450370910345386e-10, -1.9257301298903336e-15,
8.688144408462996e-16, 2.7370809361932293e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 8.935114691513575e-10, 150364957402.63327,
122880053749.32047]
[-8.223802918909379e-10, -1.4625176901480844e-12, -2.703868659848318e-13,
4.852404641399239e-11, -1.896863627503491e-10, -1.9257301298903336e-15,
8.688144408462996e-16, 2.697391208672331e-05, -1.7223534426462784e-06,
-1.7212440323693525e-08, 8.377481199786938e-10, 199237170018.58218,
130994741061.18477]
[-2.1118416643089627e-10, -1.459747004615292e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4471230416768517e-10, -1.9257301298903336e-15,
8.688144408462996e-16, 2.7267797101210102e-05, -1.8015495121292713e-06,
-1.818410294118833e-08, 8.935114691513575e-10, 120611068648.22205,
148716985588.15564]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,
8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.081976758127089e-10, 190052435274.9098,
101545825010.15762]
[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4446129047664535e-10, -1.8210829282495652e-15,
8.731899868495941e-16, 2.4857867004975476e-05, -1.8015495121292713e-06,
-1.8287117187317536e-08, 9.081976758127089e-10, 195239394048.3779,
101879284463.33914]
[-8.372413642600907e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,
8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.8287117187317536e-08, 9.087619653117874e-10, 178582869424.88885,
102270797763.39908]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.937673308636816e-13,
4.852404641399239e-11, -1.432701673757514e-10, -1.8210829282495652e-15,
8.765174154706532e-16, 2.4703687041471573e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 171732970643.1874,
106305215455.77405]
[-8.304227478096081e-10, -1.500986356346536e-12, -1.9531413192683389e-13,
4.7704075824842225e-11, -1.8975666267494283e-10, -
1.9099300746589145e-15, 8.757096667187756e-16, 2.7122228639393258e-05,
-1.809239966469619e-06, -1.8203397437532012e-08, 8.935114691513575e-10,
166731944707.48343, 109962566902.69849]
[-2.0926038768787875e-10, -1.3235354562894133e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.5027518840822802e-10, -1.9355556139972827e-15,
8.69779310515605e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -
1.830053261436748e-08, 9.113315958572542e-10, 198705325524.15018,
111850971687.16727]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.858844276736905e-11, -1.5027518840822802e-10, -1.9257301298903336e-15,
8.765174154706532e-16, 2.507247127369048e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.134614417430693e-10, 152877011534.3794,
128488226222.4665]
[-8.325113652893972e-10, -1.647013760811586e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.8226533446456543e-15,
8.718221314640016e-16, 2.471871023322042e-05, -1.788813296914756e-06, -
1.836034443165441e-08, 9.148927620445716e-10, 115664967416.85544,
172987399752.44284]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,
8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.035879148460716e-10, 195862055252.448,
98829512345.71223]
[-8.372802930516975e-10, -1.647013760811586e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15,
8.765346456450067e-16, 2.4957985197946978e-05, -1.799557982850986e-06,
-1.836034443165441e-08, 9.081976758127089e-10, 191606485390.66824,
100937635343.36494]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15,
8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 197397120635.11142,
101220474756.5564]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.924272863609467e-13,
4.852404641399239e-11, -1.4730851235460287e-10, -1.8195538935082505e-15,
8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.081976758127089e-10, 189380748451.24603,
101440046940.62292]
[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4663924630161214e-10, -1.815921924023075e-15,
8.688144408462996e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.8287117187317536e-08, 9.081976758127089e-10, 179897941081.52283,
101479475091.5385]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,
8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.081976758127089e-10, 186125019263.05353,
101522685052.87083]
[-8.372413642600907e-10, -1.647013760811586e-12, -1.9531413192683389e-13,
4.826770959894538e-11, -1.4675478300173032e-10, -1.815921924023075e-15,
8.675713932751666e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.8287117187317536e-08, 9.087619653117874e-10, 176424094355.21158,
102059630396.96977]
[-8.32774857282967e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.475667375214216e-10, -1.8210829282495652e-15,
8.765174154706532e-16, 2.4703687041471573e-05, -1.7921694947468313e-06,
-1.836034443165441e-08, 9.080472327376693e-10, 190619161162.84558,
102134941196.42899]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13,
4.835930442286039e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,
8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.8287117187317536e-08, 9.087619653117874e-10, 178582869424.89273,
102270797763.3992]
[-8.372413642600907e-10, -1.3359785407261977e-12, -1.9482957217087468e-13,
4.831070029448083e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,
8.688144408462996e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.8287117187317536e-08, 9.087619653117874e-10, 178582869424.89435,
102270797763.39929]
[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4446129047664535e-10, -1.8304219886094965e-15,
8.765174154706532e-16, 2.4857867004975476e-05, -1.8015495121292713e-06,
-1.8287117187317536e-08, 9.087619653117874e-10, 191644867011.30374,
102518032445.5969]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13,
4.82400894161232e-11, -1.4446129047664535e-10, -1.8228595048374295e-15,
8.751158883884222e-16, 2.506841119647095e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.081976758127089e-10, 172947032775.99432,
102577021916.3392]
[-2.103367158359051e-10, -1.3359785407261977e-12, -1.9376482536341035e-13,
4.852404641399239e-11, -1.432701673757514e-10, -1.8210829282495652e-15,
8.765174154706532e-16, 2.4703687041471573e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 171732970643.1874,
106305215455.77405]
[-8.372413642600907e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,
8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.8161784527844478e-08, 9.087619653117874e-10, 144963603428.97382,
112061347287.60056]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,
8.765174154706532e-16, 2.5026084747023036e-05, -1.7900208911755532e-06,
-1.830053261436748e-08, 9.087619653117874e-10, 125853468889.92097,
136457449593.06062]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.937673308636816e-13,
4.852404641399239e-11, -1.432701673757514e-10, -1.8210829282495652e-15,
8.765174154706532e-16, 2.4703687041471573e-05, -1.776082515662521e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 126137991779.33096,
160562679389.67618]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,
8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.035879148460716e-10, 195862055252.448,
98829512345.71223]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15,
8.763652695826297e-16, 2.4957985197946978e-05, -1.799557982850986e-06,
-1.836034443165441e-08, 9.081976758127089e-10, 186222924740.70007,
100125948657.42978]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13,
4.855683396544643e-11, -1.4675478300173032e-10, -1.815921924023075e-15,
8.83613368865103e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.081976758127089e-10, 183895104728.34744,
101215117638.35565]
[-2.0926038768787875e-10, -1.3382357152930057e-12, -1.9531413192683389e-13,
4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15,
8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 197397120635.11142,
101220474756.5564]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.432701673757514e-10, -1.8130493256774034e-15,
8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 197397120635.11664,
101220474756.55742]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.924272863609467e-13,
4.852404641399239e-11, -1.476291648179518e-10, -1.8195538935082505e-15,
8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.081976758127089e-10, 189380748451.4617,
101440046940.6675]
[-2.0969974314689316e-10, -1.647013760811586e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4663924630161214e-10, -1.815921924023075e-15,
8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.8287117187317536e-08, 9.081976758127089e-10, 179897941081.52283,
101479475091.5385]
[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13,
4.8730627003901226e-11, -1.4675478300173032e-10, -1.815921924023075e-15,
8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.8287117187317536e-08, 9.081976758127089e-10, 179897941081.58997,
101479475091.5439]
[-2.0926038768787875e-10, -1.6370065196284276e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4663924630161214e-10, -1.8210829282495652e-15,
8.725909439109588e-16, 2.5149586855224063e-05, -1.8040587516026417e-06,
-1.830053261436748e-08, 9.081976758127089e-10, 174674218067.03134,
101707557509.25955]
[-2.0780704759852712e-10, -1.3359785407261977e-12, -1.928247479392491e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15,
8.815489945689696e-16, 2.492800478197597e-05, -1.799557982850986e-06, -
1.830053261436748e-08, 9.081976758127089e-10, 177564736843.2668,
101910116331.42278]
[-2.0926038768787875e-10, -1.3481496678499343e-12, -1.9612804716494087e-13,
4.869384519400452e-11, -1.4625361988654996e-10, -1.816149350524488e-15,
8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.087619653117874e-10, 176677319245.07892,
101942928295.47075]
[-8.324503936172223e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4535167828811644e-10, -1.799249889019179e-15,
8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.063398319687734e-10, 161710635101.41095,
104790698646.6004]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.8168585276282465e-11, -1.4675478300173032e-10, -
1.8210829282495652e-15, 8.725909439109588e-16, 2.4957985197946978e-05,
-1.8015495121292713e-06, -1.830053261436748e-08, 9.102513898455556e-10,
160649925757.17908, 106424978687.80653]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.869384519400452e-11, -1.4675478300173032e-10, -1.799249889019179e-15,
8.765174154706532e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.067222192179334e-10, 157509126624.7564,
106648081137.30634]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.924272863609467e-13,
4.87567764690249e-11, -1.473869541008466e-10, -1.8210829282495652e-15,
8.797810044472039e-16, 2.5128697145423343e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.089655956213592e-10, 156027014786.34595,
106784848298.00577]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.8130493256774034e-15,
8.758120054489215e-16, 2.489589641570383e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 9.120599461707459e-10, 159857940983.01962,
106918161793.97298]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9347415380665696e-13,
4.85631967683728e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,
8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.836417410231251e-08, 9.134390375783151e-10, 142628527511.76648,
117274357359.96004]
[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9567576322418712e-13,
4.852404641399239e-11, -1.4663924630161214e-10, -1.815921924023075e-15,
8.688144408462996e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.8287117187317536e-08, 9.120365536291957e-10, 136801158565.52109,
118996909122.33968]
[-2.0926038768787875e-10, -1.3468298773490566e-12, -1.924272863609467e-13,
4.852404641399239e-11, -1.4730851235460287e-10, -1.8210829282495652e-15,
8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.13148553316506e-10, 131221998343.07083,
125656067768.88814]
[-8.372802930516975e-10, -1.6610460978653825e-12, -1.9391155389121011e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,
8.765346456450067e-16, 2.500200335107093e-05, -1.777109321965829e-06, -
1.836034443165441e-08, 9.081976758127089e-10, 107442969837.9951,
191438895729.71088]
[-8.373514643167848e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,
8.705169785374419e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.035879148460716e-10, 195862055252.448,
98829512345.71223]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4659424506650604e-10, -1.7864100157215748e-15,
8.706272486016714e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 185690352687.11697,
99223644222.007]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15,
8.839563844754409e-16, 2.4957985197946978e-05, -1.799557982850986e-06,
-1.836034443165441e-08, 9.081976758127089e-10, 186222924740.70007,
100125948657.42978]
[-8.29844666406642e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.849645416672899e-11, -1.4675478300173032e-10, -1.803543054789903e-15,
8.714032924475303e-16, 2.492800478197597e-05, -1.799557982850986e-06, -
1.836034443165441e-08, 9.081976758127089e-10, 190148608462.3534,
100180028793.61896]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15,
8.765174154706532e-16, 2.5177177276929545e-05, -1.7997194394724915e-06,
-1.850709631603352e-08, 9.087619653117874e-10, 199924589208.46686,
100223589650.82378]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9654069739659012e-13,
4.855683396544643e-11, -1.461461940090847e-10, -1.803543054789903e-15,
8.763652695826297e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.081976758127089e-10, 178626169889.2221,
100558408593.70113]
[-8.332310924150067e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.8877585360256924e-11, -1.4675478300173032e-10, -
1.8130493256774034e-15, 8.763652695826297e-16, 2.4957985197946978e-05,
-1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10,
193351738763.71564, 100949387586.23102]
[-8.372802930516975e-10, -1.343853363763315e-12, -1.9192642832280474e-13,
4.852404641399239e-11, -1.446871529700577e-10, -1.8130493256774034e-15,
8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 197397120636.1133,
101220474756.86967]
[-2.081071620571536e-10, -1.3430194729908366e-12, -1.9531413192683389e-13,
4.8687777307168814e-11, -1.432701673757514e-10, -1.8195538935082505e-15,
8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.081976758127089e-10, 189380748448.52612,
101440046940.05927]
[-8.372802930516975e-10, -1.3382357152930057e-12, -1.9531413192683389e-13,
4.869384519400452e-11, -1.432701673757514e-10, -1.815921924023075e-15,
8.834544584685654e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 198690577754.9655,
101467426817.57397]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.924272863609467e-13,
4.8327983670281894e-11, -1.4675478300173032e-10, -
1.8258864221284576e-15, 8.83613368865103e-16, 2.492800478197597e-05, -
1.8015495121292713e-06, -1.8304452912365864e-08, 9.081976758127089e-10,
193392923341.53983, 101900620617.14302]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9719420123154376e-13,
4.861133464689211e-11, -1.483232636118454e-10, -1.8195538935082505e-15,
8.765174154706532e-16, 2.492800478197597e-05, -1.7966453439138136e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 174954502194.04602,
103131734300.077]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.814072294943091e-11, -1.437983579446461e-10, -1.8130493256774034e-15,
8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 9.107645094765291e-10, 171249412831.2997,
103180541968.40872]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.476291648179518e-10, -1.7906363569860738e-15,
8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.8221372696029056e-08, 9.081976758127089e-10, 154981149327.29538,
103805616436.34537]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.855683396544643e-11, -1.432701673757514e-10, -1.825643030416898e-15,
8.83613368865103e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -
1.81828896229741e-08, 9.081976758127089e-10, 158250536108.31226,
106843736334.12831]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9439448414369486e-13,
4.855683396544643e-11, -1.4675478300173032e-10, -1.8130493256774034e-15,
8.765174154706532e-16, 2.5187119035976227e-05, -1.797858272312416e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 148433419780.93826,
110030788135.34956]
[-8.372802930516975e-10, -1.3382357152930057e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.432701673757514e-10, -1.799249889019179e-15,
8.765174154706532e-16, 2.4802576523291093e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 152744383578.88885,
111006224451.55664]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15,
8.83613368865103e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.8140174569754755e-08, 9.081976758127089e-10, 140660582328.68314,
113087422800.04585]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15,
8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.081976758127089e-10, 148227079557.4723,
115101067854.69138]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,
8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,
-1.830053261436748e-08, 9.081976758127089e-10, 129686832886.01216,
126984206927.84627]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.869384519400452e-11, -1.4592095499147362e-10, -1.7864100157215748e-15,
8.706272486016714e-16, 2.5177177276929545e-05, -1.7997194394724915e-06,
-1.850709631603352e-08, 9.087619653117874e-10, 188127979624.47858,
98138013390.26245]
[-8.373514643167848e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.8139505305916955e-11, -1.4675478300173032e-10, -1.799249889019179e-15,
8.783887938075847e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.035879148460716e-10, 195862055252.45816,
98829512345.71414]
[-8.379785124926609e-10, -1.3292316984383345e-12, -1.955394873972143e-13,
4.852404641399239e-11, -1.4779126633130978e-10, -1.799249889019179e-15,
8.775397316555329e-16, 2.5049204386853816e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.035879148460716e-10, 183972070969.05157,
98891303611.42876]
[-8.373750609204521e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.869384519400452e-11, -1.4659424506650604e-10, -1.7864100157215748e-15,
8.706272486016714e-16, 2.492800478197597e-05, -1.7997194394724915e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 176341783374.723,
99638222233.03885]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4696825367906723e-10, -1.799249889019179e-15,
8.705169785374419e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 187303786818.71506,
99962477826.90034]
[-8.29844666406642e-10, -1.3259182588069894e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15,
8.839563844754409e-16, 2.492800478197597e-05, -1.799557982850986e-06, -
1.836034443165441e-08, 9.081976758127089e-10, 190148608462.3526,
100180028793.6191]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15,
8.839563844754409e-16, 2.4907384876305387e-05, -1.799557982850986e-06,
-1.836034443165441e-08, 9.081976758127089e-10, 192885903228.52237,
100290100926.3771]
[-8.372802930516975e-10, -1.340114474894997e-12, -1.9475632661250835e-13,
4.852404641399239e-11, -1.4659424506650604e-10, -1.803543054789903e-15,
8.839563844754409e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 193159834117.98853,
100447140164.3877]
[-8.45347775440883e-10, -1.3359785407261977e-12, -1.9409478257397567e-13,
4.852404641399239e-11, -1.463585775827913e-10, -1.812045689500589e-15,
8.706272486016714e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 192907161589.0385,
100872818268.9527]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.8130493256774034e-15,
8.705169785374419e-16, 2.4957985197946978e-05, -1.7997194394724915e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 183710210581.81177,
101076246798.6337]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15,
8.765174154706532e-16, 2.542150809952725e-05, -1.7997194394724915e-06,
-1.850709631603352e-08, 9.087619653117874e-10, 168715457724.7375,
101683114493.3993]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.849645416672899e-11, -1.432701673757514e-10, -1.803543054789903e-15,
8.765174154706532e-16, 2.5177177276929545e-05, -1.7997194394724915e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 153789626574.96255,
105699410466.83022]
[-8.372802930516975e-10, -1.3398025228100945e-12, -1.9531413192683389e-13,
4.855683396544643e-11, -1.4675478300173032e-10, -1.803543054789903e-15,
8.714032924475303e-16, 2.4957985197946978e-05, -1.793948394990656e-06,
-1.836034443165441e-08, 9.081976758127089e-10, 159560429502.34207,
105861289429.36061]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.869384519400452e-11, -1.432701673757514e-10, -1.7864100157215748e-15,
8.765174154706532e-16, 2.5177177276929545e-05, -1.7997194394724915e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 147461834890.53723,
106068644665.40553]
[-8.372802930516975e-10, -1.3292316984383345e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4760843266911815e-10, -1.7864100157215748e-15,
8.706272486016714e-16, 2.492800478197597e-05, -1.7933608637070708e-06,
-1.836034443165441e-08, 9.087979750822277e-10, 147793960453.4741,
109638154986.2024]
[-8.29844666406642e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.8434260838579935e-11, -1.4561659265574012e-10, -1.819718397269023e-15,
8.775397316555329e-16, 2.4948775411850268e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.081976758127089e-10, 150492287670.62976,
114344342719.97507]
[-8.406587076953522e-10, -1.318355348076889e-12, -1.9519777560623135e-13,
4.855683396544643e-11, -1.4760843266911815e-10, -1.815921924023075e-15,
8.839563844754409e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.081976758127089e-10, 148227079557.78632,
115101067854.31332]
[-8.389236670603421e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,
8.717072130867646e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 137339476236.27339,
120797794814.05704]
[-8.373514643167848e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,
8.705169785374419e-16, 2.492800478197597e-05, -1.786297491730252e-06, -
1.836034443165441e-08, 9.087619653117874e-10, 128365631923.39072,
133721716481.47603]
[-8.361552586353477e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,
8.705169785374419e-16, 2.483403849637781e-05, -1.783565701728919e-06, -
1.836034443165441e-08, 9.095300241628919e-10, 123047993752.2489,
147005409641.27127]
[-9.129396902499863e-10, -1.290047843436073e-12, -2.702634930634393e-13,
4.58556551164694e-11, -1.8724359625458014e-10, -2.1792166675464865e-15,
9.365717147446797e-16, 1.8994698205972217e-05, -1.8050933870374392e-06,
-1.3360134446642706e-08, 8.693561802236366e-10, 169675879824.58978,
156722470654.13324]
[6.303262263534727e-10, -1.2096663849982051e-12, -2.5988950272728827e-13,
4.701662665204773e-11, -1.4934765549498044e-10, -2.0495920936053975e-15,
8.502785255135087e-16, 1.8814769194136882e-05, -1.8050933870374392e-06,
-1.3247752346374906e-08, 8.693561802236366e-10, 108072398467.48868,
167972224844.19583]
[6.303262263534727e-10, -1.2096663849982051e-12, -2.5988950272728827e-13,
4.701662665204773e-11, -1.4986345441105813e-10, -2.0495920936053975e-15,
8.502785255135087e-16, 1.8814769194136882e-05, -1.8050933870374392e-06,
-1.3247752346374906e-08, 8.693561802236366e-10, 108072398467.75635,
167972224843.92523]
[-9.212545260772544e-10, -1.290047843436073e-12, -1.8356995493902235e-13,
4.58556551164694e-11, -1.8724359625458014e-10, -2.1913589342035502e-15,
9.365717147446797e-16, 1.9540146753875297e-05, -1.8050933870374392e-06,
-1.3360134446642706e-08, 8.693561802236366e-10, 117723326371.03189,
192873830899.82352]
[6.303262263534727e-10, -1.290047843436073e-12, -2.5988950272728827e-13,
4.58556551164694e-11, -1.4986345441105813e-10, -2.1913589342035502e-15,
8.502785255135087e-16, 1.8814769194136882e-05, -1.8050933870374392e-06,
-1.3247752346374906e-08, 8.693561802236366e-10, 164354464752.25952,
160840990423.46024]
[6.354744988103506e-10, -1.2096663849982051e-12, -1.830526663998671e-13,
4.6589669053151376e-11, -1.4986345441105813e-10, -
2.0495920936053975e-15, 8.502785255135087e-16, 1.894858193847651e-05, -
1.8050933870374392e-06, -1.3247752346374906e-08, 8.693561802236366e-10,
96467208837.94556, 179586543004.98117]
[-9.212545260772544e-10, -1.290047843436073e-12, -1.8356995493902235e-13,
4.58556551164694e-11, -1.8580228849463816e-10, -2.1913589342035502e-15,
9.365717147446797e-16, 1.9540146753875297e-05, -1.8218396850604304e-06,
-1.3360134446642706e-08, 8.759216763039946e-10, 117765020064.66293,
187118262382.8758]
[-9.129396902499863e-10, -1.3004166005044262e-12, -1.8356995493902235e-13,
4.58556551164694e-11, -1.8724359625458014e-10, -2.1913589342035502e-15,
9.365717147446797e-16, 1.962681376929987e-05, -1.8050933870374392e-06,
-1.3418860642065019e-08, 8.693561802236366e-10, 122674650037.46736,
187415567631.77402]
[-9.212545260772544e-10, -1.2799153483071088e-12, -1.8213920664100724e-13,
4.58556551164694e-11, -1.8724359625458014e-10, -2.1913589342035502e-15,
9.365717147446797e-16, 1.9540146753875297e-05, -1.8050933870374392e-06,
-1.3360134446642706e-08, 8.693561802236366e-10, 117723326371.03189,
192873830899.82352]
[-9.212545260772544e-10, -1.290047843436073e-12, -1.8356995493902235e-13,
4.6154548476823616e-11, -1.8724359625458014e-10, -
2.1913589342035502e-15, 9.358479354640953e-16, 1.9540146753875297e-05,
-1.8050933870374392e-06, -1.3360134446642706e-08, 8.693561802236366e-10,
117723326371.02731, 192873830899.82806]
[2.2152115305769157e-10, -1.6907719215642795e-12, -2.5108769063589337e-13,
4.9793760275117476e-11, -2.0780774158604122e-10, -
2.1593626664102876e-15, 8.836470142939426e-16, 2.0200374650352852e-05,
-1.7639524821935923e-06, -1.5013783998899997e-08, 8.77876424822685e-10,
170388218306.66492, 168925348515.4128]
[2.2152115305769157e-10, -1.6907719215642795e-12, -2.1051647732787472e-13,
4.9793760275117476e-11, -2.0780774158604122e-10, -
2.1790706433018085e-15, 8.836470142939426e-16, 2.0343533479720338e-05,
-1.7639524821935923e-06, -1.5091093694835327e-08, 8.771058818345121e-10,
191821821495.1242, 158798904598.69617]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -2.1593626664102876e-15,
8.836470142939426e-16, 2.0217203662255432e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.771058818345121e-10, 177069079234.4985,
163375067226.8736]
[2.213664545134999e-10, -1.2059133330572482e-12, -2.5108769063589337e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -2.1593626664102876e-15,
8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,
-1.508245699810314e-08, 8.771058818345121e-10, 197879714583.27084,
152444791757.7255]
[0.0, -1.223723210207519e-12, -2.1051647732787472e-13,
4.971358693780409e-11, -1.7352085678160897e-10, -2.165433707987142e-15,
7.304553415989529e-16, 2.0047355685146273e-05, -1.7657604268720381e-06,
-1.4977385439375226e-08, 8.771058818345121e-10, 197945074606.02325,
153164597685.87036]
[2.2152115305769157e-10, -1.1984578022968498e-12, -2.5108769063589337e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15,
7.430575474541962e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,
-1.5202351660972107e-08, 8.771058818345121e-10, 111986329581.05826,
155849166742.8801]
[2.2133713135172913e-10, -1.2059133330572482e-12, -2.5107145183244764e-13,
5.011120217163613e-11, -1.724660990140153e-10, -2.1790706433018085e-15,
8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.771058818345121e-10, 187269085984.5673,
161472427331.15216]
[0.0, -1.223723210207519e-12, -2.094909506024221e-13, 5.011120217163613e-11,
-1.7677981323511262e-10, -2.145058695065051e-15, 7.430575474541962e-16,
2.0053347897812537e-05, -1.7639524821935923e-06, -
1.4682044872577598e-08, 8.728626586100963e-10, 152433850624.54852,
175966043507.07343]
[0.0, -1.223723210207519e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -2.1790706433018085e-15,
7.430575474541962e-16, 1.9918519209106862e-05, -1.7685796144533914e-06,
-1.4682044872577598e-08, 8.771058818345121e-10, 153535961138.3572,
184829802626.36642]
[2.2152115305769157e-10, -1.200937983572784e-12, -2.1065990049856794e-13,
5.011120217163613e-11, -1.7420072583381303e-10, -1.8426407940693324e-15,
7.454251311051652e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,
-1.508245699810314e-08, 8.771058818345121e-10, 92670242378.77588,
189416231139.84406]
[0.0, -1.2207456906260254e-12, -2.1065990049856794e-13,
4.9793760275117476e-11, -2.0772853669541976e-10, -
1.8426407940693324e-15, 7.430575474541962e-16, 1.9867416915370552e-05,
-1.7639524821935923e-06, -1.5091093694835327e-08, 8.728626586100963e-10,
160631139543.06137, 122019730569.7476]
[2.2152115305769157e-10, -1.1984578022968498e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7677981323511262e-10, -1.857281675942834e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.5202351660972107e-08, 8.771058818345121e-10, 153487531028.94116,
128597452665.91768]
[0.0, -1.2031098015567e-12, -2.5161591646068603e-13, 5.011120217163613e-11,
-1.7849498396021264e-10, -1.82610373802557e-15, 7.430575474541962e-16,
1.981538293869461e-05, -1.7639524821935923e-06, -1.5202351660972107e-08,
8.771058818345121e-10, 142632578694.80914, 130195065921.46504]
[2.2152115305769157e-10, -1.2003583976149596e-12, -2.5108769063589337e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15,
7.430575474541962e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,
-1.517941226634992e-08, 8.771058818345121e-10, 107861636975.64659,
161449199082.99103]
[0.0, -1.223723210207519e-12, -2.094909506024221e-13, 5.011120217163613e-11,
-1.7677981323511262e-10, -1.857281675942834e-15, 7.430575474541962e-16,
1.981538293869461e-05, -1.769936435419886e-06, -1.4682044872577598e-08,
8.728626586100963e-10, 100156348461.68698, 161778485371.36353]
[0.0, -1.1984578022968498e-12, -2.1065990049856794e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15,
7.430575474541962e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,
-1.5091093694835327e-08, 8.760544278271184e-10, 100072993312.46272,
171303112707.4717]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13,
4.9793760275117476e-11, -1.7352085678160897e-10, -
1.8261648304268637e-15, 8.836470142939426e-16, 2.0343533479720338e-05,
-1.7639524821935923e-06, -1.5202351660972107e-08, 8.771058818345121e-10,
97245352689.07887, 174341101475.58182]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
4.9675085987122204e-11, -1.7558160485557454e-10, -
1.8426407940693324e-15, 8.836470142939426e-16, 2.022642042947946e-05, -
1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10,
92503635735.71886, 182996786041.40976]
[0.0, -1.223723210207519e-12, -2.094909506024221e-13, 5.011120217163613e-11,
-1.7677981323511262e-10, -2.1612081417375267e-15, 7.470344646267989e-16,
2.0053347897812537e-05, -1.7639524821935923e-06, -
1.4645406166689473e-08, 8.730660207999707e-10, 148185335900.70355,
185221791801.95062]
[2.2111462065028517e-10, -1.2207456906260254e-12, -2.1065990049856794e-13,
5.056589741460715e-11, -1.7420072583381303e-10, -1.8426407940693324e-15,
7.454251311051652e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,
-1.508245699810314e-08, 8.771058818345121e-10, 92670242378.76936,
189416231139.85312]
[2.2152115305769157e-10, -1.2207456906260254e-12, -2.1065990049856794e-13,
5.011120217163613e-11, -1.7420072583381303e-10, -1.8276902524925885e-15,
8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,
-1.5091093694835327e-08, 8.771058818345121e-10, 90666406593.2125,
190153350507.14474]
[2.2152115305769157e-10, -1.2049195466583994e-12, -2.1065990049856794e-13,
4.98075339514226e-11, -1.7558160485557454e-10, -1.8426407940693324e-15,
7.454251311051652e-16, 2.0095046248399238e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.771058818345121e-10, 89706134652.28279,
197738317572.1617]
[0.0, -1.2031098015567e-12, -2.1065990049856794e-13, 5.0102593857564815e-11,
-1.7352085678160897e-10, -1.819039898810471e-15, 7.460417812765263e-16,
2.0200374650352852e-05, -1.7758673160173464e-06, -
1.5202351660972107e-08, 8.760544278271184e-10, 160476853944.9334,
119035825863.27417]
[2.2152115305769157e-10, -1.2031098015567e-12, -2.5161591646068603e-13,
4.9793760275117476e-11, -1.7849498396021264e-10, -1.82610373802557e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.5344868185414675e-08, 8.771058818345121e-10, 180743589801.84604,
120144468135.82727]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
4.947687927376915e-11, -1.7558160485557454e-10, -1.8426407940693324e-15,
8.836470142939426e-16, 2.04140411384885e-05, -1.7639524821935923e-06, -
1.5078308038358913e-08, 8.683463468773267e-10, 146622662638.346,
120359956158.03543]
[0.0, -1.1984578022968498e-12, -2.094909506024221e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.857281675942834e-15,
7.430575474541962e-16, 2.0200374650352852e-05, -1.7813149517985466e-06,
-1.5091093694835327e-08, 8.760544278271184e-10, 171477577754.58575,
120995758664.39177]
[2.2152115305769157e-10, -1.1984578022968498e-12, -2.5108769063589337e-13,
4.9967768219433575e-11, -1.7352085678160897e-10, -
1.8426407940693324e-15, 7.430575474541962e-16, 2.0200374650352852e-05,
-1.7639524821935923e-06, -1.5091093694835327e-08, 8.703632209100975e-10,
151029089477.88403, 121221447183.73479]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,
7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,
-1.520980077906525e-08, 8.721578527250325e-10, 139696562348.4149,
123962248783.03809]
[2.233355889138985e-10, -1.2031098015567e-12, -2.5108769063589337e-13,
5.011120217163613e-11, -1.7849498396021264e-10, -1.8426407940693324e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.5202351660972107e-08, 8.771058818345121e-10, 148301377250.4212,
129257349906.46594]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,
7.448076765658434e-16, 2.0200374650352852e-05, -1.7728642137544318e-06,
-1.517941226634992e-08, 8.771058818345121e-10, 131981382341.97574,
129372470770.49553]
[0.0, -1.2031098015567e-12, -2.088572649745598e-13, 5.011120217163613e-11,
-1.7849498396021264e-10, -1.82610373802557e-15, 8.836470142939426e-16,
1.981538293869461e-05, -1.7639524821935923e-06, -1.5202351660972107e-08,
8.771058818345121e-10, 142632578694.80914, 130195065921.46504]
[-5.2595470648843136e-09, -1.2003583976149596e-12, -2.5161591646068603e-13,
5.011120217163613e-11, -1.7461898455625076e-10, -1.8426407940693324e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.517941226634992e-08, 8.771058818345121e-10, 142718091682.67987,
132029509845.4832]
[2.2257852388875064e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
4.9793760275117476e-11, -1.7380412465809723e-10, -1.841021101878205e-15,
8.836470142939426e-16, 2.022642042947946e-05, -1.7639524821935923e-06,
-1.5202351660972107e-08, 8.750599822793858e-10, 126150709659.35735,
137741348069.72827]
[0.0, -1.2344709098355012e-12, -2.090479539659853e-13,
5.011120217163613e-11, -1.7849498396021264e-10, -1.857281675942834e-15,
7.485411998460075e-16, 1.981538293869461e-05, -1.769936435419886e-06, -
1.4682044872577598e-08, 8.711551918674385e-10, 114088676894.18327,
143862344272.2216]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.750599822793858e-10, 119621740814.33159,
143868003797.30536]
[2.2152115305769157e-10, -1.2003583976149596e-12, -2.088572649745598e-13,
4.995108013618423e-11, -1.7207960562590789e-10, -1.8426407940693324e-15,
8.836470142939426e-16, 2.015341505664753e-05, -1.7639524821935923e-06,
-1.5202351660972107e-08, 8.771058818345121e-10, 115848531243.76457,
151496866956.06183]
[7.878840270455085e-09, -1.2071709641632366e-12, -2.088572649745598e-13,
5.022894055850661e-11, -1.7352085678160897e-10, -1.8610445297760222e-15,
7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,
-1.5202351660972107e-08, 8.760544278271184e-10, 113456911424.16617,
154679332976.7693]
[0.0, -1.2031098015567e-12, -2.5161591646068603e-13, 5.011120217163613e-11,
-1.7352085678160897e-10, -1.82610373802557e-15, 7.430575474541962e-16,
1.983133919352831e-05, -1.7639524821935923e-06, -1.500055802123721e-08,
8.760544278271184e-10, 107979663117.77498, 158587944243.3901]
[2.2152115305769157e-10, -1.2003583976149596e-12, -2.5108769063589337e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15,
7.451496753853957e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,
-1.517941226634992e-08, 8.771058818345121e-10, 107861636975.64659,
161449199082.99103]
[2.1977210438689425e-10, -1.2003583976149596e-12, -2.5108769063589337e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15,
8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,
-1.517941226634992e-08, 8.771058818345121e-10, 107861636975.64659,
161449199082.99103]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.099781497267347e-13,
4.9793760275117476e-11, -1.7558160485557454e-10, -
1.8426407940693324e-15, 8.836470142939426e-16, 2.0299458575301996e-05,
-1.756844278469525e-06, -1.5202351660972107e-08, 8.750599822793858e-10,
101036412554.48618, 178952195751.12357]
[0.0, -1.2071709641632366e-12, -2.088572649745598e-13,
4.9793760275117476e-11, -1.7352085678160897e-10, -
1.8426407940693324e-15, 8.836470142939426e-16, 2.0200374650352852e-05,
-1.7587739009571313e-06, -1.5202351660972107e-08, 8.768692858683927e-10,
101115281125.52821, 181312381109.07834]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
4.9675085987122204e-11, -1.7558160485557454e-10, -
1.8426407940693324e-15, 8.836470142939426e-16, 2.022642042947946e-05, -
1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10,
92503635735.71886, 182996786041.40976]
[2.2295275331941093e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
4.9675085987122204e-11, -1.7558160485557454e-10, -
1.8426407940693324e-15, 8.836470142939426e-16, 2.022642042947946e-05, -
1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10,
92503635735.71886, 182996786041.40976]
[0.0, -1.223723210207519e-12, -2.1065990049856794e-13,
5.011120217163613e-11, -1.7707453284878416e-10, -1.866210682668369e-15,
7.430575474541962e-16, 1.9722774245768875e-05, -1.769936435419886e-06,
-1.4682044872577598e-08, 8.760544278271184e-10, 88317753591.74515,
193403737351.61066]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.5161591646068603e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,
7.430575474541962e-16, 2.0343533479720338e-05, -1.7493239251088378e-06,
-1.5085870105283375e-08, 8.701394499644777e-10, 90763281590.1167,
199093039398.6542]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.857281675942834e-15,
7.387655049943961e-16, 1.981538293869461e-05, -1.769936435419886e-06, -
1.4563889985865401e-08, 8.644597543611974e-10, 157634872361.7637,
120593643708.66519]
[2.2257852388875064e-10, -1.2070230966272908e-12, -2.1051647732787472e-13,
5.027931250826744e-11, -1.755220169767042e-10, -1.810973414699955e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.5202351660972107e-08, 8.750599822793858e-10, 159354716917.0895,
121269083493.68436]
[0.0, -1.2031098015567e-12, -2.090479539659853e-13, 5.011120217163613e-11,
-1.7352085678160897e-10, -1.8577367523496564e-15, 7.430575474541962e-16,
1.9814643005749893e-05, -1.7639524821935923e-06, -1.500055802123721e-08,
8.711551918674385e-10, 168378423128.42877, 121439949900.90005]
[2.198369754018213e-10, -1.2071709641632366e-12, -2.088572649745598e-13,
5.011120217163613e-11, -1.7513929529124395e-10, -1.82610373802557e-15,
7.448076765658434e-16, 2.0042195789951223e-05, -1.7728642137544318e-06,
-1.5013783998899997e-08, 8.734593739302048e-10, 147068576327.25705,
122027384226.92]
[2.2257852388875064e-10, -1.2059133330572482e-12, -2.090479539659853e-13,
4.9793760275117476e-11, -1.7849498396021264e-10, -1.841021101878205e-15,
7.556782953802372e-16, 2.022642042947946e-05, -1.769936435419886e-06, -
1.5202351660972107e-08, 8.750599822793858e-10, 149871632956.7388,
122750625888.09634]
[2.2152115305769157e-10, -1.2344709098355012e-12, -2.1013781830316155e-13,
5.011120217163613e-11, -1.7343044399460855e-10, -1.857281675942834e-15,
7.430575474541962e-16, 2.0343113714890682e-05, -1.7639524821935923e-06,
-1.520980077906525e-08, 8.721578527250325e-10, 151082881535.07886,
122935226427.98189]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,
7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,
-1.520980077906525e-08, 8.721578527250325e-10, 139696562348.4149,
123962248783.03809]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7380412465809723e-10, -1.82610373802557e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.735477478457909e-10, 133427418313.38545,
131702579310.68652]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.116126459765591e-13,
5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.517941226634992e-08, 8.771058818345121e-10, 137250169853.3863,
133211383937.09729]
[-9.575357968769427e-09, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7558160485557454e-10, -1.842789515995345e-15,
7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.750599822793858e-10, 123172560507.99263,
143105235055.608]
[2.2282051950271776e-10, -1.2030336482043862e-12, -2.1171136727356646e-13,
5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.750599822793858e-10, 119639757591.69511,
143860615432.91934]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.0388416851351e-11, -1.7478774930028702e-10, -1.82610373802557e-15,
7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.750599822793858e-10, 118202331336.15999,
145092770865.8836]
[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.021867485100539e-11, -1.7558160485557454e-10, -1.82610373802557e-15,
7.503695295044637e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.760544278271184e-10, 110377805870.9487,
155477031697.76462]
[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7281503437685213e-10, -1.82610373802557e-15,
8.836470142939426e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.500055802123721e-08, 8.760544278271184e-10, 107979663117.63412,
158587944243.89005]
[0.0, -1.2031098015567e-12, -2.522559178506789e-13, 5.003845283040925e-11,
-1.7352085678160897e-10, -1.82610373802557e-15, 7.430575474541962e-16,
1.9950498914670327e-05, -1.7639524821935923e-06, -1.500055802123721e-08,
8.760544278271184e-10, 99132279868.34593, 171185572417.85907]
[2.2257852388875064e-10, -1.2031098015567e-12, -2.5161591646068603e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.82610373802557e-15,
8.811799226535086e-16, 2.022642042947946e-05, -1.7639524821935923e-06,
-1.508244156181531e-08, 8.760544278271184e-10, 93130287119.72461,
180430143233.58368]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.088572649745598e-13,
5.011120217163613e-11, -1.7558160485557454e-10, -1.8265258253512156e-15,
7.430575474541962e-16, 2.0240988631290876e-05, -1.7728642137544318e-06,
-1.5013783998899997e-08, 8.784555835692595e-10, 86927194519.4496,
183449646874.34637]
[7.863427642383715e-09, -1.2031098015567e-12, -2.5161591646068603e-13,
4.9793760275117476e-11, -1.7380412465809723e-10, -1.82610373802557e-15,
7.430575474541962e-16, 2.022642042947946e-05, -1.7639524821935923e-06,
-1.500055802123721e-08, 8.750599822793858e-10, 87084714365.5935,
191076754457.2524]
[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7849498396021264e-10, -1.857281675942834e-15,
7.485411998460075e-16, 1.9750639916729973e-05, -1.769936435419886e-06,
-1.5013783998899997e-08, 8.825388912755251e-10, 96474604776.96465,
194275355409.06598]
[0.0, -1.2031098015567e-12, -2.5161591646068603e-13, 4.9793760275117476e-11,
-1.7380412465809723e-10, -1.82610373802557e-15, 7.430575474541962e-16,
2.022642042947946e-05, -1.7639524821935923e-06, -1.503739318330452e-08,
8.760544278271184e-10, 86984982238.58047, 194967876303.00238]
[1.5200576895768509e-09, -1.2059133330572482e-12, -2.0752021923147355e-13,
5.011120217163613e-11, -1.7849498396021264e-10, -1.82610373802557e-15,
7.479116563110691e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.4682044872577598e-08, 8.724478065416361e-10, 82147238279.93182,
198112832281.90573]
[2.223825616669009e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7326944854292794e-10, -1.8261648304268637e-15,
7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,
-1.534155691698868e-08, 8.721578527250325e-10, 175522473614.0067,
115813093887.0164]
[2.2296631466270538e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.0388416851351e-11, -1.7478774930028702e-10, -1.82610373802557e-15,
7.430575474541962e-16, 2.0431066002844864e-05, -1.7780476812466564e-06,
-1.5013783998899997e-08, 8.717160979795123e-10, 146919548917.9041,
118508631814.89664]
[2.2152115305769157e-10, -1.2131115225525171e-12, -2.088572649745598e-13,
5.011120217163613e-11, -1.7478774930028702e-10, -1.82610373802557e-15,
7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.529126273308479e-08, 8.750599822793858e-10, 189141514324.11395,
119478476003.54858]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1171136727356646e-13,
5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.515944456372276e-08, 8.735477478457909e-10, 171393648132.89902,
119746195767.88297]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.0388416851351e-11, -1.7478774930028702e-10, -1.82610373802557e-15,
7.503695295044637e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.680779846505464e-10, 198413310387.34686,
120002114057.9749]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,
7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,
-1.520980077906525e-08, 8.721578527250325e-10, 139696562348.4149,
123962248783.03809]
[2.2152115305769157e-10, -1.1981340041661674e-12, -2.0952905567462806e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,
7.397318554179349e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.721578527250325e-10, 146191133033.73245,
124495463707.0261]
[2.220169404817274e-10, -1.2059133330572482e-12, -2.0840667223230766e-13,
5.0388416851351e-11, -1.7352085678160897e-10, -1.82610373802557e-15,
7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.535159731564839e-08, 8.794413360449789e-10, 153568856127.85236,
127226107362.62663]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7380412465809723e-10, -1.82610373802557e-15,
7.476241521935537e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.504298228349246e-08, 8.735477478457909e-10, 140382068840.41766,
128048566261.66084]
[-9.575357968769427e-09, -1.2140137633227375e-12, -2.088572649745598e-13,
5.011120217163613e-11, -1.747166095423015e-10, -1.842789515995345e-15,
7.430575474541962e-16, 2.0343533479720338e-05, -1.761484506217259e-06,
-1.520980077906525e-08, 8.721578527250325e-10, 135600496522.7375,
129146670219.88675]
[-9.575357968769427e-09, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15,
7.449634745732176e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.735477478457909e-10, 131821303340.10287,
132556338910.10567]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.0382265280257245e-11, -1.743336316696023e-10, -1.813766783798406e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.735477478457909e-10, 129406444985.873,
132653030892.18918]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7380412465809723e-10, -1.82610373802557e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7480334166671461e-06,
-1.520980077906525e-08, 8.721578527250325e-10, 133865099427.32999,
140436120253.29218]
[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7558160485557454e-10, -1.842789515995345e-15,
7.503695295044637e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.750599822793858e-10, 123172560507.99377,
143105235055.60883]
[-9.575357968769427e-09, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.750599822793858e-10, 119639757591.69417,
143860615432.91846]
[2.2282051950271776e-10, -1.2059133330572482e-12, -2.1171136727356646e-13,
5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.750599822793858e-10, 119621740814.33159,
143868003797.30536]
[-9.575357968769427e-09, -1.2028279049571785e-12, -2.1051647732787472e-13,
5.039644867967898e-11, -1.7558160485557454e-10, -1.842789515995345e-15,
7.430575474541962e-16, 1.9863936167468564e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.749223081325664e-10, 121395913545.80966,
144269444777.14786]
[2.2282051950271776e-10, -1.2030336482043862e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15,
7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.750599822793858e-10, 118220156709.2957,
145085114899.6645]
[2.2282051950271776e-10, -1.2030336482043862e-12, -2.1171136727356646e-13,
5.011120217163613e-11, -1.7471650977559177e-10, -1.8261648304268637e-15,
7.416691902768309e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.750599822793858e-10, 118220156709.04602,
145085114900.12366]
[2.2082942462171206e-10, -1.2071709641632366e-12, -2.0913778067377877e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,
7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,
-1.5074975460776788e-08, 8.721578527250325e-10, 109968109293.02217,
145590447784.79443]
[2.22213071071529e-10, -1.2059133330572482e-12, -2.1085309656936224e-13,
5.021867485100539e-11, -1.7558160485557454e-10, -1.8261648304268637e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.760267738096764e-10, 111899934222.58044,
153694065180.84283]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.0866854154642685e-13,
5.011120217163613e-11, -1.766361848796505e-10, -1.8339694239958517e-15,
7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.760544278271184e-10, 112511385038.11157,
154263245256.49524]
[3.868816176815073e-09, -1.2030336482043862e-12, -2.1171136727356646e-13,
5.021867485100539e-11, -1.7558160485557454e-10, -1.82610373802557e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.4920809345224143e-08, 8.750599822793858e-10, 102250033424.31876,
164710456294.5225]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7478774930028702e-10, -1.82610373802557e-15,
7.452586179271996e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,
-1.4975512206722303e-08, 8.721578527250325e-10, 92516509687.73035,
170174200265.44513]
| one=[7.236287049225701e-06, -1.445911565527231e-12, -1.7498772740084537e-13, 5.109944355076077e-11, -2.5430545472048434e-10, -1.1709514644876058e-15, 3.210132219509301e-16, 2.502027767038304e-05, -1.975229899156637e-06, -1.4769695480936238e-08, 8.945619840357268e-10, 135323228000.64511, 130464457208.5385]
two=[6.101651991514008e-06, -1.2764740103418866e-12, -1.9703439809858206e-13, 4.396430723625485e-11, -7.256876412950873e-11, -1.0739249647595844e-15, 3.658727722774004e-16, 2.9622074287767617e-05, -1.9615179204309246e-06, -1.518516920005905e-08, 8.601004856702239e-10, 194360719320.3122, 75684271432.82758]
three=[6.4442734160126695e-06, -1.2463732938819767e-12, -1.7912928652160854e-13, 3.990379556815055e-11, -7.256876412950873e-11, -1.128505986956859e-15, 3.855466000081844e-16, 2.7105518268805634e-05, -1.918022677712299e-06, -1.648586510957147e-08, 8.952907812465134e-10, 40874176708.45886, 129961018217.7445]
four=[5.591985036569838e-06, -1.5732644861037622e-12, -1.2586540738798186e-13, 5.508993685740796e-11, -2.345347836605763e-10, -2.1583737575101563e-15, 3.315525502908504e-16, 2.240369111953624e-05, -1.8808495402864136e-06, -1.5154818034574072e-08, 9.134128217572173e-10, 95538034865.65512, 192689393537.75766]
five=[5.9877501684316964e-06, -1.4725222964411265e-12, -2.0184675219747084e-13, 4.503520441436847e-11, -2.195719309752964e-10, -1.1996862422718706e-15, 3.172649531291829e-16, 2.235294071412983e-05, -1.7673862518012629e-06, -1.593810591566234e-08, 8.495479067416047e-10, 172629547544.72174, 121012464101.10771]
six = [6.525636151737385e-10, -1.5516831882387681e-12, -1.7065883936338436e-13, 4.6265959327559024e-11, -2.669670220497726e-10, -1.0739249647595844e-15, 9.085513864943156e-16, 2.5963751617497686e-05, -1.9757021060346726e-06, -1.5031696163247858e-08, 8.945619840357268e-10, 99871865434.22476, 123933224114.80229]
first1_gen= [[6.417695307686038e-06, -1.2416886913890308e-12, -1.791907685050265e-13, 3.983180616117193e-11, -7.243488055496258e-11, -1.1211433897576025e-15, 3.855466000081844e-16, 2.7255618460061466e-05, -1.917823676019374e-06, -1.6515339421288782e-08, 9.011563904603084e-10, 37866240406.859344, 251532289608.81], [5.974092884160685e-06, -1.4591405170404072e-12, -2.0184675219747084e-13, 4.3821744446480515e-11, -7.22093644433135e-11, -1.0712173220027044e-15, 3.65758224365464e-16, 2.235294071412983e-05, -1.763797302814154e-06, -1.6059311052756668e-08, 8.601004856702239e-10, 50907349656.8246, 117645129547.73723], [7.171513003462397e-06, -1.4334443716578728e-12, -1.749514610735409e-13, 5.509823004788858e-11, -2.5310572250093563e-10, -1.1729621402736547e-15, 3.321162280251396e-16, 2.4812886502853343e-05, -1.964119169077712e-06, -1.4799846596325615e-08, 8.965548334484032e-10, 85071583311.774, 128667385131.30013], [7.3000149385339486e-06, -1.4508582334938624e-12, -1.7446896418754742e-13, 5.109944355076077e-11, -2.5448794058714256e-10, -1.1658376910672744e-15, 3.1827015830354867e-16, 2.502027767038304e-05, -1.9664311146400523e-06, -1.4730561693079958e-08, 8.945619840357268e-10, 88113858040.47986, 127558862768.52084], [5.581899283069486e-06, -1.5683042319109065e-12, -1.2586540738798186e-13, 5.535493146365402e-11, -2.359264703422783e-10, -2.1583737575101563e-15, 3.2921934547988314e-16, 2.2287538734129395e-05, -1.8740196054647742e-06, -1.5117323048065992e-08, 9.114608510796109e-10, 90926368846.81926, 202187413440.1054], [7.283321725975412e-06, -1.4356567410151954e-12, -1.7340660013452496e-13, 5.090884822547887e-11, -2.5483963758954753e-10, -1.139281753854116e-15, 3.1970242364315826e-16, 2.7105518268805634e-05, -1.963160298901409e-06, -1.4681586301228543e-08, 8.916460477308206e-10, 142505061534.36484, 476063714570.38367], [5.591985036569838e-06, -1.582675728169255e-12, -1.7359285477580936e-13, 5.508993685740796e-11, -2.5320893657294154e-10, -2.1583737575101563e-15, 
3.210132219509301e-16, 2.511654073479438e-05, -1.965555797894771e-06, -1.5140087108671845e-08, 9.214909160927855e-10, 154168790181.56195, 151975095946.00134], [6.4442734160126695e-06, -1.5732644861037622e-12, -1.8036634758606428e-13, 5.508993685740796e-11, -7.27534017567909e-11, -2.1583737575101563e-15, 3.306758579127667e-16, 2.2271668826613973e-05, -1.8701423073554431e-06, -1.501078224172373e-08, 8.952907812465134e-10, 267883353895.00665, 158759045786.36343], [6.460391520361948e-06, -1.2647094709156108e-12, -1.7971415732486973e-13, 4.396430723625485e-11, -7.247266456377939e-11, -1.1373744765683215e-15, 3.658727722774004e-16, 2.7105518268805634e-05, -1.9663482803776534e-06, -1.6397993463300374e-08, 8.923803313149724e-10, 349965962553.9084, 297837273933.3269], [5.6272383047081095e-06, -1.5732644861037622e-12, -1.2571170147507106e-13, 5.534697362808701e-11, -2.3610413258218975e-10, -1.1709514644876058e-15, 3.2295817320330796e-16, 2.2314117324425535e-05, -1.8663649176622442e-06, -1.4769695480936238e-08, 9.134128217572173e-10, 393807734620.02893, 1450122303072.2456], [6.437914022666636e-06, -1.2546731037733632e-12, -1.7844406460041829e-13, 5.488975389250315e-11, -7.259445338393382e-11, -2.1597092009682793e-15, 3.3041861616205316e-16, 2.240369111953624e-05, -1.876360375320595e-06, -1.648586510957147e-08, 9.134128217572173e-10, 630890128752.3734, 431834854178.85406], [6.046575120541287e-06, -1.2764740103418866e-12, -1.746683186012092e-13, 5.109944355076077e-11, -2.520608616913497e-10, -1.0704525109919603e-15, 3.6772692838424905e-16, 2.971296945414015e-05, -1.951293357817624e-06, -1.4769695480936238e-08, 8.939102135383639e-10, 871857905030.9667, 2328286443290.7437], [6.051000675950963e-06, -1.2846825520511646e-12, -1.268060597488819e-13, 5.490952472465525e-11, -2.3244121922778247e-10, -2.1424540029363198e-15, 3.673980081076506e-16, 2.961326937497751e-05, -1.895367635724618e-06, -1.5034205062876655e-08, 9.16195585945909e-10, 1374938673042.5493, 4524615824537.332], 
[5.6149092148265474e-06, -1.4639678768975506e-12, -1.253161090730697e-13, 4.481233479664715e-11, -2.335516269047763e-10, -2.1416544930348844e-15, 3.3108330528832777e-16, 2.22837679272578e-05, -1.8681878215606722e-06, -1.528899727808779e-08, 8.573199342562181e-10, 1914602582873.603, 2013877892656.268], [6.101651991514008e-06, -1.5833077943313046e-12, -1.9703439809858206e-13, 5.500949944067544e-11, -7.256876412950873e-11, -1.0739249647595844e-15, 3.658727722774004e-16, 2.970517711660123e-05, -1.8738366196528042e-06, -1.522166132952199e-08, 9.123763139194573e-10, 3105022967535.493, 7589715261899.736], [7.169307360099383e-06, -1.475336624504327e-12, -2.0167346748799746e-13, 4.53859215469466e-11, -2.1795530264429259e-10, -1.209364174087727e-15, 3.179525403817121e-16, 2.248948490803903e-05, -1.9732992714201345e-06, -1.4769695480936238e-08, 8.472670825115021e-10, 3105580314530.341, 4622017117439.275]]
second1_gen= [[6.473615077297489e-06, -1.2416886913890308e-12, -1.7473505716030156e-13, 3.966285637236728e-11, -7.243488055496258e-11, -1.1645955168783485e-15, 3.1918479761370934e-16, 2.7255618460061466e-05, -1.912188850787629e-06, -1.6430064111592607e-08, 8.970550453733459e-10, 35685411688.23251, 231044368946.34586], [6.393923513974502e-06, -1.2418411778899226e-12, -1.7798884315456173e-13, 3.983180616117193e-11, -7.243742739542879e-11, -1.128236668058653e-15, 3.855466000081844e-16, 2.7200371659468664e-05, -1.9285560276423494e-06, -1.636514926725132e-08, 9.071692193685023e-10, 57865021002.9106, 360571654391.1672], [7.230454358781939e-06, -1.423600316370741e-12, -1.7526876652912844e-13, 5.484412599476033e-11, -7.222102668803471e-11, -1.1795054510279537e-15, 3.642469974043324e-16, 2.4721354631465055e-05, -1.7738362153245365e-06, -1.6042437181983083e-08, 8.601004856702239e-10, 60788722272.11295, 440230270157.01904], [6.435449388867622e-06, -1.2416886913890308e-12, -1.807074860305897e-13, 5.4624696474782334e-11, -7.299561923303083e-11, -1.1155657493946243e-15, 3.855466000081844e-16, 2.4639345261867096e-05, -1.92912357850029e-06, -1.4800406168095671e-08, 9.011563904603084e-10, 90541420172.20418, 503189560104.03455], [6.417695307686038e-06, -1.2339817339229541e-12, -1.7924803979756243e-13, 5.5902899343682586e-11, -7.217875877484109e-11, -1.120826019773443e-15, 3.8364837768074985e-16, 2.2074405673546407e-05, -1.904212437644655e-06, -1.509791791618086e-08, 8.960324081400173e-10, 91138056935.866, 156256693553.4698], [7.235432436183002e-06, -1.444519147741974e-12, -1.7273464723057338e-13, 5.517809418856912e-11, -2.5310572250093563e-10, -1.1658376910672744e-15, 3.3048095015500005e-16, 2.4812886502853343e-05, -1.964119169077712e-06, -1.4777953862585708e-08, 8.945619840357268e-10, 98015149423.40909, 125389712442.99564], [6.382295596647026e-06, -1.5683042319109065e-12, -1.271182130914441e-13, 3.9709881372590666e-11, -2.3411267641257417e-10, -1.1298867172210502e-15, 
3.273827033054119e-16, 2.71828464025051e-05, -1.86879521538149e-06, -1.6615697675064263e-08, 8.938783145101195e-10, 108132988244.55444, 600937075323.7117], [7.3000149385339486e-06, -1.4649443926376347e-12, -1.740251215699652e-13, 5.5040821609381877e-11, -2.5448794058714256e-10, -1.1729621402736547e-15, 3.321162280251396e-16, 2.492985953688089e-05, -1.95260325957056e-06, -1.4879723555310096e-08, 8.886352647229086e-10, 118040637271.1665, 119637343045.177], [5.595995170722691e-06, -1.5775800984465949e-12, -1.2531378473105398e-13, 5.5737478708430025e-11, -2.359264703422783e-10, -2.141274549861917e-15, 3.2670998922499434e-16, 2.2375793269713536e-05, -1.8912926681237391e-06, -1.5244852134327217e-08, 9.114608510796109e-10, 193706809398.06177, 145429438824.56485], [6.417695307686038e-06, -1.2390179448049186e-12, -2.0184675219747084e-13, 3.996761820973954e-11, -7.30077645678233e-11, -1.0733818300903034e-15, 3.6521589033170274e-16, 2.7380751148035565e-05, -1.901967051200766e-06, -1.6531476837456585e-08, 8.659462633971021e-10, 291714681643.4888, 219358626907.00577], [7.269087955666727e-06, -1.4398732474157131e-12, -1.745771866624504e-13, 5.5370858680922966e-11, -2.5212090845365535e-10, -1.1547640084684547e-15, 3.1826570991307717e-16, 2.4799848604697875e-05, -1.9802449310363633e-06, -1.4932011828861567e-08, 8.916225586049855e-10, 291814703950.912, 265497905413.09335], [5.9575073045674184e-06, -1.4591405170404072e-12, -1.7515686156504634e-13, 5.071091939607585e-11, -7.251972289899038e-11, -1.172163868062928e-15, 3.2003450301868095e-16, 2.236559796692659e-05, -1.964000257622103e-06, -1.461000086726312e-08, 8.924031273079037e-10, 441351014961.37744, 513124822279.29816], [7.118156558728498e-06, -1.4213484509322684e-12, -1.7594919642528414e-13, 5.502275447498347e-11, -2.359264703422783e-10, -2.146866081339977e-15, 3.3020925008057705e-16, 2.48800717576552e-05, -1.8740196054647742e-06, -1.4681760148497176e-08, 9.194043116452982e-10, 480601682287.2741, 2166349399584.3464], 
[6.435379358296727e-06, -1.449279705541305e-12, -1.791907685050265e-13, 4.013727926643595e-11, -2.561628978573389e-10, -1.1658376910672744e-15, 3.1916771926698506e-16, 2.706170262409588e-05, -1.9747493962051268e-06, -1.6529378614728517e-08, 8.945619840357268e-10, 480690251628.6576, 455217335045.56067], [7.273965294010602e-06, -1.4508582334938624e-12, -1.2640181562203036e-13, 5.1256890020829106e-11, -2.347526011960417e-10, -1.1573810914157072e-15, 3.313802025100971e-16, 2.5248996663846427e-05, -1.8890715225154116e-06, -1.4830513494585048e-08, 9.024560997678787e-10, 513022508534.7746, 1741282758378.8208], [7.171513003462397e-06, -1.4334443716578728e-12, -1.258745292341622e-13, 5.562080442549079e-11, -2.5310572250093563e-10, -2.177369178159867e-15, 3.269368594462498e-16, 2.5052523082312023e-05, -1.9593459141604013e-06, -1.4665768665138152e-08, 8.920318373308913e-10, 559251400205.1976, 313686240874.89294]]
third1_gen= [[6.428534934734018e-06, -1.2348251959432863e-12, -1.767418187059626e-13, 3.954772029523348e-11, -7.292041892016764e-11, -1.1216042005993232e-15, 3.8462974452187554e-16, 2.732021800880368e-05, -1.912188850787629e-06, -1.6465861899672315e-08, 8.953663972360121e-10, 35914970214.05617, 208658422545.5101], [6.449609175276781e-06, -1.2355212093166627e-12, -1.7892996139776768e-13, 3.978108705811362e-11, -7.260470610345522e-11, -1.128236668058653e-15, 3.8262320992212617e-16, 2.699492740612888e-05, -1.9285560276423494e-06, -1.6459368248390354e-08, 9.071692193685023e-10, 37667755025.66565, 260591174431.75333], [6.393923513974502e-06, -1.2329510175057565e-12, -1.7878217157136278e-13, 4.009121098742944e-11, -7.243742739542879e-11, -1.119215448440791e-15, 3.855466000081844e-16, 2.7170577516281446e-05, -1.946180426984478e-06, -1.6356719885598995e-08, 9.071692193685023e-10, 41822657912.61174, 187148082730.9518], [6.393923513974502e-06, -1.2418411778899226e-12, -1.7764720872488035e-13, 5.5839617178535e-11, -7.217875877484109e-11, -1.1285205693786809e-15, 3.8241419562917457e-16, 2.727322263242888e-05, -1.9285560276423494e-06, -1.6299569164241514e-08, 8.954758973117168e-10, 45658359101.85514, 143455126000.2526], [6.412748625088242e-06, -1.2418411778899226e-12, -1.7788474362949836e-13, 3.98996561577576e-11, -7.290920324596793e-11, -1.1258830930124426e-15, 3.8322709394594156e-16, 2.6978084672522227e-05, -1.9285560276423494e-06, -1.6212095851483947e-08, 9.06465374180439e-10, 61888825971.955795, 378668457219.4866], [7.2950079161541e-06, -1.423600316370741e-12, -1.8067111524974517e-13, 5.467528933636526e-11, -7.269174548770519e-11, -1.1131382577055909e-15, 3.642469974043324e-16, 2.442302310111588e-05, -1.9365154780516644e-06, -1.4736235919210341e-08, 9.02573445716291e-10, 72168008768.07632, 429565720321.34186], [7.277641363649251e-06, -1.4186237292635021e-12, -1.7672076654522444e-13, 5.4875348972838477e-11, -7.250728822785179e-11, -1.1805107762756462e-15, 
3.880180132520679e-16, 2.7230117388865188e-05, -1.79140018540739e-06, -1.6042437181983083e-08, 8.524740779894739e-10, 144497176198.74966, 733034177617.006], [6.435449388867622e-06, -1.2375432988348708e-12, -1.8114977137612309e-13, 3.9353291584632385e-11, -7.306938943468394e-11, -1.1645955168783485e-15, 3.887993677152085e-16, 2.4432920122355823e-05, -1.927081007099796e-06, -1.644170413651962e-08, 9.09149545755435e-10, 151124978488.96066, 169172823395.74277], [7.278147471012389e-06, -1.4279386093057266e-12, -1.7683419692117291e-13, 5.493758019518918e-11, -7.289146026177328e-11, -1.1733747472097884e-15, 3.675691109659462e-16, 2.4721354631465055e-05, -1.7638896999117907e-06, -1.588988736168235e-08, 8.632841256471107e-10, 202474467398.45615, 922092113586.5779], [7.177079530800026e-06, -1.234976832476029e-12, -1.7526876652912844e-13, 5.534254133122458e-11, -7.205830797649949e-11, -1.120826019773443e-15, 3.8364837768074985e-16, 2.2258192147086412e-05, -1.7878127478583311e-06, -1.620023857736605e-08, 8.601004856702239e-10, 213869103072.6637, 175609972725.89545], [6.350923506939188e-06, -1.2525603780194753e-12, -1.7993410193080307e-13, 5.465765498048408e-11, -7.243742739542879e-11, -1.1188147125437704e-15, 3.855466000081844e-16, 2.47790541156232e-05, -1.9163436765125797e-06, -1.4800406168095671e-08, 9.043461740243768e-10, 224990894591.97565, 940216435276.2135], [6.375685299492019e-06, -1.2470011129066444e-12, -1.7556981763399573e-13, 5.482994274294271e-11, -7.247391358991481e-11, -1.1737410455893592e-15, 3.8256427214483946e-16, 2.4747394888572957e-05, -1.921085601798487e-06, -1.655011267092608e-08, 9.011563904603084e-10, 242139334921.33466, 239644754200.97003], [6.474178960026375e-06, -1.436844524248817e-12, -1.766513283684079e-13, 3.940038642964773e-11, -7.181977887130175e-11, -1.1548751736666541e-15, 3.1745148598988346e-16, 2.707077658308786e-05, -1.92536072773705e-06, -1.6138736645669917e-08, 8.669699125562364e-10, 435950975348.6226, 363915964843.3034], 
[6.393923513974502e-06, -1.4269415936091027e-12, -1.7684911527276688e-13, 5.480211712359269e-11, -7.243742739542879e-11, -1.1795054510279537e-15, 3.8683254669914693e-16, 2.7200371659468664e-05, -1.925930700762681e-06, -1.643396668485197e-08, 8.601004856702239e-10, 840789439847.5613, 886246867017.2574], [6.5292806963971566e-06, -1.2521788644307235e-12, -1.752024719240228e-13, 5.432423395298522e-11, -7.243160061946103e-11, -1.1728842336075722e-15, 3.642469974043324e-16, 2.4721354631465055e-05, -1.9201275577069358e-06, -1.6042437181983083e-08, 8.613978338195112e-10, 1220087240914.9465, 1538404370735.8923], [7.222746286095911e-06, -1.4287928653696903e-12, -1.7798884315456173e-13, 5.47608522234827e-11, -7.177949793819456e-11, -1.1234835849356116e-15, 3.638627899273496e-16, 2.4725904181789833e-05, -1.7849753358990938e-06, -1.6004659818379623e-08, 9.095587982641099e-10, 1457214324700.6113, 3971854766728.4727]]
[1.5780628845471506e-10, -1.411490597458207e-12, -2.483949940281473e-13, 5.026488748046414e-11, -1.6612576871621329e-10, -1.6989844545344268e-15, 8.109443782655016e-16, 2.404048022255995e-05, -1.9859378185800262e-06, -1.6176901999289427e-08, 9.489903548622118e-10, 102704594939.3429, 145011267381.10236]
[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13, 4.6265959327559024e-11, -1.669670220497726e-10, -1.803564324024427e-15, 9.085513864943156e-16, 2.3963751617497686e-05, -1.9517021060346726e-06, -1.7031696163247858e-08, 9.514461873186105e-10, 165879895673.90985, 148817892429.6303]
[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13, 4.6265959327559024e-11, -1.669670220497726e-10, -1.7924226413310876e-15, 9.085513864943156e-16, 2.3963751617497686e-05, -1.9517021060346726e-06, -1.68878771600575e-08, 9.514461873186105e-10, 117267023779.58536, 138194745977.8172]
[6.483959591091273e-10, -1.5516831882387681e-12, -2.490649104258458e-13, 5.026488748046414e-11, -1.669670220497726e-10, -1.6989844545344268e-15, 8.109443782655016e-16, 2.3963751617497686e-05, -1.9859378185800262e-06, -1.6176901999289427e-08, 9.514461873186105e-10, 81279986793.6045, 148499957167.59894]
[6.525636151737385e-10, -1.3197261044307544e-12, -2.4458923117817936e-13, 4.6265959327559024e-11, -1.6585443429963996e-10, -1.802849923078712e-15, 9.085513864943156e-16, 2.3963751617497686e-05, -1.9517021060346726e-06, -1.68878771600575e-08, 9.514461873186105e-10, 121168243931.69568, 138376625633.08905]
[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13, 4.59768924730343e-11, -1.6588127033784183e-10, -1.7924226413310876e-15, 9.085513864943156e-16, 2.3963751617497686e-05, -1.9859378185800262e-06, -1.6176901999289427e-08, 9.503282761551985e-10, 127284942067.54468, 147143586736.12967]
[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13, 4.6265959327559024e-11, -1.669670220497726e-10, -1.803564324024427e-15, 8.4683341745183045e-16, 2.3963751617497686e-05, -1.9517021060346726e-06, -1.7031696163247858e-08, 9.514461873186105e-10, 165879895673.90985, 148817892429.6303]
[6.483959591091273e-10, -1.5516831882387681e-12, -2.477506624442777e-13, 5.026488748046414e-11, -1.669670220497726e-10, -1.7924226413310876e-15, 8.070333012129768e-16, 2.4138485475672502e-05, -1.9859378185800262e-06, -1.6108027319186075e-08, 9.514461873186105e-10, 78167992157.7952, 149819556305.94864]
[2.8389500911155237e-10, -1.3179669217824132e-12, -2.1290409882195637e-13, 5.0376537605765665e-11, -1.7763084077799175e-10, -1.8081388431942655e-15, 8.940150894056582e-16, 2.501288034169883e-05, -2.04721003e-06, -1.5842532923181598e-08, 9.632771875757591e-10, 108694336300.90585, 154375559012.27695]
[3.603083193105678e-11, -1.3197261044307544e-12, -2.213785963757499e-13, 4.581086934703742e-11, -1.6681614728164575e-10, -1.803564324024427e-15, 8.4683341745183045e-16, 2.4065016435368993e-05, -2.0711260096490455e-06, -1.7031696163247858e-08, 1.0052651438176042e-09, 98921398930.67514, 195080915978.15582]
[-2.0926038768787875e-10, -1.4706748741606338e-12, -2.3988654320236774e-13, 4.877026722101481e-11, -1.4519789238682426e-10, -1.8284483886533772e-15, 8.688144408462996e-16, 2.7398930354457147e-05, -1.8015495121292713e-06, -1.818410294118833e-08, 8.90965422552221e-10, 100727388654.51337, 143318140783.98648]
[-2.0926038768787875e-10, -1.4706748741606338e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.450370910345386e-10, -1.9257301298903336e-15, 8.688144408462996e-16, 2.7370809361932293e-05, -1.8015495121292713e-06, -1.818410294118833e-08, 8.935114691513575e-10, 112772825510.86789, 160453198244.84198]
[-8.304227478096081e-10, -1.500986356346536e-12, -1.9531413192683389e-13, 4.764041880667976e-11, -1.8918518378579712e-10, -1.9257301298903336e-15, 8.688144408462996e-16, 2.7122228639393258e-05, -1.8099079507631247e-06, -1.8203397437532012e-08, 8.935114691513575e-10, 177535436392.6114, 109895891048.79645]
[-2.0926038768787875e-10, -1.6406892521440393e-12, -1.9531413192683389e-13, 4.85603371945204e-11, -1.450370910345386e-10, -1.9257301298903336e-15, 8.688144408462996e-16, 2.7370809361932293e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 8.935114691513575e-10, 150364957402.63327, 122880053749.32047]
[-8.223802918909379e-10, -1.4625176901480844e-12, -2.703868659848318e-13, 4.852404641399239e-11, -1.896863627503491e-10, -1.9257301298903336e-15, 8.688144408462996e-16, 2.697391208672331e-05, -1.7223534426462784e-06, -1.7212440323693525e-08, 8.377481199786938e-10, 199237170018.58218, 130994741061.18477]
[-2.1118416643089627e-10, -1.459747004615292e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4471230416768517e-10, -1.9257301298903336e-15, 8.688144408462996e-16, 2.7267797101210102e-05, -1.8015495121292713e-06, -1.818410294118833e-08, 8.935114691513575e-10, 120611068648.22205, 148716985588.15564]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.081976758127089e-10, 190052435274.9098, 101545825010.15762]
[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4446129047664535e-10, -1.8210829282495652e-15, 8.731899868495941e-16, 2.4857867004975476e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.081976758127089e-10, 195239394048.3779, 101879284463.33914]
[-8.372413642600907e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.087619653117874e-10, 178582869424.88885, 102270797763.39908]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.937673308636816e-13, 4.852404641399239e-11, -1.432701673757514e-10, -1.8210829282495652e-15, 8.765174154706532e-16, 2.4703687041471573e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.087619653117874e-10, 171732970643.1874, 106305215455.77405]
[-8.304227478096081e-10, -1.500986356346536e-12, -1.9531413192683389e-13, 4.7704075824842225e-11, -1.8975666267494283e-10, -1.9099300746589145e-15, 8.757096667187756e-16, 2.7122228639393258e-05, -1.809239966469619e-06, -1.8203397437532012e-08, 8.935114691513575e-10, 166731944707.48343, 109962566902.69849]
[-2.0926038768787875e-10, -1.3235354562894133e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.5027518840822802e-10, -1.9355556139972827e-15, 8.69779310515605e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.113315958572542e-10, 198705325524.15018, 111850971687.16727]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.858844276736905e-11, -1.5027518840822802e-10, -1.9257301298903336e-15, 8.765174154706532e-16, 2.507247127369048e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.134614417430693e-10, 152877011534.3794, 128488226222.4665]
[-8.325113652893972e-10, -1.647013760811586e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.8226533446456543e-15, 8.718221314640016e-16, 2.471871023322042e-05, -1.788813296914756e-06, -1.836034443165441e-08, 9.148927620445716e-10, 115664967416.85544, 172987399752.44284]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.035879148460716e-10, 195862055252.448, 98829512345.71223]
[-8.372802930516975e-10, -1.647013760811586e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15, 8.765346456450067e-16, 2.4957985197946978e-05, -1.799557982850986e-06, -1.836034443165441e-08, 9.081976758127089e-10, 191606485390.66824, 100937635343.36494]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15, 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 197397120635.11142, 101220474756.5564]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.924272863609467e-13, 4.852404641399239e-11, -1.4730851235460287e-10, -1.8195538935082505e-15, 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.081976758127089e-10, 189380748451.24603, 101440046940.62292]
[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4663924630161214e-10, -1.815921924023075e-15, 8.688144408462996e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.081976758127089e-10, 179897941081.52283, 101479475091.5385]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.081976758127089e-10, 186125019263.05353, 101522685052.87083]
[-8.372413642600907e-10, -1.647013760811586e-12, -1.9531413192683389e-13, 4.826770959894538e-11, -1.4675478300173032e-10, -1.815921924023075e-15, 8.675713932751666e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.087619653117874e-10, 176424094355.21158, 102059630396.96977]
[-8.32774857282967e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.475667375214216e-10, -1.8210829282495652e-15, 8.765174154706532e-16, 2.4703687041471573e-05, -1.7921694947468313e-06, -1.836034443165441e-08, 9.080472327376693e-10, 190619161162.84558, 102134941196.42899]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13, 4.835930442286039e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.087619653117874e-10, 178582869424.89273, 102270797763.3992]
[-8.372413642600907e-10, -1.3359785407261977e-12, -1.9482957217087468e-13, 4.831070029448083e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.688144408462996e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.087619653117874e-10, 178582869424.89435, 102270797763.39929]
[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4446129047664535e-10, -1.8304219886094965e-15, 8.765174154706532e-16, 2.4857867004975476e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.087619653117874e-10, 191644867011.30374, 102518032445.5969]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13, 4.82400894161232e-11, -1.4446129047664535e-10, -1.8228595048374295e-15, 8.751158883884222e-16, 2.506841119647095e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.081976758127089e-10, 172947032775.99432, 102577021916.3392]
[-2.103367158359051e-10, -1.3359785407261977e-12, -1.9376482536341035e-13, 4.852404641399239e-11, -1.432701673757514e-10, -1.8210829282495652e-15, 8.765174154706532e-16, 2.4703687041471573e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.087619653117874e-10, 171732970643.1874, 106305215455.77405]
[-8.372413642600907e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.8161784527844478e-08, 9.087619653117874e-10, 144963603428.97382, 112061347287.60056]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.765174154706532e-16, 2.5026084747023036e-05, -1.7900208911755532e-06, -1.830053261436748e-08, 9.087619653117874e-10, 125853468889.92097, 136457449593.06062]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.937673308636816e-13, 4.852404641399239e-11, -1.432701673757514e-10, -1.8210829282495652e-15, 8.765174154706532e-16, 2.4703687041471573e-05, -1.776082515662521e-06, -1.836034443165441e-08, 9.087619653117874e-10, 126137991779.33096, 160562679389.67618]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.035879148460716e-10, 195862055252.448, 98829512345.71223]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15, 8.763652695826297e-16, 2.4957985197946978e-05, -1.799557982850986e-06, -1.836034443165441e-08, 9.081976758127089e-10, 186222924740.70007, 100125948657.42978]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13, 4.855683396544643e-11, -1.4675478300173032e-10, -1.815921924023075e-15, 8.83613368865103e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.081976758127089e-10, 183895104728.34744, 101215117638.35565]
[-2.0926038768787875e-10, -1.3382357152930057e-12, -1.9531413192683389e-13, 4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15, 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 197397120635.11142, 101220474756.5564]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.432701673757514e-10, -1.8130493256774034e-15, 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 197397120635.11664, 101220474756.55742]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.924272863609467e-13, 4.852404641399239e-11, -1.476291648179518e-10, -1.8195538935082505e-15, 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.081976758127089e-10, 189380748451.4617, 101440046940.6675]
[-2.0969974314689316e-10, -1.647013760811586e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4663924630161214e-10, -1.815921924023075e-15, 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.081976758127089e-10, 179897941081.52283, 101479475091.5385]
[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13, 4.8730627003901226e-11, -1.4675478300173032e-10, -1.815921924023075e-15, 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.081976758127089e-10, 179897941081.58997, 101479475091.5439]
[-2.0926038768787875e-10, -1.6370065196284276e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4663924630161214e-10, -1.8210829282495652e-15, 8.725909439109588e-16, 2.5149586855224063e-05, -1.8040587516026417e-06, -1.830053261436748e-08, 9.081976758127089e-10, 174674218067.03134, 101707557509.25955]
[-2.0780704759852712e-10, -1.3359785407261977e-12, -1.928247479392491e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15, 8.815489945689696e-16, 2.492800478197597e-05, -1.799557982850986e-06, -1.830053261436748e-08, 9.081976758127089e-10, 177564736843.2668, 101910116331.42278]
[-2.0926038768787875e-10, -1.3481496678499343e-12, -1.9612804716494087e-13, 4.869384519400452e-11, -1.4625361988654996e-10, -1.816149350524488e-15, 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.087619653117874e-10, 176677319245.07892, 101942928295.47075]
[-8.324503936172223e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4535167828811644e-10, -1.799249889019179e-15, 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.063398319687734e-10, 161710635101.41095, 104790698646.6004]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.8168585276282465e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.102513898455556e-10, 160649925757.17908, 106424978687.80653]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.869384519400452e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.765174154706532e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.067222192179334e-10, 157509126624.7564, 106648081137.30634]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.924272863609467e-13, 4.87567764690249e-11, -1.473869541008466e-10, -1.8210829282495652e-15, 8.797810044472039e-16, 2.5128697145423343e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.089655956213592e-10, 156027014786.34595, 106784848298.00577]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.8130493256774034e-15, 8.758120054489215e-16, 2.489589641570383e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.120599461707459e-10, 159857940983.01962, 106918161793.97298]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9347415380665696e-13, 4.85631967683728e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.836417410231251e-08, 9.134390375783151e-10, 142628527511.76648, 117274357359.96004]
[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9567576322418712e-13, 4.852404641399239e-11, -1.4663924630161214e-10, -1.815921924023075e-15, 8.688144408462996e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.120365536291957e-10, 136801158565.52109, 118996909122.33968]
[-2.0926038768787875e-10, -1.3468298773490566e-12, -1.924272863609467e-13, 4.852404641399239e-11, -1.4730851235460287e-10, -1.8210829282495652e-15, 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.13148553316506e-10, 131221998343.07083, 125656067768.88814]
[-8.372802930516975e-10, -1.6610460978653825e-12, -1.9391155389121011e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.765346456450067e-16, 2.500200335107093e-05, -1.777109321965829e-06, -1.836034443165441e-08, 9.081976758127089e-10, 107442969837.9951, 191438895729.71088]
[-8.373514643167848e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.705169785374419e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.035879148460716e-10, 195862055252.448, 98829512345.71223]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4659424506650604e-10, -1.7864100157215748e-15, 8.706272486016714e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 185690352687.11697, 99223644222.007]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15, 8.839563844754409e-16, 2.4957985197946978e-05, -1.799557982850986e-06, -1.836034443165441e-08, 9.081976758127089e-10, 186222924740.70007, 100125948657.42978]
[-8.29844666406642e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.849645416672899e-11, -1.4675478300173032e-10, -1.803543054789903e-15, 8.714032924475303e-16, 2.492800478197597e-05, -1.799557982850986e-06, -1.836034443165441e-08, 9.081976758127089e-10, 190148608462.3534, 100180028793.61896]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15, 8.765174154706532e-16, 2.5177177276929545e-05, -1.7997194394724915e-06, -1.850709631603352e-08, 9.087619653117874e-10, 199924589208.46686, 100223589650.82378]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9654069739659012e-13, 4.855683396544643e-11, -1.461461940090847e-10, -1.803543054789903e-15, 8.763652695826297e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.081976758127089e-10, 178626169889.2221, 100558408593.70113]
[-8.332310924150067e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.8877585360256924e-11, -1.4675478300173032e-10, -1.8130493256774034e-15, 8.763652695826297e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 193351738763.71564, 100949387586.23102]
[-8.372802930516975e-10, -1.343853363763315e-12, -1.9192642832280474e-13, 4.852404641399239e-11, -1.446871529700577e-10, -1.8130493256774034e-15, 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 197397120636.1133, 101220474756.86967]
[-2.081071620571536e-10, -1.3430194729908366e-12, -1.9531413192683389e-13, 4.8687777307168814e-11, -1.432701673757514e-10, -1.8195538935082505e-15, 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.081976758127089e-10, 189380748448.52612, 101440046940.05927]
[-8.372802930516975e-10, -1.3382357152930057e-12, -1.9531413192683389e-13, 4.869384519400452e-11, -1.432701673757514e-10, -1.815921924023075e-15, 8.834544584685654e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 198690577754.9655, 101467426817.57397]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.924272863609467e-13, 4.8327983670281894e-11, -1.4675478300173032e-10, -1.8258864221284576e-15, 8.83613368865103e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.8304452912365864e-08, 9.081976758127089e-10, 193392923341.53983, 101900620617.14302]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9719420123154376e-13, 4.861133464689211e-11, -1.483232636118454e-10, -1.8195538935082505e-15, 8.765174154706532e-16, 2.492800478197597e-05, -1.7966453439138136e-06, -1.836034443165441e-08, 9.087619653117874e-10, 174954502194.04602, 103131734300.077]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.814072294943091e-11, -1.437983579446461e-10, -1.8130493256774034e-15, 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.107645094765291e-10, 171249412831.2997, 103180541968.40872]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.476291648179518e-10, -1.7906363569860738e-15, 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.8221372696029056e-08, 9.081976758127089e-10, 154981149327.29538, 103805616436.34537]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.855683396544643e-11, -1.432701673757514e-10, -1.825643030416898e-15, 8.83613368865103e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.81828896229741e-08, 9.081976758127089e-10, 158250536108.31226, 106843736334.12831]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9439448414369486e-13, 4.855683396544643e-11, -1.4675478300173032e-10, -1.8130493256774034e-15, 8.765174154706532e-16, 2.5187119035976227e-05, -1.797858272312416e-06, -1.836034443165441e-08, 9.087619653117874e-10, 148433419780.93826, 110030788135.34956]
[-8.372802930516975e-10, -1.3382357152930057e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.432701673757514e-10, -1.799249889019179e-15, 8.765174154706532e-16, 2.4802576523291093e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.087619653117874e-10, 152744383578.88885, 111006224451.55664]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15, 8.83613368865103e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.8140174569754755e-08, 9.081976758127089e-10, 140660582328.68314, 113087422800.04585]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15, 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.081976758127089e-10, 148227079557.4723, 115101067854.69138]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.830053261436748e-08, 9.081976758127089e-10, 129686832886.01216, 126984206927.84627]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.869384519400452e-11, -1.4592095499147362e-10, -1.7864100157215748e-15, 8.706272486016714e-16, 2.5177177276929545e-05, -1.7997194394724915e-06, -1.850709631603352e-08, 9.087619653117874e-10, 188127979624.47858, 98138013390.26245]
[-8.373514643167848e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.8139505305916955e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.783887938075847e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.035879148460716e-10, 195862055252.45816, 98829512345.71414]
[-8.379785124926609e-10, -1.3292316984383345e-12, -1.955394873972143e-13, 4.852404641399239e-11, -1.4779126633130978e-10, -1.799249889019179e-15, 8.775397316555329e-16, 2.5049204386853816e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.035879148460716e-10, 183972070969.05157, 98891303611.42876]
[-8.373750609204521e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.869384519400452e-11, -1.4659424506650604e-10, -1.7864100157215748e-15, 8.706272486016714e-16, 2.492800478197597e-05, -1.7997194394724915e-06, -1.836034443165441e-08, 9.087619653117874e-10, 176341783374.723, 99638222233.03885]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4696825367906723e-10, -1.799249889019179e-15, 8.705169785374419e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 187303786818.71506, 99962477826.90034]
[-8.29844666406642e-10, -1.3259182588069894e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15, 8.839563844754409e-16, 2.492800478197597e-05, -1.799557982850986e-06, -1.836034443165441e-08, 9.081976758127089e-10, 190148608462.3526, 100180028793.6191]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15, 8.839563844754409e-16, 2.4907384876305387e-05, -1.799557982850986e-06, -1.836034443165441e-08, 9.081976758127089e-10, 192885903228.52237, 100290100926.3771]
[-8.372802930516975e-10, -1.340114474894997e-12, -1.9475632661250835e-13, 4.852404641399239e-11, -1.4659424506650604e-10, -1.803543054789903e-15, 8.839563844754409e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 193159834117.98853, 100447140164.3877]
[-8.45347775440883e-10, -1.3359785407261977e-12, -1.9409478257397567e-13, 4.852404641399239e-11, -1.463585775827913e-10, -1.812045689500589e-15, 8.706272486016714e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 192907161589.0385, 100872818268.9527]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.8130493256774034e-15, 8.705169785374419e-16, 2.4957985197946978e-05, -1.7997194394724915e-06, -1.836034443165441e-08, 9.087619653117874e-10, 183710210581.81177, 101076246798.6337]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15, 8.765174154706532e-16, 2.542150809952725e-05, -1.7997194394724915e-06, -1.850709631603352e-08, 9.087619653117874e-10, 168715457724.7375, 101683114493.3993]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.849645416672899e-11, -1.432701673757514e-10, -1.803543054789903e-15, 8.765174154706532e-16, 2.5177177276929545e-05, -1.7997194394724915e-06, -1.836034443165441e-08, 9.087619653117874e-10, 153789626574.96255, 105699410466.83022]
[-8.372802930516975e-10, -1.3398025228100945e-12, -1.9531413192683389e-13, 4.855683396544643e-11, -1.4675478300173032e-10, -1.803543054789903e-15, 8.714032924475303e-16, 2.4957985197946978e-05, -1.793948394990656e-06, -1.836034443165441e-08, 9.081976758127089e-10, 159560429502.34207, 105861289429.36061]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.869384519400452e-11, -1.432701673757514e-10, -1.7864100157215748e-15, 8.765174154706532e-16, 2.5177177276929545e-05, -1.7997194394724915e-06, -1.836034443165441e-08, 9.087619653117874e-10, 147461834890.53723, 106068644665.40553]
[-8.372802930516975e-10, -1.3292316984383345e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4760843266911815e-10, -1.7864100157215748e-15, 8.706272486016714e-16, 2.492800478197597e-05, -1.7933608637070708e-06, -1.836034443165441e-08, 9.087979750822277e-10, 147793960453.4741, 109638154986.2024]
[-8.29844666406642e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.8434260838579935e-11, -1.4561659265574012e-10, -1.819718397269023e-15, 8.775397316555329e-16, 2.4948775411850268e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.081976758127089e-10, 150492287670.62976, 114344342719.97507]
[-8.406587076953522e-10, -1.318355348076889e-12, -1.9519777560623135e-13, 4.855683396544643e-11, -1.4760843266911815e-10, -1.815921924023075e-15, 8.839563844754409e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.081976758127089e-10, 148227079557.78632, 115101067854.31332]
[-8.389236670603421e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.717072130867646e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.087619653117874e-10, 137339476236.27339, 120797794814.05704]
[-8.373514643167848e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.705169785374419e-16, 2.492800478197597e-05, -1.786297491730252e-06, -1.836034443165441e-08, 9.087619653117874e-10, 128365631923.39072, 133721716481.47603]
[-8.361552586353477e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.705169785374419e-16, 2.483403849637781e-05, -1.783565701728919e-06, -1.836034443165441e-08, 9.095300241628919e-10, 123047993752.2489, 147005409641.27127]
[-9.129396902499863e-10, -1.290047843436073e-12, -2.702634930634393e-13, 4.58556551164694e-11, -1.8724359625458014e-10, -2.1792166675464865e-15, 9.365717147446797e-16, 1.8994698205972217e-05, -1.8050933870374392e-06, -1.3360134446642706e-08, 8.693561802236366e-10, 169675879824.58978, 156722470654.13324]
[6.303262263534727e-10, -1.2096663849982051e-12, -2.5988950272728827e-13, 4.701662665204773e-11, -1.4934765549498044e-10, -2.0495920936053975e-15, 8.502785255135087e-16, 1.8814769194136882e-05, -1.8050933870374392e-06, -1.3247752346374906e-08, 8.693561802236366e-10, 108072398467.48868, 167972224844.19583]
[6.303262263534727e-10, -1.2096663849982051e-12, -2.5988950272728827e-13, 4.701662665204773e-11, -1.4986345441105813e-10, -2.0495920936053975e-15, 8.502785255135087e-16, 1.8814769194136882e-05, -1.8050933870374392e-06, -1.3247752346374906e-08, 8.693561802236366e-10, 108072398467.75635, 167972224843.92523]
[-9.212545260772544e-10, -1.290047843436073e-12, -1.8356995493902235e-13, 4.58556551164694e-11, -1.8724359625458014e-10, -2.1913589342035502e-15, 9.365717147446797e-16, 1.9540146753875297e-05, -1.8050933870374392e-06, -1.3360134446642706e-08, 8.693561802236366e-10, 117723326371.03189, 192873830899.82352]
[6.303262263534727e-10, -1.290047843436073e-12, -2.5988950272728827e-13, 4.58556551164694e-11, -1.4986345441105813e-10, -2.1913589342035502e-15, 8.502785255135087e-16, 1.8814769194136882e-05, -1.8050933870374392e-06, -1.3247752346374906e-08, 8.693561802236366e-10, 164354464752.25952, 160840990423.46024]
[6.354744988103506e-10, -1.2096663849982051e-12, -1.830526663998671e-13, 4.6589669053151376e-11, -1.4986345441105813e-10, -2.0495920936053975e-15, 8.502785255135087e-16, 1.894858193847651e-05, -1.8050933870374392e-06, -1.3247752346374906e-08, 8.693561802236366e-10, 96467208837.94556, 179586543004.98117]
[-9.212545260772544e-10, -1.290047843436073e-12, -1.8356995493902235e-13, 4.58556551164694e-11, -1.8580228849463816e-10, -2.1913589342035502e-15, 9.365717147446797e-16, 1.9540146753875297e-05, -1.8218396850604304e-06, -1.3360134446642706e-08, 8.759216763039946e-10, 117765020064.66293, 187118262382.8758]
[-9.129396902499863e-10, -1.3004166005044262e-12, -1.8356995493902235e-13, 4.58556551164694e-11, -1.8724359625458014e-10, -2.1913589342035502e-15, 9.365717147446797e-16, 1.962681376929987e-05, -1.8050933870374392e-06, -1.3418860642065019e-08, 8.693561802236366e-10, 122674650037.46736, 187415567631.77402]
[-9.212545260772544e-10, -1.2799153483071088e-12, -1.8213920664100724e-13, 4.58556551164694e-11, -1.8724359625458014e-10, -2.1913589342035502e-15, 9.365717147446797e-16, 1.9540146753875297e-05, -1.8050933870374392e-06, -1.3360134446642706e-08, 8.693561802236366e-10, 117723326371.03189, 192873830899.82352]
[-9.212545260772544e-10, -1.290047843436073e-12, -1.8356995493902235e-13, 4.6154548476823616e-11, -1.8724359625458014e-10, -2.1913589342035502e-15, 9.358479354640953e-16, 1.9540146753875297e-05, -1.8050933870374392e-06, -1.3360134446642706e-08, 8.693561802236366e-10, 117723326371.02731, 192873830899.82806]
[2.2152115305769157e-10, -1.6907719215642795e-12, -2.5108769063589337e-13, 4.9793760275117476e-11, -2.0780774158604122e-10, -2.1593626664102876e-15, 8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.77876424822685e-10, 170388218306.66492, 168925348515.4128]
[2.2152115305769157e-10, -1.6907719215642795e-12, -2.1051647732787472e-13, 4.9793760275117476e-11, -2.0780774158604122e-10, -2.1790706433018085e-15, 8.836470142939426e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.5091093694835327e-08, 8.771058818345121e-10, 191821821495.1242, 158798904598.69617]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -2.1593626664102876e-15, 8.836470142939426e-16, 2.0217203662255432e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.771058818345121e-10, 177069079234.4985, 163375067226.8736]
[2.213664545134999e-10, -1.2059133330572482e-12, -2.5108769063589337e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -2.1593626664102876e-15, 8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.508245699810314e-08, 8.771058818345121e-10, 197879714583.27084, 152444791757.7255]
[0.0, -1.223723210207519e-12, -2.1051647732787472e-13, 4.971358693780409e-11, -1.7352085678160897e-10, -2.165433707987142e-15, 7.304553415989529e-16, 2.0047355685146273e-05, -1.7657604268720381e-06, -1.4977385439375226e-08, 8.771058818345121e-10, 197945074606.02325, 153164597685.87036]
[2.2152115305769157e-10, -1.1984578022968498e-12, -2.5108769063589337e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15, 7.430575474541962e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.771058818345121e-10, 111986329581.05826, 155849166742.8801]
[2.2133713135172913e-10, -1.2059133330572482e-12, -2.5107145183244764e-13, 5.011120217163613e-11, -1.724660990140153e-10, -2.1790706433018085e-15, 8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.771058818345121e-10, 187269085984.5673, 161472427331.15216]
[0.0, -1.223723210207519e-12, -2.094909506024221e-13, 5.011120217163613e-11, -1.7677981323511262e-10, -2.145058695065051e-15, 7.430575474541962e-16, 2.0053347897812537e-05, -1.7639524821935923e-06, -1.4682044872577598e-08, 8.728626586100963e-10, 152433850624.54852, 175966043507.07343]
[0.0, -1.223723210207519e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -2.1790706433018085e-15, 7.430575474541962e-16, 1.9918519209106862e-05, -1.7685796144533914e-06, -1.4682044872577598e-08, 8.771058818345121e-10, 153535961138.3572, 184829802626.36642]
[2.2152115305769157e-10, -1.200937983572784e-12, -2.1065990049856794e-13, 5.011120217163613e-11, -1.7420072583381303e-10, -1.8426407940693324e-15, 7.454251311051652e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.508245699810314e-08, 8.771058818345121e-10, 92670242378.77588, 189416231139.84406]
[0.0, -1.2207456906260254e-12, -2.1065990049856794e-13, 4.9793760275117476e-11, -2.0772853669541976e-10, -1.8426407940693324e-15, 7.430575474541962e-16, 1.9867416915370552e-05, -1.7639524821935923e-06, -1.5091093694835327e-08, 8.728626586100963e-10, 160631139543.06137, 122019730569.7476]
[2.2152115305769157e-10, -1.1984578022968498e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7677981323511262e-10, -1.857281675942834e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.771058818345121e-10, 153487531028.94116, 128597452665.91768]
[0.0, -1.2031098015567e-12, -2.5161591646068603e-13, 5.011120217163613e-11, -1.7849498396021264e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.771058818345121e-10, 142632578694.80914, 130195065921.46504]
[2.2152115305769157e-10, -1.2003583976149596e-12, -2.5108769063589337e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15, 7.430575474541962e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.517941226634992e-08, 8.771058818345121e-10, 107861636975.64659, 161449199082.99103]
[0.0, -1.223723210207519e-12, -2.094909506024221e-13, 5.011120217163613e-11, -1.7677981323511262e-10, -1.857281675942834e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.769936435419886e-06, -1.4682044872577598e-08, 8.728626586100963e-10, 100156348461.68698, 161778485371.36353]
[0.0, -1.1984578022968498e-12, -2.1065990049856794e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15, 7.430575474541962e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.5091093694835327e-08, 8.760544278271184e-10, 100072993312.46272, 171303112707.4717]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, 4.9793760275117476e-11, -1.7352085678160897e-10, -1.8261648304268637e-15, 8.836470142939426e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.771058818345121e-10, 97245352689.07887, 174341101475.58182]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 4.9675085987122204e-11, -1.7558160485557454e-10, -1.8426407940693324e-15, 8.836470142939426e-16, 2.022642042947946e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 92503635735.71886, 182996786041.40976]
[0.0, -1.223723210207519e-12, -2.094909506024221e-13, 5.011120217163613e-11, -1.7677981323511262e-10, -2.1612081417375267e-15, 7.470344646267989e-16, 2.0053347897812537e-05, -1.7639524821935923e-06, -1.4645406166689473e-08, 8.730660207999707e-10, 148185335900.70355, 185221791801.95062]
[2.2111462065028517e-10, -1.2207456906260254e-12, -2.1065990049856794e-13, 5.056589741460715e-11, -1.7420072583381303e-10, -1.8426407940693324e-15, 7.454251311051652e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.508245699810314e-08, 8.771058818345121e-10, 92670242378.76936, 189416231139.85312]
[2.2152115305769157e-10, -1.2207456906260254e-12, -2.1065990049856794e-13, 5.011120217163613e-11, -1.7420072583381303e-10, -1.8276902524925885e-15, 8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.5091093694835327e-08, 8.771058818345121e-10, 90666406593.2125, 190153350507.14474]
[2.2152115305769157e-10, -1.2049195466583994e-12, -2.1065990049856794e-13, 4.98075339514226e-11, -1.7558160485557454e-10, -1.8426407940693324e-15, 7.454251311051652e-16, 2.0095046248399238e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.771058818345121e-10, 89706134652.28279, 197738317572.1617]
[0.0, -1.2031098015567e-12, -2.1065990049856794e-13, 5.0102593857564815e-11, -1.7352085678160897e-10, -1.819039898810471e-15, 7.460417812765263e-16, 2.0200374650352852e-05, -1.7758673160173464e-06, -1.5202351660972107e-08, 8.760544278271184e-10, 160476853944.9334, 119035825863.27417]
[2.2152115305769157e-10, -1.2031098015567e-12, -2.5161591646068603e-13, 4.9793760275117476e-11, -1.7849498396021264e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5344868185414675e-08, 8.771058818345121e-10, 180743589801.84604, 120144468135.82727]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 4.947687927376915e-11, -1.7558160485557454e-10, -1.8426407940693324e-15, 8.836470142939426e-16, 2.04140411384885e-05, -1.7639524821935923e-06, -1.5078308038358913e-08, 8.683463468773267e-10, 146622662638.346, 120359956158.03543]
[0.0, -1.1984578022968498e-12, -2.094909506024221e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.857281675942834e-15, 7.430575474541962e-16, 2.0200374650352852e-05, -1.7813149517985466e-06, -1.5091093694835327e-08, 8.760544278271184e-10, 171477577754.58575, 120995758664.39177]
[2.2152115305769157e-10, -1.1984578022968498e-12, -2.5108769063589337e-13, 4.9967768219433575e-11, -1.7352085678160897e-10, -1.8426407940693324e-15, 7.430575474541962e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.5091093694835327e-08, 8.703632209100975e-10, 151029089477.88403, 121221447183.73479]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.520980077906525e-08, 8.721578527250325e-10, 139696562348.4149, 123962248783.03809]
[2.233355889138985e-10, -1.2031098015567e-12, -2.5108769063589337e-13, 5.011120217163613e-11, -1.7849498396021264e-10, -1.8426407940693324e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.771058818345121e-10, 148301377250.4212, 129257349906.46594]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15, 7.448076765658434e-16, 2.0200374650352852e-05, -1.7728642137544318e-06, -1.517941226634992e-08, 8.771058818345121e-10, 131981382341.97574, 129372470770.49553]
[0.0, -1.2031098015567e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7849498396021264e-10, -1.82610373802557e-15, 8.836470142939426e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.771058818345121e-10, 142632578694.80914, 130195065921.46504]
[-5.2595470648843136e-09, -1.2003583976149596e-12, -2.5161591646068603e-13, 5.011120217163613e-11, -1.7461898455625076e-10, -1.8426407940693324e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.517941226634992e-08, 8.771058818345121e-10, 142718091682.67987, 132029509845.4832]
[2.2257852388875064e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 4.9793760275117476e-11, -1.7380412465809723e-10, -1.841021101878205e-15, 8.836470142939426e-16, 2.022642042947946e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.750599822793858e-10, 126150709659.35735, 137741348069.72827]
[0.0, -1.2344709098355012e-12, -2.090479539659853e-13, 5.011120217163613e-11, -1.7849498396021264e-10, -1.857281675942834e-15, 7.485411998460075e-16, 1.981538293869461e-05, -1.769936435419886e-06, -1.4682044872577598e-08, 8.711551918674385e-10, 114088676894.18327, 143862344272.2216]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 119621740814.33159, 143868003797.30536]
[2.2152115305769157e-10, -1.2003583976149596e-12, -2.088572649745598e-13, 4.995108013618423e-11, -1.7207960562590789e-10, -1.8426407940693324e-15, 8.836470142939426e-16, 2.015341505664753e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.771058818345121e-10, 115848531243.76457, 151496866956.06183]
[7.878840270455085e-09, -1.2071709641632366e-12, -2.088572649745598e-13, 5.022894055850661e-11, -1.7352085678160897e-10, -1.8610445297760222e-15, 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.760544278271184e-10, 113456911424.16617, 154679332976.7693]
[0.0, -1.2031098015567e-12, -2.5161591646068603e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.500055802123721e-08, 8.760544278271184e-10, 107979663117.77498, 158587944243.3901]
[2.2152115305769157e-10, -1.2003583976149596e-12, -2.5108769063589337e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15, 7.451496753853957e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.517941226634992e-08, 8.771058818345121e-10, 107861636975.64659, 161449199082.99103]
[2.1977210438689425e-10, -1.2003583976149596e-12, -2.5108769063589337e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15, 8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.517941226634992e-08, 8.771058818345121e-10, 107861636975.64659, 161449199082.99103]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.099781497267347e-13, 4.9793760275117476e-11, -1.7558160485557454e-10, -1.8426407940693324e-15, 8.836470142939426e-16, 2.0299458575301996e-05, -1.756844278469525e-06, -1.5202351660972107e-08, 8.750599822793858e-10, 101036412554.48618, 178952195751.12357]
[0.0, -1.2071709641632366e-12, -2.088572649745598e-13, 4.9793760275117476e-11, -1.7352085678160897e-10, -1.8426407940693324e-15, 8.836470142939426e-16, 2.0200374650352852e-05, -1.7587739009571313e-06, -1.5202351660972107e-08, 8.768692858683927e-10, 101115281125.52821, 181312381109.07834]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 4.9675085987122204e-11, -1.7558160485557454e-10, -1.8426407940693324e-15, 8.836470142939426e-16, 2.022642042947946e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 92503635735.71886, 182996786041.40976]
[2.2295275331941093e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 4.9675085987122204e-11, -1.7558160485557454e-10, -1.8426407940693324e-15, 8.836470142939426e-16, 2.022642042947946e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 92503635735.71886, 182996786041.40976]
[0.0, -1.223723210207519e-12, -2.1065990049856794e-13, 5.011120217163613e-11, -1.7707453284878416e-10, -1.866210682668369e-15, 7.430575474541962e-16, 1.9722774245768875e-05, -1.769936435419886e-06, -1.4682044872577598e-08, 8.760544278271184e-10, 88317753591.74515, 193403737351.61066]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.5161591646068603e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 2.0343533479720338e-05, -1.7493239251088378e-06, -1.5085870105283375e-08, 8.701394499644777e-10, 90763281590.1167, 199093039398.6542]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.857281675942834e-15, 7.387655049943961e-16, 1.981538293869461e-05, -1.769936435419886e-06, -1.4563889985865401e-08, 8.644597543611974e-10, 157634872361.7637, 120593643708.66519]
[2.2257852388875064e-10, -1.2070230966272908e-12, -2.1051647732787472e-13, 5.027931250826744e-11, -1.755220169767042e-10, -1.810973414699955e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.750599822793858e-10, 159354716917.0895, 121269083493.68436]
[0.0, -1.2031098015567e-12, -2.090479539659853e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8577367523496564e-15, 7.430575474541962e-16, 1.9814643005749893e-05, -1.7639524821935923e-06, -1.500055802123721e-08, 8.711551918674385e-10, 168378423128.42877, 121439949900.90005]
[2.198369754018213e-10, -1.2071709641632366e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7513929529124395e-10, -1.82610373802557e-15, 7.448076765658434e-16, 2.0042195789951223e-05, -1.7728642137544318e-06, -1.5013783998899997e-08, 8.734593739302048e-10, 147068576327.25705, 122027384226.92]
[2.2257852388875064e-10, -1.2059133330572482e-12, -2.090479539659853e-13, 4.9793760275117476e-11, -1.7849498396021264e-10, -1.841021101878205e-15, 7.556782953802372e-16, 2.022642042947946e-05, -1.769936435419886e-06, -1.5202351660972107e-08, 8.750599822793858e-10, 149871632956.7388, 122750625888.09634]
[2.2152115305769157e-10, -1.2344709098355012e-12, -2.1013781830316155e-13, 5.011120217163613e-11, -1.7343044399460855e-10, -1.857281675942834e-15, 7.430575474541962e-16, 2.0343113714890682e-05, -1.7639524821935923e-06, -1.520980077906525e-08, 8.721578527250325e-10, 151082881535.07886, 122935226427.98189]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.520980077906525e-08, 8.721578527250325e-10, 139696562348.4149, 123962248783.03809]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7380412465809723e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.735477478457909e-10, 133427418313.38545, 131702579310.68652]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.116126459765591e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.517941226634992e-08, 8.771058818345121e-10, 137250169853.3863, 133211383937.09729]
[-9.575357968769427e-09, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.842789515995345e-15, 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 123172560507.99263, 143105235055.608]
[2.2282051950271776e-10, -1.2030336482043862e-12, -2.1171136727356646e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 119639757591.69511, 143860615432.91934]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.0388416851351e-11, -1.7478774930028702e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 118202331336.15999, 145092770865.8836]
[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.021867485100539e-11, -1.7558160485557454e-10, -1.82610373802557e-15, 7.503695295044637e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.760544278271184e-10, 110377805870.9487, 155477031697.76462]
[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7281503437685213e-10, -1.82610373802557e-15, 8.836470142939426e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.500055802123721e-08, 8.760544278271184e-10, 107979663117.63412, 158587944243.89005]
[0.0, -1.2031098015567e-12, -2.522559178506789e-13, 5.003845283040925e-11, -1.7352085678160897e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.9950498914670327e-05, -1.7639524821935923e-06, -1.500055802123721e-08, 8.760544278271184e-10, 99132279868.34593, 171185572417.85907]
[2.2257852388875064e-10, -1.2031098015567e-12, -2.5161591646068603e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.82610373802557e-15, 8.811799226535086e-16, 2.022642042947946e-05, -1.7639524821935923e-06, -1.508244156181531e-08, 8.760544278271184e-10, 93130287119.72461, 180430143233.58368]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.8265258253512156e-15, 7.430575474541962e-16, 2.0240988631290876e-05, -1.7728642137544318e-06, -1.5013783998899997e-08, 8.784555835692595e-10, 86927194519.4496, 183449646874.34637]
[7.863427642383715e-09, -1.2031098015567e-12, -2.5161591646068603e-13, 4.9793760275117476e-11, -1.7380412465809723e-10, -1.82610373802557e-15, 7.430575474541962e-16, 2.022642042947946e-05, -1.7639524821935923e-06, -1.500055802123721e-08, 8.750599822793858e-10, 87084714365.5935, 191076754457.2524]
[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7849498396021264e-10, -1.857281675942834e-15, 7.485411998460075e-16, 1.9750639916729973e-05, -1.769936435419886e-06, -1.5013783998899997e-08, 8.825388912755251e-10, 96474604776.96465, 194275355409.06598]
[0.0, -1.2031098015567e-12, -2.5161591646068603e-13, 4.9793760275117476e-11, -1.7380412465809723e-10, -1.82610373802557e-15, 7.430575474541962e-16, 2.022642042947946e-05, -1.7639524821935923e-06, -1.503739318330452e-08, 8.760544278271184e-10, 86984982238.58047, 194967876303.00238]
[1.5200576895768509e-09, -1.2059133330572482e-12, -2.0752021923147355e-13, 5.011120217163613e-11, -1.7849498396021264e-10, -1.82610373802557e-15, 7.479116563110691e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.4682044872577598e-08, 8.724478065416361e-10, 82147238279.93182, 198112832281.90573]
[2.223825616669009e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7326944854292794e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.534155691698868e-08, 8.721578527250325e-10, 175522473614.0067, 115813093887.0164]
[2.2296631466270538e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.0388416851351e-11, -1.7478774930028702e-10, -1.82610373802557e-15, 7.430575474541962e-16, 2.0431066002844864e-05, -1.7780476812466564e-06, -1.5013783998899997e-08, 8.717160979795123e-10, 146919548917.9041, 118508631814.89664]
[2.2152115305769157e-10, -1.2131115225525171e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7478774930028702e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.529126273308479e-08, 8.750599822793858e-10, 189141514324.11395, 119478476003.54858]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1171136727356646e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.515944456372276e-08, 8.735477478457909e-10, 171393648132.89902, 119746195767.88297]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.0388416851351e-11, -1.7478774930028702e-10, -1.82610373802557e-15, 7.503695295044637e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.680779846505464e-10, 198413310387.34686, 120002114057.9749]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.520980077906525e-08, 8.721578527250325e-10, 139696562348.4149, 123962248783.03809]
[2.2152115305769157e-10, -1.1981340041661674e-12, -2.0952905567462806e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15, 7.397318554179349e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.721578527250325e-10, 146191133033.73245, 124495463707.0261]
[2.220169404817274e-10, -1.2059133330572482e-12, -2.0840667223230766e-13, 5.0388416851351e-11, -1.7352085678160897e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.535159731564839e-08, 8.794413360449789e-10, 153568856127.85236, 127226107362.62663]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7380412465809723e-10, -1.82610373802557e-15, 7.476241521935537e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.504298228349246e-08, 8.735477478457909e-10, 140382068840.41766, 128048566261.66084]
[-9.575357968769427e-09, -1.2140137633227375e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.747166095423015e-10, -1.842789515995345e-15, 7.430575474541962e-16, 2.0343533479720338e-05, -1.761484506217259e-06, -1.520980077906525e-08, 8.721578527250325e-10, 135600496522.7375, 129146670219.88675]
[-9.575357968769427e-09, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15, 7.449634745732176e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.735477478457909e-10, 131821303340.10287, 132556338910.10567]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.0382265280257245e-11, -1.743336316696023e-10, -1.813766783798406e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.735477478457909e-10, 129406444985.873, 132653030892.18918]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7380412465809723e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7480334166671461e-06, -1.520980077906525e-08, 8.721578527250325e-10, 133865099427.32999, 140436120253.29218]
[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.842789515995345e-15, 7.503695295044637e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 123172560507.99377, 143105235055.60883]
[-9.575357968769427e-09, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 119639757591.69417, 143860615432.91846]
[2.2282051950271776e-10, -1.2059133330572482e-12, -2.1171136727356646e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 119621740814.33159, 143868003797.30536]
[-9.575357968769427e-09, -1.2028279049571785e-12, -2.1051647732787472e-13, 5.039644867967898e-11, -1.7558160485557454e-10, -1.842789515995345e-15, 7.430575474541962e-16, 1.9863936167468564e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.749223081325664e-10, 121395913545.80966, 144269444777.14786]
[2.2282051950271776e-10, -1.2030336482043862e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 118220156709.2957, 145085114899.6645]
[2.2282051950271776e-10, -1.2030336482043862e-12, -2.1171136727356646e-13, 5.011120217163613e-11, -1.7471650977559177e-10, -1.8261648304268637e-15, 7.416691902768309e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 118220156709.04602, 145085114900.12366]
[2.2082942462171206e-10, -1.2071709641632366e-12, -2.0913778067377877e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.5074975460776788e-08, 8.721578527250325e-10, 109968109293.02217, 145590447784.79443]
[2.22213071071529e-10, -1.2059133330572482e-12, -2.1085309656936224e-13, 5.021867485100539e-11, -1.7558160485557454e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.760267738096764e-10, 111899934222.58044, 153694065180.84283]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.0866854154642685e-13, 5.011120217163613e-11, -1.766361848796505e-10, -1.8339694239958517e-15, 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.760544278271184e-10, 112511385038.11157, 154263245256.49524]
[3.868816176815073e-09, -1.2030336482043862e-12, -2.1171136727356646e-13, 5.021867485100539e-11, -1.7558160485557454e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.4920809345224143e-08, 8.750599822793858e-10, 102250033424.31876, 164710456294.5225]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7478774930028702e-10, -1.82610373802557e-15, 7.452586179271996e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.4975512206722303e-08, 8.721578527250325e-10, 92516509687.73035, 170174200265.44513]
| null | [
0,
1,
2,
3
] |
2,046 | 5b366b0f6813f686600df9da4a17f190f034a10c | <mask token>
class EventSerializer(ModelSerializer):
class Meta:
model = Event
fields = '__all__'
class HolidaySerializerRead(ModelSerializer):
country = CountrySerializer()
class Meta:
model = Holiday
fields = '__all__'
class HolidaySerializerWrite(ModelSerializer):
class Meta:
model = Holiday
fields = '__all__'
| <mask token>
class UserSerializer(ModelSerializer):
class Meta:
model = User
fields = '__all__'
class EventSerializer(ModelSerializer):
class Meta:
model = Event
fields = '__all__'
class HolidaySerializerRead(ModelSerializer):
country = CountrySerializer()
class Meta:
model = Holiday
fields = '__all__'
class HolidaySerializerWrite(ModelSerializer):
class Meta:
model = Holiday
fields = '__all__'
| <mask token>
class CountrySerializer(ModelSerializer):
class Meta:
model = Country
fields = '__all__'
class UserSerializer(ModelSerializer):
class Meta:
model = User
fields = '__all__'
class EventSerializer(ModelSerializer):
class Meta:
model = Event
fields = '__all__'
class HolidaySerializerRead(ModelSerializer):
country = CountrySerializer()
class Meta:
model = Holiday
fields = '__all__'
class HolidaySerializerWrite(ModelSerializer):
class Meta:
model = Holiday
fields = '__all__'
| from django.contrib.auth.models import User
from rest_framework.serializers import ModelSerializer
from app_calendar.models import Holiday, Country, Event, User
class CountrySerializer(ModelSerializer):
class Meta:
model = Country
fields = '__all__'
class UserSerializer(ModelSerializer):
class Meta:
model = User
fields = '__all__'
class EventSerializer(ModelSerializer):
class Meta:
model = Event
fields = '__all__'
class HolidaySerializerRead(ModelSerializer):
country = CountrySerializer()
class Meta:
model = Holiday
fields = '__all__'
class HolidaySerializerWrite(ModelSerializer):
class Meta:
model = Holiday
fields = '__all__'
| null | [
4,
5,
6,
7
] |
2,047 | c2f6fa4d9a6e2ee5f0593bef775ce8f811225613 | <mask token>
@gapit_test('vkCmdCopyQueryPoolResults_test')
class FifthToEighthQueryResultsIn64BitWithWaitBitCopyWithZeroOffsets(GapitTest
):
<mask token>
@gapit_test('vkCmdCopyQueryPoolResults_test')
class AllFourQueryResultsIn32BitAnd12StrideWithPartialAndAvailabilityBitWithZeroOffset(
GapitTest):
def expect(self):
"""3. Expects vkCmdCopyQueryPoolResults() is called with firstQuery: 0,
queryCount: 4, stride: 12 and dstOffset: 0."""
copy_query_pool_results = require(self.nth_call_of(
'vkCmdCopyQueryPoolResults', 3))
require_not_equal(0, copy_query_pool_results.int_commandBuffer)
require_not_equal(0, copy_query_pool_results.int_queryPool)
require_equal(0, copy_query_pool_results.int_firstQuery)
require_equal(4, copy_query_pool_results.int_queryCount)
require_not_equal(0, copy_query_pool_results.int_dstBuffer)
require_equal(0, copy_query_pool_results.int_dstOffset)
require_equal(12, copy_query_pool_results.int_stride)
require_equal(VK_QUERY_RESULT_PARTIAL_BIT |
VK_QUERY_RESULT_WITH_AVAILABILITY_BIT, copy_query_pool_results.
int_flags)
| <mask token>
@gapit_test('vkCmdCopyQueryPoolResults_test')
class AllFourQueryResultsIn32BitWithNoFlagCopyWithOffsets(GapitTest):
<mask token>
@gapit_test('vkCmdCopyQueryPoolResults_test')
class FifthToEighthQueryResultsIn64BitWithWaitBitCopyWithZeroOffsets(GapitTest
):
def expect(self):
"""2. Expects vkCmdCopyQueryPoolResults() is called with firstQuery: 4,
queryCount: 4, stride: 8 and dstOffset: 0."""
copy_query_pool_results = require(self.nth_call_of(
'vkCmdCopyQueryPoolResults', 2))
require_not_equal(0, copy_query_pool_results.int_commandBuffer)
require_not_equal(0, copy_query_pool_results.int_queryPool)
require_equal(4, copy_query_pool_results.int_firstQuery)
require_equal(4, copy_query_pool_results.int_queryCount)
require_not_equal(0, copy_query_pool_results.int_dstBuffer)
require_equal(0, copy_query_pool_results.int_dstOffset)
require_equal(8, copy_query_pool_results.int_stride)
require_equal(VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WAIT_BIT,
copy_query_pool_results.int_flags)
@gapit_test('vkCmdCopyQueryPoolResults_test')
class AllFourQueryResultsIn32BitAnd12StrideWithPartialAndAvailabilityBitWithZeroOffset(
GapitTest):
def expect(self):
"""3. Expects vkCmdCopyQueryPoolResults() is called with firstQuery: 0,
queryCount: 4, stride: 12 and dstOffset: 0."""
copy_query_pool_results = require(self.nth_call_of(
'vkCmdCopyQueryPoolResults', 3))
require_not_equal(0, copy_query_pool_results.int_commandBuffer)
require_not_equal(0, copy_query_pool_results.int_queryPool)
require_equal(0, copy_query_pool_results.int_firstQuery)
require_equal(4, copy_query_pool_results.int_queryCount)
require_not_equal(0, copy_query_pool_results.int_dstBuffer)
require_equal(0, copy_query_pool_results.int_dstOffset)
require_equal(12, copy_query_pool_results.int_stride)
require_equal(VK_QUERY_RESULT_PARTIAL_BIT |
VK_QUERY_RESULT_WITH_AVAILABILITY_BIT, copy_query_pool_results.
int_flags)
| <mask token>
@gapit_test('vkCmdCopyQueryPoolResults_test')
class AllFourQueryResultsIn32BitWithNoFlagCopyWithOffsets(GapitTest):
def expect(self):
"""1. Expects vkCmdCopyQueryPoolResults() is called with firstQuery: 0,
queryCount: 4 stride: 4 and dstOffset: 16."""
copy_query_pool_results = require(self.nth_call_of(
'vkCmdCopyQueryPoolResults', 1))
require_not_equal(0, copy_query_pool_results.int_commandBuffer)
require_not_equal(0, copy_query_pool_results.int_queryPool)
require_equal(0, copy_query_pool_results.int_firstQuery)
require_equal(4, copy_query_pool_results.int_queryCount)
require_not_equal(0, copy_query_pool_results.int_dstBuffer)
require_equal(16, copy_query_pool_results.int_dstOffset)
require_equal(4, copy_query_pool_results.int_stride)
require_equal(0, copy_query_pool_results.int_flags)
@gapit_test('vkCmdCopyQueryPoolResults_test')
class FifthToEighthQueryResultsIn64BitWithWaitBitCopyWithZeroOffsets(GapitTest
):
def expect(self):
"""2. Expects vkCmdCopyQueryPoolResults() is called with firstQuery: 4,
queryCount: 4, stride: 8 and dstOffset: 0."""
copy_query_pool_results = require(self.nth_call_of(
'vkCmdCopyQueryPoolResults', 2))
require_not_equal(0, copy_query_pool_results.int_commandBuffer)
require_not_equal(0, copy_query_pool_results.int_queryPool)
require_equal(4, copy_query_pool_results.int_firstQuery)
require_equal(4, copy_query_pool_results.int_queryCount)
require_not_equal(0, copy_query_pool_results.int_dstBuffer)
require_equal(0, copy_query_pool_results.int_dstOffset)
require_equal(8, copy_query_pool_results.int_stride)
require_equal(VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WAIT_BIT,
copy_query_pool_results.int_flags)
@gapit_test('vkCmdCopyQueryPoolResults_test')
class AllFourQueryResultsIn32BitAnd12StrideWithPartialAndAvailabilityBitWithZeroOffset(
GapitTest):
def expect(self):
"""3. Expects vkCmdCopyQueryPoolResults() is called with firstQuery: 0,
queryCount: 4, stride: 12 and dstOffset: 0."""
copy_query_pool_results = require(self.nth_call_of(
'vkCmdCopyQueryPoolResults', 3))
require_not_equal(0, copy_query_pool_results.int_commandBuffer)
require_not_equal(0, copy_query_pool_results.int_queryPool)
require_equal(0, copy_query_pool_results.int_firstQuery)
require_equal(4, copy_query_pool_results.int_queryCount)
require_not_equal(0, copy_query_pool_results.int_dstBuffer)
require_equal(0, copy_query_pool_results.int_dstOffset)
require_equal(12, copy_query_pool_results.int_stride)
require_equal(VK_QUERY_RESULT_PARTIAL_BIT |
VK_QUERY_RESULT_WITH_AVAILABILITY_BIT, copy_query_pool_results.
int_flags)
| from gapit_test_framework import gapit_test, require, require_equal, require_true
from gapit_test_framework import require_not_equal, little_endian_bytes_to_int
from gapit_test_framework import GapitTest, get_read_offset_function
import gapit_test_framework
from vulkan_constants import *
@gapit_test('vkCmdCopyQueryPoolResults_test')
class AllFourQueryResultsIn32BitWithNoFlagCopyWithOffsets(GapitTest):
def expect(self):
"""1. Expects vkCmdCopyQueryPoolResults() is called with firstQuery: 0,
queryCount: 4 stride: 4 and dstOffset: 16."""
copy_query_pool_results = require(self.nth_call_of(
'vkCmdCopyQueryPoolResults', 1))
require_not_equal(0, copy_query_pool_results.int_commandBuffer)
require_not_equal(0, copy_query_pool_results.int_queryPool)
require_equal(0, copy_query_pool_results.int_firstQuery)
require_equal(4, copy_query_pool_results.int_queryCount)
require_not_equal(0, copy_query_pool_results.int_dstBuffer)
require_equal(16, copy_query_pool_results.int_dstOffset)
require_equal(4, copy_query_pool_results.int_stride)
require_equal(0, copy_query_pool_results.int_flags)
@gapit_test('vkCmdCopyQueryPoolResults_test')
class FifthToEighthQueryResultsIn64BitWithWaitBitCopyWithZeroOffsets(GapitTest
):
def expect(self):
"""2. Expects vkCmdCopyQueryPoolResults() is called with firstQuery: 4,
queryCount: 4, stride: 8 and dstOffset: 0."""
copy_query_pool_results = require(self.nth_call_of(
'vkCmdCopyQueryPoolResults', 2))
require_not_equal(0, copy_query_pool_results.int_commandBuffer)
require_not_equal(0, copy_query_pool_results.int_queryPool)
require_equal(4, copy_query_pool_results.int_firstQuery)
require_equal(4, copy_query_pool_results.int_queryCount)
require_not_equal(0, copy_query_pool_results.int_dstBuffer)
require_equal(0, copy_query_pool_results.int_dstOffset)
require_equal(8, copy_query_pool_results.int_stride)
require_equal(VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WAIT_BIT,
copy_query_pool_results.int_flags)
@gapit_test('vkCmdCopyQueryPoolResults_test')
class AllFourQueryResultsIn32BitAnd12StrideWithPartialAndAvailabilityBitWithZeroOffset(
GapitTest):
def expect(self):
"""3. Expects vkCmdCopyQueryPoolResults() is called with firstQuery: 0,
queryCount: 4, stride: 12 and dstOffset: 0."""
copy_query_pool_results = require(self.nth_call_of(
'vkCmdCopyQueryPoolResults', 3))
require_not_equal(0, copy_query_pool_results.int_commandBuffer)
require_not_equal(0, copy_query_pool_results.int_queryPool)
require_equal(0, copy_query_pool_results.int_firstQuery)
require_equal(4, copy_query_pool_results.int_queryCount)
require_not_equal(0, copy_query_pool_results.int_dstBuffer)
require_equal(0, copy_query_pool_results.int_dstOffset)
require_equal(12, copy_query_pool_results.int_stride)
require_equal(VK_QUERY_RESULT_PARTIAL_BIT |
VK_QUERY_RESULT_WITH_AVAILABILITY_BIT, copy_query_pool_results.
int_flags)
| # Copyright 2017 Google Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from gapit_test_framework import gapit_test, require, require_equal, require_true
from gapit_test_framework import require_not_equal, little_endian_bytes_to_int
from gapit_test_framework import GapitTest, get_read_offset_function
import gapit_test_framework
from vulkan_constants import *
@gapit_test("vkCmdCopyQueryPoolResults_test")
class AllFourQueryResultsIn32BitWithNoFlagCopyWithOffsets(GapitTest):
def expect(self):
"""1. Expects vkCmdCopyQueryPoolResults() is called with firstQuery: 0,
queryCount: 4 stride: 4 and dstOffset: 16."""
copy_query_pool_results = require(self.nth_call_of(
"vkCmdCopyQueryPoolResults", 1))
require_not_equal(0, copy_query_pool_results.int_commandBuffer)
require_not_equal(0, copy_query_pool_results.int_queryPool)
require_equal(0, copy_query_pool_results.int_firstQuery)
require_equal(4, copy_query_pool_results.int_queryCount)
require_not_equal(0, copy_query_pool_results.int_dstBuffer)
require_equal(16, copy_query_pool_results.int_dstOffset)
require_equal(4, copy_query_pool_results.int_stride)
require_equal(0, copy_query_pool_results.int_flags)
@gapit_test("vkCmdCopyQueryPoolResults_test")
class FifthToEighthQueryResultsIn64BitWithWaitBitCopyWithZeroOffsets(GapitTest):
def expect(self):
"""2. Expects vkCmdCopyQueryPoolResults() is called with firstQuery: 4,
queryCount: 4, stride: 8 and dstOffset: 0."""
copy_query_pool_results = require(self.nth_call_of(
"vkCmdCopyQueryPoolResults", 2))
require_not_equal(0, copy_query_pool_results.int_commandBuffer)
require_not_equal(0, copy_query_pool_results.int_queryPool)
require_equal(4, copy_query_pool_results.int_firstQuery)
require_equal(4, copy_query_pool_results.int_queryCount)
require_not_equal(0, copy_query_pool_results.int_dstBuffer)
require_equal(0, copy_query_pool_results.int_dstOffset)
require_equal(8, copy_query_pool_results.int_stride)
require_equal(VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WAIT_BIT,
copy_query_pool_results.int_flags)
@gapit_test("vkCmdCopyQueryPoolResults_test")
class AllFourQueryResultsIn32BitAnd12StrideWithPartialAndAvailabilityBitWithZeroOffset(GapitTest):
def expect(self):
"""3. Expects vkCmdCopyQueryPoolResults() is called with firstQuery: 0,
queryCount: 4, stride: 12 and dstOffset: 0."""
copy_query_pool_results = require(self.nth_call_of(
"vkCmdCopyQueryPoolResults", 3))
require_not_equal(0, copy_query_pool_results.int_commandBuffer)
require_not_equal(0, copy_query_pool_results.int_queryPool)
require_equal(0, copy_query_pool_results.int_firstQuery)
require_equal(4, copy_query_pool_results.int_queryCount)
require_not_equal(0, copy_query_pool_results.int_dstBuffer)
require_equal(0, copy_query_pool_results.int_dstOffset)
require_equal(12, copy_query_pool_results.int_stride)
require_equal(VK_QUERY_RESULT_PARTIAL_BIT
| VK_QUERY_RESULT_WITH_AVAILABILITY_BIT,
copy_query_pool_results.int_flags)
| [
3,
5,
6,
7,
8
] |
2,048 | 5436e9270e61f5f9ab41fc1f35a80f4b8def65ee | <mask token>
class TestFeatureReader(unittest.TestCase):
<mask token>
def testRFEFull(self):
feat = ['column1', 'column2', 'column3']
read_data = 'Header\n---- column1\n---- column2\n---- column3\n'
mock_open = mock.mock_open(read_data=read_data)
with mock.patch('builtins.open', mock_open), mock.patch('os.stat',
return_value=mock.Mock()):
readlist = self.rfe_feat_reader.getFeats()
self.assertEqual(feat, readlist)
<mask token>
def testRFEFull3(self):
feat = ['column1', 'column2']
read_data = 'Header\n---- column1\n---- column2\n---- column3\n'
mock_open = mock.mock_open(read_data=read_data)
with mock.patch('builtins.open', mock_open), mock.patch('os.stat',
return_value=mock.Mock()):
with self.assertRaises(ValueError) as context:
readlist = self.rfe_feat_reader.getNFeats(0)
self.assertEqual('n parameter is lower than 1 (it is 0)', str(
context.exception))
def testRFEEmptyFile(self):
mck = mock.Mock()
attrs = {'st_size': 0}
mck.configure_mock(**attrs)
read_data = ''
mock_open = mock.mock_open(read_data=read_data)
with mock.patch('builtins.open', mock_open), mock.patch('os.stat',
return_value=mck):
with self.assertRaises(ValueError) as context:
readlist = self.rfe_feat_reader.getFeats()
self.assertEqual('/foo/bar.txt is empty', str(context.exception))
def testRFEEmptyFile2(self):
mck = mock.Mock()
attrs = {'st_size': 0}
mck.configure_mock(**attrs)
read_data = ''
mock_open = mock.mock_open(read_data=read_data)
with mock.patch('builtins.open', mock_open), mock.patch('os.stat',
return_value=mck):
with self.assertRaises(ValueError) as context:
readlist = self.rfe_feat_reader.getNFeats(2)
self.assertEqual('/foo/bar.txt is empty', str(context.exception))
def testDTFull(self):
feat = ['column1', 'column2', 'column3']
read_data = """Header
---- column1: 0.1738919473844908
---- column2: 0.1738919473844908
---- column3: 0.1738919473844908
"""
mock_open = mock.mock_open(read_data=read_data)
with mock.patch('builtins.open', mock_open), mock.patch('os.stat',
return_value=mock.Mock()):
readlist = self.dt_feat_reader.getFeats()
self.assertEqual(feat, readlist)
def testDTFull2(self):
feat = ['column1', 'column2']
read_data = """Header
---- column1: 0.1738919473844908
---- column2: 0.1738919473844908
---- column3: 0.1738919473844908
"""
mock_open = mock.mock_open(read_data=read_data)
with mock.patch('builtins.open', mock_open), mock.patch('os.stat',
return_value=mock.Mock()):
readlist = self.dt_feat_reader.getNFeats(2)
self.assertEqual(feat, readlist)
def testDTFull3(self):
feat = ['column1', 'column2']
read_data = """Header
---- column1: 0.1738919473844908
---- column2: 0.1738919473844908
---- column3: 0.1738919473844908
"""
mock_open = mock.mock_open(read_data=read_data)
with mock.patch('builtins.open', mock_open), mock.patch('os.stat',
return_value=mock.Mock()):
with self.assertRaises(ValueError) as context:
readlist = self.rfe_feat_reader.getNFeats(0)
self.assertEqual('n parameter is lower than 1 (it is 0)', str(
context.exception))
def testDTEmpty(self):
mck = mock.Mock()
attrs = {'st_size': 0}
mck.configure_mock(**attrs)
read_data = ''
mock_open = mock.mock_open(read_data=read_data)
with mock.patch('builtins.open', mock_open), mock.patch('os.stat',
return_value=mck):
with self.assertRaises(ValueError) as context:
readlist = self.dt_feat_reader.getFeats()
self.assertEqual('/foo/bar.txt is empty', str(context.exception))
def testDTEmpty2(self):
mck = mock.Mock()
attrs = {'st_size': 0}
mck.configure_mock(**attrs)
read_data = ''
mock_open = mock.mock_open(read_data=read_data)
with mock.patch('builtins.open', mock_open), mock.patch('os.stat',
return_value=mck):
with self.assertRaises(ValueError) as context:
readlist = self.dt_feat_reader.getNFeats(2)
self.assertEqual('/foo/bar.txt is empty', str(context.exception))
def testRFENotExist(self):
with self.assertRaises(IOError) as context:
readlist = self.rfe_feat_reader.getFeats()
self.assertEqual('/foo/bar.txt does not exist', str(context.exception))
def testRFENotExist2(self):
with self.assertRaises(IOError) as context:
readlist = self.rfe_feat_reader.getNFeats(3)
self.assertEqual('/foo/bar.txt does not exist', str(context.exception))
def testDTNotExist(self):
with self.assertRaises(IOError) as context:
readlist = self.dt_feat_reader.getFeats()
self.assertEqual('/foo/bar.txt does not exist', str(context.exception))
def testDTNotExist(self):
with self.assertRaises(IOError) as context:
readlist = self.dt_feat_reader.getNFeats(3)
self.assertEqual('/foo/bar.txt does not exist', str(context.exception))
<mask token>
| <mask token>
class TestFeatureReader(unittest.TestCase):
def setUp(self):
self.rfe_feat_reader = FeatureReader(RFEFeatureReader(), '/foo/bar.txt'
)
self.dt_feat_reader = FeatureReader(DTFeatureReader(), '/foo/bar.txt')
def testRFEFull(self):
feat = ['column1', 'column2', 'column3']
read_data = 'Header\n---- column1\n---- column2\n---- column3\n'
mock_open = mock.mock_open(read_data=read_data)
with mock.patch('builtins.open', mock_open), mock.patch('os.stat',
return_value=mock.Mock()):
readlist = self.rfe_feat_reader.getFeats()
self.assertEqual(feat, readlist)
def testRFEFull2(self):
feat = ['column1', 'column2']
read_data = 'Header\n---- column1\n---- column2\n---- column3\n'
mock_open = mock.mock_open(read_data=read_data)
with mock.patch('builtins.open', mock_open), mock.patch('os.stat',
return_value=mock.Mock()):
readlist = self.rfe_feat_reader.getNFeats(2)
self.assertEqual(feat, readlist)
def testRFEFull3(self):
feat = ['column1', 'column2']
read_data = 'Header\n---- column1\n---- column2\n---- column3\n'
mock_open = mock.mock_open(read_data=read_data)
with mock.patch('builtins.open', mock_open), mock.patch('os.stat',
return_value=mock.Mock()):
with self.assertRaises(ValueError) as context:
readlist = self.rfe_feat_reader.getNFeats(0)
self.assertEqual('n parameter is lower than 1 (it is 0)', str(
context.exception))
def testRFEEmptyFile(self):
mck = mock.Mock()
attrs = {'st_size': 0}
mck.configure_mock(**attrs)
read_data = ''
mock_open = mock.mock_open(read_data=read_data)
with mock.patch('builtins.open', mock_open), mock.patch('os.stat',
return_value=mck):
with self.assertRaises(ValueError) as context:
readlist = self.rfe_feat_reader.getFeats()
self.assertEqual('/foo/bar.txt is empty', str(context.exception))
def testRFEEmptyFile2(self):
mck = mock.Mock()
attrs = {'st_size': 0}
mck.configure_mock(**attrs)
read_data = ''
mock_open = mock.mock_open(read_data=read_data)
with mock.patch('builtins.open', mock_open), mock.patch('os.stat',
return_value=mck):
with self.assertRaises(ValueError) as context:
readlist = self.rfe_feat_reader.getNFeats(2)
self.assertEqual('/foo/bar.txt is empty', str(context.exception))
def testDTFull(self):
feat = ['column1', 'column2', 'column3']
read_data = """Header
---- column1: 0.1738919473844908
---- column2: 0.1738919473844908
---- column3: 0.1738919473844908
"""
mock_open = mock.mock_open(read_data=read_data)
with mock.patch('builtins.open', mock_open), mock.patch('os.stat',
return_value=mock.Mock()):
readlist = self.dt_feat_reader.getFeats()
self.assertEqual(feat, readlist)
def testDTFull2(self):
feat = ['column1', 'column2']
read_data = """Header
---- column1: 0.1738919473844908
---- column2: 0.1738919473844908
---- column3: 0.1738919473844908
"""
mock_open = mock.mock_open(read_data=read_data)
with mock.patch('builtins.open', mock_open), mock.patch('os.stat',
return_value=mock.Mock()):
readlist = self.dt_feat_reader.getNFeats(2)
self.assertEqual(feat, readlist)
def testDTFull3(self):
feat = ['column1', 'column2']
read_data = """Header
---- column1: 0.1738919473844908
---- column2: 0.1738919473844908
---- column3: 0.1738919473844908
"""
mock_open = mock.mock_open(read_data=read_data)
with mock.patch('builtins.open', mock_open), mock.patch('os.stat',
return_value=mock.Mock()):
with self.assertRaises(ValueError) as context:
readlist = self.rfe_feat_reader.getNFeats(0)
self.assertEqual('n parameter is lower than 1 (it is 0)', str(
context.exception))
def testDTEmpty(self):
mck = mock.Mock()
attrs = {'st_size': 0}
mck.configure_mock(**attrs)
read_data = ''
mock_open = mock.mock_open(read_data=read_data)
with mock.patch('builtins.open', mock_open), mock.patch('os.stat',
return_value=mck):
with self.assertRaises(ValueError) as context:
readlist = self.dt_feat_reader.getFeats()
self.assertEqual('/foo/bar.txt is empty', str(context.exception))
def testDTEmpty2(self):
mck = mock.Mock()
attrs = {'st_size': 0}
mck.configure_mock(**attrs)
read_data = ''
mock_open = mock.mock_open(read_data=read_data)
with mock.patch('builtins.open', mock_open), mock.patch('os.stat',
return_value=mck):
with self.assertRaises(ValueError) as context:
readlist = self.dt_feat_reader.getNFeats(2)
self.assertEqual('/foo/bar.txt is empty', str(context.exception))
def testRFENotExist(self):
with self.assertRaises(IOError) as context:
readlist = self.rfe_feat_reader.getFeats()
self.assertEqual('/foo/bar.txt does not exist', str(context.exception))
def testRFENotExist2(self):
with self.assertRaises(IOError) as context:
readlist = self.rfe_feat_reader.getNFeats(3)
self.assertEqual('/foo/bar.txt does not exist', str(context.exception))
def testDTNotExist(self):
with self.assertRaises(IOError) as context:
readlist = self.dt_feat_reader.getFeats()
self.assertEqual('/foo/bar.txt does not exist', str(context.exception))
def testDTNotExist(self):
with self.assertRaises(IOError) as context:
readlist = self.dt_feat_reader.getNFeats(3)
self.assertEqual('/foo/bar.txt does not exist', str(context.exception))
<mask token>
| <mask token>
class TestFeatureReader(unittest.TestCase):
def setUp(self):
self.rfe_feat_reader = FeatureReader(RFEFeatureReader(), '/foo/bar.txt'
)
self.dt_feat_reader = FeatureReader(DTFeatureReader(), '/foo/bar.txt')
def testRFEFull(self):
feat = ['column1', 'column2', 'column3']
read_data = 'Header\n---- column1\n---- column2\n---- column3\n'
mock_open = mock.mock_open(read_data=read_data)
with mock.patch('builtins.open', mock_open), mock.patch('os.stat',
return_value=mock.Mock()):
readlist = self.rfe_feat_reader.getFeats()
self.assertEqual(feat, readlist)
def testRFEFull2(self):
feat = ['column1', 'column2']
read_data = 'Header\n---- column1\n---- column2\n---- column3\n'
mock_open = mock.mock_open(read_data=read_data)
with mock.patch('builtins.open', mock_open), mock.patch('os.stat',
return_value=mock.Mock()):
readlist = self.rfe_feat_reader.getNFeats(2)
self.assertEqual(feat, readlist)
def testRFEFull3(self):
feat = ['column1', 'column2']
read_data = 'Header\n---- column1\n---- column2\n---- column3\n'
mock_open = mock.mock_open(read_data=read_data)
with mock.patch('builtins.open', mock_open), mock.patch('os.stat',
return_value=mock.Mock()):
with self.assertRaises(ValueError) as context:
readlist = self.rfe_feat_reader.getNFeats(0)
self.assertEqual('n parameter is lower than 1 (it is 0)', str(
context.exception))
def testRFEEmptyFile(self):
mck = mock.Mock()
attrs = {'st_size': 0}
mck.configure_mock(**attrs)
read_data = ''
mock_open = mock.mock_open(read_data=read_data)
with mock.patch('builtins.open', mock_open), mock.patch('os.stat',
return_value=mck):
with self.assertRaises(ValueError) as context:
readlist = self.rfe_feat_reader.getFeats()
self.assertEqual('/foo/bar.txt is empty', str(context.exception))
def testRFEEmptyFile2(self):
mck = mock.Mock()
attrs = {'st_size': 0}
mck.configure_mock(**attrs)
read_data = ''
mock_open = mock.mock_open(read_data=read_data)
with mock.patch('builtins.open', mock_open), mock.patch('os.stat',
return_value=mck):
with self.assertRaises(ValueError) as context:
readlist = self.rfe_feat_reader.getNFeats(2)
self.assertEqual('/foo/bar.txt is empty', str(context.exception))
def testDTFull(self):
feat = ['column1', 'column2', 'column3']
read_data = """Header
---- column1: 0.1738919473844908
---- column2: 0.1738919473844908
---- column3: 0.1738919473844908
"""
mock_open = mock.mock_open(read_data=read_data)
with mock.patch('builtins.open', mock_open), mock.patch('os.stat',
return_value=mock.Mock()):
readlist = self.dt_feat_reader.getFeats()
self.assertEqual(feat, readlist)
def testDTFull2(self):
feat = ['column1', 'column2']
read_data = """Header
---- column1: 0.1738919473844908
---- column2: 0.1738919473844908
---- column3: 0.1738919473844908
"""
mock_open = mock.mock_open(read_data=read_data)
with mock.patch('builtins.open', mock_open), mock.patch('os.stat',
return_value=mock.Mock()):
readlist = self.dt_feat_reader.getNFeats(2)
self.assertEqual(feat, readlist)
def testDTFull3(self):
feat = ['column1', 'column2']
read_data = """Header
---- column1: 0.1738919473844908
---- column2: 0.1738919473844908
---- column3: 0.1738919473844908
"""
mock_open = mock.mock_open(read_data=read_data)
with mock.patch('builtins.open', mock_open), mock.patch('os.stat',
return_value=mock.Mock()):
with self.assertRaises(ValueError) as context:
readlist = self.rfe_feat_reader.getNFeats(0)
self.assertEqual('n parameter is lower than 1 (it is 0)', str(
context.exception))
def testDTEmpty(self):
mck = mock.Mock()
attrs = {'st_size': 0}
mck.configure_mock(**attrs)
read_data = ''
mock_open = mock.mock_open(read_data=read_data)
with mock.patch('builtins.open', mock_open), mock.patch('os.stat',
return_value=mck):
with self.assertRaises(ValueError) as context:
readlist = self.dt_feat_reader.getFeats()
self.assertEqual('/foo/bar.txt is empty', str(context.exception))
def testDTEmpty2(self):
mck = mock.Mock()
attrs = {'st_size': 0}
mck.configure_mock(**attrs)
read_data = ''
mock_open = mock.mock_open(read_data=read_data)
with mock.patch('builtins.open', mock_open), mock.patch('os.stat',
return_value=mck):
with self.assertRaises(ValueError) as context:
readlist = self.dt_feat_reader.getNFeats(2)
self.assertEqual('/foo/bar.txt is empty', str(context.exception))
def testRFENotExist(self):
with self.assertRaises(IOError) as context:
readlist = self.rfe_feat_reader.getFeats()
self.assertEqual('/foo/bar.txt does not exist', str(context.exception))
def testRFENotExist2(self):
with self.assertRaises(IOError) as context:
readlist = self.rfe_feat_reader.getNFeats(3)
self.assertEqual('/foo/bar.txt does not exist', str(context.exception))
def testDTNotExist(self):
with self.assertRaises(IOError) as context:
readlist = self.dt_feat_reader.getFeats()
self.assertEqual('/foo/bar.txt does not exist', str(context.exception))
def testDTNotExist(self):
with self.assertRaises(IOError) as context:
readlist = self.dt_feat_reader.getNFeats(3)
self.assertEqual('/foo/bar.txt does not exist', str(context.exception))
if __name__ == '__main__':
unittest.main
| import unittest
from FileFeatureReader.featurereaders import RFEFeatureReader, DTFeatureReader
from FileFeatureReader.featurereader import FeatureReader
from unittest import mock
from unittest.mock import patch
import builtins
class TestFeatureReader(unittest.TestCase):
def setUp(self):
self.rfe_feat_reader = FeatureReader(RFEFeatureReader(), '/foo/bar.txt'
)
self.dt_feat_reader = FeatureReader(DTFeatureReader(), '/foo/bar.txt')
def testRFEFull(self):
feat = ['column1', 'column2', 'column3']
read_data = 'Header\n---- column1\n---- column2\n---- column3\n'
mock_open = mock.mock_open(read_data=read_data)
with mock.patch('builtins.open', mock_open), mock.patch('os.stat',
return_value=mock.Mock()):
readlist = self.rfe_feat_reader.getFeats()
self.assertEqual(feat, readlist)
def testRFEFull2(self):
feat = ['column1', 'column2']
read_data = 'Header\n---- column1\n---- column2\n---- column3\n'
mock_open = mock.mock_open(read_data=read_data)
with mock.patch('builtins.open', mock_open), mock.patch('os.stat',
return_value=mock.Mock()):
readlist = self.rfe_feat_reader.getNFeats(2)
self.assertEqual(feat, readlist)
def testRFEFull3(self):
feat = ['column1', 'column2']
read_data = 'Header\n---- column1\n---- column2\n---- column3\n'
mock_open = mock.mock_open(read_data=read_data)
with mock.patch('builtins.open', mock_open), mock.patch('os.stat',
return_value=mock.Mock()):
with self.assertRaises(ValueError) as context:
readlist = self.rfe_feat_reader.getNFeats(0)
self.assertEqual('n parameter is lower than 1 (it is 0)', str(
context.exception))
def testRFEEmptyFile(self):
mck = mock.Mock()
attrs = {'st_size': 0}
mck.configure_mock(**attrs)
read_data = ''
mock_open = mock.mock_open(read_data=read_data)
with mock.patch('builtins.open', mock_open), mock.patch('os.stat',
return_value=mck):
with self.assertRaises(ValueError) as context:
readlist = self.rfe_feat_reader.getFeats()
self.assertEqual('/foo/bar.txt is empty', str(context.exception))
def testRFEEmptyFile2(self):
mck = mock.Mock()
attrs = {'st_size': 0}
mck.configure_mock(**attrs)
read_data = ''
mock_open = mock.mock_open(read_data=read_data)
with mock.patch('builtins.open', mock_open), mock.patch('os.stat',
return_value=mck):
with self.assertRaises(ValueError) as context:
readlist = self.rfe_feat_reader.getNFeats(2)
self.assertEqual('/foo/bar.txt is empty', str(context.exception))
def testDTFull(self):
feat = ['column1', 'column2', 'column3']
read_data = """Header
---- column1: 0.1738919473844908
---- column2: 0.1738919473844908
---- column3: 0.1738919473844908
"""
mock_open = mock.mock_open(read_data=read_data)
with mock.patch('builtins.open', mock_open), mock.patch('os.stat',
return_value=mock.Mock()):
readlist = self.dt_feat_reader.getFeats()
self.assertEqual(feat, readlist)
def testDTFull2(self):
feat = ['column1', 'column2']
read_data = """Header
---- column1: 0.1738919473844908
---- column2: 0.1738919473844908
---- column3: 0.1738919473844908
"""
mock_open = mock.mock_open(read_data=read_data)
with mock.patch('builtins.open', mock_open), mock.patch('os.stat',
return_value=mock.Mock()):
readlist = self.dt_feat_reader.getNFeats(2)
self.assertEqual(feat, readlist)
def testDTFull3(self):
feat = ['column1', 'column2']
read_data = """Header
---- column1: 0.1738919473844908
---- column2: 0.1738919473844908
---- column3: 0.1738919473844908
"""
mock_open = mock.mock_open(read_data=read_data)
with mock.patch('builtins.open', mock_open), mock.patch('os.stat',
return_value=mock.Mock()):
with self.assertRaises(ValueError) as context:
readlist = self.rfe_feat_reader.getNFeats(0)
self.assertEqual('n parameter is lower than 1 (it is 0)', str(
context.exception))
def testDTEmpty(self):
mck = mock.Mock()
attrs = {'st_size': 0}
mck.configure_mock(**attrs)
read_data = ''
mock_open = mock.mock_open(read_data=read_data)
with mock.patch('builtins.open', mock_open), mock.patch('os.stat',
return_value=mck):
with self.assertRaises(ValueError) as context:
readlist = self.dt_feat_reader.getFeats()
self.assertEqual('/foo/bar.txt is empty', str(context.exception))
def testDTEmpty2(self):
mck = mock.Mock()
attrs = {'st_size': 0}
mck.configure_mock(**attrs)
read_data = ''
mock_open = mock.mock_open(read_data=read_data)
with mock.patch('builtins.open', mock_open), mock.patch('os.stat',
return_value=mck):
with self.assertRaises(ValueError) as context:
readlist = self.dt_feat_reader.getNFeats(2)
self.assertEqual('/foo/bar.txt is empty', str(context.exception))
def testRFENotExist(self):
with self.assertRaises(IOError) as context:
readlist = self.rfe_feat_reader.getFeats()
self.assertEqual('/foo/bar.txt does not exist', str(context.exception))
def testRFENotExist2(self):
with self.assertRaises(IOError) as context:
readlist = self.rfe_feat_reader.getNFeats(3)
self.assertEqual('/foo/bar.txt does not exist', str(context.exception))
def testDTNotExist(self):
with self.assertRaises(IOError) as context:
readlist = self.dt_feat_reader.getFeats()
self.assertEqual('/foo/bar.txt does not exist', str(context.exception))
def testDTNotExist(self):
with self.assertRaises(IOError) as context:
readlist = self.dt_feat_reader.getNFeats(3)
self.assertEqual('/foo/bar.txt does not exist', str(context.exception))
if __name__ == '__main__':
unittest.main
| import unittest
from FileFeatureReader.featurereaders import RFEFeatureReader, DTFeatureReader
from FileFeatureReader.featurereader import FeatureReader
from unittest import mock
from unittest.mock import patch
import builtins
class TestFeatureReader(unittest.TestCase):
def setUp(self):
self.rfe_feat_reader = FeatureReader(RFEFeatureReader(), "/foo/bar.txt")
self.dt_feat_reader = FeatureReader(DTFeatureReader(), "/foo/bar.txt")
def testRFEFull(self):
feat = ['column1', 'column2', 'column3']
read_data = 'Header\n---- column1\n---- column2\n---- column3\n'
mock_open = mock.mock_open(read_data=read_data)
with mock.patch("builtins.open", mock_open), mock.patch("os.stat", return_value=mock.Mock()):
readlist = self.rfe_feat_reader.getFeats()
self.assertEqual(feat, readlist)
def testRFEFull2(self):
feat = ['column1', 'column2']
read_data = 'Header\n---- column1\n---- column2\n---- column3\n'
mock_open = mock.mock_open(read_data=read_data)
with mock.patch("builtins.open", mock_open), mock.patch("os.stat", return_value=mock.Mock()):
readlist = self.rfe_feat_reader.getNFeats(2)
self.assertEqual(feat, readlist)
def testRFEFull3(self):
feat = ['column1', 'column2']
read_data = 'Header\n---- column1\n---- column2\n---- column3\n'
mock_open = mock.mock_open(read_data=read_data)
with mock.patch("builtins.open", mock_open), mock.patch("os.stat", return_value=mock.Mock()):
with self.assertRaises(ValueError) as context:
readlist = self.rfe_feat_reader.getNFeats(0)
self.assertEqual('n parameter is lower than 1 (it is 0)', str(context.exception))
def testRFEEmptyFile(self):
mck = mock.Mock()
attrs = {'st_size': 0}
mck.configure_mock(**attrs)
read_data = ''
mock_open = mock.mock_open(read_data=read_data)
with mock.patch("builtins.open", mock_open), mock.patch("os.stat", return_value=mck):
with self.assertRaises(ValueError) as context:
readlist = self.rfe_feat_reader.getFeats()
self.assertEqual('/foo/bar.txt is empty', str(context.exception))
def testRFEEmptyFile2(self):
mck = mock.Mock()
attrs = {'st_size': 0}
mck.configure_mock(**attrs)
read_data = ''
mock_open = mock.mock_open(read_data=read_data)
with mock.patch("builtins.open", mock_open), mock.patch("os.stat", return_value=mck):
with self.assertRaises(ValueError) as context:
readlist = self.rfe_feat_reader.getNFeats(2)
self.assertEqual('/foo/bar.txt is empty', str(context.exception))
def testDTFull(self):
feat = ['column1', 'column2', 'column3']
read_data = 'Header\n---- column1: 0.1738919473844908\n---- column2: 0.1738919473844908\n---- column3: 0.1738919473844908\n'
mock_open = mock.mock_open(read_data=read_data)
with mock.patch("builtins.open", mock_open), mock.patch("os.stat", return_value=mock.Mock()):
readlist = self.dt_feat_reader.getFeats()
self.assertEqual(feat, readlist)
def testDTFull2(self):
feat = ['column1', 'column2']
read_data = 'Header\n---- column1: 0.1738919473844908\n---- column2: 0.1738919473844908\n---- column3: 0.1738919473844908\n'
mock_open = mock.mock_open(read_data=read_data)
with mock.patch("builtins.open", mock_open), mock.patch("os.stat", return_value=mock.Mock()):
readlist = self.dt_feat_reader.getNFeats(2)
self.assertEqual(feat, readlist)
def testDTFull3(self):
feat = ['column1', 'column2']
read_data = 'Header\n---- column1: 0.1738919473844908\n---- column2: 0.1738919473844908\n---- column3: 0.1738919473844908\n'
mock_open = mock.mock_open(read_data=read_data)
with mock.patch("builtins.open", mock_open), mock.patch("os.stat", return_value=mock.Mock()):
with self.assertRaises(ValueError) as context:
readlist = self.rfe_feat_reader.getNFeats(0)
self.assertEqual('n parameter is lower than 1 (it is 0)', str(context.exception))
def testDTEmpty(self):
mck = mock.Mock()
attrs = {'st_size': 0}
mck.configure_mock(**attrs)
read_data = ''
mock_open = mock.mock_open(read_data=read_data)
with mock.patch("builtins.open", mock_open), mock.patch("os.stat", return_value=mck):
with self.assertRaises(ValueError) as context:
readlist = self.dt_feat_reader.getFeats()
self.assertEqual('/foo/bar.txt is empty', str(context.exception))
def testDTEmpty2(self):
mck = mock.Mock()
attrs = {'st_size': 0}
mck.configure_mock(**attrs)
read_data = ''
mock_open = mock.mock_open(read_data=read_data)
with mock.patch("builtins.open", mock_open), mock.patch("os.stat", return_value=mck):
with self.assertRaises(ValueError) as context:
readlist = self.dt_feat_reader.getNFeats(2)
self.assertEqual('/foo/bar.txt is empty', str(context.exception))
def testRFENotExist(self):
with self.assertRaises(IOError) as context:
readlist = self.rfe_feat_reader.getFeats()
self.assertEqual('/foo/bar.txt does not exist', str(context.exception))
def testRFENotExist2(self):
with self.assertRaises(IOError) as context:
readlist = self.rfe_feat_reader.getNFeats(3)
self.assertEqual('/foo/bar.txt does not exist', str(context.exception))
def testDTNotExist(self):
with self.assertRaises(IOError) as context:
readlist = self.dt_feat_reader.getFeats()
self.assertEqual('/foo/bar.txt does not exist', str(context.exception))
def testDTNotExist(self):
with self.assertRaises(IOError) as context:
readlist = self.dt_feat_reader.getNFeats(3)
self.assertEqual('/foo/bar.txt does not exist', str(context.exception))
if __name__ == '__main__':
unittest.main | [
14,
16,
17,
18,
19
] |
2,049 | f570d7e723fd0bec8c51022912a7dab4795fad43 | #!/usr/bin/python
import socket
import sys
from ctypes import *
import re
if len(sys.argv) == 3:
TCP_IP = sys.argv[1]
TCP_PORT = int(sys.argv[2])
else:
TCP_IP = "127.0.0.1"
TCP_PORT = 5005
BUFFER_SIZE = 1024
MESSAGE = "Hello, World!\n"
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
print "Connecting to " + TCP_IP + " on port " + str(TCP_PORT)
s.connect((TCP_IP, TCP_PORT))
s.send(TCP_IP + "\n")
#s.send(gpsMessage() + antennaMessage())
#data = s.recv(BUFFER_SIZE)
#print "received data:", data
#parseCommand(data)
s.close()
| null | null | null | null | [
0
] |
2,050 | 4eac468db955ca5ef5d2ec6ba67bd6c7f4d865f4 | <mask token>
| class TestRawJob:
<mask token>
| class TestRawJob:
def __init__(self, parsedRow):
values = [string.strip().lower() for string in parsedRow]
keys = ['Id', 'Title', 'Description', 'Raw Location',
'Normalized Location', 'Contract Type', 'Contract Time',
'Company', 'Category', 'Source']
self.data = dict(zip(keys, values))
class TestRawJob:
    """One raw job record built from a parsed CSV row.

    Every positional field is stripped of surrounding whitespace and
    lowercased, then mapped onto the fixed column names in ``self.data``.
    Rows shorter than the key list simply produce fewer entries (zip
    truncates to the shorter sequence).
    """

    def __init__(self, parsedRow):
        keys = ("Id", "Title", "Description", "Raw Location",
                "Normalized Location", "Contract Type", "Contract Time",
                "Company", "Category", "Source")
        cleaned = (field.strip().lower() for field in parsedRow)
        self.data = dict(zip(keys, cleaned))
| null | [
0,
1,
2,
3
] |
2,051 | aae09dafeb10a1f9ed260439e63e4aaadadc3768 | <mask token>
def loadFile():
global data
x = 0
data = []
subs = ['image', 'text file']
exts = ['.jpg', '.txt']
while x < 2:
check = pathlib.Path(input(f'Enter {subs[x]} name: ')).with_suffix(exts
[x])
if check.is_file():
data.insert(x, check)
if x < len(subs):
x += 1
else:
print("Couldn't find the file. Try again.")
x = 0
def stringToBinary():
global binaryTextFile
f = open(data[1], 'r')
if f.mode == 'r':
textFile = f.read()
binaryTextFile = wrap(' '.join(format(x, 'b') for x in bytearray(
textFile, encoding='ASCII')), 2)
def imageToBinary():
global bin_list
imgFile = open(data[0], 'rb')
imgData = imgFile.read()
imgFile.close()
hex_str = str(binascii.hexlify(imgData))
hex_list = []
bin_list = []
for i in range(2, len(hex_str) - 1, 2):
hex = hex_str[i] + hex_str[i + 1]
hex_list.append(hex)
bin_list.append(bin(int(hex, 16))[2:])
bin_list = wrap(''.join(bin_list), 8)
def enc():
global result
x = 0
result = []
result = bin_list
for i in range(len(binaryTextFile)):
imgByte = str(bin_list[x])
textBits = str(binaryTextFile[x])
newImgByte = imgByte[:-2] + textBits
result.insert(x, newImgByte)
if x < len(binaryTextFile):
x += 1
else:
break
<mask token>
| <mask token>
def loadFile():
global data
x = 0
data = []
subs = ['image', 'text file']
exts = ['.jpg', '.txt']
while x < 2:
check = pathlib.Path(input(f'Enter {subs[x]} name: ')).with_suffix(exts
[x])
if check.is_file():
data.insert(x, check)
if x < len(subs):
x += 1
else:
print("Couldn't find the file. Try again.")
x = 0
def stringToBinary():
global binaryTextFile
f = open(data[1], 'r')
if f.mode == 'r':
textFile = f.read()
binaryTextFile = wrap(' '.join(format(x, 'b') for x in bytearray(
textFile, encoding='ASCII')), 2)
def imageToBinary():
global bin_list
imgFile = open(data[0], 'rb')
imgData = imgFile.read()
imgFile.close()
hex_str = str(binascii.hexlify(imgData))
hex_list = []
bin_list = []
for i in range(2, len(hex_str) - 1, 2):
hex = hex_str[i] + hex_str[i + 1]
hex_list.append(hex)
bin_list.append(bin(int(hex, 16))[2:])
bin_list = wrap(''.join(bin_list), 8)
def enc():
global result
x = 0
result = []
result = bin_list
for i in range(len(binaryTextFile)):
imgByte = str(bin_list[x])
textBits = str(binaryTextFile[x])
newImgByte = imgByte[:-2] + textBits
result.insert(x, newImgByte)
if x < len(binaryTextFile):
x += 1
else:
break
def newImg():
x = 0
arr = []
tmp = ''
for i in range(len(result)):
tmp = tmp + str(result[x])
if x < len(result):
x += 1
else:
break
arr = wrap(''.join(tmp), 1)
print(arr)
size = 5
dt = struct.pack('B' * len(arr), *[(pixel * 255) for pixel in arr])
img = Image.frombuffer('L', size, dt)
img.save('final.jpg')
<mask token>
| <mask token>
def loadFile():
global data
x = 0
data = []
subs = ['image', 'text file']
exts = ['.jpg', '.txt']
while x < 2:
check = pathlib.Path(input(f'Enter {subs[x]} name: ')).with_suffix(exts
[x])
if check.is_file():
data.insert(x, check)
if x < len(subs):
x += 1
else:
print("Couldn't find the file. Try again.")
x = 0
def stringToBinary():
global binaryTextFile
f = open(data[1], 'r')
if f.mode == 'r':
textFile = f.read()
binaryTextFile = wrap(' '.join(format(x, 'b') for x in bytearray(
textFile, encoding='ASCII')), 2)
def imageToBinary():
global bin_list
imgFile = open(data[0], 'rb')
imgData = imgFile.read()
imgFile.close()
hex_str = str(binascii.hexlify(imgData))
hex_list = []
bin_list = []
for i in range(2, len(hex_str) - 1, 2):
hex = hex_str[i] + hex_str[i + 1]
hex_list.append(hex)
bin_list.append(bin(int(hex, 16))[2:])
bin_list = wrap(''.join(bin_list), 8)
def enc():
global result
x = 0
result = []
result = bin_list
for i in range(len(binaryTextFile)):
imgByte = str(bin_list[x])
textBits = str(binaryTextFile[x])
newImgByte = imgByte[:-2] + textBits
result.insert(x, newImgByte)
if x < len(binaryTextFile):
x += 1
else:
break
def newImg():
x = 0
arr = []
tmp = ''
for i in range(len(result)):
tmp = tmp + str(result[x])
if x < len(result):
x += 1
else:
break
arr = wrap(''.join(tmp), 1)
print(arr)
size = 5
dt = struct.pack('B' * len(arr), *[(pixel * 255) for pixel in arr])
img = Image.frombuffer('L', size, dt)
img.save('final.jpg')
loadFile()
stringToBinary()
imageToBinary()
enc()
newImg()
| import pathlib, binascii
from textwrap import wrap
from PIL import Image
import struct
def loadFile():
global data
x = 0
data = []
subs = ['image', 'text file']
exts = ['.jpg', '.txt']
while x < 2:
check = pathlib.Path(input(f'Enter {subs[x]} name: ')).with_suffix(exts
[x])
if check.is_file():
data.insert(x, check)
if x < len(subs):
x += 1
else:
print("Couldn't find the file. Try again.")
x = 0
def stringToBinary():
global binaryTextFile
f = open(data[1], 'r')
if f.mode == 'r':
textFile = f.read()
binaryTextFile = wrap(' '.join(format(x, 'b') for x in bytearray(
textFile, encoding='ASCII')), 2)
def imageToBinary():
global bin_list
imgFile = open(data[0], 'rb')
imgData = imgFile.read()
imgFile.close()
hex_str = str(binascii.hexlify(imgData))
hex_list = []
bin_list = []
for i in range(2, len(hex_str) - 1, 2):
hex = hex_str[i] + hex_str[i + 1]
hex_list.append(hex)
bin_list.append(bin(int(hex, 16))[2:])
bin_list = wrap(''.join(bin_list), 8)
def enc():
global result
x = 0
result = []
result = bin_list
for i in range(len(binaryTextFile)):
imgByte = str(bin_list[x])
textBits = str(binaryTextFile[x])
newImgByte = imgByte[:-2] + textBits
result.insert(x, newImgByte)
if x < len(binaryTextFile):
x += 1
else:
break
def newImg():
x = 0
arr = []
tmp = ''
for i in range(len(result)):
tmp = tmp + str(result[x])
if x < len(result):
x += 1
else:
break
arr = wrap(''.join(tmp), 1)
print(arr)
size = 5
dt = struct.pack('B' * len(arr), *[(pixel * 255) for pixel in arr])
img = Image.frombuffer('L', size, dt)
img.save('final.jpg')
loadFile()
stringToBinary()
imageToBinary()
enc()
newImg()
| import pathlib, binascii
from textwrap import wrap
from PIL import Image
import struct
def loadFile():
    """Prompt for an image (.jpg) and a text (.txt) file name until both exist.

    Stores the validated pathlib.Path objects in the module-global ``data``
    list: data[0] = image path, data[1] = text file path.
    """
    global data
    x = 0
    data = []
    subs = ["image", "text file"]
    exts = [".jpg", ".txt"]
    while x < 2:
        # Force the expected extension regardless of what the user typed.
        check = pathlib.Path(input(f"Enter {subs[x]} name: ")).with_suffix(exts[x]) # To be fixed
        if check.is_file():
            data.insert(x, check)
            if(x < len(subs)):
                x+=1
        else:
            print("Couldn't find the file. Try again.")
            # NOTE(review): resetting to 0 re-prompts for the image even after it
            # was accepted, and a second insert(0, ...) would leave a duplicate
            # entry in ``data`` — confirm this restart behaviour is intended.
            x = 0
def stringToBinary():
    """Read the chosen text file and expose its bits via global ``binaryTextFile``.

    NOTE(review): the payload is built by space-joining variable-length
    bin() strings and then re-wrapping into 2-character chunks, so chunks
    may contain the separator spaces and bytes are not zero-padded to 8
    bits — confirm this is the intended bit layout.
    """
    global binaryTextFile
    f = open(data[1], "r")
    if f.mode == "r":
        textFile = f.read()
        binaryTextFile = wrap(' '.join(format(x, 'b') for x in bytearray(textFile, encoding='ASCII')), 2)
def imageToBinary():
    """Read the image file and expose its bytes as binary strings in global ``bin_list``."""
    global bin_list
    imgFile = open(data[0], "rb")
    imgData = imgFile.read()
    imgFile.close()
    # str() of a bytes object yields "b'...'", hence the loop below starts at
    # index 2 (skipping the leading b') and stops before the closing quote.
    hex_str = str(binascii.hexlify(imgData))
    hex_list = []
    bin_list = []
    for i in range(2, len(hex_str)-1, 2):
        hex = hex_str[i] + hex_str[i+1]
        hex_list.append(hex)
        # NOTE(review): bin() drops leading zeros, so joining these and
        # re-wrapping to 8 chars below can misalign byte boundaries — verify.
        bin_list.append(bin(int(hex, 16))[2:])
    bin_list = wrap(''.join(bin_list), 8)
def enc():
    """Embed the text payload into the image bytes, building global ``result``.

    Replaces the two lowest bits of each image byte string with the next
    2-bit chunk of ``binaryTextFile``.
    """
    global result
    x = 0
    result = []
    # NOTE(review): this rebinds ``result`` to the *same* list object as
    # ``bin_list``, and insert() below grows that shared list rather than
    # overwriting entries in place — confirm intended.
    result = bin_list
    for i in range(len(binaryTextFile)):
        imgByte = str(bin_list[x])
        textBits = str(binaryTextFile[x])
        newImgByte = imgByte[:-2] + textBits
        result.insert(x, newImgByte)
        if(x < len(binaryTextFile)):
            x+=1
        else:
            break
def newImg():
    """Reassemble the stego bit stream from ``result`` and write it out as final.jpg.

    NOTE(review): this function looks unfinished (see the '# to fix' marker):
    ``arr`` holds one-character *strings*, but struct.pack('B', ...) requires
    ints, and Image.frombuffer expects a (width, height) tuple rather than
    the int ``size`` — as written it will raise at runtime.
    """
    x = 0
    arr = []
    tmp = ""
    for i in range(len(result)):
        tmp = tmp + str(result[x])
        if(x < len(result)):
            x+=1
        else:
            break
    arr = wrap(''.join(tmp), 1)
    print(arr)
    size = 5
    dt = struct.pack('B'*len(arr), *[pixel*255 for pixel in arr]) # to fix
    img = Image.frombuffer('L', size, dt)
    img.save('final.jpg')
# Script entry: gather the two input files, convert both to bit strings,
# embed the text bits into the image bytes, and write the result.
loadFile()
stringToBinary()
imageToBinary()
enc()
newImg()
4,
5,
6,
7,
8
] |
2,052 | 3683b1f799fa315d736e4b62c9c093360afa893f | # -*- coding: utf-8 -*-
#!/bin/python3
import websocket
import json
import time
from loraCrypto import LoRaCrypto
from binascii import hexlify
'''
没有加密的数据
{
cmd: 'tx';
EUI: string;
port: number;
data: string
}
加密的数据
{
cmd: 'tx';
EUI: string;
port: number;
encdata: string;
seqno: number;
}
'''
# NOTE(review): credentials and session keys are hard-coded below — rotate
# them and move to configuration/environment before publishing.
GATEWAY_ID = "be7a0029"
TOKEN = "7AXCO2-Kkle42YGVVKvmmQ"
# Target device information
EUI = "BE7A0000000005D2"
ADDR = "00aa1174"
LASTEST_SEQ = 4739
APP_SKEY = "2b7e151628aed2a6abf7158809cf4f3c"
# File to download (sent to the device in PACKET_SIZE-byte chunks)
FILE_NAME = "lora.bin"
PACKET_SIZE = 50
sendData = {}
def main():
    """Push FILE_NAME to the target LoRa device over the loriot.io websocket.

    The file is split into PACKET_SIZE-byte packets; each packet is
    encrypted with APP_SKEY (using ``seq`` as the frame counter) and sent
    as a downlink 'tx' command, one packet every 10 seconds.
    """
    ws = websocket.WebSocket()
    ws.connect("wss://www.loriot.io/app?id="+GATEWAY_ID+"&token="+TOKEN)
    lc = LoRaCrypto()
    with open(FILE_NAME, "rb") as downloadFile:
        binData =downloadFile.read()
    # Number of full packets; the loop below runs count+1 times to also
    # send the final partial packet.
    count = len(binData) // PACKET_SIZE
    sendData["cmd"] = "tx"
    sendData["EUI"] = EUI
    sendData["port"] = 1
    seq = LASTEST_SEQ
    print("Upload start!")
    for i in range(count+1):
        packetBin = binData[i*PACKET_SIZE:i*PACKET_SIZE+PACKET_SIZE]
        packetStr = hexlify(packetBin).decode()
        packetEncStr = lc.PayloadEncrypt(packetStr, APP_SKEY, ADDR, 1, seq)
        sendData["encdata"] = packetEncStr
        sendData["seqno"] = seq
        print("Packet %d:" % i)
        print("Before encrypt:")
        print(packetStr)
        print("After encrypt:")
        print(packetEncStr)
        print("Sequence is %d" % seq)
        ws.send(json.dumps(sendData))
        seq += 1
        # Pace downlinks so the gateway/device can keep up.
        time.sleep(10)
    print("Upload finish!")
    ws.close()
if __name__ == "__main__":
    # Run the firmware upload. The previous guard body imported an
    # unrelated server module and used Python 2 `print` statement syntax
    # (a SyntaxError under Python 3, which the rest of this script
    # targets) and never invoked main() at all.
    main()
| null | null | null | null | [
0
] |
2,053 | fadf16792822926cb7b7386291e52ce44693baf8 | <mask token>
class UserViewSet(viewsets.ModelViewSet):
<mask token>
<mask token>
<mask token>
| <mask token>
class UserViewSet(viewsets.ModelViewSet):
<mask token>
queryset = UserCustom.objects.all()
serializer_class = UserSerializer
| <mask token>
class UserViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows users to be viewed or edited.
"""
queryset = UserCustom.objects.all()
serializer_class = UserSerializer
| from rest_framework import viewsets
from .serializers import UserSerializer
from .models import UserCustom
class UserViewSet(viewsets.ModelViewSet):
    """
    API endpoint that allows users to be viewed or edited.

    ModelViewSet provides the standard CRUD actions (list/retrieve/
    create/update/partial_update/destroy) over the queryset below.
    """
    # All UserCustom rows; serialized via UserSerializer.
    queryset = UserCustom.objects.all()
    serializer_class = UserSerializer
| null | [
1,
2,
3,
4
] |
2,054 | 567076af26b8c93c68647103aeddf43aeb24db13 | <mask token>
class Resources(object):
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
@property
def makespan(self):
eft = 0
for i in range(0, self.len):
tasks_in_resource = self.tasksOfResource[i]
if len(tasks_in_resource) == 0:
continue
eft = max(eft, tasks_in_resource[-1].EFT)
return eft
<mask token>
@property
def sum_internal_gaps(self):
sum_gaps = 0
for r in range(0, self.len):
sum_gaps += self.sum_gaps_resource(r)
return sum_gaps
<mask token>
<mask token>
class CostAwareResources(Resources):
def __init__(self, powers, prices, timeslot_len, bandwidth):
super(CostAwareResources, self).__init__(powers, bandwidth)
self.timeslot = timeslot_len
self.price = prices
self.head_nodes = {}
self.sum_weight_scheduled = {}
def resource_cost(self, resource_id, start_time=-1, eft=-1, cost_only=True
):
"""
computes a resource's cost. if cost_only==True, only returns cost, otherwise it returns also start and finish-times.
:param resource_id:
:param start_time:
:param eft:
:param cost_only:
:return:
"""
tasks_in_resource = [t for t in self.tasksOfResource[resource_id] if
not t.task.dummy_task]
if not tasks_in_resource:
if eft == -1:
return 0 if cost_only else (0, 0, 0)
else:
return math.ceil((eft - start_time) / self.timeslot[
resource_id]) * self.price[resource_id]
if start_time != -1:
task_start_time = min(tasks_in_resource[0].EST, start_time)
else:
task_start_time = tasks_in_resource[0].EST
task_finish_time = max(tasks_in_resource[-1].EFT, eft)
reservation = task_finish_time - task_start_time
cost = math.ceil(reservation / self.timeslot[resource_id]
) * self.price[resource_id]
timeslot = self.timeslot[resource_id]
startof = [x.EST for x in tasks_in_resource]
endof = [x.EFT for x in tasks_in_resource]
if start_time != -1:
startof.append(start_time)
endof.append(eft)
startof.sort()
endof.sort()
timeslot_start = min(startof)
last_finish_time = max(endof)
current_task_id = 0
rent_periods = []
while timeslot_start < last_finish_time:
task_len = endof[current_task_id] - timeslot_start
time_slot_finish = endof[current_task_id] + (timeslot -
task_len % timeslot) % timeslot
current_task_id += 1
if current_task_id >= len(startof):
rent_periods.append((timeslot_start, time_slot_finish))
break
if startof[current_task_id] <= time_slot_finish:
pass
else:
rent_periods.append((timeslot_start, time_slot_finish))
timeslot_start = startof[current_task_id]
sum = 0
for rp in rent_periods:
sum += rp[1] - rp[0]
cost = sum / timeslot * self.price[resource_id]
if cost_only:
return cost
else:
return cost, min(startof), max(endof)
def resource_start_time(self, resource_id):
tasks_in_resource = self.tasksOfResource[resource_id]
length = len(tasks_in_resource)
start_index = 0
while length > 0 and tasks_in_resource[start_index].task.dummy_task:
start_index += 1
length -= 1
if length == 0:
return -1
return tasks_in_resource[start_index].EST
@property
def plan_cost(self):
cost = 0
for i in range(0, self.len):
cost += self.resource_cost(i)
return cost
def calculate_shared_cost_within_timeslot(self, timeslot_start, est, ft,
resource_id, task_id=None):
timeslot_end = timeslot_start + self.timeslot[resource_id]
if ft <= timeslot_start or est >= timeslot_end:
return 0
tasks = self.tasksOfResource[resource_id]
task_ids = self.task_id_in_timeslot(resource_id, timeslot_start)
sum_w = 0
for id in task_ids:
if task_id == id:
continue
start_time = tasks[id].EST
finish_time = tasks[id].EFT
if start_time < timeslot_start:
start_time = timeslot_start
if finish_time > timeslot_end:
finish_time = timeslot_end
sum_w += finish_time - start_time
if est < timeslot_start:
est = timeslot_start
if ft > timeslot_end:
ft = timeslot_end
if ft == est:
return 0
share = float(ft - est) / (sum_w + ft - est)
return share * self.price[resource_id]
def task_id_in_timeslot(self, resource_id, timeslot_start):
timeslot_end = timeslot_start + self.timeslot[resource_id]
task_ids = []
for id in range(len(self.tasksOfResource[resource_id])):
s = self.tasksOfResource[resource_id][id]
if (timeslot_start <= s.EST <= timeslot_end or timeslot_start <=
s.EFT <= timeslot_end or s.EST < timeslot_start and
timeslot_end < s.EFT):
task_ids.append(id)
return task_ids
def calculate_task_shared_cost(self, est=-1, ft=-1, resource_id=-1,
task_id=None):
if task_id is not None:
est = self.tasksOfResource[resource_id][task_id].EST
ft = self.tasksOfResource[resource_id][task_id].EFT
timeslot_len = self.timeslot[resource_id]
resource_start_time = self.resource_start_time(resource_id)
if resource_start_time == -1:
resource_start_time = est
timeslot_start = float(timeslot_len) * math.floor((est -
resource_start_time) / timeslot_len) + resource_start_time
timeslot_end = float(timeslot_len) * math.ceil((ft -
resource_start_time) / timeslot_len) + resource_start_time
shared_cost = 0
for interval in f_range(timeslot_start, timeslot_end + timeslot_len /
2, timeslot_len):
share_in_interval = self.calculate_shared_cost_within_timeslot(
interval, est, ft, resource_id, task_id)
shared_cost += share_in_interval
return shared_cost
def calculate_eft_and_cost(self, task, resource_id, arrival_time=0):
"""
calculates eft and cost of a certain task on a certain resource.
:param task:Definitions.Task()
:param resource_id:
:return:
"""
start_time, eft, runtime_on_resource, place_id = self.calculate_eft(
task, resource_id, arrival_time=arrival_time)
if task.dummy_task:
return start_time, eft, runtime_on_resource, place_id, 0
else:
cost = self.calculate_share_cost_change(resource_id, start_time,
eft, task.graph.name, True)
return start_time, eft, runtime_on_resource, place_id, cost
def sum_external_gaps_resource(self, r):
c, s, e = self.resource_cost(r, cost_only=False)
reservation = e - s
timeslot = self.timeslot[r]
gap = timeslot - reservation % timeslot
if gap == timeslot:
return 0
else:
return gap
@property
def sum_external_gaps(self):
sum_gaps = 0
for r in range(0, self.len):
sum_gaps += self.sum_external_gaps_resource(r)
return sum_gaps
@property
def sum_gaps(self):
return self.sum_internal_gaps + self.sum_external_gaps
@property
def occupied_resources(self):
counter = 0
for i in range(self.len):
if self.resource_cost(i) != 0:
counter += self.price[i]
return counter
@property
def gap_rate(self):
return self.sum_gaps / self.makespan / self.occupied_resources
def select_resource(self, task=Task(), test=None, arrival_time=0):
eft_best = -1
def something_found():
return eft_best != -1
if task.asap is not None:
if not task.asap:
if not test:
print('', end='')
(est_best, eft_best, runtime_on_resource_best,
place_id_best, resource_id_best, cost_best
) = -1, -1, -1, -1, -1, -1
for r in range(0, self.len):
(start_time, eft, runtime_on_resource, place_id, cost) = (
self.calculate_eft_and_cost(task, r, arrival_time=
arrival_time))
if (not something_found() or eft < eft_best and task.
sub_deadline < eft_best or task.sub_budget <
cost_best and eft <= task.sub_deadline and cost <
cost_best or eft <= task.sub_deadline and cost <=
task.sub_budget and (eft_best > task.sub_deadline or
cost_best > task.sub_budget) or eft <= task.
sub_deadline and cost <= task.sub_budget and eft <
eft_best or eft <= task.sub_deadline and cost <=
task.sub_budget and eft == eft_best and cost <
cost_best):
(est_best, eft_best, runtime_on_resource_best,
place_id_best, resource_id_best, cost_best) = (
start_time, eft, runtime_on_resource, place_id,
r, cost)
continue
if not test:
print('', end='')
return (est_best, runtime_on_resource_best, eft_best,
resource_id_best, place_id_best, cost_best)
elif task.asap:
if not test:
print('', end='')
(est_best, eft_best, runtime_on_resource_best,
place_id_best, resource_id_best, cost_best
) = -1, -1, -1, -1, -1, -1
for r in range(0, self.len):
(start_time, eft, runtime_on_resource, place_id, cost) = (
self.calculate_eft_and_cost(task, r, arrival_time=
arrival_time))
if (not something_found() or eft < eft_best and task.
sub_deadline < eft_best or task.sub_budget <
cost_best and eft <= task.sub_deadline and cost <
cost_best or eft <= task.sub_deadline and cost <=
task.sub_budget and (eft_best > task.sub_deadline or
cost_best > task.sub_budget) or eft <= task.
sub_deadline and cost <= task.sub_budget and cost <
cost_best or eft <= task.sub_deadline and cost <=
task.sub_budget and cost == cost_best and eft <
eft_best):
(est_best, eft_best, runtime_on_resource_best,
place_id_best, resource_id_best, cost_best) = (
start_time, eft, runtime_on_resource, place_id,
r, cost)
continue
if not test:
print('', end='')
return (est_best, runtime_on_resource_best, eft_best,
resource_id_best, place_id_best, cost_best)
else:
return super(CostAwareResources, self).select_resource(task)
def price_of_each_graph(self):
graph_names = self.job_task_schedule.keys()
costs = {}
for name in graph_names:
costs[name] = 0
for r in range(self.len):
for id in range(len(self.tasksOfResource[r])):
name = self.tasksOfResource[r][id].task.graph.name
cost = self.calculate_task_shared_cost(resource_id=r,
task_id=id)
costs[name] += cost
return costs
def get_cheapest_empty_resource(self):
for r in range(self.len):
if len(self.tasksOfResource[r]) == 0:
return r
else:
return -1
def schedule(self, task_schedule, place_id=-1, do_head_nodes=False):
super(CostAwareResources, self).schedule(task_schedule, place_id)
if not do_head_nodes:
return
if task_schedule.task.graph.name in self.head_nodes:
prev_heads = self.head_nodes[task_schedule.task.graph.name]
parents_of_current_task = task_schedule.task.predecessor.keys()
self.head_nodes[task_schedule.task.graph.name] = self.head_nodes[
task_schedule.task.graph.name].difference(
parents_of_current_task)
self.head_nodes[task_schedule.task.graph.name].add(task_schedule
.task.id)
else:
self.head_nodes[task_schedule.task.graph.name] = set()
self.head_nodes[task_schedule.task.graph.name].add(task_schedule
.task.id)
self.sum_weight_scheduled[task_schedule.task.graph.name] = 0
self.sum_weight_scheduled[task_schedule.task.graph.name
] += task_schedule.task.weight
def calculate_share_cost_change(self, resource_id, est=-1, eft=-1,
job_id=-1, only_this_job=False):
sum_w = {}
for i in range(len(self.tasksOfResource[resource_id])):
sch = self.tasksOfResource[resource_id][i]
job = sch.task.graph.name
if job not in sum_w:
sum_w[job] = 0
sum_w[job] += sch.EFT - sch.EST
sum_w_all_old = sum(sum_w.values())
prev_cost_resource = self.resource_cost(resource_id)
prev_cost_job = {}
for j in sum_w.keys():
if sum_w_all_old == 0:
prev_cost_job[j] = 0
else:
prev_cost_job[j] = float(prev_cost_resource) * sum_w[j
] / sum_w_all_old
if est == -1:
return prev_cost_job
new_cost_resource = self.resource_cost(resource_id, start_time=est,
eft=eft)
if job_id not in sum_w:
sum_w[job_id] = 0
sum_w[job_id] += eft - est
sum_w_all_new = sum_w_all_old + eft - est
new_cost_job = {}
changes = {}
for j in sum_w.keys():
if sum_w_all_new == 0:
new_cost_job[j] = 0
else:
new_cost_job[j] = float(new_cost_resource) * sum_w[j
] / sum_w_all_new
if j not in prev_cost_job:
changes[j] = new_cost_job[j]
else:
changes[j] = new_cost_job[j] - prev_cost_job[j]
if only_this_job:
return changes[job_id]
return changes
| <mask token>
class Resources(object):
<mask token>
<mask token>
def __init__(self, powers, bandwidth):
number_of_resources = len(powers)
self.power = powers
self.tasksOfResource = []
for i in range(number_of_resources):
self.tasksOfResource.append([])
self.len = number_of_resources
self.bandwidth = bandwidth
self.job_task_schedule = {}
<mask token>
def calculate_eft(self, task, resource_id, arrival_time=0):
g = task.graph
if resource_id == -1:
graphs_task_on_resource = []
task_runtime_on_resource = task.weight / max(self.power)
else:
task_runtime_on_resource = task.weight / self.power[resource_id]
graphs_task_on_resource = list(map(lambda t: t.task.id if t.
task.graph.name == g.name else -1, self.tasksOfResource[
resource_id]))
max_est_of_task = arrival_time
for p in task.predecessor:
if p in graphs_task_on_resource:
communication_delay = 0
else:
communication_delay = task.predecessor[p] / self.bandwidth
if (g.name not in self.job_task_schedule or p not in self.
job_task_schedule[g.name]):
continue
p_eft = self.job_task_schedule[g.name][p].EFT
if p_eft + communication_delay > max_est_of_task:
max_est_of_task = p_eft + communication_delay
start_time, place_id = self.find_gap(resource_id, max_est_of_task,
task_runtime_on_resource)
eft_task = start_time + task_runtime_on_resource
return start_time, eft_task, task_runtime_on_resource, place_id
<mask token>
def show_schedule(self, job_id=-1, finishing=None, print_enabled=False):
result = []
for r in range(0, self.len):
names = []
est = []
eft = []
def add_entries(x):
if job_id != -1 and x.task.graph.name != job_id:
return
names.append(x.task.id if job_id != -1 else
f'{x.task.graph.name}-{x.task.id}')
est.append(x.EST)
eft.append(x.EFT)
list(map(add_entries, self.tasksOfResource[r]))
result.append((names, est, eft))
def print_list(x):
if not print_enabled:
return
first = True
for e in x:
if first:
first = False
else:
print(',', end=' ')
print(e, end=' ')
print()
print_list(names)
print_list(est)
print_list(eft)
if finishing is not None and print_enabled:
print(finishing)
return result
<mask token>
@property
def average_power(self):
return math.fsum(self.power) / self.len
@property
def makespan(self):
eft = 0
for i in range(0, self.len):
tasks_in_resource = self.tasksOfResource[i]
if len(tasks_in_resource) == 0:
continue
eft = max(eft, tasks_in_resource[-1].EFT)
return eft
<mask token>
@property
def sum_internal_gaps(self):
sum_gaps = 0
for r in range(0, self.len):
sum_gaps += self.sum_gaps_resource(r)
return sum_gaps
def select_resource(self, task, arrival_time=0):
(est_best, eft_best, runtime_on_resource_best, place_id_best,
resource_id_best) = -1, -1, -1, -1, -1
for r in range(0, self.len):
(max_est_of_task, eft_task, task_runtime_on_resource, place_id
) = self.calculate_eft(task, r, arrival_time=arrival_time)
if eft_best == -1 or eft_task < eft_best:
(est_best, eft_best, runtime_on_resource_best,
place_id_best, resource_id_best) = (max_est_of_task,
eft_task, task_runtime_on_resource, place_id, r)
return (est_best, runtime_on_resource_best, eft_best,
resource_id_best, place_id_best)
def get_fastest_empty_resource(self):
for r in range(self.len - 1, -1, -1):
if len(self.tasksOfResource[r]) == 0:
return r
else:
return -1
class CostAwareResources(Resources):
def __init__(self, powers, prices, timeslot_len, bandwidth):
super(CostAwareResources, self).__init__(powers, bandwidth)
self.timeslot = timeslot_len
self.price = prices
self.head_nodes = {}
self.sum_weight_scheduled = {}
def resource_cost(self, resource_id, start_time=-1, eft=-1, cost_only=True
):
"""
computes a resource's cost. if cost_only==True, only returns cost, otherwise it returns also start and finish-times.
:param resource_id:
:param start_time:
:param eft:
:param cost_only:
:return:
"""
tasks_in_resource = [t for t in self.tasksOfResource[resource_id] if
not t.task.dummy_task]
if not tasks_in_resource:
if eft == -1:
return 0 if cost_only else (0, 0, 0)
else:
return math.ceil((eft - start_time) / self.timeslot[
resource_id]) * self.price[resource_id]
if start_time != -1:
task_start_time = min(tasks_in_resource[0].EST, start_time)
else:
task_start_time = tasks_in_resource[0].EST
task_finish_time = max(tasks_in_resource[-1].EFT, eft)
reservation = task_finish_time - task_start_time
cost = math.ceil(reservation / self.timeslot[resource_id]
) * self.price[resource_id]
timeslot = self.timeslot[resource_id]
startof = [x.EST for x in tasks_in_resource]
endof = [x.EFT for x in tasks_in_resource]
if start_time != -1:
startof.append(start_time)
endof.append(eft)
startof.sort()
endof.sort()
timeslot_start = min(startof)
last_finish_time = max(endof)
current_task_id = 0
rent_periods = []
while timeslot_start < last_finish_time:
task_len = endof[current_task_id] - timeslot_start
time_slot_finish = endof[current_task_id] + (timeslot -
task_len % timeslot) % timeslot
current_task_id += 1
if current_task_id >= len(startof):
rent_periods.append((timeslot_start, time_slot_finish))
break
if startof[current_task_id] <= time_slot_finish:
pass
else:
rent_periods.append((timeslot_start, time_slot_finish))
timeslot_start = startof[current_task_id]
sum = 0
for rp in rent_periods:
sum += rp[1] - rp[0]
cost = sum / timeslot * self.price[resource_id]
if cost_only:
return cost
else:
return cost, min(startof), max(endof)
def resource_start_time(self, resource_id):
tasks_in_resource = self.tasksOfResource[resource_id]
length = len(tasks_in_resource)
start_index = 0
while length > 0 and tasks_in_resource[start_index].task.dummy_task:
start_index += 1
length -= 1
if length == 0:
return -1
return tasks_in_resource[start_index].EST
@property
def plan_cost(self):
cost = 0
for i in range(0, self.len):
cost += self.resource_cost(i)
return cost
def calculate_shared_cost_within_timeslot(self, timeslot_start, est, ft,
resource_id, task_id=None):
timeslot_end = timeslot_start + self.timeslot[resource_id]
if ft <= timeslot_start or est >= timeslot_end:
return 0
tasks = self.tasksOfResource[resource_id]
task_ids = self.task_id_in_timeslot(resource_id, timeslot_start)
sum_w = 0
for id in task_ids:
if task_id == id:
continue
start_time = tasks[id].EST
finish_time = tasks[id].EFT
if start_time < timeslot_start:
start_time = timeslot_start
if finish_time > timeslot_end:
finish_time = timeslot_end
sum_w += finish_time - start_time
if est < timeslot_start:
est = timeslot_start
if ft > timeslot_end:
ft = timeslot_end
if ft == est:
return 0
share = float(ft - est) / (sum_w + ft - est)
return share * self.price[resource_id]
def task_id_in_timeslot(self, resource_id, timeslot_start):
timeslot_end = timeslot_start + self.timeslot[resource_id]
task_ids = []
for id in range(len(self.tasksOfResource[resource_id])):
s = self.tasksOfResource[resource_id][id]
if (timeslot_start <= s.EST <= timeslot_end or timeslot_start <=
s.EFT <= timeslot_end or s.EST < timeslot_start and
timeslot_end < s.EFT):
task_ids.append(id)
return task_ids
def calculate_task_shared_cost(self, est=-1, ft=-1, resource_id=-1,
task_id=None):
if task_id is not None:
est = self.tasksOfResource[resource_id][task_id].EST
ft = self.tasksOfResource[resource_id][task_id].EFT
timeslot_len = self.timeslot[resource_id]
resource_start_time = self.resource_start_time(resource_id)
if resource_start_time == -1:
resource_start_time = est
timeslot_start = float(timeslot_len) * math.floor((est -
resource_start_time) / timeslot_len) + resource_start_time
timeslot_end = float(timeslot_len) * math.ceil((ft -
resource_start_time) / timeslot_len) + resource_start_time
shared_cost = 0
for interval in f_range(timeslot_start, timeslot_end + timeslot_len /
2, timeslot_len):
share_in_interval = self.calculate_shared_cost_within_timeslot(
interval, est, ft, resource_id, task_id)
shared_cost += share_in_interval
return shared_cost
def calculate_eft_and_cost(self, task, resource_id, arrival_time=0):
"""
calculates eft and cost of a certain task on a certain resource.
:param task:Definitions.Task()
:param resource_id:
:return:
"""
start_time, eft, runtime_on_resource, place_id = self.calculate_eft(
task, resource_id, arrival_time=arrival_time)
if task.dummy_task:
return start_time, eft, runtime_on_resource, place_id, 0
else:
cost = self.calculate_share_cost_change(resource_id, start_time,
eft, task.graph.name, True)
return start_time, eft, runtime_on_resource, place_id, cost
def sum_external_gaps_resource(self, r):
c, s, e = self.resource_cost(r, cost_only=False)
reservation = e - s
timeslot = self.timeslot[r]
gap = timeslot - reservation % timeslot
if gap == timeslot:
return 0
else:
return gap
@property
def sum_external_gaps(self):
sum_gaps = 0
for r in range(0, self.len):
sum_gaps += self.sum_external_gaps_resource(r)
return sum_gaps
@property
def sum_gaps(self):
return self.sum_internal_gaps + self.sum_external_gaps
@property
def occupied_resources(self):
counter = 0
for i in range(self.len):
if self.resource_cost(i) != 0:
counter += self.price[i]
return counter
@property
def gap_rate(self):
return self.sum_gaps / self.makespan / self.occupied_resources
def select_resource(self, task=Task(), test=None, arrival_time=0):
eft_best = -1
def something_found():
return eft_best != -1
if task.asap is not None:
if not task.asap:
if not test:
print('', end='')
(est_best, eft_best, runtime_on_resource_best,
place_id_best, resource_id_best, cost_best
) = -1, -1, -1, -1, -1, -1
for r in range(0, self.len):
(start_time, eft, runtime_on_resource, place_id, cost) = (
self.calculate_eft_and_cost(task, r, arrival_time=
arrival_time))
if (not something_found() or eft < eft_best and task.
sub_deadline < eft_best or task.sub_budget <
cost_best and eft <= task.sub_deadline and cost <
cost_best or eft <= task.sub_deadline and cost <=
task.sub_budget and (eft_best > task.sub_deadline or
cost_best > task.sub_budget) or eft <= task.
sub_deadline and cost <= task.sub_budget and eft <
eft_best or eft <= task.sub_deadline and cost <=
task.sub_budget and eft == eft_best and cost <
cost_best):
(est_best, eft_best, runtime_on_resource_best,
place_id_best, resource_id_best, cost_best) = (
start_time, eft, runtime_on_resource, place_id,
r, cost)
continue
if not test:
print('', end='')
return (est_best, runtime_on_resource_best, eft_best,
resource_id_best, place_id_best, cost_best)
elif task.asap:
if not test:
print('', end='')
(est_best, eft_best, runtime_on_resource_best,
place_id_best, resource_id_best, cost_best
) = -1, -1, -1, -1, -1, -1
for r in range(0, self.len):
(start_time, eft, runtime_on_resource, place_id, cost) = (
self.calculate_eft_and_cost(task, r, arrival_time=
arrival_time))
if (not something_found() or eft < eft_best and task.
sub_deadline < eft_best or task.sub_budget <
cost_best and eft <= task.sub_deadline and cost <
cost_best or eft <= task.sub_deadline and cost <=
task.sub_budget and (eft_best > task.sub_deadline or
cost_best > task.sub_budget) or eft <= task.
sub_deadline and cost <= task.sub_budget and cost <
cost_best or eft <= task.sub_deadline and cost <=
task.sub_budget and cost == cost_best and eft <
eft_best):
(est_best, eft_best, runtime_on_resource_best,
place_id_best, resource_id_best, cost_best) = (
start_time, eft, runtime_on_resource, place_id,
r, cost)
continue
if not test:
print('', end='')
return (est_best, runtime_on_resource_best, eft_best,
resource_id_best, place_id_best, cost_best)
else:
return super(CostAwareResources, self).select_resource(task)
def price_of_each_graph(self):
graph_names = self.job_task_schedule.keys()
costs = {}
for name in graph_names:
costs[name] = 0
for r in range(self.len):
for id in range(len(self.tasksOfResource[r])):
name = self.tasksOfResource[r][id].task.graph.name
cost = self.calculate_task_shared_cost(resource_id=r,
task_id=id)
costs[name] += cost
return costs
def get_cheapest_empty_resource(self):
for r in range(self.len):
if len(self.tasksOfResource[r]) == 0:
return r
else:
return -1
def schedule(self, task_schedule, place_id=-1, do_head_nodes=False):
super(CostAwareResources, self).schedule(task_schedule, place_id)
if not do_head_nodes:
return
if task_schedule.task.graph.name in self.head_nodes:
prev_heads = self.head_nodes[task_schedule.task.graph.name]
parents_of_current_task = task_schedule.task.predecessor.keys()
self.head_nodes[task_schedule.task.graph.name] = self.head_nodes[
task_schedule.task.graph.name].difference(
parents_of_current_task)
self.head_nodes[task_schedule.task.graph.name].add(task_schedule
.task.id)
else:
self.head_nodes[task_schedule.task.graph.name] = set()
self.head_nodes[task_schedule.task.graph.name].add(task_schedule
.task.id)
self.sum_weight_scheduled[task_schedule.task.graph.name] = 0
self.sum_weight_scheduled[task_schedule.task.graph.name
] += task_schedule.task.weight
    def calculate_share_cost_change(self, resource_id, est=-1, eft=-1,
        job_id=-1, only_this_job=False):
        """Recompute how the rental cost of `resource_id` is shared by jobs.

        The resource's total cost is split between jobs proportionally to
        the busy time (EFT - EST) each job occupies on it.

        With est == -1 (no tentative task): returns {job: current share}.
        Otherwise a tentative task [est, eft) of job `job_id` is added and
        the per-job *change* of the share is returned (only `job_id`'s
        change when only_this_job is True).
        """
        # Busy time currently occupied by each job on this resource.
        sum_w = {}
        for i in range(len(self.tasksOfResource[resource_id])):
            sch = self.tasksOfResource[resource_id][i]
            job = sch.task.graph.name
            if job not in sum_w:
                sum_w[job] = 0
            sum_w[job] += sch.EFT - sch.EST
        sum_w_all_old = sum(sum_w.values())
        prev_cost_resource = self.resource_cost(resource_id)
        # Current share of every job, proportional to its busy time.
        prev_cost_job = {}
        for j in sum_w.keys():
            if sum_w_all_old == 0:
                prev_cost_job[j] = 0
            else:
                prev_cost_job[j] = float(prev_cost_resource) * sum_w[j
                    ] / sum_w_all_old
        if est == -1:
            # No tentative task: report the current shares only.
            return prev_cost_job
        # Resource cost if the tentative task [est, eft) were added.
        new_cost_resource = self.resource_cost(resource_id, start_time=est,
            eft=eft)
        if job_id not in sum_w:
            sum_w[job_id] = 0
        sum_w[job_id] += eft - est
        sum_w_all_new = sum_w_all_old + eft - est
        new_cost_job = {}
        changes = {}
        for j in sum_w.keys():
            if sum_w_all_new == 0:
                new_cost_job[j] = 0
            else:
                new_cost_job[j] = float(new_cost_resource) * sum_w[j
                    ] / sum_w_all_new
            # Jobs first appearing with the tentative task pay their whole
            # new share as the change.
            if j not in prev_cost_job:
                changes[j] = new_cost_job[j]
            else:
                changes[j] = new_cost_job[j] - prev_cost_job[j]
        if only_this_job:
            return changes[job_id]
        return changes
| <mask token>
class Resources(object):
    """Pool of heterogeneous resources, each holding an ordered task queue.

    power:             processing speed of each resource.
    tasksOfResource:   per-resource list of TaskSchedule objects, ordered
                       by start time.
    job_task_schedule: {job name: {task id: TaskSchedule}} of everything
                       scheduled so far.
    """
    # Class-level defaults, overwritten per instance in __init__.
    # (Restored: the two attribute lines had been clobbered by mask-token
    # residue; the intact duplicate of this class shows their values.)
    len = -1
    bandwidth = 0

    def __init__(self, powers, bandwidth):
        """:param powers: processing speed per resource.
        :param bandwidth: network bandwidth for inter-resource transfers.
        """
        number_of_resources = len(powers)
        self.power = powers
        # One (initially empty) schedule queue per resource.
        self.tasksOfResource = [[] for _ in range(number_of_resources)]
        self.len = number_of_resources
        self.bandwidth = bandwidth
        self.job_task_schedule = {}

    def find_gap(self, resource, start_time, runtime):
        """Find the earliest gap on `resource` fitting `runtime`, starting
        no earlier than `start_time`.

        Returns (actual start time, insertion index); index -1 means
        append at the end.  With resource == -1 nothing is placed and
        (start_time, -1) is returned.
        """
        if resource == -1:
            return start_time, -1
        queue = self.tasksOfResource[resource]
        number_of_tasks = len(queue)
        if number_of_tasks == 0:
            return start_time, 0
        if queue[0].EST >= start_time + runtime:
            # Fits entirely before the first scheduled task.
            return start_time, 0
        if number_of_tasks == 1:
            if queue[0].EFT < start_time:
                return start_time, 1
            return queue[0].EFT, 1
        for i in range(1, number_of_tasks):
            if queue[i].EST <= start_time:
                continue
            if start_time < queue[i - 1].EFT:
                # Candidate gap starts when the previous task finishes.
                gap = queue[i].EST - queue[i - 1].EFT
                if gap >= runtime:
                    return queue[i - 1].EFT, i
            elif queue[i - 1].EFT <= start_time < queue[i].EST:
                # start_time itself lies inside this gap.
                if queue[i].EST - start_time >= runtime:
                    return start_time, i
        # No interior gap fits: append after the last task.
        return max(queue[-1].EFT, start_time), -1

    def calculate_eft(self, task, resource_id, arrival_time=0):
        """Earliest start/finish of `task` on `resource_id`.

        resource_id == -1 gives an optimistic estimate on the fastest
        resource without reserving a slot.  Data from a predecessor
        scheduled on the same resource incurs no transfer delay.
        Returns (EST, EFT, runtime, insertion index).
        """
        g = task.graph
        if resource_id == -1:
            graphs_task_on_resource = []
            task_runtime_on_resource = task.weight / max(self.power)
        else:
            task_runtime_on_resource = task.weight / self.power[resource_id]
            graphs_task_on_resource = [
                s.task.id if s.task.graph.name == g.name else -1
                for s in self.tasksOfResource[resource_id]]
        max_est_of_task = arrival_time
        for p in task.predecessor:
            if p in graphs_task_on_resource:
                communication_delay = 0
            else:
                communication_delay = task.predecessor[p] / self.bandwidth
            if (g.name not in self.job_task_schedule or
                    p not in self.job_task_schedule[g.name]):
                # Unscheduled predecessors do not constrain the start time.
                continue
            p_eft = self.job_task_schedule[g.name][p].EFT
            if p_eft + communication_delay > max_est_of_task:
                max_est_of_task = p_eft + communication_delay
        start_time, place_id = self.find_gap(resource_id, max_est_of_task,
                                             task_runtime_on_resource)
        return (start_time, start_time + task_runtime_on_resource,
                task_runtime_on_resource, place_id)

    def schedule(self, task_schedule, place_id=-1):
        """Insert `task_schedule` into its resource queue (place_id == -1
        appends) and register it in job_task_schedule.

        :type task_schedule: TaskSchedule
        :type place_id: int
        """
        queue = self.tasksOfResource[task_schedule.resource]
        if place_id == -1:
            queue.append(task_schedule)
        else:
            queue.insert(place_id, task_schedule)
        job = task_schedule.task.graph.name
        self.job_task_schedule.setdefault(job, {})[
            task_schedule.task.id] = task_schedule

    def show_schedule(self, job_id=-1, finishing=None, print_enabled=False):
        """Collect per-resource (names, ESTs, EFTs); optionally print them.

        With job_id != -1 only that job's tasks are listed (by task id);
        otherwise names are formatted "<job name>-<task id>".
        """
        def print_row(values):
            # Comma-separated row, preserving the original spacing.
            if not print_enabled:
                return
            for position, value in enumerate(values):
                if position:
                    print(',', end=' ')
                print(value, end=' ')
            print()

        result = []
        for r in range(0, self.len):
            names = []
            est = []
            eft = []
            for s in self.tasksOfResource[r]:
                if job_id != -1 and s.task.graph.name != job_id:
                    continue
                if job_id != -1:
                    names.append(s.task.id)
                else:
                    names.append(f'{s.task.graph.name}-{s.task.id}')
                est.append(s.EST)
                eft.append(s.EFT)
            result.append((names, est, eft))
            print_row(names)
            print_row(est)
            print_row(eft)
        if finishing is not None and print_enabled:
            print(finishing)
        return result

    def write_schedule(self, db_file, test_name='N/A', extra='single',
                       policy='', job_count=1):
        """Persist the whole plan into `db_file` through the writer module."""
        w = writer.Writer(db_file)
        w.create_plan()
        w.create_plan_head()
        unique_jobs_id = w.write_plan_head(test_name, policy, job_count)
        for r in range(0, self.len):
            for s in self.tasksOfResource[r]:
                w.write_plan(s.task.graph.name, s.task.graph.type, s.task.id,
                             unique_jobs_id, s.EST, s.EFT, r, self.power[r],
                             policy, extra)
        w.commit()
        w.close()

    @property
    def average_power(self):
        """Mean processing speed over all resources."""
        return math.fsum(self.power) / self.len

    @property
    def makespan(self):
        """Finish time of the latest scheduled task (0 when empty)."""
        eft = 0
        for queue in self.tasksOfResource:
            if queue:
                eft = max(eft, queue[-1].EFT)
        return eft

    def sum_gaps_resource(self, resource_id):
        """Total idle time between consecutive non-dummy tasks on one
        resource.  Raises when the queue is out of order (negative gap)."""
        queue = self.tasksOfResource[resource_id]
        if len(queue) <= 1:
            return 0
        sum_gaps = 0
        for previous, current in zip(queue, queue[1:]):
            if previous.task.dummy_task or current.task.dummy_task:
                continue
            gap_length = current.EST - previous.EFT
            if gap_length < 0:
                raise Exception('Schedule is not correct, check gaps!')
            sum_gaps += gap_length
        return sum_gaps

    @property
    def sum_internal_gaps(self):
        """Idle time inside the busy span of every resource."""
        return sum(self.sum_gaps_resource(r) for r in range(self.len))

    def select_resource(self, task, arrival_time=0):
        """Greedy minimum-EFT placement.

        Returns (EST, runtime, EFT, resource id, place id) of the
        earliest-finishing resource (first one wins on ties).
        """
        best = (-1, -1, -1, -1, -1)
        for r in range(0, self.len):
            est, eft, runtime, place_id = self.calculate_eft(
                task, r, arrival_time=arrival_time)
            if best[2] == -1 or eft < best[2]:
                best = (est, runtime, eft, r, place_id)
        return best

    def get_fastest_empty_resource(self):
        """Highest-index idle resource, or -1 when all are busy.

        Assumes resources are ordered slow-to-fast — cf.
        get_cheapest_empty_resource scanning from the front; TODO confirm.
        """
        for r in range(self.len - 1, -1, -1):
            if not self.tasksOfResource[r]:
                return r
        return -1
class CostAwareResources(Resources):
def __init__(self, powers, prices, timeslot_len, bandwidth):
super(CostAwareResources, self).__init__(powers, bandwidth)
self.timeslot = timeslot_len
self.price = prices
self.head_nodes = {}
self.sum_weight_scheduled = {}
def resource_cost(self, resource_id, start_time=-1, eft=-1, cost_only=True
):
"""
computes a resource's cost. if cost_only==True, only returns cost, otherwise it returns also start and finish-times.
:param resource_id:
:param start_time:
:param eft:
:param cost_only:
:return:
"""
tasks_in_resource = [t for t in self.tasksOfResource[resource_id] if
not t.task.dummy_task]
if not tasks_in_resource:
if eft == -1:
return 0 if cost_only else (0, 0, 0)
else:
return math.ceil((eft - start_time) / self.timeslot[
resource_id]) * self.price[resource_id]
if start_time != -1:
task_start_time = min(tasks_in_resource[0].EST, start_time)
else:
task_start_time = tasks_in_resource[0].EST
task_finish_time = max(tasks_in_resource[-1].EFT, eft)
reservation = task_finish_time - task_start_time
cost = math.ceil(reservation / self.timeslot[resource_id]
) * self.price[resource_id]
timeslot = self.timeslot[resource_id]
startof = [x.EST for x in tasks_in_resource]
endof = [x.EFT for x in tasks_in_resource]
if start_time != -1:
startof.append(start_time)
endof.append(eft)
startof.sort()
endof.sort()
timeslot_start = min(startof)
last_finish_time = max(endof)
current_task_id = 0
rent_periods = []
while timeslot_start < last_finish_time:
task_len = endof[current_task_id] - timeslot_start
time_slot_finish = endof[current_task_id] + (timeslot -
task_len % timeslot) % timeslot
current_task_id += 1
if current_task_id >= len(startof):
rent_periods.append((timeslot_start, time_slot_finish))
break
if startof[current_task_id] <= time_slot_finish:
pass
else:
rent_periods.append((timeslot_start, time_slot_finish))
timeslot_start = startof[current_task_id]
sum = 0
for rp in rent_periods:
sum += rp[1] - rp[0]
cost = sum / timeslot * self.price[resource_id]
if cost_only:
return cost
else:
return cost, min(startof), max(endof)
def resource_start_time(self, resource_id):
tasks_in_resource = self.tasksOfResource[resource_id]
length = len(tasks_in_resource)
start_index = 0
while length > 0 and tasks_in_resource[start_index].task.dummy_task:
start_index += 1
length -= 1
if length == 0:
return -1
return tasks_in_resource[start_index].EST
@property
def plan_cost(self):
cost = 0
for i in range(0, self.len):
cost += self.resource_cost(i)
return cost
    def calculate_shared_cost_within_timeslot(self, timeslot_start, est, ft,
        resource_id, task_id=None):
        """Share of one time-slot's price billed to the interval [est, ft).

        The slot's price is split among all tasks overlapping the slot,
        proportionally to their busy time inside it; `task_id` (if given)
        is excluded from the others because it *is* the interval.
        Returns 0 when [est, ft) does not overlap the slot.
        """
        timeslot_end = timeslot_start + self.timeslot[resource_id]
        if ft <= timeslot_start or est >= timeslot_end:
            return 0
        tasks = self.tasksOfResource[resource_id]
        task_ids = self.task_id_in_timeslot(resource_id, timeslot_start)
        # Busy time of the other overlapping tasks, clipped to the slot.
        sum_w = 0
        for id in task_ids:
            if task_id == id:
                continue
            start_time = tasks[id].EST
            finish_time = tasks[id].EFT
            if start_time < timeslot_start:
                start_time = timeslot_start
            if finish_time > timeslot_end:
                finish_time = timeslot_end
            sum_w += finish_time - start_time
        # Clip the queried interval to the slot as well.
        if est < timeslot_start:
            est = timeslot_start
        if ft > timeslot_end:
            ft = timeslot_end
        if ft == est:
            return 0
        share = float(ft - est) / (sum_w + ft - est)
        return share * self.price[resource_id]
def task_id_in_timeslot(self, resource_id, timeslot_start):
timeslot_end = timeslot_start + self.timeslot[resource_id]
task_ids = []
for id in range(len(self.tasksOfResource[resource_id])):
s = self.tasksOfResource[resource_id][id]
if (timeslot_start <= s.EST <= timeslot_end or timeslot_start <=
s.EFT <= timeslot_end or s.EST < timeslot_start and
timeslot_end < s.EFT):
task_ids.append(id)
return task_ids
    def calculate_task_shared_cost(self, est=-1, ft=-1, resource_id=-1,
        task_id=None):
        """Shared cost of the interval [est, ft) on `resource_id`.

        When task_id is given, est/ft are taken from that scheduled task.
        The interval is split over the resource's billing time-slots
        (aligned to the resource's first non-dummy start) and each slot's
        share is accumulated via calculate_shared_cost_within_timeslot.
        """
        if task_id is not None:
            est = self.tasksOfResource[resource_id][task_id].EST
            ft = self.tasksOfResource[resource_id][task_id].EFT
        timeslot_len = self.timeslot[resource_id]
        resource_start_time = self.resource_start_time(resource_id)
        if resource_start_time == -1:
            # Empty resource: align slots to the interval itself.
            resource_start_time = est
        # First and last slot boundaries overlapping [est, ft).
        timeslot_start = float(timeslot_len) * math.floor((est -
            resource_start_time) / timeslot_len) + resource_start_time
        timeslot_end = float(timeslot_len) * math.ceil((ft -
            resource_start_time) / timeslot_len) + resource_start_time
        shared_cost = 0
        # f_range: project helper, presumably a float-step range — the
        # half-slot margin guards against float rounding; TODO confirm.
        for interval in f_range(timeslot_start, timeslot_end + timeslot_len /
            2, timeslot_len):
            share_in_interval = self.calculate_shared_cost_within_timeslot(
                interval, est, ft, resource_id, task_id)
            shared_cost += share_in_interval
        return shared_cost
def calculate_eft_and_cost(self, task, resource_id, arrival_time=0):
"""
calculates eft and cost of a certain task on a certain resource.
:param task:Definitions.Task()
:param resource_id:
:return:
"""
start_time, eft, runtime_on_resource, place_id = self.calculate_eft(
task, resource_id, arrival_time=arrival_time)
if task.dummy_task:
return start_time, eft, runtime_on_resource, place_id, 0
else:
cost = self.calculate_share_cost_change(resource_id, start_time,
eft, task.graph.name, True)
return start_time, eft, runtime_on_resource, place_id, cost
def sum_external_gaps_resource(self, r):
c, s, e = self.resource_cost(r, cost_only=False)
reservation = e - s
timeslot = self.timeslot[r]
gap = timeslot - reservation % timeslot
if gap == timeslot:
return 0
else:
return gap
@property
def sum_external_gaps(self):
sum_gaps = 0
for r in range(0, self.len):
sum_gaps += self.sum_external_gaps_resource(r)
return sum_gaps
@property
def sum_gaps(self):
return self.sum_internal_gaps + self.sum_external_gaps
@property
def occupied_resources(self):
counter = 0
for i in range(self.len):
if self.resource_cost(i) != 0:
counter += self.price[i]
return counter
@property
def gap_rate(self):
return self.sum_gaps / self.makespan / self.occupied_resources
def select_resource(self, task=Task(), test=None, arrival_time=0):
eft_best = -1
def something_found():
return eft_best != -1
if task.asap is not None:
if not task.asap:
if not test:
print('', end='')
(est_best, eft_best, runtime_on_resource_best,
place_id_best, resource_id_best, cost_best
) = -1, -1, -1, -1, -1, -1
for r in range(0, self.len):
(start_time, eft, runtime_on_resource, place_id, cost) = (
self.calculate_eft_and_cost(task, r, arrival_time=
arrival_time))
if (not something_found() or eft < eft_best and task.
sub_deadline < eft_best or task.sub_budget <
cost_best and eft <= task.sub_deadline and cost <
cost_best or eft <= task.sub_deadline and cost <=
task.sub_budget and (eft_best > task.sub_deadline or
cost_best > task.sub_budget) or eft <= task.
sub_deadline and cost <= task.sub_budget and eft <
eft_best or eft <= task.sub_deadline and cost <=
task.sub_budget and eft == eft_best and cost <
cost_best):
(est_best, eft_best, runtime_on_resource_best,
place_id_best, resource_id_best, cost_best) = (
start_time, eft, runtime_on_resource, place_id,
r, cost)
continue
if not test:
print('', end='')
return (est_best, runtime_on_resource_best, eft_best,
resource_id_best, place_id_best, cost_best)
elif task.asap:
if not test:
print('', end='')
(est_best, eft_best, runtime_on_resource_best,
place_id_best, resource_id_best, cost_best
) = -1, -1, -1, -1, -1, -1
for r in range(0, self.len):
(start_time, eft, runtime_on_resource, place_id, cost) = (
self.calculate_eft_and_cost(task, r, arrival_time=
arrival_time))
if (not something_found() or eft < eft_best and task.
sub_deadline < eft_best or task.sub_budget <
cost_best and eft <= task.sub_deadline and cost <
cost_best or eft <= task.sub_deadline and cost <=
task.sub_budget and (eft_best > task.sub_deadline or
cost_best > task.sub_budget) or eft <= task.
sub_deadline and cost <= task.sub_budget and cost <
cost_best or eft <= task.sub_deadline and cost <=
task.sub_budget and cost == cost_best and eft <
eft_best):
(est_best, eft_best, runtime_on_resource_best,
place_id_best, resource_id_best, cost_best) = (
start_time, eft, runtime_on_resource, place_id,
r, cost)
continue
if not test:
print('', end='')
return (est_best, runtime_on_resource_best, eft_best,
resource_id_best, place_id_best, cost_best)
else:
return super(CostAwareResources, self).select_resource(task)
def price_of_each_graph(self):
graph_names = self.job_task_schedule.keys()
costs = {}
for name in graph_names:
costs[name] = 0
for r in range(self.len):
for id in range(len(self.tasksOfResource[r])):
name = self.tasksOfResource[r][id].task.graph.name
cost = self.calculate_task_shared_cost(resource_id=r,
task_id=id)
costs[name] += cost
return costs
def get_cheapest_empty_resource(self):
for r in range(self.len):
if len(self.tasksOfResource[r]) == 0:
return r
else:
return -1
def schedule(self, task_schedule, place_id=-1, do_head_nodes=False):
super(CostAwareResources, self).schedule(task_schedule, place_id)
if not do_head_nodes:
return
if task_schedule.task.graph.name in self.head_nodes:
prev_heads = self.head_nodes[task_schedule.task.graph.name]
parents_of_current_task = task_schedule.task.predecessor.keys()
self.head_nodes[task_schedule.task.graph.name] = self.head_nodes[
task_schedule.task.graph.name].difference(
parents_of_current_task)
self.head_nodes[task_schedule.task.graph.name].add(task_schedule
.task.id)
else:
self.head_nodes[task_schedule.task.graph.name] = set()
self.head_nodes[task_schedule.task.graph.name].add(task_schedule
.task.id)
self.sum_weight_scheduled[task_schedule.task.graph.name] = 0
self.sum_weight_scheduled[task_schedule.task.graph.name
] += task_schedule.task.weight
    def calculate_share_cost_change(self, resource_id, est=-1, eft=-1,
        job_id=-1, only_this_job=False):
        """Recompute how the rental cost of `resource_id` is shared by jobs.

        The resource's total cost is split between jobs proportionally to
        the busy time (EFT - EST) each job occupies on it.

        With est == -1 (no tentative task): returns {job: current share}.
        Otherwise a tentative task [est, eft) of job `job_id` is added and
        the per-job *change* of the share is returned (only `job_id`'s
        change when only_this_job is True).
        """
        # Busy time currently occupied by each job on this resource.
        sum_w = {}
        for i in range(len(self.tasksOfResource[resource_id])):
            sch = self.tasksOfResource[resource_id][i]
            job = sch.task.graph.name
            if job not in sum_w:
                sum_w[job] = 0
            sum_w[job] += sch.EFT - sch.EST
        sum_w_all_old = sum(sum_w.values())
        prev_cost_resource = self.resource_cost(resource_id)
        # Current share of every job, proportional to its busy time.
        prev_cost_job = {}
        for j in sum_w.keys():
            if sum_w_all_old == 0:
                prev_cost_job[j] = 0
            else:
                prev_cost_job[j] = float(prev_cost_resource) * sum_w[j
                    ] / sum_w_all_old
        if est == -1:
            # No tentative task: report the current shares only.
            return prev_cost_job
        # Resource cost if the tentative task [est, eft) were added.
        new_cost_resource = self.resource_cost(resource_id, start_time=est,
            eft=eft)
        if job_id not in sum_w:
            sum_w[job_id] = 0
        sum_w[job_id] += eft - est
        sum_w_all_new = sum_w_all_old + eft - est
        new_cost_job = {}
        changes = {}
        for j in sum_w.keys():
            if sum_w_all_new == 0:
                new_cost_job[j] = 0
            else:
                new_cost_job[j] = float(new_cost_resource) * sum_w[j
                    ] / sum_w_all_new
            # Jobs first appearing with the tentative task pay their whole
            # new share as the change.
            if j not in prev_cost_job:
                changes[j] = new_cost_job[j]
            else:
                changes[j] = new_cost_job[j] - prev_cost_job[j]
        if only_this_job:
            return changes[job_id]
        return changes
| <mask token>
class Constraint(Enum):
<mask token>
<mask token>
<mask token>
<mask token>
class TaskSchedule:
    """A placement of one task on one resource.

    EST/EFT are the earliest start/finish times, runtime the execution
    time on the chosen resource, and resource the resource index
    (-1 everywhere means "not placed yet").
    """

    def __init__(self, task, est=-1, runtime=-1, eft=-1, resource=-1):
        self.task = task          # the scheduled Task object
        self.EFT = eft            # earliest finish time
        self.EST = est            # earliest start time
        self.runtime = runtime    # execution time on `resource`
        self.resource = resource  # index of the chosen resource

    def __repr__(self):
        # Added for debuggability; does not affect existing behaviour.
        return (f'{type(self).__name__}(task={self.task!r}, est={self.EST}, '
                f'runtime={self.runtime}, eft={self.EFT}, '
                f'resource={self.resource})')
class Resources(object):
    """Pool of heterogeneous resources, each holding an ordered task queue.

    power:             processing speed of each resource.
    tasksOfResource:   per-resource list of TaskSchedule objects, ordered
                       by start time.
    job_task_schedule: {job name: {task id: TaskSchedule}} of everything
                       scheduled so far.
    """
    # Class-level defaults, overwritten per instance in __init__.
    len = -1
    bandwidth = 0

    def __init__(self, powers, bandwidth):
        """:param powers: processing speed per resource.
        :param bandwidth: network bandwidth for inter-resource transfers.
        """
        number_of_resources = len(powers)
        self.power = powers
        # One (initially empty) schedule queue per resource.
        self.tasksOfResource = [[] for _ in range(number_of_resources)]
        self.len = number_of_resources
        self.bandwidth = bandwidth
        self.job_task_schedule = {}

    def find_gap(self, resource, start_time, runtime):
        """Find the earliest gap on `resource` fitting `runtime`, starting
        no earlier than `start_time`.

        Returns (actual start time, insertion index); index -1 means
        append at the end.  With resource == -1 nothing is placed and
        (start_time, -1) is returned.
        """
        if resource == -1:
            return start_time, -1
        queue = self.tasksOfResource[resource]
        number_of_tasks = len(queue)
        if number_of_tasks == 0:
            return start_time, 0
        if queue[0].EST >= start_time + runtime:
            # Fits entirely before the first scheduled task.
            return start_time, 0
        if number_of_tasks == 1:
            if queue[0].EFT < start_time:
                return start_time, 1
            return queue[0].EFT, 1
        for i in range(1, number_of_tasks):
            if queue[i].EST <= start_time:
                continue
            if start_time < queue[i - 1].EFT:
                # Candidate gap starts when the previous task finishes.
                gap = queue[i].EST - queue[i - 1].EFT
                if gap >= runtime:
                    return queue[i - 1].EFT, i
            elif queue[i - 1].EFT <= start_time < queue[i].EST:
                # start_time itself lies inside this gap.
                if queue[i].EST - start_time >= runtime:
                    return start_time, i
        # No interior gap fits: append after the last task.
        return max(queue[-1].EFT, start_time), -1

    def calculate_eft(self, task, resource_id, arrival_time=0):
        """Earliest start/finish of `task` on `resource_id`.

        resource_id == -1 gives an optimistic estimate on the fastest
        resource without reserving a slot.  Data from a predecessor
        scheduled on the same resource incurs no transfer delay.
        Returns (EST, EFT, runtime, insertion index).
        """
        g = task.graph
        if resource_id == -1:
            graphs_task_on_resource = []
            task_runtime_on_resource = task.weight / max(self.power)
        else:
            task_runtime_on_resource = task.weight / self.power[resource_id]
            graphs_task_on_resource = [
                s.task.id if s.task.graph.name == g.name else -1
                for s in self.tasksOfResource[resource_id]]
        max_est_of_task = arrival_time
        for p in task.predecessor:
            if p in graphs_task_on_resource:
                communication_delay = 0
            else:
                communication_delay = task.predecessor[p] / self.bandwidth
            if (g.name not in self.job_task_schedule or
                    p not in self.job_task_schedule[g.name]):
                # Unscheduled predecessors do not constrain the start time.
                continue
            p_eft = self.job_task_schedule[g.name][p].EFT
            if p_eft + communication_delay > max_est_of_task:
                max_est_of_task = p_eft + communication_delay
        start_time, place_id = self.find_gap(resource_id, max_est_of_task,
                                             task_runtime_on_resource)
        return (start_time, start_time + task_runtime_on_resource,
                task_runtime_on_resource, place_id)

    def schedule(self, task_schedule, place_id=-1):
        """Insert `task_schedule` into its resource queue (place_id == -1
        appends) and register it in job_task_schedule.

        :type task_schedule: TaskSchedule
        :type place_id: int
        """
        queue = self.tasksOfResource[task_schedule.resource]
        if place_id == -1:
            queue.append(task_schedule)
        else:
            queue.insert(place_id, task_schedule)
        job = task_schedule.task.graph.name
        self.job_task_schedule.setdefault(job, {})[
            task_schedule.task.id] = task_schedule

    def show_schedule(self, job_id=-1, finishing=None, print_enabled=False):
        """Collect per-resource (names, ESTs, EFTs); optionally print them.

        With job_id != -1 only that job's tasks are listed (by task id);
        otherwise names are formatted "<job name>-<task id>".
        """
        def print_row(values):
            # Comma-separated row, preserving the original spacing.
            if not print_enabled:
                return
            for position, value in enumerate(values):
                if position:
                    print(',', end=' ')
                print(value, end=' ')
            print()

        result = []
        for r in range(0, self.len):
            names = []
            est = []
            eft = []
            for s in self.tasksOfResource[r]:
                if job_id != -1 and s.task.graph.name != job_id:
                    continue
                if job_id != -1:
                    names.append(s.task.id)
                else:
                    names.append(f'{s.task.graph.name}-{s.task.id}')
                est.append(s.EST)
                eft.append(s.EFT)
            result.append((names, est, eft))
            print_row(names)
            print_row(est)
            print_row(eft)
        if finishing is not None and print_enabled:
            print(finishing)
        return result

    def write_schedule(self, db_file, test_name='N/A', extra='single',
                       policy='', job_count=1):
        """Persist the whole plan into `db_file` through the writer module."""
        w = writer.Writer(db_file)
        w.create_plan()
        w.create_plan_head()
        unique_jobs_id = w.write_plan_head(test_name, policy, job_count)
        for r in range(0, self.len):
            for s in self.tasksOfResource[r]:
                w.write_plan(s.task.graph.name, s.task.graph.type, s.task.id,
                             unique_jobs_id, s.EST, s.EFT, r, self.power[r],
                             policy, extra)
        w.commit()
        w.close()

    @property
    def average_power(self):
        """Mean processing speed over all resources."""
        return math.fsum(self.power) / self.len

    @property
    def makespan(self):
        """Finish time of the latest scheduled task (0 when empty)."""
        eft = 0
        for queue in self.tasksOfResource:
            if queue:
                eft = max(eft, queue[-1].EFT)
        return eft

    def sum_gaps_resource(self, resource_id):
        """Total idle time between consecutive non-dummy tasks on one
        resource.  Raises when the queue is out of order (negative gap)."""
        queue = self.tasksOfResource[resource_id]
        if len(queue) <= 1:
            return 0
        sum_gaps = 0
        for previous, current in zip(queue, queue[1:]):
            if previous.task.dummy_task or current.task.dummy_task:
                continue
            gap_length = current.EST - previous.EFT
            if gap_length < 0:
                raise Exception('Schedule is not correct, check gaps!')
            sum_gaps += gap_length
        return sum_gaps

    @property
    def sum_internal_gaps(self):
        """Idle time inside the busy span of every resource."""
        return sum(self.sum_gaps_resource(r) for r in range(self.len))

    def select_resource(self, task, arrival_time=0):
        """Greedy minimum-EFT placement.

        Returns (EST, runtime, EFT, resource id, place id) of the
        earliest-finishing resource (first one wins on ties).
        """
        best = (-1, -1, -1, -1, -1)
        for r in range(0, self.len):
            est, eft, runtime, place_id = self.calculate_eft(
                task, r, arrival_time=arrival_time)
            if best[2] == -1 or eft < best[2]:
                best = (est, runtime, eft, r, place_id)
        return best

    def get_fastest_empty_resource(self):
        """Highest-index idle resource, or -1 when all are busy.

        Assumes resources are ordered slow-to-fast — cf.
        get_cheapest_empty_resource scanning from the front; TODO confirm.
        """
        for r in range(self.len - 1, -1, -1):
            if not self.tasksOfResource[r]:
                return r
        return -1
class CostAwareResources(Resources):
def __init__(self, powers, prices, timeslot_len, bandwidth):
super(CostAwareResources, self).__init__(powers, bandwidth)
self.timeslot = timeslot_len
self.price = prices
self.head_nodes = {}
self.sum_weight_scheduled = {}
def resource_cost(self, resource_id, start_time=-1, eft=-1, cost_only=True
):
"""
computes a resource's cost. if cost_only==True, only returns cost, otherwise it returns also start and finish-times.
:param resource_id:
:param start_time:
:param eft:
:param cost_only:
:return:
"""
tasks_in_resource = [t for t in self.tasksOfResource[resource_id] if
not t.task.dummy_task]
if not tasks_in_resource:
if eft == -1:
return 0 if cost_only else (0, 0, 0)
else:
return math.ceil((eft - start_time) / self.timeslot[
resource_id]) * self.price[resource_id]
if start_time != -1:
task_start_time = min(tasks_in_resource[0].EST, start_time)
else:
task_start_time = tasks_in_resource[0].EST
task_finish_time = max(tasks_in_resource[-1].EFT, eft)
reservation = task_finish_time - task_start_time
cost = math.ceil(reservation / self.timeslot[resource_id]
) * self.price[resource_id]
timeslot = self.timeslot[resource_id]
startof = [x.EST for x in tasks_in_resource]
endof = [x.EFT for x in tasks_in_resource]
if start_time != -1:
startof.append(start_time)
endof.append(eft)
startof.sort()
endof.sort()
timeslot_start = min(startof)
last_finish_time = max(endof)
current_task_id = 0
rent_periods = []
while timeslot_start < last_finish_time:
task_len = endof[current_task_id] - timeslot_start
time_slot_finish = endof[current_task_id] + (timeslot -
task_len % timeslot) % timeslot
current_task_id += 1
if current_task_id >= len(startof):
rent_periods.append((timeslot_start, time_slot_finish))
break
if startof[current_task_id] <= time_slot_finish:
pass
else:
rent_periods.append((timeslot_start, time_slot_finish))
timeslot_start = startof[current_task_id]
sum = 0
for rp in rent_periods:
sum += rp[1] - rp[0]
cost = sum / timeslot * self.price[resource_id]
if cost_only:
return cost
else:
return cost, min(startof), max(endof)
def resource_start_time(self, resource_id):
tasks_in_resource = self.tasksOfResource[resource_id]
length = len(tasks_in_resource)
start_index = 0
while length > 0 and tasks_in_resource[start_index].task.dummy_task:
start_index += 1
length -= 1
if length == 0:
return -1
return tasks_in_resource[start_index].EST
@property
def plan_cost(self):
cost = 0
for i in range(0, self.len):
cost += self.resource_cost(i)
return cost
def calculate_shared_cost_within_timeslot(self, timeslot_start, est, ft,
resource_id, task_id=None):
timeslot_end = timeslot_start + self.timeslot[resource_id]
if ft <= timeslot_start or est >= timeslot_end:
return 0
tasks = self.tasksOfResource[resource_id]
task_ids = self.task_id_in_timeslot(resource_id, timeslot_start)
sum_w = 0
for id in task_ids:
if task_id == id:
continue
start_time = tasks[id].EST
finish_time = tasks[id].EFT
if start_time < timeslot_start:
start_time = timeslot_start
if finish_time > timeslot_end:
finish_time = timeslot_end
sum_w += finish_time - start_time
if est < timeslot_start:
est = timeslot_start
if ft > timeslot_end:
ft = timeslot_end
if ft == est:
return 0
share = float(ft - est) / (sum_w + ft - est)
return share * self.price[resource_id]
def task_id_in_timeslot(self, resource_id, timeslot_start):
timeslot_end = timeslot_start + self.timeslot[resource_id]
task_ids = []
for id in range(len(self.tasksOfResource[resource_id])):
s = self.tasksOfResource[resource_id][id]
if (timeslot_start <= s.EST <= timeslot_end or timeslot_start <=
s.EFT <= timeslot_end or s.EST < timeslot_start and
timeslot_end < s.EFT):
task_ids.append(id)
return task_ids
def calculate_task_shared_cost(self, est=-1, ft=-1, resource_id=-1,
task_id=None):
if task_id is not None:
est = self.tasksOfResource[resource_id][task_id].EST
ft = self.tasksOfResource[resource_id][task_id].EFT
timeslot_len = self.timeslot[resource_id]
resource_start_time = self.resource_start_time(resource_id)
if resource_start_time == -1:
resource_start_time = est
timeslot_start = float(timeslot_len) * math.floor((est -
resource_start_time) / timeslot_len) + resource_start_time
timeslot_end = float(timeslot_len) * math.ceil((ft -
resource_start_time) / timeslot_len) + resource_start_time
shared_cost = 0
for interval in f_range(timeslot_start, timeslot_end + timeslot_len /
2, timeslot_len):
share_in_interval = self.calculate_shared_cost_within_timeslot(
interval, est, ft, resource_id, task_id)
shared_cost += share_in_interval
return shared_cost
def calculate_eft_and_cost(self, task, resource_id, arrival_time=0):
"""
calculates eft and cost of a certain task on a certain resource.
:param task:Definitions.Task()
:param resource_id:
:return:
"""
start_time, eft, runtime_on_resource, place_id = self.calculate_eft(
task, resource_id, arrival_time=arrival_time)
if task.dummy_task:
return start_time, eft, runtime_on_resource, place_id, 0
else:
cost = self.calculate_share_cost_change(resource_id, start_time,
eft, task.graph.name, True)
return start_time, eft, runtime_on_resource, place_id, cost
def sum_external_gaps_resource(self, r):
c, s, e = self.resource_cost(r, cost_only=False)
reservation = e - s
timeslot = self.timeslot[r]
gap = timeslot - reservation % timeslot
if gap == timeslot:
return 0
else:
return gap
@property
def sum_external_gaps(self):
sum_gaps = 0
for r in range(0, self.len):
sum_gaps += self.sum_external_gaps_resource(r)
return sum_gaps
@property
def sum_gaps(self):
return self.sum_internal_gaps + self.sum_external_gaps
@property
def occupied_resources(self):
counter = 0
for i in range(self.len):
if self.resource_cost(i) != 0:
counter += self.price[i]
return counter
@property
def gap_rate(self):
return self.sum_gaps / self.makespan / self.occupied_resources
def select_resource(self, task=Task(), test=None, arrival_time=0):
    """Choose the best placement for *task* under its sub-deadline/sub-budget.

    Budget-constrained workflows (task.asap is False) take the fastest
    affordable slot; deadline-constrained ones (task.asap is True) take the
    cheapest slot meeting the sub-deadline; with no preference the base
    class's earliest-finish-time rule is used.

    Returns:
        (est, runtime, eft, resource_id, place_id, cost) of the chosen slot.
    """
    eft_best = -1

    def something_found():
        return eft_best != -1

    if task.asap is None:
        # No deadline/budget split: fall back to plain EFT minimization.
        return super(CostAwareResources, self).select_resource(task)
    if not test:
        print('', end='')
    est_best, runtime_best, place_best, resource_best, cost_best = -1, -1, -1, -1, -1
    for r in range(0, self.len):
        start_time, eft, runtime_on_resource, place_id, cost = \
            self.calculate_eft_and_cost(task, r, arrival_time=arrival_time)
        feasible = eft <= task.sub_deadline and cost <= task.sub_budget
        best_violates = eft_best > task.sub_deadline or cost_best > task.sub_budget
        if task.asap:
            # Deadline workflow: cheapest feasible slot, finish time breaks ties.
            improves = (feasible and cost < cost_best) or \
                       (feasible and cost == cost_best and eft < eft_best)
        else:
            # Budget workflow: fastest feasible slot, cost breaks ties.
            improves = (feasible and eft < eft_best) or \
                       (feasible and eft == eft_best and cost < cost_best)
        if (not something_found()
                or (eft < eft_best and task.sub_deadline < eft_best)
                or (task.sub_budget < cost_best and eft <= task.sub_deadline and cost < cost_best)
                or (feasible and best_violates)
                or improves):
            est_best, eft_best, runtime_best, place_best, resource_best, cost_best = \
                start_time, eft, runtime_on_resource, place_id, r, cost
    if not test:
        print('', end='')
    return est_best, runtime_best, eft_best, resource_best, place_best, cost_best
def price_of_each_graph(self):
    """Apportion the total plan cost to each job (graph).

    Returns:
        dict: job name -> summed shared cost of that job's scheduled tasks.
    """
    # Start every known job at zero so jobs with no billed tasks still appear.
    costs = {name: 0 for name in self.job_task_schedule}
    for r in range(self.len):
        # `task_index` (not the builtin-shadowing `id` of the old code) selects
        # the stored schedule whose EST/EFT the cost routine should read.
        for task_index in range(len(self.tasksOfResource[r])):
            job_name = self.tasksOfResource[r][task_index].task.graph.name
            costs[job_name] += self.calculate_task_shared_cost(resource_id=r, task_id=task_index)
    return costs
def get_cheapest_empty_resource(self):
    """First idle resource index (cheapest-first ordering assumed — TODO confirm), or -1 when all are busy."""
    empties = (r for r in range(self.len) if not self.tasksOfResource[r])
    return next(empties, -1)
def schedule(self, task_schedule, place_id=-1, do_head_nodes=False):
    """Place the task (delegates to the base class) and optionally update head-node bookkeeping.

    When *do_head_nodes* is true, the job's head-node set (scheduled tasks
    with no scheduled successor yet) and its scheduled-weight total are kept
    up to date.
    """
    super(CostAwareResources, self).schedule(task_schedule, place_id)
    if not do_head_nodes:
        return
    job_name = task_schedule.task.graph.name
    if job_name in self.head_nodes:
        # The new task supersedes any of its parents that were heads.
        # (The old code also bound an unused `prev_heads` copy; dropped.)
        self.head_nodes[job_name] = self.head_nodes[job_name].difference(
            task_schedule.task.predecessor.keys())
        self.head_nodes[job_name].add(task_schedule.task.id)
    else:
        # First scheduled task of this job.
        self.head_nodes[job_name] = {task_schedule.task.id}
        self.sum_weight_scheduled[job_name] = 0
    self.sum_weight_scheduled[job_name] += task_schedule.task.weight
def calculate_share_cost_change(self, resource_id, est=-1, eft=-1, job_id=-1, only_this_job=False):
    """Per-job cost shares on *resource_id*, or their change when adding [est, eft) of *job_id*.

    With est == -1 the current share of each job is returned.  Otherwise a
    dict of per-job share changes is returned (or just *job_id*'s change when
    *only_this_job* is true).
    """
    # Busy time of each job currently scheduled on this resource.
    busy_per_job = {}
    for sched in self.tasksOfResource[resource_id]:
        name = sched.task.graph.name
        busy_per_job[name] = busy_per_job.get(name, 0) + (sched.EFT - sched.EST)
    old_total_busy = sum(busy_per_job.values())
    old_cost = self.resource_cost(resource_id)
    old_share = {}
    for name in busy_per_job:
        if old_total_busy == 0:
            old_share[name] = 0
        else:
            old_share[name] = float(old_cost) * busy_per_job[name] / old_total_busy
    if est == -1:
        return old_share
    # Re-price the resource as if the new interval were present.
    new_cost = self.resource_cost(resource_id, start_time=est, eft=eft)
    busy_per_job[job_id] = busy_per_job.get(job_id, 0) + (eft - est)
    new_total_busy = old_total_busy + eft - est
    changes = {}
    for name, busy in busy_per_job.items():
        if new_total_busy == 0:
            new_share = 0
        else:
            new_share = float(new_cost) * busy / new_total_busy
        changes[name] = new_share - old_share.get(name, 0)
    return changes[job_id] if only_this_job else changes
| from __future__ import print_function
import math
import db
from db import writer
from enum import Enum
from Definitions.Graph import Task
class Constraint(Enum):
    """Kind of QoS constraint a workflow is scheduled under: hard deadline, budget cap, or none."""
    deadline = 1
    budget = 2
    none = 3
def f_range(x, y, jump):
    """Yield values from ``x`` (inclusive) up to ``y`` (exclusive), stepping by ``jump``.

    Float analogue of :func:`range`; the step is accumulated, so float
    rounding matches repeated addition.
    """
    current = x
    while current < y:
        yield current
        current = current + jump
class TaskSchedule:
    """Placement record for one task: start (EST), finish (EFT), runtime and resource."""

    def __init__(self, task, est=-1, runtime=-1, eft=-1, resource=-1):
        # Attribute names mirror the scheduler's conventions (upper-case EST/EFT).
        self.task = task
        self.EST = est
        self.EFT = eft
        self.resource = resource
        self.runtime = runtime
class Resources(object):
    """Pool of computing resources; keeps one ordered schedule list per resource."""
    # Number of resources; -1 until __init__ runs.  NOTE(review): the name
    # shadows the builtin len() when accessed as an attribute.
    len = -1
    # Network bandwidth used to convert data sizes into transfer delays.
    bandwidth = 0
    def __init__(self, powers, bandwidth): # e.g. [1,1,2,2,4]
        number_of_resources = len(powers)
        # Processing power of each resource (task runtime = weight / power).
        self.power = powers
        self.tasksOfResource = [] # ordered set of TaskSchedule objects in every resource
        for i in range(number_of_resources):
            self.tasksOfResource.append([])
        self.len = number_of_resources
        self.bandwidth = bandwidth
        self.job_task_schedule = {} # job_task_schedule['Mine_10_1'][4].EFT == 12
    def find_gap(self, resource, start_time, runtime):
        """Find a slot of length *runtime* on *resource* starting no earlier than *start_time*.

        Returns:
            (start, place_index): start time of the slot and the list index at
            which the new schedule should be inserted; place_index is -1 when
            the task goes at the end (append), and also when resource == -1
            (no placement is attempted).
        """
        if resource == -1:
            return start_time, -1
        number_of_tasks = len(self.tasksOfResource[resource])
        if number_of_tasks == 0:
            return start_time, 0
        elif self.tasksOfResource[resource][0].EST >= start_time + runtime:
            # The requested slot fits entirely before the first scheduled task.
            return start_time, 0
        elif number_of_tasks == 1:
            if self.tasksOfResource[resource][0].EFT < start_time:
                return start_time, 1
            else:
                return self.tasksOfResource[resource][0].EFT, 1
        else:
            for i in range(1, number_of_tasks):
                if self.tasksOfResource[resource][i].EST <= start_time:
                    continue
                elif start_time < self.tasksOfResource[resource][i - 1].EFT:
                    # Candidate gap lies strictly between tasks i-1 and i.
                    gap = self.tasksOfResource[resource][i].EST - self.tasksOfResource[resource][i - 1].EFT
                    if gap < runtime:
                        continue
                    else:
                        return self.tasksOfResource[resource][i - 1].EFT, i
                elif self.tasksOfResource[resource][i - 1].EFT <= start_time < self.tasksOfResource[resource][i].EST:
                    # start_time falls inside the gap; check the remaining room.
                    if self.tasksOfResource[resource][i].EST - start_time < runtime:
                        continue
                    else:
                        return start_time, i
            else:  # no gap is found, put it at the end (it can be done using append method)
                return max(self.tasksOfResource[resource][-1].EFT, start_time), -1
    def calculate_eft(self, task, resource_id, arrival_time=0):
        """Earliest start/finish time of *task* on *resource_id*.

        The earliest start honours each predecessor's finish time plus a
        network transfer delay (waived when the predecessor ran on the same
        resource), then a gap search picks the actual slot.

        Returns:
            (start_time, eft, runtime_on_resource, place_id)
        """
        g = task.graph
        if resource_id == -1:
            graphs_task_on_resource = []
            # Hypothetical runtime on the fastest resource available.
            task_runtime_on_resource = task.weight / max(self.power)
        else:
            task_runtime_on_resource = task.weight / self.power[resource_id]
            # Ids of this job's tasks already placed on the candidate resource.
            graphs_task_on_resource = list(
                map(lambda t: t.task.id if t.task.graph.name == g.name else -1, self.tasksOfResource[resource_id]))
        max_est_of_task = arrival_time
        for p in task.predecessor:
            # check if p and task.id on the same resource_id
            if p in graphs_task_on_resource:
                communication_delay = 0
            else:
                communication_delay = task.predecessor[p] / self.bandwidth
            if g.name not in self.job_task_schedule or p not in self.job_task_schedule[g.name]:
                # Predecessor not scheduled yet: it cannot constrain the EST.
                continue
            p_eft = self.job_task_schedule[g.name][p].EFT
            if p_eft + communication_delay > max_est_of_task:
                max_est_of_task = p_eft + communication_delay
        # EST Of Task is found and stored in max_est_of_task
        # Find a gap to schedule it:
        start_time, place_id = self.find_gap(resource_id, max_est_of_task, task_runtime_on_resource)
        eft_task = start_time + task_runtime_on_resource
        return start_time, eft_task, task_runtime_on_resource, place_id
def schedule(self, task_schedule, place_id=-1):
"""
Schedules a task in a place id. if place_id is -1 the schedule is appended to the last.
:type task_schedule: TaskSchedule
:type place_id: int
"""
resource = task_schedule.resource
if place_id == -1:
self.tasksOfResource[resource].append(task_schedule)
else:
self.tasksOfResource[resource].insert(place_id, task_schedule)
if task_schedule.task.graph.name in self.job_task_schedule:
pass
else:
self.job_task_schedule[task_schedule.task.graph.name] = {}
self.job_task_schedule[task_schedule.task.graph.name][task_schedule.task.id] = task_schedule
    def show_schedule(self, job_id=-1, finishing=None, print_enabled=False):
        """Collect (names, ESTs, EFTs) per resource, optionally printing them.

        :param job_id: if not -1, only tasks of that job are listed (names are
            then bare task ids; otherwise "job-task" labels are used).
        :param finishing: optional trailer printed after the listing.
        :param print_enabled: when False nothing is printed, only returned.
        :return: list of (names, est, eft) tuples, one per resource.
        """
        result = []
        for r in range(0, self.len):
            names = []
            est = []
            eft = []
            # Closure appends one schedule's data to the three parallel lists.
            def add_entries(x):
                if job_id != -1 and x.task.graph.name != job_id:
                    return
                names.append(x.task.id if job_id != -1 else f'{x.task.graph.name}-{x.task.id}')
                est.append(x.EST)
                eft.append(x.EFT)
            list(map(add_entries, self.tasksOfResource[r]))
            result.append((names, est, eft))
            # Prints a comma-separated row, or nothing when printing is off.
            def print_list(x):
                if not print_enabled:
                    return
                first = True
                for e in x:
                    if first:
                        first = False
                    else:
                        print(',', end=' ')
                    print(e, end=' ')
                print()
            print_list(names)
            print_list(est)
            print_list(eft)
        if finishing is not None and print_enabled:
            print(finishing)
        return result
    def write_schedule(self, db_file, test_name='N/A', extra='single', policy='', job_count=1):
        """Persist the whole plan to *db_file* through db.writer, one row per scheduled task."""
        w = writer.Writer(db_file)
        w.create_plan()
        w.create_plan_head()
        unique_jobs_id = w.write_plan_head(test_name, policy, job_count)
        # NOTE: add_entries closes over the loop variable `r` below; map() is
        # consumed immediately in each iteration, so the binding is correct.
        def add_entries(x):
            job_name, job_type, task_id, jobs_id, start_time,\
            finish_time, resource_id, resource_speed, \
            job_component_id, extra_params = x.task.graph.name, x.task.graph.type, x.task.id, unique_jobs_id\
                , x.EST,\
                x.EFT, r, self.power[r], policy, extra
            w.write_plan(job_name, job_type, task_id, jobs_id, start_time, finish_time, resource_id,
                         resource_speed, job_component_id, extra_params)
        for r in range(0, self.len):
            list(map(add_entries, self.tasksOfResource[r]))
        w.commit()
        w.close()
@property
def average_power(self):
return math.fsum(self.power) / self.len
@property
def makespan(self):
eft = 0
for i in range(0, self.len):
tasks_in_resource = self.tasksOfResource[i]
if len(tasks_in_resource) == 0:
continue
eft = max(eft, tasks_in_resource[-1].EFT)
return eft
def sum_gaps_resource(self, resource_id):
tasks_in_current_resource = self.tasksOfResource[resource_id]
num_tasks = len(tasks_in_current_resource)
if num_tasks <= 1:
return 0
sum_gaps = 0
for i in range(1, num_tasks):
if tasks_in_current_resource[i - 1].task.dummy_task or tasks_in_current_resource[i].task.dummy_task:
continue
finish_prev = tasks_in_current_resource[i - 1].EFT
start_current = tasks_in_current_resource[i].EST
gap_length = start_current - finish_prev
if gap_length < 0:
raise Exception('Schedule is not correct, check gaps!')
sum_gaps += gap_length
return sum_gaps
@property
def sum_internal_gaps(self):
sum_gaps = 0
for r in range(0, self.len):
sum_gaps += self.sum_gaps_resource(r)
return sum_gaps
    def select_resource(self, task, arrival_time=0):
        """HEFT-style pick: place *task* on the resource giving the smallest EFT.

        :return: (est, runtime, eft, resource_id, place_id) of the best slot;
            note the return order differs from the internal assignment order.
        """
        est_best, eft_best, runtime_on_resource_best, place_id_best, resource_id_best = -1, -1, -1, -1, -1
        for r in range(0, self.len):
            max_est_of_task, eft_task, task_runtime_on_resource, place_id = self.calculate_eft(task, r, arrival_time=arrival_time)
            if eft_best == -1 or eft_task < eft_best:
                est_best, eft_best, runtime_on_resource_best, place_id_best, resource_id_best = \
                    max_est_of_task, eft_task, task_runtime_on_resource, place_id, r
        return est_best, runtime_on_resource_best, eft_best, resource_id_best, place_id_best
def get_fastest_empty_resource(self):
for r in range(self.len - 1, -1, -1):
if len(self.tasksOfResource[r]) == 0:
return r
else:
return -1
class CostAwareResources(Resources):
    """Resource pool with per-timeslot pricing, cost sharing and head-node tracking."""
    def __init__(self, powers, prices, timeslot_len, bandwidth):
        super(CostAwareResources, self).__init__(powers, bandwidth)
        self.timeslot = timeslot_len  # billing timeslot length, per resource
        self.price = prices  # price per timeslot, per resource
        self.head_nodes = {}  # job name -> set of current head-node task ids
        self.sum_weight_scheduled = {}  # job name -> total scheduled weight
    def resource_cost(self, resource_id, start_time=-1, eft=-1, cost_only=True):
        """Compute a resource's rental cost, optionally as if [start_time, eft) were added.

        Dummy tasks are ignored.  Billing merges scheduled intervals into rent
        periods rounded up to whole timeslots before pricing.

        :param resource_id: index of the resource
        :param start_time: optional extra interval start (-1 means none)
        :param eft: optional extra interval finish
        :param cost_only: if True return just the cost, else (cost, start, finish)
        """
        tasks_in_resource = [t for t in self.tasksOfResource[resource_id] if not t.task.dummy_task]
        if not tasks_in_resource:
            if eft == -1:
                return 0 if cost_only else (0, 0, 0)
            else:
                # Empty resource: price only the hypothetical interval.
                return math.ceil((eft - start_time) / self.timeslot[resource_id]) * self.price[resource_id]
        if start_time != -1:
            task_start_time = min(tasks_in_resource[0].EST, start_time)
        else:
            task_start_time = tasks_in_resource[0].EST
        task_finish_time = max(tasks_in_resource[-1].EFT, eft)
        reservation = task_finish_time - task_start_time
        # NOTE(review): this single-block estimate is overwritten by the
        # rent-period computation below (dead store kept as in the original).
        cost = math.ceil(reservation / self.timeslot[resource_id]) * self.price[resource_id]
        timeslot = self.timeslot[resource_id]
        startof = [x.EST for x in tasks_in_resource]
        endof = [x.EFT for x in tasks_in_resource]
        if start_time != -1:
            startof.append(start_time)
            endof.append(eft)
            startof.sort()
            endof.sort()
        timeslot_start = min(startof)
        last_finish_time = max(endof)
        current_task_id = 0
        rent_periods = []
        # Merge overlapping/adjacent busy intervals into timeslot-aligned rent periods.
        while timeslot_start < last_finish_time:
            task_len = endof[current_task_id] - timeslot_start
            time_slot_finish = endof[current_task_id] + (timeslot - (task_len % timeslot)) % timeslot
            current_task_id += 1
            if current_task_id >= len(startof):
                rent_periods.append((timeslot_start, time_slot_finish))
                break
            if startof[current_task_id] <= time_slot_finish:
                # Next task starts inside the current rent period: keep extending it.
                pass
            else:
                rent_periods.append((timeslot_start, time_slot_finish))
                timeslot_start = startof[current_task_id]
        # NOTE(review): `sum` shadows the builtin within this scope.
        sum = 0
        for rp in rent_periods:
            sum += (rp[1] - rp[0])
        cost = sum / timeslot * self.price[resource_id]
        if cost_only:
            return cost
        else:
            return cost, min(startof), (max(endof))
def resource_start_time(self, resource_id):
tasks_in_resource = self.tasksOfResource[resource_id]
length = len(tasks_in_resource)
start_index = 0
while length > 0 and tasks_in_resource[start_index].task.dummy_task:
start_index += 1
length -= 1
if length == 0:
return -1
return tasks_in_resource[start_index].EST
@property
def plan_cost(self):
cost = 0
for i in range(0, self.len):
cost += self.resource_cost(i)
return cost
    def calculate_shared_cost_within_timeslot(self, timeslot_start, est, ft, resource_id, task_id=None):
        """Cost share attributed to the interval [est, ft) inside one billing timeslot.

        The timeslot's price is split among all tasks overlapping the slot in
        proportion to their occupied time; *task_id* (if given) is excluded
        from the other-tasks total because it is the interval being priced.
        """
        timeslot_end = timeslot_start + self.timeslot[resource_id]
        if ft <= timeslot_start or est >= timeslot_end:
            return 0
        tasks = self.tasksOfResource[resource_id]
        task_ids = self.task_id_in_timeslot(resource_id, timeslot_start)
        sum_w = 0
        for id in task_ids:
            if task_id == id:
                continue
            # Clip each other task's interval to this timeslot.
            start_time = tasks[id].EST
            finish_time = tasks[id].EFT
            if start_time < timeslot_start:
                start_time = timeslot_start
            if finish_time > timeslot_end:
                finish_time = timeslot_end
            sum_w += finish_time - start_time
        # Clip the priced interval itself to the timeslot.
        if est < timeslot_start:
            est = timeslot_start
        if ft > timeslot_end:
            ft = timeslot_end
        if ft == est:
            return 0
        share = float(ft - est) / (sum_w + ft - est)
        return share * self.price[resource_id]
def task_id_in_timeslot(self, resource_id, timeslot_start):
timeslot_end = timeslot_start + self.timeslot[resource_id]
task_ids = []
for id in range(len(self.tasksOfResource[resource_id])):
s = self.tasksOfResource[resource_id][id]
if timeslot_start <= s.EST <= timeslot_end or timeslot_start <= s.EFT <= timeslot_end \
or s.EST < timeslot_start and timeslot_end < s.EFT:
task_ids.append(id)
return task_ids
    def calculate_task_shared_cost(self, est=-1, ft=-1, resource_id=-1, task_id=None):
        """Shared cost of one task: the sum of its per-timeslot shares.

        Either pass est/ft explicitly, or a *task_id* of an already scheduled
        task (its stored EST/EFT are then used).  Timeslot boundaries are
        aligned to the EST of the resource's first real task.
        """
        if task_id is not None:
            # this task has already been scheduled
            est = self.tasksOfResource[resource_id][task_id].EST
            ft = self.tasksOfResource[resource_id][task_id].EFT
        timeslot_len = self.timeslot[resource_id]
        resource_start_time = self.resource_start_time(resource_id)
        if resource_start_time == -1:
            resource_start_time = est
        timeslot_start = float(timeslot_len) * math.floor((est - resource_start_time) /
                                                          timeslot_len) + resource_start_time
        timeslot_end = float(timeslot_len) * math.ceil((ft - resource_start_time) /
                                                       timeslot_len) + resource_start_time
        shared_cost = 0
        # The + timeslot_len/2 guards against float round-off dropping the last slot.
        for interval in f_range(timeslot_start, timeslot_end + timeslot_len / 2, timeslot_len):
            share_in_interval = self.calculate_shared_cost_within_timeslot(interval, est, ft, resource_id, task_id)
            shared_cost += share_in_interval
        return shared_cost
    def calculate_eft_and_cost(self, task, resource_id, arrival_time=0):
        """Calculate placement and cost of *task* on *resource_id*.

        :param task: Definitions.Task()
        :param resource_id: index of the candidate resource
        :param arrival_time: earliest time the task may start
        :return: (start_time, eft, runtime_on_resource, place_id, cost);
            dummy tasks cost 0, real tasks pay the change in their job's
            shared cost on this resource.
        """
        start_time, eft, runtime_on_resource, place_id = self.calculate_eft(task, resource_id, arrival_time=arrival_time)
        if task.dummy_task:
            return start_time, eft, runtime_on_resource, place_id, 0
        else:
            cost = self.calculate_share_cost_change(resource_id, start_time, eft, task.graph.name, True)
            return start_time, eft, runtime_on_resource, place_id, cost
def sum_external_gaps_resource(self, r):
c, s, e = self.resource_cost(r, cost_only=False)
reservation = e - s
timeslot = self.timeslot[r]
gap = timeslot - reservation % timeslot
if gap == timeslot:
return 0
else:
return gap
@property
def sum_external_gaps(self):
sum_gaps = 0
for r in range(0, self.len):
sum_gaps += self.sum_external_gaps_resource(r)
return sum_gaps
@property
def sum_gaps(self):
return self.sum_internal_gaps + self.sum_external_gaps
@property
def occupied_resources(self):
counter = 0
for i in range(self.len):
if self.resource_cost(i) != 0:
counter += self.price[i]
return counter
@property
def gap_rate(self):
return self.sum_gaps / self.makespan / self.occupied_resources
    def select_resource(self, task=Task(), test=None, arrival_time=0):
        """Pick the best resource for *task* under its sub-deadline/sub-budget.

        Budget workflows (task.asap is False) take the fastest affordable
        slot; deadline workflows (task.asap is True) take the cheapest slot
        meeting the sub-deadline; with no preference the base class's
        earliest-finish-time rule is used.

        :return: (est, runtime, eft, resource_id, place_id, cost)
        """
        eft_best = -1
        def something_found():
            return eft_best != -1
        if task.asap is not None:
            if not task.asap:  # budget workflow
                if not test:
                    print('', end='')
                # fastest affordable
                est_best, eft_best, runtime_on_resource_best, place_id_best, resource_id_best, cost_best = \
                    -1, -1, -1, -1, -1, -1
                for r in range(0, self.len):
                    start_time, eft, runtime_on_resource, place_id, cost = self.calculate_eft_and_cost(task, r, arrival_time=arrival_time)
                    # Take r when nothing is chosen yet, when it dominates an
                    # infeasible incumbent, or when it is feasible and faster
                    # (ties broken on cost).
                    if not something_found() or \
                            eft < eft_best and task.sub_deadline < eft_best or \
                            task.sub_budget < cost_best and eft <= task.sub_deadline and cost < cost_best or \
                            eft <= task.sub_deadline and cost <= task.sub_budget and \
                            (eft_best > task.sub_deadline or cost_best > task.sub_budget) or \
                            eft <= task.sub_deadline and cost <= task.sub_budget and eft < eft_best or \
                            eft <= task.sub_deadline and cost <= task.sub_budget and eft == eft_best and cost < cost_best:
                        est_best, eft_best, runtime_on_resource_best, place_id_best, resource_id_best, cost_best = \
                            start_time, eft, runtime_on_resource, place_id, r, cost
                        continue
                if not test:
                    print('', end='')
                return est_best, runtime_on_resource_best, eft_best, resource_id_best, place_id_best, cost_best
            elif task.asap:  # deadline workflow
                # cheapest before sub-deadline
                if not test:
                    print('', end='')
                est_best, eft_best, runtime_on_resource_best, place_id_best, resource_id_best, cost_best = \
                    -1, -1, -1, -1, -1, -1
                for r in range(0, self.len):
                    start_time, eft, runtime_on_resource, place_id, cost = self.calculate_eft_and_cost(task, r, arrival_time=arrival_time)
                    # if eft_best == -1 or eft_best > eft > task.sub_deadline or task.sub_deadline >= eft and (
                    #         cost < cost_best or eft_best > task.sub_deadline):
                    # Same structure as above but feasible candidates compete
                    # on cost first, finish time second.
                    if not something_found() or \
                            eft < eft_best and task.sub_deadline < eft_best or \
                            task.sub_budget < cost_best and eft <= task.sub_deadline and cost < cost_best or \
                            eft <= task.sub_deadline and cost <= task.sub_budget and \
                            (eft_best > task.sub_deadline or cost_best > task.sub_budget) or \
                            eft <= task.sub_deadline and cost <= task.sub_budget and cost < cost_best or \
                            eft <= task.sub_deadline and cost <= task.sub_budget and cost == cost_best and eft < eft_best:
                        est_best, eft_best, runtime_on_resource_best, place_id_best, resource_id_best, cost_best = \
                            start_time, eft, runtime_on_resource, place_id, r, cost
                    # if cost_best == -1 or cost_best > cost > task.sub_budget or task.sub_budget >= cost and (
                    #         eft < eft_best or cost_best > task.sub_budget):
                    #     est_best, eft_best, runtime_on_resource_best, place_id_best, resource_id_best, cost_best = \
                    #         start_time, eft, runtime_on_resource, place_id, r, cost
                        continue
                if not test:
                    print('', end='')
                return est_best, runtime_on_resource_best, eft_best, resource_id_best, place_id_best, cost_best
        else:
            # minimize time (as in HEFT) TODO: it doesn't return cost (as the sixth return value)
            return super(CostAwareResources, self).select_resource(task)
def price_of_each_graph(self):
graph_names = self.job_task_schedule.keys()
costs = {}
for name in graph_names:
costs[name] = 0
for r in range(self.len):
for id in range(len(self.tasksOfResource[r])):
name = self.tasksOfResource[r][id].task.graph.name
cost = self.calculate_task_shared_cost(resource_id=r, task_id=id)
costs[name] += cost
return costs
def get_cheapest_empty_resource(self):
for r in range(self.len):
if len(self.tasksOfResource[r]) == 0:
return r
else:
return -1
def schedule(self, task_schedule, place_id=-1, do_head_nodes=False):
super(CostAwareResources, self).schedule(task_schedule, place_id)
# head_node computations:
if not do_head_nodes:
return
if task_schedule.task.graph.name in self.head_nodes:
prev_heads = self.head_nodes[task_schedule.task.graph.name]
parents_of_current_task = task_schedule.task.predecessor.keys()
self.head_nodes[task_schedule.task.graph.name] = self.head_nodes[task_schedule.task.graph.name].difference(
parents_of_current_task)
self.head_nodes[task_schedule.task.graph.name].add(task_schedule.task.id)
else:
self.head_nodes[task_schedule.task.graph.name] = set()
self.head_nodes[task_schedule.task.graph.name].add(task_schedule.task.id)
self.sum_weight_scheduled[task_schedule.task.graph.name] = 0
self.sum_weight_scheduled[task_schedule.task.graph.name] += task_schedule.task.weight
    def calculate_share_cost_change(self, resource_id, est=-1, eft=-1, job_id=-1, only_this_job=False):
        """Per-job cost shares on *resource_id*, or their change when adding [est, eft) of *job_id*.

        With est == -1 the current share of each job is returned.  Otherwise a
        dict of per-job share changes is returned (or only *job_id*'s change
        when *only_this_job* is true).
        """
        # Busy time of each job currently scheduled on this resource.
        sum_w = {}
        for i in range(len(self.tasksOfResource[resource_id])):
            sch = self.tasksOfResource[resource_id][i]
            job = sch.task.graph.name
            if job not in sum_w:
                sum_w[job] = 0
            sum_w[job] += sch.EFT - sch.EST
        sum_w_all_old = sum(sum_w.values())
        prev_cost_resource = self.resource_cost(resource_id)
        prev_cost_job = {}
        for j in sum_w.keys():
            if sum_w_all_old == 0:
                prev_cost_job[j] = 0
            else:
                prev_cost_job[j] = float(prev_cost_resource) * sum_w[j] / sum_w_all_old
        if est == -1:
            return prev_cost_job
        # Re-price the resource as if the new interval were present.
        new_cost_resource = self.resource_cost(resource_id, start_time=est, eft=eft)
        if job_id not in sum_w:
            sum_w[job_id] = 0
        sum_w[job_id] += eft - est
        sum_w_all_new = sum_w_all_old + eft - est
        new_cost_job = {}
        changes = {}
        for j in sum_w.keys():
            if sum_w_all_new == 0:
                new_cost_job[j] = 0
            else:
                new_cost_job[j] = float(new_cost_resource) * sum_w[j] / sum_w_all_new
            if j not in prev_cost_job:
                # New job on this resource: its whole share is the change.
                changes[j] = new_cost_job[j]
            else:
                changes[j] = new_cost_job[j] - prev_cost_job[j]
        if only_this_job:
            return changes[job_id]
        return changes
| [
22,
28,
32,
36,
40
] |
2,055 | fb6dd9ec7d8dc80eace90dadc2112c7c27125efd | <mask token>
| <mask token>
res.read_rcf()
res.read_his()
<mask token>
for kt, step in enumerate(res.steps):
if step.conv_status in [-1]:
if step.time in tx:
tsteps.append(kt)
<mask token>
res.read_dat()
res.read_s00()
for lab in res.ele_group_labels:
if lab == 'VOLUMICS':
res.read_s01()
vtktools.write_vtu(res, vol=True, verbose=False, outline=True)
| <mask token>
pathname = (
'\\\\192.168.1.51\\Mandats sur H RAID0\\M1010_Tourbillon\\stab_panneau')
prob = 'M1010_stabPann_m2_renfLat'
res = zr(pathname, prob)
res.read_rcf()
res.read_his()
tx = [67]
tsteps = []
for kt, step in enumerate(res.steps):
if step.conv_status in [-1]:
if step.time in tx:
tsteps.append(kt)
res.out_steps = tsteps
res.read_dat()
res.read_s00()
for lab in res.ele_group_labels:
if lab == 'VOLUMICS':
res.read_s01()
vtktools.write_vtu(res, vol=True, verbose=False, outline=True)
| import numpy as np
from zsoil_tools import zsoil_results as zr
from zsoil_tools import vtktools
pathname = (
'\\\\192.168.1.51\\Mandats sur H RAID0\\M1010_Tourbillon\\stab_panneau')
prob = 'M1010_stabPann_m2_renfLat'
res = zr(pathname, prob)
res.read_rcf()
res.read_his()
tx = [67]
tsteps = []
for kt, step in enumerate(res.steps):
if step.conv_status in [-1]:
if step.time in tx:
tsteps.append(kt)
res.out_steps = tsteps
res.read_dat()
res.read_s00()
for lab in res.ele_group_labels:
if lab == 'VOLUMICS':
res.read_s01()
vtktools.write_vtu(res, vol=True, verbose=False, outline=True)
| # @description Exporting outline (boundary faces) of zsoil results to vtu
# @input zsoil results
# @output vtu unstructured grid
# @author Matthias Preisig
# @date 2017/10/10
import numpy as np
from zsoil_tools import zsoil_results as zr
from zsoil_tools import vtktools
pathname = r'\\192.168.1.51\Mandats sur H RAID0\M1010_Tourbillon\stab_panneau'
prob = 'M1010_stabPann_m2_renfLat'
res = zr(pathname,prob)
res.read_rcf()
res.read_his()
tx = [67]
tsteps = []
for kt,step in enumerate(res.steps):
if step.conv_status in [-1]:
if step.time in tx:
tsteps.append(kt)
res.out_steps = tsteps
res.read_dat()
res.read_s00()
for lab in res.ele_group_labels:
if lab=='VOLUMICS':
res.read_s01() # volumics
## elif lab=='SHELLS':
## res.read_s02() # shells
## elif lab=='TRUSSES':
## res.read_s03() # trusses
## elif lab=='BEAMS':
## res.read_s04() # beams
## elif lab=='CONTACT':
## res.read_s07()
##vtktools.write_vtu(res,beams=True,verbose=False)
##vtktools.write_vtu(res,trusses=True,verbose=False)
vtktools.write_vtu(res,vol=True,verbose=False,outline=True)
##vtktools.write_vtu(res,shells=True,verbose=False)
| [
0,
1,
2,
3,
4
] |
2,056 | 1ce34bfec6a9acfeaf0d5c5835ebebed4d7ee369 | #!/usr/bin/env python
import remctl
import json
import datetime
import time,random
import argparse
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description = "List all TCC Forge overlays"
)
parser.add_argument(
'-n',
'--now',
action = "store_false",
default = True,
dest = 'now',
help = 'skip the random wait'
)
cmdlineopts = parser.parse_args()
if cmdlineopts.now:
timeout = random.randint(0, 2 * 60 * 10) * 0.1
time.sleep(timeout)
command = ('forge','overlay','list','{"group":"tcc","distro":"F19"}')
c = remctl.remctl(host = 'update.nmt.edu', command=command)
r = json.loads(c.stdout)
for overlay in r:
print overlay["name"]
| null | null | null | null | [
0
] |
2,057 | 415d58e502e8a33f7a37c4fb2da34e838246ea9c | <mask token>
| modulus_size = 2048
n, e = 0, 0
k = modulus_size // 8
queries = 0
print_queries_every = 1
number_of_time_to_confirm_conforming = 10
encrypt_openssl = True
t_start = 0
cwd = ''
host = '10.0.0.1'
port = 4430
sock = 0
max_message_size = 2048
|
# RSA key
modulus_size = 2048
(n, e) = (0, 0) # Not being initialize here
# modulus size in bytes
k = modulus_size // 8
# keep track of the oracle calls
queries = 0
print_queries_every = 1
number_of_time_to_confirm_conforming = 10
# Choose to use OpenSSL encrypt function or our own implementations
encrypt_openssl = True
# start timer
t_start = 0 # Not being initialize here
# Current Working Directory of the project
cwd = ""
# Server info
host = '10.0.0.1'
port = 4430
sock = 0 # Not being initialize here
max_message_size = 2048
| null | null | [
0,
1,
2
] |
2,058 | caca4309034f08874e1e32828a601e7e3d4d3efd | <mask token>
def readOnePolicy(path2):
ethic_set = wn.synsets('ethic')
standard_set = wn.synsets('standard')
privacy_set = wn.synsets('privacy')
education_set = wn.synsets('education')
investment_set = wn.synsets('investment')
application_set = wn.synsets('application')
content = ''
with codecs.open(path2, 'r', encoding=u'utf-8', errors='ignore') as fr:
content = fr.read()
content = content.split()
stop_words = ''
with open('stopWords.txt', 'r') as f2:
stop_words = f2.read()
stop_words = stop_words.split()
ethic_max_prob = 0
standard_max_prob = 0
privacy_max_prob = 0
education_max_prob = 0
investment_max_prob = 0
application_max_prob = 0
for i in range(len(content)):
contentSyns = []
if content[i] not in stop_words:
if not content[i].isnumeric():
contentSyns = wn.synsets(content[i])
if len(contentSyns) > 0:
ethic_prob = max([(0 if e.path_similarity(c) == None else
e.path_similarity(c)) for e in ethic_set for c in
contentSyns])
standard_prob = max([(0 if s.path_similarity(c) == None
else s.path_similarity(c)) for s in standard_set for
c in contentSyns])
privacy_prob = max([(0 if p.path_similarity(c) == None else
p.path_similarity(c)) for p in privacy_set for c in
contentSyns])
education_prob = max([(0 if edu.path_similarity(c) ==
None else edu.path_similarity(c)) for edu in
education_set for c in contentSyns])
investment_prob = max([(0 if i.path_similarity(c) ==
None else i.path_similarity(c)) for i in
investment_set for c in contentSyns])
application_prob = max([(0 if a.path_similarity(c) ==
None else a.path_similarity(c)) for a in
application_set for c in contentSyns])
if ethic_prob > ethic_max_prob:
ethic_max_prob = ethic_prob
if standard_prob > standard_max_prob:
standard_max_prob = standard_prob
if privacy_prob > privacy_max_prob:
privacy_max_prob = privacy_prob
if education_prob > education_max_prob:
education_max_prob = education_prob
if investment_prob > investment_max_prob:
investment_max_prob = investment_prob
if application_prob > application_max_prob:
application_max_prob = application_prob
print(path2, ' ', ethic_max_prob, ' ', standard_max_prob, ' ',
privacy_max_prob, ' ', education_max_prob, ' ',
investment_max_prob, ' ', application_max_prob)
<mask token>
| <mask token>
def readOnePolicy(path2):
ethic_set = wn.synsets('ethic')
standard_set = wn.synsets('standard')
privacy_set = wn.synsets('privacy')
education_set = wn.synsets('education')
investment_set = wn.synsets('investment')
application_set = wn.synsets('application')
content = ''
with codecs.open(path2, 'r', encoding=u'utf-8', errors='ignore') as fr:
content = fr.read()
content = content.split()
stop_words = ''
with open('stopWords.txt', 'r') as f2:
stop_words = f2.read()
stop_words = stop_words.split()
ethic_max_prob = 0
standard_max_prob = 0
privacy_max_prob = 0
education_max_prob = 0
investment_max_prob = 0
application_max_prob = 0
for i in range(len(content)):
contentSyns = []
if content[i] not in stop_words:
if not content[i].isnumeric():
contentSyns = wn.synsets(content[i])
if len(contentSyns) > 0:
ethic_prob = max([(0 if e.path_similarity(c) == None else
e.path_similarity(c)) for e in ethic_set for c in
contentSyns])
standard_prob = max([(0 if s.path_similarity(c) == None
else s.path_similarity(c)) for s in standard_set for
c in contentSyns])
privacy_prob = max([(0 if p.path_similarity(c) == None else
p.path_similarity(c)) for p in privacy_set for c in
contentSyns])
education_prob = max([(0 if edu.path_similarity(c) ==
None else edu.path_similarity(c)) for edu in
education_set for c in contentSyns])
investment_prob = max([(0 if i.path_similarity(c) ==
None else i.path_similarity(c)) for i in
investment_set for c in contentSyns])
application_prob = max([(0 if a.path_similarity(c) ==
None else a.path_similarity(c)) for a in
application_set for c in contentSyns])
if ethic_prob > ethic_max_prob:
ethic_max_prob = ethic_prob
if standard_prob > standard_max_prob:
standard_max_prob = standard_prob
if privacy_prob > privacy_max_prob:
privacy_max_prob = privacy_prob
if education_prob > education_max_prob:
education_max_prob = education_prob
if investment_prob > investment_max_prob:
investment_max_prob = investment_prob
if application_prob > application_max_prob:
application_max_prob = application_prob
print(path2, ' ', ethic_max_prob, ' ', standard_max_prob, ' ',
privacy_max_prob, ' ', education_max_prob, ' ',
investment_max_prob, ' ', application_max_prob)
<mask token>
for root, dirs, files in os.walk(file_dir):
for f in range(len(files)):
path1 = os.path.join(file_dir, files[f])
readOnePolicy(path1)
| <mask token>
def readOnePolicy(path2):
ethic_set = wn.synsets('ethic')
standard_set = wn.synsets('standard')
privacy_set = wn.synsets('privacy')
education_set = wn.synsets('education')
investment_set = wn.synsets('investment')
application_set = wn.synsets('application')
content = ''
with codecs.open(path2, 'r', encoding=u'utf-8', errors='ignore') as fr:
content = fr.read()
content = content.split()
stop_words = ''
with open('stopWords.txt', 'r') as f2:
stop_words = f2.read()
stop_words = stop_words.split()
ethic_max_prob = 0
standard_max_prob = 0
privacy_max_prob = 0
education_max_prob = 0
investment_max_prob = 0
application_max_prob = 0
for i in range(len(content)):
contentSyns = []
if content[i] not in stop_words:
if not content[i].isnumeric():
contentSyns = wn.synsets(content[i])
if len(contentSyns) > 0:
ethic_prob = max([(0 if e.path_similarity(c) == None else
e.path_similarity(c)) for e in ethic_set for c in
contentSyns])
standard_prob = max([(0 if s.path_similarity(c) == None
else s.path_similarity(c)) for s in standard_set for
c in contentSyns])
privacy_prob = max([(0 if p.path_similarity(c) == None else
p.path_similarity(c)) for p in privacy_set for c in
contentSyns])
education_prob = max([(0 if edu.path_similarity(c) ==
None else edu.path_similarity(c)) for edu in
education_set for c in contentSyns])
investment_prob = max([(0 if i.path_similarity(c) ==
None else i.path_similarity(c)) for i in
investment_set for c in contentSyns])
application_prob = max([(0 if a.path_similarity(c) ==
None else a.path_similarity(c)) for a in
application_set for c in contentSyns])
if ethic_prob > ethic_max_prob:
ethic_max_prob = ethic_prob
if standard_prob > standard_max_prob:
standard_max_prob = standard_prob
if privacy_prob > privacy_max_prob:
privacy_max_prob = privacy_prob
if education_prob > education_max_prob:
education_max_prob = education_prob
if investment_prob > investment_max_prob:
investment_max_prob = investment_prob
if application_prob > application_max_prob:
application_max_prob = application_prob
print(path2, ' ', ethic_max_prob, ' ', standard_max_prob, ' ',
privacy_max_prob, ' ', education_max_prob, ' ',
investment_max_prob, ' ', application_max_prob)
file_dir = 'txt'
for root, dirs, files in os.walk(file_dir):
for f in range(len(files)):
path1 = os.path.join(file_dir, files[f])
readOnePolicy(path1)
| from nltk.corpus import wordnet as wn
import os
import codecs
def readOnePolicy(path2):
ethic_set = wn.synsets('ethic')
standard_set = wn.synsets('standard')
privacy_set = wn.synsets('privacy')
education_set = wn.synsets('education')
investment_set = wn.synsets('investment')
application_set = wn.synsets('application')
content = ''
with codecs.open(path2, 'r', encoding=u'utf-8', errors='ignore') as fr:
content = fr.read()
content = content.split()
stop_words = ''
with open('stopWords.txt', 'r') as f2:
stop_words = f2.read()
stop_words = stop_words.split()
ethic_max_prob = 0
standard_max_prob = 0
privacy_max_prob = 0
education_max_prob = 0
investment_max_prob = 0
application_max_prob = 0
for i in range(len(content)):
contentSyns = []
if content[i] not in stop_words:
if not content[i].isnumeric():
contentSyns = wn.synsets(content[i])
if len(contentSyns) > 0:
ethic_prob = max([(0 if e.path_similarity(c) == None else
e.path_similarity(c)) for e in ethic_set for c in
contentSyns])
standard_prob = max([(0 if s.path_similarity(c) == None
else s.path_similarity(c)) for s in standard_set for
c in contentSyns])
privacy_prob = max([(0 if p.path_similarity(c) == None else
p.path_similarity(c)) for p in privacy_set for c in
contentSyns])
education_prob = max([(0 if edu.path_similarity(c) ==
None else edu.path_similarity(c)) for edu in
education_set for c in contentSyns])
investment_prob = max([(0 if i.path_similarity(c) ==
None else i.path_similarity(c)) for i in
investment_set for c in contentSyns])
application_prob = max([(0 if a.path_similarity(c) ==
None else a.path_similarity(c)) for a in
application_set for c in contentSyns])
if ethic_prob > ethic_max_prob:
ethic_max_prob = ethic_prob
if standard_prob > standard_max_prob:
standard_max_prob = standard_prob
if privacy_prob > privacy_max_prob:
privacy_max_prob = privacy_prob
if education_prob > education_max_prob:
education_max_prob = education_prob
if investment_prob > investment_max_prob:
investment_max_prob = investment_prob
if application_prob > application_max_prob:
application_max_prob = application_prob
print(path2, ' ', ethic_max_prob, ' ', standard_max_prob, ' ',
privacy_max_prob, ' ', education_max_prob, ' ',
investment_max_prob, ' ', application_max_prob)
file_dir = 'txt'
for root, dirs, files in os.walk(file_dir):
for f in range(len(files)):
path1 = os.path.join(file_dir, files[f])
readOnePolicy(path1)
| #####################将政策文件中的内容抽取出来:标准、伦理、 3部分内容##########################
###########step 1:把3部分内容找到近义词,组成一个词表######
###########step 2:把文件与词表相匹配,判断文件到底在讲啥######
from nltk.corpus import wordnet as wn
import os
import codecs
# goods = wn.synsets('beautiful')
# beautifuls = wn.synsets('pretty')
# bads = wn.synsets('standard')
# print('good和bad的语义相似度为: ', max([0 if good.path_similarity(bad) == None else good.path_similarity(bad) for good in goods for bad in bads]))
def readOnePolicy(path2):
ethic_set = wn.synsets('ethic')
# print('ethic的同义词集为:', ethic_set)
# print('ethic的各同义词集包含的单词有:', [ethic.lemma_names() for ethic in ethic_set])
# print('ethic的各同义词集的具体定义是:',[dog.definition() for dog in ethic_set])
# print('ethic的各同义词集的例子是:',[dog.examples() for dog in ethic_set])
standard_set = wn.synsets('standard')
privacy_set = wn.synsets('privacy')
education_set = wn.synsets('education')
investment_set = wn.synsets('investment')
application_set = wn.synsets('application')
content=''
# with open(path2,'r',encoding='UTF-8') as f1:
# with open(path2, 'r', encoding='UTF-8') as f1:
with codecs.open(path2, 'r', encoding=u'utf-8', errors='ignore') as fr:###这里用codecs防止编码出错
content=fr.read()
content=content.split()
# print(type(content))
# content = wn.synsets('standard')
# print('good和beautiful的语义相似度为: ', max([0 if one_ethic.path_similarity(one_word) == None else one_ethic.path_similarity(one_word) for one_ethic in ethic_set for one_word in content]))
#
# for ethic in ethic_set:
# # print(type(ethic.lemma_names()))##list
# for one_word in range(len(ethic.lemma_names())):
# print(ethic.lemma_names()[one_word])
# print('content和ethic的语义相似度为: ', max([0 if good.path_similarity(beautiful) == None else good.path_similarity(beautiful) for good in goods for beautiful in beautifuls]))
stop_words=''
with open('stopWords.txt','r') as f2:
stop_words=f2.read()
stop_words=stop_words.split()
ethic_max_prob = 0
standard_max_prob = 0
privacy_max_prob = 0
education_max_prob = 0
investment_max_prob = 0
application_max_prob = 0
for i in range(len(content)):
contentSyns=[]
if content[i] not in stop_words:
if not content[i].isnumeric():
# print(content[i],' content[i]')
contentSyns=wn.synsets(content[i])
# print(contentSyns,' contentsyns')###contentSyns有些是空的[],下面max()会报错
if len(contentSyns)>0:
ethic_prob=max([0 if e.path_similarity(c) == None else e.path_similarity(c) for e in ethic_set for c in contentSyns])
standard_prob = max([0 if s.path_similarity(c) == None else s.path_similarity(c) for s in standard_set for c in contentSyns])
privacy_prob = max([0 if p.path_similarity(c) == None else p.path_similarity(c) for p in privacy_set for c in contentSyns])
education_prob = max([0 if edu.path_similarity(c) == None else edu.path_similarity(c) for edu in education_set for c in contentSyns])
investment_prob = max([0 if i.path_similarity(c) == None else i.path_similarity(c) for i in investment_set for c in contentSyns])
application_prob = max([0 if a.path_similarity(c) == None else a.path_similarity(c) for a in application_set for c in contentSyns])
if ethic_prob>ethic_max_prob:
ethic_max_prob=ethic_prob
if standard_prob>standard_max_prob:
standard_max_prob=standard_prob
if privacy_prob>privacy_max_prob:
privacy_max_prob=privacy_prob
if education_prob > education_max_prob:
education_max_prob = education_prob
if investment_prob > investment_max_prob:
investment_max_prob = investment_prob
if application_prob > application_max_prob:
application_max_prob = application_prob
# print(max_prob,' 概率')
# print(ethic_max_prob,' ethic_max_prob')
# print(standard_max_prob,' standard_max_prob')
# print(privacy_max_prob,' privacy_max_prob')
print(path2,' ',ethic_max_prob,' ',standard_max_prob,' ',privacy_max_prob,' ',education_max_prob,' ',investment_max_prob,' ',application_max_prob)
file_dir = r"txt"
for root, dirs, files in os.walk(file_dir):
for f in range(len(files)):
path1=os.path.join(file_dir,files[f])
# print(path1,' doc_name')
readOnePolicy(path1)
# with open(path1, 'r') as f1:
# content = f1.read()
| [
1,
2,
3,
4,
5
] |
2,059 | 746e0895f0fb971156e778cbff20317cc88441f1 | <mask token>
| <mask token>
st.set_option('deprecation.showfileUploaderEncoding', False)
np.set_printoptions(suppress=True)
<mask token>
st.title('Leaf Disease Detection Using Machine Learning')
<mask token>
if uploaded_file is not None:
image = Image.open(uploaded_file)
size = 224, 224
image = ImageOps.fit(image, size, Image.ANTIALIAS)
image_array = np.asarray(image)
st.image(image, caption='Uploaded Image.', width=300)
normalized_image_array = image_array.astype(np.float32) / 127.0 - 1
data[0] = normalized_image_array
prediction = model.predict(data)
data = np.rint(prediction)
print(data)
if data[0][0] == 1:
st.write('Grape___Black_rot')
if data[0][1] == 1:
st.write('Grape___Esca_(Black_Measles)')
if data[0][2] == 1:
st.write('Grape___healthy')
if data[0][3] == 1:
st.write('Grape___Leaf_blight')
| <mask token>
st.set_option('deprecation.showfileUploaderEncoding', False)
np.set_printoptions(suppress=True)
model = tensorflow.keras.models.load_model('keras_model.h5')
data = np.ndarray(shape=(1, 224, 224, 3), dtype=np.float32)
st.title('Leaf Disease Detection Using Machine Learning')
uploaded_file = st.file_uploader('Choose an image...', type='JPG')
if uploaded_file is not None:
image = Image.open(uploaded_file)
size = 224, 224
image = ImageOps.fit(image, size, Image.ANTIALIAS)
image_array = np.asarray(image)
st.image(image, caption='Uploaded Image.', width=300)
normalized_image_array = image_array.astype(np.float32) / 127.0 - 1
data[0] = normalized_image_array
prediction = model.predict(data)
data = np.rint(prediction)
print(data)
if data[0][0] == 1:
st.write('Grape___Black_rot')
if data[0][1] == 1:
st.write('Grape___Esca_(Black_Measles)')
if data[0][2] == 1:
st.write('Grape___healthy')
if data[0][3] == 1:
st.write('Grape___Leaf_blight')
| import streamlit as st
import tensorflow.keras
from PIL import Image, ImageOps
import numpy as np
st.set_option('deprecation.showfileUploaderEncoding', False)
np.set_printoptions(suppress=True)
model = tensorflow.keras.models.load_model('keras_model.h5')
data = np.ndarray(shape=(1, 224, 224, 3), dtype=np.float32)
st.title('Leaf Disease Detection Using Machine Learning')
uploaded_file = st.file_uploader('Choose an image...', type='JPG')
if uploaded_file is not None:
image = Image.open(uploaded_file)
size = 224, 224
image = ImageOps.fit(image, size, Image.ANTIALIAS)
image_array = np.asarray(image)
st.image(image, caption='Uploaded Image.', width=300)
normalized_image_array = image_array.astype(np.float32) / 127.0 - 1
data[0] = normalized_image_array
prediction = model.predict(data)
data = np.rint(prediction)
print(data)
if data[0][0] == 1:
st.write('Grape___Black_rot')
if data[0][1] == 1:
st.write('Grape___Esca_(Black_Measles)')
if data[0][2] == 1:
st.write('Grape___healthy')
if data[0][3] == 1:
st.write('Grape___Leaf_blight')
| import streamlit as st
import tensorflow.keras
from PIL import Image, ImageOps
import numpy as np
st.set_option('deprecation.showfileUploaderEncoding', False)
np.set_printoptions(suppress=True)
model = tensorflow.keras.models.load_model('keras_model.h5')
data = np.ndarray(shape=(1, 224, 224, 3), dtype=np.float32)
st.title("Leaf Disease Detection Using Machine Learning")
uploaded_file = st.file_uploader("Choose an image...", type="JPG")
if uploaded_file is not None:
image = Image.open(uploaded_file)
size = (224, 224)
image = ImageOps.fit(image, size, Image.ANTIALIAS)
image_array = np.asarray(image)
#image.show()
st.image(image, caption='Uploaded Image.', width=300)
normalized_image_array = (image_array.astype(np.float32) / 127.0) - 1
data[0] = normalized_image_array
prediction = model.predict(data)
data = np.rint(prediction)
print(data)
if(data[0][0]==1):
st.write("Grape___Black_rot")
if(data[0][1]==1):
st.write("Grape___Esca_(Black_Measles)")
if(data[0][2]==1):
st.write("Grape___healthy")
if(data[0][3]==1):
st.write("Grape___Leaf_blight")
| [
0,
1,
2,
3,
4
] |
2,060 | 506d33587ff6c8b2c3d9bc546307996d2f518d86 | <mask token>
| <mask token>
if not os.path.exists(filepath + pathRGB):
os.makedirs(filepath + pathRGB)
backSubInstance.setConfig('sample.cfg')
for filename in glob.glob(filepath + extension):
pathAndFile = os.path.splitext(filename)[0]
latestFilename = ntpath.basename(pathAndFile)
image = cv2.imread(filepath + latestFilename + '.jpg', cv2.
CV_LOAD_IMAGE_COLOR)
print(latestFilename)
diffImage = backSubInstance.getDiff(image)
resultFileName = filepath + pathRGB + latestFilename + 'motion' + str(
batchCount) + '.jpg'
cv2.imwrite(resultFileName, diffImage)
batchCount += 1
| <mask token>
filepath = './tl3Pictures/'
pathRGB = '.diff/'
extension = '*.jpg'
batchCount = 0
backSubInstance = backSub()
if not os.path.exists(filepath + pathRGB):
os.makedirs(filepath + pathRGB)
backSubInstance.setConfig('sample.cfg')
for filename in glob.glob(filepath + extension):
pathAndFile = os.path.splitext(filename)[0]
latestFilename = ntpath.basename(pathAndFile)
image = cv2.imread(filepath + latestFilename + '.jpg', cv2.
CV_LOAD_IMAGE_COLOR)
print(latestFilename)
diffImage = backSubInstance.getDiff(image)
resultFileName = filepath + pathRGB + latestFilename + 'motion' + str(
batchCount) + '.jpg'
cv2.imwrite(resultFileName, diffImage)
batchCount += 1
| import cv2
import numpy
import os
import glob
import ntpath
from backSub import *
from ConfigParser import SafeConfigParser
filepath = './tl3Pictures/'
pathRGB = '.diff/'
extension = '*.jpg'
batchCount = 0
backSubInstance = backSub()
if not os.path.exists(filepath + pathRGB):
os.makedirs(filepath + pathRGB)
backSubInstance.setConfig('sample.cfg')
for filename in glob.glob(filepath + extension):
pathAndFile = os.path.splitext(filename)[0]
latestFilename = ntpath.basename(pathAndFile)
image = cv2.imread(filepath + latestFilename + '.jpg', cv2.
CV_LOAD_IMAGE_COLOR)
print(latestFilename)
diffImage = backSubInstance.getDiff(image)
resultFileName = filepath + pathRGB + latestFilename + 'motion' + str(
batchCount) + '.jpg'
cv2.imwrite(resultFileName, diffImage)
batchCount += 1
| import cv2
import numpy
import os
import glob
import ntpath
from backSub import *
from ConfigParser import SafeConfigParser
filepath = "./tl3Pictures/" # where the input files are
pathRGB = ".diff/" # where the result is saved
extension = "*.jpg" # only jpg files considered
batchCount = 0
backSubInstance = backSub()
if not os.path.exists(filepath + pathRGB):
os.makedirs(filepath+pathRGB) #create the result folder if it
# is not there
backSubInstance.setConfig('sample.cfg') # load the backSub parameters
# from the configuration file
for filename in glob.glob(filepath + extension):
#print(filename) #full file name and path
pathAndFile = os.path.splitext(filename)[0]
#print(pathAndFile) #file name and path without extension
latestFilename = ntpath.basename(pathAndFile)
#print(latestFilename) #only file name
image = cv2.imread(filepath + latestFilename + ".jpg",\
cv2.CV_LOAD_IMAGE_COLOR) #read the image from the source
print(latestFilename)
diffImage = backSubInstance.getDiff(image) # get the difference image
resultFileName = filepath + pathRGB + latestFilename + "motion"+ \
str(batchCount) + ".jpg" #contruct the path where to save diffImage
cv2.imwrite(resultFileName, diffImage) # write the image to the
# destination
batchCount +=1
| [
0,
1,
2,
3,
4
] |
2,061 | 1aa01845ab98005b1fee33b4fc153bb029e450e0 | <mask token>
def createNewDataFrame():
columns = ['document_id', 'content', 'cat', 'subcat']
df_ = pd.DataFrame(columns=columns)
return df_
def getcategories(foldername):
cats = foldername.split('_')
print('The cats are ', cats, len(cats))
cat = ''
sub = ''
if len(cats) == 1:
cat = cats[0]
sub = ''
if len(cats) == 2:
cat = cats[0]
sub = cats[1]
if len(cats) == 3:
cat = cats[0] + '/' + cats[1]
sub = cats[2]
if len(cats) == 4:
cat = cats[0] + '/' + cats[1]
sub = cats[2] + '/' + cats[3]
return cat, sub
<mask token>
| <mask token>
def createNewDataFrame():
columns = ['document_id', 'content', 'cat', 'subcat']
df_ = pd.DataFrame(columns=columns)
return df_
def getcategories(foldername):
cats = foldername.split('_')
print('The cats are ', cats, len(cats))
cat = ''
sub = ''
if len(cats) == 1:
cat = cats[0]
sub = ''
if len(cats) == 2:
cat = cats[0]
sub = cats[1]
if len(cats) == 3:
cat = cats[0] + '/' + cats[1]
sub = cats[2]
if len(cats) == 4:
cat = cats[0] + '/' + cats[1]
sub = cats[2] + '/' + cats[3]
return cat, sub
global df
<mask token>
for item in paths:
pdffolders = glob.glob(item + '/*.pdf_work')
cat, subcat = getcategories(item.split('/')[-2])
for eachpdffolder in pdffolders:
doc_id = eachpdffolder.split('/')[-1].split('.')[0]
textfile = glob.glob(eachpdffolder + 'page_*[^_6].txt')
if len(textfile) < 2:
with open(eachpdffolder + '/page_0001.txt', 'r') as myfile0:
content = myfile0.read()
else:
with open(eachpdffolder + '/page_0001.txt', 'r') as myfile:
content = myfile.read()
with open(eachpdffolder + '/page_0002.txt', 'r') as myfile2:
content = content + myfile2.read()
df = df.append([{'document_id': doc_id, 'content': content, 'cat':
cat, 'subcat': subcat}], ignore_index=True)
df.to_csv('../corpus/Full_corpus_fromClientFolder.csv')
| <mask token>
def createNewDataFrame():
columns = ['document_id', 'content', 'cat', 'subcat']
df_ = pd.DataFrame(columns=columns)
return df_
def getcategories(foldername):
cats = foldername.split('_')
print('The cats are ', cats, len(cats))
cat = ''
sub = ''
if len(cats) == 1:
cat = cats[0]
sub = ''
if len(cats) == 2:
cat = cats[0]
sub = cats[1]
if len(cats) == 3:
cat = cats[0] + '/' + cats[1]
sub = cats[2]
if len(cats) == 4:
cat = cats[0] + '/' + cats[1]
sub = cats[2] + '/' + cats[3]
return cat, sub
global df
df = createNewDataFrame()
clientFolder = '/home/medilenz/OCR_Process/Firm_logic_july_03/'
paths = glob.glob(clientFolder + '*/')
for item in paths:
pdffolders = glob.glob(item + '/*.pdf_work')
cat, subcat = getcategories(item.split('/')[-2])
for eachpdffolder in pdffolders:
doc_id = eachpdffolder.split('/')[-1].split('.')[0]
textfile = glob.glob(eachpdffolder + 'page_*[^_6].txt')
if len(textfile) < 2:
with open(eachpdffolder + '/page_0001.txt', 'r') as myfile0:
content = myfile0.read()
else:
with open(eachpdffolder + '/page_0001.txt', 'r') as myfile:
content = myfile.read()
with open(eachpdffolder + '/page_0002.txt', 'r') as myfile2:
content = content + myfile2.read()
df = df.append([{'document_id': doc_id, 'content': content, 'cat':
cat, 'subcat': subcat}], ignore_index=True)
df.to_csv('../corpus/Full_corpus_fromClientFolder.csv')
| import pandas as pd
import os, re, sys
import numpy as np
import glob as glob
def createNewDataFrame():
columns = ['document_id', 'content', 'cat', 'subcat']
df_ = pd.DataFrame(columns=columns)
return df_
def getcategories(foldername):
cats = foldername.split('_')
print('The cats are ', cats, len(cats))
cat = ''
sub = ''
if len(cats) == 1:
cat = cats[0]
sub = ''
if len(cats) == 2:
cat = cats[0]
sub = cats[1]
if len(cats) == 3:
cat = cats[0] + '/' + cats[1]
sub = cats[2]
if len(cats) == 4:
cat = cats[0] + '/' + cats[1]
sub = cats[2] + '/' + cats[3]
return cat, sub
global df
df = createNewDataFrame()
clientFolder = '/home/medilenz/OCR_Process/Firm_logic_july_03/'
paths = glob.glob(clientFolder + '*/')
for item in paths:
pdffolders = glob.glob(item + '/*.pdf_work')
cat, subcat = getcategories(item.split('/')[-2])
for eachpdffolder in pdffolders:
doc_id = eachpdffolder.split('/')[-1].split('.')[0]
textfile = glob.glob(eachpdffolder + 'page_*[^_6].txt')
if len(textfile) < 2:
with open(eachpdffolder + '/page_0001.txt', 'r') as myfile0:
content = myfile0.read()
else:
with open(eachpdffolder + '/page_0001.txt', 'r') as myfile:
content = myfile.read()
with open(eachpdffolder + '/page_0002.txt', 'r') as myfile2:
content = content + myfile2.read()
df = df.append([{'document_id': doc_id, 'content': content, 'cat':
cat, 'subcat': subcat}], ignore_index=True)
df.to_csv('../corpus/Full_corpus_fromClientFolder.csv')
|
# coding: utf-8
# In[1]:
import pandas as pd
import os,re,sys
import numpy as np
import glob as glob
# In[2]:
def createNewDataFrame():
columns = ['document_id','content','cat','subcat']
df_ = pd.DataFrame(columns=columns)
return(df_)
# In[3]:
def getcategories(foldername):
cats = foldername.split('_')
print("The cats are ", cats,len(cats))
cat =''
sub = ''
if (len(cats) == 1):
cat = cats[0]
sub = ''
if (len(cats) == 2):
cat = cats[0]
sub = cats[1]
if(len(cats) == 3):
cat = cats[0]+'/'+cats[1]
sub = cats[2]
if(len(cats) == 4):
cat = cats[0]+'/'+cats[1]
sub = cats[2]+'/'+cats[3]
return(cat,sub)
# In[4]:
global df
df = createNewDataFrame()
clientFolder='/home/medilenz/OCR_Process/Firm_logic_july_03/'
paths = glob.glob(clientFolder+'*/')
for item in paths:
pdffolders = glob.glob(item+'/*.pdf_work')
#print("THe item is ", item)
cat,subcat = getcategories(item.split('/')[-2])
for eachpdffolder in pdffolders:
doc_id=eachpdffolder.split('/')[-1].split('.')[0]
textfile = glob.glob(eachpdffolder+'page_*[^_6].txt')
if(len(textfile) < 2):
with open(eachpdffolder+'/page_0001.txt', 'r') as myfile0:
content = myfile0.read()
else :
with open(eachpdffolder+'/page_0001.txt', 'r') as myfile:
content = myfile.read()
with open(eachpdffolder+'/page_0002.txt', 'r') as myfile2:
content = content + myfile2.read()
df = df.append([{'document_id':doc_id, 'content':content,'cat':cat, 'subcat': subcat}],ignore_index=True)
df.to_csv("../corpus/Full_corpus_fromClientFolder.csv")
| [
2,
3,
4,
5,
6
] |
2,062 | b290763362af96f5af03fa31f4936339cef66a1d | <mask token>
def console_check(csl, f):
if csl == 'playstation-4':
f.write('\tdbo:computingPlatform dbpedia:PlayStation_4.')
if csl == 'playstation-3':
f.write('\tdbo:computingPlatform dbpedia:PlayStation_3.')
if csl == 'playstation-2':
f.write('\tdbo:computingPlatform dbpedia:PlayStation_2.')
if csl == 'playstation':
f.write('\tdbo:computingPlatform dbpedia:PlayStation.')
if csl == 'xbox-one':
f.write('\tdbo:computingPlatform dbpedia:Xbox_One.')
if csl == 'xbox-360':
f.write('\tdbo:computingPlatform dbpedia:Xbox_360.')
if csl == 'switch':
f.write('\tdbo:computingPlatform dbpedia:Nintendo_Switch.')
if csl == 'pc':
f.write('\tdbo:computingPlatform dbpedia:Computer.')
f.write('\n\n')
def initial_warnings():
cprint('Esse programa funciona usando uma API chamada Chicken Coop API.',
'red', attrs=['bold'])
cprint('Essa API pega informações sobre jogos de determinados consoles.',
'red', attrs=['bold'])
cprint('Para que ela rode corretamente, siga as seguintes instruções:',
'cyan', attrs=['bold'])
cprint('Consoles:', 'yellow', attrs=['bold'])
cprint(' Playstation 4 -> playstation-4', 'green', attrs=['bold'])
cprint(' Xbox One -> xbox-one', 'green', attrs=['bold'])
cprint(' Computador -> pc', 'green', attrs=['bold'])
cprint(' Nintendo Switch -> switch', 'green', attrs=['bold'])
cprint('Exemplos de jogos: ', 'yellow', attrs=['bold'])
cprint(' Uncharted: The Lost Legacy', 'green', attrs=['bold'])
cprint(' God of War', 'green', attrs=['bold'])
cprint(' Ori and The Blind Forest', 'green', attrs=['bold'])
cprint(
'Aviso: Os jogos devem ser escritos com o nome exato e os consoles da maneira demonstrada, caso contrário, não funcionará!'
, 'magenta', attrs=['bold'])
print('\n')
def get_and_write(mc, csl):
print(f"Title: {mc['result']['title']}")
print(f"Release Date: {mc['result']['releaseDate']}")
print(f"Score: {mc['result']['score']}")
print(f"Developer: {mc['result']['developer']}\n")
mc_title = mc['result']['title']
mc_score = mc['result']['score']
mc_developer = mc['result']['developer']
rsp = write_file(mc_title, mc_score, mc_developer, mc, csl)
if rsp:
write_file(mc_title, mc_score, mc_developer, mc, csl)
def write_file(title, score, developer, mc, csl):
source = '<https://www.metacritic.com/game/'
aux_title = ''
source = source + csl + '/'
path = Path('gamedeflib_rdf.ttl')
if path.is_file() and os.stat('gamedeflib_rdf.ttl').st_size > 0:
file = open('gamedeflib_rdf.ttl', 'r')
count = 1
for element in file:
jogo = f'_:game{count}\n'
if element == jogo:
count = count + 1
file.close()
file = open('gamedeflib_rdf.ttl', 'a+')
file.write(f'\n_:game{count}\n')
file.write(f'\trdfs:label "{title}";\n')
file.write(f'\tdbp:score {score};\n')
genre_number(mc, file)
publisher_number(mc, file)
file.write(f'\tdbo:developer "{developer}";\n')
aux_title = title.lower()
aux_title = aux_title.replace(':', '')
aux_title = aux_title.replace(' ', '-')
source = source + aux_title + '>'
file.write(f'\tdc:source {source};\n')
console_check(csl, file)
file.close()
else:
file = open('gamedeflib_rdf.ttl', 'w+')
file.write('@prefix dc: \t<http://purl.org/dc/elements/1.1/> .\n')
file.write(
'@prefix rdf:\t<http://www.w3.org/1999/02/22-rdf-syntax-ns#> .\n')
file.write('@prefix rdfs:\t<http://www.w3.org/2000/01/rdf-schema#> .\n'
)
file.write('@prefix foaf:\t<http://xmlns.com/foaf/0.1/> .\n')
file.write('@prefix dbo: <http://dbpedia.org/ontology/> .\n')
file.write('@prefix dbpedia: <http://dbpedia.org/page/> .\n')
file.write('@prefix dbp: <http://dbpedia.org/property/> .\n')
file.write(
"""dbpedia:PlayStation_4
foaf:name "PlayStation 4";
dbo:type dbpedia:Home_video_game_console;
rdfs:label "PlayStation 4".
"""
)
file.write(
"""dbpedia:PlayStation_3
dbo:type dbpedia:Home_video_game_console;
rdfs:label "PlayStation 3".
"""
)
file.write(
"""dbpedia:PlayStation_2
dbo:type dbpedia:Home_video_game_console;
rdfs:label "PlayStation 2".
"""
)
file.write(
"""dbpedia:PlayStation
dbp:type dbpedia:Video_game_console;
rdfs:label "PlayStation".
"""
)
file.write(
"""dbpedia:XBox_One
foaf:name "XBox One";
dbo:type dbpedia:Home_video_game_console;
rdfs:label "XBox One" .
"""
)
file.write(
"""dbpedia:XBox_360
dbo:type dbpedia:Home_video_game_console;
rdfs:label "XBox 360" .
"""
)
file.write(
"""dbpedia:Nintendo_Switch
foaf:name "New Nintendank New Wii U 2.0+";
dbo:type dbpedia:Video_game_hardware;
rdfs:label "Nintendo Switch" .
"""
)
file.write(
"""dbpedia:Computer
dbp:title "Computer";
rdf:type dbo:Device;
rdfs:label "Computer" .
"""
)
return 1
def genre_number(mc, f):
tam = len(mc['result']['genre'])
for x in range(0, tam):
print(f"Genre number {x + 1}: {mc['result']['genre'][x]}")
aux = mc['result']['genre'][x]
f.write(f'\tdbo:genre "{aux}";\n')
def publisher_number(mc, f):
tam = len(mc['result']['publisher'])
for x in range(0, tam):
print(f"Publisher number {x + 1}: {mc['result']['publisher'][x]}")
aux = mc['result']['publisher'][x]
f.write(f'\tdbo:publisher "{aux}";\n')
<mask token>
| <mask token>
def console_check(csl, f):
if csl == 'playstation-4':
f.write('\tdbo:computingPlatform dbpedia:PlayStation_4.')
if csl == 'playstation-3':
f.write('\tdbo:computingPlatform dbpedia:PlayStation_3.')
if csl == 'playstation-2':
f.write('\tdbo:computingPlatform dbpedia:PlayStation_2.')
if csl == 'playstation':
f.write('\tdbo:computingPlatform dbpedia:PlayStation.')
if csl == 'xbox-one':
f.write('\tdbo:computingPlatform dbpedia:Xbox_One.')
if csl == 'xbox-360':
f.write('\tdbo:computingPlatform dbpedia:Xbox_360.')
if csl == 'switch':
f.write('\tdbo:computingPlatform dbpedia:Nintendo_Switch.')
if csl == 'pc':
f.write('\tdbo:computingPlatform dbpedia:Computer.')
f.write('\n\n')
def initial_warnings():
cprint('Esse programa funciona usando uma API chamada Chicken Coop API.',
'red', attrs=['bold'])
cprint('Essa API pega informações sobre jogos de determinados consoles.',
'red', attrs=['bold'])
cprint('Para que ela rode corretamente, siga as seguintes instruções:',
'cyan', attrs=['bold'])
cprint('Consoles:', 'yellow', attrs=['bold'])
cprint(' Playstation 4 -> playstation-4', 'green', attrs=['bold'])
cprint(' Xbox One -> xbox-one', 'green', attrs=['bold'])
cprint(' Computador -> pc', 'green', attrs=['bold'])
cprint(' Nintendo Switch -> switch', 'green', attrs=['bold'])
cprint('Exemplos de jogos: ', 'yellow', attrs=['bold'])
cprint(' Uncharted: The Lost Legacy', 'green', attrs=['bold'])
cprint(' God of War', 'green', attrs=['bold'])
cprint(' Ori and The Blind Forest', 'green', attrs=['bold'])
cprint(
'Aviso: Os jogos devem ser escritos com o nome exato e os consoles da maneira demonstrada, caso contrário, não funcionará!'
, 'magenta', attrs=['bold'])
print('\n')
def get_and_write(mc, csl):
print(f"Title: {mc['result']['title']}")
print(f"Release Date: {mc['result']['releaseDate']}")
print(f"Score: {mc['result']['score']}")
print(f"Developer: {mc['result']['developer']}\n")
mc_title = mc['result']['title']
mc_score = mc['result']['score']
mc_developer = mc['result']['developer']
rsp = write_file(mc_title, mc_score, mc_developer, mc, csl)
if rsp:
write_file(mc_title, mc_score, mc_developer, mc, csl)
def write_file(title, score, developer, mc, csl):
source = '<https://www.metacritic.com/game/'
aux_title = ''
source = source + csl + '/'
path = Path('gamedeflib_rdf.ttl')
if path.is_file() and os.stat('gamedeflib_rdf.ttl').st_size > 0:
file = open('gamedeflib_rdf.ttl', 'r')
count = 1
for element in file:
jogo = f'_:game{count}\n'
if element == jogo:
count = count + 1
file.close()
file = open('gamedeflib_rdf.ttl', 'a+')
file.write(f'\n_:game{count}\n')
file.write(f'\trdfs:label "{title}";\n')
file.write(f'\tdbp:score {score};\n')
genre_number(mc, file)
publisher_number(mc, file)
file.write(f'\tdbo:developer "{developer}";\n')
aux_title = title.lower()
aux_title = aux_title.replace(':', '')
aux_title = aux_title.replace(' ', '-')
source = source + aux_title + '>'
file.write(f'\tdc:source {source};\n')
console_check(csl, file)
file.close()
else:
file = open('gamedeflib_rdf.ttl', 'w+')
file.write('@prefix dc: \t<http://purl.org/dc/elements/1.1/> .\n')
file.write(
'@prefix rdf:\t<http://www.w3.org/1999/02/22-rdf-syntax-ns#> .\n')
file.write('@prefix rdfs:\t<http://www.w3.org/2000/01/rdf-schema#> .\n'
)
file.write('@prefix foaf:\t<http://xmlns.com/foaf/0.1/> .\n')
file.write('@prefix dbo: <http://dbpedia.org/ontology/> .\n')
file.write('@prefix dbpedia: <http://dbpedia.org/page/> .\n')
file.write('@prefix dbp: <http://dbpedia.org/property/> .\n')
file.write(
"""dbpedia:PlayStation_4
foaf:name "PlayStation 4";
dbo:type dbpedia:Home_video_game_console;
rdfs:label "PlayStation 4".
"""
)
file.write(
"""dbpedia:PlayStation_3
dbo:type dbpedia:Home_video_game_console;
rdfs:label "PlayStation 3".
"""
)
file.write(
"""dbpedia:PlayStation_2
dbo:type dbpedia:Home_video_game_console;
rdfs:label "PlayStation 2".
"""
)
file.write(
"""dbpedia:PlayStation
dbp:type dbpedia:Video_game_console;
rdfs:label "PlayStation".
"""
)
file.write(
"""dbpedia:XBox_One
foaf:name "XBox One";
dbo:type dbpedia:Home_video_game_console;
rdfs:label "XBox One" .
"""
)
file.write(
"""dbpedia:XBox_360
dbo:type dbpedia:Home_video_game_console;
rdfs:label "XBox 360" .
"""
)
file.write(
"""dbpedia:Nintendo_Switch
foaf:name "New Nintendank New Wii U 2.0+";
dbo:type dbpedia:Video_game_hardware;
rdfs:label "Nintendo Switch" .
"""
)
file.write(
"""dbpedia:Computer
dbp:title "Computer";
rdf:type dbo:Device;
rdfs:label "Computer" .
"""
)
return 1
def genre_number(mc, f):
tam = len(mc['result']['genre'])
for x in range(0, tam):
print(f"Genre number {x + 1}: {mc['result']['genre'][x]}")
aux = mc['result']['genre'][x]
f.write(f'\tdbo:genre "{aux}";\n')
def publisher_number(mc, f):
tam = len(mc['result']['publisher'])
for x in range(0, tam):
print(f"Publisher number {x + 1}: {mc['result']['publisher'][x]}")
aux = mc['result']['publisher'][x]
f.write(f'\tdbo:publisher "{aux}";\n')
def main():
print('Digite o console do jogo desejado: ', end='')
console = str(input())
print('Digite o título do jogo desejado: ', end='')
title = str(input())
try:
url = 'https://chicken-coop.p.rapidapi.com/games/' + title
querystring = {'platform': console}
headers = {'x-rapidapi-host': 'chicken-coop.p.rapidapi.com',
'x-rapidapi-key':
'c3df04dcc0msh2d6e3cc8ccd93dep1c9851jsn230c81227b26'}
response = requests.request('GET', url, headers=headers, params=
querystring)
metacritic = json.loads(response.text)
if metacritic['result'] == 'No result':
print(
'\nAlguma informação digitada está incorreta. Tente novamente.'
)
else:
get_and_write(metacritic, console)
except Exception as err:
print(
'Algum erro desconhecido ocorreu durante a execucação.\nTente novamente.'
)
cprint(err, 'red')
<mask token>
| <mask token>
def console_check(csl, f):
if csl == 'playstation-4':
f.write('\tdbo:computingPlatform dbpedia:PlayStation_4.')
if csl == 'playstation-3':
f.write('\tdbo:computingPlatform dbpedia:PlayStation_3.')
if csl == 'playstation-2':
f.write('\tdbo:computingPlatform dbpedia:PlayStation_2.')
if csl == 'playstation':
f.write('\tdbo:computingPlatform dbpedia:PlayStation.')
if csl == 'xbox-one':
f.write('\tdbo:computingPlatform dbpedia:Xbox_One.')
if csl == 'xbox-360':
f.write('\tdbo:computingPlatform dbpedia:Xbox_360.')
if csl == 'switch':
f.write('\tdbo:computingPlatform dbpedia:Nintendo_Switch.')
if csl == 'pc':
f.write('\tdbo:computingPlatform dbpedia:Computer.')
f.write('\n\n')
def initial_warnings():
cprint('Esse programa funciona usando uma API chamada Chicken Coop API.',
'red', attrs=['bold'])
cprint('Essa API pega informações sobre jogos de determinados consoles.',
'red', attrs=['bold'])
cprint('Para que ela rode corretamente, siga as seguintes instruções:',
'cyan', attrs=['bold'])
cprint('Consoles:', 'yellow', attrs=['bold'])
cprint(' Playstation 4 -> playstation-4', 'green', attrs=['bold'])
cprint(' Xbox One -> xbox-one', 'green', attrs=['bold'])
cprint(' Computador -> pc', 'green', attrs=['bold'])
cprint(' Nintendo Switch -> switch', 'green', attrs=['bold'])
cprint('Exemplos de jogos: ', 'yellow', attrs=['bold'])
cprint(' Uncharted: The Lost Legacy', 'green', attrs=['bold'])
cprint(' God of War', 'green', attrs=['bold'])
cprint(' Ori and The Blind Forest', 'green', attrs=['bold'])
cprint(
'Aviso: Os jogos devem ser escritos com o nome exato e os consoles da maneira demonstrada, caso contrário, não funcionará!'
, 'magenta', attrs=['bold'])
print('\n')
def get_and_write(mc, csl):
print(f"Title: {mc['result']['title']}")
print(f"Release Date: {mc['result']['releaseDate']}")
print(f"Score: {mc['result']['score']}")
print(f"Developer: {mc['result']['developer']}\n")
mc_title = mc['result']['title']
mc_score = mc['result']['score']
mc_developer = mc['result']['developer']
rsp = write_file(mc_title, mc_score, mc_developer, mc, csl)
if rsp:
write_file(mc_title, mc_score, mc_developer, mc, csl)
def write_file(title, score, developer, mc, csl):
source = '<https://www.metacritic.com/game/'
aux_title = ''
source = source + csl + '/'
path = Path('gamedeflib_rdf.ttl')
if path.is_file() and os.stat('gamedeflib_rdf.ttl').st_size > 0:
file = open('gamedeflib_rdf.ttl', 'r')
count = 1
for element in file:
jogo = f'_:game{count}\n'
if element == jogo:
count = count + 1
file.close()
file = open('gamedeflib_rdf.ttl', 'a+')
file.write(f'\n_:game{count}\n')
file.write(f'\trdfs:label "{title}";\n')
file.write(f'\tdbp:score {score};\n')
genre_number(mc, file)
publisher_number(mc, file)
file.write(f'\tdbo:developer "{developer}";\n')
aux_title = title.lower()
aux_title = aux_title.replace(':', '')
aux_title = aux_title.replace(' ', '-')
source = source + aux_title + '>'
file.write(f'\tdc:source {source};\n')
console_check(csl, file)
file.close()
else:
file = open('gamedeflib_rdf.ttl', 'w+')
file.write('@prefix dc: \t<http://purl.org/dc/elements/1.1/> .\n')
file.write(
'@prefix rdf:\t<http://www.w3.org/1999/02/22-rdf-syntax-ns#> .\n')
file.write('@prefix rdfs:\t<http://www.w3.org/2000/01/rdf-schema#> .\n'
)
file.write('@prefix foaf:\t<http://xmlns.com/foaf/0.1/> .\n')
file.write('@prefix dbo: <http://dbpedia.org/ontology/> .\n')
file.write('@prefix dbpedia: <http://dbpedia.org/page/> .\n')
file.write('@prefix dbp: <http://dbpedia.org/property/> .\n')
file.write(
"""dbpedia:PlayStation_4
foaf:name "PlayStation 4";
dbo:type dbpedia:Home_video_game_console;
rdfs:label "PlayStation 4".
"""
)
file.write(
"""dbpedia:PlayStation_3
dbo:type dbpedia:Home_video_game_console;
rdfs:label "PlayStation 3".
"""
)
file.write(
"""dbpedia:PlayStation_2
dbo:type dbpedia:Home_video_game_console;
rdfs:label "PlayStation 2".
"""
)
file.write(
"""dbpedia:PlayStation
dbp:type dbpedia:Video_game_console;
rdfs:label "PlayStation".
"""
)
file.write(
"""dbpedia:XBox_One
foaf:name "XBox One";
dbo:type dbpedia:Home_video_game_console;
rdfs:label "XBox One" .
"""
)
file.write(
"""dbpedia:XBox_360
dbo:type dbpedia:Home_video_game_console;
rdfs:label "XBox 360" .
"""
)
file.write(
"""dbpedia:Nintendo_Switch
foaf:name "New Nintendank New Wii U 2.0+";
dbo:type dbpedia:Video_game_hardware;
rdfs:label "Nintendo Switch" .
"""
)
file.write(
"""dbpedia:Computer
dbp:title "Computer";
rdf:type dbo:Device;
rdfs:label "Computer" .
"""
)
return 1
def genre_number(mc, f):
tam = len(mc['result']['genre'])
for x in range(0, tam):
print(f"Genre number {x + 1}: {mc['result']['genre'][x]}")
aux = mc['result']['genre'][x]
f.write(f'\tdbo:genre "{aux}";\n')
def publisher_number(mc, f):
tam = len(mc['result']['publisher'])
for x in range(0, tam):
print(f"Publisher number {x + 1}: {mc['result']['publisher'][x]}")
aux = mc['result']['publisher'][x]
f.write(f'\tdbo:publisher "{aux}";\n')
def main():
print('Digite o console do jogo desejado: ', end='')
console = str(input())
print('Digite o título do jogo desejado: ', end='')
title = str(input())
try:
url = 'https://chicken-coop.p.rapidapi.com/games/' + title
querystring = {'platform': console}
headers = {'x-rapidapi-host': 'chicken-coop.p.rapidapi.com',
'x-rapidapi-key':
'c3df04dcc0msh2d6e3cc8ccd93dep1c9851jsn230c81227b26'}
response = requests.request('GET', url, headers=headers, params=
querystring)
metacritic = json.loads(response.text)
if metacritic['result'] == 'No result':
print(
'\nAlguma informação digitada está incorreta. Tente novamente.'
)
else:
get_and_write(metacritic, console)
except Exception as err:
print(
'Algum erro desconhecido ocorreu durante a execucação.\nTente novamente.'
)
cprint(err, 'red')
initial_warnings()
main()
while True:
print('Gostaria de adicionar outro jogo na base RDF: (1 - Sim/0 - Não): ',
end='')
try:
ans = int(input())
if ans == 1:
main()
elif ans == 0:
print('Encerrando o script')
break
else:
print('Valor digitado deve ser 0 ou 1.')
except ValueError as e:
print('Valor foi inserido incorretamente. Tente denovo.')
cprint(e, 'red')
| import requests
import json
from termcolor import cprint
from pathlib import Path
import os
def console_check(csl, f):
if csl == 'playstation-4':
f.write('\tdbo:computingPlatform dbpedia:PlayStation_4.')
if csl == 'playstation-3':
f.write('\tdbo:computingPlatform dbpedia:PlayStation_3.')
if csl == 'playstation-2':
f.write('\tdbo:computingPlatform dbpedia:PlayStation_2.')
if csl == 'playstation':
f.write('\tdbo:computingPlatform dbpedia:PlayStation.')
if csl == 'xbox-one':
f.write('\tdbo:computingPlatform dbpedia:Xbox_One.')
if csl == 'xbox-360':
f.write('\tdbo:computingPlatform dbpedia:Xbox_360.')
if csl == 'switch':
f.write('\tdbo:computingPlatform dbpedia:Nintendo_Switch.')
if csl == 'pc':
f.write('\tdbo:computingPlatform dbpedia:Computer.')
f.write('\n\n')
def initial_warnings():
cprint('Esse programa funciona usando uma API chamada Chicken Coop API.',
'red', attrs=['bold'])
cprint('Essa API pega informações sobre jogos de determinados consoles.',
'red', attrs=['bold'])
cprint('Para que ela rode corretamente, siga as seguintes instruções:',
'cyan', attrs=['bold'])
cprint('Consoles:', 'yellow', attrs=['bold'])
cprint(' Playstation 4 -> playstation-4', 'green', attrs=['bold'])
cprint(' Xbox One -> xbox-one', 'green', attrs=['bold'])
cprint(' Computador -> pc', 'green', attrs=['bold'])
cprint(' Nintendo Switch -> switch', 'green', attrs=['bold'])
cprint('Exemplos de jogos: ', 'yellow', attrs=['bold'])
cprint(' Uncharted: The Lost Legacy', 'green', attrs=['bold'])
cprint(' God of War', 'green', attrs=['bold'])
cprint(' Ori and The Blind Forest', 'green', attrs=['bold'])
cprint(
'Aviso: Os jogos devem ser escritos com o nome exato e os consoles da maneira demonstrada, caso contrário, não funcionará!'
, 'magenta', attrs=['bold'])
print('\n')
def get_and_write(mc, csl):
print(f"Title: {mc['result']['title']}")
print(f"Release Date: {mc['result']['releaseDate']}")
print(f"Score: {mc['result']['score']}")
print(f"Developer: {mc['result']['developer']}\n")
mc_title = mc['result']['title']
mc_score = mc['result']['score']
mc_developer = mc['result']['developer']
rsp = write_file(mc_title, mc_score, mc_developer, mc, csl)
if rsp:
write_file(mc_title, mc_score, mc_developer, mc, csl)
def write_file(title, score, developer, mc, csl):
source = '<https://www.metacritic.com/game/'
aux_title = ''
source = source + csl + '/'
path = Path('gamedeflib_rdf.ttl')
if path.is_file() and os.stat('gamedeflib_rdf.ttl').st_size > 0:
file = open('gamedeflib_rdf.ttl', 'r')
count = 1
for element in file:
jogo = f'_:game{count}\n'
if element == jogo:
count = count + 1
file.close()
file = open('gamedeflib_rdf.ttl', 'a+')
file.write(f'\n_:game{count}\n')
file.write(f'\trdfs:label "{title}";\n')
file.write(f'\tdbp:score {score};\n')
genre_number(mc, file)
publisher_number(mc, file)
file.write(f'\tdbo:developer "{developer}";\n')
aux_title = title.lower()
aux_title = aux_title.replace(':', '')
aux_title = aux_title.replace(' ', '-')
source = source + aux_title + '>'
file.write(f'\tdc:source {source};\n')
console_check(csl, file)
file.close()
else:
file = open('gamedeflib_rdf.ttl', 'w+')
file.write('@prefix dc: \t<http://purl.org/dc/elements/1.1/> .\n')
file.write(
'@prefix rdf:\t<http://www.w3.org/1999/02/22-rdf-syntax-ns#> .\n')
file.write('@prefix rdfs:\t<http://www.w3.org/2000/01/rdf-schema#> .\n'
)
file.write('@prefix foaf:\t<http://xmlns.com/foaf/0.1/> .\n')
file.write('@prefix dbo: <http://dbpedia.org/ontology/> .\n')
file.write('@prefix dbpedia: <http://dbpedia.org/page/> .\n')
file.write('@prefix dbp: <http://dbpedia.org/property/> .\n')
file.write(
"""dbpedia:PlayStation_4
foaf:name "PlayStation 4";
dbo:type dbpedia:Home_video_game_console;
rdfs:label "PlayStation 4".
"""
)
file.write(
"""dbpedia:PlayStation_3
dbo:type dbpedia:Home_video_game_console;
rdfs:label "PlayStation 3".
"""
)
file.write(
"""dbpedia:PlayStation_2
dbo:type dbpedia:Home_video_game_console;
rdfs:label "PlayStation 2".
"""
)
file.write(
"""dbpedia:PlayStation
dbp:type dbpedia:Video_game_console;
rdfs:label "PlayStation".
"""
)
file.write(
"""dbpedia:XBox_One
foaf:name "XBox One";
dbo:type dbpedia:Home_video_game_console;
rdfs:label "XBox One" .
"""
)
file.write(
"""dbpedia:XBox_360
dbo:type dbpedia:Home_video_game_console;
rdfs:label "XBox 360" .
"""
)
file.write(
"""dbpedia:Nintendo_Switch
foaf:name "New Nintendank New Wii U 2.0+";
dbo:type dbpedia:Video_game_hardware;
rdfs:label "Nintendo Switch" .
"""
)
file.write(
"""dbpedia:Computer
dbp:title "Computer";
rdf:type dbo:Device;
rdfs:label "Computer" .
"""
)
return 1
def genre_number(mc, f):
tam = len(mc['result']['genre'])
for x in range(0, tam):
print(f"Genre number {x + 1}: {mc['result']['genre'][x]}")
aux = mc['result']['genre'][x]
f.write(f'\tdbo:genre "{aux}";\n')
def publisher_number(mc, f):
tam = len(mc['result']['publisher'])
for x in range(0, tam):
print(f"Publisher number {x + 1}: {mc['result']['publisher'][x]}")
aux = mc['result']['publisher'][x]
f.write(f'\tdbo:publisher "{aux}";\n')
def main():
print('Digite o console do jogo desejado: ', end='')
console = str(input())
print('Digite o título do jogo desejado: ', end='')
title = str(input())
try:
url = 'https://chicken-coop.p.rapidapi.com/games/' + title
querystring = {'platform': console}
headers = {'x-rapidapi-host': 'chicken-coop.p.rapidapi.com',
'x-rapidapi-key':
'c3df04dcc0msh2d6e3cc8ccd93dep1c9851jsn230c81227b26'}
response = requests.request('GET', url, headers=headers, params=
querystring)
metacritic = json.loads(response.text)
if metacritic['result'] == 'No result':
print(
'\nAlguma informação digitada está incorreta. Tente novamente.'
)
else:
get_and_write(metacritic, console)
except Exception as err:
print(
'Algum erro desconhecido ocorreu durante a execucação.\nTente novamente.'
)
cprint(err, 'red')
initial_warnings()
main()
while True:
print('Gostaria de adicionar outro jogo na base RDF: (1 - Sim/0 - Não): ',
end='')
try:
ans = int(input())
if ans == 1:
main()
elif ans == 0:
print('Encerrando o script')
break
else:
print('Valor digitado deve ser 0 ou 1.')
except ValueError as e:
print('Valor foi inserido incorretamente. Tente denovo.')
cprint(e, 'red')
| import requests
import json
from termcolor import cprint
from pathlib import Path
import os
def console_check(csl, f):
if csl == 'playstation-4':
f.write('\tdbo:computingPlatform dbpedia:PlayStation_4.')
if csl == 'playstation-3':
f.write('\tdbo:computingPlatform dbpedia:PlayStation_3.')
if csl == 'playstation-2':
f.write('\tdbo:computingPlatform dbpedia:PlayStation_2.')
if csl == 'playstation':
f.write('\tdbo:computingPlatform dbpedia:PlayStation.')
if csl == 'xbox-one':
f.write('\tdbo:computingPlatform dbpedia:Xbox_One.')
if csl == 'xbox-360':
f.write('\tdbo:computingPlatform dbpedia:Xbox_360.')
if csl == 'switch':
f.write('\tdbo:computingPlatform dbpedia:Nintendo_Switch.')
if csl == 'pc':
f.write('\tdbo:computingPlatform dbpedia:Computer.')
f.write('\n\n')
def initial_warnings():
cprint("Esse programa funciona usando uma API chamada Chicken Coop API.", "red", attrs=['bold'])
cprint("Essa API pega informações sobre jogos de determinados consoles.", "red", attrs=['bold'])
cprint("Para que ela rode corretamente, siga as seguintes instruções:", "cyan", attrs=['bold'])
cprint("Consoles:", 'yellow', attrs=['bold'])
cprint(" Playstation 4 -> playstation-4", "green", attrs=['bold'])
cprint(" Xbox One -> xbox-one", "green", attrs=['bold'])
cprint(" Computador -> pc", "green", attrs=['bold'])
cprint(" Nintendo Switch -> switch", "green", attrs=['bold'])
cprint("Exemplos de jogos: ", 'yellow', attrs=['bold'])
cprint(" Uncharted: The Lost Legacy", "green", attrs=['bold'])
cprint(" God of War", "green", attrs=['bold'])
cprint(" Ori and The Blind Forest", "green", attrs=['bold'])
cprint("Aviso: Os jogos devem ser escritos com o nome exato e os consoles da maneira demonstrada,"
" caso contrário, não funcionará!", 'magenta', attrs=['bold'])
print("\n")
def get_and_write(mc, csl):
print(f"Title: {mc['result']['title']}")
print(f"Release Date: {mc['result']['releaseDate']}")
# print(f"Description: {mc['result']['description']}")
print(f"Score: {mc['result']['score']}")
# print(f"Rating: {mc['result']['rating']}")
print(f"Developer: {mc['result']['developer']}\n")
mc_title = mc['result']['title']
# mc_description = mc['result']['description']
mc_score = mc['result']['score']
mc_developer = mc['result']['developer']
rsp = write_file(mc_title, mc_score, mc_developer, mc, csl)
if rsp:
write_file(mc_title, mc_score, mc_developer, mc, csl)
def write_file(title, score, developer, mc, csl):
source = "<https://www.metacritic.com/game/"
aux_title = ''
source = source + csl + '/'
path = Path('gamedeflib_rdf.ttl')
if path.is_file() and os.stat('gamedeflib_rdf.ttl').st_size > 0:
file = open('gamedeflib_rdf.ttl', 'r')
count = 1
for element in file:
jogo = f'_:game{count}\n'
if element == jogo:
count = count + 1
file.close()
file = open('gamedeflib_rdf.ttl', 'a+')
file.write(f'\n_:game{count}\n')
file.write(f'\trdfs:label "{title}";\n')
file.write(f'\tdbp:score {score};\n')
genre_number(mc, file)
publisher_number(mc, file)
file.write(f'\tdbo:developer "{developer}";\n')
aux_title = title.lower()
aux_title = aux_title.replace(":", "")
aux_title = aux_title.replace(" ", "-")
source = source + aux_title + ">"
file.write(f'\tdc:source {source};\n')
console_check(csl, file)
file.close()
else:
file = open('gamedeflib_rdf.ttl', 'w+')
file.write("@prefix dc: <http://purl.org/dc/elements/1.1/> .\n")
file.write("@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .\n")
file.write("@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .\n")
file.write("@prefix foaf: <http://xmlns.com/foaf/0.1/> .\n")
file.write("@prefix dbo: <http://dbpedia.org/ontology/> .\n")
file.write("@prefix dbpedia: <http://dbpedia.org/page/> .\n")
file.write("@prefix dbp: <http://dbpedia.org/property/> .\n")
file.write('dbpedia:PlayStation_4\n'
'\tfoaf:name "PlayStation 4";\n'
'\tdbo:type dbpedia:Home_video_game_console;\n'
'\trdfs:label "PlayStation 4".\n\n')
file.write('dbpedia:PlayStation_3\n'
'\tdbo:type dbpedia:Home_video_game_console;\n'
'\trdfs:label "PlayStation 3".\n\n')
file.write('dbpedia:PlayStation_2\n'
'\tdbo:type dbpedia:Home_video_game_console;\n'
'\trdfs:label "PlayStation 2".\n\n')
file.write('dbpedia:PlayStation\n'
'\tdbp:type dbpedia:Video_game_console;\n'
'\trdfs:label "PlayStation".\n\n')
file.write('dbpedia:XBox_One\n'
'\tfoaf:name "XBox One";\n'
'\tdbo:type dbpedia:Home_video_game_console;\n'
'\trdfs:label "XBox One" .\n\n')
file.write('dbpedia:XBox_360\n'
'\tdbo:type dbpedia:Home_video_game_console;\n'
'\trdfs:label "XBox 360" .\n\n')
file.write('dbpedia:Nintendo_Switch\n'
'\tfoaf:name "New Nintendank New Wii U 2.0+";\n'
'\tdbo:type dbpedia:Video_game_hardware;\n'
'\trdfs:label "Nintendo Switch" .\n\n')
file.write('dbpedia:Computer\n'
'\tdbp:title "Computer";\n'
'\trdf:type dbo:Device;\n'
'\trdfs:label "Computer" .\n\n')
return 1
def genre_number(mc, f):
tam = len(mc['result']['genre'])
for x in range(0, tam):
print(f"Genre number {x+1}: {mc['result']['genre'][x]}")
aux = mc['result']['genre'][x]
f.write(f'\tdbo:genre "{aux}";\n')
def publisher_number(mc, f):
tam = len(mc['result']['publisher'])
for x in range(0, tam):
print(f"Publisher number {x + 1}: {mc['result']['publisher'][x]}")
aux = mc['result']['publisher'][x]
f.write(f'\tdbo:publisher "{aux}";\n')
def main():
print('Digite o console do jogo desejado: ', end='')
console = str(input())
print('Digite o título do jogo desejado: ', end='')
title = str(input())
try:
url = "https://chicken-coop.p.rapidapi.com/games/"+title
querystring = {"platform": console}
headers = {
'x-rapidapi-host': "chicken-coop.p.rapidapi.com",
'x-rapidapi-key': "c3df04dcc0msh2d6e3cc8ccd93dep1c9851jsn230c81227b26"
}
response = requests.request("GET", url, headers=headers, params=querystring)
metacritic = json.loads(response.text)
if metacritic['result'] == 'No result':
print("\nAlguma informação digitada está incorreta. Tente novamente.")
else:
get_and_write(metacritic, console)
except Exception as err:
print("Algum erro desconhecido ocorreu durante a execucação.\nTente novamente.")
cprint(err, 'red')
initial_warnings()
main()
while True:
print('Gostaria de adicionar outro jogo na base RDF: (1 - Sim/0 - Não): ', end='')
try:
ans = int(input())
if ans == 1:
main()
elif ans == 0:
print('Encerrando o script')
break
else:
print('Valor digitado deve ser 0 ou 1.')
except ValueError as e:
print('Valor foi inserido incorretamente. Tente denovo.')
cprint(e, 'red')
| [
6,
7,
8,
9,
10
] |
2,063 | faf4f4d26236ac555594ef6913a0d43c3516f1f2 | /Users/andreilyskov/anaconda/lib/python3.5/sre_compile.py | null | null | null | null | [
0
] |
2,064 | d8af8e36bd00fbfc966ef1c4dd0c6385cbb019ee | <mask token>
def read_file(string_object):
""" Opens and reads through a file, returning none if it isnt found """
try:
return open(string_object, 'r')
except FileNotFoundError:
return None
<mask token>
def populate_weight_tuple_list(list_object):
""" Takes elements from a list containing course part names and their weights and returns a list of tuples containing those elements """
tuple_list = []
for i in range(len(list_object[0])):
weight_tuple = list_object[0][i], float(list_object[1][i])
tuple_list.append(weight_tuple)
return tuple_list
def populate_grades_tuple_list(list_object1, list_object2):
""" Takes elements from a list containing student emails and a list containing grades and returns a list of corresponding emails and grades in tuples """
tuple_list = []
for i in range(len(list_object1)):
grades_tuple = list_object1[i], list_object2[i]
tuple_list.append(grades_tuple)
return tuple_list
def calculate_final_grade(list_object1, list_object2):
""" Takes lists containing information about grades and course weights and calculates the final grade from the course """
list_object1 = [list(element) for element in list_object1]
for i in range(len(list_object1)):
final_grade = 0.0
for j in range(len(list_object1[i][1])):
final_grade += list_object1[i][1][j] * list_object2[j][1]
list_object1[i].append(final_grade)
list_object1 = [tuple(element) for element in list_object1]
return list_object1
<mask token>
| <mask token>
def read_file(string_object):
""" Opens and reads through a file, returning none if it isnt found """
try:
return open(string_object, 'r')
except FileNotFoundError:
return None
<mask token>
def populate_weight_tuple_list(list_object):
""" Takes elements from a list containing course part names and their weights and returns a list of tuples containing those elements """
tuple_list = []
for i in range(len(list_object[0])):
weight_tuple = list_object[0][i], float(list_object[1][i])
tuple_list.append(weight_tuple)
return tuple_list
def populate_grades_tuple_list(list_object1, list_object2):
""" Takes elements from a list containing student emails and a list containing grades and returns a list of corresponding emails and grades in tuples """
tuple_list = []
for i in range(len(list_object1)):
grades_tuple = list_object1[i], list_object2[i]
tuple_list.append(grades_tuple)
return tuple_list
def calculate_final_grade(list_object1, list_object2):
""" Takes lists containing information about grades and course weights and calculates the final grade from the course """
list_object1 = [list(element) for element in list_object1]
for i in range(len(list_object1)):
final_grade = 0.0
for j in range(len(list_object1[i][1])):
final_grade += list_object1[i][1][j] * list_object2[j][1]
list_object1[i].append(final_grade)
list_object1 = [tuple(element) for element in list_object1]
return list_object1
def print_results(list_object1, list_object2):
""" Takes lists containing information about course parts and student grades and prints them in a formatted menu """
STUDENT_COLUMN = 16
GENERAL_COLUMN = 14
print()
print('{:>{}}'.format('Student ID', STUDENT_COLUMN), end='')
for i in range(len(list_object1)):
print('{:>{}}'.format(list_object1[i][0], GENERAL_COLUMN), end='')
print('{:>{}}'.format('Course grade', GENERAL_COLUMN))
for tuple_element in list_object2:
print('{:>{}}'.format(tuple_element[0], STUDENT_COLUMN), end='')
for i, value in enumerate(tuple_element[1]):
print('{:>{}}'.format(value, GENERAL_COLUMN), end='')
print('{:>{}}'.format(round(tuple_element[-1], 2), GENERAL_COLUMN))
<mask token>
| <mask token>
def read_file(string_object):
""" Opens and reads through a file, returning none if it isnt found """
try:
return open(string_object, 'r')
except FileNotFoundError:
return None
<mask token>
def populate_grades_list(file_object):
""" Takes information from a file containing student emails and grades and puts each in seperate lists """
email_list = []
grade_list = []
for line in file_object:
tmp_list = line.split()
email_list.append(tmp_list[0])
grade_list.append(tmp_list[1:])
for value_list in grade_list:
for i, value in enumerate(value_list):
value_list[i] = float(value)
return email_list, grade_list
def populate_weight_tuple_list(list_object):
""" Takes elements from a list containing course part names and their weights and returns a list of tuples containing those elements """
tuple_list = []
for i in range(len(list_object[0])):
weight_tuple = list_object[0][i], float(list_object[1][i])
tuple_list.append(weight_tuple)
return tuple_list
def populate_grades_tuple_list(list_object1, list_object2):
""" Takes elements from a list containing student emails and a list containing grades and returns a list of corresponding emails and grades in tuples """
tuple_list = []
for i in range(len(list_object1)):
grades_tuple = list_object1[i], list_object2[i]
tuple_list.append(grades_tuple)
return tuple_list
def calculate_final_grade(list_object1, list_object2):
""" Takes lists containing information about grades and course weights and calculates the final grade from the course """
list_object1 = [list(element) for element in list_object1]
for i in range(len(list_object1)):
final_grade = 0.0
for j in range(len(list_object1[i][1])):
final_grade += list_object1[i][1][j] * list_object2[j][1]
list_object1[i].append(final_grade)
list_object1 = [tuple(element) for element in list_object1]
return list_object1
def print_results(list_object1, list_object2):
""" Takes lists containing information about course parts and student grades and prints them in a formatted menu """
STUDENT_COLUMN = 16
GENERAL_COLUMN = 14
print()
print('{:>{}}'.format('Student ID', STUDENT_COLUMN), end='')
for i in range(len(list_object1)):
print('{:>{}}'.format(list_object1[i][0], GENERAL_COLUMN), end='')
print('{:>{}}'.format('Course grade', GENERAL_COLUMN))
for tuple_element in list_object2:
print('{:>{}}'.format(tuple_element[0], STUDENT_COLUMN), end='')
for i, value in enumerate(tuple_element[1]):
print('{:>{}}'.format(value, GENERAL_COLUMN), end='')
print('{:>{}}'.format(round(tuple_element[-1], 2), GENERAL_COLUMN))
<mask token>
| <mask token>
def read_file(string_object):
""" Opens and reads through a file, returning none if it isnt found """
try:
return open(string_object, 'r')
except FileNotFoundError:
return None
def populate_weight_list(file_object):
""" Takes information from a file object containing course weights and puts it into a list """
new_list = []
for line in file_object:
new_list.append(line.split())
return new_list
def populate_grades_list(file_object):
""" Takes information from a file containing student emails and grades and puts each in seperate lists """
email_list = []
grade_list = []
for line in file_object:
tmp_list = line.split()
email_list.append(tmp_list[0])
grade_list.append(tmp_list[1:])
for value_list in grade_list:
for i, value in enumerate(value_list):
value_list[i] = float(value)
return email_list, grade_list
def populate_weight_tuple_list(list_object):
""" Takes elements from a list containing course part names and their weights and returns a list of tuples containing those elements """
tuple_list = []
for i in range(len(list_object[0])):
weight_tuple = list_object[0][i], float(list_object[1][i])
tuple_list.append(weight_tuple)
return tuple_list
def populate_grades_tuple_list(list_object1, list_object2):
""" Takes elements from a list containing student emails and a list containing grades and returns a list of corresponding emails and grades in tuples """
tuple_list = []
for i in range(len(list_object1)):
grades_tuple = list_object1[i], list_object2[i]
tuple_list.append(grades_tuple)
return tuple_list
def calculate_final_grade(list_object1, list_object2):
""" Takes lists containing information about grades and course weights and calculates the final grade from the course """
list_object1 = [list(element) for element in list_object1]
for i in range(len(list_object1)):
final_grade = 0.0
for j in range(len(list_object1[i][1])):
final_grade += list_object1[i][1][j] * list_object2[j][1]
list_object1[i].append(final_grade)
list_object1 = [tuple(element) for element in list_object1]
return list_object1
def print_results(list_object1, list_object2):
""" Takes lists containing information about course parts and student grades and prints them in a formatted menu """
STUDENT_COLUMN = 16
GENERAL_COLUMN = 14
print()
print('{:>{}}'.format('Student ID', STUDENT_COLUMN), end='')
for i in range(len(list_object1)):
print('{:>{}}'.format(list_object1[i][0], GENERAL_COLUMN), end='')
print('{:>{}}'.format('Course grade', GENERAL_COLUMN))
for tuple_element in list_object2:
print('{:>{}}'.format(tuple_element[0], STUDENT_COLUMN), end='')
for i, value in enumerate(tuple_element[1]):
print('{:>{}}'.format(value, GENERAL_COLUMN), end='')
print('{:>{}}'.format(round(tuple_element[-1], 2), GENERAL_COLUMN))
def main_func():
""" Main function """
parts_file_name = input('Enter filename for parts: ')
parts_file = read_file(parts_file_name)
if parts_file == None:
print('File {} not found'.format(parts_file_name))
else:
parts_list = populate_weight_list(parts_file)
weight_tuples_list = populate_weight_tuple_list(parts_list)
print(weight_tuples_list)
grades_file_name = input('Enter filename for grades: ')
grade_file = read_file(grades_file_name)
if grade_file == None:
print('File {} not found'.format(grades_file_name))
else:
email_list, grades_list = populate_grades_list(grade_file)
grades_tuple_list = populate_grades_tuple_list(email_list,
grades_list)
print(grades_tuple_list)
modified_grade_tuple_list = calculate_final_grade(grades_tuple_list
, weight_tuples_list)
print(modified_grade_tuple_list)
print_results(weight_tuples_list, modified_grade_tuple_list)
<mask token>
| """
This program takes information about students and their coursework and calculates their final grades based on the weight of each course factor
"""
def read_file(string_object):
""" Opens and reads through a file, returning none if it isnt found """
try:
return open(string_object,"r")
except FileNotFoundError:
return None
def populate_weight_list(file_object):
""" Takes information from a file object containing course weights and puts it into a list """
new_list = []
for line in file_object:
new_list.append(line.split())
return new_list
def populate_grades_list(file_object):
""" Takes information from a file containing student emails and grades and puts each in seperate lists """
email_list = []
grade_list = []
for line in file_object:
tmp_list = line.split()
email_list.append(tmp_list[0])
grade_list.append(tmp_list[1::])
for value_list in grade_list:
for i, value in enumerate(value_list):
value_list[i] = float(value)
return email_list, grade_list
def populate_weight_tuple_list(list_object):
""" Takes elements from a list containing course part names and their weights and returns a list of tuples containing those elements """
tuple_list = []
for i in range(len(list_object[0])):
weight_tuple = (list_object[0][i], float(list_object[1][i]))
tuple_list.append(weight_tuple)
return tuple_list
def populate_grades_tuple_list(list_object1, list_object2):
""" Takes elements from a list containing student emails and a list containing grades and returns a list of corresponding emails and grades in tuples """
tuple_list = []
for i in range(len(list_object1)):
grades_tuple = (list_object1[i], list_object2[i])
tuple_list.append(grades_tuple)
return tuple_list
def calculate_final_grade(list_object1, list_object2):
""" Takes lists containing information about grades and course weights and calculates the final grade from the course """
list_object1 = [list(element) for element in list_object1] #Have to turn the tuples in the list to lists so that we can add the final grade to the list
for i in range(len(list_object1)):
final_grade = 0.0
for j in range(len(list_object1[i][1])):
final_grade += (list_object1[i][1][j] * list_object2[j][1])
list_object1[i].append(final_grade)
list_object1 = [tuple(element) for element in list_object1] #Turn the lists in the list into tuples again
return list_object1
def print_results(list_object1, list_object2):
""" Takes lists containing information about course parts and student grades and prints them in a formatted menu """
STUDENT_COLUMN = 16
GENERAL_COLUMN = 14
print()
print("{:>{}}".format("Student ID",STUDENT_COLUMN),end="")
for i in range(len(list_object1)):
print("{:>{}}".format(list_object1[i][0],GENERAL_COLUMN),end="")
print("{:>{}}".format("Course grade",GENERAL_COLUMN))
for tuple_element in list_object2:
print("{:>{}}".format(tuple_element[0],STUDENT_COLUMN),end="")
for i, value in enumerate(tuple_element[1]):
print("{:>{}}".format(value,GENERAL_COLUMN),end="")
print("{:>{}}".format(round(tuple_element[-1],2),GENERAL_COLUMN))
def main_func():
""" Main function """
parts_file_name = input("Enter filename for parts: ")
parts_file = read_file(parts_file_name)
if parts_file == None:
print("File {} not found".format(parts_file_name))
else:
parts_list = populate_weight_list(parts_file)
weight_tuples_list = populate_weight_tuple_list(parts_list)
print(weight_tuples_list)
grades_file_name = input("Enter filename for grades: ")
grade_file = read_file(grades_file_name)
if grade_file == None:
print("File {} not found".format(grades_file_name))
else:
email_list, grades_list = populate_grades_list(grade_file)
grades_tuple_list = populate_grades_tuple_list(email_list, grades_list)
print(grades_tuple_list)
modified_grade_tuple_list = calculate_final_grade(grades_tuple_list, weight_tuples_list)
print(modified_grade_tuple_list)
print_results(weight_tuples_list,modified_grade_tuple_list)
main_func()
| [
4,
5,
6,
8,
10
] |
2,065 | 17058b323c0a0974dfa8f124ccd6cb5bf29dd849 | <mask token>
class Blogsaljazeera2Spider(Spider):
<mask token>
<mask token>
<mask token>
@staticmethod
def cleanhtml(raw_html):
cleanr = re.compile('<.*?>')
cleantext = re.sub(cleanr, '', raw_html)
return cleantext
@staticmethod
def lua_script(n):
LUA_SCRIPT = (
"""
function main(splash)
local url = splash.args.url
assert(splash:go(url))
assert(splash:wait(1))
for i=1,{},1 do
assert(splash:runjs('document.getElementsByTagName("button")[0].click()'))
assert(splash:wait(1))
end
return {}
end
"""
.format(n, '{html=splash:html()}'))
return LUA_SCRIPT
def parse(self, response):
for url in self.start_urls:
yield Request(response.urljoin(url), self.parse_result, meta={
'splash': {'args': {'lua_source': self.lua_script(2)},
'endpoint': 'execute'}})
def parse_result(self, response):
for link in response.xpath(
"//*[@id='topics_Artilce_container']/div/a/@href").extract():
yield Request(response.urljoin(link), self.parse_links,
dont_filter=False)
<mask token>
def parse_comment(self, response):
item = ArticleItem()
title = ''
try:
title = get_display(arabic_reshaper.reshape(u'' + self.
cleanhtml(response.xpath(
"//h1[@class='tweet_strip_text']/text()").extract_first()).
strip()))
except (RuntimeError, TypeError, NameError):
pass
item['title'] = title
author = ''
try:
author = get_display(arabic_reshaper.reshape(u'' + self.
cleanhtml(response.xpath('null').extract_first()).strip()))
except (RuntimeError, TypeError, NameError):
pass
item['author'] = author
item['link'] = response.url
description = list()
try:
description.extend([self.cleanhtml(d) for d in get_display(
arabic_reshaper.reshape(u'' + self.cleanhtml(response.xpath
('null').extract())))])
except (RuntimeError, TypeError, NameError):
pass
item['description'] = description
comment = list()
names = list()
feeds = list()
try:
comment.extend([get_display(arabic_reshaper.reshape(u'' + self.
cleanhtml(d))) for d in response.xpath('//article/p/text()'
).extract()])
names.extend([get_display(arabic_reshaper.reshape(u'' + self.
cleanhtml(d))) for d in response.xpath(
'//article/div/div/h2/text()').extract()])
feeds.extend([self.cleanhtml(d) for d in response.xpath(
"//*[@class='number_likes']/text()").extract()])
except (RuntimeError, TypeError, NameError):
pass
item['comments'] = comment
item['names'] = names
item['feedbacks'] = feeds
return item
| <mask token>
class Blogsaljazeera2Spider(Spider):
<mask token>
<mask token>
<mask token>
@staticmethod
def cleanhtml(raw_html):
cleanr = re.compile('<.*?>')
cleantext = re.sub(cleanr, '', raw_html)
return cleantext
@staticmethod
def lua_script(n):
LUA_SCRIPT = (
"""
function main(splash)
local url = splash.args.url
assert(splash:go(url))
assert(splash:wait(1))
for i=1,{},1 do
assert(splash:runjs('document.getElementsByTagName("button")[0].click()'))
assert(splash:wait(1))
end
return {}
end
"""
.format(n, '{html=splash:html()}'))
return LUA_SCRIPT
def parse(self, response):
for url in self.start_urls:
yield Request(response.urljoin(url), self.parse_result, meta={
'splash': {'args': {'lua_source': self.lua_script(2)},
'endpoint': 'execute'}})
def parse_result(self, response):
for link in response.xpath(
"//*[@id='topics_Artilce_container']/div/a/@href").extract():
yield Request(response.urljoin(link), self.parse_links,
dont_filter=False)
def parse_links(self, response):
rep = int(int(response.xpath("//input[@id='intTotal']/@value").
extract_first()) / 6) + 1
yield SplashRequest(url=response.urljoin(''), callback=self.
parse_comment, endpoint='execute', args={'lua_source': self.
lua_script(rep)})
def parse_comment(self, response):
item = ArticleItem()
title = ''
try:
title = get_display(arabic_reshaper.reshape(u'' + self.
cleanhtml(response.xpath(
"//h1[@class='tweet_strip_text']/text()").extract_first()).
strip()))
except (RuntimeError, TypeError, NameError):
pass
item['title'] = title
author = ''
try:
author = get_display(arabic_reshaper.reshape(u'' + self.
cleanhtml(response.xpath('null').extract_first()).strip()))
except (RuntimeError, TypeError, NameError):
pass
item['author'] = author
item['link'] = response.url
description = list()
try:
description.extend([self.cleanhtml(d) for d in get_display(
arabic_reshaper.reshape(u'' + self.cleanhtml(response.xpath
('null').extract())))])
except (RuntimeError, TypeError, NameError):
pass
item['description'] = description
comment = list()
names = list()
feeds = list()
try:
comment.extend([get_display(arabic_reshaper.reshape(u'' + self.
cleanhtml(d))) for d in response.xpath('//article/p/text()'
).extract()])
names.extend([get_display(arabic_reshaper.reshape(u'' + self.
cleanhtml(d))) for d in response.xpath(
'//article/div/div/h2/text()').extract()])
feeds.extend([self.cleanhtml(d) for d in response.xpath(
"//*[@class='number_likes']/text()").extract()])
except (RuntimeError, TypeError, NameError):
pass
item['comments'] = comment
item['names'] = names
item['feedbacks'] = feeds
return item
| <mask token>
class Blogsaljazeera2Spider(Spider):
name = 'blogsaljazeera2'
allowed_domains = ['blogs.aljazeera.net']
start_urls = ['http://blogs.aljazeera.net/topics/short']
@staticmethod
def cleanhtml(raw_html):
cleanr = re.compile('<.*?>')
cleantext = re.sub(cleanr, '', raw_html)
return cleantext
@staticmethod
def lua_script(n):
LUA_SCRIPT = (
"""
function main(splash)
local url = splash.args.url
assert(splash:go(url))
assert(splash:wait(1))
for i=1,{},1 do
assert(splash:runjs('document.getElementsByTagName("button")[0].click()'))
assert(splash:wait(1))
end
return {}
end
"""
.format(n, '{html=splash:html()}'))
return LUA_SCRIPT
def parse(self, response):
for url in self.start_urls:
yield Request(response.urljoin(url), self.parse_result, meta={
'splash': {'args': {'lua_source': self.lua_script(2)},
'endpoint': 'execute'}})
def parse_result(self, response):
for link in response.xpath(
"//*[@id='topics_Artilce_container']/div/a/@href").extract():
yield Request(response.urljoin(link), self.parse_links,
dont_filter=False)
def parse_links(self, response):
rep = int(int(response.xpath("//input[@id='intTotal']/@value").
extract_first()) / 6) + 1
yield SplashRequest(url=response.urljoin(''), callback=self.
parse_comment, endpoint='execute', args={'lua_source': self.
lua_script(rep)})
def parse_comment(self, response):
item = ArticleItem()
title = ''
try:
title = get_display(arabic_reshaper.reshape(u'' + self.
cleanhtml(response.xpath(
"//h1[@class='tweet_strip_text']/text()").extract_first()).
strip()))
except (RuntimeError, TypeError, NameError):
pass
item['title'] = title
author = ''
try:
author = get_display(arabic_reshaper.reshape(u'' + self.
cleanhtml(response.xpath('null').extract_first()).strip()))
except (RuntimeError, TypeError, NameError):
pass
item['author'] = author
item['link'] = response.url
description = list()
try:
description.extend([self.cleanhtml(d) for d in get_display(
arabic_reshaper.reshape(u'' + self.cleanhtml(response.xpath
('null').extract())))])
except (RuntimeError, TypeError, NameError):
pass
item['description'] = description
comment = list()
names = list()
feeds = list()
try:
comment.extend([get_display(arabic_reshaper.reshape(u'' + self.
cleanhtml(d))) for d in response.xpath('//article/p/text()'
).extract()])
names.extend([get_display(arabic_reshaper.reshape(u'' + self.
cleanhtml(d))) for d in response.xpath(
'//article/div/div/h2/text()').extract()])
feeds.extend([self.cleanhtml(d) for d in response.xpath(
"//*[@class='number_likes']/text()").extract()])
except (RuntimeError, TypeError, NameError):
pass
item['comments'] = comment
item['names'] = names
item['feedbacks'] = feeds
return item
| from __future__ import unicode_literals
import re
import arabic_reshaper
from scrapy import Spider, Request
from bidi.algorithm import get_display
from websites.items import ArticleItem
from operator import add
from scrapy_splash import SplashRequest
class Blogsaljazeera2Spider(Spider):
name = 'blogsaljazeera2'
allowed_domains = ['blogs.aljazeera.net']
start_urls = ['http://blogs.aljazeera.net/topics/short']
@staticmethod
def cleanhtml(raw_html):
cleanr = re.compile('<.*?>')
cleantext = re.sub(cleanr, '', raw_html)
return cleantext
@staticmethod
def lua_script(n):
LUA_SCRIPT = (
"""
function main(splash)
local url = splash.args.url
assert(splash:go(url))
assert(splash:wait(1))
for i=1,{},1 do
assert(splash:runjs('document.getElementsByTagName("button")[0].click()'))
assert(splash:wait(1))
end
return {}
end
"""
.format(n, '{html=splash:html()}'))
return LUA_SCRIPT
def parse(self, response):
for url in self.start_urls:
yield Request(response.urljoin(url), self.parse_result, meta={
'splash': {'args': {'lua_source': self.lua_script(2)},
'endpoint': 'execute'}})
def parse_result(self, response):
for link in response.xpath(
"//*[@id='topics_Artilce_container']/div/a/@href").extract():
yield Request(response.urljoin(link), self.parse_links,
dont_filter=False)
def parse_links(self, response):
rep = int(int(response.xpath("//input[@id='intTotal']/@value").
extract_first()) / 6) + 1
yield SplashRequest(url=response.urljoin(''), callback=self.
parse_comment, endpoint='execute', args={'lua_source': self.
lua_script(rep)})
def parse_comment(self, response):
item = ArticleItem()
title = ''
try:
title = get_display(arabic_reshaper.reshape(u'' + self.
cleanhtml(response.xpath(
"//h1[@class='tweet_strip_text']/text()").extract_first()).
strip()))
except (RuntimeError, TypeError, NameError):
pass
item['title'] = title
author = ''
try:
author = get_display(arabic_reshaper.reshape(u'' + self.
cleanhtml(response.xpath('null').extract_first()).strip()))
except (RuntimeError, TypeError, NameError):
pass
item['author'] = author
item['link'] = response.url
description = list()
try:
description.extend([self.cleanhtml(d) for d in get_display(
arabic_reshaper.reshape(u'' + self.cleanhtml(response.xpath
('null').extract())))])
except (RuntimeError, TypeError, NameError):
pass
item['description'] = description
comment = list()
names = list()
feeds = list()
try:
comment.extend([get_display(arabic_reshaper.reshape(u'' + self.
cleanhtml(d))) for d in response.xpath('//article/p/text()'
).extract()])
names.extend([get_display(arabic_reshaper.reshape(u'' + self.
cleanhtml(d))) for d in response.xpath(
'//article/div/div/h2/text()').extract()])
feeds.extend([self.cleanhtml(d) for d in response.xpath(
"//*[@class='number_likes']/text()").extract()])
except (RuntimeError, TypeError, NameError):
pass
item['comments'] = comment
item['names'] = names
item['feedbacks'] = feeds
return item
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import re
import arabic_reshaper
from scrapy import Spider, Request
from bidi.algorithm import get_display
from websites.items import ArticleItem
from operator import add
from scrapy_splash import SplashRequest
class Blogsaljazeera2Spider(Spider):
name = 'blogsaljazeera2'
allowed_domains = ['blogs.aljazeera.net']
start_urls = ['http://blogs.aljazeera.net/topics/short']
@staticmethod
def cleanhtml(raw_html):
cleanr = re.compile('<.*?>')
cleantext = re.sub(cleanr, '', raw_html)
return cleantext
@staticmethod
def lua_script(n):
LUA_SCRIPT = """
function main(splash)
local url = splash.args.url
assert(splash:go(url))
assert(splash:wait(1))
for i=1,{},1 do
assert(splash:runjs('document.getElementsByTagName("button")[0].click()'))
assert(splash:wait(1))
end
return {}
end
""".format(n, "{html=splash:html()}")
return LUA_SCRIPT
def parse(self, response):
for url in self.start_urls:
yield Request(response.urljoin(url), self.parse_result, meta={
'splash': {
'args': {'lua_source': self.lua_script(2)},
'endpoint': 'execute',
}
})
def parse_result(self, response):
for link in response.xpath("//*[@id='topics_Artilce_container']/div/a/@href").extract():
yield Request(response.urljoin(link), self.parse_links, dont_filter=False)
def parse_links(self, response):
rep = int(int(response.xpath("//input[@id='intTotal']/@value").extract_first())/6)+1
yield SplashRequest(url=response.urljoin(''), callback=self.parse_comment, endpoint='execute', args={'lua_source': self.lua_script(rep)})
def parse_comment(self, response):
item = ArticleItem()
title = ""
try:
title = get_display(arabic_reshaper.reshape(u'' + self.cleanhtml(response.xpath("//h1[@class='tweet_strip_text']/text()").extract_first()).strip()))
except (RuntimeError, TypeError, NameError):
pass
item["title"] = title
author = ""
try:
author = get_display(arabic_reshaper.reshape(u'' + self.cleanhtml(response.xpath("null").extract_first()).strip()))
except (RuntimeError, TypeError, NameError):
pass
item["author"] = author
item["link"] = response.url
description = list()
try:
description.extend([self.cleanhtml(d) for d in get_display(arabic_reshaper.reshape(u'' + self.cleanhtml(response.xpath("null").extract())))])
except (RuntimeError, TypeError, NameError):
pass
item["description"] = description
comment = list()
names = list()
feeds = list()
try:
comment.extend([get_display(arabic_reshaper.reshape(u'' + self.cleanhtml(d))) for d in response.xpath("//article/p/text()").extract()])
names.extend([get_display(arabic_reshaper.reshape(u'' + self.cleanhtml(d))) for d in response.xpath("//article/div/div/h2/text()").extract()])
feeds.extend([self.cleanhtml(d) for d in response.xpath("//*[@class='number_likes']/text()").extract()])
except (RuntimeError, TypeError, NameError):
pass
item["comments"] = comment
item["names"] = names
item["feedbacks"] = feeds
return item
| [
6,
7,
8,
9,
10
] |
2,066 | 00f2aafe1a0c66d0414d189b9fa3bbc2da9fd727 | # -*- coding:utf-8 -*
import tushare as ts
import numpy as np
import pandas as pd
import datetime
import chardet
import urllib
import urllib2
import re
from bs4 import BeautifulSoup
import time
from pandas import Series,DataFrame
def get_relation(stock1,stock2):
hist_data = ts.get_hist_data(stock1,start='2018-05-01')
if hist_data is None:
return 0
hist_data.sort_values(by = "date",ascending = True,inplace = True)
hist_data_second = ts.get_hist_data(stock2,start='2018-05-01')
if hist_data_second is None:
return 0
hist_data_second.sort_values(by = "date",ascending = True,inplace = True)
result = pd.concat([hist_data,hist_data_second],axis = 1)
result = result['close']
result = result.dropna(how = 'any')
#result.to_excel('result.xlsx')
corr_result= result.corr()
result=np.array(corr_result.iloc[1:3,0:1])
return result[0][0]
year = datetime.datetime.now().strftime('%Y')
month = datetime.datetime.now().strftime('%m')
day = datetime.datetime.now().strftime('%d')
second = datetime.datetime.now().strftime('%s')
season = int(month) /3 +1
basic = ts.get_stock_basics()
basic.to_excel( year+month+day+second + '_basics.xlsx')
grouped_pe = basic['pe'].groupby(basic['industry'])
grouped_pe.mean().to_excel( year+month+day+second + '_grouped_pe.xlsx')
grouped_pb = basic['pb'].groupby(basic['industry'])
#print grouped.mean()
grouped_pb.mean().to_excel( year+month+day+second + '_grouped_pb.xlsx')
#np_industry = np.array(grouped_pb.mean().index)
grouped_industry=pd.concat([grouped_pe.mean(),grouped_pb.mean()],axis =1 ,join = 'inner')
grouped_industry.to_excel( year+month+day+second + '_grouped_industry.xlsx')
np_industry = np.array(grouped_pb.mean().index)
#for industry in np_industry:
# current_industy = basic[basic['industry'].isin([str(industry)])]
# current_industy.to_excel(str(industry)+ '.xlsx')
yj_current_season=ts.forecast_data(int(year),season)
yj_last_season=ts.forecast_data(int(year),season-1)
yj_last_season_index=yj_last_season.set_index('code')
yj_curren_seaon_index=yj_current_season.set_index('code')
yj_index=pd.concat([yj_curren_seaon_index,yj_last_season_index],axis =0 ,join = 'outer')
#yj_index.to_excel('index_yeji.xlsx')
result = pd.concat([yj_index,basic],axis =1 ,join = 'inner')
#result_select = result[result['type'].isin([u'\u9884\u5347',u'\u9884\u589e'])]
result_select = result[result['type'].isin([u'\u9884\u589e'])]
result_select.sort_values(by = "report_date",ascending = False,inplace = True)
result_select = result_select[result_select['report_date'].isin([np.array(result_select['report_date'])[0]])]
for code in np.array(result_select.index):
result_select.ix[str(code),'mean-pe'] = grouped_pe.mean()[result_select.ix[str(code),'industry']]
hist_data = ts.get_hist_data(str(code),start='2018-05-01')
if hist_data is not None:
hist_data.sort_values(by = "date",ascending = False,inplace = True)
hist_data = hist_data.iloc[0:5,:]
#five_day_everage = hist_data['close'].mean()
#hist_data.to_excel( year+month+day+second+str(code) + 'history.xlsx')
result_select.ix[str(code),'five-day-mean'] = hist_data['close'].mean()
close_price = np.array(hist_data['close'])
if close_price.size > 0:
result_select.ix[str(code),'last_day_price'] = np.array(hist_data['close'])[0]
result_select.ix[str(code),'increase-rate'] = \
(np.array(hist_data['close'])[0] - hist_data['close'].mean())/hist_data['close'].mean()
result_select.ix[str(code),'touzhijiazhi'] = \
(result_select.ix[str(code),'totalAssets']*10000)/(result_select.ix[str(code),'totals']*10000*10000)
result_select.ix[str(code),'price-values'] = \
result_select.ix[str(code),'touzhijiazhi'] /result_select.ix[str(code),'last_day_price']
if result_select.ix[str(code),'pe'] == 0:
result_select.ix[str(code),'pe'] = result_select.ix[str(code),'mean-pe']
result_select.ix[str(code),'pray-values'] = \
result_select.ix[str(code),'price-values'] * result_select.ix[str(code),'npr']/100.0 \
*result_select.ix[str(code),'mean-pe'] /result_select.ix[str(code),'pe'] \
*hist_data['close'].mean()/result_select.ix[str(code),'last_day_price']
result_select.to_excel( year+month+day+second + '_yeji.xlsx')
i = datetime.datetime.now()
#print ("当前的日期是%s" %i)
time_string = "%s-%s-%s"%(i.year,i.month,i.day)
print time_string
url ='http://query.sse.com.cn/infodisplay/queryBltnBookInfo.do?jsonCallBack=jsonpCallback55433&isNew=1&publishYear=2018'
#url ='https://query.sse.com.cn/infodisplay/'
headers = {
'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11',
'Host':'query.sse.com.cn',
'Referer':'http://www.sse.com.cn/disclosure/listedinfo/periodic/',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
'Accept-Encoding': 'gzip, deflate',
'Accept-Language': 'zh-CN',
'Connection': 'keep-alive'
}
#values = {'inputCode':'000063'}
#pos_data = urllib.urlencode(values)
def compare_time(time1,time2):
s_time = time.mktime(time.strptime(time1,'%Y-%m-%d'))
e_time = time.mktime(time.strptime(time2,'%Y-%m-%d'))
return int(s_time) - int(e_time)
def my_save(filename,contents):
fh=open(filename,'w')
fh.write(contents)
fh.close()
request = urllib2.Request(url,headers = headers)
page = urllib2.urlopen(request)
#page.encoding = 'utf-8'
soup = BeautifulSoup(page,"lxml")
html = soup.select('p')
string1 = str(html[0])
string2 = string1.split('ROWNUM_')
df=pd.DataFrame(columns=['Name','code','type','publishDate0','actualDate'])
for string in string2:
name= re.findall(r'companyAbbr":"(.+?)","',string)
code= re.findall(r'companyCode":"(.+?)","',string)
report_type= re.findall(r'bulletinType":"(.+?)","',string)
date = re.findall(r'publishDate0":"(.+?)","',string)
actual = re.findall(r'actualDate":"(.+?)","',string)
if len(actual) == 0 and len(date)!=0 and compare_time(str(date[0]),time_string) > 0:
df=df.append(pd.DataFrame({'Name':name,'code':code,'type':report_type,'publishDate0':date}),ignore_index=True)
df.sort_values(by = "publishDate0",ascending = True,inplace = True)
#df= df.iloc[0:16,:]
df.to_excel('ready_to_report.xlsx')
np_ready_report = np.unique(np.array(df['code']))
np_increase_report = np.array(result_select.index)
forcast=pd.DataFrame()
#forcast=pd.DataFrame(columns=['increase code','forcast code','relation'])
index =0;
for code1 in np_increase_report:
for code2 in np_ready_report:
if cmp(basic.ix[str(code2),'industry'],basic.ix[str(code1),'industry']) == 0:
relation = get_relation(str(code1),str(code2))
forcast.ix[str(index),'increase code'] = code1
forcast.ix[str(index),'forcast code'] = code2
forcast.ix[str(index),'relation'] = relation
forcast.ix[str(index),'publishDate0'] = np.array(df[df['code'].isin([code2])]['publishDate0'])[0]
forcast.ix[str(index),'forcast industry'] = basic.ix[str(code2),'industry']
forcast.ix[str(index),'increase industry'] = basic.ix[str(code1),'industry']
index = index +1
forcast.to_excel('forcast.xlsx')
| null | null | null | null | [
0
] |
2,067 | 65eb7d01ccea137605d54d816b707c2cd3709931 | <mask token>
def start_button_callback(obj, w, h, amount):
_max = int(w.get()) * int(h.get())
if not (obj.validation_check(w) and obj.validation_check(h) and obj.
validation_check(amount, _max)):
ctypes.windll.user32.MessageBoxW(0, 'Wprowadź poprawne dane', 'Błąd', 1
)
return False
else:
obj.exit()
game = GameWindow(int(w.get()), int(h.get()), int(amount.get()))
game.start_game()
return True
<mask token>
| <mask token>
def start_button_callback(obj, w, h, amount):
_max = int(w.get()) * int(h.get())
if not (obj.validation_check(w) and obj.validation_check(h) and obj.
validation_check(amount, _max)):
ctypes.windll.user32.MessageBoxW(0, 'Wprowadź poprawne dane', 'Błąd', 1
)
return False
else:
obj.exit()
game = GameWindow(int(w.get()), int(h.get()), int(amount.get()))
game.start_game()
return True
def main():
main_window = m_window.MainWindow()
main_window.init(start_button_callback).mainloop()
<mask token>
| <mask token>
def start_button_callback(obj, w, h, amount):
_max = int(w.get()) * int(h.get())
if not (obj.validation_check(w) and obj.validation_check(h) and obj.
validation_check(amount, _max)):
ctypes.windll.user32.MessageBoxW(0, 'Wprowadź poprawne dane', 'Błąd', 1
)
return False
else:
obj.exit()
game = GameWindow(int(w.get()), int(h.get()), int(amount.get()))
game.start_game()
return True
def main():
main_window = m_window.MainWindow()
main_window.init(start_button_callback).mainloop()
if __name__ == '__main__':
main()
| import ctypes
from game import GameWindow
import start_window as m_window
def start_button_callback(obj, w, h, amount):
_max = int(w.get()) * int(h.get())
if not (obj.validation_check(w) and obj.validation_check(h) and obj.
validation_check(amount, _max)):
ctypes.windll.user32.MessageBoxW(0, 'Wprowadź poprawne dane', 'Błąd', 1
)
return False
else:
obj.exit()
game = GameWindow(int(w.get()), int(h.get()), int(amount.get()))
game.start_game()
return True
def main():
main_window = m_window.MainWindow()
main_window.init(start_button_callback).mainloop()
if __name__ == '__main__':
main()
| import ctypes
from game import GameWindow
import start_window as m_window
def start_button_callback(obj, w, h, amount):
_max = int(w.get()) * int(h.get())
if not (obj.validation_check(w) and obj.validation_check(h) and obj.validation_check(amount, _max)):
ctypes.windll.user32.MessageBoxW(0, "Wprowadź poprawne dane", "Błąd", 1)
return False
else:
obj.exit()
game = GameWindow(int(w.get()), int(h.get()), int(amount.get()))
game.start_game()
return True
def main():
main_window = m_window.MainWindow()
main_window.init(start_button_callback).mainloop()
if __name__ == '__main__':
main()
| [
1,
2,
3,
4,
5
] |
2,068 | fc8f3be408f4d21de2ae18776cd60177c82bea77 | #!/usr/bin/python
#coding:utf-8
import glob, os
#from collections import OrderedDict
aa = os.popen("grep -E 'register|cp' all.log |grep -v 'bohan' | awk '{ print $6 }' > /opt/csvt01/logs/tmp.txt").read().strip()
#os.system("grep -E 'register|cp' all.log |grep -v 'bohan' | awk '{ print $6 }' > /opt/csvt01/logs/tmp.txt")
#bb = aa.split('-')[1]
res = []
fileName = file('/opt/csvt01/logs/tmp.txt')
while True:
line = fileName.readline()
if len(line) ==0:break
a = line.split('-')[1]
res.append(a)
fileName.close()
#print res
a = {}
for i in res:
if res.count(i)>1:
a[i] = res.count(i)
#print (a)
def fun(s):
d = sorted(s.iteritems(),key=lambda t:t[1],reverse=True)
return d
d = fun(a)
for i in d:
print i[0]
| null | null | null | null | [
0
] |
2,069 | a4f446d6fd2a34c0ef591d7cbda59dccc0a36611 | #!/usr/bin/env python
#coding:utf-8
import os
def listDir(path):
allFile = []
subFile = os.listdir(path) #列出当前路径下的目录或者文件,返回列表
for fileName in subFile:
fullFile = os.path.join(path, fileName) #os提供方法连接路径与文件名形成完整路径名,作用同:字符串+“/”+字符串
if os.path.isdir(fullFile): #判断是否为目录或者文件,有isfile()方法
listDir(fullFile) #递归
allFile.append(fullFile.decode('gbk').encode('utf-8')) #对于中文的编码
print fullFile.decode('gbk').encode('utf-8')
return allFile
#递归方式获取文件目录
#递归方法的测试
#listDir("C:/Users/13160/Desktop")
#系统提供遍历目录的方法os.walk(path),返回3元元组(遍历路径名,目录列表,文件列表)
for path, dir, file in os.walk("C:/Users/13160/Desktop"):
for f in file:
print os.path.join(path, f).decode('gbk').encode('utf-8')
for d in dir:
print os.path.join(path, d).decode('gbk').encode('utf-8') | null | null | null | null | [
0
] |
2,070 | a73e3a07ab0ebb90fa744d3dfc8d9da119f99283 | <mask token>
class ray:
def __init__(self, *args):
if len(args) == 0:
self.A = vec3(0, 0, 0)
self.B = vec3(1, 0, 0)
elif len(args) == 2:
if type(args[0]) != vec3 or type(args[1]) != vec3:
raise ValueError('Expected two vec3s')
else:
self.A = args[0]
self.B = args[1]
else:
raise ValueError('Expected 0 or 2 arguments, got ' + len(args))
def origin(self):
return self.A
<mask token>
def point_at_parameter(self, t):
return self.A + t * self.B
<mask token>
| <mask token>
class ray:
def __init__(self, *args):
if len(args) == 0:
self.A = vec3(0, 0, 0)
self.B = vec3(1, 0, 0)
elif len(args) == 2:
if type(args[0]) != vec3 or type(args[1]) != vec3:
raise ValueError('Expected two vec3s')
else:
self.A = args[0]
self.B = args[1]
else:
raise ValueError('Expected 0 or 2 arguments, got ' + len(args))
def origin(self):
return self.A
def direction(self):
return self.B
def point_at_parameter(self, t):
return self.A + t * self.B
<mask token>
| <mask token>
class ray:
def __init__(self, *args):
if len(args) == 0:
self.A = vec3(0, 0, 0)
self.B = vec3(1, 0, 0)
elif len(args) == 2:
if type(args[0]) != vec3 or type(args[1]) != vec3:
raise ValueError('Expected two vec3s')
else:
self.A = args[0]
self.B = args[1]
else:
raise ValueError('Expected 0 or 2 arguments, got ' + len(args))
def origin(self):
return self.A
def direction(self):
return self.B
def point_at_parameter(self, t):
return self.A + t * self.B
if __name__ == '__main__':
r = ray(vec3(3, 2, 5.5), vec3(1, 0, 0))
print(r.point_at_parameter(5.0))
| from vector3 import vec3
class ray:
def __init__(self, *args):
if len(args) == 0:
self.A = vec3(0, 0, 0)
self.B = vec3(1, 0, 0)
elif len(args) == 2:
if type(args[0]) != vec3 or type(args[1]) != vec3:
raise ValueError('Expected two vec3s')
else:
self.A = args[0]
self.B = args[1]
else:
raise ValueError('Expected 0 or 2 arguments, got ' + len(args))
def origin(self):
return self.A
def direction(self):
return self.B
def point_at_parameter(self, t):
return self.A + t * self.B
if __name__ == '__main__':
r = ray(vec3(3, 2, 5.5), vec3(1, 0, 0))
print(r.point_at_parameter(5.0))
| from vector3 import vec3
class ray:
def __init__(self, *args):
if len(args) == 0:
self.A = vec3(0,0,0)
self.B = vec3(1,0,0)
elif len(args) == 2:
if type(args[0]) != vec3 or type(args[1]) != vec3:
raise ValueError("Expected two vec3s")
else:
self.A = args[0]
self.B = args[1]
else:
raise ValueError("Expected 0 or 2 arguments, got " + len(args))
def origin(self):
return self.A
def direction(self):
return self.B
def point_at_parameter(self, t):
return self.A + t*self.B
if __name__ == "__main__":
r = ray(vec3(3,2,5.5), vec3(1,0,0))
print(r.point_at_parameter(5.0))
| [
4,
5,
6,
7,
8
] |
2,071 | aaee69d339cf1c14e54366633155ee57026e6487 | <mask token>
| <mask token>
T = List[int]
C = Callable[[int], None]
| from typing import List, Callable
T = List[int]
C = Callable[[int], None]
| from typing import List, Callable
#: A list of int
T = List[int]
C = Callable[[int], None] # a generic alias not having a doccomment
| null | [
0,
1,
2,
3
] |
2,072 | f727c0551f20fb0dc72b4d81b7b3ed8ce9b1b6f4 | <mask token>
def downgrade():
op.drop_constraint(None, 'user', type_='unique')
op.drop_constraint(None, 'user', type_='unique')
op.drop_column('user', 'money')
| <mask token>
def upgrade():
op.add_column('user', sa.Column('money', sa.Integer(), nullable=False))
op.create_unique_constraint(None, 'user', ['password'])
op.create_unique_constraint(None, 'user', ['email'])
def downgrade():
op.drop_constraint(None, 'user', type_='unique')
op.drop_constraint(None, 'user', type_='unique')
op.drop_column('user', 'money')
| <mask token>
revision = '0bb5933fe69f'
down_revision = '09c6fdb3cf81'
branch_labels = None
depends_on = None
def upgrade():
op.add_column('user', sa.Column('money', sa.Integer(), nullable=False))
op.create_unique_constraint(None, 'user', ['password'])
op.create_unique_constraint(None, 'user', ['email'])
def downgrade():
op.drop_constraint(None, 'user', type_='unique')
op.drop_constraint(None, 'user', type_='unique')
op.drop_column('user', 'money')
| <mask token>
from alembic import op
import sqlalchemy as sa
revision = '0bb5933fe69f'
down_revision = '09c6fdb3cf81'
branch_labels = None
depends_on = None
def upgrade():
op.add_column('user', sa.Column('money', sa.Integer(), nullable=False))
op.create_unique_constraint(None, 'user', ['password'])
op.create_unique_constraint(None, 'user', ['email'])
def downgrade():
op.drop_constraint(None, 'user', type_='unique')
op.drop_constraint(None, 'user', type_='unique')
op.drop_column('user', 'money')
| """empty message
Revision ID: 0bb5933fe69f
Revises: 09c6fdb3cf81
Create Date: 2021-03-11 16:48:06.771046
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '0bb5933fe69f'
down_revision = '09c6fdb3cf81'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('user', sa.Column('money', sa.Integer(), nullable=False))
op.create_unique_constraint(None, 'user', ['password'])
op.create_unique_constraint(None, 'user', ['email'])
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_constraint(None, 'user', type_='unique')
op.drop_constraint(None, 'user', type_='unique')
op.drop_column('user', 'money')
# ### end Alembic commands ###
| [
1,
2,
3,
4,
5
] |
2,073 | 3c341b17f260cc745c8659ee769493216522ac19 | <mask token>
| <mask token>
for video_id in list_video_id:
url = ('https://www.googleapis.com/youtube/v3/videos?id=' + video_id +
'&part=statistics&key=' + API_KEY)
response = requests.get(url).json()
for i in response['items']:
rows.append({'videoid': i['id'], 'viewCount': i['statistics'][
'viewCount'], 'likeCount': i['statistics']['likeCount'],
'dislikeCount': i['statistics']['dislikeCount'],
'favoriteCount': i['statistics']['favoriteCount'],
'commentCount': i['statistics']['commentCount']})
print(rows)
with open('get_api_youtube.csv', 'w', encoding='UTF8', newline='') as f:
writer = csv.DictWriter(f, fieldnames=fieldnames)
writer.writeheader()
for j in rows:
writer.writerow(j)
| <mask token>
API_KEY = 'AIzaSyALrKc3-W0u_Ku-J2OpyjnqFhV5wKlwKGs'
list_video_id = ['7cmvABXyUC0', '9eH-7x7swEM', 'JndzGxbwvG0', 'l0P5_E6J_g0']
fieldnames = ['videoid', 'viewCount', 'likeCount', 'dislikeCount',
'favoriteCount', 'commentCount']
rows = []
for video_id in list_video_id:
url = ('https://www.googleapis.com/youtube/v3/videos?id=' + video_id +
'&part=statistics&key=' + API_KEY)
response = requests.get(url).json()
for i in response['items']:
rows.append({'videoid': i['id'], 'viewCount': i['statistics'][
'viewCount'], 'likeCount': i['statistics']['likeCount'],
'dislikeCount': i['statistics']['dislikeCount'],
'favoriteCount': i['statistics']['favoriteCount'],
'commentCount': i['statistics']['commentCount']})
print(rows)
with open('get_api_youtube.csv', 'w', encoding='UTF8', newline='') as f:
writer = csv.DictWriter(f, fieldnames=fieldnames)
writer.writeheader()
for j in rows:
writer.writerow(j)
| import requests
import json, csv
import pandas as pd
API_KEY = 'AIzaSyALrKc3-W0u_Ku-J2OpyjnqFhV5wKlwKGs'
list_video_id = ['7cmvABXyUC0', '9eH-7x7swEM', 'JndzGxbwvG0', 'l0P5_E6J_g0']
fieldnames = ['videoid', 'viewCount', 'likeCount', 'dislikeCount',
'favoriteCount', 'commentCount']
rows = []
for video_id in list_video_id:
url = ('https://www.googleapis.com/youtube/v3/videos?id=' + video_id +
'&part=statistics&key=' + API_KEY)
response = requests.get(url).json()
for i in response['items']:
rows.append({'videoid': i['id'], 'viewCount': i['statistics'][
'viewCount'], 'likeCount': i['statistics']['likeCount'],
'dislikeCount': i['statistics']['dislikeCount'],
'favoriteCount': i['statistics']['favoriteCount'],
'commentCount': i['statistics']['commentCount']})
print(rows)
with open('get_api_youtube.csv', 'w', encoding='UTF8', newline='') as f:
writer = csv.DictWriter(f, fieldnames=fieldnames)
writer.writeheader()
for j in rows:
writer.writerow(j)
| import requests
import json, csv
import pandas as pd
API_KEY = 'AIzaSyALrKc3-W0u_Ku-J2OpyjnqFhV5wKlwKGs'
list_video_id = ['7cmvABXyUC0', '9eH-7x7swEM', 'JndzGxbwvG0', 'l0P5_E6J_g0']
fieldnames = ['videoid', 'viewCount', 'likeCount', 'dislikeCount', 'favoriteCount', 'commentCount']
rows = []
for video_id in list_video_id:
url = "https://www.googleapis.com/youtube/v3/videos?id=" + video_id + "&part=statistics&key=" + API_KEY
response = requests.get(url).json()
for i in response['items']:
rows.append({"videoid": i['id'],
"viewCount": i['statistics']['viewCount'],
"likeCount": i['statistics']['likeCount'],
"dislikeCount": i['statistics']['dislikeCount'],
"favoriteCount": i['statistics']['favoriteCount'],
"commentCount": i['statistics']['commentCount']})
print(rows)
with open(r'get_api_youtube.csv', 'w', encoding='UTF8', newline='') as f:
writer = csv.DictWriter(f, fieldnames=fieldnames)
writer.writeheader()
for j in rows:
writer.writerow(j)
| [
0,
1,
2,
3,
4
] |
2,074 | 2e8737a48bd04ef5c158afb23dc94476ea790e18 | from zipline.api import (
# add_history,
history,
order_target_percent,
order,
record,
symbol,
get_datetime,
schedule_function,
)
from zipline.algorithm import TradingAlgorithm
from zipline.utils.factory import load_from_yahoo
import numpy as np
import pandas as pd
from datetime import datetime
cash = 0 # tracks the amount of money in the backtest
def initialize(context):
context.target_window = dict()
context.bought_options = dict()
# context.underlying = pd.read_csv('../data/underlying/FB.csv')
# context.underlying = pd.to_datetime(context.underlying['Date'])
context.options = pd.read_csv('../data/cleaned_data/FB.csv')
context.options['date'] = pd.to_datetime(context.options['date'])
context.options['expiration'] = pd.to_datetime(context.options['expiration'])
# (7) Trade (MODIFY SO THIS SHOULD ONLY HAPPEN ONCE A DAY)
def handle_data(context, data):
day_option_df = context.options[context.options['date'] == get_datetime()]
call_options = day_option_df[day_option_df['type'] == 'C']
################################## classifier stuff happens somewhere here
call_options_good = call_options # call_options_good is the classified call_options
##################################
# purchase the options that we think will end up in the money (could also modify this to give weight to it)
for index, row in call_options_good.iterrows():
context.bought_options = rbind(context.bought_options, row)
cash -= row['price']
# exercise expiring options that we've bought (assuming strike price is lower than expiration price)
expiring_calls = context.bought_options[context.bought_options['expiration'] == get_datetime()]
for index, row in expiring_calls.iterrows():
price = history(symbol(row['ticker']), '1d', 'price').iloc[0,0]
cash += 100*max(price - row['strike'], 0) # assuming 100:1 ratio equity:option
# need to add a way to plot cash data vs datetime
def add_to_window(context, window_size, datapoint, ticker):
tw = context.target_window[ticker]
tw.append(datapoint)
context.target_window[ticker] = tw[-window_size:] if len(tw) > window_size else tw
if __name__ == '__main__':
cash = 10000 # arbitrary amount
universe = ['FB'] # need to change the universe
data = load_from_yahoo(stocks=universe,
indexes={}, start=datetime(2016, 4, 3),
end=datetime.today())
olmar = TradingAlgorithm(initialize=initialize, handle_data=handle_data, capital_base=10000)
backtest = olmar.run(data)
backtest.to_csv('backtest-50-2012.csv')
print backtest['algorithm_period_return'][-1]
import pyfolio as pf
returns, positions, transactions, gross_lev = pf.utils.extract_rets_pos_txn_from_zipline(backtest)
pf.create_full_tear_sheet(returns, positions=positions, transactions=transactions, gross_lev=gross_lev, live_start_date='2004-10-22')
| null | null | null | null | [
0
] |
2,075 | 2226382c494af33957a44d9f1682f7deacf574a2 | <mask token>
| <mask token>
for i in d:
packs[i % k] += 1
<mask token>
if k % 2 == 0:
counter += packs[k // 2] // 2
for i in range(1, ceil(k / 2)):
counter += min(packs[i], packs[k - i])
print(counter * 2)
| <mask token>
n, k = map(int, input().split())
d = list(map(int, input().split()))
packs = [0] * k
for i in d:
packs[i % k] += 1
counter = packs[0] // 2
if k % 2 == 0:
counter += packs[k // 2] // 2
for i in range(1, ceil(k / 2)):
counter += min(packs[i], packs[k - i])
print(counter * 2)
| from math import ceil
n, k = map(int, input().split())
d = list(map(int, input().split()))
packs = [0] * k
for i in d:
packs[i % k] += 1
counter = packs[0] // 2
if k % 2 == 0:
counter += packs[k // 2] // 2
for i in range(1, ceil(k / 2)):
counter += min(packs[i], packs[k - i])
print(counter * 2)
| from math import ceil
n, k = map(int, input().split())
d = list(map(int, input().split()))
packs = [0]*k
for i in d:
packs[i%k] += 1
counter = packs[0]//2
if (k % 2) == 0:
counter += packs[k//2]//2
for i in range(1, ceil(k/2)):
counter += min(packs[i], packs[k-i])
print(counter*2)
| [
0,
1,
2,
3,
4
] |
2,076 | 81774d3b4d9fbf22ed19e1cba7ec5e8e3707f51a | <mask token>
def xor_encryption(source, destination, key):
"""
Returns text encrypted or decrypted with xor
Keyword arguments:
source - path to file with text to be encrypted
destination - path to the file where you want to save the result
key - encryption key
"""
text = inputoutput.read_from_file(source, 'b')
key = bytearray(key, 'utf-8')
result = bytearray()
for i in range(len(text)):
result.append(text[i] ^ key[i % len(key)])
inputoutput.write_to_file(result, destination, 'b')
<mask token>
| <mask token>
def xor_encryption(source, destination, key):
"""
Returns text encrypted or decrypted with xor
Keyword arguments:
source - path to file with text to be encrypted
destination - path to the file where you want to save the result
key - encryption key
"""
text = inputoutput.read_from_file(source, 'b')
key = bytearray(key, 'utf-8')
result = bytearray()
for i in range(len(text)):
result.append(text[i] ^ key[i % len(key)])
inputoutput.write_to_file(result, destination, 'b')
<mask token>
xor_encryption('sixth_practice/text.txt', 'sixth_practice/text1.txt', key)
xor_encryption('sixth_practice/text1.txt', 'sixth_practice/text2.txt', key)
| <mask token>
def xor_encryption(source, destination, key):
"""
Returns text encrypted or decrypted with xor
Keyword arguments:
source - path to file with text to be encrypted
destination - path to the file where you want to save the result
key - encryption key
"""
text = inputoutput.read_from_file(source, 'b')
key = bytearray(key, 'utf-8')
result = bytearray()
for i in range(len(text)):
result.append(text[i] ^ key[i % len(key)])
inputoutput.write_to_file(result, destination, 'b')
key = 'verystongk'
xor_encryption('sixth_practice/text.txt', 'sixth_practice/text1.txt', key)
xor_encryption('sixth_practice/text1.txt', 'sixth_practice/text2.txt', key)
| import inputoutput
def xor_encryption(source, destination, key):
"""
Returns text encrypted or decrypted with xor
Keyword arguments:
source - path to file with text to be encrypted
destination - path to the file where you want to save the result
key - encryption key
"""
text = inputoutput.read_from_file(source, 'b')
key = bytearray(key, 'utf-8')
result = bytearray()
for i in range(len(text)):
result.append(text[i] ^ key[i % len(key)])
inputoutput.write_to_file(result, destination, 'b')
key = 'verystongk'
xor_encryption('sixth_practice/text.txt', 'sixth_practice/text1.txt', key)
xor_encryption('sixth_practice/text1.txt', 'sixth_practice/text2.txt', key)
| import inputoutput
def xor_encryption(source, destination, key):
"""
Returns text encrypted or decrypted with xor
Keyword arguments:
source - path to file with text to be encrypted
destination - path to the file where you want to save the result
key - encryption key
"""
text = inputoutput.read_from_file(source, "b")
# text = read_from_file(source)
key = bytearray(key, 'utf-8')
result = bytearray()
for i in range(len(text)):
result.append(text[i] ^ key[i % len(key)])
inputoutput.write_to_file(result, destination, "b")
# def write_to_file(data, filename):
# """
# Write binary data to file
# Keyword arguments:
# data - binary data to be written
# filename - path to the file where you want to save the result
# """
# f = open(filename, 'wb')
# f.write(data)
# f.close()
# def read_from_file(filename):
# """
# Read binary data from file
# Keyword arguments:
# filename - path to the file where you want to save the result
# Returns:
# data - binary data from file
# """
# f = open(filename, 'rb')
# data = f.read()
# f.close()
# return data
key = 'verystongk'
# Шифрование
xor_encryption('sixth_practice/text.txt', 'sixth_practice/text1.txt', key)
# Расшифрование
xor_encryption('sixth_practice/text1.txt', 'sixth_practice/text2.txt', key)
| [
1,
2,
3,
4,
5
] |
2,077 | c4fcca61e560046c77046079fb305be8c883653b | <mask token>
class course_form(report_sxw.rml_parse):
<mask token>
<mask token>
def _get_course(self, data):
training_category_obj = self.pool.get('hr.training.category')
training_category_id = data['training_category_id']
training_category_id = (not training_category_id and
training_category_obj.browse(self.cr, self.uid, []) or
training_category_id)
self.cr.execute(
' select distinct c.id as course_id , c.name as course_name from hr_training_course as c where c.training_category_id in %s'
, (tuple(training_category_id),))
res = self.cr.dictfetchall()
return res
def _get_data(self, data, course_id):
date1 = data['date_from']
date2 = data['date_to']
side = data['type'] == '3' and 'inside' or 'outside'
self.year = date1 and mx.DateTime.Parser.DateTimeFromString(date1
).year or self.year
res = []
if date1 and date2:
self.cr.execute(
" select distinct emp.marital as marital, t.end_date as end,t.start_date as start,c.name as country,t.course_type as type,t.location as location,res.name as name from hr_employee_training t left join hr_employee_training_line line on (line.training_employee_id=t.id) left join hr_employee emp on (emp.id=line.employee_id) left join hr_job jop on (jop.id=emp.job_id) left join resource_resource res on (res.id=emp.resource_id) left join hr_training_course cou on(cou.id=t.course_id) left join res_country c on(t.country_id=c.id) where t.course_id = %s and t.type ='hr.approved.course' and t.training_place = %s and t.start_date >= %s and t.end_date <= %s "
, (tuple([course_id]), side, date1, date2))
elif date1 and not date2:
self.cr.execute(
" select distinct emp.marital as marital, t.end_date as end,t.start_date as start,c.name as country,t.course_type as type,t.location as location,res.name as name from hr_employee_training t left join hr_employee_training_line line on (line.training_employee_id=t.id) left join hr_employee emp on (emp.id=line.employee_id) left join hr_job jop on (jop.id=emp.job_id) left join resource_resource res on (res.id=emp.resource_id) left join hr_training_course cou on(cou.id=t.course_id) left join res_country c on(t.country_id=c.id) where t.course_id = %s and t.type ='hr.approved.course' and t.training_place = %s and t.start_date >= %s"
, (tuple([course_id]), side, date1))
elif date2 and not date1:
self.cr.execute(
" select distinct emp.marital as marital, t.end_date as end,t.start_date as start,c.name as country,t.course_type as type,t.location as location,res.name as name from hr_employee_training t left join hr_employee_training_line line on (line.training_employee_id=t.id) left join hr_employee emp on (emp.id=line.employee_id) left join hr_job jop on (jop.id=emp.job_id) left join resource_resource res on (res.id=emp.resource_id) left join hr_training_course cou on(cou.id=t.course_id) left join res_country c on(t.country_id=c.id) where t.course_id = %s and t.type ='hr.approved.course' and t.training_place = %s and t.end_date <= %s "
, (tuple([course_id]), side, date2))
else:
self.cr.execute(
" select distinct emp.marital as marital, t.end_date as end,t.start_date as start,c.name as country,t.course_type as type,t.location as location,res.name as name from hr_employee_training t left join hr_employee_training_line line on (line.training_employee_id=t.id) left join hr_employee emp on (emp.id=line.employee_id) left join hr_job jop on (jop.id=emp.job_id) left join resource_resource res on (res.id=emp.resource_id) left join hr_training_course cou on(cou.id=t.course_id) left join res_country c on(t.country_id=c.id) where t.course_id = %s and t.type ='hr.approved.course' and t.training_place = %s "
, (tuple([course_id]), side))
res = self.cr.dictfetchall()
return res
def _get_time(self):
return self.year
<mask token>
| <mask token>
class course_form(report_sxw.rml_parse):
def __init__(self, cr, uid, name, context):
super(course_form, self).__init__(cr, uid, name, context)
self.localcontext.update({'time': time, 'time1': self._get_time,
'course': self._get_course, 'line': self._get_data, 'user':
self._get_user})
self.year = int(time.strftime('%Y'))
def _get_user(self, data, header=False):
if header:
return self.pool.get('res.company').browse(self.cr, self.uid,
data['form']['company_id'][0]).logo
else:
return self.pool.get('res.users').browse(self.cr, self.uid,
self.uid).name
def _get_course(self, data):
training_category_obj = self.pool.get('hr.training.category')
training_category_id = data['training_category_id']
training_category_id = (not training_category_id and
training_category_obj.browse(self.cr, self.uid, []) or
training_category_id)
self.cr.execute(
' select distinct c.id as course_id , c.name as course_name from hr_training_course as c where c.training_category_id in %s'
, (tuple(training_category_id),))
res = self.cr.dictfetchall()
return res
def _get_data(self, data, course_id):
date1 = data['date_from']
date2 = data['date_to']
side = data['type'] == '3' and 'inside' or 'outside'
self.year = date1 and mx.DateTime.Parser.DateTimeFromString(date1
).year or self.year
res = []
if date1 and date2:
self.cr.execute(
" select distinct emp.marital as marital, t.end_date as end,t.start_date as start,c.name as country,t.course_type as type,t.location as location,res.name as name from hr_employee_training t left join hr_employee_training_line line on (line.training_employee_id=t.id) left join hr_employee emp on (emp.id=line.employee_id) left join hr_job jop on (jop.id=emp.job_id) left join resource_resource res on (res.id=emp.resource_id) left join hr_training_course cou on(cou.id=t.course_id) left join res_country c on(t.country_id=c.id) where t.course_id = %s and t.type ='hr.approved.course' and t.training_place = %s and t.start_date >= %s and t.end_date <= %s "
, (tuple([course_id]), side, date1, date2))
elif date1 and not date2:
self.cr.execute(
" select distinct emp.marital as marital, t.end_date as end,t.start_date as start,c.name as country,t.course_type as type,t.location as location,res.name as name from hr_employee_training t left join hr_employee_training_line line on (line.training_employee_id=t.id) left join hr_employee emp on (emp.id=line.employee_id) left join hr_job jop on (jop.id=emp.job_id) left join resource_resource res on (res.id=emp.resource_id) left join hr_training_course cou on(cou.id=t.course_id) left join res_country c on(t.country_id=c.id) where t.course_id = %s and t.type ='hr.approved.course' and t.training_place = %s and t.start_date >= %s"
, (tuple([course_id]), side, date1))
elif date2 and not date1:
self.cr.execute(
" select distinct emp.marital as marital, t.end_date as end,t.start_date as start,c.name as country,t.course_type as type,t.location as location,res.name as name from hr_employee_training t left join hr_employee_training_line line on (line.training_employee_id=t.id) left join hr_employee emp on (emp.id=line.employee_id) left join hr_job jop on (jop.id=emp.job_id) left join resource_resource res on (res.id=emp.resource_id) left join hr_training_course cou on(cou.id=t.course_id) left join res_country c on(t.country_id=c.id) where t.course_id = %s and t.type ='hr.approved.course' and t.training_place = %s and t.end_date <= %s "
, (tuple([course_id]), side, date2))
else:
self.cr.execute(
" select distinct emp.marital as marital, t.end_date as end,t.start_date as start,c.name as country,t.course_type as type,t.location as location,res.name as name from hr_employee_training t left join hr_employee_training_line line on (line.training_employee_id=t.id) left join hr_employee emp on (emp.id=line.employee_id) left join hr_job jop on (jop.id=emp.job_id) left join resource_resource res on (res.id=emp.resource_id) left join hr_training_course cou on(cou.id=t.course_id) left join res_country c on(t.country_id=c.id) where t.course_id = %s and t.type ='hr.approved.course' and t.training_place = %s "
, (tuple([course_id]), side))
res = self.cr.dictfetchall()
return res
def _get_time(self):
return self.year
<mask token>
| <mask token>
class course_form(report_sxw.rml_parse):
def __init__(self, cr, uid, name, context):
super(course_form, self).__init__(cr, uid, name, context)
self.localcontext.update({'time': time, 'time1': self._get_time,
'course': self._get_course, 'line': self._get_data, 'user':
self._get_user})
self.year = int(time.strftime('%Y'))
def _get_user(self, data, header=False):
if header:
return self.pool.get('res.company').browse(self.cr, self.uid,
data['form']['company_id'][0]).logo
else:
return self.pool.get('res.users').browse(self.cr, self.uid,
self.uid).name
def _get_course(self, data):
training_category_obj = self.pool.get('hr.training.category')
training_category_id = data['training_category_id']
training_category_id = (not training_category_id and
training_category_obj.browse(self.cr, self.uid, []) or
training_category_id)
self.cr.execute(
' select distinct c.id as course_id , c.name as course_name from hr_training_course as c where c.training_category_id in %s'
, (tuple(training_category_id),))
res = self.cr.dictfetchall()
return res
def _get_data(self, data, course_id):
date1 = data['date_from']
date2 = data['date_to']
side = data['type'] == '3' and 'inside' or 'outside'
self.year = date1 and mx.DateTime.Parser.DateTimeFromString(date1
).year or self.year
res = []
if date1 and date2:
self.cr.execute(
" select distinct emp.marital as marital, t.end_date as end,t.start_date as start,c.name as country,t.course_type as type,t.location as location,res.name as name from hr_employee_training t left join hr_employee_training_line line on (line.training_employee_id=t.id) left join hr_employee emp on (emp.id=line.employee_id) left join hr_job jop on (jop.id=emp.job_id) left join resource_resource res on (res.id=emp.resource_id) left join hr_training_course cou on(cou.id=t.course_id) left join res_country c on(t.country_id=c.id) where t.course_id = %s and t.type ='hr.approved.course' and t.training_place = %s and t.start_date >= %s and t.end_date <= %s "
, (tuple([course_id]), side, date1, date2))
elif date1 and not date2:
self.cr.execute(
" select distinct emp.marital as marital, t.end_date as end,t.start_date as start,c.name as country,t.course_type as type,t.location as location,res.name as name from hr_employee_training t left join hr_employee_training_line line on (line.training_employee_id=t.id) left join hr_employee emp on (emp.id=line.employee_id) left join hr_job jop on (jop.id=emp.job_id) left join resource_resource res on (res.id=emp.resource_id) left join hr_training_course cou on(cou.id=t.course_id) left join res_country c on(t.country_id=c.id) where t.course_id = %s and t.type ='hr.approved.course' and t.training_place = %s and t.start_date >= %s"
, (tuple([course_id]), side, date1))
elif date2 and not date1:
self.cr.execute(
" select distinct emp.marital as marital, t.end_date as end,t.start_date as start,c.name as country,t.course_type as type,t.location as location,res.name as name from hr_employee_training t left join hr_employee_training_line line on (line.training_employee_id=t.id) left join hr_employee emp on (emp.id=line.employee_id) left join hr_job jop on (jop.id=emp.job_id) left join resource_resource res on (res.id=emp.resource_id) left join hr_training_course cou on(cou.id=t.course_id) left join res_country c on(t.country_id=c.id) where t.course_id = %s and t.type ='hr.approved.course' and t.training_place = %s and t.end_date <= %s "
, (tuple([course_id]), side, date2))
else:
self.cr.execute(
" select distinct emp.marital as marital, t.end_date as end,t.start_date as start,c.name as country,t.course_type as type,t.location as location,res.name as name from hr_employee_training t left join hr_employee_training_line line on (line.training_employee_id=t.id) left join hr_employee emp on (emp.id=line.employee_id) left join hr_job jop on (jop.id=emp.job_id) left join resource_resource res on (res.id=emp.resource_id) left join hr_training_course cou on(cou.id=t.course_id) left join res_country c on(t.country_id=c.id) where t.course_id = %s and t.type ='hr.approved.course' and t.training_place = %s "
, (tuple([course_id]), side))
res = self.cr.dictfetchall()
return res
def _get_time(self):
return self.year
report_sxw.report_sxw('report.course.outside', 'hr.employee.training',
'addons/hr_ntc_custom/report/training.rml', parser=course_form, header=
False)
| import time
import datetime
import mx
from openerp.report import report_sxw
class course_form(report_sxw.rml_parse):
def __init__(self, cr, uid, name, context):
super(course_form, self).__init__(cr, uid, name, context)
self.localcontext.update({'time': time, 'time1': self._get_time,
'course': self._get_course, 'line': self._get_data, 'user':
self._get_user})
self.year = int(time.strftime('%Y'))
def _get_user(self, data, header=False):
if header:
return self.pool.get('res.company').browse(self.cr, self.uid,
data['form']['company_id'][0]).logo
else:
return self.pool.get('res.users').browse(self.cr, self.uid,
self.uid).name
def _get_course(self, data):
training_category_obj = self.pool.get('hr.training.category')
training_category_id = data['training_category_id']
training_category_id = (not training_category_id and
training_category_obj.browse(self.cr, self.uid, []) or
training_category_id)
self.cr.execute(
' select distinct c.id as course_id , c.name as course_name from hr_training_course as c where c.training_category_id in %s'
, (tuple(training_category_id),))
res = self.cr.dictfetchall()
return res
def _get_data(self, data, course_id):
date1 = data['date_from']
date2 = data['date_to']
side = data['type'] == '3' and 'inside' or 'outside'
self.year = date1 and mx.DateTime.Parser.DateTimeFromString(date1
).year or self.year
res = []
if date1 and date2:
self.cr.execute(
" select distinct emp.marital as marital, t.end_date as end,t.start_date as start,c.name as country,t.course_type as type,t.location as location,res.name as name from hr_employee_training t left join hr_employee_training_line line on (line.training_employee_id=t.id) left join hr_employee emp on (emp.id=line.employee_id) left join hr_job jop on (jop.id=emp.job_id) left join resource_resource res on (res.id=emp.resource_id) left join hr_training_course cou on(cou.id=t.course_id) left join res_country c on(t.country_id=c.id) where t.course_id = %s and t.type ='hr.approved.course' and t.training_place = %s and t.start_date >= %s and t.end_date <= %s "
, (tuple([course_id]), side, date1, date2))
elif date1 and not date2:
self.cr.execute(
" select distinct emp.marital as marital, t.end_date as end,t.start_date as start,c.name as country,t.course_type as type,t.location as location,res.name as name from hr_employee_training t left join hr_employee_training_line line on (line.training_employee_id=t.id) left join hr_employee emp on (emp.id=line.employee_id) left join hr_job jop on (jop.id=emp.job_id) left join resource_resource res on (res.id=emp.resource_id) left join hr_training_course cou on(cou.id=t.course_id) left join res_country c on(t.country_id=c.id) where t.course_id = %s and t.type ='hr.approved.course' and t.training_place = %s and t.start_date >= %s"
, (tuple([course_id]), side, date1))
elif date2 and not date1:
self.cr.execute(
" select distinct emp.marital as marital, t.end_date as end,t.start_date as start,c.name as country,t.course_type as type,t.location as location,res.name as name from hr_employee_training t left join hr_employee_training_line line on (line.training_employee_id=t.id) left join hr_employee emp on (emp.id=line.employee_id) left join hr_job jop on (jop.id=emp.job_id) left join resource_resource res on (res.id=emp.resource_id) left join hr_training_course cou on(cou.id=t.course_id) left join res_country c on(t.country_id=c.id) where t.course_id = %s and t.type ='hr.approved.course' and t.training_place = %s and t.end_date <= %s "
, (tuple([course_id]), side, date2))
else:
self.cr.execute(
" select distinct emp.marital as marital, t.end_date as end,t.start_date as start,c.name as country,t.course_type as type,t.location as location,res.name as name from hr_employee_training t left join hr_employee_training_line line on (line.training_employee_id=t.id) left join hr_employee emp on (emp.id=line.employee_id) left join hr_job jop on (jop.id=emp.job_id) left join resource_resource res on (res.id=emp.resource_id) left join hr_training_course cou on(cou.id=t.course_id) left join res_country c on(t.country_id=c.id) where t.course_id = %s and t.type ='hr.approved.course' and t.training_place = %s "
, (tuple([course_id]), side))
res = self.cr.dictfetchall()
return res
def _get_time(self):
return self.year
report_sxw.report_sxw('report.course.outside', 'hr.employee.training',
'addons/hr_ntc_custom/report/training.rml', parser=course_form, header=
False)
| import time
import datetime
import mx
from openerp.report import report_sxw
class course_form(report_sxw.rml_parse):
def __init__(self, cr, uid, name, context):
super(course_form, self).__init__(cr, uid, name, context)
self.localcontext.update({
'time': time,
'time1': self._get_time,
'course':self._get_course,
'line':self._get_data,
'user':self._get_user,
})
self.year = int(time.strftime('%Y'))
def _get_user(self,data, header=False):
if header:
return self.pool.get('res.company').browse(self.cr, self.uid, data['form']['company_id'][0]).logo
else:
return self.pool.get('res.users').browse(self.cr, self.uid, self.uid).name
def _get_course(self,data):
training_category_obj = self.pool.get('hr.training.category')
training_category_id = data['training_category_id']
training_category_id = not training_category_id and training_category_obj.browse(self.cr,self.uid,[]) or training_category_id
self.cr.execute(" select distinct c.id as course_id , c.name as course_name "\
"from hr_training_course as c "\
"where c.training_category_id in %s",(tuple(training_category_id),))
res = self.cr.dictfetchall()
return res
def _get_data(self, data,course_id):
date1 = data['date_from']
date2 = data['date_to']
side = data['type'] == '3' and 'inside' or 'outside'
self.year = date1 and mx.DateTime.Parser.DateTimeFromString(date1).year or self.year
res=[]
if date1 and date2:
self.cr.execute(" select distinct emp.marital as marital, "\
"t.end_date as end,"\
"t.start_date as start,"\
"c.name as country,"\
"t.course_type as type,"\
"t.location as location,"\
"res.name as name " \
"from hr_employee_training t "\
"left join hr_employee_training_line line on (line.training_employee_id=t.id) "\
"left join hr_employee emp on (emp.id=line.employee_id) "\
"left join hr_job jop on (jop.id=emp.job_id) "\
"left join resource_resource res on (res.id=emp.resource_id) "\
"left join hr_training_course cou on(cou.id=t.course_id) "\
"left join res_country c on(t.country_id=c.id) "\
"where t.course_id = %s and "\
"t.type ='hr.approved.course' and t.training_place = %s and "\
"t.start_date >= %s and t.end_date <= %s ",(tuple([course_id]),side,date1,date2))
elif date1 and not date2:
self.cr.execute(" select distinct emp.marital as marital, "\
"t.end_date as end,"\
"t.start_date as start,"\
"c.name as country,"\
"t.course_type as type,"\
"t.location as location,"\
"res.name as name " \
"from hr_employee_training t "\
"left join hr_employee_training_line line on (line.training_employee_id=t.id) "\
"left join hr_employee emp on (emp.id=line.employee_id) "\
"left join hr_job jop on (jop.id=emp.job_id) "\
"left join resource_resource res on (res.id=emp.resource_id) "\
"left join hr_training_course cou on(cou.id=t.course_id) "\
"left join res_country c on(t.country_id=c.id) "\
"where t.course_id = %s and "\
"t.type ='hr.approved.course' and t.training_place = %s and "\
"t.start_date >= %s",(tuple([course_id]),side,date1))
elif date2 and not date1:
self.cr.execute(" select distinct emp.marital as marital, "\
"t.end_date as end,"\
"t.start_date as start,"\
"c.name as country,"\
"t.course_type as type,"\
"t.location as location,"\
"res.name as name " \
"from hr_employee_training t "\
"left join hr_employee_training_line line on (line.training_employee_id=t.id) "\
"left join hr_employee emp on (emp.id=line.employee_id) "\
"left join hr_job jop on (jop.id=emp.job_id) "\
"left join resource_resource res on (res.id=emp.resource_id) "\
"left join hr_training_course cou on(cou.id=t.course_id) "\
"left join res_country c on(t.country_id=c.id) "\
"where t.course_id = %s and "\
"t.type ='hr.approved.course' and t.training_place = %s and "\
"t.end_date <= %s ",(tuple([course_id]),side,date2))
else:
self.cr.execute(" select distinct emp.marital as marital, "\
"t.end_date as end,"\
"t.start_date as start,"\
"c.name as country,"\
"t.course_type as type,"\
"t.location as location,"\
"res.name as name " \
"from hr_employee_training t "\
"left join hr_employee_training_line line on (line.training_employee_id=t.id) "\
"left join hr_employee emp on (emp.id=line.employee_id) "\
"left join hr_job jop on (jop.id=emp.job_id) "\
"left join resource_resource res on (res.id=emp.resource_id) "\
"left join hr_training_course cou on(cou.id=t.course_id) "\
"left join res_country c on(t.country_id=c.id) "\
"where t.course_id = %s and "\
"t.type ='hr.approved.course' and t.training_place = %s ",(tuple([course_id]),side))
res=self.cr.dictfetchall()
return res
def _get_time(self):
return self.year
report_sxw.report_sxw('report.course.outside', 'hr.employee.training', 'addons/hr_ntc_custom/report/training.rml' ,parser=course_form ,header=False)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| [
4,
6,
7,
8,
9
] |
2,078 | de12c6d78c0144978ffc651829364de16930b173 | <mask token>
class Detector(object):
<mask token>
def __init__(self, prototxt, caffemodel, gpu_id, dataset='coco', scale=
600, max_size=1000, transpose=(2, 0, 1), mean=[102.9801, 115.9465,
122.7717]):
if gpu_id < 0:
caffe.set_mode_cpu()
else:
caffe.set_mode_gpu()
caffe.set_device(gpu_id)
self.net = caffe.Net(prototxt, caffe.TEST, weights=caffemodel)
print('[{name}] Loaded network {model}'.format(name=self.__class__.
__name__, model=caffemodel))
self.scale = scale
self.max_size = max_size
self.transpose = transpose
self.mean = np.array(mean, dtype=np.float32)[None, None, :]
self.classes = CLASSES[dataset]
self.colormap = []
for i in range(len(self.classes)):
self.colormap.append(plt.get_cmap('hsv')(i / len(self.classes)))
def preprocess(self, im):
im = im.astype(np.float32) - self.mean
short_size, long_size = sorted(im.shape[:2])
factor = min(self.scale / short_size, self.max_size / long_size)
im = cv2.resize(im, None, None, fx=factor, fy=factor)
im = im.transpose(self.transpose)
info = np.array((im.shape[1], im.shape[2], factor), dtype=np.float32)
return im, info, factor
def detect(self, im):
im, info, factor = self.preprocess(im)
self.net.blobs['data'].reshape(1, *im.shape)
self.net.blobs['data'].data[0, ...] = im
self.net.blobs['im_info'].data[...] = info
dets = self.net.forward()['rcnn_out']
if dets.ndim != 2:
return np.empty((0, 6), dtype=np.float32)
else:
return dets
def demo(self, image):
im = cv2.imread(image)
timer = Timer()
timer.tic()
dets = self.detect(im)
timer.toc()
print('Detection took {:.3f}s for {:d} objects'.format(timer.
total_time, len(dets)))
return self.plot(im, dets)
def plot(self, im, dets, thresh=0, ax=None, linewidth=2.5):
if ax is None:
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
im = im[:, :, (2, 1, 0)]
ax.imshow(im.astype(np.uint8))
if len(dets) == 0:
return ax
print(dets.shape)
for det in dets:
score = det[1]
if score < thresh:
continue
class_id = int(det[0])
x, y = det[2:4]
w, h = det[4:6] - det[2:4]
rect = plt.Rectangle((x, y), w, h, fill=False, edgecolor=self.
colormap[class_id], linewidth=linewidth)
ax.add_patch(rect)
ax.text(x, y - 2, '{:s} {:.3f}'.format(self.classes[class_id],
score), bbox=dict(facecolor=self.colormap[class_id], alpha=
0.5), fontsize=12, color='white')
return ax
| <mask token>
class Detector(object):
"""Faster R-CNN Detector"""
def __init__(self, prototxt, caffemodel, gpu_id, dataset='coco', scale=
600, max_size=1000, transpose=(2, 0, 1), mean=[102.9801, 115.9465,
122.7717]):
if gpu_id < 0:
caffe.set_mode_cpu()
else:
caffe.set_mode_gpu()
caffe.set_device(gpu_id)
self.net = caffe.Net(prototxt, caffe.TEST, weights=caffemodel)
print('[{name}] Loaded network {model}'.format(name=self.__class__.
__name__, model=caffemodel))
self.scale = scale
self.max_size = max_size
self.transpose = transpose
self.mean = np.array(mean, dtype=np.float32)[None, None, :]
self.classes = CLASSES[dataset]
self.colormap = []
for i in range(len(self.classes)):
self.colormap.append(plt.get_cmap('hsv')(i / len(self.classes)))
def preprocess(self, im):
im = im.astype(np.float32) - self.mean
short_size, long_size = sorted(im.shape[:2])
factor = min(self.scale / short_size, self.max_size / long_size)
im = cv2.resize(im, None, None, fx=factor, fy=factor)
im = im.transpose(self.transpose)
info = np.array((im.shape[1], im.shape[2], factor), dtype=np.float32)
return im, info, factor
def detect(self, im):
im, info, factor = self.preprocess(im)
self.net.blobs['data'].reshape(1, *im.shape)
self.net.blobs['data'].data[0, ...] = im
self.net.blobs['im_info'].data[...] = info
dets = self.net.forward()['rcnn_out']
if dets.ndim != 2:
return np.empty((0, 6), dtype=np.float32)
else:
return dets
def demo(self, image):
im = cv2.imread(image)
timer = Timer()
timer.tic()
dets = self.detect(im)
timer.toc()
print('Detection took {:.3f}s for {:d} objects'.format(timer.
total_time, len(dets)))
return self.plot(im, dets)
def plot(self, im, dets, thresh=0, ax=None, linewidth=2.5):
if ax is None:
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
im = im[:, :, (2, 1, 0)]
ax.imshow(im.astype(np.uint8))
if len(dets) == 0:
return ax
print(dets.shape)
for det in dets:
score = det[1]
if score < thresh:
continue
class_id = int(det[0])
x, y = det[2:4]
w, h = det[4:6] - det[2:4]
rect = plt.Rectangle((x, y), w, h, fill=False, edgecolor=self.
colormap[class_id], linewidth=linewidth)
ax.add_patch(rect)
ax.text(x, y - 2, '{:s} {:.3f}'.format(self.classes[class_id],
score), bbox=dict(facecolor=self.colormap[class_id], alpha=
0.5), fontsize=12, color='white')
return ax
| <mask token>
__all__ = ['Detector']
CLASSES = dict(voc=('aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus',
'car', 'cat', 'chair', 'cow', 'diningtable', 'dog', 'horse',
'motorbike', 'person', 'pottedplant', 'sheep', 'sofa', 'train',
'tvmonitor'), coco=('person', 'bicycle', 'car', 'motorcycle',
'airplane', 'bus', 'train', 'truck', 'boat', 'traffic light',
'fire hydrant', 'stop sign', 'parking meter', 'bench', 'bird', 'cat',
'dog', 'horse', 'sheep', 'cow', 'elephant', 'bear', 'zebra', 'giraffe',
'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee', 'skis',
'snowboard', 'sports ball', 'kite', 'baseball bat', 'baseball glove',
'skateboard', 'surfboard', 'tennis racket', 'bottle', 'wine glass',
'cup', 'fork', 'knife', 'spoon', 'bowl', 'banana', 'apple', 'sandwich',
'orange', 'broccoli', 'carrot', 'hot dog', 'pizza', 'donut', 'cake',
'chair', 'couch', 'potted plant', 'bed', 'dining table', 'toilet', 'tv',
'laptop', 'mouse', 'remote', 'keyboard', 'cell phone', 'microwave',
'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', 'vase',
'scissors', 'teddy bear', 'hair drier', 'toothbrush'))
class Detector(object):
"""Faster R-CNN Detector"""
def __init__(self, prototxt, caffemodel, gpu_id, dataset='coco', scale=
600, max_size=1000, transpose=(2, 0, 1), mean=[102.9801, 115.9465,
122.7717]):
if gpu_id < 0:
caffe.set_mode_cpu()
else:
caffe.set_mode_gpu()
caffe.set_device(gpu_id)
self.net = caffe.Net(prototxt, caffe.TEST, weights=caffemodel)
print('[{name}] Loaded network {model}'.format(name=self.__class__.
__name__, model=caffemodel))
self.scale = scale
self.max_size = max_size
self.transpose = transpose
self.mean = np.array(mean, dtype=np.float32)[None, None, :]
self.classes = CLASSES[dataset]
self.colormap = []
for i in range(len(self.classes)):
self.colormap.append(plt.get_cmap('hsv')(i / len(self.classes)))
def preprocess(self, im):
im = im.astype(np.float32) - self.mean
short_size, long_size = sorted(im.shape[:2])
factor = min(self.scale / short_size, self.max_size / long_size)
im = cv2.resize(im, None, None, fx=factor, fy=factor)
im = im.transpose(self.transpose)
info = np.array((im.shape[1], im.shape[2], factor), dtype=np.float32)
return im, info, factor
def detect(self, im):
im, info, factor = self.preprocess(im)
self.net.blobs['data'].reshape(1, *im.shape)
self.net.blobs['data'].data[0, ...] = im
self.net.blobs['im_info'].data[...] = info
dets = self.net.forward()['rcnn_out']
if dets.ndim != 2:
return np.empty((0, 6), dtype=np.float32)
else:
return dets
def demo(self, image):
im = cv2.imread(image)
timer = Timer()
timer.tic()
dets = self.detect(im)
timer.toc()
print('Detection took {:.3f}s for {:d} objects'.format(timer.
total_time, len(dets)))
return self.plot(im, dets)
def plot(self, im, dets, thresh=0, ax=None, linewidth=2.5):
if ax is None:
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
im = im[:, :, (2, 1, 0)]
ax.imshow(im.astype(np.uint8))
if len(dets) == 0:
return ax
print(dets.shape)
for det in dets:
score = det[1]
if score < thresh:
continue
class_id = int(det[0])
x, y = det[2:4]
w, h = det[4:6] - det[2:4]
rect = plt.Rectangle((x, y), w, h, fill=False, edgecolor=self.
colormap[class_id], linewidth=linewidth)
ax.add_patch(rect)
ax.text(x, y - 2, '{:s} {:.3f}'.format(self.classes[class_id],
score), bbox=dict(facecolor=self.colormap[class_id], alpha=
0.5), fontsize=12, color='white')
return ax
| import sys
import cv2
import numpy as np
import matplotlib.pyplot as plt
from .caffe_path import caffe
from .timer import Timer
__all__ = ['Detector']
CLASSES = dict(voc=('aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus',
'car', 'cat', 'chair', 'cow', 'diningtable', 'dog', 'horse',
'motorbike', 'person', 'pottedplant', 'sheep', 'sofa', 'train',
'tvmonitor'), coco=('person', 'bicycle', 'car', 'motorcycle',
'airplane', 'bus', 'train', 'truck', 'boat', 'traffic light',
'fire hydrant', 'stop sign', 'parking meter', 'bench', 'bird', 'cat',
'dog', 'horse', 'sheep', 'cow', 'elephant', 'bear', 'zebra', 'giraffe',
'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee', 'skis',
'snowboard', 'sports ball', 'kite', 'baseball bat', 'baseball glove',
'skateboard', 'surfboard', 'tennis racket', 'bottle', 'wine glass',
'cup', 'fork', 'knife', 'spoon', 'bowl', 'banana', 'apple', 'sandwich',
'orange', 'broccoli', 'carrot', 'hot dog', 'pizza', 'donut', 'cake',
'chair', 'couch', 'potted plant', 'bed', 'dining table', 'toilet', 'tv',
'laptop', 'mouse', 'remote', 'keyboard', 'cell phone', 'microwave',
'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', 'vase',
'scissors', 'teddy bear', 'hair drier', 'toothbrush'))
class Detector(object):
"""Faster R-CNN Detector"""
def __init__(self, prototxt, caffemodel, gpu_id, dataset='coco', scale=
600, max_size=1000, transpose=(2, 0, 1), mean=[102.9801, 115.9465,
122.7717]):
if gpu_id < 0:
caffe.set_mode_cpu()
else:
caffe.set_mode_gpu()
caffe.set_device(gpu_id)
self.net = caffe.Net(prototxt, caffe.TEST, weights=caffemodel)
print('[{name}] Loaded network {model}'.format(name=self.__class__.
__name__, model=caffemodel))
self.scale = scale
self.max_size = max_size
self.transpose = transpose
self.mean = np.array(mean, dtype=np.float32)[None, None, :]
self.classes = CLASSES[dataset]
self.colormap = []
for i in range(len(self.classes)):
self.colormap.append(plt.get_cmap('hsv')(i / len(self.classes)))
def preprocess(self, im):
im = im.astype(np.float32) - self.mean
short_size, long_size = sorted(im.shape[:2])
factor = min(self.scale / short_size, self.max_size / long_size)
im = cv2.resize(im, None, None, fx=factor, fy=factor)
im = im.transpose(self.transpose)
info = np.array((im.shape[1], im.shape[2], factor), dtype=np.float32)
return im, info, factor
def detect(self, im):
im, info, factor = self.preprocess(im)
self.net.blobs['data'].reshape(1, *im.shape)
self.net.blobs['data'].data[0, ...] = im
self.net.blobs['im_info'].data[...] = info
dets = self.net.forward()['rcnn_out']
if dets.ndim != 2:
return np.empty((0, 6), dtype=np.float32)
else:
return dets
def demo(self, image):
im = cv2.imread(image)
timer = Timer()
timer.tic()
dets = self.detect(im)
timer.toc()
print('Detection took {:.3f}s for {:d} objects'.format(timer.
total_time, len(dets)))
return self.plot(im, dets)
def plot(self, im, dets, thresh=0, ax=None, linewidth=2.5):
if ax is None:
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
im = im[:, :, (2, 1, 0)]
ax.imshow(im.astype(np.uint8))
if len(dets) == 0:
return ax
print(dets.shape)
for det in dets:
score = det[1]
if score < thresh:
continue
class_id = int(det[0])
x, y = det[2:4]
w, h = det[4:6] - det[2:4]
rect = plt.Rectangle((x, y), w, h, fill=False, edgecolor=self.
colormap[class_id], linewidth=linewidth)
ax.add_patch(rect)
ax.text(x, y - 2, '{:s} {:.3f}'.format(self.classes[class_id],
score), bbox=dict(facecolor=self.colormap[class_id], alpha=
0.5), fontsize=12, color='white')
return ax
| import sys
import cv2
import numpy as np
import matplotlib.pyplot as plt
from .caffe_path import caffe
from .timer import Timer
__all__ = ['Detector']
# VOC Class list
CLASSES = dict(
voc = ('aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus', 'car',
'cat', 'chair', 'cow', 'diningtable', 'dog', 'horse', 'motorbike',
'person', 'pottedplant', 'sheep', 'sofa', 'train', 'tvmonitor'),
coco = ('person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', 'train',
'truck', 'boat', 'traffic light', 'fire hydrant', 'stop sign',
'parking meter', 'bench', 'bird', 'cat', 'dog', 'horse', 'sheep',
'cow', 'elephant', 'bear', 'zebra', 'giraffe', 'backpack', 'umbrella',
'handbag', 'tie', 'suitcase', 'frisbee', 'skis', 'snowboard',
'sports ball', 'kite', 'baseball bat', 'baseball glove', 'skateboard',
'surfboard', 'tennis racket', 'bottle', 'wine glass', 'cup', 'fork',
'knife', 'spoon', 'bowl', 'banana', 'apple', 'sandwich', 'orange',
'broccoli', 'carrot', 'hot dog', 'pizza', 'donut', 'cake', 'chair',
'couch', 'potted plant', 'bed', 'dining table', 'toilet', 'tv',
'laptop', 'mouse', 'remote', 'keyboard', 'cell phone', 'microwave',
'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', 'vase',
'scissors', 'teddy bear', 'hair drier', 'toothbrush')
)
class Detector(object):
"""Faster R-CNN Detector"""
def __init__(self, prototxt, caffemodel, gpu_id, dataset='coco',
scale=600, max_size=1000, transpose=(2, 0, 1),
mean=[102.9801, 115.9465, 122.7717]):
if gpu_id < 0:
caffe.set_mode_cpu()
else:
caffe.set_mode_gpu()
caffe.set_device(gpu_id)
self.net = caffe.Net(prototxt, caffe.TEST, weights=caffemodel)
print('[{name}] Loaded network {model}'.format(
name=self.__class__.__name__, model=caffemodel))
self.scale = scale
self.max_size = max_size
self.transpose = transpose
self.mean = np.array(mean, dtype=np.float32)[None,None,:]
self.classes = CLASSES[dataset]
# colormap for visualization
self.colormap = []
for i in range(len(self.classes)):
self.colormap.append(plt.get_cmap('hsv')(i / len(self.classes)))
def preprocess(self, im):
im = im.astype(np.float32) - self.mean
short_size, long_size = sorted(im.shape[:2])
factor = min(self.scale/short_size, self.max_size/long_size)
im = cv2.resize(im, None, None, fx=factor, fy=factor)
im = im.transpose(self.transpose)
info = np.array((im.shape[1], im.shape[2], factor), dtype=np.float32)
return im, info, factor
def detect(self, im):
im, info, factor = self.preprocess(im)
self.net.blobs['data'].reshape(1, *(im.shape))
self.net.blobs['data'].data[0,...] = im
self.net.blobs['im_info'].data[...] = info
dets = self.net.forward()['rcnn_out']
if dets.ndim != 2:
return np.empty((0,6), dtype=np.float32)
else:
return dets
def demo(self, image):
im = cv2.imread(image)
timer = Timer()
timer.tic()
dets = self.detect(im)
timer.toc()
print ('Detection took {:.3f}s for {:d} objects'.format(timer.total_time, len(dets)))
return self.plot(im, dets)
def plot(self, im, dets, thresh=0, ax=None, linewidth=2.5):
# create image axes
if ax is None:
fig = plt.figure()
ax = fig.add_subplot(1,1,1)
im = im[:, :, (2, 1, 0)] # to rgb
ax.imshow(im.astype(np.uint8))
if len(dets) == 0:
return ax
print(dets.shape)
for det in dets:
score = det[1]
if score < thresh:
continue
class_id = int(det[0])
x, y = det[2:4]
w, h = det[4:6] - det[2:4]
rect = plt.Rectangle((x, y), w, h, fill=False, edgecolor=self.colormap[class_id], linewidth=linewidth)
ax.add_patch(rect)
ax.text(x, y-2, '{:s} {:.3f}'.format(self.classes[class_id], score),
bbox=dict(facecolor=self.colormap[class_id], alpha=0.5), fontsize=12, color='white')
return ax | [
6,
7,
8,
9,
10
] |
2,079 | 0e2c71ab4f194af3c2ee65c2cbd6f36921eb587e | <mask token>
| default_app_config = 'assistant.additionalpage.apps.AdditionalPageAppConfig'
| default_app_config = "assistant.additionalpage.apps.AdditionalPageAppConfig"
| null | null | [
0,
1,
2
] |
2,080 | 97bff6eb0cd16c915180cb634e6bf30e17adfdef | <mask token>
| <mask token>
def SO3_to_R3(x_skew):
x = np.zeros((3, 1))
x[0, 0] = -1 * x_skew[1, 2]
x[1, 0] = x_skew[0, 2]
x[2, 0] = -1 * x_skew[0, 1]
return x
| import numpy as np
def SO3_to_R3(x_skew):
x = np.zeros((3, 1))
x[0, 0] = -1 * x_skew[1, 2]
x[1, 0] = x_skew[0, 2]
x[2, 0] = -1 * x_skew[0, 1]
return x
| null | null | [
0,
1,
2
] |
2,081 | 23066cd644826bcfef1ef41f154924ac89e12069 | <mask token>
def get(isamAppliance, check_mode=False, force=False):
"""
Get information on existing snapshots
"""
return isamAppliance.invoke_get('Retrieving snapshots', '/snapshots')
<mask token>
def search(isamAppliance, comment, check_mode=False, force=False):
"""
Retrieve snapshots with given comment contained
"""
ret_obj = isamAppliance.create_return_object()
ret_obj_all = get(isamAppliance)
for obj in ret_obj_all['data']:
if comment in obj['comment']:
logger.debug('Snapshot comment "{0}" has this string "{1}" in it.'
.format(obj['comment'], comment))
if ret_obj['data'] == {}:
ret_obj['data'] = [obj['id']]
else:
ret_obj['data'].append(obj['id'])
return ret_obj
<mask token>
def _check(isamAppliance, comment='', id=None):
"""
Check if the last created snapshot has the exact same comment or id exists
:param isamAppliance:
:param comment:
:return:
"""
ret_obj = get(isamAppliance)
if id != None:
for snaps in ret_obj['data']:
if snaps['id'] == id:
logger.debug('Found id: {}'.format(id))
return True
else:
for snaps in ret_obj['data']:
if snaps['comment'] == comment:
logger.debug('Found comment: {}'.format(comment))
return True
return False
def delete(isamAppliance, id=None, comment=None, check_mode=False, force=False
):
"""
Delete snapshot(s) - check id before processing comment. id can be a list
"""
ids = []
delete_flag = False
if isinstance(id, list):
for i in id:
if _check(isamAppliance, id=i) is True:
delete_flag = True
ids.append(i)
elif _check(isamAppliance, id=id) is True:
delete_flag = True
ids.append(id)
elif comment is not None:
ret_obj = search(isamAppliance, comment=comment)
if ret_obj != {} and ret_obj['data'] != {}:
delete_flag = True
ids = ret_obj['data']
logger.info('Deleting the following list of IDs: {}'.format(ids))
if force is True or delete_flag is True:
if check_mode is True:
return isamAppliance.create_return_object(changed=True)
else:
return isamAppliance.invoke_delete('Deleting snapshot',
'/snapshots/multi_destroy?record_ids=' + ','.join(ids))
return isamAppliance.create_return_object()
<mask token>
def modify(isamAppliance, id, comment, check_mode=False, force=False):
"""
Modify the snapshot comment
"""
if force is True or _check(isamAppliance, id=id) is True:
if check_mode is True:
return isamAppliance.create_return_object(changed=True)
else:
return isamAppliance.invoke_put('Modifying snapshot',
'/snapshots/' + id, {'comment': comment})
return isamAppliance.create_return_object()
def apply(isamAppliance, id=None, comment=None, check_mode=False, force=False):
"""
Apply a snapshot
There is a priority in the parameter to be used for snapshot applying: id > comment
"""
apply_flag = False
if id is not None:
apply_flag = _check(isamAppliance, id=id)
elif comment is not None:
ret_obj = search(isamAppliance, comment)
if ret_obj['data'] != {}:
if len(ret_obj['data']) == 1:
id = ret_obj['data'][0]
apply_flag = True
else:
logger.warn(
'There are multiple files with matching comments. Only one snapshot at a time can be applied !'
)
else:
logger.warn('No snapshot detail provided - no id nor comment.')
if force is True or apply_flag is True:
if check_mode is True:
return isamAppliance.create_return_object(changed=True)
else:
return isamAppliance.invoke_post_snapshot_id('Applying snapshot',
'/snapshots/apply/' + id, {'snapshot_id': id})
return isamAppliance.create_return_object()
def download(isamAppliance, filename, id=None, comment=None, check_mode=
False, force=False):
"""
Download one snapshot file to a zip file.
Multiple file download is now supported. Simply pass a list of id.
For backwards compatibility the id parameter and old behaviour is checked at the beginning.
"""
ids = []
download_flag = False
if isinstance(id, list):
for i in id:
if _check(isamAppliance, id=i) is True:
download_flag = True
ids.append(i)
elif _check(isamAppliance, id=id) is True:
download_flag = True
ids.append(id)
elif comment is not None:
ret_obj = search(isamAppliance, comment=comment)
if ret_obj != {} and ret_obj['data'] != {}:
download_flag = True
ids = ret_obj['data']
logger.info('Downloading the following list of IDs: {}'.format(ids))
if force is True or os.path.exists(filename
) is False and download_flag is True:
if check_mode is False:
return isamAppliance.invoke_get_file(
'Downloading multiple snapshots',
'/snapshots/download?record_ids=' + ','.join(ids), filename)
return isamAppliance.create_return_object()
<mask token>
def apply_latest(isamAppliance, check_mode=False, force=False):
"""
Apply latest snapshot file (revert to latest)
"""
ret_obj = get(isamAppliance)
snaps = min(ret_obj['data'], key=lambda snap: snap['index'])
id = snaps['id']
return apply(isamAppliance, id, check_mode, force)
def upload(isamAppliance, file, comment=None, check_mode=False, force=False):
"""
Upload Snapshot file
"""
if comment is None:
import zipfile
zFile = zipfile.ZipFile(file)
if 'Comment' in zFile.namelist():
comment = zFile.open('Comment')
if force is True or _check(isamAppliance, comment=comment) is False:
if check_mode is True:
return isamAppliance.create_return_object(changed=True)
else:
return isamAppliance.invoke_post_files('Upload Snapshot',
'/snapshots', [{'file_formfield': 'uploadedfile',
'filename': file, 'mimetype': 'application/octet-stream'}],
{'comment': comment if comment != None else ''},
json_response=False)
return isamAppliance.create_return_object()
<mask token>
| <mask token>
def get(isamAppliance, check_mode=False, force=False):
"""
Get information on existing snapshots
"""
return isamAppliance.invoke_get('Retrieving snapshots', '/snapshots')
def get_latest(isamAppliance, check_mode=False, force=False):
"""
Retrieve id of latest found snapshot
"""
ret_obj_id = isamAppliance.create_return_object()
ret_obj = get(isamAppliance)
snaps = min(ret_obj['data'], key=lambda snap: snap['index'])
ret_obj_id['data'] = snaps['id']
return ret_obj_id
def search(isamAppliance, comment, check_mode=False, force=False):
"""
Retrieve snapshots with given comment contained
"""
ret_obj = isamAppliance.create_return_object()
ret_obj_all = get(isamAppliance)
for obj in ret_obj_all['data']:
if comment in obj['comment']:
logger.debug('Snapshot comment "{0}" has this string "{1}" in it.'
.format(obj['comment'], comment))
if ret_obj['data'] == {}:
ret_obj['data'] = [obj['id']]
else:
ret_obj['data'].append(obj['id'])
return ret_obj
def create(isamAppliance, comment='', check_mode=False, force=False):
"""
Create a new snapshot
"""
if force is True or _check(isamAppliance, comment=comment) is False:
if check_mode is True:
return isamAppliance.create_return_object(changed=True)
else:
return isamAppliance.invoke_post('Creating snapshot',
'/snapshots', {'comment': comment})
return isamAppliance.create_return_object()
def _check(isamAppliance, comment='', id=None):
"""
Check if the last created snapshot has the exact same comment or id exists
:param isamAppliance:
:param comment:
:return:
"""
ret_obj = get(isamAppliance)
if id != None:
for snaps in ret_obj['data']:
if snaps['id'] == id:
logger.debug('Found id: {}'.format(id))
return True
else:
for snaps in ret_obj['data']:
if snaps['comment'] == comment:
logger.debug('Found comment: {}'.format(comment))
return True
return False
def delete(isamAppliance, id=None, comment=None, check_mode=False, force=False
):
"""
Delete snapshot(s) - check id before processing comment. id can be a list
"""
ids = []
delete_flag = False
if isinstance(id, list):
for i in id:
if _check(isamAppliance, id=i) is True:
delete_flag = True
ids.append(i)
elif _check(isamAppliance, id=id) is True:
delete_flag = True
ids.append(id)
elif comment is not None:
ret_obj = search(isamAppliance, comment=comment)
if ret_obj != {} and ret_obj['data'] != {}:
delete_flag = True
ids = ret_obj['data']
logger.info('Deleting the following list of IDs: {}'.format(ids))
if force is True or delete_flag is True:
if check_mode is True:
return isamAppliance.create_return_object(changed=True)
else:
return isamAppliance.invoke_delete('Deleting snapshot',
'/snapshots/multi_destroy?record_ids=' + ','.join(ids))
return isamAppliance.create_return_object()
def multi_delete(isamAppliance, ids=[], comment=None, check_mode=False,
force=False):
"""
Delete multiple snapshots based on id or comment
"""
if comment != None:
ret_obj = search(isamAppliance, comment=comment)
if ret_obj['data'] == {}:
return isamAppliance.create_return_object(changed=False)
elif ids == []:
ids = ret_obj['data']
else:
for snaps in ret_obj['data']:
ids.append(snaps)
if check_mode is True:
return isamAppliance.create_return_object(changed=True)
else:
return isamAppliance.invoke_delete('Deleting one or multiple snapshots'
, '/snapshots/multi_destroy?record_ids=' + ','.join(ids))
return isamAppliance.create_return_object()
def modify(isamAppliance, id, comment, check_mode=False, force=False):
"""
Modify the snapshot comment
"""
if force is True or _check(isamAppliance, id=id) is True:
if check_mode is True:
return isamAppliance.create_return_object(changed=True)
else:
return isamAppliance.invoke_put('Modifying snapshot',
'/snapshots/' + id, {'comment': comment})
return isamAppliance.create_return_object()
def apply(isamAppliance, id=None, comment=None, check_mode=False, force=False):
"""
Apply a snapshot
There is a priority in the parameter to be used for snapshot applying: id > comment
"""
apply_flag = False
if id is not None:
apply_flag = _check(isamAppliance, id=id)
elif comment is not None:
ret_obj = search(isamAppliance, comment)
if ret_obj['data'] != {}:
if len(ret_obj['data']) == 1:
id = ret_obj['data'][0]
apply_flag = True
else:
logger.warn(
'There are multiple files with matching comments. Only one snapshot at a time can be applied !'
)
else:
logger.warn('No snapshot detail provided - no id nor comment.')
if force is True or apply_flag is True:
if check_mode is True:
return isamAppliance.create_return_object(changed=True)
else:
return isamAppliance.invoke_post_snapshot_id('Applying snapshot',
'/snapshots/apply/' + id, {'snapshot_id': id})
return isamAppliance.create_return_object()
def download(isamAppliance, filename, id=None, comment=None, check_mode=
False, force=False):
"""
Download one snapshot file to a zip file.
Multiple file download is now supported. Simply pass a list of id.
For backwards compatibility the id parameter and old behaviour is checked at the beginning.
"""
ids = []
download_flag = False
if isinstance(id, list):
for i in id:
if _check(isamAppliance, id=i) is True:
download_flag = True
ids.append(i)
elif _check(isamAppliance, id=id) is True:
download_flag = True
ids.append(id)
elif comment is not None:
ret_obj = search(isamAppliance, comment=comment)
if ret_obj != {} and ret_obj['data'] != {}:
download_flag = True
ids = ret_obj['data']
logger.info('Downloading the following list of IDs: {}'.format(ids))
if force is True or os.path.exists(filename
) is False and download_flag is True:
if check_mode is False:
return isamAppliance.invoke_get_file(
'Downloading multiple snapshots',
'/snapshots/download?record_ids=' + ','.join(ids), filename)
return isamAppliance.create_return_object()
<mask token>
def apply_latest(isamAppliance, check_mode=False, force=False):
"""
Apply latest snapshot file (revert to latest)
"""
ret_obj = get(isamAppliance)
snaps = min(ret_obj['data'], key=lambda snap: snap['index'])
id = snaps['id']
return apply(isamAppliance, id, check_mode, force)
def upload(isamAppliance, file, comment=None, check_mode=False, force=False):
"""
Upload Snapshot file
"""
if comment is None:
import zipfile
zFile = zipfile.ZipFile(file)
if 'Comment' in zFile.namelist():
comment = zFile.open('Comment')
if force is True or _check(isamAppliance, comment=comment) is False:
if check_mode is True:
return isamAppliance.create_return_object(changed=True)
else:
return isamAppliance.invoke_post_files('Upload Snapshot',
'/snapshots', [{'file_formfield': 'uploadedfile',
'filename': file, 'mimetype': 'application/octet-stream'}],
{'comment': comment if comment != None else ''},
json_response=False)
return isamAppliance.create_return_object()
<mask token>
| <mask token>
def get(isamAppliance, check_mode=False, force=False):
"""
Get information on existing snapshots
"""
return isamAppliance.invoke_get('Retrieving snapshots', '/snapshots')
def get_latest(isamAppliance, check_mode=False, force=False):
"""
Retrieve id of latest found snapshot
"""
ret_obj_id = isamAppliance.create_return_object()
ret_obj = get(isamAppliance)
snaps = min(ret_obj['data'], key=lambda snap: snap['index'])
ret_obj_id['data'] = snaps['id']
return ret_obj_id
def search(isamAppliance, comment, check_mode=False, force=False):
"""
Retrieve snapshots with given comment contained
"""
ret_obj = isamAppliance.create_return_object()
ret_obj_all = get(isamAppliance)
for obj in ret_obj_all['data']:
if comment in obj['comment']:
logger.debug('Snapshot comment "{0}" has this string "{1}" in it.'
.format(obj['comment'], comment))
if ret_obj['data'] == {}:
ret_obj['data'] = [obj['id']]
else:
ret_obj['data'].append(obj['id'])
return ret_obj
def create(isamAppliance, comment='', check_mode=False, force=False):
"""
Create a new snapshot
"""
if force is True or _check(isamAppliance, comment=comment) is False:
if check_mode is True:
return isamAppliance.create_return_object(changed=True)
else:
return isamAppliance.invoke_post('Creating snapshot',
'/snapshots', {'comment': comment})
return isamAppliance.create_return_object()
def _check(isamAppliance, comment='', id=None):
"""
Check if the last created snapshot has the exact same comment or id exists
:param isamAppliance:
:param comment:
:return:
"""
ret_obj = get(isamAppliance)
if id != None:
for snaps in ret_obj['data']:
if snaps['id'] == id:
logger.debug('Found id: {}'.format(id))
return True
else:
for snaps in ret_obj['data']:
if snaps['comment'] == comment:
logger.debug('Found comment: {}'.format(comment))
return True
return False
def delete(isamAppliance, id=None, comment=None, check_mode=False, force=False
):
"""
Delete snapshot(s) - check id before processing comment. id can be a list
"""
ids = []
delete_flag = False
if isinstance(id, list):
for i in id:
if _check(isamAppliance, id=i) is True:
delete_flag = True
ids.append(i)
elif _check(isamAppliance, id=id) is True:
delete_flag = True
ids.append(id)
elif comment is not None:
ret_obj = search(isamAppliance, comment=comment)
if ret_obj != {} and ret_obj['data'] != {}:
delete_flag = True
ids = ret_obj['data']
logger.info('Deleting the following list of IDs: {}'.format(ids))
if force is True or delete_flag is True:
if check_mode is True:
return isamAppliance.create_return_object(changed=True)
else:
return isamAppliance.invoke_delete('Deleting snapshot',
'/snapshots/multi_destroy?record_ids=' + ','.join(ids))
return isamAppliance.create_return_object()
def multi_delete(isamAppliance, ids=[], comment=None, check_mode=False,
force=False):
"""
Delete multiple snapshots based on id or comment
"""
if comment != None:
ret_obj = search(isamAppliance, comment=comment)
if ret_obj['data'] == {}:
return isamAppliance.create_return_object(changed=False)
elif ids == []:
ids = ret_obj['data']
else:
for snaps in ret_obj['data']:
ids.append(snaps)
if check_mode is True:
return isamAppliance.create_return_object(changed=True)
else:
return isamAppliance.invoke_delete('Deleting one or multiple snapshots'
, '/snapshots/multi_destroy?record_ids=' + ','.join(ids))
return isamAppliance.create_return_object()
def modify(isamAppliance, id, comment, check_mode=False, force=False):
"""
Modify the snapshot comment
"""
if force is True or _check(isamAppliance, id=id) is True:
if check_mode is True:
return isamAppliance.create_return_object(changed=True)
else:
return isamAppliance.invoke_put('Modifying snapshot',
'/snapshots/' + id, {'comment': comment})
return isamAppliance.create_return_object()
def apply(isamAppliance, id=None, comment=None, check_mode=False, force=False):
"""
Apply a snapshot
There is a priority in the parameter to be used for snapshot applying: id > comment
"""
apply_flag = False
if id is not None:
apply_flag = _check(isamAppliance, id=id)
elif comment is not None:
ret_obj = search(isamAppliance, comment)
if ret_obj['data'] != {}:
if len(ret_obj['data']) == 1:
id = ret_obj['data'][0]
apply_flag = True
else:
logger.warn(
'There are multiple files with matching comments. Only one snapshot at a time can be applied !'
)
else:
logger.warn('No snapshot detail provided - no id nor comment.')
if force is True or apply_flag is True:
if check_mode is True:
return isamAppliance.create_return_object(changed=True)
else:
return isamAppliance.invoke_post_snapshot_id('Applying snapshot',
'/snapshots/apply/' + id, {'snapshot_id': id})
return isamAppliance.create_return_object()
def download(isamAppliance, filename, id=None, comment=None, check_mode=
False, force=False):
"""
Download one snapshot file to a zip file.
Multiple file download is now supported. Simply pass a list of id.
For backwards compatibility the id parameter and old behaviour is checked at the beginning.
"""
ids = []
download_flag = False
if isinstance(id, list):
for i in id:
if _check(isamAppliance, id=i) is True:
download_flag = True
ids.append(i)
elif _check(isamAppliance, id=id) is True:
download_flag = True
ids.append(id)
elif comment is not None:
ret_obj = search(isamAppliance, comment=comment)
if ret_obj != {} and ret_obj['data'] != {}:
download_flag = True
ids = ret_obj['data']
logger.info('Downloading the following list of IDs: {}'.format(ids))
if force is True or os.path.exists(filename
) is False and download_flag is True:
if check_mode is False:
return isamAppliance.invoke_get_file(
'Downloading multiple snapshots',
'/snapshots/download?record_ids=' + ','.join(ids), filename)
return isamAppliance.create_return_object()
<mask token>
def apply_latest(isamAppliance, check_mode=False, force=False):
"""
Apply latest snapshot file (revert to latest)
"""
ret_obj = get(isamAppliance)
snaps = min(ret_obj['data'], key=lambda snap: snap['index'])
id = snaps['id']
return apply(isamAppliance, id, check_mode, force)
def upload(isamAppliance, file, comment=None, check_mode=False, force=False):
"""
Upload Snapshot file
"""
if comment is None:
import zipfile
zFile = zipfile.ZipFile(file)
if 'Comment' in zFile.namelist():
comment = zFile.open('Comment')
if force is True or _check(isamAppliance, comment=comment) is False:
if check_mode is True:
return isamAppliance.create_return_object(changed=True)
else:
return isamAppliance.invoke_post_files('Upload Snapshot',
'/snapshots', [{'file_formfield': 'uploadedfile',
'filename': file, 'mimetype': 'application/octet-stream'}],
{'comment': comment if comment != None else ''},
json_response=False)
return isamAppliance.create_return_object()
def compare(isamAppliance1, isamAppliance2):
"""
Compare list of snapshots between 2 appliances
"""
ret_obj1 = get(isamAppliance1)
ret_obj2 = get(isamAppliance2)
for snapshot in ret_obj1['data']:
del snapshot['id']
del snapshot['filename']
for snapshot in ret_obj2['data']:
del snapshot['id']
del snapshot['filename']
return ibmsecurity.utilities.tools.json_compare(ret_obj1, ret_obj2,
deleted_keys=['id', 'filename'])
| <mask token>
def get(isamAppliance, check_mode=False, force=False):
"""
Get information on existing snapshots
"""
return isamAppliance.invoke_get('Retrieving snapshots', '/snapshots')
def get_latest(isamAppliance, check_mode=False, force=False):
"""
Retrieve id of latest found snapshot
"""
ret_obj_id = isamAppliance.create_return_object()
ret_obj = get(isamAppliance)
snaps = min(ret_obj['data'], key=lambda snap: snap['index'])
ret_obj_id['data'] = snaps['id']
return ret_obj_id
def search(isamAppliance, comment, check_mode=False, force=False):
"""
Retrieve snapshots with given comment contained
"""
ret_obj = isamAppliance.create_return_object()
ret_obj_all = get(isamAppliance)
for obj in ret_obj_all['data']:
if comment in obj['comment']:
logger.debug('Snapshot comment "{0}" has this string "{1}" in it.'
.format(obj['comment'], comment))
if ret_obj['data'] == {}:
ret_obj['data'] = [obj['id']]
else:
ret_obj['data'].append(obj['id'])
return ret_obj
def create(isamAppliance, comment='', check_mode=False, force=False):
"""
Create a new snapshot
"""
if force is True or _check(isamAppliance, comment=comment) is False:
if check_mode is True:
return isamAppliance.create_return_object(changed=True)
else:
return isamAppliance.invoke_post('Creating snapshot',
'/snapshots', {'comment': comment})
return isamAppliance.create_return_object()
def _check(isamAppliance, comment='', id=None):
"""
Check if the last created snapshot has the exact same comment or id exists
:param isamAppliance:
:param comment:
:return:
"""
ret_obj = get(isamAppliance)
if id != None:
for snaps in ret_obj['data']:
if snaps['id'] == id:
logger.debug('Found id: {}'.format(id))
return True
else:
for snaps in ret_obj['data']:
if snaps['comment'] == comment:
logger.debug('Found comment: {}'.format(comment))
return True
return False
def delete(isamAppliance, id=None, comment=None, check_mode=False, force=False
):
"""
Delete snapshot(s) - check id before processing comment. id can be a list
"""
ids = []
delete_flag = False
if isinstance(id, list):
for i in id:
if _check(isamAppliance, id=i) is True:
delete_flag = True
ids.append(i)
elif _check(isamAppliance, id=id) is True:
delete_flag = True
ids.append(id)
elif comment is not None:
ret_obj = search(isamAppliance, comment=comment)
if ret_obj != {} and ret_obj['data'] != {}:
delete_flag = True
ids = ret_obj['data']
logger.info('Deleting the following list of IDs: {}'.format(ids))
if force is True or delete_flag is True:
if check_mode is True:
return isamAppliance.create_return_object(changed=True)
else:
return isamAppliance.invoke_delete('Deleting snapshot',
'/snapshots/multi_destroy?record_ids=' + ','.join(ids))
return isamAppliance.create_return_object()
def multi_delete(isamAppliance, ids=[], comment=None, check_mode=False,
force=False):
"""
Delete multiple snapshots based on id or comment
"""
if comment != None:
ret_obj = search(isamAppliance, comment=comment)
if ret_obj['data'] == {}:
return isamAppliance.create_return_object(changed=False)
elif ids == []:
ids = ret_obj['data']
else:
for snaps in ret_obj['data']:
ids.append(snaps)
if check_mode is True:
return isamAppliance.create_return_object(changed=True)
else:
return isamAppliance.invoke_delete('Deleting one or multiple snapshots'
, '/snapshots/multi_destroy?record_ids=' + ','.join(ids))
return isamAppliance.create_return_object()
def modify(isamAppliance, id, comment, check_mode=False, force=False):
"""
Modify the snapshot comment
"""
if force is True or _check(isamAppliance, id=id) is True:
if check_mode is True:
return isamAppliance.create_return_object(changed=True)
else:
return isamAppliance.invoke_put('Modifying snapshot',
'/snapshots/' + id, {'comment': comment})
return isamAppliance.create_return_object()
def apply(isamAppliance, id=None, comment=None, check_mode=False, force=False):
"""
Apply a snapshot
There is a priority in the parameter to be used for snapshot applying: id > comment
"""
apply_flag = False
if id is not None:
apply_flag = _check(isamAppliance, id=id)
elif comment is not None:
ret_obj = search(isamAppliance, comment)
if ret_obj['data'] != {}:
if len(ret_obj['data']) == 1:
id = ret_obj['data'][0]
apply_flag = True
else:
logger.warn(
'There are multiple files with matching comments. Only one snapshot at a time can be applied !'
)
else:
logger.warn('No snapshot detail provided - no id nor comment.')
if force is True or apply_flag is True:
if check_mode is True:
return isamAppliance.create_return_object(changed=True)
else:
return isamAppliance.invoke_post_snapshot_id('Applying snapshot',
'/snapshots/apply/' + id, {'snapshot_id': id})
return isamAppliance.create_return_object()
def download(isamAppliance, filename, id=None, comment=None, check_mode=
False, force=False):
"""
Download one snapshot file to a zip file.
Multiple file download is now supported. Simply pass a list of id.
For backwards compatibility the id parameter and old behaviour is checked at the beginning.
"""
ids = []
download_flag = False
if isinstance(id, list):
for i in id:
if _check(isamAppliance, id=i) is True:
download_flag = True
ids.append(i)
elif _check(isamAppliance, id=id) is True:
download_flag = True
ids.append(id)
elif comment is not None:
ret_obj = search(isamAppliance, comment=comment)
if ret_obj != {} and ret_obj['data'] != {}:
download_flag = True
ids = ret_obj['data']
logger.info('Downloading the following list of IDs: {}'.format(ids))
if force is True or os.path.exists(filename
) is False and download_flag is True:
if check_mode is False:
return isamAppliance.invoke_get_file(
'Downloading multiple snapshots',
'/snapshots/download?record_ids=' + ','.join(ids), filename)
return isamAppliance.create_return_object()
def download_latest(isamAppliance, dir='.', check_mode=False, force=False):
"""
Download latest snapshot file to a zip file.
"""
ret_obj = get(isamAppliance)
snaps = min(ret_obj['data'], key=lambda snap: snap['index'])
id = snaps['id']
file = snaps['filename']
filename = os.path.join(dir, file)
return download(isamAppliance, filename, id, check_mode, force)
def apply_latest(isamAppliance, check_mode=False, force=False):
"""
Apply latest snapshot file (revert to latest)
"""
ret_obj = get(isamAppliance)
snaps = min(ret_obj['data'], key=lambda snap: snap['index'])
id = snaps['id']
return apply(isamAppliance, id, check_mode, force)
def upload(isamAppliance, file, comment=None, check_mode=False, force=False):
"""
Upload Snapshot file
"""
if comment is None:
import zipfile
zFile = zipfile.ZipFile(file)
if 'Comment' in zFile.namelist():
comment = zFile.open('Comment')
if force is True or _check(isamAppliance, comment=comment) is False:
if check_mode is True:
return isamAppliance.create_return_object(changed=True)
else:
return isamAppliance.invoke_post_files('Upload Snapshot',
'/snapshots', [{'file_formfield': 'uploadedfile',
'filename': file, 'mimetype': 'application/octet-stream'}],
{'comment': comment if comment != None else ''},
json_response=False)
return isamAppliance.create_return_object()
def compare(isamAppliance1, isamAppliance2):
"""
Compare list of snapshots between 2 appliances
"""
ret_obj1 = get(isamAppliance1)
ret_obj2 = get(isamAppliance2)
for snapshot in ret_obj1['data']:
del snapshot['id']
del snapshot['filename']
for snapshot in ret_obj2['data']:
del snapshot['id']
del snapshot['filename']
return ibmsecurity.utilities.tools.json_compare(ret_obj1, ret_obj2,
deleted_keys=['id', 'filename'])
| import logging
import ibmsecurity.utilities.tools
import os.path
logger = logging.getLogger(__name__)
def get(isamAppliance, check_mode=False, force=False):
"""
Get information on existing snapshots
"""
return isamAppliance.invoke_get("Retrieving snapshots", "/snapshots")
def get_latest(isamAppliance, check_mode=False, force=False):
"""
Retrieve id of latest found snapshot
"""
ret_obj_id = isamAppliance.create_return_object()
ret_obj = get(isamAppliance)
# Get snapshot with lowest 'id' value - that will be latest one
snaps = min(ret_obj['data'], key=lambda snap: snap['index'])
ret_obj_id['data'] = snaps['id']
return ret_obj_id
def search(isamAppliance, comment, check_mode=False, force=False):
"""
Retrieve snapshots with given comment contained
"""
ret_obj = isamAppliance.create_return_object()
ret_obj_all = get(isamAppliance)
for obj in ret_obj_all['data']:
if comment in obj['comment']:
logger.debug("Snapshot comment \"{0}\" has this string \"{1}\" in it.".format(obj['comment'], comment))
if ret_obj['data'] == {}:
ret_obj['data'] = [obj['id']]
else:
ret_obj['data'].append(obj['id'])
return ret_obj
def create(isamAppliance, comment='', check_mode=False, force=False):
"""
Create a new snapshot
"""
if force is True or _check(isamAppliance, comment=comment) is False:
if check_mode is True:
return isamAppliance.create_return_object(changed=True)
else:
return isamAppliance.invoke_post("Creating snapshot", "/snapshots",
{
'comment': comment
})
return isamAppliance.create_return_object()
def _check(isamAppliance, comment='', id=None):
"""
Check if the last created snapshot has the exact same comment or id exists
:param isamAppliance:
:param comment:
:return:
"""
ret_obj = get(isamAppliance)
if id != None:
for snaps in ret_obj['data']:
if snaps['id'] == id:
logger.debug("Found id: {}".format(id))
return True
else:
for snaps in ret_obj['data']:
if snaps['comment'] == comment:
logger.debug("Found comment: {}".format(comment))
return True
return False
def delete(isamAppliance, id=None, comment=None, check_mode=False, force=False):
"""
Delete snapshot(s) - check id before processing comment. id can be a list
"""
ids = []
delete_flag = False
if (isinstance(id, list)):
for i in id:
if _check(isamAppliance, id=i) is True:
delete_flag = True
ids.append(i)
elif (_check(isamAppliance, id=id) is True):
delete_flag = True
ids.append(id)
elif (comment is not None):
ret_obj = search(isamAppliance, comment=comment)
if ret_obj != {} and ret_obj['data'] != {}:
delete_flag = True
ids = ret_obj['data']
logger.info("Deleting the following list of IDs: {}".format(ids))
if force is True or delete_flag is True:
if check_mode is True:
return isamAppliance.create_return_object(changed=True)
else:
return isamAppliance.invoke_delete("Deleting snapshot",
"/snapshots/multi_destroy?record_ids=" + ",".join(ids))
return isamAppliance.create_return_object()
def multi_delete(isamAppliance, ids=[], comment=None, check_mode=False, force=False):
"""
Delete multiple snapshots based on id or comment
"""
if comment != None:
ret_obj = search(isamAppliance, comment=comment)
if ret_obj['data'] == {}:
return isamAppliance.create_return_object(changed=False)
else:
if ids == []:
ids = ret_obj['data']
else:
for snaps in ret_obj['data']:
ids.append(snaps)
if check_mode is True:
return isamAppliance.create_return_object(changed=True)
else:
return isamAppliance.invoke_delete("Deleting one or multiple snapshots", "/snapshots/multi_destroy?record_ids=" + ",".join(ids))
return isamAppliance.create_return_object()
def modify(isamAppliance, id, comment, check_mode=False, force=False):
"""
Modify the snapshot comment
"""
if force is True or _check(isamAppliance, id=id) is True:
if check_mode is True:
return isamAppliance.create_return_object(changed=True)
else:
return isamAppliance.invoke_put("Modifying snapshot", "/snapshots/" + id,
{
'comment': comment
})
return isamAppliance.create_return_object()
def apply(isamAppliance, id=None, comment=None, check_mode=False, force=False):
"""
Apply a snapshot
There is a priority in the parameter to be used for snapshot applying: id > comment
"""
apply_flag = False
if id is not None:
apply_flag = _check(isamAppliance, id=id)
elif comment is not None:
ret_obj = search(isamAppliance, comment)
if ret_obj['data'] != {}:
if len(ret_obj['data']) == 1:
id = ret_obj['data'][0]
apply_flag = True
else:
logger.warn(
"There are multiple files with matching comments. Only one snapshot at a time can be applied !")
else:
logger.warn("No snapshot detail provided - no id nor comment.")
if force is True or apply_flag is True:
if check_mode is True:
return isamAppliance.create_return_object(changed=True)
else:
return isamAppliance.invoke_post_snapshot_id("Applying snapshot", "/snapshots/apply/" + id,
{"snapshot_id": id})
return isamAppliance.create_return_object()
def download(isamAppliance, filename, id=None, comment=None, check_mode=False, force=False):
"""
Download one snapshot file to a zip file.
Multiple file download is now supported. Simply pass a list of id.
For backwards compatibility the id parameter and old behaviour is checked at the beginning.
"""
ids = []
download_flag = False
if (isinstance(id, list)):
for i in id:
if _check(isamAppliance, id=i) is True:
download_flag = True
ids.append(i)
elif (_check(isamAppliance, id=id) is True):
download_flag = True
ids.append(id)
elif (comment is not None):
ret_obj = search(isamAppliance, comment=comment)
if ret_obj != {} and ret_obj['data'] != {}:
download_flag = True
ids = ret_obj['data']
logger.info("Downloading the following list of IDs: {}".format(ids))
if force is True or (
os.path.exists(filename) is False and download_flag is True): # Don't overwrite if not forced to
if check_mode is False: # We are in check_mode but would try to download named ids
# Download all ids known so far
return isamAppliance.invoke_get_file("Downloading multiple snapshots",
"/snapshots/download?record_ids=" + ",".join(ids), filename)
return isamAppliance.create_return_object()
def download_latest(isamAppliance, dir='.', check_mode=False, force=False):
"""
Download latest snapshot file to a zip file.
"""
ret_obj = get(isamAppliance)
# Get snapshot with lowest 'id' value - that will be latest one
snaps = min(ret_obj['data'], key=lambda snap: snap['index'])
id = snaps['id']
file = snaps['filename']
filename = os.path.join(dir, file)
return download(isamAppliance, filename, id, check_mode, force)
def apply_latest(isamAppliance, check_mode=False, force=False):
"""
Apply latest snapshot file (revert to latest)
"""
ret_obj = get(isamAppliance)
# Get snapshot with lowest 'id' value - that will be latest one
snaps = min(ret_obj['data'], key=lambda snap: snap['index'])
id = snaps['id']
return apply(isamAppliance, id, check_mode, force)
def upload(isamAppliance, file, comment=None, check_mode=False, force=False):
"""
Upload Snapshot file
"""
if comment is None:
import zipfile
zFile = zipfile.ZipFile(file)
if "Comment" in zFile.namelist():
comment = zFile.open("Comment")
if force is True or _check(isamAppliance, comment=comment) is False:
if check_mode is True:
return isamAppliance.create_return_object(changed=True)
else:
return isamAppliance.invoke_post_files(
"Upload Snapshot",
"/snapshots",
[{
'file_formfield': 'uploadedfile',
'filename': file,
'mimetype': 'application/octet-stream'
}],
{
'comment': comment if comment != None else ''
}, json_response=False)
return isamAppliance.create_return_object()
def compare(isamAppliance1, isamAppliance2):
"""
Compare list of snapshots between 2 appliances
"""
ret_obj1 = get(isamAppliance1)
ret_obj2 = get(isamAppliance2)
# id of snapshot is uniquely generated on appliance and should therefore be ignored in comparison.
# filename of snapshot is generated based on exact date/time and will differ even if 2 snapshots are taken near the
# same time. Therefore, filename should be ignored in comparison
for snapshot in ret_obj1['data']:
del snapshot['id']
del snapshot['filename']
for snapshot in ret_obj2['data']:
del snapshot['id']
del snapshot['filename']
return ibmsecurity.utilities.tools.json_compare(ret_obj1, ret_obj2, deleted_keys=['id', 'filename'])
| [
9,
12,
13,
14,
17
] |
2,082 | c0cabf2b6f7190aefbaefa197a9008de3a344147 | <mask token>
| <mask token>
class Migration(migrations.Migration):
<mask token>
<mask token>
| <mask token>
class Migration(migrations.Migration):
dependencies = [('core', '0052_add_more_tags')]
operations = [migrations.RenameField(model_name='reporter', old_name=
'auth0_role_name', new_name='auth0_role_names')]
| from django.db import migrations
class Migration(migrations.Migration):
dependencies = [('core', '0052_add_more_tags')]
operations = [migrations.RenameField(model_name='reporter', old_name=
'auth0_role_name', new_name='auth0_role_names')]
| # Generated by Django 3.1.7 on 2021-03-29 18:50
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("core", "0052_add_more_tags"),
]
operations = [
migrations.RenameField(
model_name="reporter",
old_name="auth0_role_name",
new_name="auth0_role_names",
),
]
| [
0,
1,
2,
3,
4
] |
2,083 | 8471e6a3b6623236740ad5219e5038a64e0c0056 | <mask token>
| <mask token>
class Migration(migrations.Migration):
<mask token>
<mask token>
| <mask token>
class Migration(migrations.Migration):
dependencies = [migrations.swappable_dependency(settings.
AUTH_USER_MODEL), ('review', '0002_auto_20200419_1409')]
operations = [migrations.RenameModel(old_name='add_review', new_name=
'add_rev')]
| from django.conf import settings
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [migrations.swappable_dependency(settings.
AUTH_USER_MODEL), ('review', '0002_auto_20200419_1409')]
operations = [migrations.RenameModel(old_name='add_review', new_name=
'add_rev')]
| # Generated by Django 3.0.5 on 2020-04-23 11:23
from django.conf import settings
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('review', '0002_auto_20200419_1409'),
]
operations = [
migrations.RenameModel(
old_name='add_review',
new_name='add_rev',
),
]
| [
0,
1,
2,
3,
4
] |
2,084 | 89a3c34b3145b93a4cfa78eeb055c8136ab2bfe6 | <mask token>
class OvsApi(object):
<mask token>
def __init__(self, ip, protocol='tcp', port='6640', timeout=10):
super(OvsApi, self).__init__()
self.ip = ip
self.protocol = protocol
self.port = port
self.vsctl_timeout = timeout
self.ovsdb = None
self.integration_bridge = cfg.CONF.df.integration_bridge
if cfg.CONF.log_dir:
vlog.Vlog.init(cfg.CONF.log_dir + '/' + OVS_LOG_FILE_NAME)
else:
vlog.Vlog.init()
<mask token>
<mask token>
<mask token>
<mask token>
def set_controller_fail_mode(self, bridge, fail_mode):
self.ovsdb.set_fail_mode(bridge, fail_mode).execute()
<mask token>
def check_controller_fail_mode(self, fail_mode):
return fail_mode == self._db_get_val('Bridge', self.
integration_bridge, 'fail_mode')
def get_virtual_tunnel_ports(self):
ifaces = self.ovsdb.db_find('Interface', ('options', '=', {
'remote_ip': 'flow'}), columns=['uuid', 'name', 'type']).execute()
tunnel_ports = []
for iface in ifaces:
if self.integration_bridge != self._get_bridge_for_iface(iface[
'name']):
continue
tunnel_ports.append(ovs.OvsPort(id=str(iface['uuid']), name=
iface['name'], tunnel_type=iface['type']))
return tunnel_ports
def add_virtual_tunnel_port(self, tunnel_type):
self.ovsdb.add_virtual_tunnel_port(tunnel_type).execute()
def delete_port(self, switch_port):
self.ovsdb.del_port(switch_port.name, self.integration_bridge).execute(
)
@staticmethod
def _check_ofport(port_name, ofport):
if ofport is None:
LOG.warning("Can't find ofport for port %s.", port_name)
return False
if ofport < OFPORT_RANGE_MIN or ofport > OFPORT_RANGE_MAX:
LOG.warning('ofport %(ofport)s for port %(port)s is invalid.',
{'ofport': ofport, 'port': port_name})
return False
return True
def get_interface_by_id_with_specified_columns(self, port_id,
specified_columns):
columns = {'external_ids', 'name'}
columns.update(specified_columns)
ifaces = self.ovsdb.db_find('Interface', ('external_ids', '=', {
'iface-id': port_id}), columns=columns).execute()
for iface in ifaces:
if self.integration_bridge != self._get_bridge_for_iface(iface[
'name']):
continue
return iface
<mask token>
def get_local_port_mac_in_use(self, port_id):
iface = self.get_interface_by_id_with_specified_columns(port_id, {
'mac_in_use'})
if iface and netaddr.valid_mac(iface['mac_in_use']):
return iface['mac_in_use']
def _get_port_name_by_id(self, port_id):
ifaces = self.ovsdb.db_find('Interface', ('external_ids', '=', {
'iface-id': port_id}), columns=['external_ids', 'name']).execute()
for iface in ifaces:
if self.integration_bridge != self._get_bridge_for_iface(iface[
'name']):
continue
return iface['name']
<mask token>
def map_patch_to_network(self, network, patch_name):
self.bridge_mapping[network] = patch_name
def get_phy_network_ofport(self, network):
patch_name = self.bridge_mapping.get(network)
if patch_name:
return self.get_port_ofport(patch_name)
def create_patch_pair(self, local_bridge, peer_bridge, local_link_name=
None, peer_link_name=None):
links = self._gen_link_mapping(local_bridge, peer_bridge,
local_link_name, peer_link_name)
self._create_patch_port(local_bridge, links[0], peer_bridge, links[1])
self._create_patch_port(peer_bridge, links[1], local_bridge, links[0])
return links
<mask token>
<mask token>
def get_port_ofport(self, port):
return self._db_get_val('Interface', port, 'ofport', check_error=
False, log_errors=False)
def get_port_mac_in_use(self, port):
return self._db_get_val('Interface', port, 'mac_in_use',
check_error=False, log_errors=False)
def get_port_qos(self, port_id):
port_qoses = self.ovsdb.db_find('QoS', ('external_ids', '=', {
'iface-id': port_id}), columns=['external_ids', '_uuid']).execute()
if port_qoses:
ovsdb_qos = port_qoses[0]
external_ids = ovsdb_qos['external_ids']
return qos.QosPolicy(id=external_ids.get('qos-id'), topic=
external_ids.get('qos-topic'), version=external_ids.get(
'version'))
<mask token>
def update_port_qos(self, port_id, qos):
port_name = self._get_port_name_by_id(port_id)
if not port_name:
return
max_kbps = qos.get_max_kbps()
max_burst_kbps = qos.get_max_burst_kbps()
with self.ovsdb.transaction(check_error=True) as txn:
txn.add(self.ovsdb.db_set('Interface', port_name, (
'ingress_policing_rate', max_kbps), (
'ingress_policing_burst', max_burst_kbps)))
txn.add(self.ovsdb.update_qos(port_id, qos))
<mask token>
<mask token>
<mask token>
| <mask token>
class OvsApi(object):
<mask token>
def __init__(self, ip, protocol='tcp', port='6640', timeout=10):
super(OvsApi, self).__init__()
self.ip = ip
self.protocol = protocol
self.port = port
self.vsctl_timeout = timeout
self.ovsdb = None
self.integration_bridge = cfg.CONF.df.integration_bridge
if cfg.CONF.log_dir:
vlog.Vlog.init(cfg.CONF.log_dir + '/' + OVS_LOG_FILE_NAME)
else:
vlog.Vlog.init()
def initialize(self, nb_api):
db_connection = '%s:%s:%s' % (self.protocol, self.ip, self.port)
nb_api.db_change_callback(None, None, constants.
CONTROLLER_OVS_SYNC_STARTED, None)
self.ovsdb = impl_idl.DFOvsdbApi(nb_api, db_connection, self.
vsctl_timeout)
nb_api.db_change_callback(None, None, constants.
CONTROLLER_OVS_SYNC_FINISHED, None)
def _db_get_val(self, table, record, column, check_error=False,
log_errors=True):
return self.ovsdb.db_get(table, record, column).execute(check_error
=check_error, log_errors=log_errors)
<mask token>
def set_controller(self, bridge, targets):
self.ovsdb.set_controller(bridge, targets).execute()
def set_controller_fail_mode(self, bridge, fail_mode):
self.ovsdb.set_fail_mode(bridge, fail_mode).execute()
<mask token>
def check_controller_fail_mode(self, fail_mode):
return fail_mode == self._db_get_val('Bridge', self.
integration_bridge, 'fail_mode')
def get_virtual_tunnel_ports(self):
ifaces = self.ovsdb.db_find('Interface', ('options', '=', {
'remote_ip': 'flow'}), columns=['uuid', 'name', 'type']).execute()
tunnel_ports = []
for iface in ifaces:
if self.integration_bridge != self._get_bridge_for_iface(iface[
'name']):
continue
tunnel_ports.append(ovs.OvsPort(id=str(iface['uuid']), name=
iface['name'], tunnel_type=iface['type']))
return tunnel_ports
def add_virtual_tunnel_port(self, tunnel_type):
self.ovsdb.add_virtual_tunnel_port(tunnel_type).execute()
def delete_port(self, switch_port):
self.ovsdb.del_port(switch_port.name, self.integration_bridge).execute(
)
@staticmethod
def _check_ofport(port_name, ofport):
if ofport is None:
LOG.warning("Can't find ofport for port %s.", port_name)
return False
if ofport < OFPORT_RANGE_MIN or ofport > OFPORT_RANGE_MAX:
LOG.warning('ofport %(ofport)s for port %(port)s is invalid.',
{'ofport': ofport, 'port': port_name})
return False
return True
def get_interface_by_id_with_specified_columns(self, port_id,
specified_columns):
columns = {'external_ids', 'name'}
columns.update(specified_columns)
ifaces = self.ovsdb.db_find('Interface', ('external_ids', '=', {
'iface-id': port_id}), columns=columns).execute()
for iface in ifaces:
if self.integration_bridge != self._get_bridge_for_iface(iface[
'name']):
continue
return iface
<mask token>
def get_local_port_mac_in_use(self, port_id):
iface = self.get_interface_by_id_with_specified_columns(port_id, {
'mac_in_use'})
if iface and netaddr.valid_mac(iface['mac_in_use']):
return iface['mac_in_use']
def _get_port_name_by_id(self, port_id):
ifaces = self.ovsdb.db_find('Interface', ('external_ids', '=', {
'iface-id': port_id}), columns=['external_ids', 'name']).execute()
for iface in ifaces:
if self.integration_bridge != self._get_bridge_for_iface(iface[
'name']):
continue
return iface['name']
def _gen_link_mapping(self, bridge1, bridge2, bridge1_link_name=None,
bridge2_link_name=None):
if bridge1_link_name is None:
bridge1_link_name = '%s-patch' % bridge2
if bridge2_link_name is None:
bridge2_link_name = '%s-patch' % bridge1
LOG.debug(
'genrated mappings {%(bridge1)s: %(link1)s, %(bridge2)s: %(link2)s}'
, {'bridge1': bridge1, 'link1': bridge1_link_name, 'bridge2':
bridge2, 'link2': bridge2_link_name})
return bridge1_link_name, bridge2_link_name
def map_patch_to_network(self, network, patch_name):
self.bridge_mapping[network] = patch_name
def get_phy_network_ofport(self, network):
patch_name = self.bridge_mapping.get(network)
if patch_name:
return self.get_port_ofport(patch_name)
def create_patch_pair(self, local_bridge, peer_bridge, local_link_name=
None, peer_link_name=None):
links = self._gen_link_mapping(local_bridge, peer_bridge,
local_link_name, peer_link_name)
self._create_patch_port(local_bridge, links[0], peer_bridge, links[1])
self._create_patch_port(peer_bridge, links[1], local_bridge, links[0])
return links
<mask token>
<mask token>
def get_port_ofport(self, port):
return self._db_get_val('Interface', port, 'ofport', check_error=
False, log_errors=False)
def get_port_mac_in_use(self, port):
return self._db_get_val('Interface', port, 'mac_in_use',
check_error=False, log_errors=False)
def get_port_qos(self, port_id):
port_qoses = self.ovsdb.db_find('QoS', ('external_ids', '=', {
'iface-id': port_id}), columns=['external_ids', '_uuid']).execute()
if port_qoses:
ovsdb_qos = port_qoses[0]
external_ids = ovsdb_qos['external_ids']
return qos.QosPolicy(id=external_ids.get('qos-id'), topic=
external_ids.get('qos-topic'), version=external_ids.get(
'version'))
<mask token>
def update_port_qos(self, port_id, qos):
port_name = self._get_port_name_by_id(port_id)
if not port_name:
return
max_kbps = qos.get_max_kbps()
max_burst_kbps = qos.get_max_burst_kbps()
with self.ovsdb.transaction(check_error=True) as txn:
txn.add(self.ovsdb.db_set('Interface', port_name, (
'ingress_policing_rate', max_kbps), (
'ingress_policing_burst', max_burst_kbps)))
txn.add(self.ovsdb.update_qos(port_id, qos))
def clear_port_qos(self, port_id):
port_name = self._get_port_name_by_id(port_id)
if not port_name:
return
with self.ovsdb.transaction(check_error=True) as txn:
txn.add(self.ovsdb.db_set('Interface', port_name, (
'ingress_policing_rate', 0), ('ingress_policing_burst', 0)))
txn.add(self.ovsdb.db_set('Port', port_name, ('qos', [])))
txn.add(self.ovsdb.delete_qos(port_id))
<mask token>
<mask token>
| <mask token>
class OvsApi(object):
<mask token>
def __init__(self, ip, protocol='tcp', port='6640', timeout=10):
super(OvsApi, self).__init__()
self.ip = ip
self.protocol = protocol
self.port = port
self.vsctl_timeout = timeout
self.ovsdb = None
self.integration_bridge = cfg.CONF.df.integration_bridge
if cfg.CONF.log_dir:
vlog.Vlog.init(cfg.CONF.log_dir + '/' + OVS_LOG_FILE_NAME)
else:
vlog.Vlog.init()
def initialize(self, nb_api):
db_connection = '%s:%s:%s' % (self.protocol, self.ip, self.port)
nb_api.db_change_callback(None, None, constants.
CONTROLLER_OVS_SYNC_STARTED, None)
self.ovsdb = impl_idl.DFOvsdbApi(nb_api, db_connection, self.
vsctl_timeout)
nb_api.db_change_callback(None, None, constants.
CONTROLLER_OVS_SYNC_FINISHED, None)
def _db_get_val(self, table, record, column, check_error=False,
log_errors=True):
return self.ovsdb.db_get(table, record, column).execute(check_error
=check_error, log_errors=log_errors)
<mask token>
def set_controller(self, bridge, targets):
self.ovsdb.set_controller(bridge, targets).execute()
def set_controller_fail_mode(self, bridge, fail_mode):
self.ovsdb.set_fail_mode(bridge, fail_mode).execute()
def check_controller(self, target):
controllers = self.ovsdb.get_controller(self.integration_bridge
).execute()
return target in controllers
def check_controller_fail_mode(self, fail_mode):
return fail_mode == self._db_get_val('Bridge', self.
integration_bridge, 'fail_mode')
def get_virtual_tunnel_ports(self):
ifaces = self.ovsdb.db_find('Interface', ('options', '=', {
'remote_ip': 'flow'}), columns=['uuid', 'name', 'type']).execute()
tunnel_ports = []
for iface in ifaces:
if self.integration_bridge != self._get_bridge_for_iface(iface[
'name']):
continue
tunnel_ports.append(ovs.OvsPort(id=str(iface['uuid']), name=
iface['name'], tunnel_type=iface['type']))
return tunnel_ports
def add_virtual_tunnel_port(self, tunnel_type):
self.ovsdb.add_virtual_tunnel_port(tunnel_type).execute()
def delete_port(self, switch_port):
self.ovsdb.del_port(switch_port.name, self.integration_bridge).execute(
)
@staticmethod
def _check_ofport(port_name, ofport):
if ofport is None:
LOG.warning("Can't find ofport for port %s.", port_name)
return False
if ofport < OFPORT_RANGE_MIN or ofport > OFPORT_RANGE_MAX:
LOG.warning('ofport %(ofport)s for port %(port)s is invalid.',
{'ofport': ofport, 'port': port_name})
return False
return True
def get_interface_by_id_with_specified_columns(self, port_id,
specified_columns):
columns = {'external_ids', 'name'}
columns.update(specified_columns)
ifaces = self.ovsdb.db_find('Interface', ('external_ids', '=', {
'iface-id': port_id}), columns=columns).execute()
for iface in ifaces:
if self.integration_bridge != self._get_bridge_for_iface(iface[
'name']):
continue
return iface
def get_port_ofport_by_id(self, port_id):
iface = self.get_interface_by_id_with_specified_columns(port_id, {
'name', 'ofport'})
if iface and self._check_ofport(iface['name'], iface['ofport']):
return iface['ofport']
def get_local_port_mac_in_use(self, port_id):
iface = self.get_interface_by_id_with_specified_columns(port_id, {
'mac_in_use'})
if iface and netaddr.valid_mac(iface['mac_in_use']):
return iface['mac_in_use']
def _get_port_name_by_id(self, port_id):
ifaces = self.ovsdb.db_find('Interface', ('external_ids', '=', {
'iface-id': port_id}), columns=['external_ids', 'name']).execute()
for iface in ifaces:
if self.integration_bridge != self._get_bridge_for_iface(iface[
'name']):
continue
return iface['name']
def _gen_link_mapping(self, bridge1, bridge2, bridge1_link_name=None,
bridge2_link_name=None):
if bridge1_link_name is None:
bridge1_link_name = '%s-patch' % bridge2
if bridge2_link_name is None:
bridge2_link_name = '%s-patch' % bridge1
LOG.debug(
'genrated mappings {%(bridge1)s: %(link1)s, %(bridge2)s: %(link2)s}'
, {'bridge1': bridge1, 'link1': bridge1_link_name, 'bridge2':
bridge2, 'link2': bridge2_link_name})
return bridge1_link_name, bridge2_link_name
def map_patch_to_network(self, network, patch_name):
self.bridge_mapping[network] = patch_name
def get_phy_network_ofport(self, network):
patch_name = self.bridge_mapping.get(network)
if patch_name:
return self.get_port_ofport(patch_name)
def create_patch_pair(self, local_bridge, peer_bridge, local_link_name=
None, peer_link_name=None):
links = self._gen_link_mapping(local_bridge, peer_bridge,
local_link_name, peer_link_name)
self._create_patch_port(local_bridge, links[0], peer_bridge, links[1])
self._create_patch_port(peer_bridge, links[1], local_bridge, links[0])
return links
<mask token>
<mask token>
def get_port_ofport(self, port):
return self._db_get_val('Interface', port, 'ofport', check_error=
False, log_errors=False)
def get_port_mac_in_use(self, port):
return self._db_get_val('Interface', port, 'mac_in_use',
check_error=False, log_errors=False)
def get_port_qos(self, port_id):
port_qoses = self.ovsdb.db_find('QoS', ('external_ids', '=', {
'iface-id': port_id}), columns=['external_ids', '_uuid']).execute()
if port_qoses:
ovsdb_qos = port_qoses[0]
external_ids = ovsdb_qos['external_ids']
return qos.QosPolicy(id=external_ids.get('qos-id'), topic=
external_ids.get('qos-topic'), version=external_ids.get(
'version'))
<mask token>
def update_port_qos(self, port_id, qos):
port_name = self._get_port_name_by_id(port_id)
if not port_name:
return
max_kbps = qos.get_max_kbps()
max_burst_kbps = qos.get_max_burst_kbps()
with self.ovsdb.transaction(check_error=True) as txn:
txn.add(self.ovsdb.db_set('Interface', port_name, (
'ingress_policing_rate', max_kbps), (
'ingress_policing_burst', max_burst_kbps)))
txn.add(self.ovsdb.update_qos(port_id, qos))
def clear_port_qos(self, port_id):
port_name = self._get_port_name_by_id(port_id)
if not port_name:
return
with self.ovsdb.transaction(check_error=True) as txn:
txn.add(self.ovsdb.db_set('Interface', port_name, (
'ingress_policing_rate', 0), ('ingress_policing_burst', 0)))
txn.add(self.ovsdb.db_set('Port', port_name, ('qos', [])))
txn.add(self.ovsdb.delete_qos(port_id))
<mask token>
<mask token>
| <mask token>
class OvsApi(object):
<mask token>
def __init__(self, ip, protocol='tcp', port='6640', timeout=10):
super(OvsApi, self).__init__()
self.ip = ip
self.protocol = protocol
self.port = port
self.vsctl_timeout = timeout
self.ovsdb = None
self.integration_bridge = cfg.CONF.df.integration_bridge
if cfg.CONF.log_dir:
vlog.Vlog.init(cfg.CONF.log_dir + '/' + OVS_LOG_FILE_NAME)
else:
vlog.Vlog.init()
def initialize(self, nb_api):
db_connection = '%s:%s:%s' % (self.protocol, self.ip, self.port)
nb_api.db_change_callback(None, None, constants.
CONTROLLER_OVS_SYNC_STARTED, None)
self.ovsdb = impl_idl.DFOvsdbApi(nb_api, db_connection, self.
vsctl_timeout)
nb_api.db_change_callback(None, None, constants.
CONTROLLER_OVS_SYNC_FINISHED, None)
def _db_get_val(self, table, record, column, check_error=False,
log_errors=True):
return self.ovsdb.db_get(table, record, column).execute(check_error
=check_error, log_errors=log_errors)
<mask token>
def set_controller(self, bridge, targets):
self.ovsdb.set_controller(bridge, targets).execute()
def set_controller_fail_mode(self, bridge, fail_mode):
self.ovsdb.set_fail_mode(bridge, fail_mode).execute()
def check_controller(self, target):
controllers = self.ovsdb.get_controller(self.integration_bridge
).execute()
return target in controllers
def check_controller_fail_mode(self, fail_mode):
return fail_mode == self._db_get_val('Bridge', self.
integration_bridge, 'fail_mode')
def get_virtual_tunnel_ports(self):
ifaces = self.ovsdb.db_find('Interface', ('options', '=', {
'remote_ip': 'flow'}), columns=['uuid', 'name', 'type']).execute()
tunnel_ports = []
for iface in ifaces:
if self.integration_bridge != self._get_bridge_for_iface(iface[
'name']):
continue
tunnel_ports.append(ovs.OvsPort(id=str(iface['uuid']), name=
iface['name'], tunnel_type=iface['type']))
return tunnel_ports
def add_virtual_tunnel_port(self, tunnel_type):
self.ovsdb.add_virtual_tunnel_port(tunnel_type).execute()
def delete_port(self, switch_port):
self.ovsdb.del_port(switch_port.name, self.integration_bridge).execute(
)
@staticmethod
def _check_ofport(port_name, ofport):
if ofport is None:
LOG.warning("Can't find ofport for port %s.", port_name)
return False
if ofport < OFPORT_RANGE_MIN or ofport > OFPORT_RANGE_MAX:
LOG.warning('ofport %(ofport)s for port %(port)s is invalid.',
{'ofport': ofport, 'port': port_name})
return False
return True
def get_interface_by_id_with_specified_columns(self, port_id,
specified_columns):
columns = {'external_ids', 'name'}
columns.update(specified_columns)
ifaces = self.ovsdb.db_find('Interface', ('external_ids', '=', {
'iface-id': port_id}), columns=columns).execute()
for iface in ifaces:
if self.integration_bridge != self._get_bridge_for_iface(iface[
'name']):
continue
return iface
def get_port_ofport_by_id(self, port_id):
iface = self.get_interface_by_id_with_specified_columns(port_id, {
'name', 'ofport'})
if iface and self._check_ofport(iface['name'], iface['ofport']):
return iface['ofport']
def get_local_port_mac_in_use(self, port_id):
iface = self.get_interface_by_id_with_specified_columns(port_id, {
'mac_in_use'})
if iface and netaddr.valid_mac(iface['mac_in_use']):
return iface['mac_in_use']
def _get_port_name_by_id(self, port_id):
ifaces = self.ovsdb.db_find('Interface', ('external_ids', '=', {
'iface-id': port_id}), columns=['external_ids', 'name']).execute()
for iface in ifaces:
if self.integration_bridge != self._get_bridge_for_iface(iface[
'name']):
continue
return iface['name']
def _gen_link_mapping(self, bridge1, bridge2, bridge1_link_name=None,
bridge2_link_name=None):
if bridge1_link_name is None:
bridge1_link_name = '%s-patch' % bridge2
if bridge2_link_name is None:
bridge2_link_name = '%s-patch' % bridge1
LOG.debug(
'genrated mappings {%(bridge1)s: %(link1)s, %(bridge2)s: %(link2)s}'
, {'bridge1': bridge1, 'link1': bridge1_link_name, 'bridge2':
bridge2, 'link2': bridge2_link_name})
return bridge1_link_name, bridge2_link_name
def map_patch_to_network(self, network, patch_name):
self.bridge_mapping[network] = patch_name
def get_phy_network_ofport(self, network):
patch_name = self.bridge_mapping.get(network)
if patch_name:
return self.get_port_ofport(patch_name)
def create_patch_pair(self, local_bridge, peer_bridge, local_link_name=
None, peer_link_name=None):
links = self._gen_link_mapping(local_bridge, peer_bridge,
local_link_name, peer_link_name)
self._create_patch_port(local_bridge, links[0], peer_bridge, links[1])
self._create_patch_port(peer_bridge, links[1], local_bridge, links[0])
return links
def _create_patch_port(self, bridge, port, peer, peer_port):
if cfg.CONF.df.enable_dpdk:
self.ovsdb.add_br(bridge, datapath_type='netdev').execute()
else:
self.ovsdb.add_br(bridge, datapath_type='system').execute()
if not self.patch_port_exist(port):
self.ovsdb.add_patch_port(bridge, port, peer, peer_port).execute()
<mask token>
def get_port_ofport(self, port):
return self._db_get_val('Interface', port, 'ofport', check_error=
False, log_errors=False)
def get_port_mac_in_use(self, port):
return self._db_get_val('Interface', port, 'mac_in_use',
check_error=False, log_errors=False)
def get_port_qos(self, port_id):
port_qoses = self.ovsdb.db_find('QoS', ('external_ids', '=', {
'iface-id': port_id}), columns=['external_ids', '_uuid']).execute()
if port_qoses:
ovsdb_qos = port_qoses[0]
external_ids = ovsdb_qos['external_ids']
return qos.QosPolicy(id=external_ids.get('qos-id'), topic=
external_ids.get('qos-topic'), version=external_ids.get(
'version'))
<mask token>
def update_port_qos(self, port_id, qos):
port_name = self._get_port_name_by_id(port_id)
if not port_name:
return
max_kbps = qos.get_max_kbps()
max_burst_kbps = qos.get_max_burst_kbps()
with self.ovsdb.transaction(check_error=True) as txn:
txn.add(self.ovsdb.db_set('Interface', port_name, (
'ingress_policing_rate', max_kbps), (
'ingress_policing_burst', max_burst_kbps)))
txn.add(self.ovsdb.update_qos(port_id, qos))
def clear_port_qos(self, port_id):
port_name = self._get_port_name_by_id(port_id)
if not port_name:
return
with self.ovsdb.transaction(check_error=True) as txn:
txn.add(self.ovsdb.db_set('Interface', port_name, (
'ingress_policing_rate', 0), ('ingress_policing_burst', 0)))
txn.add(self.ovsdb.db_set('Port', port_name, ('qos', [])))
txn.add(self.ovsdb.delete_qos(port_id))
<mask token>
def get_vtp_ofport(self, tunnel_type):
return self.get_port_ofport(tunnel_type + '-vtp')
| # Copyright (c) 2015 OpenStack Foundation.
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from oslo_config import cfg
from oslo_log import log
from ovs import vlog
from dragonflow.controller.common import constants
from dragonflow.db.models import ovs
from dragonflow.db.models import qos
from dragonflow.ovsdb import impl_idl
LOG = log.getLogger(__name__)
OFPORT_RANGE_MIN = 1
OFPORT_RANGE_MAX = 65533
OVS_LOG_FILE_NAME = 'df-ovs.log'
class OvsApi(object):
"""The interface of openvswitch
Consumers use this class to set openvswitch or get results from
openvswitch.
"""
def __init__(self, ip, protocol='tcp', port='6640', timeout=10):
super(OvsApi, self).__init__()
self.ip = ip
self.protocol = protocol
self.port = port
# NOTE: This has to be this name vsctl_timeout, as neutron will use
# this attribute to set the timeout of ovs db.
self.vsctl_timeout = timeout
self.ovsdb = None
self.integration_bridge = cfg.CONF.df.integration_bridge
if cfg.CONF.log_dir:
vlog.Vlog.init(cfg.CONF.log_dir + '/' + OVS_LOG_FILE_NAME)
else:
vlog.Vlog.init()
def initialize(self, nb_api):
db_connection = ('%s:%s:%s' % (self.protocol, self.ip, self.port))
nb_api.db_change_callback(None, None,
constants.CONTROLLER_OVS_SYNC_STARTED, None)
self.ovsdb = impl_idl.DFOvsdbApi(
nb_api, db_connection, self.vsctl_timeout)
nb_api.db_change_callback(None, None,
constants.CONTROLLER_OVS_SYNC_FINISHED, None)
def _db_get_val(self, table, record, column, check_error=False,
log_errors=True):
return self.ovsdb.db_get(table, record, column).execute(
check_error=check_error, log_errors=log_errors)
def _get_bridge_for_iface(self, iface_name):
return self.ovsdb.iface_to_br(iface_name).execute()
def set_controller(self, bridge, targets):
self.ovsdb.set_controller(bridge, targets).execute()
def set_controller_fail_mode(self, bridge, fail_mode):
self.ovsdb.set_fail_mode(bridge, fail_mode).execute()
def check_controller(self, target):
controllers = self.ovsdb.get_controller(
self.integration_bridge).execute()
return target in controllers
def check_controller_fail_mode(self, fail_mode):
return fail_mode == self._db_get_val('Bridge',
self.integration_bridge,
'fail_mode')
def get_virtual_tunnel_ports(self):
ifaces = self.ovsdb.db_find(
'Interface', ('options', '=', {'remote_ip': 'flow'}),
columns=['uuid', 'name', 'type']).execute()
tunnel_ports = []
for iface in ifaces:
if (self.integration_bridge !=
self._get_bridge_for_iface(iface['name'])):
continue
tunnel_ports.append(
ovs.OvsPort(
id=str(iface['uuid']),
name=iface['name'],
tunnel_type=iface['type'],
),
)
return tunnel_ports
def add_virtual_tunnel_port(self, tunnel_type):
self.ovsdb.add_virtual_tunnel_port(tunnel_type).execute()
def delete_port(self, switch_port):
self.ovsdb.del_port(switch_port.name,
self.integration_bridge).execute()
@staticmethod
def _check_ofport(port_name, ofport):
if ofport is None:
LOG.warning("Can't find ofport for port %s.", port_name)
return False
if ofport < OFPORT_RANGE_MIN or ofport > OFPORT_RANGE_MAX:
LOG.warning("ofport %(ofport)s for port %(port)s is invalid.",
{'ofport': ofport, 'port': port_name})
return False
return True
def get_interface_by_id_with_specified_columns(self, port_id,
specified_columns):
columns = {'external_ids', 'name'}
columns.update(specified_columns)
ifaces = self.ovsdb.db_find(
'Interface', ('external_ids', '=', {'iface-id': port_id}),
columns=columns).execute()
for iface in ifaces:
if (self.integration_bridge !=
self._get_bridge_for_iface(iface['name'])):
# iface-id is the port id in neutron, the same neutron port
# might create multiple interfaces in different bridges
continue
return iface
def get_port_ofport_by_id(self, port_id):
iface = self.get_interface_by_id_with_specified_columns(
port_id, {'name', 'ofport'})
if iface and self._check_ofport(iface['name'], iface['ofport']):
return iface['ofport']
def get_local_port_mac_in_use(self, port_id):
iface = self.get_interface_by_id_with_specified_columns(
port_id, {'mac_in_use'})
if iface and netaddr.valid_mac(iface['mac_in_use']):
return iface['mac_in_use']
def _get_port_name_by_id(self, port_id):
ifaces = self.ovsdb.db_find(
'Interface', ('external_ids', '=', {'iface-id': port_id}),
columns=['external_ids', 'name']).execute()
for iface in ifaces:
if (self.integration_bridge !=
self._get_bridge_for_iface(iface['name'])):
# iface-id is the port id in neutron, the same neutron port
# might create multiple interfaces in different bridges
continue
return iface['name']
def _gen_link_mapping(self, bridge1, bridge2,
bridge1_link_name=None,
bridge2_link_name=None):
if bridge1_link_name is None:
bridge1_link_name = "%s-patch" % bridge2
if bridge2_link_name is None:
bridge2_link_name = "%s-patch" % bridge1
LOG.debug('genrated mappings {%(bridge1)s: %(link1)s,'
' %(bridge2)s: %(link2)s}',
{'bridge1': bridge1,
'link1': bridge1_link_name,
'bridge2': bridge2,
'link2': bridge2_link_name})
return (bridge1_link_name, bridge2_link_name)
def map_patch_to_network(self, network, patch_name):
self.bridge_mapping[network] = patch_name
def get_phy_network_ofport(self, network):
patch_name = self.bridge_mapping.get(network)
if patch_name:
return self.get_port_ofport(patch_name)
def create_patch_pair(self, local_bridge, peer_bridge,
local_link_name=None, peer_link_name=None):
links = self._gen_link_mapping(
local_bridge,
peer_bridge,
local_link_name,
peer_link_name)
self._create_patch_port(
local_bridge,
links[0],
peer_bridge,
links[1])
self._create_patch_port(
peer_bridge,
links[1],
local_bridge,
links[0])
return links
def _create_patch_port(self, bridge, port, peer, peer_port):
if cfg.CONF.df.enable_dpdk:
self.ovsdb.add_br(bridge, datapath_type='netdev').execute()
else:
self.ovsdb.add_br(bridge, datapath_type='system').execute()
if not self.patch_port_exist(port):
self.ovsdb.add_patch_port(bridge, port, peer, peer_port).execute()
def patch_port_exist(self, port):
return 'patch' == self._db_get_val('Interface', port, 'type',
check_error=False,
log_errors=False)
def get_port_ofport(self, port):
return self._db_get_val('Interface', port, 'ofport',
check_error=False, log_errors=False)
def get_port_mac_in_use(self, port):
return self._db_get_val('Interface', port, 'mac_in_use',
check_error=False, log_errors=False)
def get_port_qos(self, port_id):
port_qoses = self.ovsdb.db_find(
'QoS', ('external_ids', '=', {'iface-id': port_id}),
columns=['external_ids', '_uuid']).execute()
if port_qoses:
ovsdb_qos = port_qoses[0]
external_ids = ovsdb_qos['external_ids']
return qos.QosPolicy(
id=external_ids.get('qos-id'),
topic=external_ids.get('qos-topic'),
version=external_ids.get('version'),
)
def set_port_qos(self, port_id, qos):
port_name = self._get_port_name_by_id(port_id)
if not port_name:
return
max_kbps = qos.get_max_kbps()
max_burst_kbps = qos.get_max_burst_kbps()
with self.ovsdb.transaction(check_error=True) as txn:
qos_uuid = txn.add(self.ovsdb.create_qos(port_id, qos))
txn.add(self.ovsdb.db_set('Interface', port_name,
('ingress_policing_rate', max_kbps),
('ingress_policing_burst',
max_burst_kbps)))
txn.add(self.ovsdb.db_set('Port', port_name, ('qos', qos_uuid)))
def update_port_qos(self, port_id, qos):
port_name = self._get_port_name_by_id(port_id)
if not port_name:
return
max_kbps = qos.get_max_kbps()
max_burst_kbps = qos.get_max_burst_kbps()
with self.ovsdb.transaction(check_error=True) as txn:
txn.add(self.ovsdb.db_set('Interface', port_name,
('ingress_policing_rate', max_kbps),
('ingress_policing_burst',
max_burst_kbps)))
txn.add(self.ovsdb.update_qos(port_id, qos))
def clear_port_qos(self, port_id):
port_name = self._get_port_name_by_id(port_id)
if not port_name:
return
with self.ovsdb.transaction(check_error=True) as txn:
txn.add(self.ovsdb.db_set('Interface', port_name,
('ingress_policing_rate', 0),
('ingress_policing_burst', 0)))
txn.add(self.ovsdb.db_set('Port', port_name, ('qos', [])))
txn.add(self.ovsdb.delete_qos(port_id))
def delete_port_qos_and_queue(self, port_id):
self.ovsdb.delete_qos(port_id).execute()
def get_vtp_ofport(self, tunnel_type):
return self.get_port_ofport(tunnel_type + '-vtp')
| [
18,
23,
25,
27,
35
] |
2,085 | e5607d9893b775b216d1790897124a673b190c26 | <mask token>
| <mask token>
SECRET_KEY = os.environ['SECRET_KEY']
ALLOWED_HOSTS = ['demo.pythonic.nl']
DEBUG = False
| from .base import *
import os
SECRET_KEY = os.environ['SECRET_KEY']
ALLOWED_HOSTS = ['demo.pythonic.nl']
DEBUG = False
| null | null | [
0,
1,
2
] |
2,086 | 6909e70db4f907e26ad604f95c79a405010907bd | <mask token>
| <mask token>
def hdfs_get_filelist(blob_path, delimiter='_'):
""" Lists hdfs dir and returns named tuples with information of file based on its filename. """
def hdfs_listdir(blob_path):
command = 'hdfs dfs -ls ' + blob_path
p = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
p.wait()
files = [item.rstrip('\n').split()[-1] for item in p.stdout.readlines()
]
if len(files) > 0:
files.pop(0)
qty_files = len(files)
return files, qty_files
files, qty_files = hdfs_listdir(blob_path)
kpis = []
if qty_files > 0:
KPI = namedtuple('KPI', ['filepath', 'filename', 'kpi_name',
'initial_date', 'final_date', 'key', 'extension'])
for file in files:
filename, ext = basename(file), splitext(basename(file))[1]
if ext == '.json':
splits = 3
kpi = KPI(filepath=file, filename=filename, kpi_name=
filename.rsplit(delimiter, splits)[0], initial_date=
filename.rsplit(delimiter, splits)[1], final_date=
filename.rsplit(delimiter, splits)[2], key=splitext(
filename.rsplit(delimiter, splits)[3])[0], extension=ext)
else:
splits = 1
kpi = KPI(filepath=file, filename=filename, kpi_name=
filename.rsplit(delimiter, splits)[0], initial_date=
None, final_date=None, key=splitext(filename.rsplit(
delimiter, splits)[1])[0], extension=ext)
kpis.append(kpi)
return kpis, len(kpis)
<mask token>
| <mask token>
def hdfs_get_filelist(blob_path, delimiter='_'):
""" Lists hdfs dir and returns named tuples with information of file based on its filename. """
def hdfs_listdir(blob_path):
command = 'hdfs dfs -ls ' + blob_path
p = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
p.wait()
files = [item.rstrip('\n').split()[-1] for item in p.stdout.readlines()
]
if len(files) > 0:
files.pop(0)
qty_files = len(files)
return files, qty_files
files, qty_files = hdfs_listdir(blob_path)
kpis = []
if qty_files > 0:
KPI = namedtuple('KPI', ['filepath', 'filename', 'kpi_name',
'initial_date', 'final_date', 'key', 'extension'])
for file in files:
filename, ext = basename(file), splitext(basename(file))[1]
if ext == '.json':
splits = 3
kpi = KPI(filepath=file, filename=filename, kpi_name=
filename.rsplit(delimiter, splits)[0], initial_date=
filename.rsplit(delimiter, splits)[1], final_date=
filename.rsplit(delimiter, splits)[2], key=splitext(
filename.rsplit(delimiter, splits)[3])[0], extension=ext)
else:
splits = 1
kpi = KPI(filepath=file, filename=filename, kpi_name=
filename.rsplit(delimiter, splits)[0], initial_date=
None, final_date=None, key=splitext(filename.rsplit(
delimiter, splits)[1])[0], extension=ext)
kpis.append(kpi)
return kpis, len(kpis)
kpis, files = hdfs_get_filelist(
'wasbs://hdiprojsupplydatalake-2018-07-12t15-58-09-078z@hdiprojsupplydatalake.blob.core.windows.net/estrutura_final/'
)
| import subprocess
from collections import namedtuple
from os.path import basename, splitext
def hdfs_get_filelist(blob_path, delimiter='_'):
""" Lists hdfs dir and returns named tuples with information of file based on its filename. """
def hdfs_listdir(blob_path):
command = 'hdfs dfs -ls ' + blob_path
p = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
p.wait()
files = [item.rstrip('\n').split()[-1] for item in p.stdout.readlines()
]
if len(files) > 0:
files.pop(0)
qty_files = len(files)
return files, qty_files
files, qty_files = hdfs_listdir(blob_path)
kpis = []
if qty_files > 0:
KPI = namedtuple('KPI', ['filepath', 'filename', 'kpi_name',
'initial_date', 'final_date', 'key', 'extension'])
for file in files:
filename, ext = basename(file), splitext(basename(file))[1]
if ext == '.json':
splits = 3
kpi = KPI(filepath=file, filename=filename, kpi_name=
filename.rsplit(delimiter, splits)[0], initial_date=
filename.rsplit(delimiter, splits)[1], final_date=
filename.rsplit(delimiter, splits)[2], key=splitext(
filename.rsplit(delimiter, splits)[3])[0], extension=ext)
else:
splits = 1
kpi = KPI(filepath=file, filename=filename, kpi_name=
filename.rsplit(delimiter, splits)[0], initial_date=
None, final_date=None, key=splitext(filename.rsplit(
delimiter, splits)[1])[0], extension=ext)
kpis.append(kpi)
return kpis, len(kpis)
kpis, files = hdfs_get_filelist(
'wasbs://hdiprojsupplydatalake-2018-07-12t15-58-09-078z@hdiprojsupplydatalake.blob.core.windows.net/estrutura_final/'
)
| import subprocess
from collections import namedtuple
from os.path import basename, splitext
def hdfs_get_filelist(blob_path, delimiter="_"):
""" Lists hdfs dir and returns named tuples with information of file based on its filename. """
def hdfs_listdir(blob_path):
command = 'hdfs dfs -ls ' + blob_path
p = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
p.wait()
files = [item.rstrip("\n").split()[-1] for item in p.stdout.readlines()]
if len(files) > 0:
files.pop(0) # remove summary from ls: "found n items".
qty_files = len(files)
return files, qty_files
files, qty_files = hdfs_listdir(blob_path)
kpis = []
# If there are items in dir.
if qty_files > 0:
KPI = namedtuple('KPI', ["filepath", "filename", "kpi_name", "initial_date", "final_date", "key", "extension"])
for file in files:
filename, ext = basename(file), splitext(basename(file))[1]
if ext == ".json":
splits = 3
kpi = KPI(
filepath=file
, filename=filename
, kpi_name=filename.rsplit(delimiter, splits)[0]
, initial_date=filename.rsplit(delimiter, splits)[1]
, final_date=filename.rsplit(delimiter, splits)[2]
, key=splitext(filename.rsplit(delimiter, splits)[3])[0]
, extension=ext
)
else: # ext != ".json":
splits = 1
kpi = KPI(
filepath=file
, filename=filename
, kpi_name=filename.rsplit(delimiter, splits)[0]
, initial_date=None
, final_date=None
, key=splitext(filename.rsplit(delimiter, splits)[1])[0]
, extension=ext
)
kpis.append(kpi)
return kpis, len(kpis)
kpis, files = hdfs_get_filelist("wasbs://hdiprojsupplydatalake-2018-07-12t15-58-09-078z@hdiprojsupplydatalake.blob.core.windows.net/estrutura_final/")
| [
0,
1,
2,
3,
4
] |
2,087 | 96b113678a3453520cd2e62eb11efd9582710409 | <mask token>
class MatchaMilkTea(MilkTea):
<mask token>
def getName(self) ->str:
return self.name
<mask token>
<mask token>
def setPrice(self, price: int) ->None:
self.__price = price
<mask token>
def setIced(self, iced: bool) ->None:
self._iced = iced
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
def getMatcha(self) ->int:
return self.__matcha
def setMatcha(self, matcha: int) ->None:
self.__matcha = matcha
<mask token>
def setCondensedMilk(self, condensedMilk: int) ->None:
self.__condensedMilk = condensedMilk
<mask token>
def subBlackTea(self, amount: int) ->None:
if amount > self.__blackTea:
raise ValueError
print("You can't subtract more blacktea.")
else:
self.setBlackTea(self.getBlackTea() - amount)
<mask token>
def addCondensedMilk(self, amount: int) ->None:
self.setCondensedMilk(self.getCondensedMilk() + amount)
self.setPrice(self.getPrice() + amount * 500)
| <mask token>
class MatchaMilkTea(MilkTea):
def __init__(self):
super().__init__()
self.__matcha = 1
self.__condensedMilk = 1
self.name = 'MatchaMilkTea'
self.__price = 4500
self.__milk = 400
self.__blackTea = 2
def getName(self) ->str:
return self.name
<mask token>
def getPrice(self) ->int:
return self.__price
def setPrice(self, price: int) ->None:
self.__price = price
<mask token>
def setIced(self, iced: bool) ->None:
self._iced = iced
def getWater(self) ->int:
pass
def setWater(self, water: int) ->None:
pass
def getMilk(self) ->int:
return self.__milk
<mask token>
def getBlackTea(self) ->int:
return self.__blackTea
<mask token>
def getMatcha(self) ->int:
return self.__matcha
def setMatcha(self, matcha: int) ->None:
self.__matcha = matcha
<mask token>
def setCondensedMilk(self, condensedMilk: int) ->None:
self.__condensedMilk = condensedMilk
<mask token>
def subBlackTea(self, amount: int) ->None:
if amount > self.__blackTea:
raise ValueError
print("You can't subtract more blacktea.")
else:
self.setBlackTea(self.getBlackTea() - amount)
<mask token>
def addCondensedMilk(self, amount: int) ->None:
self.setCondensedMilk(self.getCondensedMilk() + amount)
self.setPrice(self.getPrice() + amount * 500)
| <mask token>
class MatchaMilkTea(MilkTea):
def __init__(self):
super().__init__()
self.__matcha = 1
self.__condensedMilk = 1
self.name = 'MatchaMilkTea'
self.__price = 4500
self.__milk = 400
self.__blackTea = 2
def getName(self) ->str:
return self.name
def setName(self, name: str) ->None:
self.name = name
def getPrice(self) ->int:
return self.__price
def setPrice(self, price: int) ->None:
self.__price = price
<mask token>
def setIced(self, iced: bool) ->None:
self._iced = iced
def getWater(self) ->int:
pass
def setWater(self, water: int) ->None:
pass
def getMilk(self) ->int:
return self.__milk
def setMilk(self, milk: int) ->None:
self.__milk = milk
def getBlackTea(self) ->int:
return self.__blackTea
def setBlackTea(self, blacktea: int) ->None:
self.__blackTea = blacktea
def getMatcha(self) ->int:
return self.__matcha
def setMatcha(self, matcha: int) ->None:
self.__matcha = matcha
<mask token>
def setCondensedMilk(self, condensedMilk: int) ->None:
self.__condensedMilk = condensedMilk
def addBlackTea(self, amount: int) ->None:
self.setBlackTea(self.getBlackTea() + amount)
self.setPrice(self.getPrice() + amount * 500)
def subBlackTea(self, amount: int) ->None:
if amount > self.__blackTea:
raise ValueError
print("You can't subtract more blacktea.")
else:
self.setBlackTea(self.getBlackTea() - amount)
def addMatcha(self, amount: int) ->None:
self.setMatcha(self.getMatcha() + amount)
self.setPrice(self.getPrice() + amount * 400)
def addCondensedMilk(self, amount: int) ->None:
self.setCondensedMilk(self.getCondensedMilk() + amount)
self.setPrice(self.getPrice() + amount * 500)
| from com.kakao.cafe.menu.tea.milkTea import MilkTea
class MatchaMilkTea(MilkTea):
def __init__(self):
super().__init__()
self.__matcha = 1
self.__condensedMilk = 1
self.name = 'MatchaMilkTea'
self.__price = 4500
self.__milk = 400
self.__blackTea = 2
def getName(self) ->str:
return self.name
def setName(self, name: str) ->None:
self.name = name
def getPrice(self) ->int:
return self.__price
def setPrice(self, price: int) ->None:
self.__price = price
def isIced(self) ->bool:
return self.iced
def setIced(self, iced: bool) ->None:
self._iced = iced
def getWater(self) ->int:
pass
def setWater(self, water: int) ->None:
pass
def getMilk(self) ->int:
return self.__milk
def setMilk(self, milk: int) ->None:
self.__milk = milk
def getBlackTea(self) ->int:
return self.__blackTea
def setBlackTea(self, blacktea: int) ->None:
self.__blackTea = blacktea
def getMatcha(self) ->int:
return self.__matcha
def setMatcha(self, matcha: int) ->None:
self.__matcha = matcha
def getCondensedMilk(self) ->int:
return self.__condensedMilk
def setCondensedMilk(self, condensedMilk: int) ->None:
self.__condensedMilk = condensedMilk
def addBlackTea(self, amount: int) ->None:
self.setBlackTea(self.getBlackTea() + amount)
self.setPrice(self.getPrice() + amount * 500)
def subBlackTea(self, amount: int) ->None:
if amount > self.__blackTea:
raise ValueError
print("You can't subtract more blacktea.")
else:
self.setBlackTea(self.getBlackTea() - amount)
def addMatcha(self, amount: int) ->None:
self.setMatcha(self.getMatcha() + amount)
self.setPrice(self.getPrice() + amount * 400)
def addCondensedMilk(self, amount: int) ->None:
self.setCondensedMilk(self.getCondensedMilk() + amount)
self.setPrice(self.getPrice() + amount * 500)
| from com.kakao.cafe.menu.tea.milkTea import MilkTea
class MatchaMilkTea(MilkTea):
def __init__(self):
super().__init__()
self.__matcha = 1
self.__condensedMilk = 1
self.name = "MatchaMilkTea"
self.__price = 4500
self.__milk = 400
self.__blackTea = 2
def getName(self) -> str:
return self.name
def setName(self, name: str) -> None:
self.name = name
def getPrice(self) -> int:
return self.__price
def setPrice(self, price: int) -> None:
self.__price = price
def isIced(self) -> bool:
return self.iced
def setIced(self, iced: bool) -> None:
self._iced = iced
def getWater(self) -> int:
pass
def setWater(self, water: int) -> None:
pass
def getMilk(self) -> int:
return self.__milk
def setMilk(self, milk: int) -> None:
self.__milk = milk
def getBlackTea(self) -> int:
return self.__blackTea
def setBlackTea(self, blacktea: int) -> None:
self.__blackTea = blacktea
def getMatcha(self) -> int:
return self.__matcha
def setMatcha(self, matcha: int) -> None:
self.__matcha = matcha
def getCondensedMilk(self) -> int:
return self.__condensedMilk
def setCondensedMilk(self, condensedMilk: int) -> None:
self.__condensedMilk = condensedMilk
def addBlackTea(self, amount: int) -> None:
self.setBlackTea(self.getBlackTea() + amount)
self.setPrice(self.getPrice() + amount * 500)
def subBlackTea(self, amount: int) -> None:
if amount > self.__blackTea:
raise ValueError
print("You can't subtract more blacktea.")
else:
self.setBlackTea(self.getBlackTea() - amount)
def addMatcha(self, amount: int) -> None:
self.setMatcha(self.getMatcha() + amount)
self.setPrice(self.getPrice() + amount * 400)
def addCondensedMilk(self, amount: int) -> None:
self.setCondensedMilk(self.getCondensedMilk() + amount)
self.setPrice(self.getPrice() + amount * 500)
| [
9,
15,
20,
23,
24
] |
2,088 | a1710ee228a432db92c9586ddff0bfcad1f434a8 | # !/usr/bin/env python
# -*- coding: utf-8 -*-
# tail -2 hightemp.txt
import sys
with open(sys.argv[1]) as f:
lines = f.readlines();
n = sys.argv[2];
print "".join(lines[len(lines)-int(n):]) | null | null | null | null | [
0
] |
2,089 | f84ab1530cbc6bd25c45fc607d8f1cd461b180bf | <mask token>
| <mask token>
for h in range(11, 41):
for i in range(model_img_array.shape[0]):
for j in range(model_img_array.shape[2]):
dis = np.sqrt(pow(13 - i, 2) + pow(9 - j, 2))
if dis <= 2:
model_img_array[i, h, j] = yu
elif np.sqrt(pow(11 - i, 2) + pow(14 - j, 2)) <= 2:
model_img_array[i, h, j] = yu
elif np.sqrt(pow(9 - i, 2) + pow(19 - j, 2)) <= 2:
model_img_array[i, h, j] = yu
elif np.sqrt(pow(8 - i, 2) + pow(10 - j, 2)) <= 2:
model_img_array[i, h, j] = yu
elif np.sqrt(pow(5 - i, 2) + pow(15 - j, 2)) <= 2:
model_img_array[i, h, j] = yu
elif np.sqrt(pow(2 - i, 2) + pow(10 - j, 2)) <= 2:
model_img_array[i, h, j] = yu
<mask token>
model_img.SetSpacing(spacing)
sitk.WriteImage(model_img, '../../data/ground_data/model_img.nii')
<mask token>
print(np.sum(h))
<mask token>
sitk.WriteImage(h_img, '../../data/ground_data/h.nii')
<mask token>
print(h_pad)
<mask token>
recon_img.SetSpacing(spacing)
sitk.WriteImage(recon_img, '../../data/ground_data/recon.nii')
| <mask token>
new_img = sitk.ReadImage('../../data/ground_data/new_img.nii')
spacing = new_img.GetSpacing()
new_img_array = sitk.GetArrayFromImage(new_img)
model_img_array = np.zeros(new_img_array.shape)
yu = 0.1
for h in range(11, 41):
for i in range(model_img_array.shape[0]):
for j in range(model_img_array.shape[2]):
dis = np.sqrt(pow(13 - i, 2) + pow(9 - j, 2))
if dis <= 2:
model_img_array[i, h, j] = yu
elif np.sqrt(pow(11 - i, 2) + pow(14 - j, 2)) <= 2:
model_img_array[i, h, j] = yu
elif np.sqrt(pow(9 - i, 2) + pow(19 - j, 2)) <= 2:
model_img_array[i, h, j] = yu
elif np.sqrt(pow(8 - i, 2) + pow(10 - j, 2)) <= 2:
model_img_array[i, h, j] = yu
elif np.sqrt(pow(5 - i, 2) + pow(15 - j, 2)) <= 2:
model_img_array[i, h, j] = yu
elif np.sqrt(pow(2 - i, 2) + pow(10 - j, 2)) <= 2:
model_img_array[i, h, j] = yu
model_img = sitk.GetImageFromArray(model_img_array)
model_img.SetSpacing(spacing)
sitk.WriteImage(model_img, '../../data/ground_data/model_img.nii')
img_array = sitk.GetArrayFromImage(sitk.ReadImage(
'../../data/ground_data/0.709_0.5_0.75VVHR_0.236_211_211_111_60itr_1sub.nii'
))
model_img_array = sitk.GetArrayFromImage(model_img)
new_img_f = fftn(new_img_array)
model_f = fftn(model_img_array)
H = new_img_f / (model_f + 0.0001)
h = np.fft.ifftn(H)
h = np.real(h)
h = (h - h.min()) / (h.max() - h.min())
h = h / np.sum(h)
print(np.sum(h))
h_img = sitk.GetImageFromArray(h)
sitk.WriteImage(h_img, '../../data/ground_data/h.nii')
h_pad = np.lib.pad(h, ((0, img_array.shape[0] - h.shape[0]), (0, img_array.
shape[1] - h.shape[1]), (0, img_array.shape[2] - h.shape[2])),
'constant', constant_values=0)
print(h_pad)
H_pad = fftn(h_pad)
img_f = fftn(img_array)
recon = img_f / (H_pad + H.any().min() + 0.0001)
recon = ifftn(recon)
recon = np.real(recon)
recon_img = sitk.GetImageFromArray(recon)
recon_img.SetSpacing(spacing)
sitk.WriteImage(recon_img, '../../data/ground_data/recon.nii')
| <mask token>
import numpy as np
import SimpleITK as sitk
import skimage.restoration.deconvolution
from numpy.fft import fftn, ifftn
new_img = sitk.ReadImage('../../data/ground_data/new_img.nii')
spacing = new_img.GetSpacing()
new_img_array = sitk.GetArrayFromImage(new_img)
model_img_array = np.zeros(new_img_array.shape)
yu = 0.1
for h in range(11, 41):
for i in range(model_img_array.shape[0]):
for j in range(model_img_array.shape[2]):
dis = np.sqrt(pow(13 - i, 2) + pow(9 - j, 2))
if dis <= 2:
model_img_array[i, h, j] = yu
elif np.sqrt(pow(11 - i, 2) + pow(14 - j, 2)) <= 2:
model_img_array[i, h, j] = yu
elif np.sqrt(pow(9 - i, 2) + pow(19 - j, 2)) <= 2:
model_img_array[i, h, j] = yu
elif np.sqrt(pow(8 - i, 2) + pow(10 - j, 2)) <= 2:
model_img_array[i, h, j] = yu
elif np.sqrt(pow(5 - i, 2) + pow(15 - j, 2)) <= 2:
model_img_array[i, h, j] = yu
elif np.sqrt(pow(2 - i, 2) + pow(10 - j, 2)) <= 2:
model_img_array[i, h, j] = yu
model_img = sitk.GetImageFromArray(model_img_array)
model_img.SetSpacing(spacing)
sitk.WriteImage(model_img, '../../data/ground_data/model_img.nii')
img_array = sitk.GetArrayFromImage(sitk.ReadImage(
'../../data/ground_data/0.709_0.5_0.75VVHR_0.236_211_211_111_60itr_1sub.nii'
))
model_img_array = sitk.GetArrayFromImage(model_img)
new_img_f = fftn(new_img_array)
model_f = fftn(model_img_array)
H = new_img_f / (model_f + 0.0001)
h = np.fft.ifftn(H)
h = np.real(h)
h = (h - h.min()) / (h.max() - h.min())
h = h / np.sum(h)
print(np.sum(h))
h_img = sitk.GetImageFromArray(h)
sitk.WriteImage(h_img, '../../data/ground_data/h.nii')
h_pad = np.lib.pad(h, ((0, img_array.shape[0] - h.shape[0]), (0, img_array.
shape[1] - h.shape[1]), (0, img_array.shape[2] - h.shape[2])),
'constant', constant_values=0)
print(h_pad)
H_pad = fftn(h_pad)
img_f = fftn(img_array)
recon = img_f / (H_pad + H.any().min() + 0.0001)
recon = ifftn(recon)
recon = np.real(recon)
recon_img = sitk.GetImageFromArray(recon)
recon_img.SetSpacing(spacing)
sitk.WriteImage(recon_img, '../../data/ground_data/recon.nii')
| #!usr/bin/env python
# -*- coding:utf-8 _*
"""
@File : build_model_2.py
@Author : ljt
@Description: xx
@Time : 2021/6/12 21:46
"""
import numpy as np
import SimpleITK as sitk
import skimage.restoration.deconvolution
from numpy.fft import fftn, ifftn
new_img = sitk.ReadImage("../../data/ground_data/new_img.nii")
spacing = new_img.GetSpacing()
# 原始SimpleITK数据的存储形式为(Width, Height, Depth)即(X,Y,Z)
# 使用GetArrayFromImage()方法后,X轴与Z轴发生了对调
# 输出形状为:(Depth, Height, Width)即(Z,Y,X)。
new_img_array = sitk.GetArrayFromImage(new_img)
model_img_array = np.zeros((new_img_array.shape))
# [10, 19, 14]
# [15, 16, 12]
# [20, 25, 10]
# [11, 32, 9]
# [16, 16, 6]
# [11, 19, 3]
# h -> 11-41
yu = 0.1
for h in range(11,41):
for i in range(model_img_array.shape[0]):
for j in range(model_img_array.shape[2]):
dis = np.sqrt(pow((13 - i), 2) + pow((9 - j), 2))
if dis <= 2:
model_img_array[i, h, j] = yu
elif np.sqrt(pow((11 - i), 2) + pow((14 - j), 2)) <=2:
model_img_array[i, h, j] = yu
elif np.sqrt(pow((9 - i), 2) + pow((19 - j), 2)) <=2:
model_img_array[i, h, j] = yu
elif np.sqrt(pow((8 - i), 2) + pow((10 - j), 2)) <= 2:
model_img_array[i, h, j] = yu
elif np.sqrt(pow((5 - i), 2) + pow((15 - j), 2)) <= 2:
model_img_array[i, h, j] = yu
elif np.sqrt(pow((2 - i), 2) + pow((10 - j), 2)) <= 2:
model_img_array[i, h, j] = yu
# else:
# print(new_img_array[i, h, j])
model_img = sitk.GetImageFromArray(model_img_array)
model_img.SetSpacing(spacing)
sitk.WriteImage(model_img, "../../data/ground_data/model_img.nii")
img_array = sitk.GetArrayFromImage(sitk.ReadImage(
"../../data/ground_data/0.709_0.5_0.75VVHR_0.236_211_211_111_60itr_1sub.nii"))
model_img_array = sitk.GetArrayFromImage(model_img)
new_img_f = fftn(new_img_array)
model_f = fftn(model_img_array)
H = new_img_f / (model_f + 0.0001)
h = np.fft.ifftn(H)
h = np.real(h)
h = (h -h.min()) / (h.max() - h.min())
h =h / np.sum(h)
print(np.sum(h))
h_img = sitk.GetImageFromArray(h)
sitk.WriteImage(h_img, "../../data/ground_data/h.nii")
h_pad = np.lib.pad(h, ((0,img_array.shape[0] - h.shape[0]), (0, img_array.shape[1] - h.shape[1]), (0, img_array.shape[2] - h.shape[2])), 'constant', constant_values=(0))
print(h_pad)
H_pad = fftn(h_pad)
img_f = fftn(img_array)
# recon = model_f / (H + 0.0001)
recon = img_f / (H_pad + H.any().min() + 0.0001)
# print(recon)
recon = ifftn(recon)
recon = np.real(recon)
# lucy_richoid
# recon = skimage.restoration.deconvolution.richardson_lucy(img_array, h, iterations=3)
recon_img = sitk.GetImageFromArray(recon)
recon_img.SetSpacing(spacing)
sitk.WriteImage(recon_img, "../../data/ground_data/recon.nii")
| [
0,
1,
2,
3,
4
] |
2,090 | 77995aab723fb118be3f986b8cd93f349690baca | <mask token>
@app.route('/reportes')
def reportes():
try:
cur = mysql.connect().cursor()
if 'usuario' in session:
return render_template('views/reportes.html', id=session['id'])
else:
return redirect('/login')
except Exception as e:
print(e)
return jsonify('Ha ocurrido un error')
finally:
cur.close()
@app.route('/reporte_compras_api', methods=['POST'])
def compras():
try:
cur = mysql.connect().cursor()
if not 'usuario' in session:
return jsonify('Debes registrarte')
else:
_json = request.get_json(force=True)
_id = session['id']
_fecha1 = _json['fechaInicio']
_fecha2 = _json['fechaFin']
data = []
query = (
'SELECT p.id_prod, p.descripcion, SUM(t.cantidad), p.precio, SUM(c.total), p.costoenvio FROM productos t LEFT JOIN tbl_productos p ON p.id_prod = t.id_prod LEFT JOIN tbl_compras c ON c.id_comp = t.id_comp WHERE c.id_comprador = %s AND c.fecha BETWEEN %s AND %s GROUP BY p.id_prod'
)
values = _id, _fecha1, _fecha2
cur.execute(query, values)
rows = cur.fetchall()
for row in rows:
data.append({'id': row[0], 'descripcion': row[1],
'costo envio': row[5], 'precio': row[3], 'cantidad':
int(float(row[2])), 'total': row[4]})
res = jsonify(data)
res.status_code = 200
return res
except Exception as e:
print(e)
return jsonify('Ha ocurrido un error')
finally:
cur.close()
@app.route('/reporte_ventas_api', methods=['POST'])
def ventas():
try:
if 'usuario' in session and session['tipo_usuario'] == 'T':
cur = mysql.connect().cursor()
_json = request.get_json(force=True)
_id = session['id']
_fecha1 = _json['fechaInicio']
_fecha2 = _json['fechaFin']
data = []
query = (
'SELECT DISTINCT p.id_prod, p.descripcion, p.stock, DATE_FORMAT(p.publicacion, %s), p.precio, p.tiempoenvio, p.costoenvio, SUM(t.cantidad), SUM(c.total) FROM productos t LEFT JOIN tbl_productos p ON p.id_prod = t.id_prod LEFT JOIN tbl_compras c ON c.id_comp = t.id_comp WHERE p.usr_id = %s AND c.fecha BETWEEN %s AND %s GROUP BY p.id_prod'
)
values = '%d %M %Y', _id, _fecha1, _fecha2
cur.execute(query, values)
rows = cur.fetchall()
for row in rows:
data.append({'id': row[0], 'descripcion': row[1], 'stock':
row[2], 'publicacion': row[3], 'precio': row[4],
'tiempo envio': row[5], 'costo envio': row[6],
'cantidad': float(row[7]), 'total': row[8]})
return jsonify(data)
else:
return jsonify('Debes registrarte como tienda')
except Exception as e:
print(e)
return jsonify('Ha ocurrido un error')
finally:
cur.close()
@app.route('/reporte_facturas_api')
def facturas():
try:
cur = mysql.connect().cursor()
facturas = []
data = {}
if session['tipo_usuario'] is 'C':
cur.execute(
'SELECT t.id_comp, DATE_FORMAT(t.fecha, %s), t.total, p.nombrepropietario, p.numero, u.nombre, d.numcasillero, d.provincia FROM tbl_compras t LEFT JOIN tbl_metodosdepago p ON t.id_pago = p.id_pago LEFT JOIN tbl_usuarios u ON u.id_usr = t.id_tienda LEFT JOIN tbl_direccionesdeenvio d ON d.id_dire = d.id_dire WHERE t.id_comprador = %s'
, ('%d %M %Y', session['id']))
else:
cur.execute(
'SELECT t.id_comp, DATE_FORMAT(t.fecha, %s), t.total, p.nombrepropietario, p.numero, u.nombre, d.numcasillero, d.provincia FROM tbl_compras t LEFT JOIN tbl_metodosdepago p ON t.id_pago = p.id_pago LEFT JOIN tbl_usuarios u ON u.id_usr = t.id_comprador LEFT JOIN tbl_direccionesdeenvio d ON d.id_dire = d.id_dire WHERE t.id_tienda = %s'
, ('%d %M %Y', session['id']))
rows = cur.fetchall()
for row in rows:
if session['tipo_usuario'] is 'C':
data = {'id': row[0], 'fecha': row[1], 'total': row[2],
'metodopago': {'propietario': row[3], 'numero': row[4]},
'tienda': row[5], 'direccionenvio': {'casillero': row[6
], 'provincia': row[7]}, 'productos': []}
else:
data = {'id': row[0], 'fecha': row[1], 'total': row[2],
'metodopago': {'propietario': row[3], 'numero': row[4]},
'comprador': row[5], 'direccionenvio': {'casillero':
row[6], 'provincia': row[7]}, 'productos': []}
cur.execute(
'SELECT p.descripcion, p.tiempoenvio, p.costoenvio, t.cantidad, p.precio FROM productos t LEFT JOIN tbl_productos p ON p.id_prod = t.id_prod WHERE t.id_comp = %s'
, (data['id'],))
prods = cur.fetchall()
for prod in prods:
data['productos'].append({'descripcion': prod[0],
'tiempo envio': prod[1], 'costo envio': prod[2],
'cantidad': prod[3], 'precio': prod[4]})
facturas.append(data)
return jsonify(facturas)
except Exception as e:
print(e)
return jsonify('Ha ocurrido un error')
finally:
cur.close()
<mask token>
| <mask token>
@app.route('/reportes')
def reportes():
try:
cur = mysql.connect().cursor()
if 'usuario' in session:
return render_template('views/reportes.html', id=session['id'])
else:
return redirect('/login')
except Exception as e:
print(e)
return jsonify('Ha ocurrido un error')
finally:
cur.close()
@app.route('/reporte_compras_api', methods=['POST'])
def compras():
try:
cur = mysql.connect().cursor()
if not 'usuario' in session:
return jsonify('Debes registrarte')
else:
_json = request.get_json(force=True)
_id = session['id']
_fecha1 = _json['fechaInicio']
_fecha2 = _json['fechaFin']
data = []
query = (
'SELECT p.id_prod, p.descripcion, SUM(t.cantidad), p.precio, SUM(c.total), p.costoenvio FROM productos t LEFT JOIN tbl_productos p ON p.id_prod = t.id_prod LEFT JOIN tbl_compras c ON c.id_comp = t.id_comp WHERE c.id_comprador = %s AND c.fecha BETWEEN %s AND %s GROUP BY p.id_prod'
)
values = _id, _fecha1, _fecha2
cur.execute(query, values)
rows = cur.fetchall()
for row in rows:
data.append({'id': row[0], 'descripcion': row[1],
'costo envio': row[5], 'precio': row[3], 'cantidad':
int(float(row[2])), 'total': row[4]})
res = jsonify(data)
res.status_code = 200
return res
except Exception as e:
print(e)
return jsonify('Ha ocurrido un error')
finally:
cur.close()
@app.route('/reporte_ventas_api', methods=['POST'])
def ventas():
try:
if 'usuario' in session and session['tipo_usuario'] == 'T':
cur = mysql.connect().cursor()
_json = request.get_json(force=True)
_id = session['id']
_fecha1 = _json['fechaInicio']
_fecha2 = _json['fechaFin']
data = []
query = (
'SELECT DISTINCT p.id_prod, p.descripcion, p.stock, DATE_FORMAT(p.publicacion, %s), p.precio, p.tiempoenvio, p.costoenvio, SUM(t.cantidad), SUM(c.total) FROM productos t LEFT JOIN tbl_productos p ON p.id_prod = t.id_prod LEFT JOIN tbl_compras c ON c.id_comp = t.id_comp WHERE p.usr_id = %s AND c.fecha BETWEEN %s AND %s GROUP BY p.id_prod'
)
values = '%d %M %Y', _id, _fecha1, _fecha2
cur.execute(query, values)
rows = cur.fetchall()
for row in rows:
data.append({'id': row[0], 'descripcion': row[1], 'stock':
row[2], 'publicacion': row[3], 'precio': row[4],
'tiempo envio': row[5], 'costo envio': row[6],
'cantidad': float(row[7]), 'total': row[8]})
return jsonify(data)
else:
return jsonify('Debes registrarte como tienda')
except Exception as e:
print(e)
return jsonify('Ha ocurrido un error')
finally:
cur.close()
@app.route('/reporte_facturas_api')
def facturas():
try:
cur = mysql.connect().cursor()
facturas = []
data = {}
if session['tipo_usuario'] is 'C':
cur.execute(
'SELECT t.id_comp, DATE_FORMAT(t.fecha, %s), t.total, p.nombrepropietario, p.numero, u.nombre, d.numcasillero, d.provincia FROM tbl_compras t LEFT JOIN tbl_metodosdepago p ON t.id_pago = p.id_pago LEFT JOIN tbl_usuarios u ON u.id_usr = t.id_tienda LEFT JOIN tbl_direccionesdeenvio d ON d.id_dire = d.id_dire WHERE t.id_comprador = %s'
, ('%d %M %Y', session['id']))
else:
cur.execute(
'SELECT t.id_comp, DATE_FORMAT(t.fecha, %s), t.total, p.nombrepropietario, p.numero, u.nombre, d.numcasillero, d.provincia FROM tbl_compras t LEFT JOIN tbl_metodosdepago p ON t.id_pago = p.id_pago LEFT JOIN tbl_usuarios u ON u.id_usr = t.id_comprador LEFT JOIN tbl_direccionesdeenvio d ON d.id_dire = d.id_dire WHERE t.id_tienda = %s'
, ('%d %M %Y', session['id']))
rows = cur.fetchall()
for row in rows:
if session['tipo_usuario'] is 'C':
data = {'id': row[0], 'fecha': row[1], 'total': row[2],
'metodopago': {'propietario': row[3], 'numero': row[4]},
'tienda': row[5], 'direccionenvio': {'casillero': row[6
], 'provincia': row[7]}, 'productos': []}
else:
data = {'id': row[0], 'fecha': row[1], 'total': row[2],
'metodopago': {'propietario': row[3], 'numero': row[4]},
'comprador': row[5], 'direccionenvio': {'casillero':
row[6], 'provincia': row[7]}, 'productos': []}
cur.execute(
'SELECT p.descripcion, p.tiempoenvio, p.costoenvio, t.cantidad, p.precio FROM productos t LEFT JOIN tbl_productos p ON p.id_prod = t.id_prod WHERE t.id_comp = %s'
, (data['id'],))
prods = cur.fetchall()
for prod in prods:
data['productos'].append({'descripcion': prod[0],
'tiempo envio': prod[1], 'costo envio': prod[2],
'cantidad': prod[3], 'precio': prod[4]})
facturas.append(data)
return jsonify(facturas)
except Exception as e:
print(e)
return jsonify('Ha ocurrido un error')
finally:
cur.close()
@app.route('/reporte_suscripciones_api')
def suscripciones():
try:
cur = mysql.connect().cursor()
data = []
tiendas = []
cur.execute(
'SELECT u.id_usr, u.nombre, u.email, u.foto FROM seguir t LEFT JOIN tbl_usuarios u ON u.id_usr = t.id_tienda WHERE t.id_comprador =%s '
, (session['id'],))
rows = cur.fetchall()
for row in rows:
tiendas.append({'id': row[0], 'nombre': row[1], 'foto': row[3],
'email': row[2]})
productos = []
cur.execute(
'SELECT u.descripcion, c.descripcion, u.precio, b.nombre, u.id_prod FROM listadeseos t LEFT JOIN tbl_productos u ON u.id_prod = t.id_producto LEFT JOIN tbl_categoriasproductos c ON u.id_categoria = c.id_catp LEFT JOIN tbl_usuarios b ON u.usr_id = b.id_usr WHERE t.usr_id =%s '
, (session['id'],))
rows = cur.fetchall()
for row in rows:
productos.append({'id': row[4], 'descripcion': row[0],
'categoria': row[1], 'precio': row[2], 'tienda': row[3]})
data.append(productos)
data.append(tiendas)
return jsonify(data)
except Exception as e:
print(e)
return jsonify('Ha ocurrido un error')
finally:
cur.close()
<mask token>
| <mask token>
@app.route('/reportes')
def reportes():
try:
cur = mysql.connect().cursor()
if 'usuario' in session:
return render_template('views/reportes.html', id=session['id'])
else:
return redirect('/login')
except Exception as e:
print(e)
return jsonify('Ha ocurrido un error')
finally:
cur.close()
@app.route('/reporte_compras_api', methods=['POST'])
def compras():
try:
cur = mysql.connect().cursor()
if not 'usuario' in session:
return jsonify('Debes registrarte')
else:
_json = request.get_json(force=True)
_id = session['id']
_fecha1 = _json['fechaInicio']
_fecha2 = _json['fechaFin']
data = []
query = (
'SELECT p.id_prod, p.descripcion, SUM(t.cantidad), p.precio, SUM(c.total), p.costoenvio FROM productos t LEFT JOIN tbl_productos p ON p.id_prod = t.id_prod LEFT JOIN tbl_compras c ON c.id_comp = t.id_comp WHERE c.id_comprador = %s AND c.fecha BETWEEN %s AND %s GROUP BY p.id_prod'
)
values = _id, _fecha1, _fecha2
cur.execute(query, values)
rows = cur.fetchall()
for row in rows:
data.append({'id': row[0], 'descripcion': row[1],
'costo envio': row[5], 'precio': row[3], 'cantidad':
int(float(row[2])), 'total': row[4]})
res = jsonify(data)
res.status_code = 200
return res
except Exception as e:
print(e)
return jsonify('Ha ocurrido un error')
finally:
cur.close()
@app.route('/reporte_ventas_api', methods=['POST'])
def ventas():
try:
if 'usuario' in session and session['tipo_usuario'] == 'T':
cur = mysql.connect().cursor()
_json = request.get_json(force=True)
_id = session['id']
_fecha1 = _json['fechaInicio']
_fecha2 = _json['fechaFin']
data = []
query = (
'SELECT DISTINCT p.id_prod, p.descripcion, p.stock, DATE_FORMAT(p.publicacion, %s), p.precio, p.tiempoenvio, p.costoenvio, SUM(t.cantidad), SUM(c.total) FROM productos t LEFT JOIN tbl_productos p ON p.id_prod = t.id_prod LEFT JOIN tbl_compras c ON c.id_comp = t.id_comp WHERE p.usr_id = %s AND c.fecha BETWEEN %s AND %s GROUP BY p.id_prod'
)
values = '%d %M %Y', _id, _fecha1, _fecha2
cur.execute(query, values)
rows = cur.fetchall()
for row in rows:
data.append({'id': row[0], 'descripcion': row[1], 'stock':
row[2], 'publicacion': row[3], 'precio': row[4],
'tiempo envio': row[5], 'costo envio': row[6],
'cantidad': float(row[7]), 'total': row[8]})
return jsonify(data)
else:
return jsonify('Debes registrarte como tienda')
except Exception as e:
print(e)
return jsonify('Ha ocurrido un error')
finally:
cur.close()
@app.route('/reporte_facturas_api')
def facturas():
try:
cur = mysql.connect().cursor()
facturas = []
data = {}
if session['tipo_usuario'] is 'C':
cur.execute(
'SELECT t.id_comp, DATE_FORMAT(t.fecha, %s), t.total, p.nombrepropietario, p.numero, u.nombre, d.numcasillero, d.provincia FROM tbl_compras t LEFT JOIN tbl_metodosdepago p ON t.id_pago = p.id_pago LEFT JOIN tbl_usuarios u ON u.id_usr = t.id_tienda LEFT JOIN tbl_direccionesdeenvio d ON d.id_dire = d.id_dire WHERE t.id_comprador = %s'
, ('%d %M %Y', session['id']))
else:
cur.execute(
'SELECT t.id_comp, DATE_FORMAT(t.fecha, %s), t.total, p.nombrepropietario, p.numero, u.nombre, d.numcasillero, d.provincia FROM tbl_compras t LEFT JOIN tbl_metodosdepago p ON t.id_pago = p.id_pago LEFT JOIN tbl_usuarios u ON u.id_usr = t.id_comprador LEFT JOIN tbl_direccionesdeenvio d ON d.id_dire = d.id_dire WHERE t.id_tienda = %s'
, ('%d %M %Y', session['id']))
rows = cur.fetchall()
for row in rows:
if session['tipo_usuario'] is 'C':
data = {'id': row[0], 'fecha': row[1], 'total': row[2],
'metodopago': {'propietario': row[3], 'numero': row[4]},
'tienda': row[5], 'direccionenvio': {'casillero': row[6
], 'provincia': row[7]}, 'productos': []}
else:
data = {'id': row[0], 'fecha': row[1], 'total': row[2],
'metodopago': {'propietario': row[3], 'numero': row[4]},
'comprador': row[5], 'direccionenvio': {'casillero':
row[6], 'provincia': row[7]}, 'productos': []}
cur.execute(
'SELECT p.descripcion, p.tiempoenvio, p.costoenvio, t.cantidad, p.precio FROM productos t LEFT JOIN tbl_productos p ON p.id_prod = t.id_prod WHERE t.id_comp = %s'
, (data['id'],))
prods = cur.fetchall()
for prod in prods:
data['productos'].append({'descripcion': prod[0],
'tiempo envio': prod[1], 'costo envio': prod[2],
'cantidad': prod[3], 'precio': prod[4]})
facturas.append(data)
return jsonify(facturas)
except Exception as e:
print(e)
return jsonify('Ha ocurrido un error')
finally:
cur.close()
@app.route('/reporte_suscripciones_api')
def suscripciones():
try:
cur = mysql.connect().cursor()
data = []
tiendas = []
cur.execute(
'SELECT u.id_usr, u.nombre, u.email, u.foto FROM seguir t LEFT JOIN tbl_usuarios u ON u.id_usr = t.id_tienda WHERE t.id_comprador =%s '
, (session['id'],))
rows = cur.fetchall()
for row in rows:
tiendas.append({'id': row[0], 'nombre': row[1], 'foto': row[3],
'email': row[2]})
productos = []
cur.execute(
'SELECT u.descripcion, c.descripcion, u.precio, b.nombre, u.id_prod FROM listadeseos t LEFT JOIN tbl_productos u ON u.id_prod = t.id_producto LEFT JOIN tbl_categoriasproductos c ON u.id_categoria = c.id_catp LEFT JOIN tbl_usuarios b ON u.usr_id = b.id_usr WHERE t.usr_id =%s '
, (session['id'],))
rows = cur.fetchall()
for row in rows:
productos.append({'id': row[4], 'descripcion': row[0],
'categoria': row[1], 'precio': row[2], 'tienda': row[3]})
data.append(productos)
data.append(tiendas)
return jsonify(data)
except Exception as e:
print(e)
return jsonify('Ha ocurrido un error')
finally:
cur.close()
@app.route('/reporte_ofertas_api', methods=['POST'])
def ofertas():
try:
cur = mysql.connect().cursor()
if not 'usuario' in session:
return jsonify('Debes iniciar sesion')
else:
_json = request.get_json(force=True)
_fecha1 = _json['fechaInicio']
_fecha2 = _json['fechaFin']
_precio = _json['precio']
_categoria = _json['categoria']
query = (
'SELECT p.id_prod, p.descripcion, c.descripcion, p.precio, DATE_FORMAT(p.publicacion, %s) FROM tbl_productos p LEFT JOIN tbl_categoriasproductos c ON p.id_categoria = c.id_catp WHERE p.id_categoria = %s AND p.precio <= %s AND p.publicacion BETWEEN %s AND %s '
)
values = '%d %M %Y', _categoria, _precio, _fecha1, _fecha2
cur.execute(query, values)
productos = []
rows = cur.fetchall()
if rows:
for row in rows:
productos.append({'id': row[0], 'descripcion': row[1],
'categoria': row[2], 'precio': row[3],
'publicacion': row[4]})
return jsonify(productos)
except Exception as e:
print(e)
return jsonify('Ha ocurrido un error')
finally:
cur.close()
| from flask import jsonify, request, render_template, redirect, session, flash
from init import app
from init import mysql
@app.route('/reportes')
def reportes():
try:
cur = mysql.connect().cursor()
if 'usuario' in session:
return render_template('views/reportes.html', id=session['id'])
else:
return redirect('/login')
except Exception as e:
print(e)
return jsonify('Ha ocurrido un error')
finally:
cur.close()
@app.route('/reporte_compras_api', methods=['POST'])
def compras():
try:
cur = mysql.connect().cursor()
if not 'usuario' in session:
return jsonify('Debes registrarte')
else:
_json = request.get_json(force=True)
_id = session['id']
_fecha1 = _json['fechaInicio']
_fecha2 = _json['fechaFin']
data = []
query = (
'SELECT p.id_prod, p.descripcion, SUM(t.cantidad), p.precio, SUM(c.total), p.costoenvio FROM productos t LEFT JOIN tbl_productos p ON p.id_prod = t.id_prod LEFT JOIN tbl_compras c ON c.id_comp = t.id_comp WHERE c.id_comprador = %s AND c.fecha BETWEEN %s AND %s GROUP BY p.id_prod'
)
values = _id, _fecha1, _fecha2
cur.execute(query, values)
rows = cur.fetchall()
for row in rows:
data.append({'id': row[0], 'descripcion': row[1],
'costo envio': row[5], 'precio': row[3], 'cantidad':
int(float(row[2])), 'total': row[4]})
res = jsonify(data)
res.status_code = 200
return res
except Exception as e:
print(e)
return jsonify('Ha ocurrido un error')
finally:
cur.close()
@app.route('/reporte_ventas_api', methods=['POST'])
def ventas():
try:
if 'usuario' in session and session['tipo_usuario'] == 'T':
cur = mysql.connect().cursor()
_json = request.get_json(force=True)
_id = session['id']
_fecha1 = _json['fechaInicio']
_fecha2 = _json['fechaFin']
data = []
query = (
'SELECT DISTINCT p.id_prod, p.descripcion, p.stock, DATE_FORMAT(p.publicacion, %s), p.precio, p.tiempoenvio, p.costoenvio, SUM(t.cantidad), SUM(c.total) FROM productos t LEFT JOIN tbl_productos p ON p.id_prod = t.id_prod LEFT JOIN tbl_compras c ON c.id_comp = t.id_comp WHERE p.usr_id = %s AND c.fecha BETWEEN %s AND %s GROUP BY p.id_prod'
)
values = '%d %M %Y', _id, _fecha1, _fecha2
cur.execute(query, values)
rows = cur.fetchall()
for row in rows:
data.append({'id': row[0], 'descripcion': row[1], 'stock':
row[2], 'publicacion': row[3], 'precio': row[4],
'tiempo envio': row[5], 'costo envio': row[6],
'cantidad': float(row[7]), 'total': row[8]})
return jsonify(data)
else:
return jsonify('Debes registrarte como tienda')
except Exception as e:
print(e)
return jsonify('Ha ocurrido un error')
finally:
cur.close()
@app.route('/reporte_facturas_api')
def facturas():
try:
cur = mysql.connect().cursor()
facturas = []
data = {}
if session['tipo_usuario'] is 'C':
cur.execute(
'SELECT t.id_comp, DATE_FORMAT(t.fecha, %s), t.total, p.nombrepropietario, p.numero, u.nombre, d.numcasillero, d.provincia FROM tbl_compras t LEFT JOIN tbl_metodosdepago p ON t.id_pago = p.id_pago LEFT JOIN tbl_usuarios u ON u.id_usr = t.id_tienda LEFT JOIN tbl_direccionesdeenvio d ON d.id_dire = d.id_dire WHERE t.id_comprador = %s'
, ('%d %M %Y', session['id']))
else:
cur.execute(
'SELECT t.id_comp, DATE_FORMAT(t.fecha, %s), t.total, p.nombrepropietario, p.numero, u.nombre, d.numcasillero, d.provincia FROM tbl_compras t LEFT JOIN tbl_metodosdepago p ON t.id_pago = p.id_pago LEFT JOIN tbl_usuarios u ON u.id_usr = t.id_comprador LEFT JOIN tbl_direccionesdeenvio d ON d.id_dire = d.id_dire WHERE t.id_tienda = %s'
, ('%d %M %Y', session['id']))
rows = cur.fetchall()
for row in rows:
if session['tipo_usuario'] is 'C':
data = {'id': row[0], 'fecha': row[1], 'total': row[2],
'metodopago': {'propietario': row[3], 'numero': row[4]},
'tienda': row[5], 'direccionenvio': {'casillero': row[6
], 'provincia': row[7]}, 'productos': []}
else:
data = {'id': row[0], 'fecha': row[1], 'total': row[2],
'metodopago': {'propietario': row[3], 'numero': row[4]},
'comprador': row[5], 'direccionenvio': {'casillero':
row[6], 'provincia': row[7]}, 'productos': []}
cur.execute(
'SELECT p.descripcion, p.tiempoenvio, p.costoenvio, t.cantidad, p.precio FROM productos t LEFT JOIN tbl_productos p ON p.id_prod = t.id_prod WHERE t.id_comp = %s'
, (data['id'],))
prods = cur.fetchall()
for prod in prods:
data['productos'].append({'descripcion': prod[0],
'tiempo envio': prod[1], 'costo envio': prod[2],
'cantidad': prod[3], 'precio': prod[4]})
facturas.append(data)
return jsonify(facturas)
except Exception as e:
print(e)
return jsonify('Ha ocurrido un error')
finally:
cur.close()
@app.route('/reporte_suscripciones_api')
def suscripciones():
try:
cur = mysql.connect().cursor()
data = []
tiendas = []
cur.execute(
'SELECT u.id_usr, u.nombre, u.email, u.foto FROM seguir t LEFT JOIN tbl_usuarios u ON u.id_usr = t.id_tienda WHERE t.id_comprador =%s '
, (session['id'],))
rows = cur.fetchall()
for row in rows:
tiendas.append({'id': row[0], 'nombre': row[1], 'foto': row[3],
'email': row[2]})
productos = []
cur.execute(
'SELECT u.descripcion, c.descripcion, u.precio, b.nombre, u.id_prod FROM listadeseos t LEFT JOIN tbl_productos u ON u.id_prod = t.id_producto LEFT JOIN tbl_categoriasproductos c ON u.id_categoria = c.id_catp LEFT JOIN tbl_usuarios b ON u.usr_id = b.id_usr WHERE t.usr_id =%s '
, (session['id'],))
rows = cur.fetchall()
for row in rows:
productos.append({'id': row[4], 'descripcion': row[0],
'categoria': row[1], 'precio': row[2], 'tienda': row[3]})
data.append(productos)
data.append(tiendas)
return jsonify(data)
except Exception as e:
print(e)
return jsonify('Ha ocurrido un error')
finally:
cur.close()
@app.route('/reporte_ofertas_api', methods=['POST'])
def ofertas():
try:
cur = mysql.connect().cursor()
if not 'usuario' in session:
return jsonify('Debes iniciar sesion')
else:
_json = request.get_json(force=True)
_fecha1 = _json['fechaInicio']
_fecha2 = _json['fechaFin']
_precio = _json['precio']
_categoria = _json['categoria']
query = (
'SELECT p.id_prod, p.descripcion, c.descripcion, p.precio, DATE_FORMAT(p.publicacion, %s) FROM tbl_productos p LEFT JOIN tbl_categoriasproductos c ON p.id_categoria = c.id_catp WHERE p.id_categoria = %s AND p.precio <= %s AND p.publicacion BETWEEN %s AND %s '
)
values = '%d %M %Y', _categoria, _precio, _fecha1, _fecha2
cur.execute(query, values)
productos = []
rows = cur.fetchall()
if rows:
for row in rows:
productos.append({'id': row[0], 'descripcion': row[1],
'categoria': row[2], 'precio': row[3],
'publicacion': row[4]})
return jsonify(productos)
except Exception as e:
print(e)
return jsonify('Ha ocurrido un error')
finally:
cur.close()
| from flask import jsonify, request, render_template, redirect, session, flash
from init import app
from init import mysql
#Devuelve la pagina de reportes
@app.route('/reportes')
def reportes():
try:
cur = mysql.connect().cursor()
if 'usuario' in session:
return render_template('views/reportes.html', id=session['id'])
else:
return redirect('/login')
except Exception as e:
print(e)
return jsonify('Ha ocurrido un error')
finally:
cur.close()
#Se accede a los reportes de compras AXIOS
@app.route('/reporte_compras_api', methods=['POST'])
def compras():
try:
cur = mysql.connect().cursor()
if not 'usuario' in session:
return jsonify('Debes registrarte')
else:
_json = request.get_json(force=True)
# _id = session['id']
_id = session['id']
_fecha1 = _json['fechaInicio']
_fecha2 = _json['fechaFin']
data = []
query = "SELECT p.id_prod, p.descripcion, SUM(t.cantidad), p.precio, SUM(c.total), p.costoenvio FROM productos t LEFT JOIN tbl_productos p ON p.id_prod = t.id_prod LEFT JOIN tbl_compras c ON c.id_comp = t.id_comp WHERE c.id_comprador = %s AND c.fecha BETWEEN %s AND %s GROUP BY p.id_prod"
values = (_id, _fecha1, _fecha2)
cur.execute(query, values)
rows = cur.fetchall()
for row in rows:
data.append({'id': row[0], 'descripcion': row[1], 'costo envio': row[5] ,'precio':row[3],'cantidad': int(float(row[2])),'total': row[4]})
res = jsonify(data)
res.status_code = 200
return res
except Exception as e:
print(e)
return jsonify('Ha ocurrido un error')
finally:
cur.close()
#Se obtienen las ventas AXIOS
@app.route('/reporte_ventas_api', methods=['POST'])
def ventas():
try:
if 'usuario' in session and session['tipo_usuario'] == 'T':
cur = mysql.connect().cursor()
_json = request.get_json(force=True)
_id = session['id']
_fecha1 = _json['fechaInicio']
_fecha2 = _json['fechaFin']
data = []
query = "SELECT DISTINCT p.id_prod, p.descripcion, p.stock, DATE_FORMAT(p.publicacion, %s), p.precio, p.tiempoenvio, p.costoenvio, SUM(t.cantidad), SUM(c.total) FROM productos t LEFT JOIN tbl_productos p ON p.id_prod = t.id_prod LEFT JOIN tbl_compras c ON c.id_comp = t.id_comp WHERE p.usr_id = %s AND c.fecha BETWEEN %s AND %s GROUP BY p.id_prod"
values = ("%d %M %Y",_id, _fecha1, _fecha2)
cur.execute(query, values)
rows = cur.fetchall()
for row in rows:
data.append({'id': row[0], 'descripcion': row[1], 'stock':row[2], 'publicacion':row[3], 'precio':row[4], 'tiempo envio': row[5], 'costo envio': row[6],'cantidad': float(row[7]), 'total': row[8]})
return jsonify(data)
else:
return jsonify('Debes registrarte como tienda')
except Exception as e:
print(e)
return jsonify('Ha ocurrido un error')
finally:
cur.close()
#Se otienen las facturas AXIOS
@app.route('/reporte_facturas_api')
def facturas():
try:
cur = mysql.connect().cursor()
facturas = []
data = {}
#Verifica si es un comprador o tienda
if session['tipo_usuario'] is 'C':
cur.execute("SELECT t.id_comp, DATE_FORMAT(t.fecha, %s), t.total, p.nombrepropietario, p.numero, u.nombre, d.numcasillero, d.provincia FROM tbl_compras t LEFT JOIN tbl_metodosdepago p ON t.id_pago = p.id_pago LEFT JOIN tbl_usuarios u ON u.id_usr = t.id_tienda LEFT JOIN tbl_direccionesdeenvio d ON d.id_dire = d.id_dire WHERE t.id_comprador = %s", ("%d %M %Y",session['id'],))
else:
cur.execute("SELECT t.id_comp, DATE_FORMAT(t.fecha, %s), t.total, p.nombrepropietario, p.numero, u.nombre, d.numcasillero, d.provincia FROM tbl_compras t LEFT JOIN tbl_metodosdepago p ON t.id_pago = p.id_pago LEFT JOIN tbl_usuarios u ON u.id_usr = t.id_comprador LEFT JOIN tbl_direccionesdeenvio d ON d.id_dire = d.id_dire WHERE t.id_tienda = %s", ("%d %M %Y",session['id'],))
rows = cur.fetchall()
for row in rows:
if session['tipo_usuario'] is 'C':
data = {'id': row[0], 'fecha':row[1], 'total':row[2], 'metodopago':{'propietario': row[3], 'numero': row[4]}, 'tienda': row[5], 'direccionenvio': {'casillero':row[6], 'provincia':row[7]}, 'productos':[]}
else:
data = {'id': row[0], 'fecha':row[1], 'total':row[2], 'metodopago':{'propietario': row[3], 'numero': row[4]}, 'comprador': row[5], 'direccionenvio': {'casillero':row[6], 'provincia':row[7]}, 'productos':[]}
cur.execute("SELECT p.descripcion, p.tiempoenvio, p.costoenvio, t.cantidad, p.precio FROM productos t LEFT JOIN tbl_productos p ON p.id_prod = t.id_prod WHERE t.id_comp = %s", (data['id'], ))
prods = cur.fetchall()
for prod in prods:
data['productos'].append({'descripcion': prod[0], 'tiempo envio': prod[1], 'costo envio':prod[2], 'cantidad':prod[3], 'precio':prod[4]})
facturas.append(data)
return jsonify(facturas)
except Exception as e:
print(e)
return jsonify('Ha ocurrido un error')
finally:
cur.close()
#Retornar las suscripciones del comprador AXIOS
@app.route('/reporte_suscripciones_api')
def suscripciones():
try:
cur = mysql.connect().cursor()
data = []
tiendas = []
cur.execute("SELECT u.id_usr, u.nombre, u.email, u.foto FROM seguir t LEFT JOIN tbl_usuarios u ON u.id_usr = t.id_tienda WHERE t.id_comprador =%s ", (session['id'],))
rows = cur.fetchall()
for row in rows:
tiendas.append({'id': row[0], 'nombre':row[1], 'foto': row[3], 'email':row[2]})
productos = []
cur.execute("SELECT u.descripcion, c.descripcion, u.precio, b.nombre, u.id_prod FROM listadeseos t LEFT JOIN tbl_productos u ON u.id_prod = t.id_producto LEFT JOIN tbl_categoriasproductos c ON u.id_categoria = c.id_catp LEFT JOIN tbl_usuarios b ON u.usr_id = b.id_usr WHERE t.usr_id =%s ", (session['id'],))
rows = cur.fetchall()
for row in rows:
productos.append({'id':row[4],'descripcion': row[0], 'categoria':row[1], 'precio':row[2], 'tienda': row[3]})
data.append(productos)
data.append(tiendas)
return jsonify(data)
except Exception as e:
print(e)
return jsonify('Ha ocurrido un error')
finally:
cur.close()
#Retorna productos con filtro establecido AXIOS
@app.route('/reporte_ofertas_api', methods=['POST'])
def ofertas():
try:
cur = mysql.connect().cursor()
if not 'usuario' in session:
return jsonify('Debes iniciar sesion');
else:
_json = request.get_json(force=True)
_fecha1 = _json['fechaInicio']
_fecha2 = _json['fechaFin']
_precio= _json['precio']
_categoria = _json['categoria']
query = "SELECT p.id_prod, p.descripcion, c.descripcion, p.precio, DATE_FORMAT(p.publicacion, %s) FROM tbl_productos p LEFT JOIN tbl_categoriasproductos c ON p.id_categoria = c.id_catp WHERE p.id_categoria = %s AND p.precio <= %s AND p.publicacion BETWEEN %s AND %s "
values =("%d %M %Y", _categoria, _precio, _fecha1, _fecha2)
cur.execute(query, values)
productos = []
rows = cur.fetchall()
if rows:
for row in rows:
productos.append({'id':row[0],'descripcion': row[1], 'categoria':row[2], 'precio': row[3], 'publicacion':row[4]})
return jsonify(productos)
except Exception as e:
print(e)
return jsonify('Ha ocurrido un error')
finally:
cur.close() | [
4,
5,
6,
7,
8
] |
2,091 | 36682c4ab90cdd22b644906e22ede71254eb42ff | <mask token>
| <mask token>
assert str(sys.argv[1]) is not None
<mask token>
for k in ALPHA_VALS:
total_train_error = 0
total_train_variance = 0
total_test_error = 0
total_test_variance = 0
dumb_total_train_error = 0
dumb_total_train_variance = 0
dumb_total_test_error = 0
dumb_total_test_variance = 0
baseline_error = 0
total_count = 0
for j in range(ITERATIONS):
for i in range(len(variables)):
allData = []
allDumbData = []
cursor = db.playtime_model.find({'PLAYER_GROUP': i, 'AVG_MIN':
{'$gt': MINUTE_RESTRICTION}})
count = 0
for document in cursor:
dataRow = []
for variable in variables:
dataRow.append(document[variable])
dataRow.append(document['AVG_MIN'])
dataRow.append(document['WIN_CHANCE'] ** 2)
dataRow.append(document['MIN'])
allData.append(dataRow)
allDumbData.append([document['AVG_MIN'], document['MIN']])
count = count + 1
print('player group: %d, game count: %d' % (i, count))
if count > 600:
total_count += count
Xy = np.array(allData)
np.random.shuffle(Xy)
X = Xy[:, range(0, Xy.shape[1] - 1)]
y = Xy[:, Xy.shape[1] - 1]
X_normalized = preprocessing.scale(X)
train_test_split = int(round(len(y) * 0.7))
X_normalized_train = X_normalized[:train_test_split]
X_normalized_test = X_normalized[train_test_split:]
y_train = y[:train_test_split]
y_test = y[train_test_split:]
if k == 0:
regr = linear_model.LinearRegression(fit_intercept=True)
else:
regr = linear_model.Lasso(alpha=k, fit_intercept=True)
regr.fit(X_normalized_train, y_train)
train_error = np.mean((regr.predict(X_normalized_train) -
y_train) ** 2)
train_variance = regr.score(X_normalized_train, y_train)
test_error = np.mean((regr.predict(X_normalized_test) -
y_test) ** 2)
test_variance = regr.score(X_normalized_test, y_test)
total_train_error += train_error * count
total_train_variance += train_variance * count
total_test_error += test_error * count
total_test_variance += test_variance * count
total_train_error = total_train_error / total_count
total_train_variance = total_train_variance / total_count
total_test_error = total_test_error / total_count
total_test_variance = total_test_variance / total_count
print('alpha-value: %.2f' % k)
print('total_train_error: %.2f' % total_train_error)
print('total_train_variance: %.2f' % total_train_variance)
print('total_test_error: %.2f' % total_test_error)
print('total_test_variance: %.2f' % total_test_variance)
if total_test_error < best_error:
best_error = total_test_error
best_k = k
<mask token>
for document in cursor:
baseline_error += (document['AVG_MIN'] - document['MIN']) ** 2
count += 1
<mask token>
print('baseline error: %.2f' % baseline_error)
print('best error: %.2f, best alpha: %.2f' % (best_error, best_k))
| <mask token>
assert str(sys.argv[1]) is not None
client = MongoClient(str(sys.argv[1]))
db = client.nba_py
variables = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11',
'12', '13', '14', '15', '16', '17', '18', '19']
ITERATIONS = 5
MINUTE_RESTRICTION = 15
ALPHA_VALS = [0, 0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, 1]
best_error = 999
best_k = 0
for k in ALPHA_VALS:
total_train_error = 0
total_train_variance = 0
total_test_error = 0
total_test_variance = 0
dumb_total_train_error = 0
dumb_total_train_variance = 0
dumb_total_test_error = 0
dumb_total_test_variance = 0
baseline_error = 0
total_count = 0
for j in range(ITERATIONS):
for i in range(len(variables)):
allData = []
allDumbData = []
cursor = db.playtime_model.find({'PLAYER_GROUP': i, 'AVG_MIN':
{'$gt': MINUTE_RESTRICTION}})
count = 0
for document in cursor:
dataRow = []
for variable in variables:
dataRow.append(document[variable])
dataRow.append(document['AVG_MIN'])
dataRow.append(document['WIN_CHANCE'] ** 2)
dataRow.append(document['MIN'])
allData.append(dataRow)
allDumbData.append([document['AVG_MIN'], document['MIN']])
count = count + 1
print('player group: %d, game count: %d' % (i, count))
if count > 600:
total_count += count
Xy = np.array(allData)
np.random.shuffle(Xy)
X = Xy[:, range(0, Xy.shape[1] - 1)]
y = Xy[:, Xy.shape[1] - 1]
X_normalized = preprocessing.scale(X)
train_test_split = int(round(len(y) * 0.7))
X_normalized_train = X_normalized[:train_test_split]
X_normalized_test = X_normalized[train_test_split:]
y_train = y[:train_test_split]
y_test = y[train_test_split:]
if k == 0:
regr = linear_model.LinearRegression(fit_intercept=True)
else:
regr = linear_model.Lasso(alpha=k, fit_intercept=True)
regr.fit(X_normalized_train, y_train)
train_error = np.mean((regr.predict(X_normalized_train) -
y_train) ** 2)
train_variance = regr.score(X_normalized_train, y_train)
test_error = np.mean((regr.predict(X_normalized_test) -
y_test) ** 2)
test_variance = regr.score(X_normalized_test, y_test)
total_train_error += train_error * count
total_train_variance += train_variance * count
total_test_error += test_error * count
total_test_variance += test_variance * count
total_train_error = total_train_error / total_count
total_train_variance = total_train_variance / total_count
total_test_error = total_test_error / total_count
total_test_variance = total_test_variance / total_count
print('alpha-value: %.2f' % k)
print('total_train_error: %.2f' % total_train_error)
print('total_train_variance: %.2f' % total_train_variance)
print('total_test_error: %.2f' % total_test_error)
print('total_test_variance: %.2f' % total_test_variance)
if total_test_error < best_error:
best_error = total_test_error
best_k = k
cursor = db.playtime_model.find({'AVG_MIN': {'$gt': MINUTE_RESTRICTION}})
baseline_error = 0.0
count = 0
for document in cursor:
baseline_error += (document['AVG_MIN'] - document['MIN']) ** 2
count += 1
baseline_error = baseline_error / count
print('baseline error: %.2f' % baseline_error)
print('best error: %.2f, best alpha: %.2f' % (best_error, best_k))
| import sys
import numpy as np
from pymongo import MongoClient
from sklearn import linear_model, preprocessing
assert str(sys.argv[1]) is not None
client = MongoClient(str(sys.argv[1]))
db = client.nba_py
variables = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11',
'12', '13', '14', '15', '16', '17', '18', '19']
ITERATIONS = 5
MINUTE_RESTRICTION = 15
ALPHA_VALS = [0, 0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, 1]
best_error = 999
best_k = 0
for k in ALPHA_VALS:
total_train_error = 0
total_train_variance = 0
total_test_error = 0
total_test_variance = 0
dumb_total_train_error = 0
dumb_total_train_variance = 0
dumb_total_test_error = 0
dumb_total_test_variance = 0
baseline_error = 0
total_count = 0
for j in range(ITERATIONS):
for i in range(len(variables)):
allData = []
allDumbData = []
cursor = db.playtime_model.find({'PLAYER_GROUP': i, 'AVG_MIN':
{'$gt': MINUTE_RESTRICTION}})
count = 0
for document in cursor:
dataRow = []
for variable in variables:
dataRow.append(document[variable])
dataRow.append(document['AVG_MIN'])
dataRow.append(document['WIN_CHANCE'] ** 2)
dataRow.append(document['MIN'])
allData.append(dataRow)
allDumbData.append([document['AVG_MIN'], document['MIN']])
count = count + 1
print('player group: %d, game count: %d' % (i, count))
if count > 600:
total_count += count
Xy = np.array(allData)
np.random.shuffle(Xy)
X = Xy[:, range(0, Xy.shape[1] - 1)]
y = Xy[:, Xy.shape[1] - 1]
X_normalized = preprocessing.scale(X)
train_test_split = int(round(len(y) * 0.7))
X_normalized_train = X_normalized[:train_test_split]
X_normalized_test = X_normalized[train_test_split:]
y_train = y[:train_test_split]
y_test = y[train_test_split:]
if k == 0:
regr = linear_model.LinearRegression(fit_intercept=True)
else:
regr = linear_model.Lasso(alpha=k, fit_intercept=True)
regr.fit(X_normalized_train, y_train)
train_error = np.mean((regr.predict(X_normalized_train) -
y_train) ** 2)
train_variance = regr.score(X_normalized_train, y_train)
test_error = np.mean((regr.predict(X_normalized_test) -
y_test) ** 2)
test_variance = regr.score(X_normalized_test, y_test)
total_train_error += train_error * count
total_train_variance += train_variance * count
total_test_error += test_error * count
total_test_variance += test_variance * count
total_train_error = total_train_error / total_count
total_train_variance = total_train_variance / total_count
total_test_error = total_test_error / total_count
total_test_variance = total_test_variance / total_count
print('alpha-value: %.2f' % k)
print('total_train_error: %.2f' % total_train_error)
print('total_train_variance: %.2f' % total_train_variance)
print('total_test_error: %.2f' % total_test_error)
print('total_test_variance: %.2f' % total_test_variance)
if total_test_error < best_error:
best_error = total_test_error
best_k = k
cursor = db.playtime_model.find({'AVG_MIN': {'$gt': MINUTE_RESTRICTION}})
baseline_error = 0.0
count = 0
for document in cursor:
baseline_error += (document['AVG_MIN'] - document['MIN']) ** 2
count += 1
baseline_error = baseline_error / count
print('baseline error: %.2f' % baseline_error)
print('best error: %.2f, best alpha: %.2f' % (best_error, best_k))
| import sys
import numpy as np
from pymongo import MongoClient
from sklearn import linear_model, preprocessing
assert str(sys.argv[1]) is not None
client = MongoClient(str(sys.argv[1]))
db = client.nba_py
variables = ['0', '1', '2', '3', '4',
'5', '6', '7', '8', '9',
'10', '11', '12', '13', '14',
'15', '16', '17', '18', '19', ]
ITERATIONS = 5
MINUTE_RESTRICTION = 15
ALPHA_VALS = [0, 0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, 1]
best_error = 999
best_k = 0
for k in ALPHA_VALS:
total_train_error = 0
total_train_variance = 0
total_test_error = 0
total_test_variance = 0
dumb_total_train_error = 0
dumb_total_train_variance = 0
dumb_total_test_error = 0
dumb_total_test_variance = 0
baseline_error = 0
total_count = 0
for j in range(ITERATIONS):
for i in range(len(variables)):
allData = []
allDumbData = []
cursor = db.playtime_model.find({"PLAYER_GROUP": i, "AVG_MIN": {"$gt": MINUTE_RESTRICTION}})
count = 0
for document in cursor:
dataRow = []
for variable in variables:
dataRow.append(document[variable])
dataRow.append(document['AVG_MIN'])
dataRow.append((document['WIN_CHANCE'])**2)
dataRow.append(document['MIN'])
allData.append(dataRow)
allDumbData.append([document['AVG_MIN'], document['MIN']])
count = count + 1
print("player group: %d, game count: %d" % (i, count))
if (count > 600):
total_count += count
Xy = np.array(allData)
np.random.shuffle(Xy)
X = Xy[ :, range(0, Xy.shape[1]-1) ]
y = Xy[ :, Xy.shape[1]-1 ]
X_normalized = preprocessing.scale(X)
# Separate into Train and Test datasets
train_test_split = int(round(len(y) * 0.7))
X_normalized_train = X_normalized[:train_test_split]
X_normalized_test = X_normalized[train_test_split:]
y_train = y[:train_test_split]
y_test = y[train_test_split:]
# train model
if k == 0:
regr = linear_model.LinearRegression(fit_intercept=True)
else:
regr = linear_model.Lasso(alpha=k, fit_intercept=True)
regr.fit(X_normalized_train, y_train)
# Coefficients
# print('Intercept: ', regr.intercept_) ------------------------------------
# for i in range(regr.coef_.size): -----------------------------------------
# print (variables[i], regr.coef_[i]) ----------------------------------
# print("================") ------------------------------------------------
# Error Analysis
train_error = np.mean((regr.predict(X_normalized_train) - y_train) ** 2)
train_variance = regr.score(X_normalized_train, y_train)
test_error = np.mean((regr.predict(X_normalized_test) - y_test) ** 2)
test_variance = regr.score(X_normalized_test, y_test)
# print("Residual sum of squares for training set: %.2f" % train_error) ----
# print('Variance score: %.2f' % train_variance) ---------------------------
# print("Residual sum of squares for test set: %.2f" % test_error) -
# print('Variance score: %.2f' % test_variance) --------------------
total_train_error += train_error * count
total_train_variance += train_variance * count
total_test_error += test_error * count
total_test_variance += test_variance * count
#~~~~calculate against baseline~~~~~~~~~~~
# Xy = np.array(allDumbData) -----------------------------------
# np.random.shuffle(Xy) ----------------------------------------
# X = Xy[ :, range(0, Xy.shape[1]-1) ] -------------------------
# y = Xy[ :, Xy.shape[1]-1 ] -----------------------------------
# -----------------------------------------------------------------------------
# X_normalized = (X) -------------------------------------------
# -----------------------------------------------------------------------------
# # Separate into Train and Test datasets ----------------------
# train_test_split = int(round(len(y) * 0.7)) ------------------
# X_normalized_train = X_normalized[:train_test_split] ---------
# X_normalized_test = X_normalized[train_test_split:] ----------
# y_train = y[:train_test_split] -------------------------------
# y_test = y[train_test_split:] --------------------------------
# -----------------------------------------------------------------------------
# regr = linear_model.LinearRegression(fit_intercept=True) -----
# regr.fit(X_normalized_train, y_train) ------------------------
# -----------------------------------------------------------------------------
# # Error Analysis ---------------------------------------------
# train_error = np.mean((regr.predict(X_normalized_train) - y_train) ** 2)
# train_variance = regr.score(X_normalized_train, y_train) -----
# test_error = np.mean((regr.predict(X_normalized_test) - y_test) ** 2)
# test_variance = regr.score(X_normalized_test, y_test) --------
# # print("Residual sum of squares for training set: %.2f" % train_error) ----
# # print('Variance score: %.2f' % train_variance) ---------------------------
# # print("Residual sum of squares for dumb test set: %.2f" % test_error)
# # print('Variance score for dumb test set: %.2f' % test_variance) --
# dumb_total_train_error += train_error * count ----------------
# dumb_total_train_variance += train_variance * count ----------
# dumb_total_test_error += test_error * count ------------------
# dumb_total_test_variance += test_variance * count ------------
total_train_error = total_train_error / total_count
total_train_variance = total_train_variance / total_count
total_test_error = total_test_error / total_count
total_test_variance = total_test_variance / total_count
# dumb_total_train_error = dumb_total_train_error / total_count ------------
# dumb_total_train_variance = dumb_total_train_variance / total_count ------
# dumb_total_test_error = dumb_total_test_error / total_count --------------
# dumb_total_test_variance = dumb_total_test_variance / total_count --------
print("alpha-value: %.2f" % k)
print("total_train_error: %.2f" % total_train_error)
print("total_train_variance: %.2f" % total_train_variance)
print("total_test_error: %.2f" % total_test_error)
print("total_test_variance: %.2f" % total_test_variance)
# print("dumb_total_train_error: %.2f" % dumb_total_train_error) -----------
# print("dumb_total_train_variance: %.2f" % dumb_total_train_variance) -----
# print("dumb_total_test_error: %.2f" % dumb_total_test_error) -------------
# print("dumb_total_test_variance: %.2f" % dumb_total_test_variance) -------
# print("total_count: %d" % (total_count / ITERATIONS)) --------------------
if (total_test_error < best_error):
best_error = total_test_error
best_k = k
# Calculate against baseline ---------------------------------------------------
cursor = db.playtime_model.find({"AVG_MIN": {"$gt": MINUTE_RESTRICTION}})
baseline_error = 0.0
count = 0
for document in cursor:
baseline_error += (document['AVG_MIN'] - document['MIN'])**2
count += 1
baseline_error = baseline_error / count
print("baseline error: %.2f" % baseline_error)
print("best error: %.2f, best alpha: %.2f" % (best_error, best_k)) | [
0,
1,
2,
3,
4
] |
2,092 | f9ba944724b262afb39f2859b5726b961536cdf0 | <mask token>
| quotes = [
'Today you are you! That is truer than true! There is no one alive who is you-er than you!'
, "Don't cry because it's over. Smile because it happened.",
"You have brains in your head. You have feet in your shoes. You can steer yourself in any direction you choose. You're on your own, and you know what you know. And you are the guy who'll decide where to go."
,
"The more that you read, the more things you will know. The more that you learn, the more places you'll go. "
, 'I like nonsense; it wakes up the brain cells.',
"Step with care and great tact, and remember that Life's a Great Balancing Act."
,
'How did it get so late so soon? Its night before its afternoon. December is here before its June. My goodness how the time has flewn. How did it get so late so soon?'
,
'Think left and think right and think low and think high. Oh, the thinks you can think up if only you try!'
, "A person's a person, no matter how small.",
'You can get help from teachers, but you are going to have to learn a lot by yourself, sitting alone in a room.'
,
"Unless someone like you cares a whole awful lot, nothing is going to get better. It's not."
,
"You're never too old, too wacky, too wild, to pick up a book and read to a child."
, 'Today was good. Today was fun. Tomorrow is another one.',
'I meant what I said and I said what I meant.',
"You're in pretty good shape for the shape you are in.",
'Only you can control your future.', 'I am not a consecutive writer.',
"Maybe Christmas, the Grinch thought, doesn't come from a store.",
'Preachers in pulpits talked about what a great message is in the book. No matter what you do, somebody always imputes meaning into your books.'
,
'Sometimes, when I see my granddaughters make small discoveries of their own, I wish I were a child.'
, 'Adults are obsolete children.',
"Whenever things go a bit sour in a job I'm doing, I always tell myself, 'You can do better than this.'"
, 'From there to here, and here to there, funny things are everywhere.',
"I stay out of politics because if I begin thinking too much about politics, I'll probably... drop writing children's books and become a political cartoonist again."
, "I was saving the name of 'Geisel' for the Great American Novel.",
"You make 'em, I amuse 'em."]
| quotes = [
"Today you are you! That is truer than true! There is no one alive who is you-er than you!",
"Don't cry because it's over. Smile because it happened.",
"You have brains in your head. You have feet in your shoes. You can steer yourself in any direction you choose. You're on your own, and you know what you know. And you are the guy who'll decide where to go.",
"The more that you read, the more things you will know. The more that you learn, the more places you'll go. ",
"I like nonsense; it wakes up the brain cells.",
"Step with care and great tact, and remember that Life's a Great Balancing Act.",
"How did it get so late so soon? Its night before its afternoon. December is here before its June. My goodness how the time has flewn. How did it get so late so soon?",
"Think left and think right and think low and think high. Oh, the thinks you can think up if only you try!",
"A person's a person, no matter how small.",
"You can get help from teachers, but you are going to have to learn a lot by yourself, sitting alone in a room.",
"Unless someone like you cares a whole awful lot, nothing is going to get better. It's not.",
"You're never too old, too wacky, too wild, to pick up a book and read to a child.",
"Today was good. Today was fun. Tomorrow is another one.",
"I meant what I said and I said what I meant.",
"You're in pretty good shape for the shape you are in.",
"Only you can control your future.",
"I am not a consecutive writer.",
"Maybe Christmas, the Grinch thought, doesn't come from a store.",
"Preachers in pulpits talked about what a great message is in the book. No matter what you do, somebody always imputes meaning into your books.",
"Sometimes, when I see my granddaughters make small discoveries of their own, I wish I were a child.",
"Adults are obsolete children.",
"Whenever things go a bit sour in a job I'm doing, I always tell myself, 'You can do better than this.'",
"From there to here, and here to there, funny things are everywhere.",
"I stay out of politics because if I begin thinking too much about politics, I'll probably... drop writing children's books and become a political cartoonist again.",
"I was saving the name of 'Geisel' for the Great American Novel.",
"You make 'em, I amuse 'em."
] | null | null | [
0,
1,
2
] |
2,093 | bcad9869e6bc9b17eee490897b4b706171381366 | <mask token>
class StudentListView(ListView):
<mask token>
<mask token>
<mask token>
def get_queryset(self):
return Student.objects.filter(course='Python')
<mask token>
<mask token>
| <mask token>
class StudentListView(ListView):
<mask token>
<mask token>
<mask token>
def get_queryset(self):
return Student.objects.filter(course='Python')
def get_context_data(self, *args, **kwargs):
context = super().get_context_data(*args, **kwargs)
context['freshers'] = Student.objects.all().order_by('name')
return context
def get_template_names(self):
if self.request.COOKIES['user'] == 'farzam':
template_name = 'staff/farzam.html'
else:
template_name = self.template_name
return template_name
| <mask token>
class StudentListView(ListView):
model = Student
template_name = 'staff/student_list.html'
ordering = ['name']
def get_queryset(self):
return Student.objects.filter(course='Python')
def get_context_data(self, *args, **kwargs):
context = super().get_context_data(*args, **kwargs)
context['freshers'] = Student.objects.all().order_by('name')
return context
def get_template_names(self):
if self.request.COOKIES['user'] == 'farzam':
template_name = 'staff/farzam.html'
else:
template_name = self.template_name
return template_name
| from django.shortcuts import render
from django.views.generic.list import ListView
from .models import Student
class StudentListView(ListView):
model = Student
template_name = 'staff/student_list.html'
ordering = ['name']
def get_queryset(self):
return Student.objects.filter(course='Python')
def get_context_data(self, *args, **kwargs):
context = super().get_context_data(*args, **kwargs)
context['freshers'] = Student.objects.all().order_by('name')
return context
def get_template_names(self):
if self.request.COOKIES['user'] == 'farzam':
template_name = 'staff/farzam.html'
else:
template_name = self.template_name
return template_name
| from django.shortcuts import render
from django.views.generic.list import ListView
from .models import Student
# Create your views here.
class StudentListView(ListView):
model = Student
# Custom has a HIGH priority than default in any field
template_name = 'staff/student_list.html'
# template_name_suffix = '_list'
ordering = ['name']
# context_object_name = 'students'
def get_queryset(self):
return Student.objects.filter(course='Python')
def get_context_data(self, *args, **kwargs):
context = super().get_context_data(*args, **kwargs)
context['freshers'] = Student.objects.all().order_by('name')
return context
def get_template_names(self):
# if self.request.user.is_superuser:
# template_name = 'staff/admin.html'
# elif self.request.user.is_staff:
# template_name = 'staff/staff.html'
# else:
# template_name = self.template_name
# return template_name
if self.request.COOKIES['user'] == 'farzam':
template_name = 'staff/farzam.html'
else:
template_name = self.template_name
return template_name | [
2,
4,
5,
6,
7
] |
2,094 | a253ab5ef80a61c3784862625cde81de4c4ef984 | <mask token>
class MultimediaTest(BaseTestCase):
<mask token>
<mask token>
<mask token>
def test_add_media(self):
"""
Tests create media
"""
self.login_editor()
form_data = minimal_form_data()
response = self.client.post('/multimedia/new', form_data)
self.assertContains(response,
'Por favor verifique os campos obrigatórios')
self.assertContains(response,
'Você precisa inserir pelo menos um descritor de assunto')
self.assertContains(response,
'Você precisa selecionar pelo menos uma área temática')
form_data = complete_form_data()
response = self.client.post('/multimedia/new', form_data, follow=True)
self.assertRedirects(response, '/multimedia/')
self.assertContains(response, 'Foto 1')
self.assertEquals(Media.objects.all()[0].cooperative_center_code,
'BR1.1')
def test_edit_media(self):
"""
Tests edit media
"""
self.login_editor()
create_media_object()
media_test = Media.objects.all()[0]
url = '/multimedia/edit/{0}'.format(media_test.id)
response = self.client.get(url)
self.assertContains(response, media_test.title)
form_data = complete_form_data()
form_data['status'] = '1'
response = self.client.post(url, form_data)
self.assertContains(response,
'é necessário ter pelo menos um descritor')
form_data['status'] = '0'
response = self.client.post(url, form_data, follow=True)
self.assertRedirects(response, '/multimedia/')
self.assertContains(response, 'Foto 1')
def test_delete_media(self):
"""
Tests delete media
"""
self.login_editor()
create_media_object()
response = self.client.get('/multimedia/delete/1')
self.assertContains(response, 'Você tem certeza que deseja apagar?')
response = self.client.post('/multimedia/delete/1')
self.assertTrue(Media.objects.filter(id=1).count() == 0)
self.assertTrue(Descriptor.objects.filter(object_id=1).count() == 0)
self.assertTrue(ResourceThematic.objects.filter(object_id=1).count(
) == 0)
self.assertRedirects(response, '/multimedia/')
def test_list_media_type(self):
"""
Tests list media type
"""
self.login_documentalist()
response = self.client.get('/multimedia/media-types/')
self.assertEqual(response.status_code, 403)
self.client.logout()
self.login_admin()
response = self.client.get('/multimedia/media-types/')
self.assertContains(response, 'Video')
def test_add_media_type(self):
"""
Tests create media type
"""
self.login_documentalist()
response = self.client.get('/multimedia/media-type/new')
self.assertEqual(response.status_code, 403)
self.client.logout()
self.login_admin()
form_data = {'status': '0', 'acronym': 'foto', 'name': 'Foto',
'language': 'pt-br', 'mediatypelocal_set-TOTAL_FORMS': '0',
'mediatypelocal_set-INITIAL_FORMS': '0'}
response = self.client.post('/multimedia/media-type/new', form_data,
follow=True)
self.assertRedirects(response, '/multimedia/media-types')
self.assertContains(response, 'Foto')
def test_list_media_collection(self):
"""
Tests list of media collection
"""
self.login_editor()
MediaCollection.objects.create(name='Coleção 1', description=
'Coleção de teste 1', created_by_id=1, cooperative_center_code=
'BR1.1')
MediaCollection.objects.create(name='Coleção 2', description=
'Coleção de teste 2', created_by_id=2, cooperative_center_code=
'BR1.1')
MediaCollection.objects.create(name='Coleção 3', description=
'Coleção de teste 3', created_by_id=3, cooperative_center_code=
'PY3.8')
response = self.client.get('/multimedia/collections')
self.assertContains(response, 'Coleção 1')
self.assertEquals(response.context['object_list'].count(), 3)
response = self.client.get(
'/multimedia/collections/?filter_created_by_cc=BR1.1')
self.assertEquals(response.context['object_list'].count(), 2)
def test_add_media_collection(self):
"""
Tests add media collection
"""
self.login_editor()
form_data = {'name': 'Coleção nova', 'description':
'Coleção de teste', 'language': 'pt-br',
'mediacollectionlocal_set-TOTAL_FORMS': '0',
'mediacollectionlocal_set-INITIAL_FORMS': '0'}
response = self.client.post('/multimedia/collection/new', form_data,
follow=True)
self.assertRedirects(response, '/multimedia/collections')
self.assertContains(response, 'Coleção nova')
| <mask token>
class MultimediaTest(BaseTestCase):
<mask token>
def setUp(self):
super(MultimediaTest, self).setUp()
media_type = MediaType.objects.create(acronym='video', name='Video')
thematic_area = ThematicArea.objects.create(acronym='LISBR1.1',
name='Teste')
<mask token>
def test_add_media(self):
"""
Tests create media
"""
self.login_editor()
form_data = minimal_form_data()
response = self.client.post('/multimedia/new', form_data)
self.assertContains(response,
'Por favor verifique os campos obrigatórios')
self.assertContains(response,
'Você precisa inserir pelo menos um descritor de assunto')
self.assertContains(response,
'Você precisa selecionar pelo menos uma área temática')
form_data = complete_form_data()
response = self.client.post('/multimedia/new', form_data, follow=True)
self.assertRedirects(response, '/multimedia/')
self.assertContains(response, 'Foto 1')
self.assertEquals(Media.objects.all()[0].cooperative_center_code,
'BR1.1')
def test_edit_media(self):
"""
Tests edit media
"""
self.login_editor()
create_media_object()
media_test = Media.objects.all()[0]
url = '/multimedia/edit/{0}'.format(media_test.id)
response = self.client.get(url)
self.assertContains(response, media_test.title)
form_data = complete_form_data()
form_data['status'] = '1'
response = self.client.post(url, form_data)
self.assertContains(response,
'é necessário ter pelo menos um descritor')
form_data['status'] = '0'
response = self.client.post(url, form_data, follow=True)
self.assertRedirects(response, '/multimedia/')
self.assertContains(response, 'Foto 1')
def test_delete_media(self):
"""
Tests delete media
"""
self.login_editor()
create_media_object()
response = self.client.get('/multimedia/delete/1')
self.assertContains(response, 'Você tem certeza que deseja apagar?')
response = self.client.post('/multimedia/delete/1')
self.assertTrue(Media.objects.filter(id=1).count() == 0)
self.assertTrue(Descriptor.objects.filter(object_id=1).count() == 0)
self.assertTrue(ResourceThematic.objects.filter(object_id=1).count(
) == 0)
self.assertRedirects(response, '/multimedia/')
def test_list_media_type(self):
"""
Tests list media type
"""
self.login_documentalist()
response = self.client.get('/multimedia/media-types/')
self.assertEqual(response.status_code, 403)
self.client.logout()
self.login_admin()
response = self.client.get('/multimedia/media-types/')
self.assertContains(response, 'Video')
def test_add_media_type(self):
"""
Tests create media type
"""
self.login_documentalist()
response = self.client.get('/multimedia/media-type/new')
self.assertEqual(response.status_code, 403)
self.client.logout()
self.login_admin()
form_data = {'status': '0', 'acronym': 'foto', 'name': 'Foto',
'language': 'pt-br', 'mediatypelocal_set-TOTAL_FORMS': '0',
'mediatypelocal_set-INITIAL_FORMS': '0'}
response = self.client.post('/multimedia/media-type/new', form_data,
follow=True)
self.assertRedirects(response, '/multimedia/media-types')
self.assertContains(response, 'Foto')
def test_list_media_collection(self):
"""
Tests list of media collection
"""
self.login_editor()
MediaCollection.objects.create(name='Coleção 1', description=
'Coleção de teste 1', created_by_id=1, cooperative_center_code=
'BR1.1')
MediaCollection.objects.create(name='Coleção 2', description=
'Coleção de teste 2', created_by_id=2, cooperative_center_code=
'BR1.1')
MediaCollection.objects.create(name='Coleção 3', description=
'Coleção de teste 3', created_by_id=3, cooperative_center_code=
'PY3.8')
response = self.client.get('/multimedia/collections')
self.assertContains(response, 'Coleção 1')
self.assertEquals(response.context['object_list'].count(), 3)
response = self.client.get(
'/multimedia/collections/?filter_created_by_cc=BR1.1')
self.assertEquals(response.context['object_list'].count(), 2)
def test_add_media_collection(self):
"""
Tests add media collection
"""
self.login_editor()
form_data = {'name': 'Coleção nova', 'description':
'Coleção de teste', 'language': 'pt-br',
'mediacollectionlocal_set-TOTAL_FORMS': '0',
'mediacollectionlocal_set-INITIAL_FORMS': '0'}
response = self.client.post('/multimedia/collection/new', form_data,
follow=True)
self.assertRedirects(response, '/multimedia/collections')
self.assertContains(response, 'Coleção nova')
| <mask token>
def minimal_form_data():
"""
Define a minimal fields for submit a media form
"""
form_data = {'status': '0', 'title': 'Foto 1', 'description': 'Foto 1',
'media_type': '1',
'main-descriptor-content_type-object_id-TOTAL_FORMS': '0',
'main-descriptor-content_type-object_id-INITIAL_FORMS': '0',
'main-keyword-content_type-object_id-TOTAL_FORMS': '0',
'main-keyword-content_type-object_id-INITIAL_FORMS': '0',
'main-resourcethematic-content_type-object_id-TOTAL_FORMS': '0',
'main-resourcethematic-content_type-object_id-INITIAL_FORMS': '0'}
return form_data
<mask token>
class MultimediaTest(BaseTestCase):
"""
Tests for multimedia app
"""
def setUp(self):
super(MultimediaTest, self).setUp()
media_type = MediaType.objects.create(acronym='video', name='Video')
thematic_area = ThematicArea.objects.create(acronym='LISBR1.1',
name='Teste')
def test_list_media(self):
"""
Test list media
"""
self.login_editor()
create_media_object()
response = self.client.get('/multimedia/')
self.assertContains(response, 'Midia de teste (BR1.1')
self.assertNotContains(response, 'Media de prueba (PY3.1)')
def test_add_media(self):
"""
Tests create media
"""
self.login_editor()
form_data = minimal_form_data()
response = self.client.post('/multimedia/new', form_data)
self.assertContains(response,
'Por favor verifique os campos obrigatórios')
self.assertContains(response,
'Você precisa inserir pelo menos um descritor de assunto')
self.assertContains(response,
'Você precisa selecionar pelo menos uma área temática')
form_data = complete_form_data()
response = self.client.post('/multimedia/new', form_data, follow=True)
self.assertRedirects(response, '/multimedia/')
self.assertContains(response, 'Foto 1')
self.assertEquals(Media.objects.all()[0].cooperative_center_code,
'BR1.1')
def test_edit_media(self):
"""
Tests edit media
"""
self.login_editor()
create_media_object()
media_test = Media.objects.all()[0]
url = '/multimedia/edit/{0}'.format(media_test.id)
response = self.client.get(url)
self.assertContains(response, media_test.title)
form_data = complete_form_data()
form_data['status'] = '1'
response = self.client.post(url, form_data)
self.assertContains(response,
'é necessário ter pelo menos um descritor')
form_data['status'] = '0'
response = self.client.post(url, form_data, follow=True)
self.assertRedirects(response, '/multimedia/')
self.assertContains(response, 'Foto 1')
def test_delete_media(self):
"""
Tests delete media
"""
self.login_editor()
create_media_object()
response = self.client.get('/multimedia/delete/1')
self.assertContains(response, 'Você tem certeza que deseja apagar?')
response = self.client.post('/multimedia/delete/1')
self.assertTrue(Media.objects.filter(id=1).count() == 0)
self.assertTrue(Descriptor.objects.filter(object_id=1).count() == 0)
self.assertTrue(ResourceThematic.objects.filter(object_id=1).count(
) == 0)
self.assertRedirects(response, '/multimedia/')
def test_list_media_type(self):
"""
Tests list media type
"""
self.login_documentalist()
response = self.client.get('/multimedia/media-types/')
self.assertEqual(response.status_code, 403)
self.client.logout()
self.login_admin()
response = self.client.get('/multimedia/media-types/')
self.assertContains(response, 'Video')
def test_add_media_type(self):
"""
Tests create media type
"""
self.login_documentalist()
response = self.client.get('/multimedia/media-type/new')
self.assertEqual(response.status_code, 403)
self.client.logout()
self.login_admin()
form_data = {'status': '0', 'acronym': 'foto', 'name': 'Foto',
'language': 'pt-br', 'mediatypelocal_set-TOTAL_FORMS': '0',
'mediatypelocal_set-INITIAL_FORMS': '0'}
response = self.client.post('/multimedia/media-type/new', form_data,
follow=True)
self.assertRedirects(response, '/multimedia/media-types')
self.assertContains(response, 'Foto')
def test_list_media_collection(self):
"""
Tests list of media collection
"""
self.login_editor()
MediaCollection.objects.create(name='Coleção 1', description=
'Coleção de teste 1', created_by_id=1, cooperative_center_code=
'BR1.1')
MediaCollection.objects.create(name='Coleção 2', description=
'Coleção de teste 2', created_by_id=2, cooperative_center_code=
'BR1.1')
MediaCollection.objects.create(name='Coleção 3', description=
'Coleção de teste 3', created_by_id=3, cooperative_center_code=
'PY3.8')
response = self.client.get('/multimedia/collections')
self.assertContains(response, 'Coleção 1')
self.assertEquals(response.context['object_list'].count(), 3)
response = self.client.get(
'/multimedia/collections/?filter_created_by_cc=BR1.1')
self.assertEquals(response.context['object_list'].count(), 2)
def test_add_media_collection(self):
"""
Tests add media collection
"""
self.login_editor()
form_data = {'name': 'Coleção nova', 'description':
'Coleção de teste', 'language': 'pt-br',
'mediacollectionlocal_set-TOTAL_FORMS': '0',
'mediacollectionlocal_set-INITIAL_FORMS': '0'}
response = self.client.post('/multimedia/collection/new', form_data,
follow=True)
self.assertRedirects(response, '/multimedia/collections')
self.assertContains(response, 'Coleção nova')
| <mask token>
def minimal_form_data():
"""
Define a minimal fields for submit a media form
"""
form_data = {'status': '0', 'title': 'Foto 1', 'description': 'Foto 1',
'media_type': '1',
'main-descriptor-content_type-object_id-TOTAL_FORMS': '0',
'main-descriptor-content_type-object_id-INITIAL_FORMS': '0',
'main-keyword-content_type-object_id-TOTAL_FORMS': '0',
'main-keyword-content_type-object_id-INITIAL_FORMS': '0',
'main-resourcethematic-content_type-object_id-TOTAL_FORMS': '0',
'main-resourcethematic-content_type-object_id-INITIAL_FORMS': '0'}
return form_data
def complete_form_data():
"""
Define missing fields for a valid submission of media object
"""
missing_fields = {'link': 'http://www.youtube.com', 'publication_date':
'01/12/2015', 'main-descriptor-content_type-object_id-TOTAL_FORMS':
'1', 'main-descriptor-content_type-object_id-0-id': '',
'main-descriptor-content_type-object_id-0-text': 'malaria',
'main-descriptor-content_type-object_id-0-code': '^d8462',
'main-descriptor-content_type-object_id-0-status': '0',
'main-resourcethematic-content_type-object_id-TOTAL_FORMS': '1',
'main-resourcethematic-content_type-object_id-0-thematic_area': '1',
'main-resourcethematic-content_type-object_id-0-status': '0'}
complete_form_data = minimal_form_data()
complete_form_data.update(missing_fields)
return complete_form_data
def create_media_object():
"""
Create media object for tests
"""
media1 = Media.objects.create(status=0, title='Midia de teste (BR1.1)',
media_type_id=1, link='http://bvsalud.org', created_by_id=1,
cooperative_center_code='BR1.1')
media_ct = ContentType.objects.get_for_model(media1)
descriptor = Descriptor.objects.create(object_id=1, content_type=
media_ct, text='malaria')
thematic = ResourceThematic.objects.create(object_id=1, content_type=
media_ct, thematic_area_id=1)
media2 = Media.objects.create(status=0, title='Media de prueba (PY3.1)',
media_type_id=1, link='http://bvsalud.org', created_by_id=2,
cooperative_center_code='PY3.1')
class MultimediaTest(BaseTestCase):
"""
Tests for multimedia app
"""
def setUp(self):
super(MultimediaTest, self).setUp()
media_type = MediaType.objects.create(acronym='video', name='Video')
thematic_area = ThematicArea.objects.create(acronym='LISBR1.1',
name='Teste')
def test_list_media(self):
"""
Test list media
"""
self.login_editor()
create_media_object()
response = self.client.get('/multimedia/')
self.assertContains(response, 'Midia de teste (BR1.1')
self.assertNotContains(response, 'Media de prueba (PY3.1)')
def test_add_media(self):
"""
Tests create media
"""
self.login_editor()
form_data = minimal_form_data()
response = self.client.post('/multimedia/new', form_data)
self.assertContains(response,
'Por favor verifique os campos obrigatórios')
self.assertContains(response,
'Você precisa inserir pelo menos um descritor de assunto')
self.assertContains(response,
'Você precisa selecionar pelo menos uma área temática')
form_data = complete_form_data()
response = self.client.post('/multimedia/new', form_data, follow=True)
self.assertRedirects(response, '/multimedia/')
self.assertContains(response, 'Foto 1')
self.assertEquals(Media.objects.all()[0].cooperative_center_code,
'BR1.1')
def test_edit_media(self):
"""
Tests edit media
"""
self.login_editor()
create_media_object()
media_test = Media.objects.all()[0]
url = '/multimedia/edit/{0}'.format(media_test.id)
response = self.client.get(url)
self.assertContains(response, media_test.title)
form_data = complete_form_data()
form_data['status'] = '1'
response = self.client.post(url, form_data)
self.assertContains(response,
'é necessário ter pelo menos um descritor')
form_data['status'] = '0'
response = self.client.post(url, form_data, follow=True)
self.assertRedirects(response, '/multimedia/')
self.assertContains(response, 'Foto 1')
def test_delete_media(self):
"""
Tests delete media
"""
self.login_editor()
create_media_object()
response = self.client.get('/multimedia/delete/1')
self.assertContains(response, 'Você tem certeza que deseja apagar?')
response = self.client.post('/multimedia/delete/1')
self.assertTrue(Media.objects.filter(id=1).count() == 0)
self.assertTrue(Descriptor.objects.filter(object_id=1).count() == 0)
self.assertTrue(ResourceThematic.objects.filter(object_id=1).count(
) == 0)
self.assertRedirects(response, '/multimedia/')
def test_list_media_type(self):
"""
Tests list media type
"""
self.login_documentalist()
response = self.client.get('/multimedia/media-types/')
self.assertEqual(response.status_code, 403)
self.client.logout()
self.login_admin()
response = self.client.get('/multimedia/media-types/')
self.assertContains(response, 'Video')
def test_add_media_type(self):
"""
Tests create media type
"""
self.login_documentalist()
response = self.client.get('/multimedia/media-type/new')
self.assertEqual(response.status_code, 403)
self.client.logout()
self.login_admin()
form_data = {'status': '0', 'acronym': 'foto', 'name': 'Foto',
'language': 'pt-br', 'mediatypelocal_set-TOTAL_FORMS': '0',
'mediatypelocal_set-INITIAL_FORMS': '0'}
response = self.client.post('/multimedia/media-type/new', form_data,
follow=True)
self.assertRedirects(response, '/multimedia/media-types')
self.assertContains(response, 'Foto')
def test_list_media_collection(self):
"""
Tests list of media collection
"""
self.login_editor()
MediaCollection.objects.create(name='Coleção 1', description=
'Coleção de teste 1', created_by_id=1, cooperative_center_code=
'BR1.1')
MediaCollection.objects.create(name='Coleção 2', description=
'Coleção de teste 2', created_by_id=2, cooperative_center_code=
'BR1.1')
MediaCollection.objects.create(name='Coleção 3', description=
'Coleção de teste 3', created_by_id=3, cooperative_center_code=
'PY3.8')
response = self.client.get('/multimedia/collections')
self.assertContains(response, 'Coleção 1')
self.assertEquals(response.context['object_list'].count(), 3)
response = self.client.get(
'/multimedia/collections/?filter_created_by_cc=BR1.1')
self.assertEquals(response.context['object_list'].count(), 2)
def test_add_media_collection(self):
"""
Tests add media collection
"""
self.login_editor()
form_data = {'name': 'Coleção nova', 'description':
'Coleção de teste', 'language': 'pt-br',
'mediacollectionlocal_set-TOTAL_FORMS': '0',
'mediacollectionlocal_set-INITIAL_FORMS': '0'}
response = self.client.post('/multimedia/collection/new', form_data,
follow=True)
self.assertRedirects(response, '/multimedia/collections')
self.assertContains(response, 'Coleção nova')
| # coding: utf-8
from django.test.client import Client
from django.contrib.contenttypes.models import ContentType
from main.models import Descriptor, ResourceThematic, ThematicArea
from utils.tests import BaseTestCase
from models import *
def minimal_form_data():
'''
Define a minimal fields for submit a media form
'''
form_data = {
'status': '0',
'title': 'Foto 1',
'description': 'Foto 1',
'media_type' : '1',
'main-descriptor-content_type-object_id-TOTAL_FORMS': '0',
'main-descriptor-content_type-object_id-INITIAL_FORMS': '0',
'main-keyword-content_type-object_id-TOTAL_FORMS': '0',
'main-keyword-content_type-object_id-INITIAL_FORMS': '0',
'main-resourcethematic-content_type-object_id-TOTAL_FORMS': '0',
'main-resourcethematic-content_type-object_id-INITIAL_FORMS': '0',
}
return form_data
def complete_form_data():
'''
Define missing fields for a valid submission of media object
'''
missing_fields = {
'link' : 'http://www.youtube.com',
'publication_date' : '01/12/2015',
'main-descriptor-content_type-object_id-TOTAL_FORMS' : '1',
'main-descriptor-content_type-object_id-0-id' : '',
'main-descriptor-content_type-object_id-0-text' : 'malaria',
'main-descriptor-content_type-object_id-0-code' : '^d8462',
'main-descriptor-content_type-object_id-0-status' : '0',
'main-resourcethematic-content_type-object_id-TOTAL_FORMS' : '1',
'main-resourcethematic-content_type-object_id-0-thematic_area' : '1',
'main-resourcethematic-content_type-object_id-0-status' : '0',
}
complete_form_data = minimal_form_data()
complete_form_data.update(missing_fields)
return complete_form_data
def create_media_object():
'''
Create media object for tests
'''
# Create a Media object and test that is present on list
media1 = Media.objects.create(status=0,title='Midia de teste (BR1.1)',
media_type_id=1, link='http://bvsalud.org', created_by_id=1,
cooperative_center_code='BR1.1')
media_ct = ContentType.objects.get_for_model(media1)
descriptor = Descriptor.objects.create(object_id=1, content_type=media_ct, text='malaria')
thematic = ResourceThematic.objects.create(object_id=1, content_type=media_ct, thematic_area_id=1)
media2 = Media.objects.create(status=0,title='Media de prueba (PY3.1)',
media_type_id=1, link='http://bvsalud.org', created_by_id=2,
cooperative_center_code='PY3.1')
class MultimediaTest(BaseTestCase):
"""
Tests for multimedia app
"""
def setUp(self):
super(MultimediaTest, self).setUp()
# create auxiliary models used on tests
media_type = MediaType.objects.create(acronym='video', name='Video')
thematic_area = ThematicArea.objects.create(acronym='LISBR1.1', name='Teste')
def test_list_media(self):
"""
Test list media
"""
self.login_editor()
create_media_object()
response = self.client.get('/multimedia/')
self.assertContains(response, "Midia de teste (BR1.1")
# list only medias from user cooperative center (BR1.1)
self.assertNotContains(response, "Media de prueba (PY3.1)")
def test_add_media(self):
"""
Tests create media
"""
self.login_editor()
# invalid submission with missing required fields
form_data = minimal_form_data()
response = self.client.post('/multimedia/new', form_data )
self.assertContains(response,'Por favor verifique os campos obrigatórios')
self.assertContains(response,'Você precisa inserir pelo menos um descritor de assunto')
self.assertContains(response,'Você precisa selecionar pelo menos uma área temática')
# complete form_data with required fields and re-submit form
form_data = complete_form_data()
# test valid submission
# after submit a valid content the view will redirect to /multimedia and list the objects
# follow=True will allow check if the new data is on the list
response = self.client.post('/multimedia/new', form_data, follow=True)
self.assertRedirects(response, '/multimedia/')
self.assertContains(response, "Foto 1")
# check if is set cooperative center code of user (editor = BR1.1)
self.assertEquals(Media.objects.all()[0].cooperative_center_code, "BR1.1")
def test_edit_media(self):
"""
Tests edit media
"""
self.login_editor()
create_media_object()
media_test = Media.objects.all()[0]
url = '/multimedia/edit/{0}'.format(media_test.id)
response = self.client.get(url)
# Test if return form with fields
self.assertContains(response, media_test.title)
# Test changes values and submit
form_data = complete_form_data()
form_data['status'] = '1'
response = self.client.post(url, form_data)
# check for validation of descriptor and thematic area for status = Admitted
self.assertContains(response, "é necessário ter pelo menos um descritor")
# check for normal edition
form_data['status'] = '0'
response = self.client.post(url, form_data, follow=True)
self.assertRedirects(response, '/multimedia/')
self.assertContains(response, "Foto 1")
def test_delete_media(self):
"""
Tests delete media
"""
self.login_editor()
create_media_object()
response = self.client.get('/multimedia/delete/1')
self.assertContains(response, "Você tem certeza que deseja apagar?")
response = self.client.post('/multimedia/delete/1')
self.assertTrue(Media.objects.filter(id=1).count() == 0)
self.assertTrue(Descriptor.objects.filter(object_id=1).count() == 0)
self.assertTrue(ResourceThematic.objects.filter(object_id=1).count() == 0)
self.assertRedirects(response, '/multimedia/')
def test_list_media_type(self):
"""
Tests list media type
"""
# check if documentalist has access to list media-types
self.login_documentalist()
response = self.client.get('/multimedia/media-types/' )
# 403 = unauthorized
self.assertEqual(response.status_code, 403)
self.client.logout()
self.login_admin()
response = self.client.get('/multimedia/media-types/')
self.assertContains(response, "Video")
def test_add_media_type(self):
"""
Tests create media type
"""
# check if documentalist has access to create new media-types
self.login_documentalist()
response = self.client.get('/multimedia/media-type/new' )
# 403 = unauthorized
self.assertEqual(response.status_code, 403)
self.client.logout()
self.login_admin()
form_data = {
'status': '0',
'acronym': 'foto',
'name': 'Foto',
'language' : 'pt-br',
'mediatypelocal_set-TOTAL_FORMS': '0',
'mediatypelocal_set-INITIAL_FORMS': '0',
}
response = self.client.post('/multimedia/media-type/new', form_data, follow=True )
self.assertRedirects(response, '/multimedia/media-types')
self.assertContains(response, "Foto")
def test_list_media_collection(self):
"""
Tests list of media collection
"""
self.login_editor()
# Create a media collection object and test that is present on list
MediaCollection.objects.create(name='Coleção 1',
description='Coleção de teste 1',
created_by_id=1, cooperative_center_code='BR1.1')
MediaCollection.objects.create(name='Coleção 2',
description='Coleção de teste 2',
created_by_id=2, cooperative_center_code='BR1.1')
MediaCollection.objects.create(name='Coleção 3',
description='Coleção de teste 3',
created_by_id=3, cooperative_center_code='PY3.8')
response = self.client.get('/multimedia/collections')
# check if only one collection is returned (restrict by user)
self.assertContains(response, "Coleção 1")
self.assertEquals(response.context['object_list'].count(), 3)
# check if return only colections from cooperative center BR1.1
response = self.client.get('/multimedia/collections/?filter_created_by_cc=BR1.1')
self.assertEquals(response.context['object_list'].count(), 2)
def test_add_media_collection(self):
"""
Tests add media collection
"""
self.login_editor()
form_data = {
'name': 'Coleção nova',
'description': 'Coleção de teste',
'language': 'pt-br',
'mediacollectionlocal_set-TOTAL_FORMS': '0',
'mediacollectionlocal_set-INITIAL_FORMS': '0',
}
response = self.client.post('/multimedia/collection/new', form_data, follow=True )
self.assertRedirects(response, '/multimedia/collections')
self.assertContains(response, "Coleção nova")
| [
8,
9,
12,
14,
16
] |
2,095 | 184b850e85b523f22a44cfde698efd96b94d819d | import os
templateFile = 'crab_template.py'
samples=[\
#"/TTJets_MSDecaysCKM_central_Tune4C_13TeV-madgraph-tauola/Spring14miniaod-PU20bx25_POSTLS170_V5-v1/MINIAODSIM",
#"/TTJets_MSDecaysCKM_central_Tune4C_13TeV-madgraph-tauola/Spring14miniaod-PU20bx25_POSTLS170_V5-v2/MINIAODSIM", #Identical? Same event count #miniAODTuple_e/
# "/TTJets_MSDecaysCKM_central_Tune4C_13TeV-madgraph-tauola/Spring14miniaod-PU_S14_POSTLS170_V6-v1/MINIAODSIM", #MiniAODTupleTT1e/
# "/WJetsToLNu_HT-200to400_Tune4C_13TeV-madgraph-tauola/Spring14miniaod-PU20bx25_POSTLS170_V5-v1/MINIAODSIM", #/data/easilar/crab3WorkAreas/...
# "/WJetsToLNu_HT-400to600_Tune4C_13TeV-madgraph-tauola/Spring14miniaod-PU20bx25_POSTLS170_V5-v1/MINIAODSIM", #/data/easilar/crab3WorkAreas/...
# "/WJetsToLNu_HT-600toInf_Tune4C_13TeV-madgraph-tauola/Spring14miniaod-PU20bx25_POSTLS170_V5-v1/MINIAODSIM",
# "/WJetsToLNu_13TeV-madgraph-pythia8-tauola/Spring14miniaod-PU20bx25_POSTLS170_V5-v1/MINIAODSIM",
# "/DYJetsToLL_M-50_HT-200to400_Tune4C_13TeV-madgraph-tauola/Spring14miniaod-PU20bx25_POSTLS170_V5-v1/MINIAODSIM",
# "/DYJetsToLL_M-50_HT-400to600_Tune4C_13TeV-madgraph-tauola/Spring14miniaod-PU20bx25_POSTLS170_V5-v1/MINIAODSIM",
# "/DYJetsToLL_M-50_HT-600toInf_Tune4C_13TeV-madgraph-tauola/Spring14miniaod-PU20bx25_POSTLS170_V5-v1/MINIAODSIM",
# "/DYJetsToLL_M-50_13TeV-madgraph-pythia8/Spring14miniaod-PU20bx25_POSTLS170_V5-v1/MINIAODSIM", #/data/easilar/crab3WorkAreas/...
# "/DYJetsToLL_M-50_13TeV-pythia6/Spring14miniaod-PU20bx25_POSTLS170_V5-v1/MINIAODSIM",
# "/DYToEE_M-50_Tune4C_13TeV-pythia8/Spring14miniaod-castor-v2/MINIAODSIM",
# "/DYToEE_Tune4C_13TeV-pythia8/Spring14miniaod-PU20bx25_POSTLS170_V5-v1/MINIAODSIM",
# "/DYToMuMu_M-15To50_Tune4C_13TeV-pythia8/Spring14miniaod-castor_PU20bx25_POSTLS170_V5-v1/MINIAODSIM",
# "/DYToMuMu_M-50_Tune4C_13TeV-pythia8/Spring14miniaod-PU20bx25_POSTLS170_V5-v1/MINIAODSIM",
# "/DYToMuMu_M-50_Tune4C_13TeV-pythia8/Spring14miniaod-castor_PU20bx25_POSTLS170_V5-v1/MINIAODSIM",
# "/DYToMuMu_M-6To15_Tune4C_13TeV-pythia8/Spring14miniaod-castor_PU20bx25_POSTLS170_V5-v1/MINIAODSIM",
# "/DYToMuMu_Tune4C_13TeV-pythia8/Spring14miniaod-PU20bx25_POSTLS170_V5-v1/MINIAODSIM",
# "/TToBLNu_s-channel-EMu_Tune4C_13TeV-madgraph-tauola/Spring14miniaod-PU20bx25_POSTLS170_V5-v1/MINIAODSIM",
# "/TToBLNu_t-channel-EMu_Tune4C_13TeV-madgraph-tauola/Spring14miniaod-PU20bx25_POSTLS170_V5-v1/MINIAODSIM",
# "/TToBLNu_tW-channel-DR-EMu_Tune4C_13TeV-madgraph-tauola/Spring14miniaod-PU20bx25_POSTLS170_V5-v2/MINIAODSIM",
# "/TToLeptons_s-channel-CSA14_Tune4C_13TeV-aMCatNLO-tauola/Spring14miniaod-PU20bx25_POSTLS170_V5-v1/MINIAODSIM",
# "/T_tW-channel-DR_Tune4C_13TeV-CSA14-powheg-tauola/Spring14miniaod-PU20bx25_POSTLS170_V5-v1/MINIAODSIM",
# "/Tbar_tW-channel-DR_Tune4C_13TeV-CSA14-powheg-tauola/Spring14miniaod-PU20bx25_POSTLS170_V5-v1/MINIAODSIM",
# "/TBarToLeptons_s-channel-CSA14_Tune4C_13TeV-aMCatNLO-tauola/Spring14miniaod-PU20bx25_POSTLS170_V5-v1/MINIAODSIM",
# "/TBarToLeptons_t-channel_Tune4C_CSA14_13TeV-aMCatNLO-tauola/Spring14miniaod-PU20bx25_POSTLS170_V5-v1/MINIAODSIM",
#"/WJetsToLNu_HT-100to200_Tune4C_13TeV-madgraph-tauola/Spring14miniaod-PU20bx25_POSTLS170_V5-v2/MINIAODSIM",
#"/WJetsToLNu_HT-100to200_Tune4C_13TeV-madgraph-tauola/schoef-WJetsToLNu_HT-100to200_Tune4C_13TeV-madgraph-tauola_Spring14dr-PU_S14_POSTLS170_V6-v1-92bfc1aa0ef8c674e0edabb945b19298/USER",
#"/WJetsToLNu_HT-200to400_Tune4C_13TeV-madgraph-tauola/schoef-WJetsToLNu_HT-200to400_Tune4C_13TeV-madgraph-tauola_Spring14dr-PU_S14_POSTLS170_V6-v1-92bfc1aa0ef8c674e0edabb945b19298/USER",
#"/WJetsToLNu_HT-400to600_Tune4C_13TeV-madgraph-tauola/schoef-WJetsToLNu_HT-400to600_Tune4C_13TeV-madgraph-tauola_Spring14dr-PU_S14_POSTLS170_V6-v1-92bfc1aa0ef8c674e0edabb945b19298/USER",
#"/WJetsToLNu_HT-600toInf_Tune4C_13TeV-madgraph-tauola/schoef-WJetsToLNu_HT-600toInf_Tune4C_13TeV-madgraph-tauola_Spring14dr-PU_S14_POSTLS170_V6-v1-92bfc1aa0ef8c674e0edabb945b19298/USER",
"/T5Full_T5Full-1200-1000-800-Decay-MGMMatch50/schoef-T5Full_T5Full-1200-1000-800-Decay-MGMMatch50-miniAOD-92bfc1aa0ef8c674e0edabb945b19298/USER",
"/T5Full_T5Full-1500-800-100-Decay-MGMMatch50/schoef-T5Full_T5Full-1500-800-100-Decay-MGMMatch50-miniAOD-92bfc1aa0ef8c674e0edabb945b19298/USER"
]
for s in samples:
pySampleName = s[1:].replace('/','_')
#pySampleName = s[1:].replace('/','').replace('_','').replace('-','')
cfgFileName = 'New_crab_'+pySampleName+'.py'
print "Sample",s
print "Using template",templateFile
if os.path.isfile(cfgFileName) :
print "Skipping! File ",cfgFileName,"already there!!"
continue
ofile = file(cfgFileName,'w')
if not os.path.isfile(templateFile) :
print "Stop. TemplateFile not found:", templateFile
break
ifile = open(templateFile,'r')
replacements = [["DPMDIRECTORY", pySampleName], ["WORKINGDIRECTORY", '/data/easilar/crab3WorkAreas/'+pySampleName], ["SAMPLENAME", s]]
for line in ifile.readlines():
# print line
for r in replacements:
line=line.replace(r[0],r[1])
ofile.write(line)
ifile.close()
ofile.close()
print "Written",ofile.name
| null | null | null | null | [
0
] |
2,096 | e70c5c9a62faa4c501c0f103ce0a0a419aaf4301 | <mask token>
def restart():
root.destroy()
os.startfile('data\\programs\\game with tkinter.py')
def disableButton():
global l, restartButton, start
b1.config(state='disabled')
b2.config(state='disabled')
b3.config(state='disabled')
b4.config(state='disabled')
b5.config(state='disabled')
b6.config(state='disabled')
b7.config(state='disabled')
b8.config(state='disabled')
b9.config(state='disabled')
start.config(state='disabled')
restartButton.config(state='normal', command=restart, text=
' --->press to restart<--- ')
<mask token>
def funForB3():
global notPresentList, element, l
ans = notPresentList[2] == element
if ans:
l.config(image=image1)
else:
l.config(image=image2)
disableButton()
def funForB4():
global notPresentList, element, l
ans = notPresentList[3] == element
if ans:
l.config(image=image1)
else:
l.config(image=image2)
disableButton()
def funForB5():
global notPresentList, element, l
ans = notPresentList[4] == element
if ans:
l.config(image=image1)
else:
l.config(image=image2)
disableButton()
def funForB6():
global notPresentList, element, l
ans = notPresentList[5] == element
if ans:
l.config(image=image1)
else:
l.config(image=image2)
disableButton()
def funForB7():
global notPresentList, element, l
ans = notPresentList[6] == element
if ans:
l.config(image=image1)
else:
l.config(image=image2)
disableButton()
def funForB8():
global notPresentList, element, l
ans = notPresentList[7] == element
if ans:
l.config(image=image1)
else:
l.config(image=image2)
disableButton()
def funForB9():
global notPresentList, element, l
ans = notPresentList[8] == element
if ans:
l.config(image=image1)
else:
l.config(image=image2)
disableButton()
def present():
with open('data\\database\\present.txt', 'r') as file:
content = file.read().split('\n')
presentList = [content[random.randint(0, 400)], content[random.
randint(0, 400)], content[random.randint(0, 400)], content[
random.randint(0, 400)], content[random.randint(0, 400)],
content[random.randint(0, 400)], content[random.randint(0, 400)
], content[random.randint(0, 400)], content[random.randint(0, 400)]
]
element = presentList[random.randint(0, 8)]
return presentList, element
def notPresent():
global buttonList, start
with open('data\\database\\notpresent.txt', 'r') as file:
content = file.read().split('\n')
notPresentList = [content[random.randint(0, 35)], content[random.
randint(0, 35)], content[random.randint(0, 35)], content[random
.randint(0, 35)], content[random.randint(0, 35)], content[
random.randint(0, 35)], content[random.randint(0, 35)], content
[random.randint(0, 35)]]
start.config(state='normal')
obj = present()
presentList, element = obj[0], obj[1]
for i in range(9):
buttonList[i].config(text=presentList[i], state='disabled')
notPresentList.insert(random.randint(0, 9), element)
return notPresentList, element
<mask token>
| <mask token>
def restart():
root.destroy()
os.startfile('data\\programs\\game with tkinter.py')
def disableButton():
global l, restartButton, start
b1.config(state='disabled')
b2.config(state='disabled')
b3.config(state='disabled')
b4.config(state='disabled')
b5.config(state='disabled')
b6.config(state='disabled')
b7.config(state='disabled')
b8.config(state='disabled')
b9.config(state='disabled')
start.config(state='disabled')
restartButton.config(state='normal', command=restart, text=
' --->press to restart<--- ')
<mask token>
def funForB2():
global notPresentList, element, l
ans = notPresentList[1] == element
if ans:
l.config(image=image1)
else:
l.config(image=image2)
disableButton()
def funForB3():
global notPresentList, element, l
ans = notPresentList[2] == element
if ans:
l.config(image=image1)
else:
l.config(image=image2)
disableButton()
def funForB4():
global notPresentList, element, l
ans = notPresentList[3] == element
if ans:
l.config(image=image1)
else:
l.config(image=image2)
disableButton()
def funForB5():
global notPresentList, element, l
ans = notPresentList[4] == element
if ans:
l.config(image=image1)
else:
l.config(image=image2)
disableButton()
def funForB6():
global notPresentList, element, l
ans = notPresentList[5] == element
if ans:
l.config(image=image1)
else:
l.config(image=image2)
disableButton()
def funForB7():
global notPresentList, element, l
ans = notPresentList[6] == element
if ans:
l.config(image=image1)
else:
l.config(image=image2)
disableButton()
def funForB8():
global notPresentList, element, l
ans = notPresentList[7] == element
if ans:
l.config(image=image1)
else:
l.config(image=image2)
disableButton()
def funForB9():
global notPresentList, element, l
ans = notPresentList[8] == element
if ans:
l.config(image=image1)
else:
l.config(image=image2)
disableButton()
def present():
with open('data\\database\\present.txt', 'r') as file:
content = file.read().split('\n')
presentList = [content[random.randint(0, 400)], content[random.
randint(0, 400)], content[random.randint(0, 400)], content[
random.randint(0, 400)], content[random.randint(0, 400)],
content[random.randint(0, 400)], content[random.randint(0, 400)
], content[random.randint(0, 400)], content[random.randint(0, 400)]
]
element = presentList[random.randint(0, 8)]
return presentList, element
def notPresent():
global buttonList, start
with open('data\\database\\notpresent.txt', 'r') as file:
content = file.read().split('\n')
notPresentList = [content[random.randint(0, 35)], content[random.
randint(0, 35)], content[random.randint(0, 35)], content[random
.randint(0, 35)], content[random.randint(0, 35)], content[
random.randint(0, 35)], content[random.randint(0, 35)], content
[random.randint(0, 35)]]
start.config(state='normal')
obj = present()
presentList, element = obj[0], obj[1]
for i in range(9):
buttonList[i].config(text=presentList[i], state='disabled')
notPresentList.insert(random.randint(0, 9), element)
return notPresentList, element
<mask token>
| <mask token>
def restart():
root.destroy()
os.startfile('data\\programs\\game with tkinter.py')
def disableButton():
global l, restartButton, start
b1.config(state='disabled')
b2.config(state='disabled')
b3.config(state='disabled')
b4.config(state='disabled')
b5.config(state='disabled')
b6.config(state='disabled')
b7.config(state='disabled')
b8.config(state='disabled')
b9.config(state='disabled')
start.config(state='disabled')
restartButton.config(state='normal', command=restart, text=
' --->press to restart<--- ')
def funForB1():
global notPresentList, element, l, start
ans = notPresentList[0] == element
if ans:
l.config(image=image1)
else:
l.config(image=image2)
disableButton()
def funForB2():
global notPresentList, element, l
ans = notPresentList[1] == element
if ans:
l.config(image=image1)
else:
l.config(image=image2)
disableButton()
def funForB3():
global notPresentList, element, l
ans = notPresentList[2] == element
if ans:
l.config(image=image1)
else:
l.config(image=image2)
disableButton()
def funForB4():
global notPresentList, element, l
ans = notPresentList[3] == element
if ans:
l.config(image=image1)
else:
l.config(image=image2)
disableButton()
def funForB5():
global notPresentList, element, l
ans = notPresentList[4] == element
if ans:
l.config(image=image1)
else:
l.config(image=image2)
disableButton()
def funForB6():
global notPresentList, element, l
ans = notPresentList[5] == element
if ans:
l.config(image=image1)
else:
l.config(image=image2)
disableButton()
def funForB7():
global notPresentList, element, l
ans = notPresentList[6] == element
if ans:
l.config(image=image1)
else:
l.config(image=image2)
disableButton()
def funForB8():
global notPresentList, element, l
ans = notPresentList[7] == element
if ans:
l.config(image=image1)
else:
l.config(image=image2)
disableButton()
def funForB9():
global notPresentList, element, l
ans = notPresentList[8] == element
if ans:
l.config(image=image1)
else:
l.config(image=image2)
disableButton()
def present():
with open('data\\database\\present.txt', 'r') as file:
content = file.read().split('\n')
presentList = [content[random.randint(0, 400)], content[random.
randint(0, 400)], content[random.randint(0, 400)], content[
random.randint(0, 400)], content[random.randint(0, 400)],
content[random.randint(0, 400)], content[random.randint(0, 400)
], content[random.randint(0, 400)], content[random.randint(0, 400)]
]
element = presentList[random.randint(0, 8)]
return presentList, element
def notPresent():
global buttonList, start
with open('data\\database\\notpresent.txt', 'r') as file:
content = file.read().split('\n')
notPresentList = [content[random.randint(0, 35)], content[random.
randint(0, 35)], content[random.randint(0, 35)], content[random
.randint(0, 35)], content[random.randint(0, 35)], content[
random.randint(0, 35)], content[random.randint(0, 35)], content
[random.randint(0, 35)]]
start.config(state='normal')
obj = present()
presentList, element = obj[0], obj[1]
for i in range(9):
buttonList[i].config(text=presentList[i], state='disabled')
notPresentList.insert(random.randint(0, 9), element)
return notPresentList, element
def start():
global buttonList, start, notPresentList, element
start.config(state='disabled')
for i in range(9):
buttonList[i].config(text=notPresentList[i], state='normal')
<mask token>
| <mask token>
def restart():
root.destroy()
os.startfile('data\\programs\\game with tkinter.py')
def disableButton():
global l, restartButton, start
b1.config(state='disabled')
b2.config(state='disabled')
b3.config(state='disabled')
b4.config(state='disabled')
b5.config(state='disabled')
b6.config(state='disabled')
b7.config(state='disabled')
b8.config(state='disabled')
b9.config(state='disabled')
start.config(state='disabled')
restartButton.config(state='normal', command=restart, text=
' --->press to restart<--- ')
def funForB1():
global notPresentList, element, l, start
ans = notPresentList[0] == element
if ans:
l.config(image=image1)
else:
l.config(image=image2)
disableButton()
def funForB2():
global notPresentList, element, l
ans = notPresentList[1] == element
if ans:
l.config(image=image1)
else:
l.config(image=image2)
disableButton()
def funForB3():
global notPresentList, element, l
ans = notPresentList[2] == element
if ans:
l.config(image=image1)
else:
l.config(image=image2)
disableButton()
def funForB4():
global notPresentList, element, l
ans = notPresentList[3] == element
if ans:
l.config(image=image1)
else:
l.config(image=image2)
disableButton()
def funForB5():
global notPresentList, element, l
ans = notPresentList[4] == element
if ans:
l.config(image=image1)
else:
l.config(image=image2)
disableButton()
def funForB6():
global notPresentList, element, l
ans = notPresentList[5] == element
if ans:
l.config(image=image1)
else:
l.config(image=image2)
disableButton()
def funForB7():
global notPresentList, element, l
ans = notPresentList[6] == element
if ans:
l.config(image=image1)
else:
l.config(image=image2)
disableButton()
def funForB8():
global notPresentList, element, l
ans = notPresentList[7] == element
if ans:
l.config(image=image1)
else:
l.config(image=image2)
disableButton()
def funForB9():
global notPresentList, element, l
ans = notPresentList[8] == element
if ans:
l.config(image=image1)
else:
l.config(image=image2)
disableButton()
def present():
with open('data\\database\\present.txt', 'r') as file:
content = file.read().split('\n')
presentList = [content[random.randint(0, 400)], content[random.
randint(0, 400)], content[random.randint(0, 400)], content[
random.randint(0, 400)], content[random.randint(0, 400)],
content[random.randint(0, 400)], content[random.randint(0, 400)
], content[random.randint(0, 400)], content[random.randint(0, 400)]
]
element = presentList[random.randint(0, 8)]
return presentList, element
def notPresent():
global buttonList, start
with open('data\\database\\notpresent.txt', 'r') as file:
content = file.read().split('\n')
notPresentList = [content[random.randint(0, 35)], content[random.
randint(0, 35)], content[random.randint(0, 35)], content[random
.randint(0, 35)], content[random.randint(0, 35)], content[
random.randint(0, 35)], content[random.randint(0, 35)], content
[random.randint(0, 35)]]
start.config(state='normal')
obj = present()
presentList, element = obj[0], obj[1]
for i in range(9):
buttonList[i].config(text=presentList[i], state='disabled')
notPresentList.insert(random.randint(0, 9), element)
return notPresentList, element
def start():
global buttonList, start, notPresentList, element
start.config(state='disabled')
for i in range(9):
buttonList[i].config(text=notPresentList[i], state='normal')
<mask token>
root.title('Memory Game')
root.geometry('400x500')
root.resizable(0, 0)
root.config(bg='white')
<mask token>
start.place(x=150, y=110)
<mask token>
frameMain.place(x=10, y=150)
<mask token>
l.place(x=180, y=5)
<mask token>
b1.place(x=10, y=16)
b2.place(x=150, y=16)
b3.place(x=290, y=16)
b4.place(x=10, y=110)
b5.place(x=150, y=110)
b6.place(x=290, y=110)
b7.place(x=10, y=204)
b8.place(x=150, y=204)
b9.place(x=290, y=204)
<mask token>
restartButton.place(x=60, y=460)
<mask token>
root.mainloop()
| import time,random,os
from tkinter import *
def restart():
    """Tear down the current window and relaunch the game as a fresh process."""
    root.destroy()
    # NOTE: os.startfile is Windows-only; it re-opens this same game script.
    os.startfile(r"data\programs\game with tkinter.py")
def disableButton():
    """Lock the board after an answer: grey out all buttons, arm restart."""
    global l, restartButton, start
    # Disable the whole 3x3 answer grid plus the start button.
    for grid_button in (b1, b2, b3, b4, b5, b6, b7, b8, b9):
        grid_button.config(state="disabled")
    start.config(state="disabled")
    # Repurpose the bottom bar as the restart control.
    restartButton.config(state="normal",command=restart,text=" --->press to restart<--- ")
def _make_answer_checker(index):
    """Build the click handler for answer button *index* (0-based).

    The returned handler compares the word shown on that button
    (``notPresentList[index]``) with the target ``element``: it shows the
    happy face (``image1``) on a match, the sad face (``image2``) otherwise,
    then locks the board via ``disableButton``.
    """
    def _check():
        global notPresentList, element, l
        if notPresentList[index] == element:
            l.config(image=image1)
        else:
            l.config(image=image2)
        disableButton()
    return _check


# One handler per grid button. The public names and call signatures are
# unchanged from the original nine hand-written funForB* functions; the
# factory removes the ninefold copy-paste.
funForB1 = _make_answer_checker(0)
funForB2 = _make_answer_checker(1)
funForB3 = _make_answer_checker(2)
funForB4 = _make_answer_checker(3)
funForB5 = _make_answer_checker(4)
funForB6 = _make_answer_checker(5)
funForB7 = _make_answer_checker(6)
funForB8 = _make_answer_checker(7)
funForB9 = _make_answer_checker(8)
def present():
    """Pick nine words from the 'present' word file, plus a target word.

    Returns:
        (presentList, element): nine words sampled with replacement from
        the data file, and one of them chosen as the word the player must
        remember.
    """
    with open(r"data\database\present.txt", "r") as file:
        content = file.read().split("\n")
    # Sample from at most the first 401 lines — the range the original
    # code hard-coded with randint(0, 400). Slicing also tolerates shorter
    # data files instead of raising IndexError.
    pool = content[:401]
    presentList = random.choices(pool, k=9)
    # The target is one of the nine shown words.
    element = random.choice(presentList)
    return (presentList, element)
def notPresent():
    """Prepare the answer round: eight decoys plus the target word.

    Reads eight decoy words from the 'notpresent' file, enables the start
    button, shows the nine 'present' words on the (still disabled) grid for
    the memorisation phase, and inserts the target word into the decoy list
    at a uniformly random position.

    Returns:
        (notPresentList, element): the nine answer options and the target.
    """
    global buttonList, start
    with open(r"data\database\notpresent.txt", "r") as file:
        content = file.read().split("\n")
    # Eight decoys drawn (with replacement) from the first 36 lines — the
    # range the original hard-coded with randint(0, 35); slicing tolerates
    # shorter data files instead of raising IndexError.
    pool = content[:36]
    notPresentList = random.choices(pool, k=8)
    start.config(state="normal")
    presentList, element = present()
    # Memorisation phase: show the 'present' words, but keep buttons locked.
    for button, word in zip(buttonList, presentList):
        button.config(text=word, state="disabled")
    # randint(0, 8): nine valid slots once the target is inserted. The
    # original used randint(0, 9), which made the last slot twice as likely
    # (inserting past the end appends, so indices 8 and 9 collide).
    notPresentList.insert(random.randint(0, 8), element)
    return (notPresentList, element)
def start():
    """Begin the answer phase: reveal the nine options and enable clicks."""
    # NOTE(review): this function shares its name with the ``start`` Button
    # that replaces it at module level; by the time it runs (as the Button's
    # command), the global ``start`` already refers to the Button widget.
    global buttonList, start, notPresentList, element
    start.config(state="disabled")
    for button, word in zip(buttonList, notPresentList):
        button.config(text=word, state="normal")
# main — build the window, target label, 3x3 answer grid, and restart bar.
root = Tk()
root.title("Memory Game")
root.geometry("400x500")
root.resizable(0, 0)
root.config(bg="white")

# Result faces shown on the header label after an answer.
image1 = PhotoImage(file=r"data\img\smiley.png")
image2 = PhotoImage(file=r"data\img\pleading.png")

start = Button(root, bg="black", fg="white", text="-->Start<--",
               font="comicsansms 15 bold", command=start, relief="raised",
               state="normal", bd=2)
start.place(x=150, y=110)

frameMain = Frame(root, relief="flat", bd=1, background="white",
                  width=400, height=417)
frameMain.place(x=10, y=150)

image = PhotoImage(file=r"data\img\emoji.png")
l = Label(root, image=image, font="comicsansms 15 bold", fg="black", bg="white")
l.place(x=180, y=5)

# 3x3 grid of answer buttons; colours alternate cyan/teal exactly as the
# original hand-written b1..b9 definitions did.
_commands = [funForB1, funForB2, funForB3, funForB4, funForB5,
             funForB6, funForB7, funForB8, funForB9]
buttonList = []
for _i, _cmd in enumerate(_commands):
    _btn = Button(frameMain, bg='cyan' if _i % 2 == 0 else 'teal',
                  text="plz start", fg="white", width=10, height=5,
                  relief='raised', bd=3, state="normal",
                  disabledforeground="white", command=_cmd)
    _btn.place(x=(10, 150, 290)[_i % 3], y=(16, 110, 204)[_i // 3])
    buttonList.append(_btn)
b1, b2, b3, b4, b5, b6, b7, b8, b9 = buttonList

restartButton = Button(root, bg="teal", fg="white",
                       text="!!! Remember these items !!!",
                       font="comicsansms 15 bold", relief="raised",
                       state="disabled", disabledforeground="white")
restartButton.place(x=60, y=460)

notPresentList, element = notPresent()
root.mainloop()
| [
11,
12,
14,
15,
18
] |
2,097 | 95cdf6a22655d500c2838899ec9dfbff637a5969 | #!/usr/bin/python
#
#
# This is the Hydra slave module
| null | null | null | null | [
1
] |
2,098 | e0c5498d9b18a6a32fcd2725ef4f6a1adaef6c68 | <mask token>
| <mask token>
main(sys.argv)
| import sys
from ulang.runtime.main import main
main(sys.argv)
| null | null | [
0,
1,
2
] |
2,099 | fa8431ae96cd6c1133d56285d0168f43d9068bc5 | <mask token>
class PongBall(Widget):
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
def __init__(self, **kwargs):
super(PongBall, self).__init__(**kwargs)
self._body = body = self.world.CreateDynamicBody(position=self.pos,
linearDamping=0)
fix = body.CreateCircleFixture(radius=self.radius, density=0,
restitution=1, friction=0)
self.hue = random()
<mask token>
class PongGame(App):
ball = ObjectProperty(None)
player1 = ObjectProperty(None)
player2 = ObjectProperty(None)
def touchdown(self, instance, touch):
self.serve_ball()
def serve_ball(self):
vel = self.ball._body.linearVelocity
vel.x = random() - 0.5
vel.y = random() - 0.5
self.ball._body.linearVelocity = vel
self.ball._body.SetTransform(b2Vec2(200, 200), 0)
def build(self):
canvas = Widget()
canvas.bind(on_touch_down=self.touchdown)
self.world = world = b2World((0, -10), True)
edges = self.world.CreateStaticBody(shapes=b2EdgeShape(vertices=[(-
4000, 0), (0, 4000)]))
edges.position.Set(0, 0)
self.ball = ball = PongBall(y=200, x=200, world=world)
canvas.add_widget(ball)
self.serve_ball()
Clock.schedule_interval(self.update, 1 / 60)
return canvas
def update(self, dt):
self.world.Step(dt, 10, 8)
self.ball.update_from_body()
<mask token>
| <mask token>
class PongPaddle(Widget):
<mask token>
def __init__(self, **kwargs):
super(PongPaddle, self).__init__(**kwargs)
class PongBall(Widget):
radius = NumericProperty(20)
hue = NumericProperty(0)
world = ObjectProperty(None)
_body = ObjectProperty(None)
speed = 100
def __init__(self, **kwargs):
super(PongBall, self).__init__(**kwargs)
self._body = body = self.world.CreateDynamicBody(position=self.pos,
linearDamping=0)
fix = body.CreateCircleFixture(radius=self.radius, density=0,
restitution=1, friction=0)
self.hue = random()
def update_from_body(self):
vel = self._body.linearVelocity
if vel.length > 0 and (vel.length > 1.05 or vel.length < 0.95):
t = self.speed / vel.length
vel.x = vel.x * t
vel.y = vel.y * t
self._body.linearVelocity = vel
self.pos = self._body.position.x, self._body.position.y
class PongGame(App):
ball = ObjectProperty(None)
player1 = ObjectProperty(None)
player2 = ObjectProperty(None)
def touchdown(self, instance, touch):
self.serve_ball()
def serve_ball(self):
vel = self.ball._body.linearVelocity
vel.x = random() - 0.5
vel.y = random() - 0.5
self.ball._body.linearVelocity = vel
self.ball._body.SetTransform(b2Vec2(200, 200), 0)
def build(self):
canvas = Widget()
canvas.bind(on_touch_down=self.touchdown)
self.world = world = b2World((0, -10), True)
edges = self.world.CreateStaticBody(shapes=b2EdgeShape(vertices=[(-
4000, 0), (0, 4000)]))
edges.position.Set(0, 0)
self.ball = ball = PongBall(y=200, x=200, world=world)
canvas.add_widget(ball)
self.serve_ball()
Clock.schedule_interval(self.update, 1 / 60)
return canvas
def update(self, dt):
self.world.Step(dt, 10, 8)
self.ball.update_from_body()
<mask token>
| <mask token>
class PongPaddle(Widget):
score = NumericProperty(0)
def __init__(self, **kwargs):
super(PongPaddle, self).__init__(**kwargs)
class PongBall(Widget):
radius = NumericProperty(20)
hue = NumericProperty(0)
world = ObjectProperty(None)
_body = ObjectProperty(None)
speed = 100
def __init__(self, **kwargs):
super(PongBall, self).__init__(**kwargs)
self._body = body = self.world.CreateDynamicBody(position=self.pos,
linearDamping=0)
fix = body.CreateCircleFixture(radius=self.radius, density=0,
restitution=1, friction=0)
self.hue = random()
def update_from_body(self):
vel = self._body.linearVelocity
if vel.length > 0 and (vel.length > 1.05 or vel.length < 0.95):
t = self.speed / vel.length
vel.x = vel.x * t
vel.y = vel.y * t
self._body.linearVelocity = vel
self.pos = self._body.position.x, self._body.position.y
class PongGame(App):
ball = ObjectProperty(None)
player1 = ObjectProperty(None)
player2 = ObjectProperty(None)
def touchdown(self, instance, touch):
self.serve_ball()
def serve_ball(self):
vel = self.ball._body.linearVelocity
vel.x = random() - 0.5
vel.y = random() - 0.5
self.ball._body.linearVelocity = vel
self.ball._body.SetTransform(b2Vec2(200, 200), 0)
def build(self):
canvas = Widget()
canvas.bind(on_touch_down=self.touchdown)
self.world = world = b2World((0, -10), True)
edges = self.world.CreateStaticBody(shapes=b2EdgeShape(vertices=[(-
4000, 0), (0, 4000)]))
edges.position.Set(0, 0)
self.ball = ball = PongBall(y=200, x=200, world=world)
canvas.add_widget(ball)
self.serve_ball()
Clock.schedule_interval(self.update, 1 / 60)
return canvas
def update(self, dt):
self.world.Step(dt, 10, 8)
self.ball.update_from_body()
<mask token>
| from Box2D import *
from random import random
from kivy.app import App
from kivy.uix.widget import Widget
from kivy.properties import NumericProperty, ObjectProperty
from kivy.lang import Builder
from kivy.clock import Clock
Builder.load_string(
"""
<PongBall>:
canvas:
Color:
hsv: self.hue, 1, 1
Ellipse:
pos: self.x - self.radius, self.y - self.radius
size: self.radius * 2, self.radius * 2
<PongPaddle>:
size: 25, 200
canvas:
Rectangle:
pos:self.pos
size:self.size
"""
)
class PongPaddle(Widget):
score = NumericProperty(0)
def __init__(self, **kwargs):
super(PongPaddle, self).__init__(**kwargs)
class PongBall(Widget):
radius = NumericProperty(20)
hue = NumericProperty(0)
world = ObjectProperty(None)
_body = ObjectProperty(None)
speed = 100
def __init__(self, **kwargs):
super(PongBall, self).__init__(**kwargs)
self._body = body = self.world.CreateDynamicBody(position=self.pos,
linearDamping=0)
fix = body.CreateCircleFixture(radius=self.radius, density=0,
restitution=1, friction=0)
self.hue = random()
def update_from_body(self):
vel = self._body.linearVelocity
if vel.length > 0 and (vel.length > 1.05 or vel.length < 0.95):
t = self.speed / vel.length
vel.x = vel.x * t
vel.y = vel.y * t
self._body.linearVelocity = vel
self.pos = self._body.position.x, self._body.position.y
class PongGame(App):
ball = ObjectProperty(None)
player1 = ObjectProperty(None)
player2 = ObjectProperty(None)
def touchdown(self, instance, touch):
self.serve_ball()
def serve_ball(self):
vel = self.ball._body.linearVelocity
vel.x = random() - 0.5
vel.y = random() - 0.5
self.ball._body.linearVelocity = vel
self.ball._body.SetTransform(b2Vec2(200, 200), 0)
def build(self):
canvas = Widget()
canvas.bind(on_touch_down=self.touchdown)
self.world = world = b2World((0, -10), True)
edges = self.world.CreateStaticBody(shapes=b2EdgeShape(vertices=[(-
4000, 0), (0, 4000)]))
edges.position.Set(0, 0)
self.ball = ball = PongBall(y=200, x=200, world=world)
canvas.add_widget(ball)
self.serve_ball()
Clock.schedule_interval(self.update, 1 / 60)
return canvas
def update(self, dt):
self.world.Step(dt, 10, 8)
self.ball.update_from_body()
PongGame().run()
| #Kivy + Box2d test
#Not working...
from Box2D import *
from random import random
from kivy.app import App
from kivy.uix.widget import Widget
from kivy.properties import NumericProperty, ObjectProperty
from kivy.lang import Builder
from kivy.clock import Clock
Builder.load_string('''
<PongBall>:
canvas:
Color:
hsv: self.hue, 1, 1
Ellipse:
pos: self.x - self.radius, self.y - self.radius
size: self.radius * 2, self.radius * 2
<PongPaddle>:
size: 25, 200
canvas:
Rectangle:
pos:self.pos
size:self.size
''')
class PongPaddle(Widget):
    """A player paddle widget; ``score`` counts goals (unused in this demo).

    The original defined an __init__ that only forwarded to
    super().__init__(**kwargs); that is exactly what the inherited
    constructor does, so it has been removed.
    """
    score = NumericProperty(0)
class PongBall(Widget):
    """The bouncing ball widget, mirrored by a Box2D dynamic body."""
    radius = NumericProperty(20)
    hue = NumericProperty(0)
    # for physics: the Box2D world this ball lives in, and its body handle
    world = ObjectProperty(None)
    _body = ObjectProperty(None)
    # target speed magnitude enforced in update_from_body
    # (units: Box2D world units per step — presumably pixels; TODO confirm)
    speed = 100
    def __init__(self, **kwargs):
        """Create the widget and a dynamic body at the widget's position."""
        super(PongBall, self).__init__(**kwargs)
        self._body = body = self.world.CreateDynamicBody(
            position = self.pos,
            linearDamping=0
        )
        # Perfectly elastic, frictionless circle fixture matching the radius.
        fix = body.CreateCircleFixture(
            radius = self.radius,
            density = 0,
            restitution = 1,
            friction=0
        )
        self.hue = random()
    def update_from_body(self):
        """Re-normalise the body's speed and copy its position to the widget."""
        # NOTE(review): the guard compares the magnitude to the absolute
        # band (0.95, 1.05) but rescales to ``speed`` (100) — so once moving,
        # the velocity is snapped back to length 100 on essentially every
        # non-zero update. The > 0 check avoids division by zero.
        vel = self._body.linearVelocity
        if(vel.length > 0 and (vel.length > 1.05 or vel.length < 0.95)):
            t = self.speed/vel.length
            vel.x = vel.x*t
            vel.y = vel.y*t
            self._body.linearVelocity = vel
        self.pos = self._body.position.x, self._body.position.y
class PongGame(App):
    """Kivy app: a ball bouncing in a Box2D world; tap anywhere to re-serve."""

    ball = ObjectProperty(None)
    player1 = ObjectProperty(None)
    player2 = ObjectProperty(None)

    def touchdown(self, instance, touch):
        """Any touch on the canvas re-serves the ball."""
        self.serve_ball()

    def serve_ball(self):
        """Reset the ball to (200, 200) with a small random velocity.

        The tiny initial velocity is re-normalised to PongBall.speed by
        update_from_body on the next tick.
        """
        vel = self.ball._body.linearVelocity
        vel.x = random() - 0.5
        vel.y = random() - 0.5
        self.ball._body.linearVelocity = vel
        # SetTransform teleports the body; the second argument keeps angle 0.
        # (Dead commented-out SetPosition/position.x experiments removed.)
        self.ball._body.SetTransform(b2Vec2(200, 200), 0)

    def build(self):
        """Create the physics world, static edge, ball, and 60 Hz tick."""
        canvas = Widget()
        canvas.bind(on_touch_down=self.touchdown)
        self.world = world = b2World((0, -10), True)
        # One static edge body for the ball to bounce against.
        edges = self.world.CreateStaticBody(
            shapes=b2EdgeShape(vertices=[(-4000, 0), (0, 4000)])
        )
        edges.position.Set(0, 0)
        self.ball = ball = PongBall(y=200, x=200, world=world)
        canvas.add_widget(ball)
        self.serve_ball()
        Clock.schedule_interval(self.update, 1 / 60)
        return canvas

    def update(self, dt):
        """Advance the physics world one step and sync the ball widget."""
        self.world.Step(dt, 10, 8)
        self.ball.update_from_body()


PongGame().run()
8,
12,
13,
15,
16
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.