index
int64 0
10k
| blob_id
stringlengths 40
40
| step-1
stringlengths 13
984k
| step-2
stringlengths 6
1.23M
⌀ | step-3
stringlengths 15
1.34M
⌀ | step-4
stringlengths 30
1.34M
⌀ | step-5
stringlengths 64
1.2M
⌀ | step-ids
sequencelengths 1
5
|
---|---|---|---|---|---|---|---|
900 | 502e0f0c6376617dc094fcdd47bea9773d011864 | <mask token>
| def filter_lines(in_filename, in_filename2, out_filename):
"""Read records from in_filename and write records to out_filename if
the beginning of the line (taken up to the first comma at or after
position 11) is found in keys (which must be a set of byte strings).
"""
proper_convert = 0
missing_convert = 0
fourteen_set = set()
with open(in_filename, 'r') as in_f, open(in_filename2, 'r'
) as in_f2, open(out_filename, 'w') as out_f:
for line in in_f:
vals = line.strip().split(',')
fips = vals[0]
if fips not in fourteen_set:
fourteen_set.add(fips)
for line in in_f2:
vals = line.strip().split(',')
fips = vals[0]
count = vals[1]
proper_convert += 1
if fips not in fourteen_set:
new_line = str(fips) + ',' + str(count) + '\n'
out_f.write(new_line)
missing_convert += 1
return proper_convert, missing_convert
<mask token>
| def filter_lines(in_filename, in_filename2, out_filename):
"""Read records from in_filename and write records to out_filename if
the beginning of the line (taken up to the first comma at or after
position 11) is found in keys (which must be a set of byte strings).
"""
proper_convert = 0
missing_convert = 0
fourteen_set = set()
with open(in_filename, 'r') as in_f, open(in_filename2, 'r'
) as in_f2, open(out_filename, 'w') as out_f:
for line in in_f:
vals = line.strip().split(',')
fips = vals[0]
if fips not in fourteen_set:
fourteen_set.add(fips)
for line in in_f2:
vals = line.strip().split(',')
fips = vals[0]
count = vals[1]
proper_convert += 1
if fips not in fourteen_set:
new_line = str(fips) + ',' + str(count) + '\n'
out_f.write(new_line)
missing_convert += 1
return proper_convert, missing_convert
<mask token>
print(counter1)
print(new_vals1)
| def filter_lines(in_filename, in_filename2, out_filename):
"""Read records from in_filename and write records to out_filename if
the beginning of the line (taken up to the first comma at or after
position 11) is found in keys (which must be a set of byte strings).
"""
proper_convert = 0
missing_convert = 0
fourteen_set = set()
with open(in_filename, 'r') as in_f, open(in_filename2, 'r'
) as in_f2, open(out_filename, 'w') as out_f:
for line in in_f:
vals = line.strip().split(',')
fips = vals[0]
if fips not in fourteen_set:
fourteen_set.add(fips)
for line in in_f2:
vals = line.strip().split(',')
fips = vals[0]
count = vals[1]
proper_convert += 1
if fips not in fourteen_set:
new_line = str(fips) + ',' + str(count) + '\n'
out_f.write(new_line)
missing_convert += 1
return proper_convert, missing_convert
in_filename = '/Users/VamsiG/Music/2014_Data/FCC_Final_Output.csv'
in_filename1 = '/Users/VamsiG/Music/2016_Data/FCC_Final_Output.csv'
out_filename = '/Users/VamsiG/Music/FCC_Overlap_CompleteFips.csv'
counter1, new_vals1 = filter_lines(in_filename, in_filename1, out_filename)
print(counter1)
print(new_vals1)
| def filter_lines(in_filename, in_filename2,out_filename):
"""Read records from in_filename and write records to out_filename if
the beginning of the line (taken up to the first comma at or after
position 11) is found in keys (which must be a set of byte strings).
"""
proper_convert = 0
missing_convert = 0
fourteen_set = set()
with open(in_filename, 'r') as in_f, open(in_filename2, 'r') as in_f2, open(out_filename, 'w') as out_f:
for line in in_f:
vals = line.strip().split(",")
fips = vals[0]
if(fips not in fourteen_set):
fourteen_set.add(fips)
for line in in_f2:
vals = line.strip().split(",")
fips = vals[0]
count = vals[1]
proper_convert += 1
if(fips not in fourteen_set):
new_line = str(fips)+","+str(count)+"\n"
out_f.write(new_line)
missing_convert += 1
return (proper_convert, missing_convert)
in_filename = "/Users/VamsiG/Music/2014_Data/FCC_Final_Output.csv"
in_filename1 = "/Users/VamsiG/Music/2016_Data/FCC_Final_Output.csv"
out_filename= "/Users/VamsiG/Music/FCC_Overlap_CompleteFips.csv"
counter1, new_vals1 = filter_lines(in_filename,in_filename1,out_filename)
print(counter1)
print(new_vals1) | [
0,
1,
2,
3,
4
] |
901 | a17abd3947a946daf2c453c120f2e79d2ba60778 | <mask token>
| <mask token>
while len(sc_lst) < 10:
try:
sc = int(input('请第%d位评委打分:' % i))
if sc > 0 and sc < 101:
sc_lst.append(sc)
i += 1
else:
print('超出范围,输入无效')
except:
print('请输入1-100以内的数字')
<mask token>
sc_lst.remove(max_sc)
sc_lst.remove(min_sc)
<mask token>
print('去除最高分%d,最低分%d,平均分为%d' % (max_sc, min_sc, ave_sc))
print('end')
| sc_lst = []
i = 1
while len(sc_lst) < 10:
try:
sc = int(input('请第%d位评委打分:' % i))
if sc > 0 and sc < 101:
sc_lst.append(sc)
i += 1
else:
print('超出范围,输入无效')
except:
print('请输入1-100以内的数字')
max_sc = max(sc_lst)
min_sc = min(sc_lst)
sc_lst.remove(max_sc)
sc_lst.remove(min_sc)
ave_sc = sum(sc_lst) / len(sc_lst)
print('去除最高分%d,最低分%d,平均分为%d' % (max_sc, min_sc, ave_sc))
print('end')
| # 赛场统分
# 【问题】在编程竞赛中,有10个评委为参赛的选手打分,分数为0 ~ 100分。
# 选手最后得分为:去掉一个最高分和一个最低分后其余8个分数的平均值。请编写一个程序实现。
sc_lst = []
i = 1
while len(sc_lst) < 10:
try:
sc = int(input('请第%d位评委打分:' % i))
if sc > 0 and sc < 101:
sc_lst.append(sc)
i += 1
else:
print('超出范围,输入无效')
except:
print('请输入1-100以内的数字')
max_sc = max(sc_lst)
min_sc = min(sc_lst)
sc_lst.remove(max_sc)
sc_lst.remove(min_sc)
ave_sc = sum(sc_lst) / len(sc_lst)
print('去除最高分%d,最低分%d,平均分为%d' % (max_sc, min_sc, ave_sc))
print('end')
| null | [
0,
1,
2,
3
] |
902 | 04670041dab49f8c2d4a0415030356e7ea92925f | <mask token>
class ModelExtractor(object):
def __init__(self, modelzip):
self.modelzip = modelzip
def __enter__(self):
if not self.__is_model_good():
raise ValueError('Invalid model zip file')
obj = self.__get_obj_filename()
if obj is None:
raise ValueError('No obj file present in model zip')
self.path = mkdtemp()
try:
self.modelzip.extractall(self.path)
except:
raise ValueError('Error while extracting zip file')
return {'path': self.path, 'obj': join(self.path, obj)}
def __exit__(self, type, value, tb):
rmtree(self.path, ignore_errors=True)
<mask token>
def __get_obj_filename(self):
for info in self.modelzip.infolist():
if info.filename.endswith('.obj'):
return info.filename
return None
| <mask token>
class ModelExtractor(object):
def __init__(self, modelzip):
self.modelzip = modelzip
def __enter__(self):
if not self.__is_model_good():
raise ValueError('Invalid model zip file')
obj = self.__get_obj_filename()
if obj is None:
raise ValueError('No obj file present in model zip')
self.path = mkdtemp()
try:
self.modelzip.extractall(self.path)
except:
raise ValueError('Error while extracting zip file')
return {'path': self.path, 'obj': join(self.path, obj)}
def __exit__(self, type, value, tb):
rmtree(self.path, ignore_errors=True)
def __is_model_good(self):
total_size_uncompressed = 0
for path in self.modelzip.namelist():
if '..' in path or path.startswith('/'):
return False
info = self.modelzip.getinfo(path)
uncompressed_size = info.file_size
total_size_uncompressed += uncompressed_size
return total_size_uncompressed < MAX_UNCOMPRESSED_SIZE
def __get_obj_filename(self):
for info in self.modelzip.infolist():
if info.filename.endswith('.obj'):
return info.filename
return None
| <mask token>
MAX_UNCOMPRESSED_SIZE = 100000000.0
class ModelExtractor(object):
def __init__(self, modelzip):
self.modelzip = modelzip
def __enter__(self):
if not self.__is_model_good():
raise ValueError('Invalid model zip file')
obj = self.__get_obj_filename()
if obj is None:
raise ValueError('No obj file present in model zip')
self.path = mkdtemp()
try:
self.modelzip.extractall(self.path)
except:
raise ValueError('Error while extracting zip file')
return {'path': self.path, 'obj': join(self.path, obj)}
def __exit__(self, type, value, tb):
rmtree(self.path, ignore_errors=True)
def __is_model_good(self):
total_size_uncompressed = 0
for path in self.modelzip.namelist():
if '..' in path or path.startswith('/'):
return False
info = self.modelzip.getinfo(path)
uncompressed_size = info.file_size
total_size_uncompressed += uncompressed_size
return total_size_uncompressed < MAX_UNCOMPRESSED_SIZE
def __get_obj_filename(self):
for info in self.modelzip.infolist():
if info.filename.endswith('.obj'):
return info.filename
return None
| from tempfile import mkdtemp
from shutil import rmtree
from os.path import join
import os
MAX_UNCOMPRESSED_SIZE = 100000000.0
class ModelExtractor(object):
def __init__(self, modelzip):
self.modelzip = modelzip
def __enter__(self):
if not self.__is_model_good():
raise ValueError('Invalid model zip file')
obj = self.__get_obj_filename()
if obj is None:
raise ValueError('No obj file present in model zip')
self.path = mkdtemp()
try:
self.modelzip.extractall(self.path)
except:
raise ValueError('Error while extracting zip file')
return {'path': self.path, 'obj': join(self.path, obj)}
def __exit__(self, type, value, tb):
rmtree(self.path, ignore_errors=True)
def __is_model_good(self):
total_size_uncompressed = 0
for path in self.modelzip.namelist():
if '..' in path or path.startswith('/'):
return False
info = self.modelzip.getinfo(path)
uncompressed_size = info.file_size
total_size_uncompressed += uncompressed_size
return total_size_uncompressed < MAX_UNCOMPRESSED_SIZE
def __get_obj_filename(self):
for info in self.modelzip.infolist():
if info.filename.endswith('.obj'):
return info.filename
return None
| from tempfile import mkdtemp
from shutil import rmtree
from os.path import join
import os
MAX_UNCOMPRESSED_SIZE = 100e6 # 100MB
# Extracts a zipfile into a directory safely
class ModelExtractor(object):
def __init__(self, modelzip):
self.modelzip = modelzip
def __enter__(self):
if not self.__is_model_good():
raise ValueError('Invalid model zip file')
obj = self.__get_obj_filename()
if obj is None:
raise ValueError('No obj file present in model zip')
self.path = mkdtemp()
try:
self.modelzip.extractall(self.path)
except:
raise ValueError('Error while extracting zip file')
return {
'path': self.path,
'obj': join(self.path, obj)
}
def __exit__(self, type, value, tb):
rmtree(self.path, ignore_errors=True)
def __is_model_good(self):
total_size_uncompressed = 0
for path in self.modelzip.namelist():
if '..' in path or path.startswith('/'):
return False
info = self.modelzip.getinfo(path)
uncompressed_size = info.file_size
total_size_uncompressed += uncompressed_size
return total_size_uncompressed < MAX_UNCOMPRESSED_SIZE
def __get_obj_filename(self):
for info in self.modelzip.infolist():
if info.filename.endswith('.obj'):
return info.filename
return None
| [
5,
6,
7,
8,
9
] |
903 | cf3b66a635c6549553af738f263b035217e75a7a | <mask token>
def merge_sort(lists):
if len(lists) <= 1:
return lists
middle = len(lists) // 2
left = merge_sort(lists[:middle])
right = merge_sort(lists[middle:])
return merge(left, right)
<mask token>
| <mask token>
def merge(a, b):
global count
c = []
h = j = 0
while j < len(a) and h < len(b):
if a[j] <= b[h]:
c.append(a[j])
j += 1
else:
count += len(a[j:])
c.append(b[h])
h += 1
if j == len(a):
for i in b[h:]:
c.append(i)
else:
for i in a[j:]:
c.append(i)
return c
def merge_sort(lists):
if len(lists) <= 1:
return lists
middle = len(lists) // 2
left = merge_sort(lists[:middle])
right = merge_sort(lists[middle:])
return merge(left, right)
<mask token>
| <mask token>
def merge(a, b):
global count
c = []
h = j = 0
while j < len(a) and h < len(b):
if a[j] <= b[h]:
c.append(a[j])
j += 1
else:
count += len(a[j:])
c.append(b[h])
h += 1
if j == len(a):
for i in b[h:]:
c.append(i)
else:
for i in a[j:]:
c.append(i)
return c
def merge_sort(lists):
if len(lists) <= 1:
return lists
middle = len(lists) // 2
left = merge_sort(lists[:middle])
right = merge_sort(lists[middle:])
return merge(left, right)
if __name__ == '__main__':
a = [7, 6, 5, 9, 10, 11]
print(merge_sort(a))
print(count)
hash(i)
| count = 0
def merge(a, b):
global count
c = []
h = j = 0
while j < len(a) and h < len(b):
if a[j] <= b[h]:
c.append(a[j])
j += 1
else:
count += len(a[j:])
c.append(b[h])
h += 1
if j == len(a):
for i in b[h:]:
c.append(i)
else:
for i in a[j:]:
c.append(i)
return c
def merge_sort(lists):
if len(lists) <= 1:
return lists
middle = len(lists) // 2
left = merge_sort(lists[:middle])
right = merge_sort(lists[middle:])
return merge(left, right)
if __name__ == '__main__':
a = [7, 6, 5, 9, 10, 11]
print(merge_sort(a))
print(count)
hash(i)
| count=0
def merge(a, b):
global count
c = []
h = j = 0
while j < len(a) and h < len(b):
if a[j] <= b[h]:
c.append(a[j])
j += 1
else:
count+=(len(a[j:]))
c.append(b[h])
h += 1
if j == len(a):
for i in b[h:]:
c.append(i)
else:
for i in a[j:]:
c.append(i)
# count += h+1
return c
def merge_sort(lists):
if len(lists) <= 1:
return lists
middle = len(lists)//2
left = merge_sort(lists[:middle])
right = merge_sort(lists[middle:])
return merge(left, right)
if __name__ == '__main__':
a = [7, 6, 5,9, 10, 11]
print(merge_sort(a))
print(count)
hash(i) | [
1,
2,
3,
4,
5
] |
904 | d2298ad1e4737b983ba6d1f2fff59750137510b5 | <mask token>
class JobTest(TestCase):
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
def test_field_order(self):
"""
Job test with field order.
"""
with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,
'extras/tests/dummy_jobs')):
module = 'test_field_order'
name = 'TestFieldOrder'
job_class = get_job(f'local/{module}/{name}')
form = job_class().as_form()
self.assertHTMLEqual(form.as_table(),
"""<tr><th><label for="id_var2">Var2:</label></th><td>
<input class="form-control form-control" id="id_var2" name="var2" placeholder="None" required type="text">
<br><span class="helptext">Hello</span></td></tr>
<tr><th><label for="id_var23">Var23:</label></th><td>
<input class="form-control form-control" id="id_var23" name="var23" placeholder="None" required type="text">
<br><span class="helptext">I want to be second</span></td></tr>
<tr><th><label for="id__commit">Commit changes:</label></th><td>
<input checked id="id__commit" name="_commit" placeholder="Commit changes" type="checkbox">
<br><span class="helptext">Commit changes to the database (uncheck for a dry-run)</span></td></tr>"""
)
<mask token>
def test_ready_only_job_pass(self):
"""
Job read only test with pass result.
"""
with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,
'extras/tests/dummy_jobs')):
module = 'test_read_only_pass'
name = 'TestReadOnlyPass'
job_class = get_job(f'local/{module}/{name}')
job_result = JobResult.objects.create(name=job_class.class_path,
obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())
run_job(data={}, request=None, commit=False, job_result_pk=
job_result.pk)
job_result.refresh_from_db()
self.assertEqual(job_result.status, JobResultStatusChoices.
STATUS_COMPLETED)
self.assertEqual(Site.objects.count(), 0)
<mask token>
def test_read_only_no_commit_field(self):
"""
Job read only test commit field is not shown.
"""
with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,
'extras/tests/dummy_jobs')):
module = 'test_read_only_no_commit_field'
name = 'TestReadOnlyNoCommitField'
job_class = get_job(f'local/{module}/{name}')
form = job_class().as_form()
self.assertHTMLEqual(form.as_table(),
"""<tr><th><label for="id_var">Var:</label></th><td>
<input class="form-control form-control" id="id_var" name="var" placeholder="None" required type="text">
<br><span class="helptext">Hello</span><input id="id__commit" name="_commit" type="hidden" value="False"></td></tr>"""
)
<mask token>
class JobFileUploadTest(TestCase):
"""Test a job that uploads/deletes files."""
@classmethod
def setUpTestData(cls):
cls.file_contents = b'I am content.\n'
cls.dummy_file = SimpleUploadedFile(name='dummy.txt', content=cls.
file_contents)
cls.job_content_type = ContentType.objects.get(app_label='extras',
model='job')
def setUp(self):
self.dummy_file.seek(0)
def test_run_job_pass(self):
"""Test that file upload succeeds; job SUCCEEDS; and files are deleted."""
with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,
'extras/tests/dummy_jobs')):
job_name = 'local/test_file_upload_pass/TestFileUploadPass'
job_class = get_job(job_name)
job_result = JobResult.objects.create(name=job_class.class_path,
obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())
data = {'file': self.dummy_file}
form = job_class().as_form(files=data)
self.assertTrue(form.is_valid())
serialized_data = job_class.serialize_data(form.cleaned_data)
self.assertTrue(isinstance(serialized_data['file'], uuid.UUID))
self.assertEqual(serialized_data['file'], FileProxy.objects.
latest().pk)
self.assertEqual(FileProxy.objects.count(), 1)
run_job(data=serialized_data, request=None, commit=False,
job_result_pk=job_result.pk)
job_result.refresh_from_db()
self.assertEqual(job_result.data['run']['log'][0][2],
f'File contents: {self.file_contents}')
self.assertEqual(FileProxy.objects.count(), 0)
def test_run_job_fail(self):
"""Test that file upload succeeds; job FAILS; files deleted."""
with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,
'extras/tests/dummy_jobs')):
job_name = 'local/test_file_upload_fail/TestFileUploadFail'
job_class = get_job(job_name)
job_result = JobResult.objects.create(name=job_class.class_path,
obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())
data = {'file': self.dummy_file}
form = job_class().as_form(files=data)
self.assertTrue(form.is_valid())
serialized_data = job_class.serialize_data(form.cleaned_data)
self.assertTrue(isinstance(serialized_data['file'], uuid.UUID))
self.assertEqual(serialized_data['file'], FileProxy.objects.
latest().pk)
self.assertEqual(FileProxy.objects.count(), 1)
run_job(data=serialized_data, request=None, commit=False,
job_result_pk=job_result.pk)
job_result.refresh_from_db()
self.assertEqual(job_result.data['run']['log'][0][2],
f'File contents: {self.file_contents}')
self.assertEqual(job_result.data['run']['log'][-1][-1],
'Database changes have been reverted due to error.')
self.assertEqual(FileProxy.objects.count(), 0)
| <mask token>
class JobTest(TestCase):
<mask token>
<mask token>
@classmethod
def setUpTestData(cls):
cls.job_content_type = ContentType.objects.get(app_label='extras',
model='job')
def test_job_pass(self):
"""
Job test with pass result.
"""
with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,
'extras/tests/dummy_jobs')):
module = 'test_pass'
name = 'TestPass'
job_class = get_job(f'local/{module}/{name}')
job_result = JobResult.objects.create(name=job_class.class_path,
obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())
run_job(data={}, request=None, commit=False, job_result_pk=
job_result.pk)
job_result.refresh_from_db()
self.assertEqual(job_result.status, JobResultStatusChoices.
STATUS_COMPLETED)
<mask token>
def test_field_order(self):
"""
Job test with field order.
"""
with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,
'extras/tests/dummy_jobs')):
module = 'test_field_order'
name = 'TestFieldOrder'
job_class = get_job(f'local/{module}/{name}')
form = job_class().as_form()
self.assertHTMLEqual(form.as_table(),
"""<tr><th><label for="id_var2">Var2:</label></th><td>
<input class="form-control form-control" id="id_var2" name="var2" placeholder="None" required type="text">
<br><span class="helptext">Hello</span></td></tr>
<tr><th><label for="id_var23">Var23:</label></th><td>
<input class="form-control form-control" id="id_var23" name="var23" placeholder="None" required type="text">
<br><span class="helptext">I want to be second</span></td></tr>
<tr><th><label for="id__commit">Commit changes:</label></th><td>
<input checked id="id__commit" name="_commit" placeholder="Commit changes" type="checkbox">
<br><span class="helptext">Commit changes to the database (uncheck for a dry-run)</span></td></tr>"""
)
def test_no_field_order(self):
"""
Job test without field_order.
"""
with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,
'extras/tests/dummy_jobs')):
module = 'test_no_field_order'
name = 'TestNoFieldOrder'
job_class = get_job(f'local/{module}/{name}')
form = job_class().as_form()
self.assertHTMLEqual(form.as_table(),
"""<tr><th><label for="id_var23">Var23:</label></th><td>
<input class="form-control form-control" id="id_var23" name="var23" placeholder="None" required type="text">
<br><span class="helptext">I want to be second</span></td></tr>
<tr><th><label for="id_var2">Var2:</label></th><td>
<input class="form-control form-control" id="id_var2" name="var2" placeholder="None" required type="text">
<br><span class="helptext">Hello</span></td></tr>
<tr><th><label for="id__commit">Commit changes:</label></th><td>
<input checked id="id__commit" name="_commit" placeholder="Commit changes" type="checkbox">
<br><span class="helptext">Commit changes to the database (uncheck for a dry-run)</span></td></tr>"""
)
def test_ready_only_job_pass(self):
"""
Job read only test with pass result.
"""
with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,
'extras/tests/dummy_jobs')):
module = 'test_read_only_pass'
name = 'TestReadOnlyPass'
job_class = get_job(f'local/{module}/{name}')
job_result = JobResult.objects.create(name=job_class.class_path,
obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())
run_job(data={}, request=None, commit=False, job_result_pk=
job_result.pk)
job_result.refresh_from_db()
self.assertEqual(job_result.status, JobResultStatusChoices.
STATUS_COMPLETED)
self.assertEqual(Site.objects.count(), 0)
def test_read_only_job_fail(self):
"""
Job read only test with fail result.
"""
with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,
'extras/tests/dummy_jobs')):
module = 'test_read_only_fail'
name = 'TestReadOnlyFail'
job_class = get_job(f'local/{module}/{name}')
job_result = JobResult.objects.create(name=job_class.class_path,
obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())
run_job(data={}, request=None, commit=False, job_result_pk=
job_result.pk)
job_result.refresh_from_db()
self.assertEqual(job_result.status, JobResultStatusChoices.
STATUS_ERRORED)
self.assertEqual(Site.objects.count(), 0)
self.assertNotEqual(job_result.data['run']['log'][-1][-1],
'Database changes have been reverted due to error.')
def test_read_only_no_commit_field(self):
"""
Job read only test commit field is not shown.
"""
with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,
'extras/tests/dummy_jobs')):
module = 'test_read_only_no_commit_field'
name = 'TestReadOnlyNoCommitField'
job_class = get_job(f'local/{module}/{name}')
form = job_class().as_form()
self.assertHTMLEqual(form.as_table(),
"""<tr><th><label for="id_var">Var:</label></th><td>
<input class="form-control form-control" id="id_var" name="var" placeholder="None" required type="text">
<br><span class="helptext">Hello</span><input id="id__commit" name="_commit" type="hidden" value="False"></td></tr>"""
)
def test_ip_address_vars(self):
"""
Test that IPAddress variable fields behave as expected.
This test case exercises the following types for both IPv4 and IPv6:
- IPAddressVar
- IPAddressWithMaskVar
- IPNetworkVar
"""
with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,
'extras/tests/dummy_jobs')):
module = 'test_ipaddress_vars'
name = 'TestIPAddresses'
job_class = get_job(f'local/{module}/{name}')
form_data = dict(ipv4_address='1.2.3.4', ipv4_with_mask=
'1.2.3.4/32', ipv4_network='1.2.3.0/24', ipv6_address=
'2001:db8::1', ipv6_with_mask='2001:db8::1/64',
ipv6_network='2001:db8::/64')
form = job_class().as_form(form_data)
self.assertTrue(form.is_valid())
job_result = JobResult.objects.create(name=job_class.class_path,
obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())
data = job_class.serialize_data(form.cleaned_data)
run_job(data=data, request=None, commit=False, job_result_pk=
job_result.pk)
job_result.refresh_from_db()
job_payload = job_result.data['run']['log'][0][2]
job_result_data = json.loads(job_payload)
self.assertEqual(job_result.status, JobResultStatusChoices.
STATUS_COMPLETED)
self.assertEqual(form_data, job_result_data)
class JobFileUploadTest(TestCase):
"""Test a job that uploads/deletes files."""
@classmethod
def setUpTestData(cls):
cls.file_contents = b'I am content.\n'
cls.dummy_file = SimpleUploadedFile(name='dummy.txt', content=cls.
file_contents)
cls.job_content_type = ContentType.objects.get(app_label='extras',
model='job')
def setUp(self):
self.dummy_file.seek(0)
def test_run_job_pass(self):
"""Test that file upload succeeds; job SUCCEEDS; and files are deleted."""
with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,
'extras/tests/dummy_jobs')):
job_name = 'local/test_file_upload_pass/TestFileUploadPass'
job_class = get_job(job_name)
job_result = JobResult.objects.create(name=job_class.class_path,
obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())
data = {'file': self.dummy_file}
form = job_class().as_form(files=data)
self.assertTrue(form.is_valid())
serialized_data = job_class.serialize_data(form.cleaned_data)
self.assertTrue(isinstance(serialized_data['file'], uuid.UUID))
self.assertEqual(serialized_data['file'], FileProxy.objects.
latest().pk)
self.assertEqual(FileProxy.objects.count(), 1)
run_job(data=serialized_data, request=None, commit=False,
job_result_pk=job_result.pk)
job_result.refresh_from_db()
self.assertEqual(job_result.data['run']['log'][0][2],
f'File contents: {self.file_contents}')
self.assertEqual(FileProxy.objects.count(), 0)
def test_run_job_fail(self):
"""Test that file upload succeeds; job FAILS; files deleted."""
with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,
'extras/tests/dummy_jobs')):
job_name = 'local/test_file_upload_fail/TestFileUploadFail'
job_class = get_job(job_name)
job_result = JobResult.objects.create(name=job_class.class_path,
obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())
data = {'file': self.dummy_file}
form = job_class().as_form(files=data)
self.assertTrue(form.is_valid())
serialized_data = job_class.serialize_data(form.cleaned_data)
self.assertTrue(isinstance(serialized_data['file'], uuid.UUID))
self.assertEqual(serialized_data['file'], FileProxy.objects.
latest().pk)
self.assertEqual(FileProxy.objects.count(), 1)
run_job(data=serialized_data, request=None, commit=False,
job_result_pk=job_result.pk)
job_result.refresh_from_db()
self.assertEqual(job_result.data['run']['log'][0][2],
f'File contents: {self.file_contents}')
self.assertEqual(job_result.data['run']['log'][-1][-1],
'Database changes have been reverted due to error.')
self.assertEqual(FileProxy.objects.count(), 0)
| <mask token>
class JobTest(TestCase):
<mask token>
<mask token>
@classmethod
def setUpTestData(cls):
cls.job_content_type = ContentType.objects.get(app_label='extras',
model='job')
def test_job_pass(self):
"""
Job test with pass result.
"""
with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,
'extras/tests/dummy_jobs')):
module = 'test_pass'
name = 'TestPass'
job_class = get_job(f'local/{module}/{name}')
job_result = JobResult.objects.create(name=job_class.class_path,
obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())
run_job(data={}, request=None, commit=False, job_result_pk=
job_result.pk)
job_result.refresh_from_db()
self.assertEqual(job_result.status, JobResultStatusChoices.
STATUS_COMPLETED)
def test_job_fail(self):
"""
Job test with fail result.
"""
with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,
'extras/tests/dummy_jobs')):
module = 'test_fail'
name = 'TestFail'
job_class = get_job(f'local/{module}/{name}')
job_result = JobResult.objects.create(name=job_class.class_path,
obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())
run_job(data={}, request=None, commit=False, job_result_pk=
job_result.pk)
job_result.refresh_from_db()
self.assertEqual(job_result.status, JobResultStatusChoices.
STATUS_ERRORED)
def test_field_order(self):
"""
Job test with field order.
"""
with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,
'extras/tests/dummy_jobs')):
module = 'test_field_order'
name = 'TestFieldOrder'
job_class = get_job(f'local/{module}/{name}')
form = job_class().as_form()
self.assertHTMLEqual(form.as_table(),
"""<tr><th><label for="id_var2">Var2:</label></th><td>
<input class="form-control form-control" id="id_var2" name="var2" placeholder="None" required type="text">
<br><span class="helptext">Hello</span></td></tr>
<tr><th><label for="id_var23">Var23:</label></th><td>
<input class="form-control form-control" id="id_var23" name="var23" placeholder="None" required type="text">
<br><span class="helptext">I want to be second</span></td></tr>
<tr><th><label for="id__commit">Commit changes:</label></th><td>
<input checked id="id__commit" name="_commit" placeholder="Commit changes" type="checkbox">
<br><span class="helptext">Commit changes to the database (uncheck for a dry-run)</span></td></tr>"""
)
def test_no_field_order(self):
"""
Job test without field_order.
"""
with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,
'extras/tests/dummy_jobs')):
module = 'test_no_field_order'
name = 'TestNoFieldOrder'
job_class = get_job(f'local/{module}/{name}')
form = job_class().as_form()
self.assertHTMLEqual(form.as_table(),
"""<tr><th><label for="id_var23">Var23:</label></th><td>
<input class="form-control form-control" id="id_var23" name="var23" placeholder="None" required type="text">
<br><span class="helptext">I want to be second</span></td></tr>
<tr><th><label for="id_var2">Var2:</label></th><td>
<input class="form-control form-control" id="id_var2" name="var2" placeholder="None" required type="text">
<br><span class="helptext">Hello</span></td></tr>
<tr><th><label for="id__commit">Commit changes:</label></th><td>
<input checked id="id__commit" name="_commit" placeholder="Commit changes" type="checkbox">
<br><span class="helptext">Commit changes to the database (uncheck for a dry-run)</span></td></tr>"""
)
def test_ready_only_job_pass(self):
"""
Job read only test with pass result.
"""
with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,
'extras/tests/dummy_jobs')):
module = 'test_read_only_pass'
name = 'TestReadOnlyPass'
job_class = get_job(f'local/{module}/{name}')
job_result = JobResult.objects.create(name=job_class.class_path,
obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())
run_job(data={}, request=None, commit=False, job_result_pk=
job_result.pk)
job_result.refresh_from_db()
self.assertEqual(job_result.status, JobResultStatusChoices.
STATUS_COMPLETED)
self.assertEqual(Site.objects.count(), 0)
def test_read_only_job_fail(self):
"""
Job read only test with fail result.
"""
with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,
'extras/tests/dummy_jobs')):
module = 'test_read_only_fail'
name = 'TestReadOnlyFail'
job_class = get_job(f'local/{module}/{name}')
job_result = JobResult.objects.create(name=job_class.class_path,
obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())
run_job(data={}, request=None, commit=False, job_result_pk=
job_result.pk)
job_result.refresh_from_db()
self.assertEqual(job_result.status, JobResultStatusChoices.
STATUS_ERRORED)
self.assertEqual(Site.objects.count(), 0)
self.assertNotEqual(job_result.data['run']['log'][-1][-1],
'Database changes have been reverted due to error.')
def test_read_only_no_commit_field(self):
"""
Job read only test commit field is not shown.
"""
with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,
'extras/tests/dummy_jobs')):
module = 'test_read_only_no_commit_field'
name = 'TestReadOnlyNoCommitField'
job_class = get_job(f'local/{module}/{name}')
form = job_class().as_form()
self.assertHTMLEqual(form.as_table(),
"""<tr><th><label for="id_var">Var:</label></th><td>
<input class="form-control form-control" id="id_var" name="var" placeholder="None" required type="text">
<br><span class="helptext">Hello</span><input id="id__commit" name="_commit" type="hidden" value="False"></td></tr>"""
)
def test_ip_address_vars(self):
"""
Test that IPAddress variable fields behave as expected.
This test case exercises the following types for both IPv4 and IPv6:
- IPAddressVar
- IPAddressWithMaskVar
- IPNetworkVar
"""
with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,
'extras/tests/dummy_jobs')):
module = 'test_ipaddress_vars'
name = 'TestIPAddresses'
job_class = get_job(f'local/{module}/{name}')
form_data = dict(ipv4_address='1.2.3.4', ipv4_with_mask=
'1.2.3.4/32', ipv4_network='1.2.3.0/24', ipv6_address=
'2001:db8::1', ipv6_with_mask='2001:db8::1/64',
ipv6_network='2001:db8::/64')
form = job_class().as_form(form_data)
self.assertTrue(form.is_valid())
job_result = JobResult.objects.create(name=job_class.class_path,
obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())
data = job_class.serialize_data(form.cleaned_data)
run_job(data=data, request=None, commit=False, job_result_pk=
job_result.pk)
job_result.refresh_from_db()
job_payload = job_result.data['run']['log'][0][2]
job_result_data = json.loads(job_payload)
self.assertEqual(job_result.status, JobResultStatusChoices.
STATUS_COMPLETED)
self.assertEqual(form_data, job_result_data)
class JobFileUploadTest(TestCase):
"""Test a job that uploads/deletes files."""
@classmethod
def setUpTestData(cls):
cls.file_contents = b'I am content.\n'
cls.dummy_file = SimpleUploadedFile(name='dummy.txt', content=cls.
file_contents)
cls.job_content_type = ContentType.objects.get(app_label='extras',
model='job')
def setUp(self):
self.dummy_file.seek(0)
def test_run_job_pass(self):
"""Test that file upload succeeds; job SUCCEEDS; and files are deleted."""
with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,
'extras/tests/dummy_jobs')):
job_name = 'local/test_file_upload_pass/TestFileUploadPass'
job_class = get_job(job_name)
job_result = JobResult.objects.create(name=job_class.class_path,
obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())
data = {'file': self.dummy_file}
form = job_class().as_form(files=data)
self.assertTrue(form.is_valid())
serialized_data = job_class.serialize_data(form.cleaned_data)
self.assertTrue(isinstance(serialized_data['file'], uuid.UUID))
self.assertEqual(serialized_data['file'], FileProxy.objects.
latest().pk)
self.assertEqual(FileProxy.objects.count(), 1)
run_job(data=serialized_data, request=None, commit=False,
job_result_pk=job_result.pk)
job_result.refresh_from_db()
self.assertEqual(job_result.data['run']['log'][0][2],
f'File contents: {self.file_contents}')
self.assertEqual(FileProxy.objects.count(), 0)
def test_run_job_fail(self):
"""Test that file upload succeeds; job FAILS; files deleted."""
with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,
'extras/tests/dummy_jobs')):
job_name = 'local/test_file_upload_fail/TestFileUploadFail'
job_class = get_job(job_name)
job_result = JobResult.objects.create(name=job_class.class_path,
obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())
data = {'file': self.dummy_file}
form = job_class().as_form(files=data)
self.assertTrue(form.is_valid())
serialized_data = job_class.serialize_data(form.cleaned_data)
self.assertTrue(isinstance(serialized_data['file'], uuid.UUID))
self.assertEqual(serialized_data['file'], FileProxy.objects.
latest().pk)
self.assertEqual(FileProxy.objects.count(), 1)
run_job(data=serialized_data, request=None, commit=False,
job_result_pk=job_result.pk)
job_result.refresh_from_db()
self.assertEqual(job_result.data['run']['log'][0][2],
f'File contents: {self.file_contents}')
self.assertEqual(job_result.data['run']['log'][-1][-1],
'Database changes have been reverted due to error.')
self.assertEqual(FileProxy.objects.count(), 0)
| <mask token>
class JobTest(TestCase):
<mask token>
maxDiff = None
@classmethod
def setUpTestData(cls):
cls.job_content_type = ContentType.objects.get(app_label='extras',
model='job')
def test_job_pass(self):
"""
Job test with pass result.
"""
with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,
'extras/tests/dummy_jobs')):
module = 'test_pass'
name = 'TestPass'
job_class = get_job(f'local/{module}/{name}')
job_result = JobResult.objects.create(name=job_class.class_path,
obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())
run_job(data={}, request=None, commit=False, job_result_pk=
job_result.pk)
job_result.refresh_from_db()
self.assertEqual(job_result.status, JobResultStatusChoices.
STATUS_COMPLETED)
def test_job_fail(self):
"""
Job test with fail result.
"""
with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,
'extras/tests/dummy_jobs')):
module = 'test_fail'
name = 'TestFail'
job_class = get_job(f'local/{module}/{name}')
job_result = JobResult.objects.create(name=job_class.class_path,
obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())
run_job(data={}, request=None, commit=False, job_result_pk=
job_result.pk)
job_result.refresh_from_db()
self.assertEqual(job_result.status, JobResultStatusChoices.
STATUS_ERRORED)
def test_field_order(self):
"""
Job test with field order.
"""
with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,
'extras/tests/dummy_jobs')):
module = 'test_field_order'
name = 'TestFieldOrder'
job_class = get_job(f'local/{module}/{name}')
form = job_class().as_form()
self.assertHTMLEqual(form.as_table(),
"""<tr><th><label for="id_var2">Var2:</label></th><td>
<input class="form-control form-control" id="id_var2" name="var2" placeholder="None" required type="text">
<br><span class="helptext">Hello</span></td></tr>
<tr><th><label for="id_var23">Var23:</label></th><td>
<input class="form-control form-control" id="id_var23" name="var23" placeholder="None" required type="text">
<br><span class="helptext">I want to be second</span></td></tr>
<tr><th><label for="id__commit">Commit changes:</label></th><td>
<input checked id="id__commit" name="_commit" placeholder="Commit changes" type="checkbox">
<br><span class="helptext">Commit changes to the database (uncheck for a dry-run)</span></td></tr>"""
)
def test_no_field_order(self):
"""
Job test without field_order.
"""
with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,
'extras/tests/dummy_jobs')):
module = 'test_no_field_order'
name = 'TestNoFieldOrder'
job_class = get_job(f'local/{module}/{name}')
form = job_class().as_form()
self.assertHTMLEqual(form.as_table(),
"""<tr><th><label for="id_var23">Var23:</label></th><td>
<input class="form-control form-control" id="id_var23" name="var23" placeholder="None" required type="text">
<br><span class="helptext">I want to be second</span></td></tr>
<tr><th><label for="id_var2">Var2:</label></th><td>
<input class="form-control form-control" id="id_var2" name="var2" placeholder="None" required type="text">
<br><span class="helptext">Hello</span></td></tr>
<tr><th><label for="id__commit">Commit changes:</label></th><td>
<input checked id="id__commit" name="_commit" placeholder="Commit changes" type="checkbox">
<br><span class="helptext">Commit changes to the database (uncheck for a dry-run)</span></td></tr>"""
)
def test_ready_only_job_pass(self):
"""
Job read only test with pass result.
"""
with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,
'extras/tests/dummy_jobs')):
module = 'test_read_only_pass'
name = 'TestReadOnlyPass'
job_class = get_job(f'local/{module}/{name}')
job_result = JobResult.objects.create(name=job_class.class_path,
obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())
run_job(data={}, request=None, commit=False, job_result_pk=
job_result.pk)
job_result.refresh_from_db()
self.assertEqual(job_result.status, JobResultStatusChoices.
STATUS_COMPLETED)
self.assertEqual(Site.objects.count(), 0)
def test_read_only_job_fail(self):
"""
Job read only test with fail result.
"""
with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,
'extras/tests/dummy_jobs')):
module = 'test_read_only_fail'
name = 'TestReadOnlyFail'
job_class = get_job(f'local/{module}/{name}')
job_result = JobResult.objects.create(name=job_class.class_path,
obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())
run_job(data={}, request=None, commit=False, job_result_pk=
job_result.pk)
job_result.refresh_from_db()
self.assertEqual(job_result.status, JobResultStatusChoices.
STATUS_ERRORED)
self.assertEqual(Site.objects.count(), 0)
self.assertNotEqual(job_result.data['run']['log'][-1][-1],
'Database changes have been reverted due to error.')
def test_read_only_no_commit_field(self):
"""
Job read only test commit field is not shown.
"""
with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,
'extras/tests/dummy_jobs')):
module = 'test_read_only_no_commit_field'
name = 'TestReadOnlyNoCommitField'
job_class = get_job(f'local/{module}/{name}')
form = job_class().as_form()
self.assertHTMLEqual(form.as_table(),
"""<tr><th><label for="id_var">Var:</label></th><td>
<input class="form-control form-control" id="id_var" name="var" placeholder="None" required type="text">
<br><span class="helptext">Hello</span><input id="id__commit" name="_commit" type="hidden" value="False"></td></tr>"""
)
def test_ip_address_vars(self):
"""
Test that IPAddress variable fields behave as expected.
This test case exercises the following types for both IPv4 and IPv6:
- IPAddressVar
- IPAddressWithMaskVar
- IPNetworkVar
"""
with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,
'extras/tests/dummy_jobs')):
module = 'test_ipaddress_vars'
name = 'TestIPAddresses'
job_class = get_job(f'local/{module}/{name}')
form_data = dict(ipv4_address='1.2.3.4', ipv4_with_mask=
'1.2.3.4/32', ipv4_network='1.2.3.0/24', ipv6_address=
'2001:db8::1', ipv6_with_mask='2001:db8::1/64',
ipv6_network='2001:db8::/64')
form = job_class().as_form(form_data)
self.assertTrue(form.is_valid())
job_result = JobResult.objects.create(name=job_class.class_path,
obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())
data = job_class.serialize_data(form.cleaned_data)
run_job(data=data, request=None, commit=False, job_result_pk=
job_result.pk)
job_result.refresh_from_db()
job_payload = job_result.data['run']['log'][0][2]
job_result_data = json.loads(job_payload)
self.assertEqual(job_result.status, JobResultStatusChoices.
STATUS_COMPLETED)
self.assertEqual(form_data, job_result_data)
class JobFileUploadTest(TestCase):
"""Test a job that uploads/deletes files."""
@classmethod
def setUpTestData(cls):
cls.file_contents = b'I am content.\n'
cls.dummy_file = SimpleUploadedFile(name='dummy.txt', content=cls.
file_contents)
cls.job_content_type = ContentType.objects.get(app_label='extras',
model='job')
def setUp(self):
self.dummy_file.seek(0)
def test_run_job_pass(self):
"""Test that file upload succeeds; job SUCCEEDS; and files are deleted."""
with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,
'extras/tests/dummy_jobs')):
job_name = 'local/test_file_upload_pass/TestFileUploadPass'
job_class = get_job(job_name)
job_result = JobResult.objects.create(name=job_class.class_path,
obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())
data = {'file': self.dummy_file}
form = job_class().as_form(files=data)
self.assertTrue(form.is_valid())
serialized_data = job_class.serialize_data(form.cleaned_data)
self.assertTrue(isinstance(serialized_data['file'], uuid.UUID))
self.assertEqual(serialized_data['file'], FileProxy.objects.
latest().pk)
self.assertEqual(FileProxy.objects.count(), 1)
run_job(data=serialized_data, request=None, commit=False,
job_result_pk=job_result.pk)
job_result.refresh_from_db()
self.assertEqual(job_result.data['run']['log'][0][2],
f'File contents: {self.file_contents}')
self.assertEqual(FileProxy.objects.count(), 0)
def test_run_job_fail(self):
"""Test that file upload succeeds; job FAILS; files deleted."""
with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,
'extras/tests/dummy_jobs')):
job_name = 'local/test_file_upload_fail/TestFileUploadFail'
job_class = get_job(job_name)
job_result = JobResult.objects.create(name=job_class.class_path,
obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())
data = {'file': self.dummy_file}
form = job_class().as_form(files=data)
self.assertTrue(form.is_valid())
serialized_data = job_class.serialize_data(form.cleaned_data)
self.assertTrue(isinstance(serialized_data['file'], uuid.UUID))
self.assertEqual(serialized_data['file'], FileProxy.objects.
latest().pk)
self.assertEqual(FileProxy.objects.count(), 1)
run_job(data=serialized_data, request=None, commit=False,
job_result_pk=job_result.pk)
job_result.refresh_from_db()
self.assertEqual(job_result.data['run']['log'][0][2],
f'File contents: {self.file_contents}')
self.assertEqual(job_result.data['run']['log'][-1][-1],
'Database changes have been reverted due to error.')
self.assertEqual(FileProxy.objects.count(), 0)
| import json
import os
import uuid
from django.core.files.uploadedfile import SimpleUploadedFile
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from nautobot.dcim.models import Site
from nautobot.extras.choices import JobResultStatusChoices
from nautobot.extras.jobs import get_job, run_job
from nautobot.extras.models import FileAttachment, FileProxy, JobResult
from nautobot.utilities.testing import TestCase
class JobTest(TestCase):
"""
Test basic jobs to ensure importing works.
"""
maxDiff = None
@classmethod
def setUpTestData(cls):
cls.job_content_type = ContentType.objects.get(app_label="extras", model="job")
def test_job_pass(self):
"""
Job test with pass result.
"""
with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, "extras/tests/dummy_jobs")):
module = "test_pass"
name = "TestPass"
job_class = get_job(f"local/{module}/{name}")
job_result = JobResult.objects.create(
name=job_class.class_path,
obj_type=self.job_content_type,
user=None,
job_id=uuid.uuid4(),
)
run_job(data={}, request=None, commit=False, job_result_pk=job_result.pk)
job_result.refresh_from_db()
self.assertEqual(job_result.status, JobResultStatusChoices.STATUS_COMPLETED)
def test_job_fail(self):
"""
Job test with fail result.
"""
with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, "extras/tests/dummy_jobs")):
module = "test_fail"
name = "TestFail"
job_class = get_job(f"local/{module}/{name}")
job_result = JobResult.objects.create(
name=job_class.class_path,
obj_type=self.job_content_type,
user=None,
job_id=uuid.uuid4(),
)
run_job(data={}, request=None, commit=False, job_result_pk=job_result.pk)
job_result.refresh_from_db()
self.assertEqual(job_result.status, JobResultStatusChoices.STATUS_ERRORED)
def test_field_order(self):
"""
Job test with field order.
"""
with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, "extras/tests/dummy_jobs")):
module = "test_field_order"
name = "TestFieldOrder"
job_class = get_job(f"local/{module}/{name}")
form = job_class().as_form()
self.assertHTMLEqual(
form.as_table(),
"""<tr><th><label for="id_var2">Var2:</label></th><td>
<input class="form-control form-control" id="id_var2" name="var2" placeholder="None" required type="text">
<br><span class="helptext">Hello</span></td></tr>
<tr><th><label for="id_var23">Var23:</label></th><td>
<input class="form-control form-control" id="id_var23" name="var23" placeholder="None" required type="text">
<br><span class="helptext">I want to be second</span></td></tr>
<tr><th><label for="id__commit">Commit changes:</label></th><td>
<input checked id="id__commit" name="_commit" placeholder="Commit changes" type="checkbox">
<br><span class="helptext">Commit changes to the database (uncheck for a dry-run)</span></td></tr>""",
)
def test_no_field_order(self):
"""
Job test without field_order.
"""
with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, "extras/tests/dummy_jobs")):
module = "test_no_field_order"
name = "TestNoFieldOrder"
job_class = get_job(f"local/{module}/{name}")
form = job_class().as_form()
self.assertHTMLEqual(
form.as_table(),
"""<tr><th><label for="id_var23">Var23:</label></th><td>
<input class="form-control form-control" id="id_var23" name="var23" placeholder="None" required type="text">
<br><span class="helptext">I want to be second</span></td></tr>
<tr><th><label for="id_var2">Var2:</label></th><td>
<input class="form-control form-control" id="id_var2" name="var2" placeholder="None" required type="text">
<br><span class="helptext">Hello</span></td></tr>
<tr><th><label for="id__commit">Commit changes:</label></th><td>
<input checked id="id__commit" name="_commit" placeholder="Commit changes" type="checkbox">
<br><span class="helptext">Commit changes to the database (uncheck for a dry-run)</span></td></tr>""",
)
def test_ready_only_job_pass(self):
"""
Job read only test with pass result.
"""
with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, "extras/tests/dummy_jobs")):
module = "test_read_only_pass"
name = "TestReadOnlyPass"
job_class = get_job(f"local/{module}/{name}")
job_result = JobResult.objects.create(
name=job_class.class_path,
obj_type=self.job_content_type,
user=None,
job_id=uuid.uuid4(),
)
run_job(data={}, request=None, commit=False, job_result_pk=job_result.pk)
job_result.refresh_from_db()
self.assertEqual(job_result.status, JobResultStatusChoices.STATUS_COMPLETED)
self.assertEqual(Site.objects.count(), 0) # Ensure DB transaction was aborted
def test_read_only_job_fail(self):
"""
Job read only test with fail result.
"""
with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, "extras/tests/dummy_jobs")):
module = "test_read_only_fail"
name = "TestReadOnlyFail"
job_class = get_job(f"local/{module}/{name}")
job_result = JobResult.objects.create(
name=job_class.class_path,
obj_type=self.job_content_type,
user=None,
job_id=uuid.uuid4(),
)
run_job(data={}, request=None, commit=False, job_result_pk=job_result.pk)
job_result.refresh_from_db()
self.assertEqual(job_result.status, JobResultStatusChoices.STATUS_ERRORED)
self.assertEqual(Site.objects.count(), 0) # Ensure DB transaction was aborted
# Also ensure the standard log message about aborting the transaction is *not* present
self.assertNotEqual(
job_result.data["run"]["log"][-1][-1], "Database changes have been reverted due to error."
)
def test_read_only_no_commit_field(self):
"""
Job read only test commit field is not shown.
"""
with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, "extras/tests/dummy_jobs")):
module = "test_read_only_no_commit_field"
name = "TestReadOnlyNoCommitField"
job_class = get_job(f"local/{module}/{name}")
form = job_class().as_form()
self.assertHTMLEqual(
form.as_table(),
"""<tr><th><label for="id_var">Var:</label></th><td>
<input class="form-control form-control" id="id_var" name="var" placeholder="None" required type="text">
<br><span class="helptext">Hello</span><input id="id__commit" name="_commit" type="hidden" value="False"></td></tr>""",
)
def test_ip_address_vars(self):
"""
Test that IPAddress variable fields behave as expected.
This test case exercises the following types for both IPv4 and IPv6:
- IPAddressVar
- IPAddressWithMaskVar
- IPNetworkVar
"""
with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, "extras/tests/dummy_jobs")):
module = "test_ipaddress_vars"
name = "TestIPAddresses"
job_class = get_job(f"local/{module}/{name}")
# Fill out the form
form_data = dict(
ipv4_address="1.2.3.4",
ipv4_with_mask="1.2.3.4/32",
ipv4_network="1.2.3.0/24",
ipv6_address="2001:db8::1",
ipv6_with_mask="2001:db8::1/64",
ipv6_network="2001:db8::/64",
)
form = job_class().as_form(form_data)
self.assertTrue(form.is_valid())
# Prepare the job data
job_result = JobResult.objects.create(
name=job_class.class_path,
obj_type=self.job_content_type,
user=None,
job_id=uuid.uuid4(),
)
data = job_class.serialize_data(form.cleaned_data)
# Run the job and extract the job payload data
run_job(data=data, request=None, commit=False, job_result_pk=job_result.pk)
job_result.refresh_from_db()
job_payload = job_result.data["run"]["log"][0][2] # Indexing makes me sad.
job_result_data = json.loads(job_payload)
# Assert stuff
self.assertEqual(job_result.status, JobResultStatusChoices.STATUS_COMPLETED)
self.assertEqual(form_data, job_result_data)
class JobFileUploadTest(TestCase):
"""Test a job that uploads/deletes files."""
@classmethod
def setUpTestData(cls):
cls.file_contents = b"I am content.\n"
cls.dummy_file = SimpleUploadedFile(name="dummy.txt", content=cls.file_contents)
cls.job_content_type = ContentType.objects.get(app_label="extras", model="job")
def setUp(self):
self.dummy_file.seek(0) # Reset cursor so we can read it again.
def test_run_job_pass(self):
"""Test that file upload succeeds; job SUCCEEDS; and files are deleted."""
with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, "extras/tests/dummy_jobs")):
job_name = "local/test_file_upload_pass/TestFileUploadPass"
job_class = get_job(job_name)
job_result = JobResult.objects.create(
name=job_class.class_path,
obj_type=self.job_content_type,
user=None,
job_id=uuid.uuid4(),
)
# Serialize the file to FileProxy
data = {"file": self.dummy_file}
form = job_class().as_form(files=data)
self.assertTrue(form.is_valid())
serialized_data = job_class.serialize_data(form.cleaned_data)
# Assert that the file was serialized to a FileProxy
self.assertTrue(isinstance(serialized_data["file"], uuid.UUID))
self.assertEqual(serialized_data["file"], FileProxy.objects.latest().pk)
self.assertEqual(FileProxy.objects.count(), 1)
# Run the job
run_job(data=serialized_data, request=None, commit=False, job_result_pk=job_result.pk)
job_result.refresh_from_db()
# Assert that file contents were correctly read
self.assertEqual(
job_result.data["run"]["log"][0][2], f"File contents: {self.file_contents}" # "File contents: ..."
)
# Assert that FileProxy was cleaned up
self.assertEqual(FileProxy.objects.count(), 0)
def test_run_job_fail(self):
"""Test that file upload succeeds; job FAILS; files deleted."""
with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, "extras/tests/dummy_jobs")):
job_name = "local/test_file_upload_fail/TestFileUploadFail"
job_class = get_job(job_name)
job_result = JobResult.objects.create(
name=job_class.class_path,
obj_type=self.job_content_type,
user=None,
job_id=uuid.uuid4(),
)
# Serialize the file to FileProxy
data = {"file": self.dummy_file}
form = job_class().as_form(files=data)
self.assertTrue(form.is_valid())
serialized_data = job_class.serialize_data(form.cleaned_data)
# Assert that the file was serialized to a FileProxy
self.assertTrue(isinstance(serialized_data["file"], uuid.UUID))
self.assertEqual(serialized_data["file"], FileProxy.objects.latest().pk)
self.assertEqual(FileProxy.objects.count(), 1)
# Run the job
run_job(data=serialized_data, request=None, commit=False, job_result_pk=job_result.pk)
job_result.refresh_from_db()
# Assert that file contents were correctly read
self.assertEqual(
job_result.data["run"]["log"][0][2], f"File contents: {self.file_contents}" # "File contents: ..."
)
# Also ensure the standard log message about aborting the transaction is present
self.assertEqual(job_result.data["run"]["log"][-1][-1], "Database changes have been reverted due to error.")
# Assert that FileProxy was cleaned up
self.assertEqual(FileProxy.objects.count(), 0)
| [
10,
15,
16,
17,
20
] |
905 | 48f2cc5b6d53c7317ad882947cabbc367cda0fb7 | <mask token>
def linear_combination_plus_error(X, num_dependent_cols=5, parameter_mean=0,
parameter_std=1, error_mean=0, error_std=1):
"""
Generate a column that is a random linear combination of
X1, X2 and X3 plus some random error
"""
length = X.shape[0]
param = np.random.normal(loc=parameter_mean, scale=parameter_std, size=
(num_dependent_cols,))
error = np.random.normal(loc=error_mean, scale=error_std, size=(length,))
result = np.zeros(length)
for i in range(num_dependent_cols):
result += param[i] * X[:, i]
return result + error
<mask token>
| <mask token>
def linear_combination_plus_error(X, num_dependent_cols=5, parameter_mean=0,
parameter_std=1, error_mean=0, error_std=1):
"""
Generate a column that is a random linear combination of
X1, X2 and X3 plus some random error
"""
length = X.shape[0]
param = np.random.normal(loc=parameter_mean, scale=parameter_std, size=
(num_dependent_cols,))
error = np.random.normal(loc=error_mean, scale=error_std, size=(length,))
result = np.zeros(length)
for i in range(num_dependent_cols):
result += param[i] * X[:, i]
return result + error
np.random.seed(472)
<mask token>
for i in range(num_independent_cols):
X[:, i] = np.random.normal(np.random.uniform(-5, 5), np.random.uniform(
1, 5), size=(num_data,))
for i in range(3, 1000):
X[:, i] = linear_combination_plus_error(X, num_dependent_cols=
num_independent_cols, parameter_std=2, error_std=1)
<mask token>
np.random.shuffle(col_nums)
<mask token>
X[:, 1000] += abs(min(X[:, 1000])) + 5
<mask token>
X1_df.to_csv('./sensors1.csv', header=None, index=None)
<mask token>
X2_df.to_csv('./sensors2.csv', header=None, index=None)
| <mask token>
def linear_combination_plus_error(X, num_dependent_cols=5, parameter_mean=0,
parameter_std=1, error_mean=0, error_std=1):
"""
Generate a column that is a random linear combination of
X1, X2 and X3 plus some random error
"""
length = X.shape[0]
param = np.random.normal(loc=parameter_mean, scale=parameter_std, size=
(num_dependent_cols,))
error = np.random.normal(loc=error_mean, scale=error_std, size=(length,))
result = np.zeros(length)
for i in range(num_dependent_cols):
result += param[i] * X[:, i]
return result + error
np.random.seed(472)
num_data = 10100
num_independent_cols = 3
X = np.zeros((num_data, 1001))
for i in range(num_independent_cols):
X[:, i] = np.random.normal(np.random.uniform(-5, 5), np.random.uniform(
1, 5), size=(num_data,))
for i in range(3, 1000):
X[:, i] = linear_combination_plus_error(X, num_dependent_cols=
num_independent_cols, parameter_std=2, error_std=1)
col_nums = list(range(1000))
np.random.shuffle(col_nums)
X[:, list(range(1000))] = X[:, col_nums]
X[:, 1000] = linear_combination_plus_error(X, num_dependent_cols=
num_independent_cols, parameter_mean=5, parameter_std=2)
X[:, 1000] += abs(min(X[:, 1000])) + 5
X = np.floor(X * 1000) / 1000
X1 = X[:10000, :]
X2 = X[10000:, :]
X1_df = pd.DataFrame(X1)
X1_df.to_csv('./sensors1.csv', header=None, index=None)
X2_df = pd.DataFrame(X2)
X2_df.to_csv('./sensors2.csv', header=None, index=None)
| import random
import numpy as np
import pandas as pd
def linear_combination_plus_error(X, num_dependent_cols=5, parameter_mean=0,
parameter_std=1, error_mean=0, error_std=1):
"""
Generate a column that is a random linear combination of
X1, X2 and X3 plus some random error
"""
length = X.shape[0]
param = np.random.normal(loc=parameter_mean, scale=parameter_std, size=
(num_dependent_cols,))
error = np.random.normal(loc=error_mean, scale=error_std, size=(length,))
result = np.zeros(length)
for i in range(num_dependent_cols):
result += param[i] * X[:, i]
return result + error
np.random.seed(472)
num_data = 10100
num_independent_cols = 3
X = np.zeros((num_data, 1001))
for i in range(num_independent_cols):
X[:, i] = np.random.normal(np.random.uniform(-5, 5), np.random.uniform(
1, 5), size=(num_data,))
for i in range(3, 1000):
X[:, i] = linear_combination_plus_error(X, num_dependent_cols=
num_independent_cols, parameter_std=2, error_std=1)
col_nums = list(range(1000))
np.random.shuffle(col_nums)
X[:, list(range(1000))] = X[:, col_nums]
X[:, 1000] = linear_combination_plus_error(X, num_dependent_cols=
num_independent_cols, parameter_mean=5, parameter_std=2)
X[:, 1000] += abs(min(X[:, 1000])) + 5
X = np.floor(X * 1000) / 1000
X1 = X[:10000, :]
X2 = X[10000:, :]
X1_df = pd.DataFrame(X1)
X1_df.to_csv('./sensors1.csv', header=None, index=None)
X2_df = pd.DataFrame(X2)
X2_df.to_csv('./sensors2.csv', header=None, index=None)
| import random
import numpy as np
import pandas as pd
def linear_combination_plus_error(X, num_dependent_cols=5, parameter_mean=0, parameter_std=1, error_mean=0, error_std=1):
"""
Generate a column that is a random linear combination of
X1, X2 and X3 plus some random error
"""
length = X.shape[0]
param = np.random.normal(loc=parameter_mean,
scale=parameter_std,
size=(num_dependent_cols,))
error = np.random.normal(loc=error_mean,
scale=error_std,
size=(length,))
result = np.zeros(length,)
for i in range(num_dependent_cols):
result += param[i] * X[:, i]
return result + error
np.random.seed(472)
num_data = 10100
num_independent_cols = 3
X = np.zeros((num_data, 1001))
# Generate 3 principal components
for i in range(num_independent_cols):
X[:, i] = np.random.normal(np.random.uniform(-5, 5),
np.random.uniform(1, 5), size=(num_data,))
# Generate other columns
for i in range(3, 1000):
X[:, i] = linear_combination_plus_error(X, num_dependent_cols=num_independent_cols, parameter_std=2, error_std=1)
# Randomly suffle the 1000 feature columns
col_nums = list(range(1000))
np.random.shuffle(col_nums)
X[:, list(range(1000))] = X[:, col_nums]
# Randomly generate Y
X[:, 1000] = linear_combination_plus_error(X, num_dependent_cols=num_independent_cols, parameter_mean=5, parameter_std=2)
X[:, 1000] += abs(min(X[:, 1000])) + 5
# Take only three digits after decimal point
X = np.floor(X * 1000) / 1000
# Split the data into 2 files
X1 = X[:10000, :]
X2 = X[10000:, :]
X1_df = pd.DataFrame(X1)
X1_df.to_csv("./sensors1.csv", header=None, index=None)
X2_df = pd.DataFrame(X2)
X2_df.to_csv("./sensors2.csv", header=None, index=None)
| [
1,
2,
3,
4,
5
] |
906 | 6ae529a5e5658ba409ec3e7284d8b2911c60dd00 | <mask token>
| <mask token>
df.to_csv('linkedin_jobs.csv', index=False)
| <mask token>
chrome_driver_path = os.path.join(os.path.abspath(os.getcwd()), 'chromedriver')
df = get_jobs('Data Scientist', 40, False, chrome_driver_path)
df.to_csv('linkedin_jobs.csv', index=False)
| import os
from linkedin_scraper import get_jobs
chrome_driver_path = os.path.join(os.path.abspath(os.getcwd()), 'chromedriver')
df = get_jobs('Data Scientist', 40, False, chrome_driver_path)
df.to_csv('linkedin_jobs.csv', index=False)
| null | [
0,
1,
2,
3
] |
907 | d268f8d563aac28852457f6f130b2fb4ea6269a2 | <mask token>
| <mask token>
print('Perpleksitet til news: %.2f' % perpNews)
print('Perpleksitet til adventure: %.2f' % perpAdventure)
<mask token>
for sekvens in zippy:
print('Ord: %4s Antall: %4d Sekvens: %.4f ' % (sekvens[0], sekvens[1],
sekvens[2]))
<mask token>
print('Standard vs modifisert tagging ved hjelp av reguleart uttrykk')
print("Med corpus: 'adventure'")
print(' Standard: %4.2f Modifisert: %4.2f ' % (checkTaggStandardFic,
checkTaggModifiedAdv))
print("Med corpus: 'fiction'")
print(' Standard: %4.2f Modifisert: %4.2f ' % (checkTaggStandardFic,
checkTaggModifiedFic))
<mask token>
for line in infile:
words = line.split(' ')
tekst.append(words)
infile.close()
<mask token>
for sentence in taggerTekst:
for taggs in sentence:
print(taggs)
<mask token>
| <mask token>
m = LM()
news = nltk.corpus.brown.sents(categories='news')
adventure = nltk.corpus.brown.sents(categories='adventure')
perpNews = 0.0
perpAdventure = 0.0
perpNews = m.perplexity(news)
perpAdventure = m.perplexity(adventure)
print('Perpleksitet til news: %.2f' % perpNews)
print('Perpleksitet til adventure: %.2f' % perpAdventure)
<mask token>
zippy = m.zipfity(news)
for sekvens in zippy:
print('Ord: %4s Antall: %4d Sekvens: %.4f ' % (sekvens[0], sekvens[1],
sekvens[2]))
<mask token>
brown_tagged_sents = nltk.corpus.brown.tagged_sents(categories='adventure')
adventure = [[w.lower() for w in line] for line in nltk.corpus.brown.sents(
categories='adventure')]
checkTaggStandardAdv = m.analyseRegularTagger('adventure')
checkTaggStandardFic = m.analyseRegularTagger('fiction')
checkTaggModifiedAdv = m.analyseRegularTagger('adventure', 'modified')
checkTaggModifiedFic = m.analyseRegularTagger('fiction', 'modified')
print('Standard vs modifisert tagging ved hjelp av reguleart uttrykk')
print("Med corpus: 'adventure'")
print(' Standard: %4.2f Modifisert: %4.2f ' % (checkTaggStandardFic,
checkTaggModifiedAdv))
print("Med corpus: 'fiction'")
print(' Standard: %4.2f Modifisert: %4.2f ' % (checkTaggStandardFic,
checkTaggModifiedFic))
infile = open('test_setninger.txt')
tekst = []
for line in infile:
words = line.split(' ')
tekst.append(words)
infile.close()
tekst = [[w.lower() for w in line] for line in tekst]
taggerTekst = m.regularTagger(tekst, 'modified')
for sentence in taggerTekst:
for taggs in sentence:
print(taggs)
<mask token>
| import nltk
from nltk import bigrams
from lm import *
m = LM()
news = nltk.corpus.brown.sents(categories='news')
adventure = nltk.corpus.brown.sents(categories='adventure')
perpNews = 0.0
perpAdventure = 0.0
perpNews = m.perplexity(news)
perpAdventure = m.perplexity(adventure)
print('Perpleksitet til news: %.2f' % perpNews)
print('Perpleksitet til adventure: %.2f' % perpAdventure)
<mask token>
zippy = m.zipfity(news)
for sekvens in zippy:
print('Ord: %4s Antall: %4d Sekvens: %.4f ' % (sekvens[0], sekvens[1],
sekvens[2]))
<mask token>
brown_tagged_sents = nltk.corpus.brown.tagged_sents(categories='adventure')
adventure = [[w.lower() for w in line] for line in nltk.corpus.brown.sents(
categories='adventure')]
checkTaggStandardAdv = m.analyseRegularTagger('adventure')
checkTaggStandardFic = m.analyseRegularTagger('fiction')
checkTaggModifiedAdv = m.analyseRegularTagger('adventure', 'modified')
checkTaggModifiedFic = m.analyseRegularTagger('fiction', 'modified')
print('Standard vs modifisert tagging ved hjelp av reguleart uttrykk')
print("Med corpus: 'adventure'")
print(' Standard: %4.2f Modifisert: %4.2f ' % (checkTaggStandardFic,
checkTaggModifiedAdv))
print("Med corpus: 'fiction'")
print(' Standard: %4.2f Modifisert: %4.2f ' % (checkTaggStandardFic,
checkTaggModifiedFic))
infile = open('test_setninger.txt')
tekst = []
for line in infile:
words = line.split(' ')
tekst.append(words)
infile.close()
tekst = [[w.lower() for w in line] for line in tekst]
taggerTekst = m.regularTagger(tekst, 'modified')
for sentence in taggerTekst:
for taggs in sentence:
print(taggs)
<mask token>
| import nltk
from nltk import bigrams
from lm import *
# Oppgave 1:
# opretter LM klasse til aa perpleksitere news og adventure
m = LM()
# Henter news og adventure for videre bruk
news=nltk.corpus.brown.sents(categories='news')
adventure=nltk.corpus.brown.sents(categories='adventure')
# initial parametre
perpNews = 0.0
perpAdventure = 0.0
# beregner perplexitet:
perpNews = m.perplexity(news)
perpAdventure = m.perplexity(adventure)
# printer ut perplexitet.
print("Perpleksitet til news: %.2f" %perpNews)
print("Perpleksitet til adventure: %.2f" %perpAdventure)
""" Oppgave 1 - evaluering av spraakmodeller
$ python oblig2b_steinrr.py
Perpleksitet til news: 72.69
Perpleksitet til adventure: 117.41
Perpleksiteten tiil adventure er hoeyeere fordi klassifikatoren vi benytter i LM er ikke trent paa dette korpuset.
Perpleksiteten til news ville ha veart lavere hvis klassifikatoren vi benytter hadde bare veart trent paa news.
Men dette er ikke bra pga da ville perpleksiteten til adventure veare enda hoyere enn den er naa.
"""
zippy = m.zipfity(news)
for sekvens in zippy:
print("Ord: %4s Antall: %4d Sekvens: %.4f " %(sekvens[0], sekvens[1], sekvens[2]))
""" Oppgave 2 - Zipfianske distribusjon
Ord: the Antall: 6386 Sekvens: 6386.0000
Ord: , Antall: 5188 Sekvens: 2594.0000
Ord: . Antall: 4030 Sekvens: 1343.3333
Ord: of Antall: 2861 Sekvens: 715.2500
Ord: and Antall: 2186 Sekvens: 437.2000
Ord: to Antall: 2144 Sekvens: 357.3333
Ord: a Antall: 2130 Sekvens: 304.2857
Ord: in Antall: 2020 Sekvens: 252.5000
Ord: for Antall: 969 Sekvens: 107.6667
Ord: that Antall: 829 Sekvens: 82.9000
"""
brown_tagged_sents = nltk.corpus.brown.tagged_sents(categories='adventure')
adventure = [[w.lower() for w in line] for line in nltk.corpus.brown.sents(categories='adventure')]
#m.regularTagger(adventure)
checkTaggStandardAdv = m.analyseRegularTagger('adventure')
checkTaggStandardFic = m.analyseRegularTagger('fiction')
checkTaggModifiedAdv = m.analyseRegularTagger('adventure', 'modified')
checkTaggModifiedFic = m.analyseRegularTagger('fiction', 'modified')
print("Standard vs modifisert tagging ved hjelp av reguleart uttrykk")
print("Med corpus: 'adventure'")
print(" Standard: %4.2f Modifisert: %4.2f " %(checkTaggStandardFic, checkTaggModifiedAdv))
print("Med corpus: 'fiction'")
print(" Standard: %4.2f Modifisert: %4.2f " %(checkTaggStandardFic, checkTaggModifiedFic))
infile = open("test_setninger.txt")
tekst = []
for line in infile:
words = line.split(" ")
tekst.append(words)
infile.close()
# fikser at alle ord har smaa bokstaver:
tekst = [[w.lower() for w in line] for line in tekst]
taggerTekst = m.regularTagger(tekst, 'modified')
for sentence in taggerTekst:
for taggs in sentence:
print(taggs)
""" Oppgave 3 - Ordklassetagging med regulære uttrykk
Standard vs modifisert tagging ved hjelp av reguleart uttrykk
Med corpus: 'adventure'
Standard: 0.18 Modifisert: 0.41
Med corpus: 'fiction'
Standard: 0.18 Modifisert: 0.40
...
..
... skriver ut tagger som blir kopiert inn til test_setninger_m_taggs.txt
..
Kommentarer for ytterligere forbedrelser:
1. said skulle ha veart kattegorisert som verb: VBD
2. he burde veare et pronom
3. had burde veare et verb til have
oppdatere reguleare utrykk:
1 og 3: (r'(.*ed|.*id|had)$', 'VBD')
2. regler for pronoum har jeg ikke lagt inn i det hele tatt saa dette er noe som
kan tilfoeres
"""
| [
0,
1,
2,
3,
4
] |
908 | 2f489a87e40bea979000dd429cc4cb0150ff4c3b | <mask token>
def get_level_diff(word, only_common=False):
if only_common:
word_df = df[(df['word'] == word) & (df['common'] == 1)]
else:
word_df = df[df['word'] == word]
return (word_df.values[0][3], word_df.values[0][8]) if len(word_df
) > 0 else (None, None)
<mask token>
def translate_words(words, target):
key = 'AIzaSyCmB0XTpv7PBLGllUBGyTVZ8syJJz2rL-w'
words_string = ''
for word in words:
words_string += '&q='
words_string += word
url = (
f'https://translation.googleapis.com/language/translate/v2?target={target}&key={key}{words_string}'
)
res = json.loads(requests.get(url).content)['data']['translations']
return [s['translatedText'] for s in res]
def hello_http(request):
request_args = request.args
if request_args and 'words' in request_args:
words = json.loads(request_args['words'])
if isinstance(words, list) and len(words) > 0:
target = request_args.get('target', 'es')
by_str = request_args.get('by', 'level')
by = 1 if by_str == 'freq' else 0
reverse = request_args.get('reverse', 'false') == 'true'
only_common = request_args.get('only-common', 'false') == 'true'
results = order_words(words, by=by, reverse=reverse,
only_common=only_common)
translated = translate_words([result[0] for result in results],
target)
return json.dumps([[results[i][0], results[i][1], translated[i]
] for i in range(len(results))])
else:
return 'not list'
else:
return 'error'
| <mask token>
def get_level_diff(word, only_common=False):
if only_common:
word_df = df[(df['word'] == word) & (df['common'] == 1)]
else:
word_df = df[df['word'] == word]
return (word_df.values[0][3], word_df.values[0][8]) if len(word_df
) > 0 else (None, None)
def order_words(words, by=0, reverse=False, only_common=False):
if by not in {0, 1}:
raise Exception('by is either 0 (by level), 1 (by frequency)')
if by == 1:
reverse = not reverse
word_results = []
for word in words:
level, freq = get_level_diff(word, only_common=only_common)
if level != None:
if by == 0:
word_results.append((word, level))
else:
word_results.append((word, freq))
word_results.sort(key=lambda x: x[1], reverse=reverse)
return word_results
def translate_words(words, target):
key = 'AIzaSyCmB0XTpv7PBLGllUBGyTVZ8syJJz2rL-w'
words_string = ''
for word in words:
words_string += '&q='
words_string += word
url = (
f'https://translation.googleapis.com/language/translate/v2?target={target}&key={key}{words_string}'
)
res = json.loads(requests.get(url).content)['data']['translations']
return [s['translatedText'] for s in res]
def hello_http(request):
request_args = request.args
if request_args and 'words' in request_args:
words = json.loads(request_args['words'])
if isinstance(words, list) and len(words) > 0:
target = request_args.get('target', 'es')
by_str = request_args.get('by', 'level')
by = 1 if by_str == 'freq' else 0
reverse = request_args.get('reverse', 'false') == 'true'
only_common = request_args.get('only-common', 'false') == 'true'
results = order_words(words, by=by, reverse=reverse,
only_common=only_common)
translated = translate_words([result[0] for result in results],
target)
return json.dumps([[results[i][0], results[i][1], translated[i]
] for i in range(len(results))])
else:
return 'not list'
else:
return 'error'
| <mask token>
with open('result.csv', newline='') as f:
df = pd.read_csv(f)
def get_level_diff(word, only_common=False):
if only_common:
word_df = df[(df['word'] == word) & (df['common'] == 1)]
else:
word_df = df[df['word'] == word]
return (word_df.values[0][3], word_df.values[0][8]) if len(word_df
) > 0 else (None, None)
def order_words(words, by=0, reverse=False, only_common=False):
if by not in {0, 1}:
raise Exception('by is either 0 (by level), 1 (by frequency)')
if by == 1:
reverse = not reverse
word_results = []
for word in words:
level, freq = get_level_diff(word, only_common=only_common)
if level != None:
if by == 0:
word_results.append((word, level))
else:
word_results.append((word, freq))
word_results.sort(key=lambda x: x[1], reverse=reverse)
return word_results
def translate_words(words, target):
key = 'AIzaSyCmB0XTpv7PBLGllUBGyTVZ8syJJz2rL-w'
words_string = ''
for word in words:
words_string += '&q='
words_string += word
url = (
f'https://translation.googleapis.com/language/translate/v2?target={target}&key={key}{words_string}'
)
res = json.loads(requests.get(url).content)['data']['translations']
return [s['translatedText'] for s in res]
def hello_http(request):
request_args = request.args
if request_args and 'words' in request_args:
words = json.loads(request_args['words'])
if isinstance(words, list) and len(words) > 0:
target = request_args.get('target', 'es')
by_str = request_args.get('by', 'level')
by = 1 if by_str == 'freq' else 0
reverse = request_args.get('reverse', 'false') == 'true'
only_common = request_args.get('only-common', 'false') == 'true'
results = order_words(words, by=by, reverse=reverse,
only_common=only_common)
translated = translate_words([result[0] for result in results],
target)
return json.dumps([[results[i][0], results[i][1], translated[i]
] for i in range(len(results))])
else:
return 'not list'
else:
return 'error'
| from flask import escape
import pandas as pd
import json
import requests
with open('result.csv', newline='') as f:
df = pd.read_csv(f)
def get_level_diff(word, only_common=False):
if only_common:
word_df = df[(df['word'] == word) & (df['common'] == 1)]
else:
word_df = df[df['word'] == word]
return (word_df.values[0][3], word_df.values[0][8]) if len(word_df
) > 0 else (None, None)
def order_words(words, by=0, reverse=False, only_common=False):
if by not in {0, 1}:
raise Exception('by is either 0 (by level), 1 (by frequency)')
if by == 1:
reverse = not reverse
word_results = []
for word in words:
level, freq = get_level_diff(word, only_common=only_common)
if level != None:
if by == 0:
word_results.append((word, level))
else:
word_results.append((word, freq))
word_results.sort(key=lambda x: x[1], reverse=reverse)
return word_results
def translate_words(words, target):
key = 'AIzaSyCmB0XTpv7PBLGllUBGyTVZ8syJJz2rL-w'
words_string = ''
for word in words:
words_string += '&q='
words_string += word
url = (
f'https://translation.googleapis.com/language/translate/v2?target={target}&key={key}{words_string}'
)
res = json.loads(requests.get(url).content)['data']['translations']
return [s['translatedText'] for s in res]
def hello_http(request):
request_args = request.args
if request_args and 'words' in request_args:
words = json.loads(request_args['words'])
if isinstance(words, list) and len(words) > 0:
target = request_args.get('target', 'es')
by_str = request_args.get('by', 'level')
by = 1 if by_str == 'freq' else 0
reverse = request_args.get('reverse', 'false') == 'true'
only_common = request_args.get('only-common', 'false') == 'true'
results = order_words(words, by=by, reverse=reverse,
only_common=only_common)
translated = translate_words([result[0] for result in results],
target)
return json.dumps([[results[i][0], results[i][1], translated[i]
] for i in range(len(results))])
else:
return 'not list'
else:
return 'error'
| from flask import escape
import pandas as pd
import json
import requests
with open('result.csv', newline='') as f:
df = pd.read_csv(f)
def get_level_diff(word, only_common=False):
if only_common:
word_df = df[(df['word']==word) & (df['common']==1)]
else:
word_df = df[df['word']==word]
return (word_df.values[0][3], word_df.values[0][8]) if len(word_df) > 0 else (None, None)
# order words based on either level or frequency.
def order_words(words, by=0, reverse=False, only_common=False):
if (by not in {0, 1}): raise Exception("by is either 0 (by level), 1 (by frequency)")
if (by == 1): reverse = not reverse
word_results = []
for word in words:
level, freq = get_level_diff(word, only_common=only_common)
if level != None:
if by == 0:
word_results.append((word, level))
else:
word_results.append((word, freq))
word_results.sort(key=lambda x : x[1], reverse=reverse)
return word_results
def translate_words(words, target):
key = "AIzaSyCmB0XTpv7PBLGllUBGyTVZ8syJJz2rL-w"
words_string = ""
for word in words:
words_string += "&q="
words_string += word
url = f"https://translation.googleapis.com/language/translate/v2?target={target}&key={key}{words_string}"
res = json.loads(requests.get(url).content)['data']['translations']
return [s['translatedText'] for s in res]
def hello_http(request):
request_args = request.args
#'words', 'lang-from', 'lang-to', 'by', 'reverse'
if request_args and 'words' in request_args:
words = json.loads(request_args['words'])
if isinstance(words, list) and len(words) > 0:
target = request_args.get('target', 'es')
by_str = request_args.get('by', 'level')
by = 1 if by_str == 'freq' else 0
reverse = request_args.get('reverse', 'false') == 'true'
only_common = request_args.get('only-common', 'false') == 'true'
results = order_words(words, by=by, reverse=reverse, only_common=only_common)
translated = translate_words([result[0] for result in results], target)
return json.dumps([[results[i][0], results[i][1], translated[i]] for i in range(len(results))])
else:
return "not list"
else:
return "error" | [
3,
4,
5,
6,
7
] |
909 | 7a65a5522db97a7a113a412883b640feede5bcee | from layout import UIDump
import Tkinter
from Tkinter import *
from ScriptGenerator import ScriptGen
class Divide_and_Conquer():
def __init__(self, XY):
self.XY = XY
self.user_val = 'None'
self.flag = 'green'
print self.XY
def bounds_Compare(self, bounds, filename):
""" Compares the bounds with Master XY and generates the Script fro given Element. """
# removed "android.widget.Spinner", "android.widget.ExpandableListView" from reqlist, it's interfering with the view.
reqlist = ["android.widget.EditText",
"android.widget.Button", "android.widget.CheckBox", "android.widget.RadioButton", "android.widget.TextView", "android.widget.RelativeLayout",
"android.widget.ImageView", "android.app.Dialogue", "android.view.View"]
ignore_list = [None,'','None']
collection = []
logs = []
count = 0
len_bounds = len(bounds)
for i in bounds:
print '\n ---------------------------------------------- \n'
# print "for every bound block" ----> DEBUG < -----
if int(bounds[count][2]) <= self.XY[1] <= int(bounds[count][3]):
if int(bounds[count][0]) <= self.XY[0] <= int(bounds[count][1]):
# print "current X_Y : ", str(self.XY)
# print "current bounds : ", str(UIDump.bounds[count])
# print "unique id : ", str(UIDump.check_unique_id[count])
# print "resource id : ", str(UIDump.check_resource_id[count])
# print "current text : ", str(UIDump.check_text[count])
# print "in range block" ----> DEBUG < -----
if UIDump.elements[count] in reqlist:
# print "in reqlist block" ----> DEBUG < -----
if UIDump.elements[count] == reqlist[0]:
# print "EditText block" ----> DEBUG < -----
window = Tkinter.Tk()
window.resizable(width=False,height=False);
window.geometry("200x80")
l1=Label(window,width=30,text="Enter Text to Type: ")
l1.pack()
self.entry_id = StringVar()
e1 = Entry(window, width=30,textvariable=self.entry_id)
e1.pack()
def input(args= None):
self.user_val = e1.get()
window.destroy()
if self.resource_id not in ignore_list:
ScriptGen(filename).script("vc.findViewByIdOrRaise('{id}').setText('{text}')\n".format(id=self.resource_id, text=self.user_val))
ScriptGen(filename).log("#LOG({classname}): Cleared and Typed : '{text}' on id : '{id}'\n".format(classname =self.classname,text=self.user_val, id=self.resource_id))
elif self.unique_id not in ignore_list:
ScriptGen(filename).script("vc.findViewByIdOrRaise('{id}').setText('{text}')\n".format(id=self.unique_id, text=self.user_val))
ScriptGen(filename).log("#LOG({classname}): Cleared and Typed : '{text}'\n".format(classname =self.classname,text=self.user_val))
elif UIDump.check_text[count] not in ignore_list:
ScriptGen(filename).script("vc.findViewWithTextOrRaise('{id_text}').setText('{text}')\n".format(id_text=UIDump.check_text[count], text=self.user_val))
ScriptGen(filename).log("#LOG({classname}): Cleared and Typed : '{text}' on Element with text : '{id_text}'\n".format(classname =self.classname,id_text=UIDump.check_text[count], text=self.user_val))
else :
ScriptGen(filename).script("device.touchDip({X},{Y},0)\n".format(X=int(self.XY[0]), Y=int(self.XY[1])))
ScriptGen(filename).log("#LOG({classname}): Vulnerable/Unstable field on co-ordinates ({X},{Y})\n".format(classname ="Vulnerable",X=int(self.XY[0]), Y=int(self.XY[1])))
def framedestroy():
window.destroy()
self.unique_id = UIDump.check_unique_id[count]
self.resource_id = UIDump.check_resource_id[count]
self.classname = UIDump.check_className[count]
b1=Button(window,text="Ok",width=10, command = input)
b1.pack(side=LEFT)
b1.place(x=10,y=50)
b2=Button(window, text = "Cancel", width=10, command = framedestroy)
b2.pack(side=RIGHT)
b2.place(x=110,y=50)
window.bind('<Return>', input)
window.mainloop()
self.flag = 'red'
break
elif UIDump.elements[count] in reqlist[1:4]:
# print "Button block" ----> DEBUG < -----
self.unique_id = UIDump.check_unique_id[count]
self.resource_id = UIDump.check_resource_id[count]
self.classname = UIDump.check_className[count]
if UIDump.check_text[count] not in ignore_list:
log_ = "#LOG({classname}): Clicked on element with text : '{id}'\n".format(classname =self.classname,id=UIDump.check_text[count])
line = "vc.findViewWithTextOrRaise('{id}').touch()\n\tvc.sleep(3)\n".format(id=UIDump.check_text[count])
if line not in collection:
collection.append(line)
logs.append(log_)
break
elif self.resource_id not in ignore_list:
log_ = "#LOG({classname}): Clicked on : '{id}'\n".format(classname =self.classname,id=self.resource_id)
line = "vc.findViewByIdOrRaise('{id}').touch()\n\tvc.sleep(3)\n".format(id=self.resource_id)
if line not in collection:
collection.append(line)
logs.append(log_)
break
elif self.unique_id not in ignore_list:
log_ = "#LOG({classname}): Clicked on : '{id}'\n".format(classname =self.classname,id=self.unique_id)
line = "vc.findViewByIdOrRaise('{id_text}').touch()\n\tvc.sleep(3)\n".format(id_text=self.unique_id)
if line not in collection:
collection.append(line)
logs.append(log_)
break
else :
log_ = "#LOG({classname}): Vulnerable/Unstable field on co-ordinates ({X},{Y})\n".format(classname =self.classname,X=int(self.XY[0]), Y=int(self.XY[1]))
line = "device.touchDip({X},{Y},0)\n\tvc.sleep(3)\n".format(X=int(self.XY[0]), Y=int(self.XY[1]))
if line not in collection:
collection.append(line)
logs.append(log_)
break
elif UIDump.elements[count] in reqlist[4:]:
# print "remaining views block" ----> DEBUG < -----
self.unique_id = UIDump.check_unique_id[count]
self.resource_id = UIDump.check_resource_id[count]
self.classname = UIDump.check_className[count]
if UIDump.check_text[count] not in ignore_list:
log_ = "#LOG({classname}): Clicked on element with Text : '{id}'\n".format(classname =self.classname,id=UIDump.check_text[count])
line = "vc.findViewWithTextOrRaise('{id}').touch()\n".format(id=UIDump.check_text[count])
if line not in collection:
collection.append(line)
logs.append(log_)
elif self.resource_id not in ignore_list:
log_ = "#LOG({classname}): Clicked on : '{id}'\n".format(classname =self.classname,id=self.resource_id)
line = "vc.findViewByIdOrRaise('{id}').touch()\n".format(id=self.resource_id)
if line not in collection:
collection.append(line)
logs.append(log_)
elif self.unique_id not in ignore_list:
log_ = "#LOG({classname}): Clicked on : '{id}'\n".format(classname =self.classname,id=self.unique_id)
line = "vc.findViewByIdOrRaise('{id_text}').touch()\n".format(id_text=self.unique_id)
if line not in collection:
collection.append(line)
logs.append(log_)
else :
log_ = "#LOG({classname}): Vulnerable/Unstable field on co-ordinates ({X},{Y})\n".format(classname ='Vulnerable',X=int(self.XY[0]), Y=int(self.XY[1]))
line = "device.touchDip({X},{Y},0)\n\tvc.sleep(3)\n".format(X=int(self.XY[0]), Y=int(self.XY[1]))
if line not in collection:
collection.append(line)
logs.append(log_)
else:
# print "not in imp view block" ----> DEBUG < -----
log_ = "#LOG({classname}): Vulnerable/Unstable field on co-ordinates ({X},{Y})\n".format(classname ='Vulnerable',X=int(self.XY[0]), Y=int(self.XY[1]))
line = "device.touchDip({X},{Y},0)\n\tvc.sleep(3)\n".format(X=int(self.XY[0]), Y=int(self.XY[1]))
if line not in collection:
collection.append(line)
logs.append(log_)
break
elif UIDump.elements[count] in ["android.widget.FrameLayout"]:
# print "FrameLayout block" ----> DEBUG < -----
log_ = "#LOG({classname}): Vulnerable/Unstable field on co-ordinates ({X},{Y})\n".format(classname ='Vulnerable',X=int(self.XY[0]), Y=int(self.XY[1]))
line = "device.touchDip({X},{Y},0)\n\tvc.sleep(3)\n".format(X=int(self.XY[0]), Y=int(self.XY[1]))
if line not in collection:
collection.append(line)
logs.append(log_)
count += 1
else :
# print "nothing matches block" ----> DEBUG < -----
log_ = "#LOG({classname}): Vulnerable/Unstable field on co-ordinates ({X},{Y})\n".format(classname ='Vulnerable',X=int(self.XY[0]), Y=int(self.XY[1]))
line = "device.touchDip({X},{Y},0)\n\tvc.sleep(3)\n".format(X=int(self.XY[0]), Y=int(self.XY[1]))
if line not in collection:
collection.append(line)
logs.append(log_)
print collection
print logs
# ----> DEBUG < -----
if self.flag == 'green':
ScriptGen(filename).script(collection[-1])
ScriptGen(filename).log(logs[-1])
else:
pass
def main():
Divide_and_Conquer().bounds_Compare(bounds)
if __name__ == '__main__':
main() | null | null | null | null | [
0
] |
910 | 7ce679d5b889493f278de6deca6ec6bdb7acd3f5 | #Author: Abeer Rafiq
#Modified: 11/23/2019 3:00pm
#Importing Packages
import socket, sys, time, json, sqlite3
import RPi.GPIO as GPIO
from datetime import datetime, date
#Creating a global server class
class GlobalServer:
#The constructor
def __init__(self, port, room_ip_addrs,
app_ip_addrs):
#Setting port
self.__port = int(port)
#Setting socket to receive
self.__soc_recv = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
recv_address = ('', self.__port)
self.__soc_recv.bind(recv_address)
#Setting socket/addresses to send to the room rpi and app
self.__soc_send = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self.__room_addrs = (room_ip_addrs, self.__port)
self.__app_addrs = (app_ip_addrs, self.__port)
#Setting up led blinking
self.__receiveLED = 14
self.__sendLED = 15
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
GPIO.setup(self.__receiveLED, GPIO.OUT)
GPIO.setup(self.__sendLED, GPIO.OUT)
#Setting up string for acknowldegements
self.__ackstr = "{'opcode':'0'}"
#Setting database connections
dbpath = '/home/pi/Documents/Team_Project/dataBases/plantNursery_DB.db'
self.__dbconnect = sqlite3.connect(dbpath);
self.__dbconnect.row_factory = sqlite3.Row;
self.__cursor = self.__dbconnect.cursor()
#Setting up default threshold variables
self.__defaultThresholdValue = 80
self.__defaultLessGreaterThan = "<"
self.__lightThreshold = self.__defaultThresholdValue
self.__lightLessGreaterThan = self.__defaultLessGreaterThan
self.__soilMoistureThreshold = self.__defaultThresholdValue
self.__soilMoistureLessGreaterThan = self.__defaultLessGreaterThan
self.__roomHumidityThreshold = self.__defaultThresholdValue
self.__roomHumidityLessGreaterThan = self.__defaultLessGreaterThan
self.__roomTemperatureThreshold = self.__defaultThresholdValue
self.__roomTemperatureLessGreaterThan = self.__defaultLessGreaterThan
self.__currentLight = 0
self.__currentSoilMoisture = 0
self.__currentWaterDistance = 0
self.__currentRoomHumidity = 0
self.__currentRoomTemperature = 0
self.__waterPumpDuration = 2
#Setting timeout/end time values
self.__ack_timeout = 1
self.__ack_endTime = 4
print("\nGlobal Server Initialized")
#To blink a pin once
def blink(self, pin):
GPIO.output(pin,GPIO.HIGH)
time.sleep(1)
GPIO.output(pin,GPIO.LOW)
return
#Receives/returns buffer and sends ack
def receive(self):
#Receiving
print("\nWaiting to receive on port %d ... " % self.__port)
buf, address = self.__soc_recv.recvfrom(self.__port)
if(len(buf) > 0):
#Blink receive Led
self.blink(self.__receiveLED)
print ("Received %s bytes from '%s': %s " % (len(buf), address[0], buf))
#Sending ack
self.__soc_send.sendto(self.__ackstr, (address[0], self.__port))
#Blink send Led
self.blink(self.__sendLED)
print ("Sent %s to %s" % (self.__ackstr, (address[0], self.__port)))
#Give time for the ack sent to be acknowledged
time.sleep(self.__ack_endTime)
return buf
else:
return False
#To insert data into the database
def insertDBData(self, mySQL):
#Try inserting data to database table
try:
#Insert data
self.__cursor.execute(mySQL)
self.__dbconnect.commit();
except sqlite3.Error, e:
#If error, exit program
print ('\nDatabase Error %s:' % e.args[0])
self.__soc_recv.shutdown(1)
self.__soc_send.shutdown(1)
self.__cursor.close()
sys.exit(1)
return
#To add default threshold entries into the db
def setDefaultThresholds(self, potID):
potID = str(potID)
tdate = str(date.today())
ttime = str(datetime.now().strftime("%H:%M:%S"))
#Insert default thresholds into db
mySQL = "INSERT INTO userThresholds VALUES ('" + potID + "', 'light', '" + \
str(self.__defaultThresholdValue) + "', '" + self.__defaultLessGreaterThan + \
"', '" + tdate + "', '" + ttime + "')"
self.insertDBData(mySQL)
mySQL = "INSERT INTO userThresholds VALUES ('" + potID + "', 'soilMoisture', '" + \
str(self.__defaultThresholdValue) + "', '" + self.__defaultLessGreaterThan + \
"', '" + tdate + "', '" + ttime + "')"
self.insertDBData(mySQL)
mySQL = "INSERT INTO userThresholds VALUES ('" + potID + "', 'roomTemperature', '" + \
str(self.__defaultThresholdValue) + "', '" + self.__defaultLessGreaterThan + \
"', '" + tdate + "', '" + ttime + "')"
self.insertDBData(mySQL)
mySQL = "INSERT INTO userThresholds VALUES ('" + potID + "', 'roomHumidity', '" + \
str(self.__defaultThresholdValue) + "', '" + self.__defaultLessGreaterThan + \
"', '" + tdate + "', '" + ttime + "')"
self.insertDBData(mySQL)
print("\nSet Default Thresholds")
return
#To add user requested threshold entries into the db
def updateUserThresholdsTable(self, threshold):
potID = str(threshold.get("potID"))
lessGreaterThan = str(threshold.get("lessGreaterThan"))
thresholdValue = float(str(threshold.get("thresholdValue")))
sensorType = str(threshold.get("sensorType"))
tdate = str(date.today())
ttime = str(datetime.now().strftime("%H:%M:%S"))
#Insert thresholds into db
mySQL = "INSERT INTO userThresholds VALUES ('" + potID + "', '" + sensorType + "', '" + str(thresholdValue) + \
"', '" + lessGreaterThan + "', '" + str(tdate) + "', '" + str(ttime) + "')"
self.insertDBData(mySQL)
#Reassign global server's instance threshold variables
if sensorType == "light":
self.__lightThreshold = thresholdValue
self.__lightLessGreaterThan = lessGreaterThan
elif sensorType == "soilMoisture":
self.__soilMoistureThreshold = thresholdValue
self.__soilMoistureLessGreaterThan = lessGreaterThan
elif sensorType == "roomTemperature":
self.__roomHumidityThreshold = thresholdValue
self.__roomHumidityLessGreaterThan = lessGreaterThan
elif sensorType == "roomHumidity":
self.__roomTemperatureThreshold = thresholdValue
self.__roomTemperatureLessGreaterThan = lessGreaterThan
print("\nSet User Requested Thresholds")
return
#To update user data in userPlantsTable
def updateUserPlantsTable(self, userInfo):
potID = str(userInfo.get('potID'))
roomID = str(userInfo.get('roomID'))
ownerID = str(userInfo.get('ownerID'))
#Inserting user data into db
mySQL = "INSERT INTO userPlants VALUES ('" + potID + "', '" + roomID + "', '" + ownerID + "')"
self.insertDBData(mySQL)
print("\nUpdated User Data")
return
#To update notes in userNotesTable
def updateUserNotesTable(self, userNotes):
potID = str(userNotes.get('potID'))
notes = str(userNotes.get('notes'))
tdate = str(date.today())
ttime = str(datetime.now().strftime("%H:%M:%S"))
#Inserting notes into db
mySQL = "INSERT INTO userNotes VALUES ('" + potID + "', '" + notes + "', '" + tdate + "', '" + ttime + "')"
self.insertDBData(mySQL)
print("\nUpdated Notes Data")
return
#To update pot data in db
def updatePotTable(self, sensorInfo, tdate, time):
potID = sensorInfo.get('potID')
self.__currentWaterDistance = sensorInfo.get('waterDistance')
self.__currentLight = sensorInfo.get('light')
self.__currentSoilMoisture = sensorInfo.get('soilMoisture')
#Inserting pot data into db
mySQL = "INSERT INTO potData VALUES ('" + str(potID) + "', '" + str(self.__currentLight)+ "', '" + \
str(self.__currentSoilMoisture) + "', '" + str(self.__currentWaterDistance) + "', '" + \
tdate + "', '" + ttime + "')"
self.insertDBData(mySQL)
print("\nUpdated Pot Data")
return
#To update room data in db
def updateRoomTable(self, sensorInfo,tdate, time):
self.__currentRoomTemperature = round(sensorInfo.get('temperature'), 2)
self.__currentRoomHumidity = round(sensorInfo.get('humidity'), 2)
roomID = sensorInfo.get('roomID')
#Inserting room data into db
mySQL = "insert into roomData values ('" + str(roomID) + "', '" + str(self.__currentRoomTemperature) + \
"', '" + str(self.__currentRoomHumidity) + "' , '" + tdate + "', '" + ttime + "')"
self.insertDBData(mySQL)
print("\nUpdated Room Data")
return
#To compare current sensor data to threshold values
def checkUserThresholds(self):
#Notification json #Should be receiving an ack so timeout if no ack receivedstrings
lightNotfn = '{"opcode" : "D", "sensorArray" : "1, 0, 0, 0, 0, 0, 0, 0, 0, 0"}'
roomHumidityNotfn = '{"opcode" : "D", "sensorArray" : "0, 1, 0, 0, 0, 0, 0, 0, 0, 0"}'
roomTemperatureNotfn = '{"opcode" : "D", "sensorArray" : "0, 0, 1, 0, 0, 0, 0, 0, 0, 0"}'
soilMoistureNotfn = '{"opcode" : "D", "sensorArray" : "0, 0, 0, 1, 0, 0, 0, 0, 0, 0"}'
#Tuples of sensor data to easily neatly
light = (self.__currentLight, self.__lightThreshold, self.__lightLessGreaterThan, lightNotfn)
soilMoisture = (self.__currentSoilMoisture, self.__soilMoistureThreshold, \
self.__soilMoistureLessGreaterThan, soilMoistureNotfn, self.__waterPumpDuration)
roomHumidity = (self.__currentRoomHumidity, self.__roomHumidityThreshold, \
self.__roomHumidityLessGreaterThan, roomHumidityNotfn)
roomTemperature = (self.__currentRoomTemperature, self.__roomTemperatureThreshold, \
self.__roomTemperatureLessGreaterThan, roomTemperatureNotfn)
#Combined tuples for sensors
sensorArr = [light, roomHumidity, roomTemperature, soilMoisture]
#For each sensor compare current sensor value with threshold value
for sensor in sensorArr:
if sensor[2] == ">":
if sensor[0] > sensor[1]:
#Threshold is met, notify user
notifyApp(sensor[3])
if(len(sensor) == 4):
#Soil moisture's threshold is met, then start water pump, notify user
startPumpStr = '{"opcode" : "4", "pumpDuration" : "' + str(sensor[4]) + '"}'
startWaterPump(startPumpStr)
notifyApp(startPumpStr)
elif sensor[2] == "<":
if sensor[0] < sensor[1]:
#Threshold is met, notify user
notifyApp(sensor[3])
if(length(sensor) == 4):
#Soil moisture's threshold is met, then start water pump, notify user
startPumpStr = '{"opcode" : "4", "pumpDuration" : "' + str(sensor[4]) + '"}'
startWaterPump(startPumpStr)
notifyApp(startPumpStr)
print("\Thresholds Compared")
return
#Send room rpi msg to start water pump
def startWaterPump(self, startPump):
if (self.send_Room_Msg(startPump) == False):
#If no ack received, send msg again
print("\nStart Water Pump sent again to server")
self.startWaterPump(startPump)
return
#To send msgs to the room and wait for ack
def send_Room_Msg(self, message):
self.__soc_send.sendto(message, self.__room_addrs)
#Blink send LED
self.blink(self.__sendLED)
print("\Message sent to Room: " + message)
#Should be receiving an ack so timeout if no ack received
soc_recv.settimeout(self.__ack_timeout)
startTime = time.time()
endTime = self.__ack_endTime
while (True):
#If less than a endTime amount of time
if time.time() < (startTime + endTime):
try:
#Try Receving otherwise timeout and retry
print("Waiting for Acknowledgement . . .")
buf, address = soc_recv.recvfrom(self.__port)
except socket.timeout:
print("Receiving is Timed Out")
#Restart while loop (Retry)
continue
try:
#If buf is received, try to load it
buf = json.loads(buf)
if not len(buf):
#No ack received, retry
continue
else:
if (buf.get("opcode") == "0"):
#Ack recevied!
print("Acknowledgement Received")
return True
else:
#No ack received, retry
continue
except (ValueError, KeyError, TypeError):
#Ack not received, try again
continue
else:
#Failed to receive ack within a endTime amount of time
return False
return
#To notifcations msgs to the app
def notifyApp(self, message):
if (self.send_App_Msg(message) == False):
#If no ack received, send msg again
print("\nNotification sent again to server")
self.notifyApp(message)
return
#To send msgs to the app and wait for ack
def send_App_Msg(self, message):
self.__soc_send.sendto(message, self.__app_addrs)
#Blink send LED
self.blink(self.__sendLED)
print("\nNotifcation sent to App: " + message)
#Should be receiving an ack so timeout if no ack received
soc_recv.settimeout(self.__ack_timeout)
startTime = time.time()
endTime = self.__ack_endTime
while (True):
#If less than a endTime amount of time
if time.time() < (startTime + endTime):
try:
#Try Receving otherwise timeout and retry
print("Waiting for Acknowledgement . . .")
buf, address = soc_recv.recvfrom(self.__port)
except socket.timeout:
print("Receiving is Timed Out")
#Restart while loop (Retry)
continue
try:
#If buf is received, try to load it
buf = json.loads(buf)
if not len(buf):
#No ack received, retry
continue
else:
if (buf.get("opcode") == "0"):
#Ack recevied!
print("Acknowledgement Received")
return True
else:
#No ack received, retry
continue
except (ValueError, KeyError, TypeError):
#Ack not received, try again
continue
else:
#Failed to receive ack within a endTime amount of time
return False
return
#To get requested stats from the db
def get_stats(self, rowNumbers, sensors):
#Try retrieving data from the database
try:
#Retrieve Data
sensors = sensors.replace('"',"").replace("'","").replace('[',"").replace(']',"")
mysql = """SELECT """ + sensors + """, tdate, ttime FROM (
SELECT * FROM userPlants a
INNER JOIN potData b
ON a.potID = b.potID
INNER JOIN roomData c
ON a.roomID = c.roomID AND b.tdate = c.tdate AND b.ttime = c.ttime
ORDER BY c.tdate DESC, c.ttime DESC LIMIT """ + str(rowNumbers) + """)"""
myresult = self.__cursor.execute(mysql).fetchall()
except sqlite3.Error, e:
#If error, exit program
print '\nDatabase Error %s:' % e.args[0]
sys.exit(1)
#Convert data into json format
stats = json.dumps( [dict(i) for i in myresult] )
print("\nData Retreived from DB")
return stats
#To send the stats with the corresponding opcode
def send_stats(self, rowNumbers, sensors):
if rowNumbers == '0':
#0 means to send app just one most recent row of data (opcode E)
oneRow = globalServer.get_stats(1, sensors)
stats = '{"opcode" : "E", "statsArray" : "' + str(oneRow) + '"}'
else:
#Otherwise send mutiple recent rows of data (opcode 6)
manyRows = globalServer.get_stats(rowNumbers, sensors)
stats = '{"opcode" : "6", "statsArray" : "' + str(manyRows) + '"}'
#Send stats to App
#If ack received return
if (self.send_notifyApp(error) == True):
print("\nStats sent to app")
else:
#If no ack received, try sending again
print("\nStats sent again to app (notify again)")
self.send_stats(rowNumbers, sensors)
return
#Main function which receives json data and invokes methods based on opcode received
def main():
#Create GlobalServer object (port, room_ip_addrs, app_ip_addrs)
globalServer = GlobalServer(1000, '192.168.1.47',
'192.168.137.102')
while True:
message = globalServer.receive()
if (message == False):
#If length of buffer is <1
continue
else:
message = json.loads(message)
#User wants to update notes table
if (message.get('opcode') == "1"):
globalServer.updateUserNotesTable(message)
#User wants to add a pot with a room and owner
if (message.get('opcode') == "2"):
globalServer.updateUserPlantsTable(message)
#Set default thresholds for that potID
globalServer.setDefaultThresholds(message.get("potID"))
#If user wants to set thresholds to requested ones
if (message.get('opcode') == "3"):
globalServer.updateUserThresholdsTable(message)
#If user wants to view stats
if (message.get('opcode') == "5"):
rowNumbers = message.get("rowNumbers")
sensors = message.get("sensorType")
globalServer.send_stats(rowNumbers, sensors)
#If an error has occured in the room rpi or arduino
if (message.get('opcode') == "D"):
globalServer.notifyApp(str(message))
#If room rpi sent all sensory data, update tables, compare values to thresholds as well
if (message.get('opcode') == "9"):
tdate = str(date.today())
ttime = str(datetime.now().strftime("%H:%M:%S"))
globalServer.updateRoomTable(message, tdate, ttime)
globalServer.updatePotTable(message, tdate, ttime)
globalServer.checkUserThresholds()
self.__soc_recv.shutdown(1)
self.__soc_send.shutdown(1)
self.__cursor.close()
return
if __name__== "__main__":
main()
| null | null | null | null | [
0
] |
911 | 08a5a903d3757f8821554aa3649ec2ac2b2995a5 | /Users/tanzy/anaconda3/lib/python3.6/_dummy_thread.py | null | null | null | null | [
0
] |
912 | 09d31df9c76975377b44470e1f2ba4a5c4b7bbde | <mask token>
class GameStdIO:
<mask token>
<mask token>
def _done_sending(self):
"""
Finish sending commands to the game.
:return: nothing
"""
sys.stdout.write('\n')
sys.stdout.flush()
def _get_string(self):
"""
Read input from the game.
:return: The input read from the Halite engine
:rtype: str
"""
result = sys.stdin.readline().rstrip('\n')
return result
def send_command_queue(self, command_queue):
"""
Issue the given list of commands.
:param list[str] command_queue: List of commands to send the Halite engine
:return: nothing
"""
for command in command_queue:
self._send_string(command)
self._done_sending()
@staticmethod
def _set_up_logging(tag, name):
"""
Set up and truncate the log
:param tag: The user tag (used for naming the log)
:param name: The bot name (used for naming the log)
:return: nothing
"""
log_file = '{}_{}.log'.format(tag, name)
logging.basicConfig(filename=log_file, level=logging.DEBUG,
filemode='w')
logging.info('Initialized bot {}'.format(name))
def __init__(self, name):
"""
Initialize the bot with the given name.
:param name: The name of the bot.
"""
self._name = name
self._send_name = False
tag = int(self._get_string())
GameStdIO._set_up_logging(tag, name)
width, height = [int(x) for x in self._get_string().strip().split()]
self.map = game_map.Map(tag, width, height)
self.update_map()
self.initial_map = copy.deepcopy(self.map)
self._send_name = True
self.done = False
def update_map(self):
"""
Parse the map given by the engine.
:return: new parsed map
:rtype: game_map.Map
"""
if self._send_name:
self._send_string(self._name)
self._done_sending()
self._send_name = False
logging.info('---NEW TURN---')
recv = self._get_string()
if recv == '':
self.close()
self.done = True
return self.map
self.map._parse(recv)
return self.map
def close(self):
pass
| <mask token>
class GameUnix:
<mask token>
def _send_string(self, s):
"""
Send data to the game. Call :function:`done_sending` once finished.
:param str s: String to send
:return: nothing
"""
self.sfile.write(s)
def _done_sending(self):
"""
Finish sending commands to the game.
:return: nothing
"""
self.sfile.write('\n')
self.sfile.flush()
<mask token>
<mask token>
<mask token>
<mask token>
def update_map(self):
"""
Parse the map given by the engine.
:return: new parsed map
:rtype: game_map.Map
"""
if self._send_name:
self._send_string(self._name)
self._done_sending()
self._send_name = False
logging.info('---NEW TURN---')
recv = self._get_string()
if recv == '':
self.close()
self.done = True
return self.map
self.map._parse(recv)
return self.map
<mask token>
class GameStdIO:
"""
:ivar map: Current map representation
:ivar initial_map: The initial version of the map before game starts
"""
def _send_string(self, s):
"""
Send data to the game. Call :function:`done_sending` once finished.
:param str s: String to send
:return: nothing
"""
sys.stdout.write(s)
def _done_sending(self):
"""
Finish sending commands to the game.
:return: nothing
"""
sys.stdout.write('\n')
sys.stdout.flush()
def _get_string(self):
"""
Read input from the game.
:return: The input read from the Halite engine
:rtype: str
"""
result = sys.stdin.readline().rstrip('\n')
return result
def send_command_queue(self, command_queue):
"""
Issue the given list of commands.
:param list[str] command_queue: List of commands to send the Halite engine
:return: nothing
"""
for command in command_queue:
self._send_string(command)
self._done_sending()
@staticmethod
def _set_up_logging(tag, name):
"""
Set up and truncate the log
:param tag: The user tag (used for naming the log)
:param name: The bot name (used for naming the log)
:return: nothing
"""
log_file = '{}_{}.log'.format(tag, name)
logging.basicConfig(filename=log_file, level=logging.DEBUG,
filemode='w')
logging.info('Initialized bot {}'.format(name))
def __init__(self, name):
"""
Initialize the bot with the given name.
:param name: The name of the bot.
"""
self._name = name
self._send_name = False
tag = int(self._get_string())
GameStdIO._set_up_logging(tag, name)
width, height = [int(x) for x in self._get_string().strip().split()]
self.map = game_map.Map(tag, width, height)
self.update_map()
self.initial_map = copy.deepcopy(self.map)
self._send_name = True
self.done = False
def update_map(self):
"""
Parse the map given by the engine.
:return: new parsed map
:rtype: game_map.Map
"""
if self._send_name:
self._send_string(self._name)
self._done_sending()
self._send_name = False
logging.info('---NEW TURN---')
recv = self._get_string()
if recv == '':
self.close()
self.done = True
return self.map
self.map._parse(recv)
return self.map
def close(self):
pass
| <mask token>
class GameUnix:
<mask token>
def _send_string(self, s):
"""
Send data to the game. Call :function:`done_sending` once finished.
:param str s: String to send
:return: nothing
"""
self.sfile.write(s)
def _done_sending(self):
"""
Finish sending commands to the game.
:return: nothing
"""
self.sfile.write('\n')
self.sfile.flush()
def _get_string(self):
"""
Read input from the game.
:return: The input read from the Halite engine
:rtype: str
"""
result = self.sfile.readline().rstrip('\n')
return result
def send_command_queue(self, command_queue):
"""
Issue the given list of commands.
:param list[str] command_queue: List of commands to send the Halite engine
:return: nothing
"""
for command in command_queue:
self._send_string(command)
self._done_sending()
@staticmethod
def _set_up_logging(tag, name):
"""
Set up and truncate the log
:param tag: The user tag (used for naming the log)
:param name: The bot name (used for naming the log)
:return: nothing
"""
log_file = '{}_{}.log'.format(tag, name)
logging.basicConfig(filename=log_file, level=logging.DEBUG,
filemode='w')
logging.info('Initialized bot {}'.format(name))
def __init__(self, name, socket_path='/dev/shm/bot.sock'):
"""
Initialize the bot with the given name.
:param name: The name of the bot.
"""
self.s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
connected = False
while not connected:
try:
self.s.connect(socket_path)
connected = True
except Exception:
pass
self.sfile = self.s.makefile('rw')
self._name = name
self._send_name = False
tag = int(self._get_string())
GameUnix._set_up_logging(tag, name)
width, height = [int(x) for x in self._get_string().strip().split()]
self.map = game_map.Map(tag, width, height)
self.update_map()
self.initial_map = copy.deepcopy(self.map)
self._send_name = True
self.done = False
def update_map(self):
"""
Parse the map given by the engine.
:return: new parsed map
:rtype: game_map.Map
"""
if self._send_name:
self._send_string(self._name)
self._done_sending()
self._send_name = False
logging.info('---NEW TURN---')
recv = self._get_string()
if recv == '':
self.close()
self.done = True
return self.map
self.map._parse(recv)
return self.map
<mask token>
class GameStdIO:
"""
:ivar map: Current map representation
:ivar initial_map: The initial version of the map before game starts
"""
def _send_string(self, s):
"""
Send data to the game. Call :function:`done_sending` once finished.
:param str s: String to send
:return: nothing
"""
sys.stdout.write(s)
def _done_sending(self):
"""
Finish sending commands to the game.
:return: nothing
"""
sys.stdout.write('\n')
sys.stdout.flush()
def _get_string(self):
"""
Read input from the game.
:return: The input read from the Halite engine
:rtype: str
"""
result = sys.stdin.readline().rstrip('\n')
return result
def send_command_queue(self, command_queue):
"""
Issue the given list of commands.
:param list[str] command_queue: List of commands to send the Halite engine
:return: nothing
"""
for command in command_queue:
self._send_string(command)
self._done_sending()
@staticmethod
def _set_up_logging(tag, name):
"""
Set up and truncate the log
:param tag: The user tag (used for naming the log)
:param name: The bot name (used for naming the log)
:return: nothing
"""
log_file = '{}_{}.log'.format(tag, name)
logging.basicConfig(filename=log_file, level=logging.DEBUG,
filemode='w')
logging.info('Initialized bot {}'.format(name))
def __init__(self, name):
"""
Initialize the bot with the given name.
:param name: The name of the bot.
"""
self._name = name
self._send_name = False
tag = int(self._get_string())
GameStdIO._set_up_logging(tag, name)
width, height = [int(x) for x in self._get_string().strip().split()]
self.map = game_map.Map(tag, width, height)
self.update_map()
self.initial_map = copy.deepcopy(self.map)
self._send_name = True
self.done = False
def update_map(self):
"""
Parse the map given by the engine.
:return: new parsed map
:rtype: game_map.Map
"""
if self._send_name:
self._send_string(self._name)
self._done_sending()
self._send_name = False
logging.info('---NEW TURN---')
recv = self._get_string()
if recv == '':
self.close()
self.done = True
return self.map
self.map._parse(recv)
return self.map
def close(self):
pass
| import sys
import logging
import copy
import socket
from . import game_map
class GameUnix:
"""
:ivar map: Current map representation
:ivar initial_map: The initial version of the map before game starts
"""
def _send_string(self, s):
"""
Send data to the game. Call :function:`done_sending` once finished.
:param str s: String to send
:return: nothing
"""
self.sfile.write(s)
def _done_sending(self):
"""
Finish sending commands to the game.
:return: nothing
"""
self.sfile.write('\n')
self.sfile.flush()
def _get_string(self):
"""
Read input from the game.
:return: The input read from the Halite engine
:rtype: str
"""
result = self.sfile.readline().rstrip('\n')
return result
def send_command_queue(self, command_queue):
"""
Issue the given list of commands.
:param list[str] command_queue: List of commands to send the Halite engine
:return: nothing
"""
for command in command_queue:
self._send_string(command)
self._done_sending()
@staticmethod
def _set_up_logging(tag, name):
"""
Set up and truncate the log
:param tag: The user tag (used for naming the log)
:param name: The bot name (used for naming the log)
:return: nothing
"""
log_file = '{}_{}.log'.format(tag, name)
logging.basicConfig(filename=log_file, level=logging.DEBUG,
filemode='w')
logging.info('Initialized bot {}'.format(name))
def __init__(self, name, socket_path='/dev/shm/bot.sock'):
"""
Initialize the bot with the given name.
:param name: The name of the bot.
"""
self.s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
connected = False
while not connected:
try:
self.s.connect(socket_path)
connected = True
except Exception:
pass
self.sfile = self.s.makefile('rw')
self._name = name
self._send_name = False
tag = int(self._get_string())
GameUnix._set_up_logging(tag, name)
width, height = [int(x) for x in self._get_string().strip().split()]
self.map = game_map.Map(tag, width, height)
self.update_map()
self.initial_map = copy.deepcopy(self.map)
self._send_name = True
self.done = False
def update_map(self):
"""
Parse the map given by the engine.
:return: new parsed map
:rtype: game_map.Map
"""
if self._send_name:
self._send_string(self._name)
self._done_sending()
self._send_name = False
logging.info('---NEW TURN---')
recv = self._get_string()
if recv == '':
self.close()
self.done = True
return self.map
self.map._parse(recv)
return self.map
def close(self):
self.sfile.close()
self.s.close()
class GameStdIO:
"""
:ivar map: Current map representation
:ivar initial_map: The initial version of the map before game starts
"""
def _send_string(self, s):
"""
Send data to the game. Call :function:`done_sending` once finished.
:param str s: String to send
:return: nothing
"""
sys.stdout.write(s)
def _done_sending(self):
"""
Finish sending commands to the game.
:return: nothing
"""
sys.stdout.write('\n')
sys.stdout.flush()
def _get_string(self):
"""
Read input from the game.
:return: The input read from the Halite engine
:rtype: str
"""
result = sys.stdin.readline().rstrip('\n')
return result
def send_command_queue(self, command_queue):
"""
Issue the given list of commands.
:param list[str] command_queue: List of commands to send the Halite engine
:return: nothing
"""
for command in command_queue:
self._send_string(command)
self._done_sending()
@staticmethod
def _set_up_logging(tag, name):
"""
Set up and truncate the log
:param tag: The user tag (used for naming the log)
:param name: The bot name (used for naming the log)
:return: nothing
"""
log_file = '{}_{}.log'.format(tag, name)
logging.basicConfig(filename=log_file, level=logging.DEBUG,
filemode='w')
logging.info('Initialized bot {}'.format(name))
def __init__(self, name):
"""
Initialize the bot with the given name.
:param name: The name of the bot.
"""
self._name = name
self._send_name = False
tag = int(self._get_string())
GameStdIO._set_up_logging(tag, name)
width, height = [int(x) for x in self._get_string().strip().split()]
self.map = game_map.Map(tag, width, height)
self.update_map()
self.initial_map = copy.deepcopy(self.map)
self._send_name = True
self.done = False
def update_map(self):
"""
Parse the map given by the engine.
:return: new parsed map
:rtype: game_map.Map
"""
if self._send_name:
self._send_string(self._name)
self._done_sending()
self._send_name = False
logging.info('---NEW TURN---')
recv = self._get_string()
if recv == '':
self.close()
self.done = True
return self.map
self.map._parse(recv)
return self.map
def close(self):
pass
| import sys
import logging
import copy
import socket
from . import game_map
class GameUnix:
"""
:ivar map: Current map representation
:ivar initial_map: The initial version of the map before game starts
"""
def _send_string(self, s):
"""
Send data to the game. Call :function:`done_sending` once finished.
:param str s: String to send
:return: nothing
"""
self.sfile.write(s)
def _done_sending(self):
"""
Finish sending commands to the game.
:return: nothing
"""
self.sfile.write('\n')
self.sfile.flush()
def _get_string(self):
"""
Read input from the game.
:return: The input read from the Halite engine
:rtype: str
"""
result = self.sfile.readline().rstrip('\n')
return result
def send_command_queue(self, command_queue):
"""
Issue the given list of commands.
:param list[str] command_queue: List of commands to send the Halite engine
:return: nothing
"""
for command in command_queue:
self._send_string(command)
self._done_sending()
@staticmethod
def _set_up_logging(tag, name):
"""
Set up and truncate the log
:param tag: The user tag (used for naming the log)
:param name: The bot name (used for naming the log)
:return: nothing
"""
log_file = "{}_{}.log".format(tag, name)
logging.basicConfig(filename=log_file, level=logging.DEBUG, filemode='w')
logging.info("Initialized bot {}".format(name))
def __init__(self, name, socket_path="/dev/shm/bot.sock"):
"""
Initialize the bot with the given name.
:param name: The name of the bot.
"""
self.s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
connected = False
while not connected:
try:
self.s.connect(socket_path)
connected = True
except Exception:
pass # Do nothing, just try again
self.sfile = self.s.makefile('rw')
self._name = name
self._send_name = False
tag = int(self._get_string())
GameUnix._set_up_logging(tag, name)
width, height = [int(x) for x in self._get_string().strip().split()]
self.map = game_map.Map(tag, width, height)
self.update_map()
self.initial_map = copy.deepcopy(self.map)
self._send_name = True
self.done = False
def update_map(self):
"""
Parse the map given by the engine.
:return: new parsed map
:rtype: game_map.Map
"""
if self._send_name:
self._send_string(self._name)
self._done_sending()
self._send_name = False
logging.info("---NEW TURN---")
recv = self._get_string()
if recv == "":
self.close()
self.done = True
return self.map # last step map
self.map._parse(recv)
return self.map
def close(self):
self.sfile.close()
self.s.close()
class GameStdIO:
"""
:ivar map: Current map representation
:ivar initial_map: The initial version of the map before game starts
"""
def _send_string(self, s):
"""
Send data to the game. Call :function:`done_sending` once finished.
:param str s: String to send
:return: nothing
"""
sys.stdout.write(s)
def _done_sending(self):
"""
Finish sending commands to the game.
:return: nothing
"""
sys.stdout.write('\n')
sys.stdout.flush()
def _get_string(self):
"""
Read input from the game.
:return: The input read from the Halite engine
:rtype: str
"""
result = sys.stdin.readline().rstrip('\n')
return result
def send_command_queue(self, command_queue):
"""
Issue the given list of commands.
:param list[str] command_queue: List of commands to send the Halite engine
:return: nothing
"""
for command in command_queue:
self._send_string(command)
self._done_sending()
@staticmethod
def _set_up_logging(tag, name):
"""
Set up and truncate the log
:param tag: The user tag (used for naming the log)
:param name: The bot name (used for naming the log)
:return: nothing
"""
log_file = "{}_{}.log".format(tag, name)
logging.basicConfig(filename=log_file, level=logging.DEBUG, filemode='w')
logging.info("Initialized bot {}".format(name))
def __init__(self, name):
"""
Initialize the bot with the given name.
:param name: The name of the bot.
"""
self._name = name
self._send_name = False
tag = int(self._get_string())
GameStdIO._set_up_logging(tag, name)
width, height = [int(x) for x in self._get_string().strip().split()]
self.map = game_map.Map(tag, width, height)
self.update_map()
self.initial_map = copy.deepcopy(self.map)
self._send_name = True
self.done = False
def update_map(self):
"""
Parse the map given by the engine.
:return: new parsed map
:rtype: game_map.Map
"""
if self._send_name:
self._send_string(self._name)
self._done_sending()
self._send_name = False
logging.info("---NEW TURN---")
recv = self._get_string()
if recv == "":
self.close()
self.done = True
return self.map # last step map
self.map._parse(recv)
return self.map
def close(self):
pass | [
8,
14,
18,
21,
22
] |
913 | 891588327046e26acb9a691fa8bb9a99420712d6 | <mask token>
| <mask token>
schema_view = get_swagger_view(title='Pastebin API')
urlpatterns = [url('^admin/', admin.site.urls), url('^doc_u/', schema_view),
url('^', include('o.urls')), url('^api/', include('restapi.urls',
namespace='res'))]
| from django.conf.urls import url, include
from django.contrib import admin
from rest_framework_swagger.views import get_swagger_view
schema_view = get_swagger_view(title='Pastebin API')
urlpatterns = [url('^admin/', admin.site.urls), url('^doc_u/', schema_view),
url('^', include('o.urls')), url('^api/', include('restapi.urls',
namespace='res'))]
| from django.conf.urls import url, include
from django.contrib import admin
from rest_framework_swagger.views import get_swagger_view
schema_view = get_swagger_view(title='Pastebin API')
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^doc_u/', schema_view),
url(r'^', include('o.urls', )),
url(r'^api/', include('restapi.urls', namespace='res')),
]
| null | [
0,
1,
2,
3
] |
914 | a6f3c51d4115a6e0d6f01aa75bf5e6e367840d43 | <mask token>
| <mask token>
class ConvMainUnitTest:
<mask token>
| <mask token>
class ConvMainUnitTest:
@staticmethod
def implicit_gemm(input: Tensor, weight: Tensor, output: Tensor,
padding: List[int], stride: List[int], dilation: List[int], ndim:
int, iter_algo_: int, op_type_: int, i_ltype_: int, w_ltype_: int,
o_ltype_: int, ts: Tuple[int, int, int], wts: Tuple[int, int, int],
num_stage: int, dacc: int, dcomp: int, algo: str, tensorop: List[
int], i_interleave: int=1, w_interleave: int=1, o_interleave: int=1,
alpha: float=1, beta: float=0, split_k_slices: int=1, workspace:
Tensor=Tensor(), mask_sparse: bool=False, increment_k_first: bool=
False, mask: Tensor=Tensor(), mask_argsort: Tensor=Tensor(),
indices: Tensor=Tensor(), mask_output: Tensor=Tensor()) ->None:
"""
Args:
input:
weight:
output:
padding:
stride:
dilation:
ndim:
iter_algo_:
op_type_:
i_ltype_:
w_ltype_:
o_ltype_:
ts:
wts:
num_stage:
dacc:
dcomp:
algo:
tensorop:
i_interleave:
w_interleave:
o_interleave:
alpha:
beta:
split_k_slices:
workspace:
mask_sparse:
increment_k_first:
mask:
mask_argsort:
indices:
mask_output:
"""
...
| from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Type, Union, overload
from pccm.stubs import EnumClassValue, EnumValue
from cumm.tensorview import Tensor
class ConvMainUnitTest:
@staticmethod
def implicit_gemm(input: Tensor, weight: Tensor, output: Tensor,
padding: List[int], stride: List[int], dilation: List[int], ndim:
int, iter_algo_: int, op_type_: int, i_ltype_: int, w_ltype_: int,
o_ltype_: int, ts: Tuple[int, int, int], wts: Tuple[int, int, int],
num_stage: int, dacc: int, dcomp: int, algo: str, tensorop: List[
int], i_interleave: int=1, w_interleave: int=1, o_interleave: int=1,
alpha: float=1, beta: float=0, split_k_slices: int=1, workspace:
Tensor=Tensor(), mask_sparse: bool=False, increment_k_first: bool=
False, mask: Tensor=Tensor(), mask_argsort: Tensor=Tensor(),
indices: Tensor=Tensor(), mask_output: Tensor=Tensor()) ->None:
"""
Args:
input:
weight:
output:
padding:
stride:
dilation:
ndim:
iter_algo_:
op_type_:
i_ltype_:
w_ltype_:
o_ltype_:
ts:
wts:
num_stage:
dacc:
dcomp:
algo:
tensorop:
i_interleave:
w_interleave:
o_interleave:
alpha:
beta:
split_k_slices:
workspace:
mask_sparse:
increment_k_first:
mask:
mask_argsort:
indices:
mask_output:
"""
...
| from typing import (Any, Callable, Dict, List, Optional, Set, Tuple, Type,
Union, overload)
from pccm.stubs import EnumClassValue, EnumValue
from cumm.tensorview import Tensor
class ConvMainUnitTest:
@staticmethod
def implicit_gemm(input: Tensor, weight: Tensor, output: Tensor, padding: List[int], stride: List[int], dilation: List[int], ndim: int, iter_algo_: int, op_type_: int, i_ltype_: int, w_ltype_: int, o_ltype_: int, ts: Tuple[int, int, int], wts: Tuple[int, int, int], num_stage: int, dacc: int, dcomp: int, algo: str, tensorop: List[int], i_interleave: int = 1, w_interleave: int = 1, o_interleave: int = 1, alpha: float = 1, beta: float = 0, split_k_slices: int = 1, workspace: Tensor = Tensor(), mask_sparse: bool = False, increment_k_first: bool = False, mask: Tensor = Tensor(), mask_argsort: Tensor = Tensor(), indices: Tensor = Tensor(), mask_output: Tensor = Tensor()) -> None:
"""
Args:
input:
weight:
output:
padding:
stride:
dilation:
ndim:
iter_algo_:
op_type_:
i_ltype_:
w_ltype_:
o_ltype_:
ts:
wts:
num_stage:
dacc:
dcomp:
algo:
tensorop:
i_interleave:
w_interleave:
o_interleave:
alpha:
beta:
split_k_slices:
workspace:
mask_sparse:
increment_k_first:
mask:
mask_argsort:
indices:
mask_output:
"""
... | [
0,
1,
2,
3,
4
] |
915 | 0dd361239d85ed485594ac0f5e7e2168f0684544 | <mask token>
| <mask token>
@pytest.mark.parametrize('invalid_line', ['beginningGDG::',
'beginning::end', 'nothing'])
def test_parse_invalid_line(invalid_line):
assert gadget.parse_log_line(invalid_line) is None
| import pytest
import gadget
@pytest.mark.parametrize('invalid_line', ['beginningGDG::',
'beginning::end', 'nothing'])
def test_parse_invalid_line(invalid_line):
assert gadget.parse_log_line(invalid_line) is None
| import pytest
import gadget
@pytest.mark.parametrize('invalid_line', [
'beginningGDG::',
'beginning::end',
'nothing',
])
def test_parse_invalid_line(invalid_line):
assert gadget.parse_log_line(invalid_line) is None
| null | [
0,
1,
2,
3
] |
916 | 7de19a85a6a05bd2972b11571d5f05219c6beb1a | <mask token>
| <mask token>
def move_directory(input_directory_path, output_directory_path):
print('moving %s to %s' % (input_directory_path, output_directory_path))
if not dry_run:
shutil.move(input_directory_path, output_directory_path)
<mask token>
| <mask token>
def move_directory(input_directory_path, output_directory_path):
print('moving %s to %s' % (input_directory_path, output_directory_path))
if not dry_run:
shutil.move(input_directory_path, output_directory_path)
print('Root dir is %s' % root_path)
for level1 in os.listdir(root_path):
level1_path = os.path.join(root_path, level1)
if os.path.isdir(level1_path):
print('> %s' % level1)
for level2 in os.listdir(level1_path):
level2_path = os.path.join(level1_path, level2)
if os.path.isdir(level2_path):
print('>> %s' % level2)
move_directory(level2_path, root_path)
print('Deleting %s' % level1_path)
if not dry_run:
shutil.rmtree(level1_path)
| <mask token>
root_path = 'C:/Users/koyou/Desktop/test'
dry_run = False
def move_directory(input_directory_path, output_directory_path):
print('moving %s to %s' % (input_directory_path, output_directory_path))
if not dry_run:
shutil.move(input_directory_path, output_directory_path)
print('Root dir is %s' % root_path)
for level1 in os.listdir(root_path):
level1_path = os.path.join(root_path, level1)
if os.path.isdir(level1_path):
print('> %s' % level1)
for level2 in os.listdir(level1_path):
level2_path = os.path.join(level1_path, level2)
if os.path.isdir(level2_path):
print('>> %s' % level2)
move_directory(level2_path, root_path)
print('Deleting %s' % level1_path)
if not dry_run:
shutil.rmtree(level1_path)
| import os
import shutil
# root_path = '../from_1691'
root_path = 'C:/Users/koyou/Desktop/test'
# 실수할 수도 있으므로 dry_run 을 설정해서 로그만 찍을 것인지
# 실제 작동도 진행할 것인지 결정한다.
# dry_run = True
dry_run = False
def move_directory(input_directory_path, output_directory_path):
print("moving %s to %s" % (input_directory_path, output_directory_path))
if not dry_run:
shutil.move(input_directory_path, output_directory_path)
#
# main
#
print("Root dir is %s" % root_path)
for level1 in os.listdir(root_path): # level1 == test1
level1_path = os.path.join(root_path, level1)
if os.path.isdir(level1_path):
# 디렉토리 이름을 출력해줘야 진행상황 알 수 있음
print("> %s" % level1)
for level2 in os.listdir(level1_path): # level2 == test1-1
level2_path = os.path.join(level1_path, level2)
if os.path.isdir(level2_path):
# level2 이름 출력
print(">> %s" % level2)
move_directory(level2_path, root_path)
# 2. deleting dir
print("Deleting %s" % level1_path)
if not dry_run:
shutil.rmtree(level1_path)
| [
0,
1,
2,
3,
5
] |
917 | c85d7e799a652e82bfaf58e1e8bfa9c4606a8ecb | <mask token>
| <mask token>
def get_config(path_to_config: str) ->Dict[str, Any]:
"""Get config.
Args:
path_to_config (str): Path to config.
Returns:
Dict[str, Any]: Config.
"""
with open(path_to_config, mode='r') as fp:
config = yaml.safe_load(fp)
if 'experiment_name' not in config:
config['experiment_name'] = 'model'
config['path_to_save_folder'] = (Path(config['path_to_save_folder']) /
f"{config['experiment_name']}_{datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')}"
)
config['path_to_config'] = path_to_config
config['path_to_save_model'] = config['path_to_save_folder'
] / 'model.joblib'
config['path_to_save_logfile'] = config['path_to_save_folder'
] / 'logging.txt'
config['path_to_save_target_names_mapping'] = config['path_to_save_folder'
] / 'target_names.json'
if 'tf-idf' not in config or config['tf-idf'] is None:
config['tf-idf'] = {}
if 'ngram_range' in config['tf-idf']:
config['tf-idf']['ngram_range'] = ast.literal_eval(config['tf-idf']
['ngram_range'])
if 'preprocessing' in config:
lemmatization = config['preprocessing']['lemmatization']
if lemmatization:
if lemmatization == 'pymorphy2':
lemmatizer = LemmatizerPymorphy2()
preprocessor = Preprocessor(lemmatizer)
config['tf-idf']['preprocessor'] = preprocessor
else:
raise KeyError(
f'Unknown lemmatizer {lemmatization}. Available lemmatizers: none, pymorphy2.'
)
if 'logreg' not in config or config['logreg'] is None:
config['logreg'] = {}
return config
| import ast
import datetime
from pathlib import Path
from typing import Any, Dict
import yaml
from .lemmatizer import LemmatizerPymorphy2, Preprocessor
def get_config(path_to_config: str) ->Dict[str, Any]:
"""Get config.
Args:
path_to_config (str): Path to config.
Returns:
Dict[str, Any]: Config.
"""
with open(path_to_config, mode='r') as fp:
config = yaml.safe_load(fp)
if 'experiment_name' not in config:
config['experiment_name'] = 'model'
config['path_to_save_folder'] = (Path(config['path_to_save_folder']) /
f"{config['experiment_name']}_{datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')}"
)
config['path_to_config'] = path_to_config
config['path_to_save_model'] = config['path_to_save_folder'
] / 'model.joblib'
config['path_to_save_logfile'] = config['path_to_save_folder'
] / 'logging.txt'
config['path_to_save_target_names_mapping'] = config['path_to_save_folder'
] / 'target_names.json'
if 'tf-idf' not in config or config['tf-idf'] is None:
config['tf-idf'] = {}
if 'ngram_range' in config['tf-idf']:
config['tf-idf']['ngram_range'] = ast.literal_eval(config['tf-idf']
['ngram_range'])
if 'preprocessing' in config:
lemmatization = config['preprocessing']['lemmatization']
if lemmatization:
if lemmatization == 'pymorphy2':
lemmatizer = LemmatizerPymorphy2()
preprocessor = Preprocessor(lemmatizer)
config['tf-idf']['preprocessor'] = preprocessor
else:
raise KeyError(
f'Unknown lemmatizer {lemmatization}. Available lemmatizers: none, pymorphy2.'
)
if 'logreg' not in config or config['logreg'] is None:
config['logreg'] = {}
return config
| import ast
import datetime
from pathlib import Path
from typing import Any, Dict
import yaml
from .lemmatizer import LemmatizerPymorphy2, Preprocessor
def get_config(path_to_config: str) -> Dict[str, Any]:
"""Get config.
Args:
path_to_config (str): Path to config.
Returns:
Dict[str, Any]: Config.
"""
with open(path_to_config, mode="r") as fp:
config = yaml.safe_load(fp)
# backward compatibility
if "experiment_name" not in config:
config["experiment_name"] = "model"
config["path_to_save_folder"] = (
Path(config["path_to_save_folder"])
/ f"{config['experiment_name']}_{datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')}"
)
config["path_to_config"] = path_to_config
config["path_to_save_model"] = config["path_to_save_folder"] / "model.joblib"
config["path_to_save_logfile"] = config["path_to_save_folder"] / "logging.txt"
config["path_to_save_target_names_mapping"] = (
config["path_to_save_folder"] / "target_names.json"
)
# tf-idf
if ("tf-idf" not in config) or (config["tf-idf"] is None):
config["tf-idf"] = {}
if "ngram_range" in config["tf-idf"]:
config["tf-idf"]["ngram_range"] = ast.literal_eval(
config["tf-idf"]["ngram_range"]
)
if "preprocessing" in config: # backward compatibility
lemmatization = config["preprocessing"]["lemmatization"]
if lemmatization:
if lemmatization == "pymorphy2":
lemmatizer = LemmatizerPymorphy2()
preprocessor = Preprocessor(lemmatizer)
config["tf-idf"]["preprocessor"] = preprocessor
else:
raise KeyError(
f"Unknown lemmatizer {lemmatization}. Available lemmatizers: none, pymorphy2."
)
# logreg
if ("logreg" not in config) or (config["logreg"] is None):
config["logreg"] = {}
return config
| null | [
0,
1,
2,
3
] |
918 | 73ff1444b5ab1469b616fe449ee6ab93acbbf85a | import time
from PyQt5.QtCore import *
from PyQt5.QtGui import *
from PyQt5.QtSql import *
from PyQt5.QtWidgets import *
from qgis.core import QgsFeature, QgsGeometry, QgsProject
from shapely import wkb
print(__name__)
# Function definition
def TicTocGenerator():
# Generator that returns time differences
ti = 0 # initial time
tf = time.time() # final time
while True:
ti = tf
tf = time.time()
yield tf - ti # returns the time difference
def toc(tempBool=True):
# Prints the time difference yielded by generator instance TicToc
tempTimeInterval = next(TicToc)
if tempBool:
print("Elapsed time: %f seconds.\n" % tempTimeInterval)
def tic():
# Records a time in TicToc, marks the beginning of a time interval
toc(False)
def removeRoutesLayers():
layers = QgsProject.instance().mapLayers()
for layer_id, layer in layers.items():
if str(layer.name()) != "model_graph" and str(layer.name()) != "emme_zones" and str(layer.name()) != "labels" \
and str(layer.name()) != "OpenStreetMap" and str(layer.name()) != "all_results" and str(
layer.name()) != "Centroider" and str(layer.name()) != "dijk_result_table" and str(
layer.name()) != "ata_lid" and str(layer.name()) != "result_table":
QgsProject.instance().removeMapLayer(layer.id())
# Prints a route set based on whats in result_table.
def printRoutes():
i = 1
# WHERE rejoin_link=0 insert into to print
query = db.exec_("SELECT MAX(did) FROM result_table")
query.next()
print(query.value(0))
nr_routes = query.value(0)
lid_list_q = db.exec_("SELECT result_table.lid, foo.count FROM result_table INNER JOIN (SELECT count(*), lid \
FROM result_table WHERE not did=(-1) group by lid) as foo ON(result_table.lid = foo.lid) WHERE not did=(-1) \
group by result_table.lid, foo.count")
lid_list = []
lid_count = []
while lid_list_q.next():
lid_list.append(lid_list_q.value(0))
lid_count.append(lid_list_q.value(1))
# Källa https://sashat.me/2017/01/11/list-of-20-simple-distinct-colors/
color_list = [QColor.fromRgb(128, 0, 0), QColor.fromRgb(170, 10, 40), QColor.fromRgb(128, 128, 0),
QColor.fromRgb(0, 128, 128), QColor.fromRgb(0, 0, 128), QColor.fromRgb(0, 0, 0),
QColor.fromRgb(230, 25, 75), QColor.fromRgb(245, 130, 48), QColor.fromRgb(255, 255, 25),
QColor.fromRgb(210, 245, 60), QColor.fromRgb(60, 180, 75), QColor.fromRgb(70, 240, 240),
QColor.fromRgb(0, 130, 200), QColor.fromRgb(145, 30, 180), QColor.fromRgb(240, 50, 230),
QColor.fromRgb(128, 128, 128), QColor.fromRgb(250, 190, 190), QColor.fromRgb(255, 215, 180),
QColor.fromRgb(255, 250, 200), QColor.fromRgb(170, 255, 195)]
bad_color = ['Maroon', 'Magenta', 'Olive', 'Orange', 'Navy', 'Black', 'Red', 'Teal', 'Blue', 'Lime', 'Cyan', 'Green'
, 'Brown', 'Purple', 'Yellow', 'Grey', 'Pink', 'Apricot', 'Beige', 'Mint', 'Lavender']
lid_c = []
# while lid_query.next():
while i <= nr_routes:
dummy_q = db.exec_(
"SELECT did, lid, ST_astext(geom) as geom FROM result_table WHERE not did=(-1) and result_table.did =" + str(
i))
layert = QgsVectorLayer("MultiLineString?crs=epsg:3006", " route " + str(i), "memory")
QgsProject.instance().addMapLayer(layert)
featurelist = []
while dummy_q.next():
lid = dummy_q.value(1)
seg = QgsFeature()
j = 0
while j < len(lid_list):
if lid == lid_list[j]:
lid_nr = j
j += 1
# print("lid nr is:"+str(lid_nr)+ " lid is :"+str(lid_list[lid_nr])+" lid count is:"+str(lid_count[lid_nr]))
nr_included = 0
dummy = 0
j = 0
while j < len(lid_c):
if lid == lid_c[j]:
nr_included += 1
j += 1
# if dummy < nr_included:
# dummy = nr_included
lid_c.append(lid)
if lid_count[lid_nr] == 1:
offset = 0
else:
if lid_count[lid_nr] % 2 == 0:
# Even
off = (-lid_count[lid_nr] / 2) + nr_included
if off == 0:
offset = ((-lid_count[lid_nr] / 2) + nr_included + 1) * 200
else:
<<<<<<< HEAD
offset = ((-lid_count[lid_nr]/2) + nr_included)*200
=======
offset = ((-lid_count[lid_nr] / 2) + nr_included) * 80
>>>>>>> b69948db6665ed5f30d2925c9356500bdac0da03
else:
# Odd
print("odd value is :", (-lid_count[lid_nr] / 2) + nr_included)
print("odd value rounded is :", int((-lid_count[lid_nr] / 2) + nr_included))
<<<<<<< HEAD
offset = int(((-lid_count[lid_nr]/2) + nr_included)*200)
print("odd ",offset)
=======
offset = int(((-lid_count[lid_nr] / 2) + nr_included) * 80)
print("odd ", offset)
>>>>>>> b69948db6665ed5f30d2925c9356500bdac0da03
seg.setGeometry(QgsGeometry.fromWkt(dummy_q.value(2)).offsetCurve(offset, 1, 1, 2.0))
featurelist.append(seg)
symbol = QgsLineSymbol.createSimple({'color': bad_color[i], 'width': '0.4'})
renderers = layert.renderer()
renderers.setSymbol(symbol.clone())
qgis.utils.iface.layerTreeView().refreshLayerSymbology(layert.id())
single_symbol_renderer = layert.renderer()
symbol = single_symbol_renderer.symbol()
symbol.setWidth(0.8)
layert.dataProvider().addFeatures(featurelist)
layert.triggerRepaint()
i += 1
print("route nr", i - 1)
print("nr included max ", dummy)
# Start node
start_q = db.exec_("SELECT lid, ST_astext(ST_PointN(the_geom,1)) AS start \
FROM (SELECT lid, (ST_Dump(geom)).geom As the_geom \
FROM result_table WHERE did=1 and path_seq=1) As foo")
start_q.next()
layer = QgsVectorLayer('Point?crs=epsg:3006', 'Start', 'memory')
# Set the provider to accept the data source
prov = layer.dataProvider()
# Add a new feature and assign the geometry
feat = QgsFeature()
feat.setGeometry(QgsGeometry.fromWkt(start_q.value(1)))
prov.addFeatures([feat])
# Update extent of the layer
layer.updateExtents()
# Add the layer to the Layers panel
QgsProject.instance().addMapLayer(layer)
single_symbol_renderer = layer.renderer()
symbol1 = single_symbol_renderer.symbol()
symbol1.setColor(QColor.fromRgb(0, 225, 0))
symbol1.setSize(3)
# more efficient than refreshing the whole canvas, which requires a redraw of ALL layers
layer.triggerRepaint()
# update legend for layer
qgis.utils.iface.layerTreeView().refreshLayerSymbology(layer.id())
# End Node
end_q = db.exec_("SELECT lid, ST_astext(ST_PointN(the_geom,-1)) AS start FROM (SELECT lid, (ST_Dump(geom)).geom As the_geom \
FROM result_table WHERE path_seq = (SELECT max(path_seq) FROM result_table WHERE did=1) and did=1) AS foo")
end_q.next()
layere = QgsVectorLayer('Point?crs=epsg:3006', 'End', 'memory')
# Set the provider to accept the data source
prov = layere.dataProvider()
# Add a new feature and assign the geometry
feat = QgsFeature()
feat.setGeometry(QgsGeometry.fromWkt(end_q.value(1)))
prov.addFeatures([feat])
# Update extent of the layer
layere.updateExtents()
# Add the layer to the Layers panel
QgsProject.instance().addMapLayer(layere)
single_symbol_renderer = layere.renderer()
symbol = single_symbol_renderer.symbol()
symbol.setColor(QColor.fromRgb(255, 0, 0))
symbol.setSize(3)
layere.triggerRepaint()
qgis.utils.iface.layerTreeView().refreshLayerSymbology(layere.id())
def printRoutesRejoin():
i = 1
# WHERE rejoin_link=0 insert into to print
query = db.exec_("SELECT MAX(did) FROM result_table")
query.next()
nr_routes = query.value(0)
# Källa https://sashat.me/2017/01/11/list-of-20-simple-distinct-colors/
color_list = [QColor.fromRgb(128, 0, 0), QColor.fromRgb(170, 10, 40), QColor.fromRgb(128, 128, 0),
QColor.fromRgb(0, 128, 128), QColor.fromRgb(0, 0, 128), QColor.fromRgb(0, 0, 0),
QColor.fromRgb(230, 25, 75), QColor.fromRgb(245, 130, 48), QColor.fromRgb(255, 255, 25),
QColor.fromRgb(210, 245, 60), QColor.fromRgb(60, 180, 75), QColor.fromRgb(70, 240, 240),
QColor.fromRgb(0, 130, 200), QColor.fromRgb(145, 30, 180), QColor.fromRgb(240, 50, 230),
QColor.fromRgb(128, 128, 128), QColor.fromRgb(250, 190, 190), QColor.fromRgb(255, 215, 180),
QColor.fromRgb(255, 250, 200), QColor.fromRgb(170, 255, 195)]
bad_color = ['Maroon', 'Magenta', 'Olive', 'Orange', 'Navy', 'Black', 'Red', 'Teal', 'Blue', 'Lime', 'Cyan', 'Green'
, 'Brown', 'Purple', 'Yellow', 'Grey', 'Pink', 'Apricot', 'Beige', 'Mint', 'Lavender']
while i <= nr_routes:
# Routes without offset
sqlcall = "(select lid, did, geom from result_table where lid in (select lid from result_table group by lid having \
count(*) = 1) and did =" + str(i) + " and rejoin_link=0 group by lid, did, geom ORDER BY lid, did)"
uri.setDataSource("", sqlcall, "geom", "", "lid")
layert = QgsVectorLayer(uri.uri(), " route " + str(i), "postgres")
QgsProject.instance().addMapLayer(layert)
symbol = QgsLineSymbol.createSimple({'color': bad_color[i],
'width': '0.6',
'offset': '0'})
renderers = layert.renderer()
renderers.setSymbol(symbol.clone())
qgis.utils.iface.layerTreeView().refreshLayerSymbology(layert.id())
# single_symbol_renderer = layert.renderer()
# symbol = single_symbol_renderer.symbol()
# symbol.setWidth(0.8)
# Routes in need of offset
sqlcall = "(select lid, did, geom from result_table where lid in (select lid from result_table \
group by lid having count(*) > 1) and did=" + str(
i) + " and rejoin_link=0 group by lid, did, geom ORDER BY lid, did)"
uri.setDataSource("", sqlcall, "geom", "", "lid")
layert = QgsVectorLayer(uri.uri(), " route " + str(i), "postgres")
QgsProject.instance().addMapLayer(layert)
if i == 1:
offset = 0
else:
offset = i - i * 0.7
print("i is " + str(i) + " and offset is:" + str(offset))
symbol = QgsLineSymbol.createSimple({'color': bad_color[i],
'width': '0.4',
'offset': str(offset)})
renderers = layert.renderer()
renderers.setSymbol(symbol.clone())
qgis.utils.iface.layerTreeView().refreshLayerSymbology(layert.id())
# single_symbol_renderer = layert.renderer()
# symbol = single_symbol_renderer.symbol()
# symbol.setWidth(0.8)
#
# if i < len(color_list):
# symbol.setColor(color_list[i])
# layert.triggerRepaint()
# qgis.utils.iface.layerTreeView().refreshLayerSymbology(layert.id())
#
i = i + 1
# Start node
start_q = db.exec_("SELECT lid, ST_astext(ST_PointN(the_geom,1)) AS start \
FROM (SELECT lid, (ST_Dump(geom)).geom As the_geom \
FROM result_table WHERE did=1 and path_seq=1) As foo")
start_q.next()
layer = QgsVectorLayer('Point?crs=epsg:3006', 'Start', 'memory')
# Set the provider to accept the data source
prov = layer.dataProvider()
# Add a new feature and assign the geometry
feat = QgsFeature()
feat.setGeometry(QgsGeometry.fromWkt(start_q.value(1)))
prov.addFeatures([feat])
# Update extent of the layer
layer.updateExtents()
# Add the layer to the Layers panel
QgsProject.instance().addMapLayer(layer)
single_symbol_renderer = layer.renderer()
symbol = single_symbol_renderer.symbol()
symbol.setColor(QColor.fromRgb(0, 225, 0))
symbol.setSize(3)
# more efficient than refreshing the whole canvas, which requires a redraw of ALL layers
layer.triggerRepaint()
# update legend for layer
qgis.utils.iface.layerTreeView().refreshLayerSymbology(layer.id())
# End Node
end_q = db.exec_("SELECT lid, ST_astext(ST_PointN(the_geom,-1)) AS start FROM (SELECT lid, (ST_Dump(geom)).geom As the_geom \
FROM result_table WHERE path_seq = (SELECT max(path_seq) FROM result_table WHERE did=1) and did=1) AS foo")
end_q.next()
layere = QgsVectorLayer('Point?crs=epsg:3006', 'END', 'memory')
# Set the provider to accept the data source
prov = layere.dataProvider()
# Add a new feature and assign the geometry
feat = QgsFeature()
feat.setGeometry(QgsGeometry.fromWkt(end_q.value(1)))
prov.addFeatures([feat])
# Update extent of the layer
layere.updateExtents()
# Add the layer to the Layers panel
QgsProject.instance().addMapLayer(layere)
single_symbol_renderer = layere.renderer()
symbol = single_symbol_renderer.symbol()
symbol.setColor(QColor.fromRgb(255, 0, 0))
symbol.setSize(3)
layer.triggerRepaint()
qgis.utils.iface.layerTreeView().refreshLayerSymbology(layere.id())
# det jag behöver få från databasen start_list, end_list, lids
def print_selected_pairs():
# Removes layers not specified in removeRoutesLayers
removeRoutesLayers()
# Get list and removed lids
lids = []
temp_query1 = db.exec_("SELECT * FROM removed_lids")
while temp_query1.next():
lids.append(temp_query1.value(0))
temp_query2 = db.exec_("SELECT DISTINCT start_zone AS start_zones, end_zone AS end_zones FROM all_results")
start_list = []
end_list = []
while temp_query2.next():
start_list.append(temp_query2.value(0))
end_list.append(temp_query2.value(1))
# first it creates neccessary db-tables for visualization of the OD-pairs in star_list and end_list
# Create OD_lines table
db.exec_("DROP table if exists OD_lines")
db.exec_("SELECT ST_MakeLine(ST_Centroid(geom) ORDER BY id) AS geom into od_lines "
"FROM emme_zones where id = " + str(start_list[0]) + " OR id = " + str(end_list[0]) + "")
# Create emme_result table
db.exec_("DROP table if exists emme_results")
db.exec_("SELECT 0.0 as alt_route_cost,* INTO emme_results FROM emme_zones")
i = 0
while i < len(start_list):
if i > 0:
db.exec_("INSERT INTO OD_lines(geom) SELECT ST_MakeLine(ST_Centroid(geom) ORDER BY id) "
"AS geom FROM emme_zones where id = " + str(start_list[i]) + " OR id = " + str(end_list[i]) + "")
result_test = odEffect(start_list[i], end_list[i], lids)
print("Result of " + str(i) + " is: " + str(result_test))
db.exec_(
"UPDATE emme_results SET alt_route_cost = " + str(result_test) + " WHERE id = '" + str(start_list[i]) + "'"
" OR id = '" + str(
end_list[i]) + "';")
i += 1
db.exec_("ALTER TABLE OD_lines ADD COLUMN id SERIAL PRIMARY KEY;")
sqlcall = "(SELECT * FROM emme_results)"
uri.setDataSource("", sqlcall, "geom", "", "id")
layer = QgsVectorLayer(uri.uri(), "result_deterioration ", "postgres")
QgsProject.instance().addMapLayer(layer)
values = (
('Not affected', -3, -3, QColor.fromRgb(0, 0, 200)),
('No route', -2, -2, QColor.fromRgb(0, 225, 200)),
('No route that is not affected', -1, -1, QColor.fromRgb(255, 0, 0)),
('Not searched', 0, 0, QColor.fromRgb(255, 255, 255)),
('Alternative route: 1-10 % deterioration', 0, 1.1, QColor.fromRgb(102, 255, 102)),
('Alternative route: 10-100 % deterioration', 1.1, 1000, QColor.fromRgb(255, 255, 0)),
)
# create a category for each item in values
ranges = []
for label, lower, upper, color in values:
symbol = QgsSymbol.defaultSymbol(layer.geometryType())
symbol.setColor(QColor(color))
rng = QgsRendererRange(lower, upper, symbol, label)
ranges.append(rng)
## create the renderer and assign it to a layer
expression = 'alt_route_cost' # field name
layer.setRenderer(QgsGraduatedSymbolRenderer(expression, ranges))
# iface.mapCanvas().refresh()
# Print lines from od_lines
sqlcall = "(SELECT * FROM od_lines )"
uri.setDataSource("", sqlcall, "geom", "", "id")
layert = QgsVectorLayer(uri.uri(), " OD_pairs ", "postgres")
QgsProject.instance().addMapLayer(layert)
# Ska hämtas från databasen list,removed_lids
def allToAll():
# Removes layers not specified in removeRoutesLayers
removeRoutesLayers()
# Get list and removed lids
removed_lids = []
temp_query1 = db.exec_("SELECT * FROM removed_lids")
while temp_query1.next():
removed_lids.append(temp_query1.value(0))
temp_query2 = db.exec_("SELECT DISTINCT start_zone AS start_zones FROM all_results")
list = []
while temp_query2.next():
list.append(temp_query2.value(0))
removed_lid_string = "( lid = " + str(removed_lids[0])
i = 1
while i < len(removed_lids):
removed_lid_string += " or lid =" + str(removed_lids[i])
i += 1
removed_lid_string += ")"
# Queryn skapar tabell för alla länkar som går igenom removed_lid
db.exec_("DROP TABLE IF EXIST temp_test")
db.exec_(
" select * into temp_test from all_results f where exists(select 1 from all_results l where " + removed_lid_string + " and"
" (f.start_zone = l.start_zone and f.end_zone = l.end_zone and f.did = l.did))")
# Här vill jag skapa nytt lager som visar intressanta saker för varje zon
# Create emme_result table
db.exec_("DROP table if exists emme_results")
db.exec_(
"SELECT 0 as nr_non_affected, 0 as nr_no_routes, 0 as nr_all_routes_affected, 0.0 as mean_deterioration, 0 as nr_pairs,* INTO emme_results FROM emme_zones")
i = 0
while i < len(list):
result = analysis_multiple_zones(list[i], list, removed_lids)
db.exec_("UPDATE emme_results SET nr_non_affected = " + str(result[0]) + " , nr_no_routes = " +
str(result[1]) + " , nr_all_routes_affected = " + str(result[2]) + " , mean_deterioration = " +
str(result[3]) + " , nr_pairs = " + str(result[4]) + " WHERE id = " +
str(list[i]) + ";")
i += 1
############################ Create layer for mean deterioration
sqlcall = "(SELECT * FROM emme_results)"
uri.setDataSource("", sqlcall, "geom", "", "id")
layer = QgsVectorLayer(uri.uri(), "mean_deterioration ", "postgres")
QgsProject.instance().addMapLayer(layer)
values = (
('Not searched', 0, 0, QColor.fromRgb(255, 255, 255)),
('No deterioration', -1, -1, QColor.fromRgb(153, 204, 255)),
('Mean deterioration 1-20% ', 0, 1.2, QColor.fromRgb(102, 255, 102)),
('Mean deterioration 20-30% ', 1.2, 1.3, QColor.fromRgb(255, 255, 153)),
('Mean deterioration 30-50% ', 1.3, 1.5, QColor.fromRgb(255, 178, 102)),
('Mean deterioration 50-100% ', 1.5, 100, QColor.fromRgb(255, 102, 102)),
)
# create a category for each item in values
ranges = []
for label, lower, upper, color in values:
symbol = QgsSymbol.defaultSymbol(layer.geometryType())
symbol.setColor(QColor(color))
rng = QgsRendererRange(lower, upper, symbol, label)
ranges.append(rng)
## create the renderer and assign it to a layer
expression = 'mean_deterioration' # field name
layer.setRenderer(QgsGraduatedSymbolRenderer(expression, ranges))
############################ Create layer for nr_affected OD-pairs
sqlcall = "(select CASE WHEN nr_pairs > 0 THEN cast((nr_pairs - nr_non_affected) as float)/nr_pairs " \
"ELSE 100 END as prop_affected,* from emme_results)"
uri.setDataSource("", sqlcall, "geom", "", "id")
layer = QgsVectorLayer(uri.uri(), "prop_affected ", "postgres")
QgsProject.instance().addMapLayer(layer)
values = (
('Not searched', 1, 100, QColor.fromRgb(255, 255, 255)),
('0% affected pairs', 0, 0, QColor.fromRgb(153, 204, 255)),
('1-20% affected pairs', 0, 0.2, QColor.fromRgb(102, 255, 102)),
('20-30% affected pairs', 0.2, 0.3, QColor.fromRgb(255, 255, 153)),
('30-50% affected pairs', 0.3, 0.5, QColor.fromRgb(255, 178, 102)),
('50-100% affected pairs', 0.5, 1, QColor.fromRgb(255, 102, 102)),
)
# create a category for each item in values
ranges = []
for label, lower, upper, color in values:
symbol = QgsSymbol.defaultSymbol(layer.geometryType())
symbol.setColor(QColor(color))
rng = QgsRendererRange(lower, upper, symbol, label)
ranges.append(rng)
## create the renderer and assign it to a layer
expression = 'prop_affected' # field name
layer.setRenderer(QgsGraduatedSymbolRenderer(expression, ranges))
def odEffect(start, end, lids):
start_zone = start
end_zone = end
removed_lid_string = "( lid = " + str(lids[0])
i = 1
while i < len(lids):
removed_lid_string += " or lid =" + str(lids[i])
i += 1
removed_lid_string += ")"
# Finding best, non-affected alternative route
query1 = db.exec_("SELECT MIN(did) FROM all_results WHERE"
" start_zone = " + str(start_zone) + " AND end_zone = " + str(end_zone) + " AND "
" did NOT IN (select did from all_results where start_zone = " + str(
start_zone) + " AND end_zone = " + str(end_zone) + " AND " + removed_lid_string + ")")
query1.next()
id_alt = str(query1.value(0))
# print("id_alt är: "+ id_alt)
if id_alt == "NULL":
# Either there's only one route in the route set or the route set is empty
query = db.exec_(
"SELECT MIN(did) FROM all_results where start_zone = " + str(start_zone) + " AND end_zone = " + str(
end_zone) + "")
query.next()
if query.value(0):
# There is no route that is not affected
return -1
else:
# There is no routes with that start and end zone
return -2;
elif id_alt == "1":
# print("Zon påverkas inte")
return -3
else:
# print("Zon påverkas och bästa id är:" + id_alt)
# Fetching cost of the optimal route and the alternative
query2 = db.exec_("SELECT sum(link_cost) from all_results where "
" (start_zone = " + str(start_zone) + " AND end_zone = " + str(end_zone) + ") AND "
"(did = 1 OR did = " + str(
id_alt) + ") group by did")
query2.next()
# Best cost
cost_opt = str(query2.value(0))
# Alternative cost
query2.next()
cost_alt = str(query2.value(0))
# Proportion of extra cost of alternative route in relation to opt route
# print("cost_opt = " + cost_opt + " and cost_alt = " + cost_alt)
return float(cost_alt) / float(cost_opt)
def analysis_multiple_zones(start_node, list, lids):
count3 = 0
count2 = 0
count1 = 0
count_detour = 0
sum_detour = 0
i = 0
while i < len(list):
if start_node != list[i]:
result_test = odEffect(start_node, list[i], lids)
if result_test == -3:
count3 += 1
elif result_test == -2:
count2 += 1
elif result_test == -1:
count1 += 1
else:
count_detour += 1
sum_detour += result_test
i = i + 1
if count_detour != 0:
mean_detour = sum_detour / count_detour
else:
mean_detour = -1
return [count3, count2, count1, mean_detour, i - 1]
# End of function definition
# Initialize TicToc function.
TicToc = TicTocGenerator()
# DATABASE CONNECTION ------------------------------------------------------
uri = QgsDataSourceUri()
# set host name, port, database name, username and password
uri.setConnection("localhost", "5432", "exjobb", "postgres", "password123")
print(uri.uri())
db = QSqlDatabase.addDatabase('QPSQL')
if db.isValid():
print("QPSQL db is valid")
db.setHostName(uri.host())
db.setDatabaseName(uri.database())
db.setPort(int(uri.port()))
db.setUserName(uri.username())
db.setPassword(uri.password())
# open (create) the connection
if db.open():
print("Opened %s" % uri.uri())
else:
err = db.lastError()
print(err.driverText())
# DATABASE CONNECTION COMPLETE ---------------------------------------------
def main():
tic()
if db.isValid:
removeRoutesLayers()
# Create layer for one route set (run routeSetGeneration before).
# printRoutes()
# printRoutesRejoin()
# Creates new visualisation layer for selected pairs (run selectedODResultTable before).
print_selected_pairs()
# All to all visualisation for all pairs in list (run AllToAllResultTable before).
# allToAll()
toc()
if __name__ == "__main__" or __name__ == "__console__":
main()
db.close()
| null | null | null | null | [
0
] |
919 | 4d9575c178b672815bb561116689b9b0721cb5ba | <mask token>
| <mask token>
if horast >= 41:
print('Valor a Pagar: ', resp3)
elif horast <= 40:
print('Valor a Pagar: ', resp4)
| <mask token>
horast = int(input('Horas Trabajadas: ' + '\n\t\t'))
tarifa = int(input('Tarifa por hora: ' + '\n\t\t'))
descu = int(input('Descuentos: ' + '\n\t\t'))
resp0 = horast - descu
resp1 = resp0 * tarifa / 2
resp2 = horast * tarifa + resp1
resp3 = resp2 - descu
resp4 = horast * tarifa
if horast >= 41:
print('Valor a Pagar: ', resp3)
elif horast <= 40:
print('Valor a Pagar: ', resp4)
| # -*- coding: utf-8 -*-
"""
Created on Thu Jun 25 15:14:15 2020
@author: luisa
"""
horast = int(input("Horas Trabajadas: "+"\n\t\t"))
tarifa = int(input("Tarifa por hora: "+"\n\t\t"))
descu = int(input("Descuentos: "+"\n\t\t"))
resp0 = horast - descu
resp1 = (resp0 * tarifa)/2
resp2 = (horast * tarifa) + resp1
resp3 = resp2 - descu
resp4 = horast * tarifa
if horast >= 41:
print("Valor a Pagar: ", resp3)
elif horast <= 40:
print("Valor a Pagar: ", resp4)
| null | [
0,
1,
2,
3
] |
920 | 8479c70fed36dc6f1e6094c832fb22d8c2e53e3a | <mask token>
class CommitAnalyzer:
<mask token>
def __init__(self, repo_path):
self.repo_path = repo_path
self.repo = Repo(self.repo_path)
assert not self.repo.bare
<mask token>
<mask token>
| <mask token>
class CommitAnalyzer:
"""
Takes path of the repo
"""
def __init__(self, repo_path):
self.repo_path = repo_path
self.repo = Repo(self.repo_path)
assert not self.repo.bare
def get_conflict_commits(self):
conflict_commits = []
current_date = datetime.now()
for commit in self.repo.iter_commits('master'):
parents = commit.parents
if len(parents) > 1 and 'conflict' in commit.message.lower(
) and '.java' in commit.message.lower():
conflict_commits.append(commit)
return conflict_commits
<mask token>
for commit in commit_analyzer.get_conflict_commits():
print(commit, time.asctime(time.gmtime(commit.committed_date)))
| <mask token>
class CommitAnalyzer:
"""
Takes path of the repo
"""
def __init__(self, repo_path):
self.repo_path = repo_path
self.repo = Repo(self.repo_path)
assert not self.repo.bare
def get_conflict_commits(self):
conflict_commits = []
current_date = datetime.now()
for commit in self.repo.iter_commits('master'):
parents = commit.parents
if len(parents) > 1 and 'conflict' in commit.message.lower(
) and '.java' in commit.message.lower():
conflict_commits.append(commit)
return conflict_commits
commit_analyzer = CommitAnalyzer(os.getcwd())
for commit in commit_analyzer.get_conflict_commits():
print(commit, time.asctime(time.gmtime(commit.committed_date)))
| import os
import time
from datetime import datetime, timedelta
from git import Repo
class CommitAnalyzer:
"""
Takes path of the repo
"""
def __init__(self, repo_path):
self.repo_path = repo_path
self.repo = Repo(self.repo_path)
assert not self.repo.bare
def get_conflict_commits(self):
conflict_commits = []
current_date = datetime.now()
for commit in self.repo.iter_commits('master'):
parents = commit.parents
if len(parents) > 1 and 'conflict' in commit.message.lower(
) and '.java' in commit.message.lower():
conflict_commits.append(commit)
return conflict_commits
commit_analyzer = CommitAnalyzer(os.getcwd())
for commit in commit_analyzer.get_conflict_commits():
print(commit, time.asctime(time.gmtime(commit.committed_date)))
| import os
import time
from datetime import datetime, timedelta
from git import Repo
class CommitAnalyzer():
"""
Takes path of the repo
"""
def __init__(self, repo_path):
self.repo_path = repo_path
self.repo = Repo(self.repo_path)
assert not self.repo.bare
def get_conflict_commits(self):
conflict_commits = []
current_date = datetime.now()
for commit in self.repo.iter_commits('master'):
parents = commit.parents
if len(parents) > 1 and "conflict" in commit.message.lower() and ".java" in commit.message.lower():
#if datetime.fromtimestamp(commit.committed_date) >= current_date - timedelta(5):
conflict_commits.append(commit)
return conflict_commits
#run script in cloned repo
commit_analyzer = CommitAnalyzer(os.getcwd())
for commit in commit_analyzer.get_conflict_commits():
print (commit, time.asctime(time.gmtime(commit.committed_date))) | [
2,
5,
6,
7,
8
] |
921 | a41d00c86d0bdab1bced77c275e56c3569af4f4e | <mask token>
class RestAdminAppConfig(AppConfig):
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
| <mask token>
class RestAdminAppConfig(AppConfig):
name = 'libraries.django_rest_admin'
verbose_name = 'Rest Admin'
loaded = False
def ready(self):
autodiscover()
<mask token>
| <mask token>
class RestAdminAppConfig(AppConfig):
name = 'libraries.django_rest_admin'
verbose_name = 'Rest Admin'
loaded = False
def ready(self):
autodiscover()
def autodiscover():
"""
Automatic discovering of rest_admin.py file inside apps.
similar to what Django admin does.
"""
from .register import rest_admin
if not RestAdminAppConfig.loaded:
for app in settings.INSTALLED_APPS:
try:
app_path = importlib.import_module(app).__path__
except AttributeError:
continue
if not importlib.find_loader('rest_admin', app_path):
continue
importlib.import_module('%s.rest_admin' % app)
RestAdminAppConfig.loaded = True
| from django.apps import AppConfig
from django.conf import settings
import importlib
import importlib.util
class RestAdminAppConfig(AppConfig):
name = 'libraries.django_rest_admin'
verbose_name = 'Rest Admin'
loaded = False
def ready(self):
autodiscover()
def autodiscover():
"""
Automatic discovering of rest_admin.py file inside apps.
similar to what Django admin does.
"""
from .register import rest_admin
if not RestAdminAppConfig.loaded:
for app in settings.INSTALLED_APPS:
try:
app_path = importlib.import_module(app).__path__
except AttributeError:
continue
if not importlib.find_loader('rest_admin', app_path):
continue
importlib.import_module('%s.rest_admin' % app)
RestAdminAppConfig.loaded = True
| from django.apps import AppConfig
from django.conf import settings
import importlib
import importlib.util
class RestAdminAppConfig(AppConfig):
name = 'libraries.django_rest_admin'
verbose_name = 'Rest Admin'
loaded = False
def ready(self):
autodiscover()
def autodiscover():
"""
Automatic discovering of rest_admin.py file inside apps.
similar to what Django admin does.
"""
from .register import rest_admin
if not RestAdminAppConfig.loaded:
for app in settings.INSTALLED_APPS:
# For each app, we need to look for an rest_admin.py inside that app's
# package. We can't use os.path here -- recall that modules may be
# imported different ways (think zip files) -- so we need to get
# the app's __path__ and look for rest_admin.py on that path.
# Step 1: find out the app's __path__ Import errors here will (and
# should) bubble up, but a missing __path__ (which is legal, but weird)
# fails silently -- apps that do weird things with __path__ might
# need to roll their own rest_admin registration.
try:
app_path = importlib.import_module(app).__path__
except AttributeError:
continue
# Step 2: use imp.find_module to find the app's rest_admin.py. For some
# reason imp.find_module raises ImportError if the app can't be found
# but doesn't actually try to import the module. So skip this app if
# its rest_admin.py doesn't exist
# try:
# importlib.util.find_spec('rest_admin', app_path)
# # imp.find_module('rest_admin', app_path)
# except ImportError:
# continue
#
if not importlib.find_loader('rest_admin', app_path):
continue
# Step 3: import the app's admin file. If this has errors we want them
# to bubble up.
importlib.import_module("%s.rest_admin" % app)
# autodiscover was successful, reset loading flag.
RestAdminAppConfig.loaded = True
| [
1,
3,
4,
5,
6
] |
922 | e0435b0b34fc011e7330ab8882865131f7f78882 | <mask token>
def test_keywordbid_rule_init(kwb_rule, account):
assert kwb_rule.get_max_bid_display() == kwb_rule.max_bid * 1000000
assert kwb_rule.get_bid_increase_percentage_display(
) == kwb_rule.bid_increase_percentage / 100
assert kwb_rule.get_target_bid_diff_display(
) == kwb_rule.target_bid_diff / 100
assert kwb_rule.account is account
assert kwb_rule.target_values == [1, 2, 3]
assert kwb_rule.get_target_type_display() in map(lambda t: t[1],
constants.KEYWORD_BID_TARGET_TYPES)
<mask token>
def test_map_keywordbid_rule(kwb_rule, account):
kwb_ent = controllers.keyword_bid_rule.map_keyword_bid_rule(kwb_rule)
assert isinstance(kwb_ent, entities.KeywordBidRule)
assert kwb_ent.account == account.id
for f in kwb_rule._meta.fields:
if f.name in ('id', 'title'):
continue
model_attr = getattr(kwb_rule, f.name)
ent_attr = getattr(kwb_ent, f.name)
if not hasattr(model_attr, 'pk'):
try:
assert ent_attr == getattr(kwb_rule, f'get_{f.name}_display')()
except AttributeError:
assert ent_attr == model_attr
else:
assert ent_attr == model_attr.id
<mask token>
def test_calculate_keyword_bids(yd_gateway, kwb_rule, keyword_bids,
keyword_bids_w_warnings):
url = f'{yd_gateway.get_api_url()}/{yd_gateway.endpoints.KEYWORD_BIDS}'
kwb_ent = controllers.keyword_bid_rule.map_keyword_bid_rule(kwb_rule)
with responses.RequestsMock() as mock:
mock.add(method='POST', url=url, status=200, json=keyword_bids)
mock.add(method='POST', url=url, status=200, json=
keyword_bids_w_warnings)
mock.add(method='POST', url=url, status=200, json={'error': {
'error_code': 0, 'error_message': 'oops!'}})
result = controllers.keyword_bids.calculate_keyword_bids(yd_gateway,
kwb_ent, selection_criteria={'CampaignIds': []})
assert len(result) == 1514
with pytest.raises(UnExpectedResult):
controllers.keyword_bids.calculate_keyword_bids(yd_gateway,
kwb_ent, selection_criteria={'CampaignIds': []})
| <mask token>
def test_keywordbid_rule_init(kwb_rule, account):
assert kwb_rule.get_max_bid_display() == kwb_rule.max_bid * 1000000
assert kwb_rule.get_bid_increase_percentage_display(
) == kwb_rule.bid_increase_percentage / 100
assert kwb_rule.get_target_bid_diff_display(
) == kwb_rule.target_bid_diff / 100
assert kwb_rule.account is account
assert kwb_rule.target_values == [1, 2, 3]
assert kwb_rule.get_target_type_display() in map(lambda t: t[1],
constants.KEYWORD_BID_TARGET_TYPES)
<mask token>
def test_map_keywordbid_rule(kwb_rule, account):
kwb_ent = controllers.keyword_bid_rule.map_keyword_bid_rule(kwb_rule)
assert isinstance(kwb_ent, entities.KeywordBidRule)
assert kwb_ent.account == account.id
for f in kwb_rule._meta.fields:
if f.name in ('id', 'title'):
continue
model_attr = getattr(kwb_rule, f.name)
ent_attr = getattr(kwb_ent, f.name)
if not hasattr(model_attr, 'pk'):
try:
assert ent_attr == getattr(kwb_rule, f'get_{f.name}_display')()
except AttributeError:
assert ent_attr == model_attr
else:
assert ent_attr == model_attr.id
<mask token>
def test_set_keyword_bids(yd_gateway, keyword_bids, keyword_bids_w_warnings):
url = f'{yd_gateway.get_api_url()}/{yd_gateway.endpoints.KEYWORD_BIDS}'
kwb = controllers.keyword_bids.map_keyword_bids(keyword_bids['result'][
'KeywordBids'])
with responses.RequestsMock() as mock:
mock.add(method='POST', url=url, status=200, json=
keyword_bids_w_warnings)
response = controllers.keyword_bids.set_keyword_bids(yd_gateway, kwb)
assert len(list(response)) == 1514
def test_calculate_keyword_bids(yd_gateway, kwb_rule, keyword_bids,
keyword_bids_w_warnings):
url = f'{yd_gateway.get_api_url()}/{yd_gateway.endpoints.KEYWORD_BIDS}'
kwb_ent = controllers.keyword_bid_rule.map_keyword_bid_rule(kwb_rule)
with responses.RequestsMock() as mock:
mock.add(method='POST', url=url, status=200, json=keyword_bids)
mock.add(method='POST', url=url, status=200, json=
keyword_bids_w_warnings)
mock.add(method='POST', url=url, status=200, json={'error': {
'error_code': 0, 'error_message': 'oops!'}})
result = controllers.keyword_bids.calculate_keyword_bids(yd_gateway,
kwb_ent, selection_criteria={'CampaignIds': []})
assert len(result) == 1514
with pytest.raises(UnExpectedResult):
controllers.keyword_bids.calculate_keyword_bids(yd_gateway,
kwb_ent, selection_criteria={'CampaignIds': []})
| <mask token>
def test_keywordbid_rule_init(kwb_rule, account):
assert kwb_rule.get_max_bid_display() == kwb_rule.max_bid * 1000000
assert kwb_rule.get_bid_increase_percentage_display(
) == kwb_rule.bid_increase_percentage / 100
assert kwb_rule.get_target_bid_diff_display(
) == kwb_rule.target_bid_diff / 100
assert kwb_rule.account is account
assert kwb_rule.target_values == [1, 2, 3]
assert kwb_rule.get_target_type_display() in map(lambda t: t[1],
constants.KEYWORD_BID_TARGET_TYPES)
def test_make_keywordbid_rule(kwb_rule):
kw_bid_rule = controllers.keyword_bid_rule.get_keywordbid_rule(kwb_rule.id)
assert kwb_rule == kw_bid_rule
assert kw_bid_rule.account == kwb_rule.account
not_found_kwb_rule = controllers.keyword_bid_rule.get_keywordbid_rule(0)
assert not_found_kwb_rule is None
def test_map_keywordbid_rule(kwb_rule, account):
kwb_ent = controllers.keyword_bid_rule.map_keyword_bid_rule(kwb_rule)
assert isinstance(kwb_ent, entities.KeywordBidRule)
assert kwb_ent.account == account.id
for f in kwb_rule._meta.fields:
if f.name in ('id', 'title'):
continue
model_attr = getattr(kwb_rule, f.name)
ent_attr = getattr(kwb_ent, f.name)
if not hasattr(model_attr, 'pk'):
try:
assert ent_attr == getattr(kwb_rule, f'get_{f.name}_display')()
except AttributeError:
assert ent_attr == model_attr
else:
assert ent_attr == model_attr.id
def test_get_keyword_bids(yd_gateway, keyword_bids):
url = f'{yd_gateway.get_api_url()}/{yd_gateway.endpoints.KEYWORD_BIDS}'
data = keyword_bids
with responses.RequestsMock() as mock:
mock.add(method='POST', url=url, status=200, json=data)
mock.add(method='POST', url=url, status=404)
mock.add(method='POST', url=url, status=200, json=data)
kwb = controllers.keyword_bids.get_keyword_bids(yd_gateway,
selection_criteria={'CampaignIds': []})
assert next(kwb).keyword_id == 13102117581
assert next(kwb).keyword_id == 13102117582
kwb = controllers.keyword_bids.get_keyword_bids(yd_gateway,
selection_criteria={'CampaignIds': []})
with pytest.raises(UnExpectedResult):
next(kwb)
kwb = controllers.keyword_bids.get_keyword_bids(yd_gateway,
selection_criteria={'CampaignIds': []})
assert type(next(kwb).as_dict()) is dict
def test_set_keyword_bids(yd_gateway, keyword_bids, keyword_bids_w_warnings):
url = f'{yd_gateway.get_api_url()}/{yd_gateway.endpoints.KEYWORD_BIDS}'
kwb = controllers.keyword_bids.map_keyword_bids(keyword_bids['result'][
'KeywordBids'])
with responses.RequestsMock() as mock:
mock.add(method='POST', url=url, status=200, json=
keyword_bids_w_warnings)
response = controllers.keyword_bids.set_keyword_bids(yd_gateway, kwb)
assert len(list(response)) == 1514
def test_calculate_keyword_bids(yd_gateway, kwb_rule, keyword_bids,
keyword_bids_w_warnings):
url = f'{yd_gateway.get_api_url()}/{yd_gateway.endpoints.KEYWORD_BIDS}'
kwb_ent = controllers.keyword_bid_rule.map_keyword_bid_rule(kwb_rule)
with responses.RequestsMock() as mock:
mock.add(method='POST', url=url, status=200, json=keyword_bids)
mock.add(method='POST', url=url, status=200, json=
keyword_bids_w_warnings)
mock.add(method='POST', url=url, status=200, json={'error': {
'error_code': 0, 'error_message': 'oops!'}})
result = controllers.keyword_bids.calculate_keyword_bids(yd_gateway,
kwb_ent, selection_criteria={'CampaignIds': []})
assert len(result) == 1514
with pytest.raises(UnExpectedResult):
controllers.keyword_bids.calculate_keyword_bids(yd_gateway,
kwb_ent, selection_criteria={'CampaignIds': []})
| import pytest
import responses
from auctioneer import constants, controllers, entities
from common.http import UnExpectedResult
def test_keywordbid_rule_init(kwb_rule, account):
assert kwb_rule.get_max_bid_display() == kwb_rule.max_bid * 1000000
assert kwb_rule.get_bid_increase_percentage_display(
) == kwb_rule.bid_increase_percentage / 100
assert kwb_rule.get_target_bid_diff_display(
) == kwb_rule.target_bid_diff / 100
assert kwb_rule.account is account
assert kwb_rule.target_values == [1, 2, 3]
assert kwb_rule.get_target_type_display() in map(lambda t: t[1],
constants.KEYWORD_BID_TARGET_TYPES)
def test_make_keywordbid_rule(kwb_rule):
kw_bid_rule = controllers.keyword_bid_rule.get_keywordbid_rule(kwb_rule.id)
assert kwb_rule == kw_bid_rule
assert kw_bid_rule.account == kwb_rule.account
not_found_kwb_rule = controllers.keyword_bid_rule.get_keywordbid_rule(0)
assert not_found_kwb_rule is None
def test_map_keywordbid_rule(kwb_rule, account):
kwb_ent = controllers.keyword_bid_rule.map_keyword_bid_rule(kwb_rule)
assert isinstance(kwb_ent, entities.KeywordBidRule)
assert kwb_ent.account == account.id
for f in kwb_rule._meta.fields:
if f.name in ('id', 'title'):
continue
model_attr = getattr(kwb_rule, f.name)
ent_attr = getattr(kwb_ent, f.name)
if not hasattr(model_attr, 'pk'):
try:
assert ent_attr == getattr(kwb_rule, f'get_{f.name}_display')()
except AttributeError:
assert ent_attr == model_attr
else:
assert ent_attr == model_attr.id
def test_get_keyword_bids(yd_gateway, keyword_bids):
url = f'{yd_gateway.get_api_url()}/{yd_gateway.endpoints.KEYWORD_BIDS}'
data = keyword_bids
with responses.RequestsMock() as mock:
mock.add(method='POST', url=url, status=200, json=data)
mock.add(method='POST', url=url, status=404)
mock.add(method='POST', url=url, status=200, json=data)
kwb = controllers.keyword_bids.get_keyword_bids(yd_gateway,
selection_criteria={'CampaignIds': []})
assert next(kwb).keyword_id == 13102117581
assert next(kwb).keyword_id == 13102117582
kwb = controllers.keyword_bids.get_keyword_bids(yd_gateway,
selection_criteria={'CampaignIds': []})
with pytest.raises(UnExpectedResult):
next(kwb)
kwb = controllers.keyword_bids.get_keyword_bids(yd_gateway,
selection_criteria={'CampaignIds': []})
assert type(next(kwb).as_dict()) is dict
def test_set_keyword_bids(yd_gateway, keyword_bids, keyword_bids_w_warnings):
url = f'{yd_gateway.get_api_url()}/{yd_gateway.endpoints.KEYWORD_BIDS}'
kwb = controllers.keyword_bids.map_keyword_bids(keyword_bids['result'][
'KeywordBids'])
with responses.RequestsMock() as mock:
mock.add(method='POST', url=url, status=200, json=
keyword_bids_w_warnings)
response = controllers.keyword_bids.set_keyword_bids(yd_gateway, kwb)
assert len(list(response)) == 1514
def test_calculate_keyword_bids(yd_gateway, kwb_rule, keyword_bids,
keyword_bids_w_warnings):
url = f'{yd_gateway.get_api_url()}/{yd_gateway.endpoints.KEYWORD_BIDS}'
kwb_ent = controllers.keyword_bid_rule.map_keyword_bid_rule(kwb_rule)
with responses.RequestsMock() as mock:
mock.add(method='POST', url=url, status=200, json=keyword_bids)
mock.add(method='POST', url=url, status=200, json=
keyword_bids_w_warnings)
mock.add(method='POST', url=url, status=200, json={'error': {
'error_code': 0, 'error_message': 'oops!'}})
result = controllers.keyword_bids.calculate_keyword_bids(yd_gateway,
kwb_ent, selection_criteria={'CampaignIds': []})
assert len(result) == 1514
with pytest.raises(UnExpectedResult):
controllers.keyword_bids.calculate_keyword_bids(yd_gateway,
kwb_ent, selection_criteria={'CampaignIds': []})
| import pytest
import responses
from auctioneer import constants, controllers, entities
from common.http import UnExpectedResult
def test_keywordbid_rule_init(kwb_rule, account):
assert kwb_rule.get_max_bid_display() == kwb_rule.max_bid * 1_000_000
assert kwb_rule.get_bid_increase_percentage_display() == kwb_rule.bid_increase_percentage / 100
assert kwb_rule.get_target_bid_diff_display() == kwb_rule.target_bid_diff / 100
assert kwb_rule.account is account
assert kwb_rule.target_values == [1,2,3]
assert kwb_rule.get_target_type_display() in map(lambda t: t[1], constants.KEYWORD_BID_TARGET_TYPES)
def test_make_keywordbid_rule(kwb_rule):
kw_bid_rule = controllers.keyword_bid_rule.get_keywordbid_rule(kwb_rule.id)
assert kwb_rule == kw_bid_rule
assert kw_bid_rule.account == kwb_rule.account
not_found_kwb_rule = controllers.keyword_bid_rule.get_keywordbid_rule(0)
assert not_found_kwb_rule is None
def test_map_keywordbid_rule(kwb_rule, account):
kwb_ent = controllers.keyword_bid_rule.map_keyword_bid_rule(kwb_rule)
assert isinstance(kwb_ent, entities.KeywordBidRule)
assert kwb_ent.account == account.id
for f in kwb_rule._meta.fields:
if f.name in ('id', 'title') :
continue
model_attr = getattr(kwb_rule, f.name)
ent_attr = getattr(kwb_ent, f.name)
if not hasattr(model_attr, 'pk'):
try:
assert ent_attr == getattr(kwb_rule, f'get_{f.name}_display')()
except AttributeError:
assert ent_attr == model_attr
else:
assert ent_attr == model_attr.id
def test_get_keyword_bids(yd_gateway, keyword_bids):
url = f'{yd_gateway.get_api_url()}/{yd_gateway.endpoints.KEYWORD_BIDS}'
data = keyword_bids
with responses.RequestsMock() as mock:
mock.add(method='POST', url=url, status=200, json=data)
mock.add(method='POST', url=url, status=404)
mock.add(method='POST', url=url, status=200, json=data)
kwb = controllers.keyword_bids.get_keyword_bids(yd_gateway, selection_criteria={"CampaignIds": []})
assert next(kwb).keyword_id == 13102117581
assert next(kwb).keyword_id == 13102117582
kwb = controllers.keyword_bids.get_keyword_bids(yd_gateway, selection_criteria={"CampaignIds": []})
with pytest.raises(UnExpectedResult):
next(kwb)
kwb = controllers.keyword_bids.get_keyword_bids(yd_gateway, selection_criteria={"CampaignIds": []})
assert type(next(kwb).as_dict()) is dict
def test_set_keyword_bids(yd_gateway, keyword_bids, keyword_bids_w_warnings):
url = f'{yd_gateway.get_api_url()}/{yd_gateway.endpoints.KEYWORD_BIDS}'
kwb = controllers.keyword_bids.map_keyword_bids(keyword_bids['result']['KeywordBids'])
with responses.RequestsMock() as mock:
mock.add(method='POST', url=url, status=200, json=keyword_bids_w_warnings)
response = controllers.keyword_bids.set_keyword_bids(yd_gateway, kwb)
assert len(list(response)) == 1514
def test_calculate_keyword_bids(yd_gateway, kwb_rule, keyword_bids, keyword_bids_w_warnings):
url = f'{yd_gateway.get_api_url()}/{yd_gateway.endpoints.KEYWORD_BIDS}'
kwb_ent = controllers.keyword_bid_rule.map_keyword_bid_rule(kwb_rule)
with responses.RequestsMock() as mock:
mock.add(method='POST', url=url, status=200, json=keyword_bids)
mock.add(method='POST', url=url, status=200, json=keyword_bids_w_warnings)
mock.add(method='POST', url=url, status=200, json={'error': {'error_code': 0000, 'error_message': 'oops!'}})
result = controllers.keyword_bids.calculate_keyword_bids(yd_gateway, kwb_ent,
selection_criteria={"CampaignIds": []})
assert len(result) == 1514
with pytest.raises(UnExpectedResult):
controllers.keyword_bids.calculate_keyword_bids(yd_gateway, kwb_ent,
selection_criteria={"CampaignIds": []})
| [
3,
4,
6,
7,
8
] |
923 | 66904cbe3e57d9cc1ee385cd8a4c1ba3767626bd | <mask token>
| <mask token>
mpl.style.use('classic')
<mask token>
ax1.plot(chi2, color='r', linestyle='--', linewidth=2, markersize=5, label=
'$\\chi^B_2$')
ax1.axis([0, 300, -0.05, 0.2])
ax1.set_xlabel('$T\\,[\\mathrm{MeV}]$', fontsize=15, color='black')
ax1.set_ylabel('$\\chi_2$', fontsize=15, color='black')
for label in ax1.xaxis.get_ticklabels():
label.set_fontsize(10)
for label in ax1.yaxis.get_ticklabels():
label.set_fontsize(10)
<mask token>
ax2.plot(chi4, color='k', linestyle='-', linewidth=2, markersize=5, label=
'$\\chi^B_4$')
ax2.axis([0, 300, -0.15, 0.2])
ax2.set_xlabel('$T\\,[\\mathrm{MeV}]$', fontsize=15, color='black')
ax2.set_ylabel('$\\chi_4$', fontsize=15, color='black')
ax2.legend(loc=0, fontsize=7.3, frameon=False, shadow=True, handlelength=
3.0, borderpad=0.5, borderaxespad=1)
for label in ax2.xaxis.get_ticklabels():
label.set_fontsize(10)
for label in ax2.yaxis.get_ticklabels():
label.set_fontsize(10)
fig.subplots_adjust(top=0.9, bottom=0.15, left=0.1, right=0.95, hspace=0.35,
wspace=0.2)
fig.savefig('chi.pdf')
| <mask token>
mpl.style.use('classic')
chi2 = np.loadtxt('Lam0/buffer/chi2.dat')
chi4 = np.loadtxt('Lam0/buffer/chi4.dat')
fig = plt.figure(figsize=(9, 3.5))
ax1 = fig.add_subplot(121)
ax1.plot(chi2, color='r', linestyle='--', linewidth=2, markersize=5, label=
'$\\chi^B_2$')
ax1.axis([0, 300, -0.05, 0.2])
ax1.set_xlabel('$T\\,[\\mathrm{MeV}]$', fontsize=15, color='black')
ax1.set_ylabel('$\\chi_2$', fontsize=15, color='black')
for label in ax1.xaxis.get_ticklabels():
label.set_fontsize(10)
for label in ax1.yaxis.get_ticklabels():
label.set_fontsize(10)
ax2 = fig.add_subplot(122)
ax2.plot(chi4, color='k', linestyle='-', linewidth=2, markersize=5, label=
'$\\chi^B_4$')
ax2.axis([0, 300, -0.15, 0.2])
ax2.set_xlabel('$T\\,[\\mathrm{MeV}]$', fontsize=15, color='black')
ax2.set_ylabel('$\\chi_4$', fontsize=15, color='black')
ax2.legend(loc=0, fontsize=7.3, frameon=False, shadow=True, handlelength=
3.0, borderpad=0.5, borderaxespad=1)
for label in ax2.xaxis.get_ticklabels():
label.set_fontsize(10)
for label in ax2.yaxis.get_ticklabels():
label.set_fontsize(10)
fig.subplots_adjust(top=0.9, bottom=0.15, left=0.1, right=0.95, hspace=0.35,
wspace=0.2)
fig.savefig('chi.pdf')
| import matplotlib.pyplot as plt
import numpy as np
from matplotlib.ticker import NullFormatter
import matplotlib.ticker as ticker
import matplotlib as mpl
mpl.style.use('classic')
chi2 = np.loadtxt('Lam0/buffer/chi2.dat')
chi4 = np.loadtxt('Lam0/buffer/chi4.dat')
fig = plt.figure(figsize=(9, 3.5))
ax1 = fig.add_subplot(121)
ax1.plot(chi2, color='r', linestyle='--', linewidth=2, markersize=5, label=
'$\\chi^B_2$')
ax1.axis([0, 300, -0.05, 0.2])
ax1.set_xlabel('$T\\,[\\mathrm{MeV}]$', fontsize=15, color='black')
ax1.set_ylabel('$\\chi_2$', fontsize=15, color='black')
for label in ax1.xaxis.get_ticklabels():
label.set_fontsize(10)
for label in ax1.yaxis.get_ticklabels():
label.set_fontsize(10)
ax2 = fig.add_subplot(122)
ax2.plot(chi4, color='k', linestyle='-', linewidth=2, markersize=5, label=
'$\\chi^B_4$')
ax2.axis([0, 300, -0.15, 0.2])
ax2.set_xlabel('$T\\,[\\mathrm{MeV}]$', fontsize=15, color='black')
ax2.set_ylabel('$\\chi_4$', fontsize=15, color='black')
ax2.legend(loc=0, fontsize=7.3, frameon=False, shadow=True, handlelength=
3.0, borderpad=0.5, borderaxespad=1)
for label in ax2.xaxis.get_ticklabels():
label.set_fontsize(10)
for label in ax2.yaxis.get_ticklabels():
label.set_fontsize(10)
fig.subplots_adjust(top=0.9, bottom=0.15, left=0.1, right=0.95, hspace=0.35,
wspace=0.2)
fig.savefig('chi.pdf')
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
# sphinx_gallery_thumbnail_number = 3
import matplotlib.pyplot as plt
import numpy as np
from matplotlib.ticker import NullFormatter # useful for `logit` scale
import matplotlib.ticker as ticker
import matplotlib as mpl
mpl.style.use('classic')
# Data for plotting
chi2=np.loadtxt(r'Lam0/buffer/chi2.dat')
chi4=np.loadtxt(r'Lam0/buffer/chi4.dat')
# Create figure
fig=plt.figure(figsize=(9, 3.5))
ax1=fig.add_subplot(121)
ax1.plot(chi2,color='r',linestyle='--',linewidth=2,markersize=5,label=r'$\chi^B_2$')
ax1.axis([0,300,-0.05,0.2])
ax1.set_xlabel('$T\,[\mathrm{MeV}]$', fontsize=15, color='black')
ax1.set_ylabel(r'$\chi_2$', fontsize=15, color='black')
for label in ax1.xaxis.get_ticklabels():
label.set_fontsize(10)
for label in ax1.yaxis.get_ticklabels():
label.set_fontsize(10)
# Plot two
ax2=fig.add_subplot(122)
ax2.plot(chi4,color='k',linestyle='-',linewidth=2,markersize=5,label=r'$\chi^B_4$')
ax2.axis([0,300,-0.15,0.2])
ax2.set_xlabel('$T\,[\mathrm{MeV}]$', fontsize=15, color='black')
ax2.set_ylabel(r'$\chi_4$', fontsize=15, color='black')
ax2.legend(loc=0,fontsize=7.3,frameon=False,shadow=True,handlelength=3.,borderpad=0.5,borderaxespad=1)
for label in ax2.xaxis.get_ticklabels():
label.set_fontsize(10)
for label in ax2.yaxis.get_ticklabels():
label.set_fontsize(10)
fig.subplots_adjust(top=0.9, bottom=0.15, left=0.1, right=0.95, hspace=0.35,
wspace=0.2)
fig.savefig("chi.pdf")
#plt.show()
| [
0,
1,
2,
3,
4
] |
924 | a01ca49c3fa8ea76de2880c1b04bf15ccd341edd | <mask token>
def klist(**kwargs):
kwargs.update({'teachers': [x for x in User.objects.filter(status=1) if
not x.is_demo()], 'admins': User.objects.filter(status=2)})
return kwargs
<mask token>
@must_be_admin
def account(request, account_id):
try:
acc = User.objects.get(id=int(account_id))
except:
raise Http404
class AccountBaseForm(forms.ModelForm):
class Meta:
model = User
fields = ['name', 'surname', 'number']
widgets = {'name': forms.TextInput(), 'surname': forms.TextInput()}
if request.method == 'POST':
form = AccountBaseForm(request.POST, instance=acc)
if form.is_valid():
form.save()
messages.add_message(request, messages.SUCCESS, u'Zapisano.')
else:
form = AccountBaseForm(instance=acc)
if acc.status != 0:
return render_to_response('radmin/manage_accounts_acc.html',
request, **klist(account=acc, selected_user_id=acc.id, form=form))
else:
return render_to_response('radmin/manage_accounts_students_acc.html',
request, account=acc, selected_user_id=acc.id, form=form, page=
Paginator(User.objects.filter(status=0).order_by('surname',
'name'), 30).page(1))
<mask token>
@must_be_admin
def view_students(request, page='1'):
page = int(page)
students = User.objects.filter(status=0).order_by('surname', 'name')
students = [x for x in students if not x.is_demo()]
p = Paginator(students, 30)
cpage = p.page(page)
return render_to_response('radmin/manage_accounts_students_list.html',
request, page=cpage)
| <mask token>
def klist(**kwargs):
kwargs.update({'teachers': [x for x in User.objects.filter(status=1) if
not x.is_demo()], 'admins': User.objects.filter(status=2)})
return kwargs
<mask token>
@must_be_admin
def account(request, account_id):
try:
acc = User.objects.get(id=int(account_id))
except:
raise Http404
class AccountBaseForm(forms.ModelForm):
class Meta:
model = User
fields = ['name', 'surname', 'number']
widgets = {'name': forms.TextInput(), 'surname': forms.TextInput()}
if request.method == 'POST':
form = AccountBaseForm(request.POST, instance=acc)
if form.is_valid():
form.save()
messages.add_message(request, messages.SUCCESS, u'Zapisano.')
else:
form = AccountBaseForm(instance=acc)
if acc.status != 0:
return render_to_response('radmin/manage_accounts_acc.html',
request, **klist(account=acc, selected_user_id=acc.id, form=form))
else:
return render_to_response('radmin/manage_accounts_students_acc.html',
request, account=acc, selected_user_id=acc.id, form=form, page=
Paginator(User.objects.filter(status=0).order_by('surname',
'name'), 30).page(1))
@must_be_admin
def reset_pwd(request, account_id):
if request.method != 'POST':
return HttpResponse(status=400)
try:
acc = User.objects.get(id=int(account_id))
except:
raise Http404
from random import choice
randompass = ''.join([choice('1234567890qwertyupasdfghjklzxcvbnm') for
i in range(7)])
acc.set_password(randompass)
messages.add_message(request, messages.SUCCESS, u'Nowe hasło to %s' % (
randompass,))
return redirect('/admin/accounts/%s/' % (acc.id,))
@must_be_admin
def su(request, account_id):
"""Login as this user"""
if request.method != 'POST':
return HttpResponse(status=400)
try:
acc = User.objects.get(id=int(account_id))
except:
raise Http404
request.logout()
request.login(acc.login)
messages.add_message(request, messages.SUCCESS, u'Zalogowano jako %s' %
(acc.login,))
return redirect('/')
@must_be_admin
def delete(request, account_id):
if request.method != 'POST':
return HttpResponse(status=400)
try:
acc = User.objects.get(id=int(account_id))
except:
raise Http404
if acc.login in ('[email protected]', '[email protected]',
'[email protected]'):
messages.add_message(request, messages.ERROR,
u'Nie można usunąć konta wbudowanego')
return redirect('/admin/accounts/%s/' % (acc.id,))
if acc.status == 1:
pass
messages.add_message(request, messages.SUCCESS,
u'Konto "%s %s" usunięte.' % (acc.name, acc.surname))
acc.delete()
return redirect('/admin/accounts/')
<mask token>
@must_be_admin
def view_students(request, page='1'):
page = int(page)
students = User.objects.filter(status=0).order_by('surname', 'name')
students = [x for x in students if not x.is_demo()]
p = Paginator(students, 30)
cpage = p.page(page)
return render_to_response('radmin/manage_accounts_students_list.html',
request, page=cpage)
| <mask token>
def klist(**kwargs):
kwargs.update({'teachers': [x for x in User.objects.filter(status=1) if
not x.is_demo()], 'admins': User.objects.filter(status=2)})
return kwargs
@must_be_admin
def list(request):
return render_to_response('radmin/manage_accounts_list.html', request,
**klist())
@must_be_admin
def account(request, account_id):
try:
acc = User.objects.get(id=int(account_id))
except:
raise Http404
class AccountBaseForm(forms.ModelForm):
class Meta:
model = User
fields = ['name', 'surname', 'number']
widgets = {'name': forms.TextInput(), 'surname': forms.TextInput()}
if request.method == 'POST':
form = AccountBaseForm(request.POST, instance=acc)
if form.is_valid():
form.save()
messages.add_message(request, messages.SUCCESS, u'Zapisano.')
else:
form = AccountBaseForm(instance=acc)
if acc.status != 0:
return render_to_response('radmin/manage_accounts_acc.html',
request, **klist(account=acc, selected_user_id=acc.id, form=form))
else:
return render_to_response('radmin/manage_accounts_students_acc.html',
request, account=acc, selected_user_id=acc.id, form=form, page=
Paginator(User.objects.filter(status=0).order_by('surname',
'name'), 30).page(1))
@must_be_admin
def reset_pwd(request, account_id):
if request.method != 'POST':
return HttpResponse(status=400)
try:
acc = User.objects.get(id=int(account_id))
except:
raise Http404
from random import choice
randompass = ''.join([choice('1234567890qwertyupasdfghjklzxcvbnm') for
i in range(7)])
acc.set_password(randompass)
messages.add_message(request, messages.SUCCESS, u'Nowe hasło to %s' % (
randompass,))
return redirect('/admin/accounts/%s/' % (acc.id,))
@must_be_admin
def su(request, account_id):
"""Login as this user"""
if request.method != 'POST':
return HttpResponse(status=400)
try:
acc = User.objects.get(id=int(account_id))
except:
raise Http404
request.logout()
request.login(acc.login)
messages.add_message(request, messages.SUCCESS, u'Zalogowano jako %s' %
(acc.login,))
return redirect('/')
@must_be_admin
def delete(request, account_id):
if request.method != 'POST':
return HttpResponse(status=400)
try:
acc = User.objects.get(id=int(account_id))
except:
raise Http404
if acc.login in ('[email protected]', '[email protected]',
'[email protected]'):
messages.add_message(request, messages.ERROR,
u'Nie można usunąć konta wbudowanego')
return redirect('/admin/accounts/%s/' % (acc.id,))
if acc.status == 1:
pass
messages.add_message(request, messages.SUCCESS,
u'Konto "%s %s" usunięte.' % (acc.name, acc.surname))
acc.delete()
return redirect('/admin/accounts/')
<mask token>
@must_be_admin
def view_students(request, page='1'):
page = int(page)
students = User.objects.filter(status=0).order_by('surname', 'name')
students = [x for x in students if not x.is_demo()]
p = Paginator(students, 30)
cpage = p.page(page)
return render_to_response('radmin/manage_accounts_students_list.html',
request, page=cpage)
| <mask token>
def klist(**kwargs):
kwargs.update({'teachers': [x for x in User.objects.filter(status=1) if
not x.is_demo()], 'admins': User.objects.filter(status=2)})
return kwargs
@must_be_admin
def list(request):
return render_to_response('radmin/manage_accounts_list.html', request,
**klist())
@must_be_admin
def account(request, account_id):
try:
acc = User.objects.get(id=int(account_id))
except:
raise Http404
class AccountBaseForm(forms.ModelForm):
class Meta:
model = User
fields = ['name', 'surname', 'number']
widgets = {'name': forms.TextInput(), 'surname': forms.TextInput()}
if request.method == 'POST':
form = AccountBaseForm(request.POST, instance=acc)
if form.is_valid():
form.save()
messages.add_message(request, messages.SUCCESS, u'Zapisano.')
else:
form = AccountBaseForm(instance=acc)
if acc.status != 0:
return render_to_response('radmin/manage_accounts_acc.html',
request, **klist(account=acc, selected_user_id=acc.id, form=form))
else:
return render_to_response('radmin/manage_accounts_students_acc.html',
request, account=acc, selected_user_id=acc.id, form=form, page=
Paginator(User.objects.filter(status=0).order_by('surname',
'name'), 30).page(1))
@must_be_admin
def reset_pwd(request, account_id):
if request.method != 'POST':
return HttpResponse(status=400)
try:
acc = User.objects.get(id=int(account_id))
except:
raise Http404
from random import choice
randompass = ''.join([choice('1234567890qwertyupasdfghjklzxcvbnm') for
i in range(7)])
acc.set_password(randompass)
messages.add_message(request, messages.SUCCESS, u'Nowe hasło to %s' % (
randompass,))
return redirect('/admin/accounts/%s/' % (acc.id,))
@must_be_admin
def su(request, account_id):
"""Login as this user"""
if request.method != 'POST':
return HttpResponse(status=400)
try:
acc = User.objects.get(id=int(account_id))
except:
raise Http404
request.logout()
request.login(acc.login)
messages.add_message(request, messages.SUCCESS, u'Zalogowano jako %s' %
(acc.login,))
return redirect('/')
@must_be_admin
def delete(request, account_id):
if request.method != 'POST':
return HttpResponse(status=400)
try:
acc = User.objects.get(id=int(account_id))
except:
raise Http404
if acc.login in ('[email protected]', '[email protected]',
'[email protected]'):
messages.add_message(request, messages.ERROR,
u'Nie można usunąć konta wbudowanego')
return redirect('/admin/accounts/%s/' % (acc.id,))
if acc.status == 1:
pass
messages.add_message(request, messages.SUCCESS,
u'Konto "%s %s" usunięte.' % (acc.name, acc.surname))
acc.delete()
return redirect('/admin/accounts/')
@must_be_admin
def create(request):
class NewAccountForm(forms.Form):
_CHOICE = (1, 'Nauczyciel'), (2, 'Adminstrator')
login = forms.EmailField(label=u'E-mail')
name = forms.CharField(label=u'Imię', required=False)
surname = forms.CharField(label=u'Nazwisko', required=False)
status = forms.ChoiceField(choices=_CHOICE, initial=1, label=u'Typ')
if request.method == 'POST':
form = NewAccountForm(request.POST)
if form.is_valid():
from random import choice
randompass = ''.join([choice(
'1234567890qwertyupasdfghjklzxcvbnm') for i in range(7)])
u = User(login=form.cleaned_data['login'], name=form.
cleaned_data['name'], surname=form.cleaned_data['surname'],
status=form.cleaned_data['status'])
u.save()
u.set_password(randompass)
messages.add_message(request, messages.SUCCESS,
u'Konto stworzone. Nowe hasło to %s' % (randompass,))
return redirect('/admin/accounts/%s/' % (u.id,))
else:
form = NewAccountForm()
return render_to_response('radmin/manage_accounts_add.html', request,
**klist(selected_user_id='create', form=form))
<mask token>
@must_be_admin
def view_students(request, page='1'):
page = int(page)
students = User.objects.filter(status=0).order_by('surname', 'name')
students = [x for x in students if not x.is_demo()]
p = Paginator(students, 30)
cpage = p.page(page)
return render_to_response('radmin/manage_accounts_students_list.html',
request, page=cpage)
| # coding=UTF-8
"""
View for managing accounts
"""
from django.contrib import messages
from django.http import Http404, HttpResponse
from django.shortcuts import redirect
from django import forms
from athena.core import render_to_response
from athena.users.models import User
from athena.users import must_be_admin
def klist(**kwargs):
kwargs.update({
'teachers': [x for x in User.objects.filter(status=1) if not x.is_demo()],
'admins': User.objects.filter(status=2),
})
return kwargs
@must_be_admin
def list(request):
return render_to_response('radmin/manage_accounts_list.html', request, **klist())
@must_be_admin
def account(request, account_id):
try:
acc = User.objects.get(id=int(account_id))
except:
raise Http404
class AccountBaseForm(forms.ModelForm):
class Meta:
model = User
fields = ['name', 'surname', 'number']
widgets = {
'name': forms.TextInput(),
'surname': forms.TextInput(),
}
if request.method == 'POST':
form = AccountBaseForm(request.POST, instance=acc)
if form.is_valid():
form.save()
messages.add_message(request, messages.SUCCESS, u'Zapisano.')
else:
form = AccountBaseForm(instance=acc)
if acc.status != 0:
return render_to_response('radmin/manage_accounts_acc.html', request, **klist(
account=acc,
selected_user_id=acc.id,
form=form))
else:
return render_to_response('radmin/manage_accounts_students_acc.html', request,
account=acc,
selected_user_id=acc.id,
form=form,
page=Paginator(User.objects.filter(status=0).order_by('surname', 'name'), 30).page(1))
@must_be_admin
def reset_pwd(request, account_id):
if request.method != 'POST':
return HttpResponse(status=400)
try:
acc = User.objects.get(id=int(account_id))
except:
raise Http404
from random import choice
randompass = ''.join([choice('1234567890qwertyupasdfghjklzxcvbnm') for i in range(7)])
acc.set_password(randompass)
messages.add_message(request, messages.SUCCESS, u'Nowe hasło to %s' % (randompass, ))
return redirect('/admin/accounts/%s/' % (acc.id, ))
@must_be_admin
def su(request, account_id):
"""Login as this user"""
if request.method != 'POST':
return HttpResponse(status=400)
try:
acc = User.objects.get(id=int(account_id))
except:
raise Http404
request.logout()
request.login(acc.login)
messages.add_message(request, messages.SUCCESS, u'Zalogowano jako %s' % (acc.login, ))
return redirect('/')
@must_be_admin
def delete(request, account_id):
if request.method != 'POST':
return HttpResponse(status=400)
try:
acc = User.objects.get(id=int(account_id))
except:
raise Http404
if acc.login in ('[email protected]', '[email protected]', '[email protected]'):
messages.add_message(request, messages.ERROR, u'Nie można usunąć konta wbudowanego')
return redirect('/admin/accounts/%s/' % (acc.id, ))
if acc.status == 1:
# This is a teacher. You should reparent all of it's tests
# and groups to user to [email protected]
pass
messages.add_message(request, messages.SUCCESS, u'Konto "%s %s" usunięte.' % (acc.name, acc.surname))
acc.delete()
return redirect('/admin/accounts/')
@must_be_admin
def create(request):
class NewAccountForm(forms.Form):
_CHOICE = ((1, 'Nauczyciel'), (2, 'Adminstrator'))
login = forms.EmailField(label=u'E-mail')
name = forms.CharField(label=u'Imię', required=False)
surname = forms.CharField(label=u'Nazwisko', required=False)
status = forms.ChoiceField(choices=_CHOICE, initial=1, label=u'Typ')
if request.method == 'POST':
form = NewAccountForm(request.POST)
if form.is_valid():
# grab a random password
from random import choice
randompass = ''.join([choice('1234567890qwertyupasdfghjklzxcvbnm') for i in range(7)])
u = User(login=form.cleaned_data['login'],
name=form.cleaned_data['name'],
surname=form.cleaned_data['surname'],
status=form.cleaned_data['status'])
u.save()
u.set_password(randompass)
messages.add_message(request, messages.SUCCESS, u'Konto stworzone. Nowe hasło to %s' % (randompass, ))
return redirect('/admin/accounts/%s/' % (u.id, ))
else:
form = NewAccountForm()
return render_to_response('radmin/manage_accounts_add.html', request, **klist(
selected_user_id='create',
form=form))
from django.core.paginator import Paginator
@must_be_admin
def view_students(request, page='1'):
page = int(page)
students = User.objects.filter(status=0).order_by('surname', 'name')
students = [x for x in students if not x.is_demo()]
p = Paginator(students, 30)
cpage = p.page(page)
return render_to_response('radmin/manage_accounts_students_list.html', request,
page=cpage) | [
3,
6,
7,
8,
10
] |
925 | 9adff5da4e26088def9f0e32aa712a1f2b0336ba | class Step:
<mask token>
<mask token>
<mask token>
| class Step:
<mask token>
<mask token>
def __repr__(self) ->str:
return f'Step: {{action: {self.action.__str__()}}}'
| class Step:
def __init__(self, action):
self.action = action
<mask token>
def __repr__(self) ->str:
return f'Step: {{action: {self.action.__str__()}}}'
| class Step:
def __init__(self, action):
self.action = action
def __str__(self) ->str:
return f'Step: {{action: {self.action.__str__()}}}'
def __repr__(self) ->str:
return f'Step: {{action: {self.action.__str__()}}}'
| null | [
1,
2,
3,
4
] |
926 | 668a8005f2f66190d588fb9289293d73a608f767 | <mask token>
def handle(text, mic, profile):
messages1 = ['Naturally Sir ', 'Of course Sir ', "I'll get right at it"]
final = random.choice(messages1)
mic.say(final)
command = 'ssh pi@'
ip = profile['piip']
command += ip
command += ' pkill omxplayer'
os.system(command)
mic.say('The music process has successfully been killed')
<mask token>
| <mask token>
def handle(text, mic, profile):
messages1 = ['Naturally Sir ', 'Of course Sir ', "I'll get right at it"]
final = random.choice(messages1)
mic.say(final)
command = 'ssh pi@'
ip = profile['piip']
command += ip
command += ' pkill omxplayer'
os.system(command)
mic.say('The music process has successfully been killed')
def isValid(text):
"""
Returns True if the input is related to the meaning of life.
Arguments:
text -- user-input, typically transcribed speech
"""
return bool(re.search('\\b((kill|stop) the (alarm|clock|music))\\b',
text, re.IGNORECASE))
| <mask token>
WORDS = []
def handle(text, mic, profile):
messages1 = ['Naturally Sir ', 'Of course Sir ', "I'll get right at it"]
final = random.choice(messages1)
mic.say(final)
command = 'ssh pi@'
ip = profile['piip']
command += ip
command += ' pkill omxplayer'
os.system(command)
mic.say('The music process has successfully been killed')
def isValid(text):
"""
Returns True if the input is related to the meaning of life.
Arguments:
text -- user-input, typically transcribed speech
"""
return bool(re.search('\\b((kill|stop) the (alarm|clock|music))\\b',
text, re.IGNORECASE))
| import random
import re
from datetime import datetime, time
from phue import Bridge
import os
import glob
WORDS = []
def handle(text, mic, profile):
messages1 = ['Naturally Sir ', 'Of course Sir ', "I'll get right at it"]
final = random.choice(messages1)
mic.say(final)
command = 'ssh pi@'
ip = profile['piip']
command += ip
command += ' pkill omxplayer'
os.system(command)
mic.say('The music process has successfully been killed')
def isValid(text):
"""
Returns True if the input is related to the meaning of life.
Arguments:
text -- user-input, typically transcribed speech
"""
return bool(re.search('\\b((kill|stop) the (alarm|clock|music))\\b',
text, re.IGNORECASE))
| # -*- coding: utf-8-*-
import random
import re
from datetime import datetime, time
from phue import Bridge
import os
import glob
WORDS = []
def handle(text, mic, profile):
messages1 = ["Naturally Sir ","Of course Sir ","I'll get right at it"]
final = random.choice(messages1)
mic.say(final)
command = "ssh pi@"
ip = profile['piip']
command += ip
command += " pkill omxplayer"
os.system(command)
mic.say("The music process has successfully been killed")
def isValid(text):
"""
Returns True if the input is related to the meaning of life.
Arguments:
text -- user-input, typically transcribed speech
"""
return bool(re.search(r'\b((kill|stop) the (alarm|clock|music))\b', text, re.IGNORECASE))
| [
1,
2,
3,
4,
5
] |
927 | 2257f73a290dfd428a874e963c26e51f1c1f1efa | <mask token>
def register_extensions(app):
"""Register Flask extensions."""
assets.init_app(app)
hashing.init_app(app)
cache.init_app(app)
db.init_app(app)
login_manager.init_app(app)
migrate.init_app(app, db)
init_mailman(app)
init_talisman(app)
return None
<mask token>
def register_blueprints(app):
"""Register Flask blueprints."""
app.register_blueprint(public.views.blueprint)
app.register_blueprint(public.project.blueprint)
app.register_blueprint(public.auth.blueprint)
app.register_blueprint(public.api.blueprint)
app.register_blueprint(admin.views.blueprint)
return None
<mask token>
def register_shellcontext(app):
"""Register shell context objects."""
def shell_context():
"""Shell context objects."""
from dribdat.user.models import User
return {'db': db, 'User': User}
app.shell_context_processor(shell_context)
<mask token>
def register_filters(app):
"""Register filters for templates."""
Misaka(app, autolink=True, fenced_code=True, strikethrough=True, tables
=True)
app.oembed_providers = bootstrap_basic()
@app.template_filter()
def onebox(value):
return make_oembedplus(value, app.oembed_providers, maxwidth=600,
maxheight=400)
app.tz = timezone(app.config['TIME_ZONE'])
app.jinja_env.filters['quote_plus'] = lambda u: quote_plus(u or '', ':/?&='
)
@app.template_filter()
def since_date(value):
return timesince(value)
@app.template_filter()
def until_date(value):
return timesince(value, default='now!', until=True)
@app.template_filter()
def format_date(value, format='%d.%m.%Y'):
if value is None:
return ''
return value.strftime(format)
@app.template_filter()
def format_datetime(value, format='%d.%m.%Y %H:%M'):
if value is None:
return ''
return value.strftime(format)
def register_loggers(app):
"""Initialize and configure logging."""
if 'DEBUG' in app.config and not app.config['DEBUG']:
import logging
stream_handler = logging.StreamHandler()
app.logger.addHandler(stream_handler)
app.logger.setLevel(logging.INFO)
<mask token>
| <mask token>
def init_app(config_object=ProdConfig):
"""Define an application factory.
See: http://flask.pocoo.org/docs/patterns/appfactories/
:param config_object: The configuration object to use.
"""
app = Flask(__name__)
app.config.from_object(config_object)
if app.config['SERVER_CORS']:
CORS(app, resources={'/api/*': {'origins': '*'}})
app.config['CORS_HEADERS'] = 'Content-Type'
if app.config['SERVER_PROXY']:
app.wsgi_app = ProxyFix(app, x_for=1, x_proto=1, x_host=1)
else:
app.wsgi_app = WhiteNoise(app.wsgi_app, prefix='static/')
for static in ('css', 'img', 'js', 'public'):
app.wsgi_app.add_files('dribdat/static/' + static)
register_extensions(app)
register_blueprints(app)
register_oauthhandlers(app)
register_errorhandlers(app)
register_filters(app)
register_loggers(app)
register_shellcontext(app)
register_commands(app)
register_caching(app)
return app
def register_extensions(app):
"""Register Flask extensions."""
assets.init_app(app)
hashing.init_app(app)
cache.init_app(app)
db.init_app(app)
login_manager.init_app(app)
migrate.init_app(app, db)
init_mailman(app)
init_talisman(app)
return None
<mask token>
def register_blueprints(app):
"""Register Flask blueprints."""
app.register_blueprint(public.views.blueprint)
app.register_blueprint(public.project.blueprint)
app.register_blueprint(public.auth.blueprint)
app.register_blueprint(public.api.blueprint)
app.register_blueprint(admin.views.blueprint)
return None
def register_oauthhandlers(app):
"""Set up OAuth handlers based on configuration."""
blueprint = get_auth_blueprint(app)
if blueprint is not None:
app.register_blueprint(blueprint, url_prefix='/oauth')
def register_errorhandlers(app):
"""Register error handlers."""
def render_error(error):
"""Render error template."""
error_code = getattr(error, 'code', 500)
return render_template('{0}.html'.format(error_code)), error_code
for errcode in [401, 404, 500]:
app.errorhandler(errcode)(render_error)
return None
def register_shellcontext(app):
"""Register shell context objects."""
def shell_context():
"""Shell context objects."""
from dribdat.user.models import User
return {'db': db, 'User': User}
app.shell_context_processor(shell_context)
<mask token>
def register_filters(app):
"""Register filters for templates."""
Misaka(app, autolink=True, fenced_code=True, strikethrough=True, tables
=True)
app.oembed_providers = bootstrap_basic()
@app.template_filter()
def onebox(value):
return make_oembedplus(value, app.oembed_providers, maxwidth=600,
maxheight=400)
app.tz = timezone(app.config['TIME_ZONE'])
app.jinja_env.filters['quote_plus'] = lambda u: quote_plus(u or '', ':/?&='
)
@app.template_filter()
def since_date(value):
return timesince(value)
@app.template_filter()
def until_date(value):
return timesince(value, default='now!', until=True)
@app.template_filter()
def format_date(value, format='%d.%m.%Y'):
if value is None:
return ''
return value.strftime(format)
@app.template_filter()
def format_datetime(value, format='%d.%m.%Y %H:%M'):
if value is None:
return ''
return value.strftime(format)
def register_loggers(app):
"""Initialize and configure logging."""
if 'DEBUG' in app.config and not app.config['DEBUG']:
import logging
stream_handler = logging.StreamHandler()
app.logger.addHandler(stream_handler)
app.logger.setLevel(logging.INFO)
def register_caching(app):
"""Prevent cached responses in debug."""
if 'DEBUG' in app.config and app.config['DEBUG']:
@app.after_request
def after_request(response):
response.headers['Cache-Control'
] = 'no-cache, no-store, must-revalidate, public, max-age=0'
response.headers['Expires'] = 0
response.headers['Pragma'] = 'no-cache'
return response
| <mask token>
def init_app(config_object=ProdConfig):
"""Define an application factory.
See: http://flask.pocoo.org/docs/patterns/appfactories/
:param config_object: The configuration object to use.
"""
app = Flask(__name__)
app.config.from_object(config_object)
if app.config['SERVER_CORS']:
CORS(app, resources={'/api/*': {'origins': '*'}})
app.config['CORS_HEADERS'] = 'Content-Type'
if app.config['SERVER_PROXY']:
app.wsgi_app = ProxyFix(app, x_for=1, x_proto=1, x_host=1)
else:
app.wsgi_app = WhiteNoise(app.wsgi_app, prefix='static/')
for static in ('css', 'img', 'js', 'public'):
app.wsgi_app.add_files('dribdat/static/' + static)
register_extensions(app)
register_blueprints(app)
register_oauthhandlers(app)
register_errorhandlers(app)
register_filters(app)
register_loggers(app)
register_shellcontext(app)
register_commands(app)
register_caching(app)
return app
def register_extensions(app):
"""Register Flask extensions."""
assets.init_app(app)
hashing.init_app(app)
cache.init_app(app)
db.init_app(app)
login_manager.init_app(app)
migrate.init_app(app, db)
init_mailman(app)
init_talisman(app)
return None
<mask token>
def init_talisman(app):
"""Initialize Talisman support."""
if 'SERVER_SSL' in app.config and app.config['SERVER_SSL']:
Talisman(app, content_security_policy=app.config['CSP_DIRECTIVES'],
frame_options_allow_from='*')
def register_blueprints(app):
"""Register Flask blueprints."""
app.register_blueprint(public.views.blueprint)
app.register_blueprint(public.project.blueprint)
app.register_blueprint(public.auth.blueprint)
app.register_blueprint(public.api.blueprint)
app.register_blueprint(admin.views.blueprint)
return None
def register_oauthhandlers(app):
"""Set up OAuth handlers based on configuration."""
blueprint = get_auth_blueprint(app)
if blueprint is not None:
app.register_blueprint(blueprint, url_prefix='/oauth')
def register_errorhandlers(app):
"""Register error handlers."""
def render_error(error):
"""Render error template."""
error_code = getattr(error, 'code', 500)
return render_template('{0}.html'.format(error_code)), error_code
for errcode in [401, 404, 500]:
app.errorhandler(errcode)(render_error)
return None
def register_shellcontext(app):
"""Register shell context objects."""
def shell_context():
"""Shell context objects."""
from dribdat.user.models import User
return {'db': db, 'User': User}
app.shell_context_processor(shell_context)
<mask token>
def register_filters(app):
"""Register filters for templates."""
Misaka(app, autolink=True, fenced_code=True, strikethrough=True, tables
=True)
app.oembed_providers = bootstrap_basic()
@app.template_filter()
def onebox(value):
return make_oembedplus(value, app.oembed_providers, maxwidth=600,
maxheight=400)
app.tz = timezone(app.config['TIME_ZONE'])
app.jinja_env.filters['quote_plus'] = lambda u: quote_plus(u or '', ':/?&='
)
@app.template_filter()
def since_date(value):
return timesince(value)
@app.template_filter()
def until_date(value):
return timesince(value, default='now!', until=True)
@app.template_filter()
def format_date(value, format='%d.%m.%Y'):
if value is None:
return ''
return value.strftime(format)
@app.template_filter()
def format_datetime(value, format='%d.%m.%Y %H:%M'):
if value is None:
return ''
return value.strftime(format)
def register_loggers(app):
"""Initialize and configure logging."""
if 'DEBUG' in app.config and not app.config['DEBUG']:
import logging
stream_handler = logging.StreamHandler()
app.logger.addHandler(stream_handler)
app.logger.setLevel(logging.INFO)
def register_caching(app):
"""Prevent cached responses in debug."""
if 'DEBUG' in app.config and app.config['DEBUG']:
@app.after_request
def after_request(response):
response.headers['Cache-Control'
] = 'no-cache, no-store, must-revalidate, public, max-age=0'
response.headers['Expires'] = 0
response.headers['Pragma'] = 'no-cache'
return response
| <mask token>
def init_app(config_object=ProdConfig):
"""Define an application factory.
See: http://flask.pocoo.org/docs/patterns/appfactories/
:param config_object: The configuration object to use.
"""
app = Flask(__name__)
app.config.from_object(config_object)
if app.config['SERVER_CORS']:
CORS(app, resources={'/api/*': {'origins': '*'}})
app.config['CORS_HEADERS'] = 'Content-Type'
if app.config['SERVER_PROXY']:
app.wsgi_app = ProxyFix(app, x_for=1, x_proto=1, x_host=1)
else:
app.wsgi_app = WhiteNoise(app.wsgi_app, prefix='static/')
for static in ('css', 'img', 'js', 'public'):
app.wsgi_app.add_files('dribdat/static/' + static)
register_extensions(app)
register_blueprints(app)
register_oauthhandlers(app)
register_errorhandlers(app)
register_filters(app)
register_loggers(app)
register_shellcontext(app)
register_commands(app)
register_caching(app)
return app
def register_extensions(app):
"""Register Flask extensions."""
assets.init_app(app)
hashing.init_app(app)
cache.init_app(app)
db.init_app(app)
login_manager.init_app(app)
migrate.init_app(app, db)
init_mailman(app)
init_talisman(app)
return None
def init_mailman(app):
"""Initialize mailer support."""
if 'MAIL_SERVER' in app.config and app.config['MAIL_SERVER']:
if not app.config['MAIL_DEFAULT_SENDER']:
app.logger.warn('MAIL_DEFAULT_SENDER is required to send email')
else:
mail = Mail()
mail.init_app(app)
def init_talisman(app):
"""Initialize Talisman support."""
if 'SERVER_SSL' in app.config and app.config['SERVER_SSL']:
Talisman(app, content_security_policy=app.config['CSP_DIRECTIVES'],
frame_options_allow_from='*')
def register_blueprints(app):
"""Register Flask blueprints."""
app.register_blueprint(public.views.blueprint)
app.register_blueprint(public.project.blueprint)
app.register_blueprint(public.auth.blueprint)
app.register_blueprint(public.api.blueprint)
app.register_blueprint(admin.views.blueprint)
return None
def register_oauthhandlers(app):
"""Set up OAuth handlers based on configuration."""
blueprint = get_auth_blueprint(app)
if blueprint is not None:
app.register_blueprint(blueprint, url_prefix='/oauth')
def register_errorhandlers(app):
"""Register error handlers."""
def render_error(error):
"""Render error template."""
error_code = getattr(error, 'code', 500)
return render_template('{0}.html'.format(error_code)), error_code
for errcode in [401, 404, 500]:
app.errorhandler(errcode)(render_error)
return None
def register_shellcontext(app):
"""Register shell context objects."""
def shell_context():
"""Shell context objects."""
from dribdat.user.models import User
return {'db': db, 'User': User}
app.shell_context_processor(shell_context)
def register_commands(app):
"""Register Click commands."""
app.cli.add_command(commands.lint)
app.cli.add_command(commands.clean)
app.cli.add_command(commands.urls)
def register_filters(app):
"""Register filters for templates."""
Misaka(app, autolink=True, fenced_code=True, strikethrough=True, tables
=True)
app.oembed_providers = bootstrap_basic()
@app.template_filter()
def onebox(value):
return make_oembedplus(value, app.oembed_providers, maxwidth=600,
maxheight=400)
app.tz = timezone(app.config['TIME_ZONE'])
app.jinja_env.filters['quote_plus'] = lambda u: quote_plus(u or '', ':/?&='
)
@app.template_filter()
def since_date(value):
return timesince(value)
@app.template_filter()
def until_date(value):
return timesince(value, default='now!', until=True)
@app.template_filter()
def format_date(value, format='%d.%m.%Y'):
if value is None:
return ''
return value.strftime(format)
@app.template_filter()
def format_datetime(value, format='%d.%m.%Y %H:%M'):
if value is None:
return ''
return value.strftime(format)
def register_loggers(app):
"""Initialize and configure logging."""
if 'DEBUG' in app.config and not app.config['DEBUG']:
import logging
stream_handler = logging.StreamHandler()
app.logger.addHandler(stream_handler)
app.logger.setLevel(logging.INFO)
def register_caching(app):
"""Prevent cached responses in debug."""
if 'DEBUG' in app.config and app.config['DEBUG']:
@app.after_request
def after_request(response):
response.headers['Cache-Control'
] = 'no-cache, no-store, must-revalidate, public, max-age=0'
response.headers['Expires'] = 0
response.headers['Pragma'] = 'no-cache'
return response
| # -*- coding: utf-8 -*-
"""The app module, containing the app factory function."""
from flask import Flask, render_template
from flask_cors import CORS
from flask_misaka import Misaka
from flask_mailman import Mail
from flask_talisman import Talisman
from werkzeug.middleware.proxy_fix import ProxyFix
from micawber.providers import bootstrap_basic
from whitenoise import WhiteNoise
from pytz import timezone
from urllib.parse import quote_plus
from dribdat import commands, public, admin
from dribdat.assets import assets # noqa: I005
from dribdat.sso import get_auth_blueprint
from dribdat.extensions import (
hashing,
cache,
db,
login_manager,
migrate,
)
from dribdat.settings import ProdConfig # noqa: I005
from dribdat.utils import timesince
from dribdat.onebox import make_oembedplus
def init_app(config_object=ProdConfig):
"""Define an application factory.
See: http://flask.pocoo.org/docs/patterns/appfactories/
:param config_object: The configuration object to use.
"""
app = Flask(__name__)
app.config.from_object(config_object)
# Set up cross-site access to the API
if app.config['SERVER_CORS']:
CORS(app, resources={r"/api/*": {"origins": "*"}})
app.config['CORS_HEADERS'] = 'Content-Type'
# Set up using an external proxy/static server
if app.config['SERVER_PROXY']:
app.wsgi_app = ProxyFix(app, x_for=1, x_proto=1, x_host=1)
else:
# Internally optimize static file hosting
app.wsgi_app = WhiteNoise(app.wsgi_app, prefix='static/')
for static in ('css', 'img', 'js', 'public'):
app.wsgi_app.add_files('dribdat/static/' + static)
register_extensions(app)
register_blueprints(app)
register_oauthhandlers(app)
register_errorhandlers(app)
register_filters(app)
register_loggers(app)
register_shellcontext(app)
register_commands(app)
register_caching(app)
return app
def register_extensions(app):
"""Register Flask extensions."""
assets.init_app(app)
hashing.init_app(app)
cache.init_app(app)
db.init_app(app)
login_manager.init_app(app)
migrate.init_app(app, db)
init_mailman(app)
init_talisman(app)
return None
def init_mailman(app):
"""Initialize mailer support."""
if 'MAIL_SERVER' in app.config and app.config['MAIL_SERVER']:
if not app.config['MAIL_DEFAULT_SENDER']:
app.logger.warn('MAIL_DEFAULT_SENDER is required to send email')
else:
mail = Mail()
mail.init_app(app)
def init_talisman(app):
"""Initialize Talisman support."""
if 'SERVER_SSL' in app.config and app.config['SERVER_SSL']:
Talisman(app,
content_security_policy=app.config['CSP_DIRECTIVES'],
frame_options_allow_from='*')
def register_blueprints(app):
"""Register Flask blueprints."""
app.register_blueprint(public.views.blueprint)
app.register_blueprint(public.project.blueprint)
app.register_blueprint(public.auth.blueprint)
app.register_blueprint(public.api.blueprint)
app.register_blueprint(admin.views.blueprint)
return None
def register_oauthhandlers(app):
"""Set up OAuth handlers based on configuration."""
blueprint = get_auth_blueprint(app)
if blueprint is not None:
app.register_blueprint(blueprint, url_prefix="/oauth")
def register_errorhandlers(app):
"""Register error handlers."""
def render_error(error):
"""Render error template."""
# If a HTTPException, pull the `code` attribute; default to 500
error_code = getattr(error, 'code', 500)
return render_template('{0}.html'.format(error_code)), error_code
for errcode in [401, 404, 500]:
app.errorhandler(errcode)(render_error)
return None
def register_shellcontext(app):
"""Register shell context objects."""
def shell_context():
"""Shell context objects."""
from dribdat.user.models import User
return {
'db': db,
'User': User}
app.shell_context_processor(shell_context)
def register_commands(app):
"""Register Click commands."""
app.cli.add_command(commands.lint)
app.cli.add_command(commands.clean)
app.cli.add_command(commands.urls)
def register_filters(app):
"""Register filters for templates."""
#
# Conversion of Markdown to HTML
Misaka(app, autolink=True, fenced_code=True,
strikethrough=True, tables=True)
# Registration of handlers for micawber
app.oembed_providers = bootstrap_basic()
@app.template_filter()
def onebox(value):
return make_oembedplus(
value, app.oembed_providers, maxwidth=600, maxheight=400
)
# Timezone helper
app.tz = timezone(app.config['TIME_ZONE'])
# Lambda filters for safe image_url's
app.jinja_env.filters['quote_plus'] = lambda u: quote_plus(u or '', ':/?&=')
# Custom filters
@app.template_filter()
def since_date(value):
return timesince(value)
@app.template_filter()
def until_date(value):
return timesince(value, default="now!", until=True)
@app.template_filter()
def format_date(value, format='%d.%m.%Y'):
if value is None: return ''
return value.strftime(format)
@app.template_filter()
def format_datetime(value, format='%d.%m.%Y %H:%M'):
if value is None: return ''
return value.strftime(format)
def register_loggers(app):
"""Initialize and configure logging."""
if 'DEBUG' in app.config and not app.config['DEBUG']:
import logging
stream_handler = logging.StreamHandler()
app.logger.addHandler(stream_handler)
app.logger.setLevel(logging.INFO)
def register_caching(app):
"""Prevent cached responses in debug."""
if 'DEBUG' in app.config and app.config['DEBUG']:
@app.after_request
def after_request(response):
response.headers["Cache-Control"] = "no-cache, no-store, must-revalidate, public, max-age=0"
response.headers["Expires"] = 0
response.headers["Pragma"] = "no-cache"
return response
| [
5,
9,
10,
12,
14
] |
928 | d39e3a552a7c558d3f5b410e0b228fb7409d732a | <mask token>
| # -*- coding:utf-8 -*-
"""
Author:xufei
Date:2021/1/21
"""
| null | null | null | [
0,
1
] |
929 | c18c407476375fb1647fefaedb5d7ea0e0aabe3a | <mask token>
def train_validate_test_split(df, train_percent=0.8, validate_percent=0.2,
seed=None):
np.random.seed(seed)
perm = np.random.permutation(df.index)
m = len(df.index)
train_end = int(train_percent * m)
train = df.iloc[:train_end]
validate = df.iloc[train_end:]
return train, validate
<mask token>
def get(df):
col = df[['review_body']]
print(col.head())
aspect = df[['Aspects']]
opinions = df[['Sentiments']]
print(df.shape[0])
now = ''
for o in range(0, df.shape[0]):
d = col.iloc[o:o + 1]
sd = d.to_string(index=False, header=None)
sd = sd[1:]
l = sent_tokenize(sd)
a = aspect.iloc[o:o + 1]
sa = a.to_string(index=False, header=None)
asp = sa.split(';')
a = opinions.iloc[o:o + 1]
sa = a.to_string(index=False, header=None)
senti = sa.split(';')
if len(asp) != len(senti) or len(l) != len(asp) or len(l) != len(senti
):
continue
it = 0
for i in l:
chks = [x.strip() for x in senti[it].split(',')]
chka = [x.strip() for x in asp[it].split(',')]
g = []
itr = 0
if len(chks) != len(chka):
continue
for k in chka:
f = k.split(' ')
num = chks[itr]
if len(f) > 1:
h = 0
for x in f:
x = x.strip(' ')
x = x.strip('"')
g += [x]
if h < len(f) - 1:
chks.insert(itr, '1')
h += 1
else:
g += f
itr += 1
chka = g
now += i
now += '####'
j = i.split(' ')
itr = 0
for word in j:
if itr < len(chka) and word == chka[itr]:
if chks[itr] == '1':
s = word + '=T-POS'
elif chks[itr] == '0':
s = word + '=T-NEU'
else:
s = word + '=T-NEG'
itr += 1
else:
s = word + '=O'
now += s + ' '
now += '\n'
it += 1
return now
<mask token>
| <mask token>
pd.set_option('display.max_colwidth', None)
<mask token>
def train_validate_test_split(df, train_percent=0.8, validate_percent=0.2,
seed=None):
np.random.seed(seed)
perm = np.random.permutation(df.index)
m = len(df.index)
train_end = int(train_percent * m)
train = df.iloc[:train_end]
validate = df.iloc[train_end:]
return train, validate
<mask token>
def get(df):
col = df[['review_body']]
print(col.head())
aspect = df[['Aspects']]
opinions = df[['Sentiments']]
print(df.shape[0])
now = ''
for o in range(0, df.shape[0]):
d = col.iloc[o:o + 1]
sd = d.to_string(index=False, header=None)
sd = sd[1:]
l = sent_tokenize(sd)
a = aspect.iloc[o:o + 1]
sa = a.to_string(index=False, header=None)
asp = sa.split(';')
a = opinions.iloc[o:o + 1]
sa = a.to_string(index=False, header=None)
senti = sa.split(';')
if len(asp) != len(senti) or len(l) != len(asp) or len(l) != len(senti
):
continue
it = 0
for i in l:
chks = [x.strip() for x in senti[it].split(',')]
chka = [x.strip() for x in asp[it].split(',')]
g = []
itr = 0
if len(chks) != len(chka):
continue
for k in chka:
f = k.split(' ')
num = chks[itr]
if len(f) > 1:
h = 0
for x in f:
x = x.strip(' ')
x = x.strip('"')
g += [x]
if h < len(f) - 1:
chks.insert(itr, '1')
h += 1
else:
g += f
itr += 1
chka = g
now += i
now += '####'
j = i.split(' ')
itr = 0
for word in j:
if itr < len(chka) and word == chka[itr]:
if chks[itr] == '1':
s = word + '=T-POS'
elif chks[itr] == '0':
s = word + '=T-NEU'
else:
s = word + '=T-NEG'
itr += 1
else:
s = word + '=O'
now += s + ' '
now += '\n'
it += 1
return now
<mask token>
text_file.close()
<mask token>
text_file.close()
| <mask token>
csv_file = open('/home/debajit15/train+dev.csv')
pd.set_option('display.max_colwidth', None)
df = pd.read_csv(csv_file, sep=',')
df = df[pd.notnull(df['Aspects'])]
def train_validate_test_split(df, train_percent=0.8, validate_percent=0.2,
seed=None):
np.random.seed(seed)
perm = np.random.permutation(df.index)
m = len(df.index)
train_end = int(train_percent * m)
train = df.iloc[:train_end]
validate = df.iloc[train_end:]
return train, validate
trainl, vall = train_validate_test_split(df)
def get(df):
col = df[['review_body']]
print(col.head())
aspect = df[['Aspects']]
opinions = df[['Sentiments']]
print(df.shape[0])
now = ''
for o in range(0, df.shape[0]):
d = col.iloc[o:o + 1]
sd = d.to_string(index=False, header=None)
sd = sd[1:]
l = sent_tokenize(sd)
a = aspect.iloc[o:o + 1]
sa = a.to_string(index=False, header=None)
asp = sa.split(';')
a = opinions.iloc[o:o + 1]
sa = a.to_string(index=False, header=None)
senti = sa.split(';')
if len(asp) != len(senti) or len(l) != len(asp) or len(l) != len(senti
):
continue
it = 0
for i in l:
chks = [x.strip() for x in senti[it].split(',')]
chka = [x.strip() for x in asp[it].split(',')]
g = []
itr = 0
if len(chks) != len(chka):
continue
for k in chka:
f = k.split(' ')
num = chks[itr]
if len(f) > 1:
h = 0
for x in f:
x = x.strip(' ')
x = x.strip('"')
g += [x]
if h < len(f) - 1:
chks.insert(itr, '1')
h += 1
else:
g += f
itr += 1
chka = g
now += i
now += '####'
j = i.split(' ')
itr = 0
for word in j:
if itr < len(chka) and word == chka[itr]:
if chks[itr] == '1':
s = word + '=T-POS'
elif chks[itr] == '0':
s = word + '=T-NEU'
else:
s = word + '=T-NEG'
itr += 1
else:
s = word + '=O'
now += s + ' '
now += '\n'
it += 1
return now
train = get(trainl)
val = get(vall)
text_file = open('/home/debajit15/train.txt', 'w')
n = text_file.write(train)
text_file.close()
text_file = open('/home/debajit15/dev.txt', 'w')
n = text_file.write(val)
text_file.close()
| import pandas as pd
import numpy as np
import csv
from nltk.tokenize import sent_tokenize
csv_file = open('/home/debajit15/train+dev.csv')
pd.set_option('display.max_colwidth', None)
df = pd.read_csv(csv_file, sep=',')
df = df[pd.notnull(df['Aspects'])]
def train_validate_test_split(df, train_percent=0.8, validate_percent=0.2,
seed=None):
np.random.seed(seed)
perm = np.random.permutation(df.index)
m = len(df.index)
train_end = int(train_percent * m)
train = df.iloc[:train_end]
validate = df.iloc[train_end:]
return train, validate
trainl, vall = train_validate_test_split(df)
def get(df):
col = df[['review_body']]
print(col.head())
aspect = df[['Aspects']]
opinions = df[['Sentiments']]
print(df.shape[0])
now = ''
for o in range(0, df.shape[0]):
d = col.iloc[o:o + 1]
sd = d.to_string(index=False, header=None)
sd = sd[1:]
l = sent_tokenize(sd)
a = aspect.iloc[o:o + 1]
sa = a.to_string(index=False, header=None)
asp = sa.split(';')
a = opinions.iloc[o:o + 1]
sa = a.to_string(index=False, header=None)
senti = sa.split(';')
if len(asp) != len(senti) or len(l) != len(asp) or len(l) != len(senti
):
continue
it = 0
for i in l:
chks = [x.strip() for x in senti[it].split(',')]
chka = [x.strip() for x in asp[it].split(',')]
g = []
itr = 0
if len(chks) != len(chka):
continue
for k in chka:
f = k.split(' ')
num = chks[itr]
if len(f) > 1:
h = 0
for x in f:
x = x.strip(' ')
x = x.strip('"')
g += [x]
if h < len(f) - 1:
chks.insert(itr, '1')
h += 1
else:
g += f
itr += 1
chka = g
now += i
now += '####'
j = i.split(' ')
itr = 0
for word in j:
if itr < len(chka) and word == chka[itr]:
if chks[itr] == '1':
s = word + '=T-POS'
elif chks[itr] == '0':
s = word + '=T-NEU'
else:
s = word + '=T-NEG'
itr += 1
else:
s = word + '=O'
now += s + ' '
now += '\n'
it += 1
return now
train = get(trainl)
val = get(vall)
text_file = open('/home/debajit15/train.txt', 'w')
n = text_file.write(train)
text_file.close()
text_file = open('/home/debajit15/dev.txt', 'w')
n = text_file.write(val)
text_file.close()
| import pandas as pd
import numpy as np
import csv
#import nltk
#nltk.download('punkt')
from nltk.tokenize import sent_tokenize
csv_file=open("/home/debajit15/train+dev.csv")
pd.set_option('display.max_colwidth', None)
df=pd.read_csv(csv_file,sep=',');
df = df[pd.notnull(df['Aspects'])]
#print(df['Opinion_Words'].iloc[0:1])
def train_validate_test_split(df, train_percent=.8, validate_percent=.2, seed=None):
np.random.seed(seed)
perm = np.random.permutation(df.index)
m = len(df.index)
train_end = int(train_percent * m)
train = df.iloc[:train_end]
validate = df.iloc[train_end:]
return train, validate
trainl,vall=train_validate_test_split(df)
def get(df):
col=df[['review_body']]
print(col.head())
aspect=df[['Aspects']]
opinions=df[['Sentiments']]
print(df.shape[0])
now=""
for o in range(0,df.shape[0]):
d=col.iloc[o:o+1]
sd=d.to_string(index=False,header=None)
sd=sd[1:]
l=sent_tokenize(sd)
a=aspect.iloc[o:o+1]
sa=a.to_string(index=False,header=None)
asp=sa.split(";")
a=opinions.iloc[o:o+1]
sa=a.to_string(index=False,header=None)
senti=sa.split(";")
if(len(asp)!=len(senti) or len(l)!=len(asp) or len(l)!=len(senti)):
continue
it=0
for i in l:
chks=[x.strip() for x in senti[it].split(",")]
chka=[x.strip() for x in asp[it].split(",")]
g=[]
itr=0
if(len(chks)!=len(chka)):
continue
for k in chka:
f=k.split(" ")
num=chks[itr]
if(len(f)>1):
h=0
for x in f:
x=x.strip(' ')
x=x.strip('"')
g+=[x]
if(h<len(f)-1):
chks.insert(itr,'1')
h+=1
else:
g+=f
itr+=1
chka=g
now+=i
now+="####"
j=i.split(" ")
itr=0
for word in j:
if itr<len(chka) and word==chka[itr] :
if chks[itr]=='1':
s=word+"=T-POS"
elif chks[itr]=='0':
s=word+"=T-NEU"
else:
s=word+"=T-NEG"
itr+=1
else:
s=word+"=O"
now+=s+" "
now+="\n"
it+=1
return now
train=get(trainl)
val=get(vall)
text_file = open("/home/debajit15/train.txt", "w")
n = text_file.write(train)
text_file.close()
text_file = open("/home/debajit15/dev.txt", "w")
n = text_file.write(val)
text_file.close()
# #print(df[['review_body']])
| [
2,
3,
4,
5,
6
] |
930 | 74b0ccb5193380ce596313d1ac3f898ff1fdd2f3 | <mask token>
| from .mail_utils import send_mail
from .request_utils import get_host_url
| null | null | null | [
0,
1
] |
931 | 4e1f7fddb6bd3413dd6a8ca21520d309af75c811 | <mask token>
| <mask token>
sys.path.insert(0, 'main')
<mask token>
main.hammer(workspace)
| <mask token>
sys.path.insert(0, 'main')
<mask token>
workspace = os.path.abspath(sys.argv[1])
main.hammer(workspace)
| import sys
import os
sys.path.insert(0, 'main')
import main
workspace = os.path.abspath(sys.argv[1])
main.hammer(workspace)
| import sys
import os
sys.path.insert(0, "main")
import main
workspace = os.path.abspath(sys.argv[1])
main.hammer(workspace)
| [
0,
1,
2,
3,
4
] |
932 | db1e3a109af2db2c8794a7c9c7dfb0c2ccee5800 | <mask token>
| <mask token>
def number_of_subscribers(subreddit):
"""return the number of subscribers from an Reddit API"""
import requests
resInf = requests.get('https://www.reddit.com/r/{}/about.json'.format(
subreddit), headers={'User-Agent': 'My-User-Agent'},
allow_redirects=False)
if resInf.status_code >= 300:
return 0
return resInf.json().get('data').get('subscribers')
| #!/usr/bin/python3
"""0. How many subs"""
def number_of_subscribers(subreddit):
"""return the number of subscribers from an Reddit API"""
import requests
resInf = requests.get("https://www.reddit.com/r/{}/about.json"
.format(subreddit),
headers={"User-Agent": "My-User-Agent"},
allow_redirects=False)
if resInf.status_code >= 300:
return 0
return resInf.json().get("data").get("subscribers")
| null | null | [
0,
1,
2
] |
933 | d20e41dd7054ff133be264bebf13e4e218710ae5 | <mask token>
class coreGetHome(TestCase):
<mask token>
<mask token>
def test_200_template_home(self):
self.assertEqual(200, self.resp.status_code)
| <mask token>
class coreGetHome(TestCase):
def setUp(self):
self.resp = self.client.get(r('core:core_home'))
<mask token>
def test_200_template_home(self):
self.assertEqual(200, self.resp.status_code)
| <mask token>
class coreGetHome(TestCase):
def setUp(self):
self.resp = self.client.get(r('core:core_home'))
def test_template_home(self):
self.assertTemplateUsed(self.resp, 'index.html')
def test_200_template_home(self):
self.assertEqual(200, self.resp.status_code)
| from django.shortcuts import resolve_url as r
from django.test import TestCase
class coreGetHome(TestCase):
def setUp(self):
self.resp = self.client.get(r('core:core_home'))
def test_template_home(self):
self.assertTemplateUsed(self.resp, 'index.html')
def test_200_template_home(self):
self.assertEqual(200, self.resp.status_code)
| null | [
2,
3,
4,
5
] |
934 | ff7a865822a4f8b343ab4cb490c24d6d530b14e1 | <mask token>
| <mask token>
pc.verifyParameters()
<mask token>
tour.Description(IG.Tour.TEXT, kube_description)
tour.Instructions(IG.Tour.MARKDOWN, kube_instruction)
rspec.addTour(tour)
pc.printRequestRSpec(rspec)
| kube_description = """
Compute Server
"""
kube_instruction = """
Not instructions yet
"""
<mask token>
pc = portal.Context()
params = pc.bindParameters()
pc.verifyParameters()
rspec = PG.Request()
compute = rspec.RawPC('compute')
compute.disk_image = (
'urn:publicid:IDN+emulab.net+image+emulab-ops:UBUNTU18-64-STD')
compute.hardware_type = 'd430'
compute.routable_control_ip = True
tour = IG.Tour()
tour.Description(IG.Tour.TEXT, kube_description)
tour.Instructions(IG.Tour.MARKDOWN, kube_instruction)
rspec.addTour(tour)
pc.printRequestRSpec(rspec)
| kube_description = """
Compute Server
"""
kube_instruction = """
Not instructions yet
"""
import geni.portal as portal
import geni.rspec.pg as PG
import geni.rspec.emulab as elab
import geni.rspec.igext as IG
import geni.urn as URN
import geni.rspec.emulab.pnext as PN
pc = portal.Context()
params = pc.bindParameters()
pc.verifyParameters()
rspec = PG.Request()
compute = rspec.RawPC('compute')
compute.disk_image = (
'urn:publicid:IDN+emulab.net+image+emulab-ops:UBUNTU18-64-STD')
compute.hardware_type = 'd430'
compute.routable_control_ip = True
tour = IG.Tour()
tour.Description(IG.Tour.TEXT, kube_description)
tour.Instructions(IG.Tour.MARKDOWN, kube_instruction)
rspec.addTour(tour)
pc.printRequestRSpec(rspec)
| #!/usr/bin/env python
kube_description= \
"""
Compute Server
"""
kube_instruction= \
"""
Not instructions yet
"""
#
# Standard geni-lib/portal libraries
#
import geni.portal as portal
import geni.rspec.pg as PG
import geni.rspec.emulab as elab
import geni.rspec.igext as IG
import geni.urn as URN
#
# PhantomNet extensions.
#
import geni.rspec.emulab.pnext as PN
#
# This geni-lib script is designed to run in the PhantomNet Portal.
#
pc = portal.Context()
params = pc.bindParameters()
#
# Give the library a chance to return nice JSON-formatted exception(s) and/or
# warnings; this might sys.exit().
#
pc.verifyParameters()
rspec = PG.Request()
compute = rspec.RawPC("compute")
compute.disk_image = 'urn:publicid:IDN+emulab.net+image+emulab-ops:UBUNTU18-64-STD'
compute.hardware_type = 'd430'
compute.routable_control_ip = True
tour = IG.Tour()
tour.Description(IG.Tour.TEXT,kube_description)
tour.Instructions(IG.Tour.MARKDOWN,kube_instruction)
rspec.addTour(tour)
#
# Print and go!
#
pc.printRequestRSpec(rspec)
| [
0,
1,
2,
3,
4
] |
935 | a93884757069393b4d96de5ec9c7d815d58a2ea5 | <mask token>
@jd.get('/routerjson')
def apply_jd_waybill(db):
query = bottle.request.query
if query['method'] == 'jingdong.etms.waybillcode.get':
jd_code, resp = jd_get_response_normal()
logging.debug('JD response: {} {}'.format(jd_code, resp))
db.add(JdWaybillApplyResp(jd_code, resp))
else:
jd_param = json.loads(query['360buy_param_json'])
delivery_id = jd_param['deliveryId']
order_id = jd_param['orderId']
resp = jd_send_response_normal(delivery_id, order_id)
db.add(JdWaybillSendResp(delivery_id, order_id, resp))
logging.debug('JD response: {}'.format(resp))
return resp
@jd.get('/jd_waybill')
def jd_waybill(db):
query = bottle.request.query
jd_rsp = db.query(JdWaybillSendResp).filter_by(wms_order_code=query.get
('wms_order_code')).first()
if jd_rsp:
return jd_rsp.body
return HTTPError(404, None)
def jd_get_response_normal():
code = str(uuid.uuid4()).split('-')[-1]
return code, json.dumps({'jingdong_etms_waybillcode_get_responce': {
'resultInfo': {'message': u'成功', 'code': 100, 'deliveryIdList': [
code]}, 'code': u'0'}})
def jd_send_response_normal(deliver_id, order_id):
return json.dumps({'jingdong_etms_waybill_send_responce': {'resultInfo':
{'message': u'成功', 'deliveryId': deliver_id, 'code': 100, 'orderId':
order_id}}})
| <mask token>
jd.install(plugin)
@jd.get('/routerjson')
def apply_jd_waybill(db):
query = bottle.request.query
if query['method'] == 'jingdong.etms.waybillcode.get':
jd_code, resp = jd_get_response_normal()
logging.debug('JD response: {} {}'.format(jd_code, resp))
db.add(JdWaybillApplyResp(jd_code, resp))
else:
jd_param = json.loads(query['360buy_param_json'])
delivery_id = jd_param['deliveryId']
order_id = jd_param['orderId']
resp = jd_send_response_normal(delivery_id, order_id)
db.add(JdWaybillSendResp(delivery_id, order_id, resp))
logging.debug('JD response: {}'.format(resp))
return resp
@jd.get('/jd_waybill')
def jd_waybill(db):
query = bottle.request.query
jd_rsp = db.query(JdWaybillSendResp).filter_by(wms_order_code=query.get
('wms_order_code')).first()
if jd_rsp:
return jd_rsp.body
return HTTPError(404, None)
def jd_get_response_normal():
code = str(uuid.uuid4()).split('-')[-1]
return code, json.dumps({'jingdong_etms_waybillcode_get_responce': {
'resultInfo': {'message': u'成功', 'code': 100, 'deliveryIdList': [
code]}, 'code': u'0'}})
def jd_send_response_normal(deliver_id, order_id):
return json.dumps({'jingdong_etms_waybill_send_responce': {'resultInfo':
{'message': u'成功', 'deliveryId': deliver_id, 'code': 100, 'orderId':
order_id}}})
| <mask token>
jd = bottle.Bottle(catchall=False)
plugin = sqlalchemy.Plugin(engine, Base.metadata, keyword='db', create=True,
commit=True, use_kwargs=False)
jd.install(plugin)
@jd.get('/routerjson')
def apply_jd_waybill(db):
query = bottle.request.query
if query['method'] == 'jingdong.etms.waybillcode.get':
jd_code, resp = jd_get_response_normal()
logging.debug('JD response: {} {}'.format(jd_code, resp))
db.add(JdWaybillApplyResp(jd_code, resp))
else:
jd_param = json.loads(query['360buy_param_json'])
delivery_id = jd_param['deliveryId']
order_id = jd_param['orderId']
resp = jd_send_response_normal(delivery_id, order_id)
db.add(JdWaybillSendResp(delivery_id, order_id, resp))
logging.debug('JD response: {}'.format(resp))
return resp
@jd.get('/jd_waybill')
def jd_waybill(db):
query = bottle.request.query
jd_rsp = db.query(JdWaybillSendResp).filter_by(wms_order_code=query.get
('wms_order_code')).first()
if jd_rsp:
return jd_rsp.body
return HTTPError(404, None)
def jd_get_response_normal():
code = str(uuid.uuid4()).split('-')[-1]
return code, json.dumps({'jingdong_etms_waybillcode_get_responce': {
'resultInfo': {'message': u'成功', 'code': 100, 'deliveryIdList': [
code]}, 'code': u'0'}})
def jd_send_response_normal(deliver_id, order_id):
return json.dumps({'jingdong_etms_waybill_send_responce': {'resultInfo':
{'message': u'成功', 'deliveryId': deliver_id, 'code': 100, 'orderId':
order_id}}})
| import logging
import uuid
import json
import xmltodict
import bottle
from bottle import HTTPError
from bottle.ext import sqlalchemy
from database import Base, engine
from database import JdWaybillSendResp, JdWaybillApplyResp
jd = bottle.Bottle(catchall=False)
plugin = sqlalchemy.Plugin(engine, Base.metadata, keyword='db', create=True,
commit=True, use_kwargs=False)
jd.install(plugin)
@jd.get('/routerjson')
def apply_jd_waybill(db):
query = bottle.request.query
if query['method'] == 'jingdong.etms.waybillcode.get':
jd_code, resp = jd_get_response_normal()
logging.debug('JD response: {} {}'.format(jd_code, resp))
db.add(JdWaybillApplyResp(jd_code, resp))
else:
jd_param = json.loads(query['360buy_param_json'])
delivery_id = jd_param['deliveryId']
order_id = jd_param['orderId']
resp = jd_send_response_normal(delivery_id, order_id)
db.add(JdWaybillSendResp(delivery_id, order_id, resp))
logging.debug('JD response: {}'.format(resp))
return resp
@jd.get('/jd_waybill')
def jd_waybill(db):
query = bottle.request.query
jd_rsp = db.query(JdWaybillSendResp).filter_by(wms_order_code=query.get
('wms_order_code')).first()
if jd_rsp:
return jd_rsp.body
return HTTPError(404, None)
def jd_get_response_normal():
code = str(uuid.uuid4()).split('-')[-1]
return code, json.dumps({'jingdong_etms_waybillcode_get_responce': {
'resultInfo': {'message': u'成功', 'code': 100, 'deliveryIdList': [
code]}, 'code': u'0'}})
def jd_send_response_normal(deliver_id, order_id):
return json.dumps({'jingdong_etms_waybill_send_responce': {'resultInfo':
{'message': u'成功', 'deliveryId': deliver_id, 'code': 100, 'orderId':
order_id}}})
| # coding: utf-8
import logging
import uuid
import json
import xmltodict
import bottle
from bottle import HTTPError
from bottle.ext import sqlalchemy
from database import Base, engine
from database import JdWaybillSendResp, JdWaybillApplyResp
jd = bottle.Bottle(catchall=False)
plugin = sqlalchemy.Plugin(
engine, # SQLAlchemy engine created with create_engine function.
Base.metadata, # SQLAlchemy metadata, required only if create=True.
keyword='db', # Keyword used to inject session database in a route (default 'db').
create=True, # If it is true, execute `metadata.create_all(engine)` when plugin is applied (default False).
commit=True, # If it is true, plugin commit changes after route is executed (default True).
use_kwargs=False
# If it is true and keyword is not defined,
# plugin uses **kwargs argument to inject session database (default False).
)
jd.install(plugin)
@jd.get('/routerjson')
def apply_jd_waybill(db):
query = bottle.request.query
if query['method'] == 'jingdong.etms.waybillcode.get':
jd_code, resp = jd_get_response_normal()
logging.debug('JD response: {} {}'.format(jd_code, resp))
db.add(JdWaybillApplyResp(jd_code, resp))
else: # '''jingdong.etms.waybillcode.send'''
jd_param = json.loads(query['360buy_param_json'])
delivery_id = jd_param['deliveryId']
order_id = jd_param['orderId']
resp = jd_send_response_normal(delivery_id, order_id)
db.add(JdWaybillSendResp(delivery_id, order_id, resp))
logging.debug('JD response: {}'.format(resp))
return resp
@jd.get('/jd_waybill')
def jd_waybill(db):
query = bottle.request.query
jd_rsp = db.query(JdWaybillSendResp).filter_by(wms_order_code=query.get('wms_order_code')).first()
if jd_rsp:
# return entities
return jd_rsp.body
return HTTPError(404, None)
def jd_get_response_normal():
code = str(uuid.uuid4()).split('-')[-1]
return code, json.dumps({
'jingdong_etms_waybillcode_get_responce':
{'resultInfo':
{'message': u'成功',
'code': 100,
'deliveryIdList': [code]
},
'code': u'0'
}
})
def jd_send_response_normal(deliver_id, order_id):
return json.dumps({
"jingdong_etms_waybill_send_responce": {
"resultInfo": {
"message": u"成功",
"deliveryId": deliver_id,
"code": 100,
"orderId": order_id
}
}
})
| [
4,
5,
6,
7,
8
] |
936 | 0cc1aaa182fcf002ff2ae6cbcd6cbb84a08a3bc1 | <mask token>
| <mask token>
conn.request('POST', '/api/v1/testsuites', payload, headers)
<mask token>
conn.request('POST', '/api/v1/testsuites', payload, headers)
<mask token>
conn.request('POST', '/api/v1/testcases', payload, headers)
<mask token>
conn.request('POST', '/api/v1/testcases', payload, headers)
<mask token>
conn.request('POST', '/api/v1/testcases', payload, headers)
<mask token>
conn.request('POST', '/api/v1/testcases', payload, headers)
<mask token>
conn.request('POST', '/api/v1/testcases', payload, headers)
<mask token>
conn.request('POST', '/api/v1/testcases', payload, headers)
<mask token>
conn.request('POST', '/api/v1/testcases', payload, headers)
<mask token>
conn.request('POST', '/api/v1/testcases', payload, headers)
<mask token>
conn.request('POST', '/api/v1/testcases', payload, headers)
<mask token>
conn.request('POST', '/api/v1/testcases', payload, headers)
<mask token>
conn.request('POST', '/api/v1/testcases', payload, headers)
<mask token>
conn.request('POST', '/api/v1/testsuites', payload, headers)
<mask token>
conn.request('POST', '/api/v1/testcases', payload, headers)
<mask token>
conn.request('POST', '/api/v1/testcases', payload, headers)
<mask token>
conn.request('POST', '/api/v1/testcases', payload, headers)
<mask token>
conn.request('POST', '/api/v1/testcases', payload, headers)
<mask token>
conn.request('POST', '/api/v1/testcases', payload, headers)
<mask token>
conn.request('POST', '/api/v1/testcases', payload, headers)
<mask token>
conn.request('POST', '/api/v1/testcases', payload, headers)
<mask token>
conn.request('POST', '/api/v1/testcases', payload, headers)
<mask token>
conn.request('POST', '/api/v1/testcases', payload, headers)
<mask token>
conn.request('POST', '/api/v1/testsuites', payload, headers)
<mask token>
conn.request('POST', '/api/v1/testcases', payload, headers)
<mask token>
conn.request('POST', '/api/v1/testcases', payload, headers)
<mask token>
conn.request('POST', '/api/v1/testcases', payload, headers)
<mask token>
conn.request('POST', '/api/v1/testcases', payload, headers)
<mask token>
conn.request('POST', '/api/v1/testcases', payload, headers)
<mask token>
conn.request('POST', '/api/v1/testcases', payload, headers)
<mask token>
conn.request('POST', '/api/v1/testcases', payload, headers)
<mask token>
conn.request('POST', '/api/v1/testcases', payload, headers)
<mask token>
conn.request('POST', '/api/v1/testcases', payload, headers)
<mask token>
conn.request('POST', '/api/v1/testsuites', payload, headers)
<mask token>
conn.request('POST', '/api/v1/testcases', payload, headers)
<mask token>
conn.request('POST', '/api/v1/testcases', payload, headers)
<mask token>
conn.request('POST', '/api/v1/testcases', payload, headers)
<mask token>
conn.request('POST', '/api/v1/testcases', payload, headers)
| <mask token>
host = 'localhost:8000'
api_token = 'fuukp8LhdxxwoVdtJu5K8LQtpTods8ddLMq66wSUFXGsqJKpmJAa1YyqkHN3'
conn = http.client.HTTPConnection(host)
headers = {'authorization': 'Bearer ' + api_token, 'content-type':
'application/json', 'cache-control': 'no-cache', 'postman-token':
'44709a5c-ca4a-bbce-4b24-f0632a29bde4'}
payload = """{
"Name": "Create and edit project"
}"""
conn.request('POST', '/api/v1/testsuites', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"Name": "Create and edit requirement"
}"""
conn.request('POST', '/api/v1/testsuites', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 1,
"Name": "Not selected project"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 1,
"Name": "Create project"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 1,
"Name": "Create project without name"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 1,
"Name": "Check if overview contains project"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 1,
"Name": "Edit project"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 2,
"Name": "Create project"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 2,
"Name": "Create requirement"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 2,
"Name": "Create requirement without name"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 2,
"Name": "Overview contains requirement"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 2,
"Name": "Edit requirement"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 2,
"Name": "Cover requirement"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"Name": "Create and edit TestSuites and TestCase"
}"""
conn.request('POST', '/api/v1/testsuites', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 3,
"Name": "Create test suite"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 3,
"Name": "Create test suite without name"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 3,
"Name": "Check if overview contains suite"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 3,
"Name": "Edit test suite"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 3,
"Name": "Create test case without details"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 3,
"Name": "Create test case with details"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 3,
"Name": "Create test case without name"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 3,
"Name": "Check if overview contains case"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 3,
"Name": "Edit test case"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"Name": "Create test set and run"
}"""
conn.request('POST', '/api/v1/testsuites', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 4,
"Name": "Create project"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 4,
"Name": "Create set"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 4,
"Name": "Overview contains set"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 4,
"Name": "Create set without name"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 4,
"Name": "Create set without tests"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 4,
"Name": "Edit test set"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 4,
"Name": "Create test run"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 4,
"Name": "Overview contains run"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 4,
"Name": "Execute contains tests"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"Name": "Registration and log test"
}"""
conn.request('POST', '/api/v1/testsuites', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 5,
"Name": "Redirect to login page"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 5,
"Name": "Registration"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 5,
"Name": "Registrate same user"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 5,
"Name": "Log and logout"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
| import http.client
host = 'localhost:8000'
api_token = 'fuukp8LhdxxwoVdtJu5K8LQtpTods8ddLMq66wSUFXGsqJKpmJAa1YyqkHN3'
conn = http.client.HTTPConnection(host)
headers = {'authorization': 'Bearer ' + api_token, 'content-type':
'application/json', 'cache-control': 'no-cache', 'postman-token':
'44709a5c-ca4a-bbce-4b24-f0632a29bde4'}
payload = """{
"Name": "Create and edit project"
}"""
conn.request('POST', '/api/v1/testsuites', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"Name": "Create and edit requirement"
}"""
conn.request('POST', '/api/v1/testsuites', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 1,
"Name": "Not selected project"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 1,
"Name": "Create project"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 1,
"Name": "Create project without name"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 1,
"Name": "Check if overview contains project"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 1,
"Name": "Edit project"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 2,
"Name": "Create project"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 2,
"Name": "Create requirement"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 2,
"Name": "Create requirement without name"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 2,
"Name": "Overview contains requirement"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 2,
"Name": "Edit requirement"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 2,
"Name": "Cover requirement"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"Name": "Create and edit TestSuites and TestCase"
}"""
conn.request('POST', '/api/v1/testsuites', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 3,
"Name": "Create test suite"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 3,
"Name": "Create test suite without name"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 3,
"Name": "Check if overview contains suite"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 3,
"Name": "Edit test suite"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 3,
"Name": "Create test case without details"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 3,
"Name": "Create test case with details"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 3,
"Name": "Create test case without name"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 3,
"Name": "Check if overview contains case"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 3,
"Name": "Edit test case"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"Name": "Create test set and run"
}"""
conn.request('POST', '/api/v1/testsuites', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 4,
"Name": "Create project"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 4,
"Name": "Create set"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 4,
"Name": "Overview contains set"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 4,
"Name": "Create set without name"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 4,
"Name": "Create set without tests"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 4,
"Name": "Edit test set"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 4,
"Name": "Create test run"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 4,
"Name": "Overview contains run"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 4,
"Name": "Execute contains tests"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"Name": "Registration and log test"
}"""
conn.request('POST', '/api/v1/testsuites', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 5,
"Name": "Redirect to login page"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 5,
"Name": "Registration"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 5,
"Name": "Registrate same user"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
res = conn.getresponse()
data = res.read()
payload = """{
"TestSuite_id": 5,
"Name": "Log and logout"
}"""
conn.request('POST', '/api/v1/testcases', payload, headers)
| # Basic script which send some request via rest api to the test-management-tool.
# Be sure you setup host and api_token variable
import http.client
host = "localhost:8000"
api_token = "fuukp8LhdxxwoVdtJu5K8LQtpTods8ddLMq66wSUFXGsqJKpmJAa1YyqkHN3"
# Connection
conn = http.client.HTTPConnection(host)
# Create a header of http request
headers = {
'authorization': "Bearer " + api_token,
'content-type': "application/json",
'cache-control': "no-cache",
'postman-token': "44709a5c-ca4a-bbce-4b24-f0632a29bde4"
}
################################################
payload = "{\n \"Name\": \"Create and edit project\"\n}"
conn.request("POST", "/api/v1/testsuites", payload, headers)
###
res = conn.getresponse()
data = res.read()
payload = "{\n \"Name\": \"Create and edit requirement\"\n}"
conn.request("POST", "/api/v1/testsuites", payload, headers)
res = conn.getresponse()
data = res.read()
payload = "{\n \"TestSuite_id\": 1,\n \"Name\": \"Not selected project\"\n}"
conn.request("POST", "/api/v1/testcases", payload, headers)
res = conn.getresponse()
data = res.read()
payload = "{\n \"TestSuite_id\": 1,\n \"Name\": \"Create project\"\n}"
conn.request("POST", "/api/v1/testcases", payload, headers)
res = conn.getresponse()
data = res.read()
payload = "{\n \"TestSuite_id\": 1,\n \"Name\": \"Create project without name\"\n}"
conn.request("POST", "/api/v1/testcases", payload, headers)
res = conn.getresponse()
data = res.read()
payload = "{\n \"TestSuite_id\": 1,\n \"Name\": \"Check if overview contains project\"\n}"
conn.request("POST", "/api/v1/testcases", payload, headers)
res = conn.getresponse()
data = res.read()
payload = "{\n \"TestSuite_id\": 1,\n \"Name\": \"Edit project\"\n}"
conn.request("POST", "/api/v1/testcases", payload, headers)
res = conn.getresponse()
data = res.read()
################################################
###
payload = "{\n \"TestSuite_id\": 2,\n \"Name\": \"Create project\"\n}"
conn.request("POST", "/api/v1/testcases", payload, headers)
res = conn.getresponse()
data = res.read()
payload = "{\n \"TestSuite_id\": 2,\n \"Name\": \"Create requirement\"\n}"
conn.request("POST", "/api/v1/testcases", payload, headers)
res = conn.getresponse()
data = res.read()
payload = "{\n \"TestSuite_id\": 2,\n \"Name\": \"Create requirement without name\"\n}"
conn.request("POST", "/api/v1/testcases", payload, headers)
res = conn.getresponse()
data = res.read()
payload = "{\n \"TestSuite_id\": 2,\n \"Name\": \"Overview contains requirement\"\n}"
conn.request("POST", "/api/v1/testcases", payload, headers)
res = conn.getresponse()
data = res.read()
payload = "{\n \"TestSuite_id\": 2,\n \"Name\": \"Edit requirement\"\n}"
conn.request("POST", "/api/v1/testcases", payload, headers)
res = conn.getresponse()
data = res.read()
payload = "{\n \"TestSuite_id\": 2,\n \"Name\": \"Cover requirement\"\n}"
conn.request("POST", "/api/v1/testcases", payload, headers)
res = conn.getresponse()
data = res.read()
################################################
payload = "{\n \"Name\": \"Create and edit TestSuites and TestCase\"\n}"
conn.request("POST", "/api/v1/testsuites", payload, headers)
###
res = conn.getresponse()
data = res.read()
payload = "{\n \"TestSuite_id\": 3,\n \"Name\": \"Create test suite\"\n}"
conn.request("POST", "/api/v1/testcases", payload, headers)
res = conn.getresponse()
data = res.read()
payload = "{\n \"TestSuite_id\": 3,\n \"Name\": \"Create test suite without name\"\n}"
conn.request("POST", "/api/v1/testcases", payload, headers)
res = conn.getresponse()
data = res.read()
payload = "{\n \"TestSuite_id\": 3,\n \"Name\": \"Check if overview contains suite\"\n}"
conn.request("POST", "/api/v1/testcases", payload, headers)
res = conn.getresponse()
data = res.read()
payload = "{\n \"TestSuite_id\": 3,\n \"Name\": \"Edit test suite\"\n}"
conn.request("POST", "/api/v1/testcases", payload, headers)
res = conn.getresponse()
data = res.read()
payload = "{\n \"TestSuite_id\": 3,\n \"Name\": \"Create test case without details\"\n}"
conn.request("POST", "/api/v1/testcases", payload, headers)
res = conn.getresponse()
data = res.read()
payload = "{\n \"TestSuite_id\": 3,\n \"Name\": \"Create test case with details\"\n}"
conn.request("POST", "/api/v1/testcases", payload, headers)
res = conn.getresponse()
data = res.read()
payload = "{\n \"TestSuite_id\": 3,\n \"Name\": \"Create test case without name\"\n}"
conn.request("POST", "/api/v1/testcases", payload, headers)
res = conn.getresponse()
data = res.read()
payload = "{\n \"TestSuite_id\": 3,\n \"Name\": \"Check if overview contains case\"\n}"
conn.request("POST", "/api/v1/testcases", payload, headers)
res = conn.getresponse()
data = res.read()
payload = "{\n \"TestSuite_id\": 3,\n \"Name\": \"Edit test case\"\n}"
conn.request("POST", "/api/v1/testcases", payload, headers)
res = conn.getresponse()
data = res.read()
################################################
payload = "{\n \"Name\": \"Create test set and run\"\n}"
conn.request("POST", "/api/v1/testsuites", payload, headers)
###
res = conn.getresponse()
data = res.read()
payload = "{\n \"TestSuite_id\": 4,\n \"Name\": \"Create project\"\n}"
conn.request("POST", "/api/v1/testcases", payload, headers)
res = conn.getresponse()
data = res.read()
payload = "{\n \"TestSuite_id\": 4,\n \"Name\": \"Create set\"\n}"
conn.request("POST", "/api/v1/testcases", payload, headers)
res = conn.getresponse()
data = res.read()
payload = "{\n \"TestSuite_id\": 4,\n \"Name\": \"Overview contains set\"\n}"
conn.request("POST", "/api/v1/testcases", payload, headers)
res = conn.getresponse()
data = res.read()
payload = "{\n \"TestSuite_id\": 4,\n \"Name\": \"Create set without name\"\n}"
conn.request("POST", "/api/v1/testcases", payload, headers)
res = conn.getresponse()
data = res.read()
payload = "{\n \"TestSuite_id\": 4,\n \"Name\": \"Create set without tests\"\n}"
conn.request("POST", "/api/v1/testcases", payload, headers)
res = conn.getresponse()
data = res.read()
payload = "{\n \"TestSuite_id\": 4,\n \"Name\": \"Edit test set\"\n}"
conn.request("POST", "/api/v1/testcases", payload, headers)
res = conn.getresponse()
data = res.read()
payload = "{\n \"TestSuite_id\": 4,\n \"Name\": \"Create test run\"\n}"
conn.request("POST", "/api/v1/testcases", payload, headers)
res = conn.getresponse()
data = res.read()
payload = "{\n \"TestSuite_id\": 4,\n \"Name\": \"Overview contains run\"\n}"
conn.request("POST", "/api/v1/testcases", payload, headers)
res = conn.getresponse()
data = res.read()
payload = "{\n \"TestSuite_id\": 4,\n \"Name\": \"Execute contains tests\"\n}"
conn.request("POST", "/api/v1/testcases", payload, headers)
res = conn.getresponse()
data = res.read()
################################################
payload = "{\n \"Name\": \"Registration and log test\"\n}"
conn.request("POST", "/api/v1/testsuites", payload, headers)
###
res = conn.getresponse()
data = res.read()
payload = "{\n \"TestSuite_id\": 5,\n \"Name\": \"Redirect to login page\"\n}"
conn.request("POST", "/api/v1/testcases", payload, headers)
res = conn.getresponse()
data = res.read()
payload = "{\n \"TestSuite_id\": 5,\n \"Name\": \"Registration\"\n}"
conn.request("POST", "/api/v1/testcases", payload, headers)
res = conn.getresponse()
data = res.read()
payload = "{\n \"TestSuite_id\": 5,\n \"Name\": \"Registrate same user\"\n}"
conn.request("POST", "/api/v1/testcases", payload, headers)
res = conn.getresponse()
data = res.read()
payload = "{\n \"TestSuite_id\": 5,\n \"Name\": \"Log and logout\"\n}"
conn.request("POST", "/api/v1/testcases", payload, headers)
| [
0,
1,
2,
3,
4
] |
937 | 5bd8cee2595215fda6ab523a646cf918e3d84a50 | <mask token>
| <mask token>
urlpatterns = [path('', HomeView.as_view(), name='HomeView'), path(
'LoginView/', LoginView.as_view(), name='LoginView'), path(
'SignUpView/', SignUpView.as_view(), name='SignUpView'), path(
'SettingsView/', SettingsView.as_view(), name='SettingsView'), path(
'LogoutView/', LogoutView.as_view(), name='LogoutView'), path(
'social_auth/', include('social_django.urls', namespace='social')),
path('users_list/', views.users_list, name='users_list'), path(
'CreatePostView/', CreatePostView.as_view(), name='CreatePostView'),
path('like/<int:id>/', views.like, name='like'), path(
'CommentPostView/<int:id>/', CommentPostView.as_view(), name=
'CommentPostView'), path('follow/<int:id>/', views.follow, name=
'follow'), path('followback/<int:id>/', views.followback, name=
'followback'), path('delete_request/<int:id>/', views.delete_request,
name='delete_request'), path('unfriend/<int:id>/', views.unfriend, name
='unfriend'), path('friendslist/<int:id>/', views.friendslist, name=
'friendslist'), path('PasswordChangeView/', PasswordChangeView.as_view(
), name='PasswordChangeView'), path('DetailsChangeView/',
DetailsChangeView.as_view(), name='DetailsChangeView'), path(
'user_profile_view/<int:id>/', views.user_profile_view, name=
'user_profile_view'), path('start_chat/<int:id>/', views.start_chat,
name='start_chat'), path('search_function/', views.search_function,
name='search_function')]
| from django.urls import path, include
from . import views
from user.views import DetailsChangeView, HomeView, PasswordChangeView, SignUpView, LoginView, SettingsView, LogoutView, CreatePostView, CommentPostView, PasswordChangeView
urlpatterns = [path('', HomeView.as_view(), name='HomeView'), path(
'LoginView/', LoginView.as_view(), name='LoginView'), path(
'SignUpView/', SignUpView.as_view(), name='SignUpView'), path(
'SettingsView/', SettingsView.as_view(), name='SettingsView'), path(
'LogoutView/', LogoutView.as_view(), name='LogoutView'), path(
'social_auth/', include('social_django.urls', namespace='social')),
path('users_list/', views.users_list, name='users_list'), path(
'CreatePostView/', CreatePostView.as_view(), name='CreatePostView'),
path('like/<int:id>/', views.like, name='like'), path(
'CommentPostView/<int:id>/', CommentPostView.as_view(), name=
'CommentPostView'), path('follow/<int:id>/', views.follow, name=
'follow'), path('followback/<int:id>/', views.followback, name=
'followback'), path('delete_request/<int:id>/', views.delete_request,
name='delete_request'), path('unfriend/<int:id>/', views.unfriend, name
='unfriend'), path('friendslist/<int:id>/', views.friendslist, name=
'friendslist'), path('PasswordChangeView/', PasswordChangeView.as_view(
), name='PasswordChangeView'), path('DetailsChangeView/',
DetailsChangeView.as_view(), name='DetailsChangeView'), path(
'user_profile_view/<int:id>/', views.user_profile_view, name=
'user_profile_view'), path('start_chat/<int:id>/', views.start_chat,
name='start_chat'), path('search_function/', views.search_function,
name='search_function')]
| from django.urls import path,include
from.import views
from user.views import DetailsChangeView, HomeView, PasswordChangeView,SignUpView,LoginView,SettingsView,LogoutView,CreatePostView,CommentPostView,PasswordChangeView
urlpatterns = [
path('', HomeView.as_view(), name = 'HomeView'),
path('LoginView/', LoginView.as_view(), name = 'LoginView'),
path('SignUpView/',SignUpView.as_view(), name = 'SignUpView' ),
path('SettingsView/', SettingsView.as_view(), name = 'SettingsView'),
path('LogoutView/', LogoutView.as_view(), name = 'LogoutView'),
path('social_auth/', include('social_django.urls', namespace = 'social')),
path('users_list/', views.users_list, name = 'users_list'),
path('CreatePostView/', CreatePostView.as_view(), name = 'CreatePostView'),
path('like/<int:id>/', views.like , name = 'like'),
path('CommentPostView/<int:id>/', CommentPostView.as_view(), name = 'CommentPostView'),
path('follow/<int:id>/', views.follow , name = 'follow'),
path('followback/<int:id>/', views.followback, name = 'followback'),
path('delete_request/<int:id>/',views.delete_request, name = 'delete_request'),
path('unfriend/<int:id>/', views.unfriend, name = 'unfriend'),
path('friendslist/<int:id>/',views.friendslist, name = 'friendslist'),
# path('FollowListView/<int:id>/',FollowListView.as_view(), name = 'FollowListView')
path('PasswordChangeView/', PasswordChangeView.as_view(), name = 'PasswordChangeView'),
path('DetailsChangeView/', DetailsChangeView.as_view(), name= 'DetailsChangeView'),
path('user_profile_view/<int:id>/',views.user_profile_view, name = 'user_profile_view'),
path('start_chat/<int:id>/', views.start_chat, name= 'start_chat'),
path('search_function/', views.search_function, name='search_function')
] | null | [
0,
1,
2,
3
] |
938 | 18dce1ce683b15201dbb5436cbd4288a0df99c28 | <mask token>
def get_last_argument(words):
return ' '.join(words)[:-1]
<mask token>
def parse_what_is_the(words):
question_number = None
arg = None
if words[3] == POPULATION_KEY:
question_number = 3
arg = get_last_argument(words[5:])
elif words[3] == AREA_KEY:
question_number = 4
arg = get_last_argument(words[5:])
elif words[3] == GOVERNMENT_KEY:
question_number = 5
arg = get_last_argument(words[5:])
elif words[3] == CAPITAL_KEY:
question_number = 6
arg = get_last_argument(words[5:])
return question_number, arg
<mask token>
def parse_user_question(string):
question_number = None
arg = None
words = string.lower().split(' ')
if len(words) == 0 or len(words) < 3 or words[len(words) - 1][-1] != '?':
return question_number, arg
if words[0] == WHO_KEY and words[1] == IS_KEY:
question_number, arg = parse_who_is(words)
elif len(words) > 5 and words[0] == WHAT_KEY and words[1
] == IS_KEY and words[2] == THE_KEY and words[4] == OF_KEY:
question_number, arg = parse_what_is_the(words)
elif len(words) > 6 and words[0] == WHEN_KEY and words[1
] == WAS_KEY and words[2] == THE_KEY:
question_number, arg = parse_when_was_the(words)
return question_number, arg
def do_request(question, arg):
ans = None
if question == 1:
print(president_of_country_query(arg))
elif question == 2:
print(prime_minister_of_country_query(arg))
elif question == 3:
print(population_of_country_query(arg))
elif question == 4:
print(area_of_country_query(arg))
elif question == 5:
print(government_of_country_query(arg))
elif question == 6:
print(capital_of_country_query(arg))
elif question == 7:
print(president_born_date_query(arg))
elif question == 8:
print(prime_minister_born_date_query(arg))
elif question == 9:
print(who_query(arg))
else:
print('ERROR')
def start_console(question):
question, arg = parse_user_question(question)
if question is None or arg is None:
print('Invalid question, please enter new question.')
else:
do_request(question, arg)
| <mask token>
def get_last_argument(words):
return ' '.join(words)[:-1]
<mask token>
def parse_what_is_the(words):
question_number = None
arg = None
if words[3] == POPULATION_KEY:
question_number = 3
arg = get_last_argument(words[5:])
elif words[3] == AREA_KEY:
question_number = 4
arg = get_last_argument(words[5:])
elif words[3] == GOVERNMENT_KEY:
question_number = 5
arg = get_last_argument(words[5:])
elif words[3] == CAPITAL_KEY:
question_number = 6
arg = get_last_argument(words[5:])
return question_number, arg
def parse_when_was_the(words):
question_number = None
arg = None
if words[3] == PRESIDENT_KEY and words[4] == OF_KEY and words[len(words
) - 1] == BORN_KEY:
question_number = 7
arg = get_last_argument(words[5:len(words) - 1])
elif words[3] == PRIME_KEY and words[4] == MINISTER_KEY and words[5
] == OF_KEY and words[len(words) - 1] == BORN_KEY:
question_number = 8
arg = get_last_argument(words[6:len(words) - 1])
return question_number, arg
def parse_user_question(string):
question_number = None
arg = None
words = string.lower().split(' ')
if len(words) == 0 or len(words) < 3 or words[len(words) - 1][-1] != '?':
return question_number, arg
if words[0] == WHO_KEY and words[1] == IS_KEY:
question_number, arg = parse_who_is(words)
elif len(words) > 5 and words[0] == WHAT_KEY and words[1
] == IS_KEY and words[2] == THE_KEY and words[4] == OF_KEY:
question_number, arg = parse_what_is_the(words)
elif len(words) > 6 and words[0] == WHEN_KEY and words[1
] == WAS_KEY and words[2] == THE_KEY:
question_number, arg = parse_when_was_the(words)
return question_number, arg
def do_request(question, arg):
ans = None
if question == 1:
print(president_of_country_query(arg))
elif question == 2:
print(prime_minister_of_country_query(arg))
elif question == 3:
print(population_of_country_query(arg))
elif question == 4:
print(area_of_country_query(arg))
elif question == 5:
print(government_of_country_query(arg))
elif question == 6:
print(capital_of_country_query(arg))
elif question == 7:
print(president_born_date_query(arg))
elif question == 8:
print(prime_minister_born_date_query(arg))
elif question == 9:
print(who_query(arg))
else:
print('ERROR')
def start_console(question):
question, arg = parse_user_question(question)
if question is None or arg is None:
print('Invalid question, please enter new question.')
else:
do_request(question, arg)
| <mask token>
def get_last_argument(words):
return ' '.join(words)[:-1]
def parse_who_is(words):
question_number = None
arg = None
if len(words) > 5 and (words[3] == PRIME_KEY or words[3] == PRESIDENT_KEY):
if words[3] == PRESIDENT_KEY and words[4] == OF_KEY:
question_number = 1
arg = get_last_argument(words[5:])
elif words[3] == PRIME_KEY and words[4] == MINISTER_KEY and words[5
] == OF_KEY:
question_number = 2
arg = get_last_argument(words[6:])
elif len(words) > 2:
question_number = 9
arg = get_last_argument(words[2:])
return question_number, arg
def parse_what_is_the(words):
question_number = None
arg = None
if words[3] == POPULATION_KEY:
question_number = 3
arg = get_last_argument(words[5:])
elif words[3] == AREA_KEY:
question_number = 4
arg = get_last_argument(words[5:])
elif words[3] == GOVERNMENT_KEY:
question_number = 5
arg = get_last_argument(words[5:])
elif words[3] == CAPITAL_KEY:
question_number = 6
arg = get_last_argument(words[5:])
return question_number, arg
def parse_when_was_the(words):
question_number = None
arg = None
if words[3] == PRESIDENT_KEY and words[4] == OF_KEY and words[len(words
) - 1] == BORN_KEY:
question_number = 7
arg = get_last_argument(words[5:len(words) - 1])
elif words[3] == PRIME_KEY and words[4] == MINISTER_KEY and words[5
] == OF_KEY and words[len(words) - 1] == BORN_KEY:
question_number = 8
arg = get_last_argument(words[6:len(words) - 1])
return question_number, arg
def parse_user_question(string):
question_number = None
arg = None
words = string.lower().split(' ')
if len(words) == 0 or len(words) < 3 or words[len(words) - 1][-1] != '?':
return question_number, arg
if words[0] == WHO_KEY and words[1] == IS_KEY:
question_number, arg = parse_who_is(words)
elif len(words) > 5 and words[0] == WHAT_KEY and words[1
] == IS_KEY and words[2] == THE_KEY and words[4] == OF_KEY:
question_number, arg = parse_what_is_the(words)
elif len(words) > 6 and words[0] == WHEN_KEY and words[1
] == WAS_KEY and words[2] == THE_KEY:
question_number, arg = parse_when_was_the(words)
return question_number, arg
def do_request(question, arg):
ans = None
if question == 1:
print(president_of_country_query(arg))
elif question == 2:
print(prime_minister_of_country_query(arg))
elif question == 3:
print(population_of_country_query(arg))
elif question == 4:
print(area_of_country_query(arg))
elif question == 5:
print(government_of_country_query(arg))
elif question == 6:
print(capital_of_country_query(arg))
elif question == 7:
print(president_born_date_query(arg))
elif question == 8:
print(prime_minister_born_date_query(arg))
elif question == 9:
print(who_query(arg))
else:
print('ERROR')
def start_console(question):
question, arg = parse_user_question(question)
if question is None or arg is None:
print('Invalid question, please enter new question.')
else:
do_request(question, arg)
| from const import BORN_KEY, PRESIDENT_KEY, CAPITAL_KEY, PRIME_KEY, MINISTER_KEY, POPULATION_KEY, GOVERNMENT_KEY, AREA_KEY, WHO_KEY, IS_KEY, THE_KEY, OF_KEY, WHAT_KEY, WHEN_KEY, WAS_KEY
from geq_queries import capital_of_country_query, area_of_country_query, government_of_country_query, population_of_country_query, president_of_country_query, prime_minister_of_country_query, prime_minister_born_date_query, president_born_date_query, who_query
def get_last_argument(words):
return ' '.join(words)[:-1]
def parse_who_is(words):
question_number = None
arg = None
if len(words) > 5 and (words[3] == PRIME_KEY or words[3] == PRESIDENT_KEY):
if words[3] == PRESIDENT_KEY and words[4] == OF_KEY:
question_number = 1
arg = get_last_argument(words[5:])
elif words[3] == PRIME_KEY and words[4] == MINISTER_KEY and words[5
] == OF_KEY:
question_number = 2
arg = get_last_argument(words[6:])
elif len(words) > 2:
question_number = 9
arg = get_last_argument(words[2:])
return question_number, arg
def parse_what_is_the(words):
question_number = None
arg = None
if words[3] == POPULATION_KEY:
question_number = 3
arg = get_last_argument(words[5:])
elif words[3] == AREA_KEY:
question_number = 4
arg = get_last_argument(words[5:])
elif words[3] == GOVERNMENT_KEY:
question_number = 5
arg = get_last_argument(words[5:])
elif words[3] == CAPITAL_KEY:
question_number = 6
arg = get_last_argument(words[5:])
return question_number, arg
def parse_when_was_the(words):
question_number = None
arg = None
if words[3] == PRESIDENT_KEY and words[4] == OF_KEY and words[len(words
) - 1] == BORN_KEY:
question_number = 7
arg = get_last_argument(words[5:len(words) - 1])
elif words[3] == PRIME_KEY and words[4] == MINISTER_KEY and words[5
] == OF_KEY and words[len(words) - 1] == BORN_KEY:
question_number = 8
arg = get_last_argument(words[6:len(words) - 1])
return question_number, arg
def parse_user_question(string):
question_number = None
arg = None
words = string.lower().split(' ')
if len(words) == 0 or len(words) < 3 or words[len(words) - 1][-1] != '?':
return question_number, arg
if words[0] == WHO_KEY and words[1] == IS_KEY:
question_number, arg = parse_who_is(words)
elif len(words) > 5 and words[0] == WHAT_KEY and words[1
] == IS_KEY and words[2] == THE_KEY and words[4] == OF_KEY:
question_number, arg = parse_what_is_the(words)
elif len(words) > 6 and words[0] == WHEN_KEY and words[1
] == WAS_KEY and words[2] == THE_KEY:
question_number, arg = parse_when_was_the(words)
return question_number, arg
def do_request(question, arg):
ans = None
if question == 1:
print(president_of_country_query(arg))
elif question == 2:
print(prime_minister_of_country_query(arg))
elif question == 3:
print(population_of_country_query(arg))
elif question == 4:
print(area_of_country_query(arg))
elif question == 5:
print(government_of_country_query(arg))
elif question == 6:
print(capital_of_country_query(arg))
elif question == 7:
print(president_born_date_query(arg))
elif question == 8:
print(prime_minister_born_date_query(arg))
elif question == 9:
print(who_query(arg))
else:
print('ERROR')
def start_console(question):
question, arg = parse_user_question(question)
if question is None or arg is None:
print('Invalid question, please enter new question.')
else:
do_request(question, arg)
| from const import BORN_KEY, PRESIDENT_KEY, CAPITAL_KEY, PRIME_KEY, MINISTER_KEY, POPULATION_KEY, \
GOVERNMENT_KEY,AREA_KEY, WHO_KEY, IS_KEY, THE_KEY, OF_KEY, WHAT_KEY, WHEN_KEY, WAS_KEY
from geq_queries import capital_of_country_query, area_of_country_query, government_of_country_query, \
population_of_country_query, \
president_of_country_query, prime_minister_of_country_query, prime_minister_born_date_query, \
president_born_date_query, who_query
def get_last_argument(words):
return ' '.join(words)[:-1]
def parse_who_is(words):
question_number = None
arg = None
if len(words) > 5 and (words[3] == PRIME_KEY or words[3] == PRESIDENT_KEY):
# can be i, ii
if words[3] == PRESIDENT_KEY and words[4] == OF_KEY:
question_number = 1
arg = get_last_argument(words[5:])
elif words[3] == PRIME_KEY and words[4] == MINISTER_KEY and words[5] == OF_KEY:
question_number = 2
arg = get_last_argument(words[6:])
elif len(words) > 2:
question_number = 9
arg = get_last_argument(words[2:])
return question_number, arg
def parse_what_is_the(words):
question_number = None
arg = None
# can be iii, iv, v ,vi
if words[3] == POPULATION_KEY:
# iii
question_number = 3
arg = get_last_argument(words[5:])
elif words[3] == AREA_KEY:
# iv
question_number = 4
arg = get_last_argument(words[5:])
elif words[3] == GOVERNMENT_KEY:
question_number = 5
arg = get_last_argument(words[5:])
# v
elif words[3] == CAPITAL_KEY:
# vi
question_number = 6
arg = get_last_argument(words[5:])
return question_number, arg
def parse_when_was_the(words):
question_number = None
arg = None
# can be vii, viii
if words[3] == PRESIDENT_KEY and words[4] == OF_KEY and words[len(words) - 1] == BORN_KEY:
question_number = 7
arg = get_last_argument(words[5:len(words) - 1])
# can be vii
elif words[3] == PRIME_KEY and words[4] == MINISTER_KEY and words[5] == OF_KEY and words[len(words) - 1] == BORN_KEY:
question_number = 8
arg = get_last_argument(words[6:len(words) - 1])
return question_number, arg
def parse_user_question(string):
question_number = None
arg = None
words = string.lower().split(" ")
if len(words) == 0 or len(words) < 3 or words[len(words) - 1][-1] != '?':
return question_number, arg
if words[0] == WHO_KEY and words[1] == IS_KEY:
# can be only i, ii, ix
question_number, arg = parse_who_is(words)
elif len(words) > 5 and words[0] == WHAT_KEY and words[1] == IS_KEY and words[2] == THE_KEY and words[4] == OF_KEY:
question_number, arg = parse_what_is_the(words)
elif len(words) > 6 and words[0] == WHEN_KEY and words[1] == WAS_KEY and words[2] == THE_KEY:
question_number, arg = parse_when_was_the(words)
return question_number, arg
def do_request(question, arg):
ans = None
if question == 1:
print(president_of_country_query(arg))
elif question == 2:
print(prime_minister_of_country_query(arg))
elif question == 3:
print(population_of_country_query(arg))
elif question == 4:
print(area_of_country_query(arg))
elif question == 5:
print(government_of_country_query(arg))
elif question == 6:
print(capital_of_country_query(arg))
elif question == 7:
print(president_born_date_query(arg))
elif question == 8:
print(prime_minister_born_date_query(arg))
elif question == 9:
print(who_query(arg))
else:
print("ERROR")
def start_console(question):
question, arg = parse_user_question(question)
if question is None or arg is None:
print('Invalid question, please enter new question.')
else:
do_request(question, arg)
| [
5,
6,
7,
8,
9
] |
939 | 1968923cd923e68dc5ff2148802f18e40a5e6c33 | <mask token>
class Test(unittest.TestCase):
<mask token>
def tearDown(self):
pass
def test_these_should_win_for_x(self):
self.assertEqual(TicTacToe_Board.IsWinningBoard_static([['x', 'x',
'x'], ['o', 'x', 'o'], ['o', 'x', 'o']]), 'x', 'should return x')
self.assertEqual(TicTacToe_Board.IsWinningBoard_static([['x', 'o',
'o'], ['o', 'x', 'o'], ['x', 'o', 'x']]), 'x', 'should return x')
self.assertEqual(TicTacToe_Board.IsWinningBoard_static([['o', 'x',
'o'], ['x', 'x', 'x'], ['-', '-', '-']]), 'x', 'should return x')
def test_these_should_win_for_o(self):
self.assertEqual(TicTacToe_Board.IsWinningBoard_static([['o', 'x',
'o'], ['o', 'x', 'x'], ['o', 'o', 'x']]), 'o', 'should return o')
self.assertEqual(TicTacToe_Board.IsWinningBoard_static([['x', 'o',
'-'], ['o', 'o', 'o'], ['o', 'x', 'x']]), 'o', 'should return o')
self.assertEqual(TicTacToe_Board.IsWinningBoard_static([['o', 'x',
'o'], ['x', 'o', 'x'], ['-', '-', 'o']]), 'o', 'should return o')
def test_these_should_win_for_nobody(self):
self.assertEqual(TicTacToe_Board.IsWinningBoard_static([['x', 'x',
'-'], ['o', '-', 'o'], ['o', '-', 'o']]), None,
'should return None')
self.assertEqual(TicTacToe_Board.IsWinningBoard_static([['-', '-',
'-'], ['-', '-', '-'], ['x', 'o', 'x']]), None,
'should return None')
self.assertEqual(TicTacToe_Board.IsWinningBoard_static([['o', 'x',
'o'], ['-', '-', 'x'], ['-', 'o', 'o']]), None,
'should return None')
def test_make_move(self):
self.the_board.board_array = [['x', '-', 'x'], ['o', '-', 'o'], [
'o', 'x', '-']]
self.the_board.whose_turn = 'o'
self.the_board.MakeMove([1, 1])
self.assertEqual(self.the_board.board_array[1][1], 'o',
'should be an o')
self.assertEqual(self.the_board.whose_turn, 'x', 'turn should change')
<mask token>
<mask token>
<mask token>
def test_algorithm_by_playing_large_num_of_random_games(self):
NUM_GAMES = 10
NUM_GAMES = 10
for i in range(0, NUM_GAMES + 1):
win_result = StartNewGame(UseRandom=True)
self.assertTrue(win_result == 'Computer' or win_result == 'Tie')
def test_print(self):
self.the_board.board_array = [['-', '-', 'x'], ['-', 'o', '-'], [
'x', 'o', '-']]
self.the_board.PrintBoardToConsole()
def test_empty_squares(self):
pass
<mask token>
| <mask token>
class Test(unittest.TestCase):
def setUp(self):
self.the_board = TicTacToe_Board()
def tearDown(self):
pass
def test_these_should_win_for_x(self):
self.assertEqual(TicTacToe_Board.IsWinningBoard_static([['x', 'x',
'x'], ['o', 'x', 'o'], ['o', 'x', 'o']]), 'x', 'should return x')
self.assertEqual(TicTacToe_Board.IsWinningBoard_static([['x', 'o',
'o'], ['o', 'x', 'o'], ['x', 'o', 'x']]), 'x', 'should return x')
self.assertEqual(TicTacToe_Board.IsWinningBoard_static([['o', 'x',
'o'], ['x', 'x', 'x'], ['-', '-', '-']]), 'x', 'should return x')
def test_these_should_win_for_o(self):
self.assertEqual(TicTacToe_Board.IsWinningBoard_static([['o', 'x',
'o'], ['o', 'x', 'x'], ['o', 'o', 'x']]), 'o', 'should return o')
self.assertEqual(TicTacToe_Board.IsWinningBoard_static([['x', 'o',
'-'], ['o', 'o', 'o'], ['o', 'x', 'x']]), 'o', 'should return o')
self.assertEqual(TicTacToe_Board.IsWinningBoard_static([['o', 'x',
'o'], ['x', 'o', 'x'], ['-', '-', 'o']]), 'o', 'should return o')
def test_these_should_win_for_nobody(self):
self.assertEqual(TicTacToe_Board.IsWinningBoard_static([['x', 'x',
'-'], ['o', '-', 'o'], ['o', '-', 'o']]), None,
'should return None')
self.assertEqual(TicTacToe_Board.IsWinningBoard_static([['-', '-',
'-'], ['-', '-', '-'], ['x', 'o', 'x']]), None,
'should return None')
self.assertEqual(TicTacToe_Board.IsWinningBoard_static([['o', 'x',
'o'], ['-', '-', 'x'], ['-', 'o', 'o']]), None,
'should return None')
def test_make_move(self):
self.the_board.board_array = [['x', '-', 'x'], ['o', '-', 'o'], [
'o', 'x', '-']]
self.the_board.whose_turn = 'o'
self.the_board.MakeMove([1, 1])
self.assertEqual(self.the_board.board_array[1][1], 'o',
'should be an o')
self.assertEqual(self.the_board.whose_turn, 'x', 'turn should change')
<mask token>
def test_get_winning_moves_for_opponent(self):
comp_player = ComputerPlayer('x', self.the_board)
self.the_board.human_player_x_or_o = 'o'
self.the_board.c_player_x_or_o = 'x'
self.the_board.board_array = [['x', '-', 'x'], ['-', 'o', '-'], [
'o', 'o', '-']]
self.the_board.whose_turn = 'x'
winning_moves = self.the_board.GetWinningMovesFor('human')
d_pr(winning_moves)
self.assertIn([0, 1], winning_moves)
self.assertIn([2, 2], winning_moves)
comp_player = ComputerPlayer('o', self.the_board)
self.the_board.human_player_x_or_o = 'x'
self.the_board.c_player_x_or_o = 'o'
self.the_board.board_array = [['x', '-', 'x'], ['-', 'o', '-'], [
'o', 'o', '-']]
self.the_board.whose_turn = 'o'
winning_moves = self.the_board.GetWinningMovesFor('human')
d_pr(winning_moves)
self.assertIn([0, 1], winning_moves)
def test_get_threatening_moves(self):
comp_player = ComputerPlayer('x', self.the_board)
self.the_board.human_player_x_or_o = 'o'
self.the_board.c_player_x_or_o = 'x'
self.the_board.board_array = [['-', '-', 'x'], ['-', 'o', '-'], [
'o', '-', '-']]
self.the_board.whose_turn = 'x'
threatening_moves = comp_player.GetThreateningMovesWithoutTraps(self
.the_board.GetEmptySquares())
self.assertIn([0, 0], threatening_moves)
self.assertIn([2, 2], threatening_moves)
d_pr('threats without traps: ' + str(threatening_moves))
self.assertEqual(len(threatening_moves), 2)
self.the_board.human_player_x_or_o = 'o'
self.the_board.c_player_x_or_o = 'x'
self.the_board.board_array = [['-', '-', 'o'], ['-', 'x', '-'], [
'o', '-', '-']]
self.the_board.whose_turn = 'x'
threatening_moves = comp_player.GetThreateningMovesWithoutTraps(self
.the_board.GetEmptySquares())
self.assertIn([0, 1], threatening_moves)
self.assertIn([2, 1], threatening_moves)
self.assertIn([1, 0], threatening_moves)
self.assertIn([1, 2], threatening_moves)
d_pr('threats without traps: ' + str(threatening_moves))
self.assertEqual(len(threatening_moves), 4)
def test_algorithm_by_playing_large_num_of_random_games(self):
NUM_GAMES = 10
NUM_GAMES = 10
for i in range(0, NUM_GAMES + 1):
win_result = StartNewGame(UseRandom=True)
self.assertTrue(win_result == 'Computer' or win_result == 'Tie')
def test_print(self):
self.the_board.board_array = [['-', '-', 'x'], ['-', 'o', '-'], [
'x', 'o', '-']]
self.the_board.PrintBoardToConsole()
def test_empty_squares(self):
pass
<mask token>
| <mask token>
class Test(unittest.TestCase):
def setUp(self):
self.the_board = TicTacToe_Board()
def tearDown(self):
pass
def test_these_should_win_for_x(self):
self.assertEqual(TicTacToe_Board.IsWinningBoard_static([['x', 'x',
'x'], ['o', 'x', 'o'], ['o', 'x', 'o']]), 'x', 'should return x')
self.assertEqual(TicTacToe_Board.IsWinningBoard_static([['x', 'o',
'o'], ['o', 'x', 'o'], ['x', 'o', 'x']]), 'x', 'should return x')
self.assertEqual(TicTacToe_Board.IsWinningBoard_static([['o', 'x',
'o'], ['x', 'x', 'x'], ['-', '-', '-']]), 'x', 'should return x')
def test_these_should_win_for_o(self):
self.assertEqual(TicTacToe_Board.IsWinningBoard_static([['o', 'x',
'o'], ['o', 'x', 'x'], ['o', 'o', 'x']]), 'o', 'should return o')
self.assertEqual(TicTacToe_Board.IsWinningBoard_static([['x', 'o',
'-'], ['o', 'o', 'o'], ['o', 'x', 'x']]), 'o', 'should return o')
self.assertEqual(TicTacToe_Board.IsWinningBoard_static([['o', 'x',
'o'], ['x', 'o', 'x'], ['-', '-', 'o']]), 'o', 'should return o')
def test_these_should_win_for_nobody(self):
self.assertEqual(TicTacToe_Board.IsWinningBoard_static([['x', 'x',
'-'], ['o', '-', 'o'], ['o', '-', 'o']]), None,
'should return None')
self.assertEqual(TicTacToe_Board.IsWinningBoard_static([['-', '-',
'-'], ['-', '-', '-'], ['x', 'o', 'x']]), None,
'should return None')
self.assertEqual(TicTacToe_Board.IsWinningBoard_static([['o', 'x',
'o'], ['-', '-', 'x'], ['-', 'o', 'o']]), None,
'should return None')
def test_make_move(self):
self.the_board.board_array = [['x', '-', 'x'], ['o', '-', 'o'], [
'o', 'x', '-']]
self.the_board.whose_turn = 'o'
self.the_board.MakeMove([1, 1])
self.assertEqual(self.the_board.board_array[1][1], 'o',
'should be an o')
self.assertEqual(self.the_board.whose_turn, 'x', 'turn should change')
def test_computer_player_get_outcome(self):
comp_player = ComputerPlayer('x', self.the_board)
self.the_board.human_player_x_or_o = 'o'
self.the_board.c_player_x_or_o = 'x'
self.the_board.board_array = [['-', '-', 'x'], ['-', 'o', '-'], [
'-', '-', '-']]
self.the_board.whose_turn = 'x'
move_seq_1 = [{'player': 'x', 'move': [0, 1]}, {'player': 'o',
'move': [2, 1]}, {'player': 'x', 'move': [0, 0]}]
out = self.the_board.GetOutcomeOfMoveSequence(move_seq_1)
self.assertEqual(out, 'x', 'x should win: outcome should be x')
move_seq_2 = [{'player': 'x', 'move': [0, 1]}, {'player': 'o',
'move': [2, 1]}]
out = self.the_board.GetOutcomeOfMoveSequence(move_seq_2)
self.assertEqual(out, None, 'no one should win: outcome will be None')
move_seq_3 = [{'player': 'x', 'move': [0, 1]}, {'player': 'o',
'move': [0, 0]}, {'player': 'x', 'move': [2, 1]}, {'player':
'o', 'move': [2, 2]}]
out = self.the_board.GetOutcomeOfMoveSequence(move_seq_3)
self.assertEqual(out, 'o', 'o should win')
def test_get_winning_moves_for_opponent(self):
comp_player = ComputerPlayer('x', self.the_board)
self.the_board.human_player_x_or_o = 'o'
self.the_board.c_player_x_or_o = 'x'
self.the_board.board_array = [['x', '-', 'x'], ['-', 'o', '-'], [
'o', 'o', '-']]
self.the_board.whose_turn = 'x'
winning_moves = self.the_board.GetWinningMovesFor('human')
d_pr(winning_moves)
self.assertIn([0, 1], winning_moves)
self.assertIn([2, 2], winning_moves)
comp_player = ComputerPlayer('o', self.the_board)
self.the_board.human_player_x_or_o = 'x'
self.the_board.c_player_x_or_o = 'o'
self.the_board.board_array = [['x', '-', 'x'], ['-', 'o', '-'], [
'o', 'o', '-']]
self.the_board.whose_turn = 'o'
winning_moves = self.the_board.GetWinningMovesFor('human')
d_pr(winning_moves)
self.assertIn([0, 1], winning_moves)
def test_get_threatening_moves(self):
comp_player = ComputerPlayer('x', self.the_board)
self.the_board.human_player_x_or_o = 'o'
self.the_board.c_player_x_or_o = 'x'
self.the_board.board_array = [['-', '-', 'x'], ['-', 'o', '-'], [
'o', '-', '-']]
self.the_board.whose_turn = 'x'
threatening_moves = comp_player.GetThreateningMovesWithoutTraps(self
.the_board.GetEmptySquares())
self.assertIn([0, 0], threatening_moves)
self.assertIn([2, 2], threatening_moves)
d_pr('threats without traps: ' + str(threatening_moves))
self.assertEqual(len(threatening_moves), 2)
self.the_board.human_player_x_or_o = 'o'
self.the_board.c_player_x_or_o = 'x'
self.the_board.board_array = [['-', '-', 'o'], ['-', 'x', '-'], [
'o', '-', '-']]
self.the_board.whose_turn = 'x'
threatening_moves = comp_player.GetThreateningMovesWithoutTraps(self
.the_board.GetEmptySquares())
self.assertIn([0, 1], threatening_moves)
self.assertIn([2, 1], threatening_moves)
self.assertIn([1, 0], threatening_moves)
self.assertIn([1, 2], threatening_moves)
d_pr('threats without traps: ' + str(threatening_moves))
self.assertEqual(len(threatening_moves), 4)
def test_algorithm_by_playing_large_num_of_random_games(self):
NUM_GAMES = 10
NUM_GAMES = 10
for i in range(0, NUM_GAMES + 1):
win_result = StartNewGame(UseRandom=True)
self.assertTrue(win_result == 'Computer' or win_result == 'Tie')
def test_print(self):
self.the_board.board_array = [['-', '-', 'x'], ['-', 'o', '-'], [
'x', 'o', '-']]
self.the_board.PrintBoardToConsole()
def test_empty_squares(self):
pass
<mask token>
| <mask token>
class Test(unittest.TestCase):
def setUp(self):
self.the_board = TicTacToe_Board()
def tearDown(self):
pass
def test_these_should_win_for_x(self):
self.assertEqual(TicTacToe_Board.IsWinningBoard_static([['x', 'x',
'x'], ['o', 'x', 'o'], ['o', 'x', 'o']]), 'x', 'should return x')
self.assertEqual(TicTacToe_Board.IsWinningBoard_static([['x', 'o',
'o'], ['o', 'x', 'o'], ['x', 'o', 'x']]), 'x', 'should return x')
self.assertEqual(TicTacToe_Board.IsWinningBoard_static([['o', 'x',
'o'], ['x', 'x', 'x'], ['-', '-', '-']]), 'x', 'should return x')
def test_these_should_win_for_o(self):
self.assertEqual(TicTacToe_Board.IsWinningBoard_static([['o', 'x',
'o'], ['o', 'x', 'x'], ['o', 'o', 'x']]), 'o', 'should return o')
self.assertEqual(TicTacToe_Board.IsWinningBoard_static([['x', 'o',
'-'], ['o', 'o', 'o'], ['o', 'x', 'x']]), 'o', 'should return o')
self.assertEqual(TicTacToe_Board.IsWinningBoard_static([['o', 'x',
'o'], ['x', 'o', 'x'], ['-', '-', 'o']]), 'o', 'should return o')
def test_these_should_win_for_nobody(self):
self.assertEqual(TicTacToe_Board.IsWinningBoard_static([['x', 'x',
'-'], ['o', '-', 'o'], ['o', '-', 'o']]), None,
'should return None')
self.assertEqual(TicTacToe_Board.IsWinningBoard_static([['-', '-',
'-'], ['-', '-', '-'], ['x', 'o', 'x']]), None,
'should return None')
self.assertEqual(TicTacToe_Board.IsWinningBoard_static([['o', 'x',
'o'], ['-', '-', 'x'], ['-', 'o', 'o']]), None,
'should return None')
def test_make_move(self):
self.the_board.board_array = [['x', '-', 'x'], ['o', '-', 'o'], [
'o', 'x', '-']]
self.the_board.whose_turn = 'o'
self.the_board.MakeMove([1, 1])
self.assertEqual(self.the_board.board_array[1][1], 'o',
'should be an o')
self.assertEqual(self.the_board.whose_turn, 'x', 'turn should change')
def test_computer_player_get_outcome(self):
comp_player = ComputerPlayer('x', self.the_board)
self.the_board.human_player_x_or_o = 'o'
self.the_board.c_player_x_or_o = 'x'
self.the_board.board_array = [['-', '-', 'x'], ['-', 'o', '-'], [
'-', '-', '-']]
self.the_board.whose_turn = 'x'
move_seq_1 = [{'player': 'x', 'move': [0, 1]}, {'player': 'o',
'move': [2, 1]}, {'player': 'x', 'move': [0, 0]}]
out = self.the_board.GetOutcomeOfMoveSequence(move_seq_1)
self.assertEqual(out, 'x', 'x should win: outcome should be x')
move_seq_2 = [{'player': 'x', 'move': [0, 1]}, {'player': 'o',
'move': [2, 1]}]
out = self.the_board.GetOutcomeOfMoveSequence(move_seq_2)
self.assertEqual(out, None, 'no one should win: outcome will be None')
move_seq_3 = [{'player': 'x', 'move': [0, 1]}, {'player': 'o',
'move': [0, 0]}, {'player': 'x', 'move': [2, 1]}, {'player':
'o', 'move': [2, 2]}]
out = self.the_board.GetOutcomeOfMoveSequence(move_seq_3)
self.assertEqual(out, 'o', 'o should win')
def test_get_winning_moves_for_opponent(self):
comp_player = ComputerPlayer('x', self.the_board)
self.the_board.human_player_x_or_o = 'o'
self.the_board.c_player_x_or_o = 'x'
self.the_board.board_array = [['x', '-', 'x'], ['-', 'o', '-'], [
'o', 'o', '-']]
self.the_board.whose_turn = 'x'
winning_moves = self.the_board.GetWinningMovesFor('human')
d_pr(winning_moves)
self.assertIn([0, 1], winning_moves)
self.assertIn([2, 2], winning_moves)
comp_player = ComputerPlayer('o', self.the_board)
self.the_board.human_player_x_or_o = 'x'
self.the_board.c_player_x_or_o = 'o'
self.the_board.board_array = [['x', '-', 'x'], ['-', 'o', '-'], [
'o', 'o', '-']]
self.the_board.whose_turn = 'o'
winning_moves = self.the_board.GetWinningMovesFor('human')
d_pr(winning_moves)
self.assertIn([0, 1], winning_moves)
def test_get_threatening_moves(self):
comp_player = ComputerPlayer('x', self.the_board)
self.the_board.human_player_x_or_o = 'o'
self.the_board.c_player_x_or_o = 'x'
self.the_board.board_array = [['-', '-', 'x'], ['-', 'o', '-'], [
'o', '-', '-']]
self.the_board.whose_turn = 'x'
threatening_moves = comp_player.GetThreateningMovesWithoutTraps(self
.the_board.GetEmptySquares())
self.assertIn([0, 0], threatening_moves)
self.assertIn([2, 2], threatening_moves)
d_pr('threats without traps: ' + str(threatening_moves))
self.assertEqual(len(threatening_moves), 2)
self.the_board.human_player_x_or_o = 'o'
self.the_board.c_player_x_or_o = 'x'
self.the_board.board_array = [['-', '-', 'o'], ['-', 'x', '-'], [
'o', '-', '-']]
self.the_board.whose_turn = 'x'
threatening_moves = comp_player.GetThreateningMovesWithoutTraps(self
.the_board.GetEmptySquares())
self.assertIn([0, 1], threatening_moves)
self.assertIn([2, 1], threatening_moves)
self.assertIn([1, 0], threatening_moves)
self.assertIn([1, 2], threatening_moves)
d_pr('threats without traps: ' + str(threatening_moves))
self.assertEqual(len(threatening_moves), 4)
def test_algorithm_by_playing_large_num_of_random_games(self):
NUM_GAMES = 10
NUM_GAMES = 10
for i in range(0, NUM_GAMES + 1):
win_result = StartNewGame(UseRandom=True)
self.assertTrue(win_result == 'Computer' or win_result == 'Tie')
def test_print(self):
self.the_board.board_array = [['-', '-', 'x'], ['-', 'o', '-'], [
'x', 'o', '-']]
self.the_board.PrintBoardToConsole()
def test_empty_squares(self):
pass
if __name__ == '__main__':
unittest.main()
| '''
Created on Nov 16, 2013
@author: mo
'''
import unittest
from Board import TicTacToe_Board
from ComputerPlayer import ComputerPlayer
from utils import debug_print as d_pr
from main import StartNewGame
class Test(unittest.TestCase):
def setUp(self):
self.the_board = TicTacToe_Board()
def tearDown(self):
pass
#these may be impossible boards, but still it tests the win detector
def test_these_should_win_for_x(self):
self.assertEqual(TicTacToe_Board.IsWinningBoard_static( [ ['x', 'x', 'x'],
['o', 'x', 'o'],
['o', 'x', 'o']]), 'x', "should return x")
self.assertEqual(TicTacToe_Board.IsWinningBoard_static([
['x', 'o', 'o'],
['o', 'x', 'o'],
['x', 'o', 'x']
]) , 'x', 'should return x')
self.assertEqual(TicTacToe_Board.IsWinningBoard_static([
['o','x', 'o'],
['x', 'x', 'x'],
['-', '-', '-']
]), 'x', 'should return x'
)
def test_these_should_win_for_o(self):
self.assertEqual(TicTacToe_Board.IsWinningBoard_static( [ ['o', 'x', 'o'],
['o', 'x', 'x'],
['o', 'o', 'x']]), 'o', "should return o")
self.assertEqual(TicTacToe_Board.IsWinningBoard_static([
['x', 'o', '-'],
['o', 'o', 'o'],
['o', 'x', 'x']
]) , 'o', 'should return o')
self.assertEqual(TicTacToe_Board.IsWinningBoard_static([
['o','x', 'o'],
['x', 'o', 'x'],
['-', '-', 'o']
]), 'o', 'should return o'
)
def test_these_should_win_for_nobody(self):
self.assertEqual(TicTacToe_Board.IsWinningBoard_static( [ ['x', 'x', '-'],
['o', '-', 'o'],
['o', '-', 'o']]), None, "should return None")
self.assertEqual(TicTacToe_Board.IsWinningBoard_static([
['-', '-', '-'],
['-', '-', '-'],
['x', 'o', 'x']
]) , None, 'should return None')
self.assertEqual(TicTacToe_Board.IsWinningBoard_static([
['o','x', 'o'],
['-', '-', 'x'],
['-', 'o', 'o']
]), None, 'should return None'
)
def test_make_move(self):
self.the_board.board_array=[ ['x', '-', 'x'],
['o', '-', 'o'],
['o', 'x', '-']
]
self.the_board.whose_turn='o'
self.the_board.MakeMove([1,1])
self.assertEqual(self.the_board.board_array[1][1], 'o', "should be an o")
self.assertEqual(self.the_board.whose_turn, 'x', 'turn should change')
def test_computer_player_get_outcome(self):
comp_player = ComputerPlayer('x', self.the_board)
self.the_board.human_player_x_or_o = 'o'
self.the_board.c_player_x_or_o = 'x'
self.the_board.board_array = [ ['-', '-', 'x'],
['-', 'o', '-'],
['-', '-', '-']
]
self.the_board.whose_turn = 'x'
move_seq_1 = [ {'player': 'x', 'move' : [0,1] }, {'player': 'o', 'move' : [2,1]}, {'player': 'x', 'move': [0,0]} ]
out=self.the_board.GetOutcomeOfMoveSequence(move_seq_1)
self.assertEqual(out, 'x', 'x should win: outcome should be x')
move_seq_2 = [{'player': 'x', 'move' : [0,1] }, {'player': 'o', 'move' : [2,1]}]
out = self.the_board.GetOutcomeOfMoveSequence(move_seq_2)
self.assertEqual(out, None, 'no one should win: outcome will be None')
move_seq_3 = [ {'player': 'x', 'move' : [0,1] }, {'player': 'o', 'move' : [0,0] }, {'player': 'x', 'move' : [2,1]},
{'player': 'o', 'move' : [2,2] }
]
out = self.the_board.GetOutcomeOfMoveSequence(move_seq_3)
self.assertEqual(out, 'o', 'o should win')
def test_get_winning_moves_for_opponent(self):
comp_player = ComputerPlayer('x', self.the_board)
self.the_board.human_player_x_or_o = 'o'
self.the_board.c_player_x_or_o = 'x'
self.the_board.board_array = [ ['x', '-', 'x'],
['-', 'o', '-'],
['o', 'o', '-']
]
self.the_board.whose_turn = 'x'
winning_moves=self.the_board.GetWinningMovesFor( 'human')
d_pr(winning_moves)
self.assertIn([0,1], winning_moves)
self.assertIn([2,2], winning_moves)
comp_player = ComputerPlayer('o', self.the_board)
self.the_board.human_player_x_or_o = 'x'
self.the_board.c_player_x_or_o = 'o'
self.the_board.board_array = [ ['x', '-', 'x'],
['-', 'o', '-'],
['o', 'o', '-']
]
self.the_board.whose_turn = 'o'
winning_moves=self.the_board.GetWinningMovesFor( 'human')
d_pr(winning_moves)
self.assertIn([0,1], winning_moves)
def test_get_threatening_moves(self):
comp_player = ComputerPlayer('x', self.the_board)
self.the_board.human_player_x_or_o = 'o'
self.the_board.c_player_x_or_o = 'x'
self.the_board.board_array = [ ['-', '-', 'x'],
['-', 'o', '-'],
['o', '-', '-']
]
self.the_board.whose_turn = 'x'
threatening_moves=comp_player.GetThreateningMovesWithoutTraps(self.the_board.GetEmptySquares())
self.assertIn([0,0], threatening_moves)
self.assertIn([2,2], threatening_moves)
d_pr('threats without traps: ' + str(threatening_moves))
self.assertEqual(len(threatening_moves), 2)
self.the_board.human_player_x_or_o = 'o'
self.the_board.c_player_x_or_o = 'x'
self.the_board.board_array = [ ['-', '-', 'o'],
['-', 'x', '-'],
['o', '-', '-']
]
self.the_board.whose_turn = 'x'
threatening_moves=comp_player.GetThreateningMovesWithoutTraps(self.the_board.GetEmptySquares())
self.assertIn([0,1], threatening_moves)
self.assertIn([2,1], threatening_moves)
self.assertIn([1,0], threatening_moves)
self.assertIn([1,2], threatening_moves)
d_pr('threats without traps: ' + str(threatening_moves))
self.assertEqual(len(threatening_moves), 4)
def test_algorithm_by_playing_large_num_of_random_games(self):
NUM_GAMES = 10
#NUM_GAMES=100000 # this works but takes a long time
NUM_GAMES=10
for i in range(0, NUM_GAMES + 1):
win_result = StartNewGame(UseRandom=True)
self.assertTrue(win_result == 'Computer' or win_result == 'Tie')
def test_print(self):
self.the_board.board_array = [ ['-', '-', 'x'],
['-', 'o', '-'],
['x', 'o', '-']]
self.the_board.PrintBoardToConsole()
def test_empty_squares(self):
pass
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()
| [
9,
12,
13,
14,
16
] |
940 | 8e629ee53f11e29aa026763508d13b06f6ced5ba | <mask token>
| <mask token>
class BinaryTreeInorderTraversal(object):
<mask token>
| <mask token>
class BinaryTreeInorderTraversal(object):
def inorderTraversal(self, root: TreeNode) ->List[int]:
result = list()
inorder_stack = list()
while root or inorder_stack:
if root:
inorder_stack.append(root)
root = root.left
else:
root = inorder_stack.pop()
result.append(root.val)
root = root.right
return result
| __author__ = 'yangxin_ryan'
<mask token>
class BinaryTreeInorderTraversal(object):
def inorderTraversal(self, root: TreeNode) ->List[int]:
result = list()
inorder_stack = list()
while root or inorder_stack:
if root:
inorder_stack.append(root)
root = root.left
else:
root = inorder_stack.pop()
result.append(root.val)
root = root.right
return result
| # -*- coding:utf-8 -*-
__author__ = 'yangxin_ryan'
"""
Solutions:
题目要求非递归的中序遍历,
中序遍历的意思其实就是先遍历左孩子、然后是根结点、最后是右孩子。我们按照这个逻辑,应该先循环到root的最左孩子,
然后依次出栈,然后将结果放入结果集合result,然后是根的val,然后右孩子。
"""
class BinaryTreeInorderTraversal(object):
def inorderTraversal(self, root: TreeNode) -> List[int]:
result = list()
inorder_stack = list()
while root or inorder_stack:
if root:
inorder_stack.append(root)
root = root.left
else:
root = inorder_stack.pop()
result.append(root.val)
root = root.right
return result
| [
0,
1,
2,
3,
4
] |
941 | bc837d95ef22bd376f8b095e7aeb1f7d15c0e22e | <mask token>
| <mask token>
for char in word:
if count == 0:
print(char.upper(), end='')
count = 1
else:
print(char.lower(), end='')
count = 0
| <mask token>
word = str(input('please enter the word\n'))
count = 0
for char in word:
if count == 0:
print(char.upper(), end='')
count = 1
else:
print(char.lower(), end='')
count = 0
| """Write a program that asks the user to enter a word and then
capitalizes every other letter of that word. So if the user enters "rhinoceros",
the program should print "rHiNoCeRoS"""
word=str(input("please enter the word\n"))
count=0
for char in word:
if count==0:
print(char.upper(),end="")
count=1
else:
print(char.lower(),end="")
count=0
| null | [
0,
1,
2,
3
] |
942 | da34eb25ec08c8311fa839a0cdcd164eff036a5d | <mask token>
| <mask token>
print('Reading labels')
<mask token>
with open('/home/xilinx/jupyter_notebooks/bnn/t10k-labels-idx1-ubyte', 'rb'
) as lbl_file:
magicNum = int.from_bytes(lbl_file.read(4), byteorder='big')
countLbl = int.from_bytes(lbl_file.read(4), byteorder='big')
for idx in range(countLbl):
labels.append(int.from_bytes(lbl_file.read(1), byteorder='big'))
lbl_file.close()
print('Initiating classifier')
<mask token>
print('Testing throughput')
<mask token>
| <mask token>
print('Reading labels')
labels = []
with open('/home/xilinx/jupyter_notebooks/bnn/t10k-labels-idx1-ubyte', 'rb'
) as lbl_file:
magicNum = int.from_bytes(lbl_file.read(4), byteorder='big')
countLbl = int.from_bytes(lbl_file.read(4), byteorder='big')
for idx in range(countLbl):
labels.append(int.from_bytes(lbl_file.read(1), byteorder='big'))
lbl_file.close()
print('Initiating classifier')
lfcW1A1_classifier = bnn.LfcClassifier(bnn.NETWORK_LFCW1A1, 'mnist', bnn.
RUNTIME_HW)
print('Testing throughput')
result_W1A1 = lfcW1A1_classifier.classify_mnists(
'/home/xilinx/jupyter_notebooks/bnn/t10k-images-idx3-ubyte')
| import bnn
print('Reading labels')
labels = []
with open('/home/xilinx/jupyter_notebooks/bnn/t10k-labels-idx1-ubyte', 'rb'
) as lbl_file:
magicNum = int.from_bytes(lbl_file.read(4), byteorder='big')
countLbl = int.from_bytes(lbl_file.read(4), byteorder='big')
for idx in range(countLbl):
labels.append(int.from_bytes(lbl_file.read(1), byteorder='big'))
lbl_file.close()
print('Initiating classifier')
lfcW1A1_classifier = bnn.LfcClassifier(bnn.NETWORK_LFCW1A1, 'mnist', bnn.
RUNTIME_HW)
print('Testing throughput')
result_W1A1 = lfcW1A1_classifier.classify_mnists(
'/home/xilinx/jupyter_notebooks/bnn/t10k-images-idx3-ubyte')
| import bnn
#get
#!wget http://yann.lecun.com/exdb/mnist/t10k-images-idx3-ubyte.gz
#!wget http://yann.lecun.com/exdb/mnist/t10k-labels-idx1-ubyte.gz
#unzip
#!gzip -d t10k-images-idx3-ubyte.gz
#!gzip -d t10k-labels-idx1-ubyte.gz
#read labels
print("Reading labels")
labels = []
with open("/home/xilinx/jupyter_notebooks/bnn/t10k-labels-idx1-ubyte","rb") as lbl_file:
#read magic number and number of labels (MSB first) -> MNIST header
magicNum = int.from_bytes(lbl_file.read(4), byteorder="big")
countLbl = int.from_bytes(lbl_file.read(4), byteorder="big")
#now the labels are following byte-wise
for idx in range(countLbl):
labels.append(int.from_bytes(lbl_file.read(1), byteorder="big"))
lbl_file.close()
print("Initiating classifier")
lfcW1A1_classifier = bnn.LfcClassifier(bnn.NETWORK_LFCW1A1,"mnist",bnn.RUNTIME_HW)
print("Testing throughput")
result_W1A1 = lfcW1A1_classifier.classify_mnists("/home/xilinx/jupyter_notebooks/bnn/t10k-images-idx3-ubyte") | [
0,
1,
2,
3,
4
] |
943 | 04b5df5cfd052390f057c6f13b2e21d27bac6449 | <mask token>
| <mask token>
if __name__ == '__main__':
import os
import time
from msl.equipment import EquipmentRecord, ConnectionRecord, Backend
from msl.equipment.resources.thorlabs import MotionControl
os.environ['PATH'] += os.pathsep + 'C:/Program Files/Thorlabs/Kinesis'
record = EquipmentRecord(manufacturer='Thorlabs', model='KSC101',
serial='68000297', connection=ConnectionRecord(backend=Backend.MSL,
address='SDK::Thorlabs.MotionControl.KCube.Solenoid.dll'))
def is_open():
return shutter.get_operating_state() == 1
MotionControl.build_device_list()
shutter = record.connect()
print('Connected to {}'.format(shutter))
shutter.start_polling(200)
shutter.set_operating_mode('Manual')
for i in range(5):
print('Opening the shutter...')
shutter.set_operating_state('Active')
while not is_open():
time.sleep(0.05)
print(' Is the shutter open? {}'.format(is_open()))
time.sleep(1)
print('Closing the shutter...')
shutter.set_operating_state('Inactive')
while is_open():
time.sleep(0.05)
print(' Is the shutter open? {}'.format(is_open()))
time.sleep(1)
shutter.stop_polling()
shutter.disconnect()
| """
This example shows how to communicate with a SH05 (shutter) connected to a KSC101 (KCube Solenoid).
"""
# this "if" statement is used so that Sphinx does not execute this script when the docs are being built
if __name__ == '__main__':
import os
import time
from msl.equipment import EquipmentRecord, ConnectionRecord, Backend
from msl.equipment.resources.thorlabs import MotionControl
# ensure that the Kinesis folder is available on PATH
os.environ['PATH'] += os.pathsep + 'C:/Program Files/Thorlabs/Kinesis'
# rather than reading the EquipmentRecord from a database we can create it manually
record = EquipmentRecord(
manufacturer='Thorlabs',
model='KSC101',
serial='68000297', # update the serial number for your KSC101
connection=ConnectionRecord(
backend=Backend.MSL,
address='SDK::Thorlabs.MotionControl.KCube.Solenoid.dll',
),
)
def is_open():
return shutter.get_operating_state() == 1
# avoid the FT_DeviceNotFound error
MotionControl.build_device_list()
# connect to the KCube Solenoid
shutter = record.connect()
print('Connected to {}'.format(shutter))
# start polling at 200 ms
shutter.start_polling(200)
# set the operating mode to SC_OperatingModes.SC_Manual
shutter.set_operating_mode('Manual')
for i in range(5):
# set the operating state to SC_OperatingStates.SC_Active
print('Opening the shutter...')
shutter.set_operating_state('Active')
while not is_open():
time.sleep(0.05)
print(' Is the shutter open? {}'.format(is_open()))
time.sleep(1)
# set the operating state to SC_OperatingStates.SC_Inactive
print('Closing the shutter...')
shutter.set_operating_state('Inactive')
while is_open():
time.sleep(0.05)
print(' Is the shutter open? {}'.format(is_open()))
time.sleep(1)
# stop polling and close the connection
shutter.stop_polling()
shutter.disconnect()
| null | null | [
0,
1,
2
] |
944 | 11f29508d52e856f4751a5dc8911a1f1c9832374 | <mask token>
| def test(d_iter):
from cqlengine import columns
from cqlengine.models import Model
from cqlengine.query import ModelQuerySet
from cqlengine import connection
from cqlengine.management import sync_table
from urllib2 import urlopen, Request
from pyspark.sql import SQLContext
import json
from cassandra.cluster import Cluster
from cassandra.query import SimpleStatement
import operator
from sets import Set
CASSANDRA_KEYSPACE = 'playground'
class table3_timeline(Model):
link_id = columns.Text(primary_key=True)
counts = columns.Integer()
time = columns.Integer(primary_key=True, partition_key=False)
class table3_comments(Model):
link_id = columns.Text()
author = columns.Text()
body = columns.Text()
created_utc = columns.Text()
parent_id = columns.Text()
subreddit = columns.Text()
subreddit_id = columns.Text()
name = columns.Text(primary_key=True)
score = columns.Integer(index=True)
class table3_links(Model):
link_id = columns.Text(primary_key=True)
title = columns.Text()
permalink = columns.Text()
subreddit = columns.Text()
subreddit_id = columns.Text()
selftext = columns.Text()
created = columns.Integer()
score = columns.Integer()
url = columns.Text()
top_comment = columns.Text()
top_score = columns.Integer()
connection.setup(['172.31.6.150'], CASSANDRA_KEYSPACE)
cluster = Cluster(['54.193.123.92'])
session = cluster.connect(CASSANDRA_KEYSPACE)
sync_table(table3_links)
sync_table(table3_comments)
sync_table(table3_timeline)
for d in d_iter:
table3_comments.create(**d)
input = {}
createdtime = 0
obj = table3_links.objects(link_id=d['link_id'])
cql = (
"SELECT top_score, created FROM table3_links WHERE link_id='" +
d['link_id'] + "'")
stmt = session.execute(cql)
current = []
for repo in stmt:
current.append(repo)
if len(current) > 0:
createdtime = current[0][1]
if int(current[0][0]) < int(d['score']):
obj.update(top_comment=d['name'])
obj.update(top_score=d['score'])
else:
source = 'http://www.reddit.com/by_id/' + d['link_id'] + '/.json'
request = Request(source)
response = urlopen(request)
data = json.loads(response.read())
input['title'] = data['data']['children'][0]['data']['title']
input['permalink'] = data['data']['children'][0]['data'][
'permalink']
input['subreddit'] = data['data']['children'][0]['data'][
'subreddit']
input['selftext'] = data['data']['children'][0]['data']['selftext']
input['subreddit_id'] = data['data']['children'][0]['data'][
'subreddit_id']
input['created'] = int(data['data']['children'][0]['data'][
'created'])
createdtime = input['created']
input['url'] = data['data']['children'][0]['data']['url']
input['score'] = data['data']['children'][0]['data']['score']
table3_links.create(link_id=d['link_id'], title=input['title'],
permalink=input['permalink'], subreddit=input['subreddit'],
selftext=input['selftext'], subreddit_id=input[
'subreddit_id'], created=input['created'], url=input['url'],
score=input['score'], top_comment=d['name'], top_score=d[
'score'])
table3_timeline.create(link_id=d['link_id'], time=0, counts=0)
timegap = int(abs(int(d['created_utc']) - createdtime) / 3600)
cql2 = "SELECT counts FROM table3_timeline WHERE link_id='" + d[
'link_id'] + "' AND time=" + str(timegap)
stmt = session.execute(cql2)
count_tmp = []
for rep in stmt:
count_tmp.append(rep)
if len(count_tmp) > 0:
timeslot = table3_timeline.objects(link_id=d['link_id'], time=
timegap)
timeslot.update(counts=count_tmp[0][0] + 1)
else:
table3_timeline.create(link_id=d['link_id'], time=timegap, counts=1
)
sync_table(table3_links)
sync_table(table3_comments)
sync_table(table3_timeline)
<mask token>
| def test(d_iter):
from cqlengine import columns
from cqlengine.models import Model
from cqlengine.query import ModelQuerySet
from cqlengine import connection
from cqlengine.management import sync_table
from urllib2 import urlopen, Request
from pyspark.sql import SQLContext
import json
from cassandra.cluster import Cluster
from cassandra.query import SimpleStatement
import operator
from sets import Set
CASSANDRA_KEYSPACE = 'playground'
class table3_timeline(Model):
link_id = columns.Text(primary_key=True)
counts = columns.Integer()
time = columns.Integer(primary_key=True, partition_key=False)
class table3_comments(Model):
link_id = columns.Text()
author = columns.Text()
body = columns.Text()
created_utc = columns.Text()
parent_id = columns.Text()
subreddit = columns.Text()
subreddit_id = columns.Text()
name = columns.Text(primary_key=True)
score = columns.Integer(index=True)
class table3_links(Model):
link_id = columns.Text(primary_key=True)
title = columns.Text()
permalink = columns.Text()
subreddit = columns.Text()
subreddit_id = columns.Text()
selftext = columns.Text()
created = columns.Integer()
score = columns.Integer()
url = columns.Text()
top_comment = columns.Text()
top_score = columns.Integer()
connection.setup(['172.31.6.150'], CASSANDRA_KEYSPACE)
cluster = Cluster(['54.193.123.92'])
session = cluster.connect(CASSANDRA_KEYSPACE)
sync_table(table3_links)
sync_table(table3_comments)
sync_table(table3_timeline)
for d in d_iter:
table3_comments.create(**d)
input = {}
createdtime = 0
obj = table3_links.objects(link_id=d['link_id'])
cql = (
"SELECT top_score, created FROM table3_links WHERE link_id='" +
d['link_id'] + "'")
stmt = session.execute(cql)
current = []
for repo in stmt:
current.append(repo)
if len(current) > 0:
createdtime = current[0][1]
if int(current[0][0]) < int(d['score']):
obj.update(top_comment=d['name'])
obj.update(top_score=d['score'])
else:
source = 'http://www.reddit.com/by_id/' + d['link_id'] + '/.json'
request = Request(source)
response = urlopen(request)
data = json.loads(response.read())
input['title'] = data['data']['children'][0]['data']['title']
input['permalink'] = data['data']['children'][0]['data'][
'permalink']
input['subreddit'] = data['data']['children'][0]['data'][
'subreddit']
input['selftext'] = data['data']['children'][0]['data']['selftext']
input['subreddit_id'] = data['data']['children'][0]['data'][
'subreddit_id']
input['created'] = int(data['data']['children'][0]['data'][
'created'])
createdtime = input['created']
input['url'] = data['data']['children'][0]['data']['url']
input['score'] = data['data']['children'][0]['data']['score']
table3_links.create(link_id=d['link_id'], title=input['title'],
permalink=input['permalink'], subreddit=input['subreddit'],
selftext=input['selftext'], subreddit_id=input[
'subreddit_id'], created=input['created'], url=input['url'],
score=input['score'], top_comment=d['name'], top_score=d[
'score'])
table3_timeline.create(link_id=d['link_id'], time=0, counts=0)
timegap = int(abs(int(d['created_utc']) - createdtime) / 3600)
cql2 = "SELECT counts FROM table3_timeline WHERE link_id='" + d[
'link_id'] + "' AND time=" + str(timegap)
stmt = session.execute(cql2)
count_tmp = []
for rep in stmt:
count_tmp.append(rep)
if len(count_tmp) > 0:
timeslot = table3_timeline.objects(link_id=d['link_id'], time=
timegap)
timeslot.update(counts=count_tmp[0][0] + 1)
else:
table3_timeline.create(link_id=d['link_id'], time=timegap, counts=1
)
sync_table(table3_links)
sync_table(table3_comments)
sync_table(table3_timeline)
<mask token>
test([])
rdd.foreachPartition(test)
| def test(d_iter):
from cqlengine import columns
from cqlengine.models import Model
from cqlengine.query import ModelQuerySet
from cqlengine import connection
from cqlengine.management import sync_table
from urllib2 import urlopen, Request
from pyspark.sql import SQLContext
import json
from cassandra.cluster import Cluster
from cassandra.query import SimpleStatement
import operator
from sets import Set
CASSANDRA_KEYSPACE = 'playground'
class table3_timeline(Model):
link_id = columns.Text(primary_key=True)
counts = columns.Integer()
time = columns.Integer(primary_key=True, partition_key=False)
class table3_comments(Model):
link_id = columns.Text()
author = columns.Text()
body = columns.Text()
created_utc = columns.Text()
parent_id = columns.Text()
subreddit = columns.Text()
subreddit_id = columns.Text()
name = columns.Text(primary_key=True)
score = columns.Integer(index=True)
class table3_links(Model):
link_id = columns.Text(primary_key=True)
title = columns.Text()
permalink = columns.Text()
subreddit = columns.Text()
subreddit_id = columns.Text()
selftext = columns.Text()
created = columns.Integer()
score = columns.Integer()
url = columns.Text()
top_comment = columns.Text()
top_score = columns.Integer()
connection.setup(['172.31.6.150'], CASSANDRA_KEYSPACE)
cluster = Cluster(['54.193.123.92'])
session = cluster.connect(CASSANDRA_KEYSPACE)
sync_table(table3_links)
sync_table(table3_comments)
sync_table(table3_timeline)
for d in d_iter:
table3_comments.create(**d)
input = {}
createdtime = 0
obj = table3_links.objects(link_id=d['link_id'])
cql = (
"SELECT top_score, created FROM table3_links WHERE link_id='" +
d['link_id'] + "'")
stmt = session.execute(cql)
current = []
for repo in stmt:
current.append(repo)
if len(current) > 0:
createdtime = current[0][1]
if int(current[0][0]) < int(d['score']):
obj.update(top_comment=d['name'])
obj.update(top_score=d['score'])
else:
source = 'http://www.reddit.com/by_id/' + d['link_id'] + '/.json'
request = Request(source)
response = urlopen(request)
data = json.loads(response.read())
input['title'] = data['data']['children'][0]['data']['title']
input['permalink'] = data['data']['children'][0]['data'][
'permalink']
input['subreddit'] = data['data']['children'][0]['data'][
'subreddit']
input['selftext'] = data['data']['children'][0]['data']['selftext']
input['subreddit_id'] = data['data']['children'][0]['data'][
'subreddit_id']
input['created'] = int(data['data']['children'][0]['data'][
'created'])
createdtime = input['created']
input['url'] = data['data']['children'][0]['data']['url']
input['score'] = data['data']['children'][0]['data']['score']
table3_links.create(link_id=d['link_id'], title=input['title'],
permalink=input['permalink'], subreddit=input['subreddit'],
selftext=input['selftext'], subreddit_id=input[
'subreddit_id'], created=input['created'], url=input['url'],
score=input['score'], top_comment=d['name'], top_score=d[
'score'])
table3_timeline.create(link_id=d['link_id'], time=0, counts=0)
timegap = int(abs(int(d['created_utc']) - createdtime) / 3600)
cql2 = "SELECT counts FROM table3_timeline WHERE link_id='" + d[
'link_id'] + "' AND time=" + str(timegap)
stmt = session.execute(cql2)
count_tmp = []
for rep in stmt:
count_tmp.append(rep)
if len(count_tmp) > 0:
timeslot = table3_timeline.objects(link_id=d['link_id'], time=
timegap)
timeslot.update(counts=count_tmp[0][0] + 1)
else:
table3_timeline.create(link_id=d['link_id'], time=timegap, counts=1
)
sync_table(table3_links)
sync_table(table3_comments)
sync_table(table3_timeline)
df = sqlContext.read.json('s3n://yy-data/testJSON.json')
rdd = df.map(lambda x: {'link_id': x.link_id, 'author': x.author, 'body': x
.body, 'created_utc': x.created_utc, 'parent_id': x.parent_id,
'subreddit': x.subreddit, 'subreddit_id': x.subreddit_id, 'name': x.
name, 'score': x.score})
test([])
rdd.foreachPartition(test)
| def test(d_iter):
from cqlengine import columns
from cqlengine.models import Model
from cqlengine.query import ModelQuerySet
from cqlengine import connection
from cqlengine.management import sync_table
from urllib2 import urlopen, Request
from pyspark.sql import SQLContext
import json
from cassandra.cluster import Cluster
from cassandra.query import SimpleStatement
import operator
from sets import Set
CASSANDRA_KEYSPACE = "playground"
class table3_timeline(Model):
link_id = columns.Text(primary_key=True)
counts = columns.Integer()
time = columns.Integer(primary_key=True, partition_key=False)
class table3_comments(Model):
link_id = columns.Text()
author = columns.Text()
body = columns.Text()
created_utc = columns.Text()
parent_id = columns.Text()
subreddit = columns.Text()
subreddit_id = columns.Text()
name = columns.Text(primary_key=True)
score = columns.Integer(index = True)
class table3_links(Model):
link_id = columns.Text(primary_key=True)
title = columns.Text()
permalink = columns.Text()
subreddit = columns.Text()
subreddit_id = columns.Text()
selftext = columns.Text()
created = columns.Integer()
score = columns.Integer()
url = columns.Text()
top_comment = columns.Text()
top_score = columns.Integer()
connection.setup(['172.31.6.150'], CASSANDRA_KEYSPACE)
cluster = Cluster(['54.193.123.92'])
session = cluster.connect(CASSANDRA_KEYSPACE)
sync_table(table3_links)
sync_table(table3_comments)
sync_table(table3_timeline)
for d in d_iter:
table3_comments.create(**d)
input = {}
createdtime = 0
obj = table3_links.objects(link_id=d['link_id'])
cql = "SELECT top_score, created FROM table3_links WHERE link_id='"+d['link_id']+"'"
stmt = session.execute(cql)
current = []
for repo in stmt:
current.append(repo)
if len(current) > 0:
createdtime = current[0][1]
if int(current[0][0]) < int(d['score']):
obj.update(top_comment = d['name'])
obj.update(top_score = d['score'])
else:
source = "http://www.reddit.com/by_id/"+d['link_id']+"/.json"
request = Request(source)
response = urlopen(request)
data = json.loads(response.read())
input['title'] = data['data']['children'][0]['data']['title']
input['permalink'] = data['data']['children'][0]['data']['permalink']
input['subreddit'] = data['data']['children'][0]['data']['subreddit']
input['selftext'] = data['data']['children'][0]['data']['selftext']
input['subreddit_id'] = data['data']['children'][0]['data']['subreddit_id']
input['created'] = int(data['data']['children'][0]['data']['created'])
createdtime = input['created']
input['url'] = data['data']['children'][0]['data']['url']
input['score'] = data['data']['children'][0]['data']['score']
table3_links.create( link_id = d['link_id'],
title = input['title'],
permalink = input['permalink'],
subreddit = input['subreddit'],
selftext = input['selftext'],
subreddit_id = input['subreddit_id'],
created = input['created'],
url = input['url'],
score = input['score'],
top_comment = d['name'],
top_score = d['score'])
table3_timeline.create(link_id=d['link_id'], time=0, counts=0)
timegap = int(abs(int(d['created_utc']) - createdtime)/3600) # one hour
cql2 = "SELECT counts FROM table3_timeline WHERE link_id='"+d['link_id']+"' AND time=" + str(timegap)
stmt = session.execute(cql2)
count_tmp = []
for rep in stmt:
count_tmp.append(rep)
if len(count_tmp) > 0:
timeslot = table3_timeline.objects(link_id=d['link_id'], time=timegap)
timeslot.update(counts=(count_tmp[0][0]+1))
else:
table3_timeline.create(link_id=d['link_id'], time=timegap, counts=1)
sync_table(table3_links)
sync_table(table3_comments)
sync_table(table3_timeline)
df = sqlContext.read.json("s3n://yy-data/testJSON.json")
# s3n://reddit-comments/2007/RC_2007-10
rdd = df.map(lambda x: {"link_id": x.link_id,
"author": x.author,
"body": x.body,
"created_utc": x.created_utc,
"parent_id": x.parent_id,
"subreddit": x.subreddit,
"subreddit_id": x.subreddit_id,
"name": x.name,
"score": x.score})
test([])
rdd.foreachPartition(test) | [
0,
1,
2,
3,
4
] |
945 | 71cdddfdd7c1327a8a77808dbdd0ff98d827231f | <mask token>
class BaseResource(Resource):
<mask token>
def __init__(self, *args, **kwargs):
super(BaseResource, self).__init__(*args, **kwargs)
self._user = None
<mask token>
@property
def current_user(self):
return current_user._get_current_object()
@property
def current_org(self):
return current_org._get_current_object()
def record_event(self, options):
options.update({'user_id': self.current_user.id, 'org_id': self.
current_org.id})
record_event.delay(options)
<mask token>
| <mask token>
class BaseResource(Resource):
<mask token>
def __init__(self, *args, **kwargs):
super(BaseResource, self).__init__(*args, **kwargs)
self._user = None
def dispatch_request(self, *args, **kwargs):
kwargs.pop('org_slug', None)
return super(BaseResource, self).dispatch_request(*args, **kwargs)
@property
def current_user(self):
return current_user._get_current_object()
@property
def current_org(self):
return current_org._get_current_object()
def record_event(self, options):
options.update({'user_id': self.current_user.id, 'org_id': self.
current_org.id})
record_event.delay(options)
<mask token>
| <mask token>
class BaseResource(Resource):
decorators = [login_required]
def __init__(self, *args, **kwargs):
super(BaseResource, self).__init__(*args, **kwargs)
self._user = None
def dispatch_request(self, *args, **kwargs):
kwargs.pop('org_slug', None)
return super(BaseResource, self).dispatch_request(*args, **kwargs)
@property
def current_user(self):
return current_user._get_current_object()
@property
def current_org(self):
return current_org._get_current_object()
def record_event(self, options):
options.update({'user_id': self.current_user.id, 'org_id': self.
current_org.id})
record_event.delay(options)
<mask token>
| <mask token>
class BaseResource(Resource):
decorators = [login_required]
def __init__(self, *args, **kwargs):
super(BaseResource, self).__init__(*args, **kwargs)
self._user = None
def dispatch_request(self, *args, **kwargs):
kwargs.pop('org_slug', None)
return super(BaseResource, self).dispatch_request(*args, **kwargs)
@property
def current_user(self):
return current_user._get_current_object()
@property
def current_org(self):
return current_org._get_current_object()
def record_event(self, options):
options.update({'user_id': self.current_user.id, 'org_id': self.
current_org.id})
record_event.delay(options)
<mask token>
def get_object_or_404(fn, *args, **kwargs):
try:
return fn(*args, **kwargs)
except DoesNotExist:
abort(404)
| from flask.ext.restful import Resource, abort
from flask_login import current_user, login_required
from peewee import DoesNotExist
from redash.authentication.org_resolving import current_org
from redash.tasks import record_event
class BaseResource(Resource):
decorators = [login_required]
def __init__(self, *args, **kwargs):
super(BaseResource, self).__init__(*args, **kwargs)
self._user = None
def dispatch_request(self, *args, **kwargs):
kwargs.pop('org_slug', None)
return super(BaseResource, self).dispatch_request(*args, **kwargs)
@property
def current_user(self):
return current_user._get_current_object()
@property
def current_org(self):
return current_org._get_current_object()
def record_event(self, options):
options.update({
'user_id': self.current_user.id,
'org_id': self.current_org.id
})
record_event.delay(options)
def require_fields(req, fields):
for f in fields:
if f not in req:
abort(400)
def get_object_or_404(fn, *args, **kwargs):
try:
return fn(*args, **kwargs)
except DoesNotExist:
abort(404)
| [
5,
6,
7,
8,
11
] |
946 | 0b3f16ee9b287c6c77acde674abec9deb4053c83 | <mask token>
def house_model(y_new):
xs = np.array([0, 1, 2, 4, 6, 8, 10], dtype=float)
ys = np.array([0.5, 0.1, 1.5, 2.5, 3.5, 4.5, 5.5], dtype=float)
model = tf.keras.Sequential([keras.layers.Dense(units=1, input_shape=[1])])
model.compile(optimizer='sgd', loss='mean_squared_error')
model.fit(xs, ys, epochs=100)
return model.predict(y_new)[0]
<mask token>
| <mask token>
def house_model(y_new):
xs = np.array([0, 1, 2, 4, 6, 8, 10], dtype=float)
ys = np.array([0.5, 0.1, 1.5, 2.5, 3.5, 4.5, 5.5], dtype=float)
model = tf.keras.Sequential([keras.layers.Dense(units=1, input_shape=[1])])
model.compile(optimizer='sgd', loss='mean_squared_error')
model.fit(xs, ys, epochs=100)
return model.predict(y_new)[0]
<mask token>
print(prediction)
| <mask token>
def house_model(y_new):
xs = np.array([0, 1, 2, 4, 6, 8, 10], dtype=float)
ys = np.array([0.5, 0.1, 1.5, 2.5, 3.5, 4.5, 5.5], dtype=float)
model = tf.keras.Sequential([keras.layers.Dense(units=1, input_shape=[1])])
model.compile(optimizer='sgd', loss='mean_squared_error')
model.fit(xs, ys, epochs=100)
return model.predict(y_new)[0]
prediction = house_model([7.0])
print(prediction)
| import tensorflow as tf
import keras
import numpy as np
def house_model(y_new):
xs = np.array([0, 1, 2, 4, 6, 8, 10], dtype=float)
ys = np.array([0.5, 0.1, 1.5, 2.5, 3.5, 4.5, 5.5], dtype=float)
model = tf.keras.Sequential([keras.layers.Dense(units=1, input_shape=[1])])
model.compile(optimizer='sgd', loss='mean_squared_error')
model.fit(xs, ys, epochs=100)
return model.predict(y_new)[0]
prediction = house_model([7.0])
print(prediction)
| import tensorflow as tf
import keras
import numpy as np
def house_model(y_new):
xs = np.array([0, 1, 2, 4, 6, 8, 10], dtype=float) # Your Code Here#
ys = np.array([0.50, 0.100, 1.50, 2.50, 3.50, 4.50, 5.50], dtype=float) # Your Code Here#
model = tf.keras.Sequential([keras.layers.Dense(units=1, input_shape=[1])]) # Your Code Here#
model.compile(optimizer='sgd', loss='mean_squared_error')
model.fit(xs,ys, epochs=100)
return model.predict(y_new)[0]
prediction = house_model([7.0])
print(prediction)
| [
1,
2,
3,
4,
5
] |
947 | 1754bce54a47cb78dce3b545d3dce835a4e0e69f | <mask token>
def get_common_logger(name='common', logfile=None):
"""
args: name (str): logger name
logfile (str): log file, use stream handler (stdout) as default.
return:
logger obj
"""
my_logger = logging.getLogger(name)
my_logger.setLevel(config.LOG_LEVEL)
if logfile:
handler = logging.FileHandler(logfile)
else:
handler = logging.StreamHandler()
formatter = logging.Formatter(
'%(asctime)s - %(name)s - %(levelname)s - %(filename)s - %(funcName)s - %(lineno)s - %(message)s'
)
handler.setFormatter(formatter)
my_logger.addHandler(handler)
my_logger.propagate = False
return my_logger
<mask token>
| <mask token>
def get_common_logger(name='common', logfile=None):
"""
args: name (str): logger name
logfile (str): log file, use stream handler (stdout) as default.
return:
logger obj
"""
my_logger = logging.getLogger(name)
my_logger.setLevel(config.LOG_LEVEL)
if logfile:
handler = logging.FileHandler(logfile)
else:
handler = logging.StreamHandler()
formatter = logging.Formatter(
'%(asctime)s - %(name)s - %(levelname)s - %(filename)s - %(funcName)s - %(lineno)s - %(message)s'
)
handler.setFormatter(formatter)
my_logger.addHandler(handler)
my_logger.propagate = False
return my_logger
<mask token>
if __name__ == '__main__':
COMMON_LOGGER.debug('test')
| <mask token>
def get_common_logger(name='common', logfile=None):
"""
args: name (str): logger name
logfile (str): log file, use stream handler (stdout) as default.
return:
logger obj
"""
my_logger = logging.getLogger(name)
my_logger.setLevel(config.LOG_LEVEL)
if logfile:
handler = logging.FileHandler(logfile)
else:
handler = logging.StreamHandler()
formatter = logging.Formatter(
'%(asctime)s - %(name)s - %(levelname)s - %(filename)s - %(funcName)s - %(lineno)s - %(message)s'
)
handler.setFormatter(formatter)
my_logger.addHandler(handler)
my_logger.propagate = False
return my_logger
COMMON_LOGGER = get_common_logger('common logger')
if __name__ == '__main__':
COMMON_LOGGER.debug('test')
| import logging
import config
def get_common_logger(name='common', logfile=None):
"""
args: name (str): logger name
logfile (str): log file, use stream handler (stdout) as default.
return:
logger obj
"""
my_logger = logging.getLogger(name)
my_logger.setLevel(config.LOG_LEVEL)
if logfile:
handler = logging.FileHandler(logfile)
else:
handler = logging.StreamHandler()
formatter = logging.Formatter(
'%(asctime)s - %(name)s - %(levelname)s - %(filename)s - %(funcName)s - %(lineno)s - %(message)s'
)
handler.setFormatter(formatter)
my_logger.addHandler(handler)
my_logger.propagate = False
return my_logger
COMMON_LOGGER = get_common_logger('common logger')
if __name__ == '__main__':
COMMON_LOGGER.debug('test')
| #!/usr/bin/env python
# coding: utf-8
import logging
import config
def get_common_logger(name='common', logfile=None):
'''
args: name (str): logger name
logfile (str): log file, use stream handler (stdout) as default.
return:
logger obj
'''
my_logger = logging.getLogger(name)
my_logger.setLevel(config.LOG_LEVEL)
if logfile:
handler = logging.FileHandler(logfile)
else:
handler = logging.StreamHandler()
formatter = logging.Formatter(
'%(asctime)s - %(name)s - %(levelname)s - %(filename)s - %(funcName)s - %(lineno)s - %(message)s')
handler.setFormatter(formatter)
my_logger.addHandler(handler)
# Stop logger propagate, forbiden duplicate log.
my_logger.propagate = False
return my_logger
COMMON_LOGGER = get_common_logger('common logger')
if __name__ == '__main__':
COMMON_LOGGER.debug('test')
| [
1,
2,
3,
4,
5
] |
948 | 6affc182f5d3353d46f6e9a21344bc85bf894165 | <mask token>
| <mask token>
def create_app(settings_override={}):
app = Flask(__name__)
app.config.from_object('zezin.settings.Configuration')
app.config.update(settings_override)
db.init_app(app)
from zezin.views import partners_routes
app.register_blueprint(blueprint=partners_routes)
return app
<mask token>
| <mask token>
db = SQLAlchemy()
def create_app(settings_override={}):
app = Flask(__name__)
app.config.from_object('zezin.settings.Configuration')
app.config.update(settings_override)
db.init_app(app)
from zezin.views import partners_routes
app.register_blueprint(blueprint=partners_routes)
return app
<mask token>
| from flask import Flask
from flask_sqlalchemy import SQLAlchemy
db = SQLAlchemy()
def create_app(settings_override={}):
app = Flask(__name__)
app.config.from_object('zezin.settings.Configuration')
app.config.update(settings_override)
db.init_app(app)
from zezin.views import partners_routes
app.register_blueprint(blueprint=partners_routes)
return app
import zezin.models
| from flask import Flask
from flask_sqlalchemy import SQLAlchemy
db = SQLAlchemy()
# pylint: disable=dangerous-default-value,wrong-import-position,unused-import, import-outside-toplevel
def create_app(settings_override={}):
app = Flask(__name__)
app.config.from_object('zezin.settings.Configuration')
app.config.update(settings_override)
db.init_app(app)
from zezin.views import partners_routes
app.register_blueprint(blueprint=partners_routes)
return app
import zezin.models # isort:skip
| [
0,
1,
2,
3,
4
] |
949 | 9abf2b9b90d18332ede94cf1af778e0dda54330b | <mask token>
class RSTTable(Table):
def run(self) ->List[Node]:
...
class CSVTable(Table):
option_spec: Dict[str, Callable[[str], Any]] = ...
class DocutilsDialect(csv.Dialect):
delimiter: str = ...
quotechar: str = ...
doublequote: bool = ...
skipinitialspace: bool = ...
strict: bool = ...
lineterminator: str = ...
quoting: Any = ...
escapechar: str = ...
def __init__(self, options: Dict[str, Any]) ->None:
...
class HeaderDialect(csv.Dialect):
delimiter: str = ...
quotechar: str = ...
escapechar: str = ...
doublequote: bool = ...
skipinitialspace: bool = ...
strict: bool = ...
lineterminator: str = ...
quoting: Any = ...
def check_requirements(self) ->None:
...
def run(self) ->List[Node]:
...
def get_csv_data(self) ->Tuple[List[str], str]:
...
decode_from_csv: Callable[[str], str] = ...
encode_for_csv: Callable[[str], str] = ...
def parse_csv_data_into_rows(self, csv_data: List[str], dialect: Any,
source: str) ->Tuple[List[Tuple[int, int, int, StringList]], int]:
...
class ListTable(Table):
option_spec: Dict[str, Callable[[str], Any]] = ...
def run(self) ->List[Node]:
...
def check_list_content(self, node: Node) ->Tuple[int, List[int]]:
...
def build_table_from_list(self, table_data: List[List[N_co]],
col_widths: List[int], header_rows: int, stub_columns: int) ->table:
...
| <mask token>
class Table(Directive):
optional_arguments: int = ...
final_argument_whitespace: bool = ...
option_spec: Dict[str, Callable[[str], Any]] = ...
has_content: bool = ...
def make_title(self) ->Tuple[title, List[system_message]]:
...
def process_header_option(self) ->Tuple[List[Node], int]:
...
def check_table_dimensions(self, rows: List[List[N_co]], header_rows:
int, stub_columns: int) ->None:
...
def set_table_width(self, table_node: table) ->None:
...
@property
def widths(self) ->str:
...
def get_column_widths(self, max_cols: int) ->List[int]:
...
def extend_short_rows_with_empty_cells(self, columns: int, parts: Tuple
[List[N_co], List[N_co]]) ->None:
...
class RSTTable(Table):
def run(self) ->List[Node]:
...
class CSVTable(Table):
option_spec: Dict[str, Callable[[str], Any]] = ...
class DocutilsDialect(csv.Dialect):
delimiter: str = ...
quotechar: str = ...
doublequote: bool = ...
skipinitialspace: bool = ...
strict: bool = ...
lineterminator: str = ...
quoting: Any = ...
escapechar: str = ...
def __init__(self, options: Dict[str, Any]) ->None:
...
class HeaderDialect(csv.Dialect):
delimiter: str = ...
quotechar: str = ...
escapechar: str = ...
doublequote: bool = ...
skipinitialspace: bool = ...
strict: bool = ...
lineterminator: str = ...
quoting: Any = ...
def check_requirements(self) ->None:
...
def run(self) ->List[Node]:
...
def get_csv_data(self) ->Tuple[List[str], str]:
...
decode_from_csv: Callable[[str], str] = ...
encode_for_csv: Callable[[str], str] = ...
def parse_csv_data_into_rows(self, csv_data: List[str], dialect: Any,
source: str) ->Tuple[List[Tuple[int, int, int, StringList]], int]:
...
class ListTable(Table):
option_spec: Dict[str, Callable[[str], Any]] = ...
def run(self) ->List[Node]:
...
def check_list_content(self, node: Node) ->Tuple[int, List[int]]:
...
def build_table_from_list(self, table_data: List[List[N_co]],
col_widths: List[int], header_rows: int, stub_columns: int) ->table:
...
| <mask token>
def align(argument: str) ->str:
...
class Table(Directive):
optional_arguments: int = ...
final_argument_whitespace: bool = ...
option_spec: Dict[str, Callable[[str], Any]] = ...
has_content: bool = ...
def make_title(self) ->Tuple[title, List[system_message]]:
...
def process_header_option(self) ->Tuple[List[Node], int]:
...
def check_table_dimensions(self, rows: List[List[N_co]], header_rows:
int, stub_columns: int) ->None:
...
def set_table_width(self, table_node: table) ->None:
...
@property
def widths(self) ->str:
...
def get_column_widths(self, max_cols: int) ->List[int]:
...
def extend_short_rows_with_empty_cells(self, columns: int, parts: Tuple
[List[N_co], List[N_co]]) ->None:
...
class RSTTable(Table):
def run(self) ->List[Node]:
...
class CSVTable(Table):
option_spec: Dict[str, Callable[[str], Any]] = ...
class DocutilsDialect(csv.Dialect):
delimiter: str = ...
quotechar: str = ...
doublequote: bool = ...
skipinitialspace: bool = ...
strict: bool = ...
lineterminator: str = ...
quoting: Any = ...
escapechar: str = ...
def __init__(self, options: Dict[str, Any]) ->None:
...
class HeaderDialect(csv.Dialect):
delimiter: str = ...
quotechar: str = ...
escapechar: str = ...
doublequote: bool = ...
skipinitialspace: bool = ...
strict: bool = ...
lineterminator: str = ...
quoting: Any = ...
def check_requirements(self) ->None:
...
def run(self) ->List[Node]:
...
def get_csv_data(self) ->Tuple[List[str], str]:
...
decode_from_csv: Callable[[str], str] = ...
encode_for_csv: Callable[[str], str] = ...
def parse_csv_data_into_rows(self, csv_data: List[str], dialect: Any,
source: str) ->Tuple[List[Tuple[int, int, int, StringList]], int]:
...
class ListTable(Table):
option_spec: Dict[str, Callable[[str], Any]] = ...
def run(self) ->List[Node]:
...
def check_list_content(self, node: Node) ->Tuple[int, List[int]]:
...
def build_table_from_list(self, table_data: List[List[N_co]],
col_widths: List[int], header_rows: int, stub_columns: int) ->table:
...
| <mask token>
N_co = TypeVar('N_co', bound=Node, covariant=True)
__docformat__: str
def align(argument: str) ->str:
...
class Table(Directive):
optional_arguments: int = ...
final_argument_whitespace: bool = ...
option_spec: Dict[str, Callable[[str], Any]] = ...
has_content: bool = ...
def make_title(self) ->Tuple[title, List[system_message]]:
...
def process_header_option(self) ->Tuple[List[Node], int]:
...
def check_table_dimensions(self, rows: List[List[N_co]], header_rows:
int, stub_columns: int) ->None:
...
def set_table_width(self, table_node: table) ->None:
...
@property
def widths(self) ->str:
...
def get_column_widths(self, max_cols: int) ->List[int]:
...
def extend_short_rows_with_empty_cells(self, columns: int, parts: Tuple
[List[N_co], List[N_co]]) ->None:
...
class RSTTable(Table):
def run(self) ->List[Node]:
...
class CSVTable(Table):
option_spec: Dict[str, Callable[[str], Any]] = ...
class DocutilsDialect(csv.Dialect):
delimiter: str = ...
quotechar: str = ...
doublequote: bool = ...
skipinitialspace: bool = ...
strict: bool = ...
lineterminator: str = ...
quoting: Any = ...
escapechar: str = ...
def __init__(self, options: Dict[str, Any]) ->None:
...
class HeaderDialect(csv.Dialect):
delimiter: str = ...
quotechar: str = ...
escapechar: str = ...
doublequote: bool = ...
skipinitialspace: bool = ...
strict: bool = ...
lineterminator: str = ...
quoting: Any = ...
def check_requirements(self) ->None:
...
def run(self) ->List[Node]:
...
def get_csv_data(self) ->Tuple[List[str], str]:
...
decode_from_csv: Callable[[str], str] = ...
encode_for_csv: Callable[[str], str] = ...
def parse_csv_data_into_rows(self, csv_data: List[str], dialect: Any,
source: str) ->Tuple[List[Tuple[int, int, int, StringList]], int]:
...
class ListTable(Table):
option_spec: Dict[str, Callable[[str], Any]] = ...
def run(self) ->List[Node]:
...
def check_list_content(self, node: Node) ->Tuple[int, List[int]]:
...
def build_table_from_list(self, table_data: List[List[N_co]],
col_widths: List[int], header_rows: int, stub_columns: int) ->table:
...
| # Stubs for docutils.parsers.rst.directives.tables (Python 3.6)
#
# NOTE: This dynamically typed stub was automatically generated by stubgen.
import csv
from docutils.statemachine import StringList
from docutils.nodes import Node, system_message, table, title
from docutils.parsers.rst import Directive
from typing import Any, Callable, Dict, List, Tuple, TypeVar
N_co = TypeVar('N_co', bound=Node, covariant=True)
__docformat__: str
def align(argument: str) -> str: ...
class Table(Directive):
optional_arguments: int = ...
final_argument_whitespace: bool = ...
option_spec: Dict[str, Callable[[str], Any]] = ...
has_content: bool = ...
def make_title(self) -> Tuple[title, List[system_message]]: ...
def process_header_option(self) -> Tuple[List[Node], int]: ...
def check_table_dimensions(self, rows: List[List[N_co]], header_rows: int, stub_columns: int) -> None: ...
def set_table_width(self, table_node: table) -> None: ...
@property
def widths(self) -> str: ...
def get_column_widths(self, max_cols: int) -> List[int]: ...
def extend_short_rows_with_empty_cells(self, columns: int, parts: Tuple[List[N_co], List[N_co]]) -> None: ...
class RSTTable(Table):
def run(self) -> List[Node]: ...
class CSVTable(Table):
option_spec: Dict[str, Callable[[str], Any]] = ...
class DocutilsDialect(csv.Dialect):
delimiter: str = ...
quotechar: str = ...
doublequote: bool = ...
skipinitialspace: bool = ...
strict: bool = ...
lineterminator: str = ...
quoting: Any = ...
escapechar: str = ...
def __init__(self, options: Dict[str, Any]) -> None: ...
class HeaderDialect(csv.Dialect):
delimiter: str = ...
quotechar: str = ...
escapechar: str = ...
doublequote: bool = ...
skipinitialspace: bool = ...
strict: bool = ...
lineterminator: str = ...
quoting: Any = ...
def check_requirements(self) -> None: ...
def run(self) -> List[Node]: ...
def get_csv_data(self) -> Tuple[List[str], str]: ...
decode_from_csv: Callable[[str], str] = ...
encode_for_csv: Callable[[str], str] = ...
def parse_csv_data_into_rows(self, csv_data: List[str], dialect: Any, source: str) -> Tuple[List[Tuple[int, int, int, StringList]], int]: ...
class ListTable(Table):
option_spec: Dict[str, Callable[[str], Any]] = ...
def run(self) -> List[Node]: ...
def check_list_content(self, node: Node) -> Tuple[int, List[int]]: ...
def build_table_from_list(self, table_data: List[List[N_co]], col_widths: List[int], header_rows: int, stub_columns: int) -> table: ...
| [
11,
19,
20,
22,
24
] |
950 | 94e8f0532da76c803b23fe2217b07dc8cf285710 | <mask token>
| <mask token>
print(dataset.info())
<mask token>
regressor.fit(features, labels)
<mask token>
regressor.predict(x)
| <mask token>
dataset = pd.read_csv('University_data.csv')
print(dataset.info())
features = dataset.iloc[:, :-1].values
labels = dataset.iloc[:, -1:].values
<mask token>
labelencoder = LabelEncoder()
features[:, 0] = labelencoder.fit_transform(features[:, 0])
<mask token>
onehotencoder = OneHotEncoder(categorical_features=[0])
features = onehotencoder.fit_transform(features).toarray()
features = features[:, 1:]
<mask token>
regressor = LinearRegression()
regressor.fit(features, labels)
x = ['Cabrini', 337, 1.5, 2.3, 9.0, 0]
x = np.array(x).reshape(1, -1)
x[:, 0] = labelencoder.transform(x[:, 0])
x = onehotencoder.transform(x).toarray()
x = x[:, 1:]
regressor.predict(x)
| <mask token>
import numpy as np
import pandas as pd
dataset = pd.read_csv('University_data.csv')
print(dataset.info())
features = dataset.iloc[:, :-1].values
labels = dataset.iloc[:, -1:].values
from sklearn.preprocessing import LabelEncoder
labelencoder = LabelEncoder()
features[:, 0] = labelencoder.fit_transform(features[:, 0])
from sklearn.preprocessing import OneHotEncoder
onehotencoder = OneHotEncoder(categorical_features=[0])
features = onehotencoder.fit_transform(features).toarray()
features = features[:, 1:]
from sklearn.linear_model import LinearRegression
regressor = LinearRegression()
regressor.fit(features, labels)
x = ['Cabrini', 337, 1.5, 2.3, 9.0, 0]
x = np.array(x).reshape(1, -1)
x[:, 0] = labelencoder.transform(x[:, 0])
x = onehotencoder.transform(x).toarray()
x = x[:, 1:]
regressor.predict(x)
| # -*- coding: utf-8 -*-
"""
Created on Mon May 27 17:38:50 2019
@author: User
"""
import numpy as np
import pandas as pd
dataset = pd.read_csv('University_data.csv')
print(dataset.info())
features = dataset.iloc[:, :-1].values
labels = dataset.iloc[:, -1:].values
from sklearn.preprocessing import LabelEncoder
labelencoder = LabelEncoder()
features[:, 0] = labelencoder.fit_transform(features[:, 0])
from sklearn.preprocessing import OneHotEncoder
onehotencoder = OneHotEncoder(categorical_features = [0])
features = onehotencoder.fit_transform(features).toarray()
features = features[:, 1:]
from sklearn.linear_model import LinearRegression
regressor = LinearRegression()
regressor.fit(features, labels)
x = ["Cabrini",337,1.5,2.3,9.0,0]
x = np.array(x).reshape(1,-1)
x[:,0] = labelencoder.transform(x[:,0])
x = onehotencoder.transform(x).toarray()
x = x[:,1:]
regressor.predict(x) | [
0,
1,
2,
3,
4
] |
951 | f5d353694a719472320f4d6fa28bc9d2cc5a69b0 | <mask token>
def talk(text):
engine.say('heyo' + text)
engine.runAndWait()
def take_command():
try:
with sr.Microphone() as source:
print('listening....')
voice = listener.listen(source)
command = listener.recognize_google(voice)
command = command.lower()
if 'lisa' in command:
command = command.replace('lisa', '')
except:
print('something went wrong')
return command
<mask token>
| <mask token>
engine.setProperty('voice', voices[1].id)
engine.say(
"hey, My name is 'lisa, human cyborg relations. Please see the console for what I can do for you."
)
engine.runAndWait()
print(
"""I can play videos (Lisa, play....),
teach (Lisa, teach me about...),
tell you more (Lisa, tell me more about...),
tell time (Lisa, what time is it),
and tell jokes (Lisa, tell me a joke...)."""
)
def talk(text):
engine.say('heyo' + text)
engine.runAndWait()
def take_command():
try:
with sr.Microphone() as source:
print('listening....')
voice = listener.listen(source)
command = listener.recognize_google(voice)
command = command.lower()
if 'lisa' in command:
command = command.replace('lisa', '')
except:
print('something went wrong')
return command
def run_lisa():
command = take_command()
if 'play' in command:
song = command.replace('play', '')
talk('hey playing' + song)
print('playing...' + song)
pywhatkit.playonyt(song)
elif 'time' in command:
time = datetime.datetime.now().strftime('%H %M')
talk('Right now it is ' + time)
elif 'teach me about' in command:
info = command.replace('teach me about', '')
teach = wikipedia.summary(info, 2)
print(teach)
talk(teach)
elif 'tell me more about' in command:
info = command.replace('tell me more about', '')
teach = wikipedia.summary(info, 6)
print(teach)
talk(teach)
elif 'joke' in command:
talk(dadjokes.joke())
elif 'good one' in command:
talk("yeah thanks! I'll be here all week folks!")
while True:
run_lisa()
| <mask token>
listener = sr.Recognizer()
engine = pyttsx3.init()
voices = engine.getProperty('voices')
engine.setProperty('voice', voices[1].id)
engine.say(
"hey, My name is 'lisa, human cyborg relations. Please see the console for what I can do for you."
)
engine.runAndWait()
print(
"""I can play videos (Lisa, play....),
teach (Lisa, teach me about...),
tell you more (Lisa, tell me more about...),
tell time (Lisa, what time is it),
and tell jokes (Lisa, tell me a joke...)."""
)
def talk(text):
engine.say('heyo' + text)
engine.runAndWait()
def take_command():
try:
with sr.Microphone() as source:
print('listening....')
voice = listener.listen(source)
command = listener.recognize_google(voice)
command = command.lower()
if 'lisa' in command:
command = command.replace('lisa', '')
except:
print('something went wrong')
return command
def run_lisa():
command = take_command()
if 'play' in command:
song = command.replace('play', '')
talk('hey playing' + song)
print('playing...' + song)
pywhatkit.playonyt(song)
elif 'time' in command:
time = datetime.datetime.now().strftime('%H %M')
talk('Right now it is ' + time)
elif 'teach me about' in command:
info = command.replace('teach me about', '')
teach = wikipedia.summary(info, 2)
print(teach)
talk(teach)
elif 'tell me more about' in command:
info = command.replace('tell me more about', '')
teach = wikipedia.summary(info, 6)
print(teach)
talk(teach)
elif 'joke' in command:
talk(dadjokes.joke())
elif 'good one' in command:
talk("yeah thanks! I'll be here all week folks!")
while True:
run_lisa()
| <mask token>
import speech_recognition as sr
import pyttsx3
import pywhatkit
import datetime
import wikipedia
import dadjokes
listener = sr.Recognizer()
engine = pyttsx3.init()
voices = engine.getProperty('voices')
engine.setProperty('voice', voices[1].id)
engine.say(
"hey, My name is 'lisa, human cyborg relations. Please see the console for what I can do for you."
)
engine.runAndWait()
print(
"""I can play videos (Lisa, play....),
teach (Lisa, teach me about...),
tell you more (Lisa, tell me more about...),
tell time (Lisa, what time is it),
and tell jokes (Lisa, tell me a joke...)."""
)
def talk(text):
engine.say('heyo' + text)
engine.runAndWait()
def take_command():
try:
with sr.Microphone() as source:
print('listening....')
voice = listener.listen(source)
command = listener.recognize_google(voice)
command = command.lower()
if 'lisa' in command:
command = command.replace('lisa', '')
except:
print('something went wrong')
return command
def run_lisa():
command = take_command()
if 'play' in command:
song = command.replace('play', '')
talk('hey playing' + song)
print('playing...' + song)
pywhatkit.playonyt(song)
elif 'time' in command:
time = datetime.datetime.now().strftime('%H %M')
talk('Right now it is ' + time)
elif 'teach me about' in command:
info = command.replace('teach me about', '')
teach = wikipedia.summary(info, 2)
print(teach)
talk(teach)
elif 'tell me more about' in command:
info = command.replace('tell me more about', '')
teach = wikipedia.summary(info, 6)
print(teach)
talk(teach)
elif 'joke' in command:
talk(dadjokes.joke())
elif 'good one' in command:
talk("yeah thanks! I'll be here all week folks!")
while True:
run_lisa()
| # -*- coding: utf-8 -*-
"""
This is the very first A.I. in this series.
The vision is to devlop 'protocol droid' to talk to, to help with tasks, and with whom to play games.
The droid will be able to translate langages and connect ppl.
"""
import speech_recognition as sr
import pyttsx3
import pywhatkit
import datetime
import wikipedia
import dadjokes
listener = sr.Recognizer()
engine = pyttsx3.init()
voices = engine.getProperty('voices')
engine.setProperty('voice', voices[1].id)
engine.say("hey, My name is 'lisa, human cyborg relations. Please see the console for what I can do for you.")
#engine.say("hey, .")
engine.runAndWait()
print("I can play videos (Lisa, play....),\n teach (Lisa, teach me about...),\n tell you more (Lisa, tell me more about...),\n tell time (Lisa, what time is it),\n and tell jokes (Lisa, tell me a joke...).")
def talk(text):
engine.say("heyo"+text)
engine.runAndWait()
def take_command():
try:
with sr.Microphone() as source:
print('listening....')
voice = listener.listen(source)
command = listener.recognize_google(voice)
command = command.lower()
if 'lisa' in command:
command = command.replace('lisa','')
except:
print("something went wrong")
return command
def run_lisa():
command = take_command()
if 'play' in command:
song = command.replace('play','')
talk('hey playing' + song)
print('playing...'+ song)
pywhatkit.playonyt(song)
elif 'time' in command:
#needs a more natural way of expressing time
#i would like mil time
time = datetime.datetime.now().strftime('%H %M')
talk('Right now it is '+time)
elif "teach me about" in command:
info = command.replace('teach me about','')
teach = wikipedia.summary(info,2)
print(teach)
talk(teach)
elif "tell me more about" in command:
info = command.replace('tell me more about','')
teach = wikipedia.summary(info,6)
print(teach)
talk(teach)
elif "joke" in command:
talk(dadjokes.joke())
elif "good one" in command:
talk("yeah thanks! I'll be here all week folks!")
while True:
run_lisa()
| [
2,
4,
5,
6,
7
] |
952 | 27a12a0f5ea6120036b66ee1cdd903da868a037f | <mask token>
class Hdmovie14Ag(SimpleScraperBase):
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
def _fetch_search_url(self, search_term, media_type):
return '{base_url}/search-movies/{search_term}.html'.format(base_url
=self.BASE_URL, search_term=search_term)
def _fetch_no_results_text(self):
return None
def _fetch_next_button(self, soup):
next_button = soup.find('a', text=u'»')
if next_button:
return next_button.href
return None
<mask token>
def _parse_parse_page(self, soup):
index_page_title = self.util.get_page_title(soup)
series_season = series_episode = None
title = soup.select_one('h1')
if title and title.text:
series_season, series_episode = self.util.extract_season_episode(
title.text)
for results in soup.select('div.server_line'):
try:
movie_link = self.make_soup(base64.decodestring(self.
get_soup(results.select_one('a').href).select_one(
'div#media-player script').text.split('("')[-1].split(
'")')[0])).select_one('iframe')['src']
except AttributeError:
movie_link = self.get_soup(results.select_one('a').href
).select_one('div#media-player a')['href']
self.submit_parse_result(index_page_title=index_page_title,
link_url=movie_link, link_title=movie_link, series_season=
series_season, series_episode=series_episode)
| <mask token>
class Hdmovie14Ag(SimpleScraperBase):
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
def _fetch_search_url(self, search_term, media_type):
return '{base_url}/search-movies/{search_term}.html'.format(base_url
=self.BASE_URL, search_term=search_term)
def _fetch_no_results_text(self):
return None
def _fetch_next_button(self, soup):
next_button = soup.find('a', text=u'»')
if next_button:
return next_button.href
return None
def _parse_search_result_page(self, soup):
found = 0
for result in soup.select('div.ml-item'):
link = result.select_one('a')
self.submit_search_result(link_url=link.href, link_title=link.
text, image=self.util.find_image_src_or_none(result, 'img'))
found = 1
if not found:
return self.submit_search_no_results()
def _parse_parse_page(self, soup):
index_page_title = self.util.get_page_title(soup)
series_season = series_episode = None
title = soup.select_one('h1')
if title and title.text:
series_season, series_episode = self.util.extract_season_episode(
title.text)
for results in soup.select('div.server_line'):
try:
movie_link = self.make_soup(base64.decodestring(self.
get_soup(results.select_one('a').href).select_one(
'div#media-player script').text.split('("')[-1].split(
'")')[0])).select_one('iframe')['src']
except AttributeError:
movie_link = self.get_soup(results.select_one('a').href
).select_one('div#media-player a')['href']
self.submit_parse_result(index_page_title=index_page_title,
link_url=movie_link, link_title=movie_link, series_season=
series_season, series_episode=series_episode)
| <mask token>
class Hdmovie14Ag(SimpleScraperBase):
BASE_URL = 'http://www1.solarmovie.net'
OTHER_URLS = ['http://solarmovie.net', 'http://hdmovie14.ag']
SCRAPER_TYPES = [ScraperBase.SCRAPER_TYPE_OSP]
LANGUAGE = 'eng'
MEDIA_TYPES = [ScraperBase.MEDIA_TYPE_FILM, ScraperBase.MEDIA_TYPE_TV]
URL_TYPES = [ScraperBase.URL_TYPE_SEARCH, ScraperBase.URL_TYPE_LISTING]
def _fetch_search_url(self, search_term, media_type):
return '{base_url}/search-movies/{search_term}.html'.format(base_url
=self.BASE_URL, search_term=search_term)
def _fetch_no_results_text(self):
return None
def _fetch_next_button(self, soup):
next_button = soup.find('a', text=u'»')
if next_button:
return next_button.href
return None
def _parse_search_result_page(self, soup):
found = 0
for result in soup.select('div.ml-item'):
link = result.select_one('a')
self.submit_search_result(link_url=link.href, link_title=link.
text, image=self.util.find_image_src_or_none(result, 'img'))
found = 1
if not found:
return self.submit_search_no_results()
def _parse_parse_page(self, soup):
index_page_title = self.util.get_page_title(soup)
series_season = series_episode = None
title = soup.select_one('h1')
if title and title.text:
series_season, series_episode = self.util.extract_season_episode(
title.text)
for results in soup.select('div.server_line'):
try:
movie_link = self.make_soup(base64.decodestring(self.
get_soup(results.select_one('a').href).select_one(
'div#media-player script').text.split('("')[-1].split(
'")')[0])).select_one('iframe')['src']
except AttributeError:
movie_link = self.get_soup(results.select_one('a').href
).select_one('div#media-player a')['href']
self.submit_parse_result(index_page_title=index_page_title,
link_url=movie_link, link_title=movie_link, series_season=
series_season, series_episode=series_episode)
| import base64
from sandcrawler.scraper import ScraperBase, SimpleScraperBase
class Hdmovie14Ag(SimpleScraperBase):
BASE_URL = 'http://www1.solarmovie.net'
OTHER_URLS = ['http://solarmovie.net', 'http://hdmovie14.ag']
SCRAPER_TYPES = [ScraperBase.SCRAPER_TYPE_OSP]
LANGUAGE = 'eng'
MEDIA_TYPES = [ScraperBase.MEDIA_TYPE_FILM, ScraperBase.MEDIA_TYPE_TV]
URL_TYPES = [ScraperBase.URL_TYPE_SEARCH, ScraperBase.URL_TYPE_LISTING]
def _fetch_search_url(self, search_term, media_type):
return '{base_url}/search-movies/{search_term}.html'.format(base_url
=self.BASE_URL, search_term=search_term)
def _fetch_no_results_text(self):
return None
def _fetch_next_button(self, soup):
next_button = soup.find('a', text=u'»')
if next_button:
return next_button.href
return None
def _parse_search_result_page(self, soup):
found = 0
for result in soup.select('div.ml-item'):
link = result.select_one('a')
self.submit_search_result(link_url=link.href, link_title=link.
text, image=self.util.find_image_src_or_none(result, 'img'))
found = 1
if not found:
return self.submit_search_no_results()
def _parse_parse_page(self, soup):
index_page_title = self.util.get_page_title(soup)
series_season = series_episode = None
title = soup.select_one('h1')
if title and title.text:
series_season, series_episode = self.util.extract_season_episode(
title.text)
for results in soup.select('div.server_line'):
try:
movie_link = self.make_soup(base64.decodestring(self.
get_soup(results.select_one('a').href).select_one(
'div#media-player script').text.split('("')[-1].split(
'")')[0])).select_one('iframe')['src']
except AttributeError:
movie_link = self.get_soup(results.select_one('a').href
).select_one('div#media-player a')['href']
self.submit_parse_result(index_page_title=index_page_title,
link_url=movie_link, link_title=movie_link, series_season=
series_season, series_episode=series_episode)
| # coding=utf-8
import base64
from sandcrawler.scraper import ScraperBase, SimpleScraperBase
class Hdmovie14Ag(SimpleScraperBase):
BASE_URL = 'http://www1.solarmovie.net'
OTHER_URLS = ['http://solarmovie.net', 'http://hdmovie14.ag']
SCRAPER_TYPES = [ ScraperBase.SCRAPER_TYPE_OSP, ]
LANGUAGE = 'eng'
MEDIA_TYPES = [ ScraperBase.MEDIA_TYPE_FILM, ScraperBase.MEDIA_TYPE_TV, ]
URL_TYPES = [ScraperBase.URL_TYPE_SEARCH, ScraperBase.URL_TYPE_LISTING, ]
def _fetch_search_url(self, search_term, media_type):
return '{base_url}/search-movies/{search_term}.html'.format(base_url=self.BASE_URL, search_term=search_term)
def _fetch_no_results_text(self):
return None
def _fetch_next_button(self, soup):
next_button = soup.find('a', text=u'»')
if next_button:
return next_button.href
return None
def _parse_search_result_page(self, soup):
found=0
for result in soup.select('div.ml-item'):
link = result.select_one('a')
self.submit_search_result(
link_url=link.href,
link_title=link.text,
image=self.util.find_image_src_or_none(result, 'img'),
)
found=1
if not found:
return self.submit_search_no_results()
def _parse_parse_page(self, soup):
index_page_title = self.util.get_page_title(soup)
series_season = series_episode = None
title = soup.select_one('h1')
if title and title.text:
series_season, series_episode = self.util.extract_season_episode(title.text)
for results in soup.select('div.server_line'):
try:
movie_link = self.make_soup(base64.decodestring(self.get_soup(results.select_one('a').href).
select_one('div#media-player script').text.split('("')[-1].
split('")')[0])).select_one('iframe')['src']
except AttributeError:
movie_link = self.get_soup(results.select_one('a').href).select_one('div#media-player a')['href']
self.submit_parse_result(
index_page_title=index_page_title,
link_url=movie_link,
link_title=movie_link,
series_season=series_season,
series_episode=series_episode,
)
| [
5,
6,
7,
8,
9
] |
953 | aba2a0a262c14f286c278f21ba42871410c174f0 | <mask token>
def book_add(request):
if request.user.is_authenticated:
context = {}
if request.method == 'GET':
form = BookCreateModelForm()
context['form'] = form
return render(request, 'addbook.html', context)
elif request.method == 'POST':
context = {}
form = BookCreateModelForm(request.POST)
if form.is_valid():
form.save()
return redirect('index')
else:
return render(request, 'addbook.html', context)
else:
return redirect('singn')
def get_books(request):
if request.user.is_authenticated:
form = SearchForm()
context = {}
books = Books.objects.all()
context['books'] = books
context['form'] = form
if request.method == 'POST':
form = SearchForm(request.POST)
if form.is_valid():
book_name = form.cleaned_data['book_name']
books = Books.objects.filter(book_name__contains=book_name)
context['books'] = books
return render(request, 'book_list.html', context)
else:
context['form'] = form
return render(request, 'book_list.html', context)
return render(request, 'book_list.html', context)
else:
return redirect('singn')
<mask token>
def remove_book(request, id):
if request.user.is_authenticated:
book = Books.objects.get(id=id)
book.delete()
return redirect('books')
else:
return redirect('singn')
<mask token>
def create_account(request):
form = RegistrationForm()
context = {'form': form}
if request.method == 'POST':
form = RegistrationForm(request.POST)
if form.is_valid():
form.save()
print('account created')
return redirect('singn')
else:
context['form'] = form
return render(request, 'createaccount.html', context)
return render(request, 'createaccount.html', context)
<mask token>
| <mask token>
def book_add(request):
if request.user.is_authenticated:
context = {}
if request.method == 'GET':
form = BookCreateModelForm()
context['form'] = form
return render(request, 'addbook.html', context)
elif request.method == 'POST':
context = {}
form = BookCreateModelForm(request.POST)
if form.is_valid():
form.save()
return redirect('index')
else:
return render(request, 'addbook.html', context)
else:
return redirect('singn')
def get_books(request):
if request.user.is_authenticated:
form = SearchForm()
context = {}
books = Books.objects.all()
context['books'] = books
context['form'] = form
if request.method == 'POST':
form = SearchForm(request.POST)
if form.is_valid():
book_name = form.cleaned_data['book_name']
books = Books.objects.filter(book_name__contains=book_name)
context['books'] = books
return render(request, 'book_list.html', context)
else:
context['form'] = form
return render(request, 'book_list.html', context)
return render(request, 'book_list.html', context)
else:
return redirect('singn')
def book_details(request, id):
if request.user.is_authenticated:
book = Books.objects.get(id=id)
context = {}
context['book'] = book
return render(request, 'book_details.html', context)
else:
return redirect('singn')
def remove_book(request, id):
if request.user.is_authenticated:
book = Books.objects.get(id=id)
book.delete()
return redirect('books')
else:
return redirect('singn')
<mask token>
def create_account(request):
form = RegistrationForm()
context = {'form': form}
if request.method == 'POST':
form = RegistrationForm(request.POST)
if form.is_valid():
form.save()
print('account created')
return redirect('singn')
else:
context['form'] = form
return render(request, 'createaccount.html', context)
return render(request, 'createaccount.html', context)
<mask token>
def signout(request):
if request.user.is_authenticated:
logout(request)
return redirect('singn')
else:
return redirect('singn')
| <mask token>
def book_add(request):
if request.user.is_authenticated:
context = {}
if request.method == 'GET':
form = BookCreateModelForm()
context['form'] = form
return render(request, 'addbook.html', context)
elif request.method == 'POST':
context = {}
form = BookCreateModelForm(request.POST)
if form.is_valid():
form.save()
return redirect('index')
else:
return render(request, 'addbook.html', context)
else:
return redirect('singn')
def get_books(request):
if request.user.is_authenticated:
form = SearchForm()
context = {}
books = Books.objects.all()
context['books'] = books
context['form'] = form
if request.method == 'POST':
form = SearchForm(request.POST)
if form.is_valid():
book_name = form.cleaned_data['book_name']
books = Books.objects.filter(book_name__contains=book_name)
context['books'] = books
return render(request, 'book_list.html', context)
else:
context['form'] = form
return render(request, 'book_list.html', context)
return render(request, 'book_list.html', context)
else:
return redirect('singn')
def book_details(request, id):
if request.user.is_authenticated:
book = Books.objects.get(id=id)
context = {}
context['book'] = book
return render(request, 'book_details.html', context)
else:
return redirect('singn')
def remove_book(request, id):
if request.user.is_authenticated:
book = Books.objects.get(id=id)
book.delete()
return redirect('books')
else:
return redirect('singn')
def update_book(request, id):
if request.user.is_authenticated:
book = Books.objects.get(id=id)
form = BookCreateModelForm(instance=book)
context = {}
context['form'] = form
if request.method == 'POST':
book = Books.objects.get(id=id)
form = BookCreateModelForm(instance=book, data=request.POST)
if form.is_valid():
form.save()
return redirect('books')
else:
form = BookCreateModelForm(request.POST)
context['form'] = form
print(form)
return render(request, 'edit.html', context)
return render(request, 'edit.html', context)
else:
return redirect('singn')
def create_account(request):
form = RegistrationForm()
context = {'form': form}
if request.method == 'POST':
form = RegistrationForm(request.POST)
if form.is_valid():
form.save()
print('account created')
return redirect('singn')
else:
context['form'] = form
return render(request, 'createaccount.html', context)
return render(request, 'createaccount.html', context)
<mask token>
def signout(request):
if request.user.is_authenticated:
logout(request)
return redirect('singn')
else:
return redirect('singn')
| <mask token>
def book_add(request):
if request.user.is_authenticated:
context = {}
if request.method == 'GET':
form = BookCreateModelForm()
context['form'] = form
return render(request, 'addbook.html', context)
elif request.method == 'POST':
context = {}
form = BookCreateModelForm(request.POST)
if form.is_valid():
form.save()
return redirect('index')
else:
return render(request, 'addbook.html', context)
else:
return redirect('singn')
def get_books(request):
if request.user.is_authenticated:
form = SearchForm()
context = {}
books = Books.objects.all()
context['books'] = books
context['form'] = form
if request.method == 'POST':
form = SearchForm(request.POST)
if form.is_valid():
book_name = form.cleaned_data['book_name']
books = Books.objects.filter(book_name__contains=book_name)
context['books'] = books
return render(request, 'book_list.html', context)
else:
context['form'] = form
return render(request, 'book_list.html', context)
return render(request, 'book_list.html', context)
else:
return redirect('singn')
def book_details(request, id):
if request.user.is_authenticated:
book = Books.objects.get(id=id)
context = {}
context['book'] = book
return render(request, 'book_details.html', context)
else:
return redirect('singn')
def remove_book(request, id):
if request.user.is_authenticated:
book = Books.objects.get(id=id)
book.delete()
return redirect('books')
else:
return redirect('singn')
def update_book(request, id):
if request.user.is_authenticated:
book = Books.objects.get(id=id)
form = BookCreateModelForm(instance=book)
context = {}
context['form'] = form
if request.method == 'POST':
book = Books.objects.get(id=id)
form = BookCreateModelForm(instance=book, data=request.POST)
if form.is_valid():
form.save()
return redirect('books')
else:
form = BookCreateModelForm(request.POST)
context['form'] = form
print(form)
return render(request, 'edit.html', context)
return render(request, 'edit.html', context)
else:
return redirect('singn')
def create_account(request):
form = RegistrationForm()
context = {'form': form}
if request.method == 'POST':
form = RegistrationForm(request.POST)
if form.is_valid():
form.save()
print('account created')
return redirect('singn')
else:
context['form'] = form
return render(request, 'createaccount.html', context)
return render(request, 'createaccount.html', context)
def singn_in(request):
form = SignInForm()
context = {'form': form}
if request.method == 'POST':
form = SignInForm(request.POST)
if form.is_valid():
username = form.cleaned_data['username']
password = form.cleaned_data['password']
user = authenticate(request, username=username, password=password)
if user:
login(request, user)
return redirect('index')
else:
context['form'] = form
return render(request, 'signin.html', context)
return render(request, 'signin.html', context)
def signout(request):
if request.user.is_authenticated:
logout(request)
return redirect('singn')
else:
return redirect('singn')
| from django.shortcuts import render
from django.shortcuts import redirect
# Create your views here.
from .forms import AddBookForm ,UpdateBookForm,BookCreateModelForm,SearchForm,RegistrationForm,SignInForm
from book.models import Books
from django.contrib.auth import authenticate,login,logout
def book_add(request):
if request.user.is_authenticated:
context = {}
if request.method == "GET":
form = BookCreateModelForm()
context["form"] = form
return render(request, "addbook.html", context)
elif request.method == "POST":
context = {}
form = BookCreateModelForm(request.POST)
if form.is_valid():
form.save()
# context["form"] = form
# book_name = form.cleaned_data["book_name"]
# author= form.cleaned_data["author"]
# category=form.cleaned_data["category"]
# prices=form.cleaned_data["price"]
# copies=form.cleaned_data["number_copies"]
# print(book_name,author,category,prices,copies)
# book=Books(book_name=book_name,author=author,category=category,price=prices,copies=copies)
# book.save()
return redirect("index")
else:
return render(request, "addbook.html",context)
else:
return redirect('singn')
def get_books(request):
if request.user.is_authenticated:
form=SearchForm()
context = {}
books=Books.objects.all()
context["books"]=books
context['form']=form
if request.method=="POST":
form=SearchForm(request.POST)
if form.is_valid():
book_name=form.cleaned_data["book_name"]
books=Books.objects.filter(book_name__contains=book_name)
context['books']=books
return render(request,"book_list.html",context)
else:
context['form']=form
return render(request, "book_list.html", context)
return render(request, "book_list.html", context)
else:
return redirect('singn')
def book_details(request,id):
if request.user.is_authenticated:
book=Books.objects.get(id=id)
context = {}
context["book"]=book
return render(request,"book_details.html",context)
else:
return redirect('singn')
def remove_book(request,id):
if request.user.is_authenticated:
book=Books.objects.get(id=id)
book.delete()
return redirect("books")
else:
return redirect('singn')
def update_book(request,id):
if request.user.is_authenticated:
book = Books.objects.get(id=id)
form=BookCreateModelForm(instance=book)
# form=BookCreateModelForm(initial={
# "book_name":book.book_name,
# "author":book.author,
# "category":book.category,
# "price":book.price,
# "number_copies":book.copies})
context = {}
context['form']=form
if request.method=="POST":
book = Books.objects.get(id=id)
form=BookCreateModelForm(instance=book,data=request.POST)
if form.is_valid():
form.save()
# form=BookCreateModelForm(request.POST)
#
# if form.is_valid():
# book.book_name=form.cleaned_data["book_name"]
# book.author=form.cleaned_data["author"]
# book.category=form.cleaned_data["category"]
# book.price=form.cleaned_data["price"]
# book.copies=form.cleaned_data["number_copies"]
# book.save()
return redirect("books")
else:
form=BookCreateModelForm(request.POST)
context["form"]=form
print(form)
return render(request, "edit.html", context)
return render(request,"edit.html",context)
else:
return redirect('singn')
def create_account(request):
form=RegistrationForm()
context={'form':form}
if request.method=="POST":
form=RegistrationForm(request.POST)
if form.is_valid():
form.save()
print("account created")
return redirect("singn")
else:
context["form"]=form
return render(request, "createaccount.html", context)
return render(request,"createaccount.html",context)
def singn_in(request):
form=SignInForm()
context={'form':form}
if request.method=="POST":
form=SignInForm(request.POST)
if form.is_valid():
username=form.cleaned_data["username"]
password=form.cleaned_data["password"]
user=authenticate(request,username=username,password=password)
if user:
login(request,user)
return redirect("index")
else:
context['form']=form
return render(request, "signin.html", context)
return render(request,"signin.html",context)
def signout(request):
if request.user.is_authenticated:
logout(request)
return redirect("singn")
else:
return redirect('singn')
| [
4,
6,
7,
8,
10
] |
954 | d5903698eb8ed6be531b0cc522d4feff6b79da4e | <mask token>
class Deck:
def __init__(self, num_cols, front, back):
self.flashcards = []
self.num_cols = num_cols
self.front = front
self.back = back
class Flashcard:
def __init__(self, deck, front, back, column, row):
self.deck = deck
self.front = front
self.back = back
self.column = column
self.row = row
self.correct = False
def show_front(self):
r = '{}: {}'.format(self.deck.front, self.front)
return r
def show_back(self):
return '{}: {}'.format(self.deck.back, self.back)
def show_card(self):
return '{}: {}, {}: {}'.format(self.deck.front, self.front, self.
deck.back, self.back)
def show_reverse(self):
return '{}: {}, {}: {}'.format(self.deck.back, self.back, self.deck
.front, self.front)
<mask token>
| <mask token>
class Deck:
def __init__(self, num_cols, front, back):
self.flashcards = []
self.num_cols = num_cols
self.front = front
self.back = back
class Flashcard:
def __init__(self, deck, front, back, column, row):
self.deck = deck
self.front = front
self.back = back
self.column = column
self.row = row
self.correct = False
def show_front(self):
r = '{}: {}'.format(self.deck.front, self.front)
return r
def show_back(self):
return '{}: {}'.format(self.deck.back, self.back)
def show_card(self):
return '{}: {}, {}: {}'.format(self.deck.front, self.front, self.
deck.back, self.back)
def show_reverse(self):
return '{}: {}, {}: {}'.format(self.deck.back, self.back, self.deck
.front, self.front)
def create_deck(filename, num_cols):
df = pd.read_excel(filename)
front = df.columns.values[0]
back = df.columns.values[1]
deck = Deck(num_cols, front, back)
for i in range(num_cols):
front_column = '{}.{}'.format(front, i) if i else front
back_column = '{}.{}'.format(back, i) if i else back
for row in range(df[front_column].size):
f = df[front_column][row]
b = df[back_column][row]
if not (pd.isnull(f) or pd.isnull(b)):
fc = Flashcard(deck, f.strip(), b.strip(), i, row)
deck.flashcards.append(fc)
return deck
def get_cards_from_deck(deck, first_letter, start_index, number_of_cards):
flashcards = [fc for fc in deck.flashcards if fc.column == first_letter or
first_letter == -1]
return flashcards[start_index:number_of_cards + start_index]
def play_game(deck, mode, first_letter, start_index, number_of_cards):
flashcards = get_cards_from_deck(deck, first_letter, start_index,
number_of_cards)
play_cards(mode, deck, flashcards)
def play_cards(mode, deck, cards):
source = deck.front if mode % 2 == 0 else deck.back
target = deck.back if mode % 2 == 0 else deck.front
if mode >= 2:
random.shuffle(cards)
num_cards = len(cards)
start_time = time.time()
for i, fc in enumerate(cards):
source_word = fc.front if mode % 2 == 0 else fc.back
target_word = fc.back if mode % 2 == 0 else fc.front
quiz(fc, source, source_word, target, target_word, i, num_cards)
print('All Done!')
correct = sum(fc.correct == True for fc in cards)
incorrect = len(cards) - correct
print('Correct: {}'.format(correct))
print('Incorrect: {}'.format(incorrect))
if incorrect:
incorrect_cards = [fc for fc in cards if not fc.correct]
print('\n'.join([fc.show_card() for fc in incorrect_cards]))
again = input('review incorrect words (y/n): ')
if again == 'y' or again == '1' or again == 'да':
play_cards(mode, deck, incorrect_cards)
else:
finish_time = time.time()
time_diff = time.gmtime(finish_time - start_time)
avg_time = time.gmtime((finish_time - start_time) / num_cards)
print('Total Time: {}'.format(time.strftime('%H:%M:%S', time_diff)))
print('Time per card: {}'.format(time.strftime('%H:%M:%S', avg_time)))
def quiz(fc, source_language, source_word, target_language, target_word, i,
number_of_cards):
print('Card {}/{}'.format(i + 1, number_of_cards))
print('{} word: {}'.format(source_language, source_word))
answer = input('Enter {} translation: '.format(target_language))
if is_correct(answer, target_word):
fc.correct = True
print('Correct!')
else:
print('Incorrect! Correct answer was: {}'.format(target_word))
n = input('Enter {} translation for {}: '.format(target_language,
source_word))
def is_correct(answer, target):
return format_for_comparison(answer) == format_for_comparison(target)
def format_for_comparison(word):
word = word.strip().lower()
word = word.split('(')
word[0] = word[0].split(', ')
word[0].sort()
word[0] = ', '.join(word[0])
word = '('.join(word)
return word
def learn_words(deck, first_letter, start_index, number_of_cards):
flashcards = get_cards_from_deck(deck, first_letter, start_index,
number_of_cards)
for i, card in enumerate(flashcards):
print('Card {}/{}'.format(i + 1, number_of_cards))
input('{}\nPractice: '.format(card.show_card()))
input('{}\nPractice: '.format(card.show_front()))
input('{}\nPractice: '.format(card.show_back()))
print('Done! Review learned words:')
for card in flashcards:
print('{}'.format(card.show_card()))
def main(filename, first_letter, start_index, number_of_cards, mode):
num_cols = 9
deck = create_deck(filename, num_cols)
print('Welcome to The Flashcard Learner!')
print("Okay! Let's play!")
if mode == 4:
learn_words(deck, first_letter, start_index, number_of_cards)
else:
play_game(deck, mode, first_letter, start_index, number_of_cards)
<mask token>
| <mask token>
class Deck:
def __init__(self, num_cols, front, back):
self.flashcards = []
self.num_cols = num_cols
self.front = front
self.back = back
class Flashcard:
def __init__(self, deck, front, back, column, row):
self.deck = deck
self.front = front
self.back = back
self.column = column
self.row = row
self.correct = False
def show_front(self):
r = '{}: {}'.format(self.deck.front, self.front)
return r
def show_back(self):
return '{}: {}'.format(self.deck.back, self.back)
def show_card(self):
return '{}: {}, {}: {}'.format(self.deck.front, self.front, self.
deck.back, self.back)
def show_reverse(self):
return '{}: {}, {}: {}'.format(self.deck.back, self.back, self.deck
.front, self.front)
def create_deck(filename, num_cols):
df = pd.read_excel(filename)
front = df.columns.values[0]
back = df.columns.values[1]
deck = Deck(num_cols, front, back)
for i in range(num_cols):
front_column = '{}.{}'.format(front, i) if i else front
back_column = '{}.{}'.format(back, i) if i else back
for row in range(df[front_column].size):
f = df[front_column][row]
b = df[back_column][row]
if not (pd.isnull(f) or pd.isnull(b)):
fc = Flashcard(deck, f.strip(), b.strip(), i, row)
deck.flashcards.append(fc)
return deck
def get_cards_from_deck(deck, first_letter, start_index, number_of_cards):
flashcards = [fc for fc in deck.flashcards if fc.column == first_letter or
first_letter == -1]
return flashcards[start_index:number_of_cards + start_index]
def play_game(deck, mode, first_letter, start_index, number_of_cards):
flashcards = get_cards_from_deck(deck, first_letter, start_index,
number_of_cards)
play_cards(mode, deck, flashcards)
def play_cards(mode, deck, cards):
source = deck.front if mode % 2 == 0 else deck.back
target = deck.back if mode % 2 == 0 else deck.front
if mode >= 2:
random.shuffle(cards)
num_cards = len(cards)
start_time = time.time()
for i, fc in enumerate(cards):
source_word = fc.front if mode % 2 == 0 else fc.back
target_word = fc.back if mode % 2 == 0 else fc.front
quiz(fc, source, source_word, target, target_word, i, num_cards)
print('All Done!')
correct = sum(fc.correct == True for fc in cards)
incorrect = len(cards) - correct
print('Correct: {}'.format(correct))
print('Incorrect: {}'.format(incorrect))
if incorrect:
incorrect_cards = [fc for fc in cards if not fc.correct]
print('\n'.join([fc.show_card() for fc in incorrect_cards]))
again = input('review incorrect words (y/n): ')
if again == 'y' or again == '1' or again == 'да':
play_cards(mode, deck, incorrect_cards)
else:
finish_time = time.time()
time_diff = time.gmtime(finish_time - start_time)
avg_time = time.gmtime((finish_time - start_time) / num_cards)
print('Total Time: {}'.format(time.strftime('%H:%M:%S', time_diff)))
print('Time per card: {}'.format(time.strftime('%H:%M:%S', avg_time)))
def quiz(fc, source_language, source_word, target_language, target_word, i,
number_of_cards):
print('Card {}/{}'.format(i + 1, number_of_cards))
print('{} word: {}'.format(source_language, source_word))
answer = input('Enter {} translation: '.format(target_language))
if is_correct(answer, target_word):
fc.correct = True
print('Correct!')
else:
print('Incorrect! Correct answer was: {}'.format(target_word))
n = input('Enter {} translation for {}: '.format(target_language,
source_word))
def is_correct(answer, target):
return format_for_comparison(answer) == format_for_comparison(target)
def format_for_comparison(word):
word = word.strip().lower()
word = word.split('(')
word[0] = word[0].split(', ')
word[0].sort()
word[0] = ', '.join(word[0])
word = '('.join(word)
return word
def learn_words(deck, first_letter, start_index, number_of_cards):
flashcards = get_cards_from_deck(deck, first_letter, start_index,
number_of_cards)
for i, card in enumerate(flashcards):
print('Card {}/{}'.format(i + 1, number_of_cards))
input('{}\nPractice: '.format(card.show_card()))
input('{}\nPractice: '.format(card.show_front()))
input('{}\nPractice: '.format(card.show_back()))
print('Done! Review learned words:')
for card in flashcards:
print('{}'.format(card.show_card()))
def main(filename, first_letter, start_index, number_of_cards, mode):
num_cols = 9
deck = create_deck(filename, num_cols)
print('Welcome to The Flashcard Learner!')
print("Okay! Let's play!")
if mode == 4:
learn_words(deck, first_letter, start_index, number_of_cards)
else:
play_game(deck, mode, first_letter, start_index, number_of_cards)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Learn flashcards')
parser.add_argument('filename', help='name of .xlsx file with vocab',
default='RussianVocab.xlsx')
parser.add_argument('category', type=int, help=
'e.g. which letter are you learning? (-1: all, 0:a, 1:б, 2:в, etc.)')
parser.add_argument('start', type=int, help=
'start index (lists are 0-indexed)')
parser.add_argument('num', type=int, help=
"number of cards you'd like to see")
parser.add_argument('mode', type=int)
args = parser.parse_args()
main(args.filename, args.category, args.start, args.num, args.mode)
| import argparse
import pandas as pd
import random
import time
class Deck:
def __init__(self, num_cols, front, back):
self.flashcards = []
self.num_cols = num_cols
self.front = front
self.back = back
class Flashcard:
def __init__(self, deck, front, back, column, row):
self.deck = deck
self.front = front
self.back = back
self.column = column
self.row = row
self.correct = False
def show_front(self):
r = '{}: {}'.format(self.deck.front, self.front)
return r
def show_back(self):
return '{}: {}'.format(self.deck.back, self.back)
def show_card(self):
return '{}: {}, {}: {}'.format(self.deck.front, self.front, self.
deck.back, self.back)
def show_reverse(self):
return '{}: {}, {}: {}'.format(self.deck.back, self.back, self.deck
.front, self.front)
def create_deck(filename, num_cols):
df = pd.read_excel(filename)
front = df.columns.values[0]
back = df.columns.values[1]
deck = Deck(num_cols, front, back)
for i in range(num_cols):
front_column = '{}.{}'.format(front, i) if i else front
back_column = '{}.{}'.format(back, i) if i else back
for row in range(df[front_column].size):
f = df[front_column][row]
b = df[back_column][row]
if not (pd.isnull(f) or pd.isnull(b)):
fc = Flashcard(deck, f.strip(), b.strip(), i, row)
deck.flashcards.append(fc)
return deck
def get_cards_from_deck(deck, first_letter, start_index, number_of_cards):
flashcards = [fc for fc in deck.flashcards if fc.column == first_letter or
first_letter == -1]
return flashcards[start_index:number_of_cards + start_index]
def play_game(deck, mode, first_letter, start_index, number_of_cards):
flashcards = get_cards_from_deck(deck, first_letter, start_index,
number_of_cards)
play_cards(mode, deck, flashcards)
def play_cards(mode, deck, cards):
source = deck.front if mode % 2 == 0 else deck.back
target = deck.back if mode % 2 == 0 else deck.front
if mode >= 2:
random.shuffle(cards)
num_cards = len(cards)
start_time = time.time()
for i, fc in enumerate(cards):
source_word = fc.front if mode % 2 == 0 else fc.back
target_word = fc.back if mode % 2 == 0 else fc.front
quiz(fc, source, source_word, target, target_word, i, num_cards)
print('All Done!')
correct = sum(fc.correct == True for fc in cards)
incorrect = len(cards) - correct
print('Correct: {}'.format(correct))
print('Incorrect: {}'.format(incorrect))
if incorrect:
incorrect_cards = [fc for fc in cards if not fc.correct]
print('\n'.join([fc.show_card() for fc in incorrect_cards]))
again = input('review incorrect words (y/n): ')
if again == 'y' or again == '1' or again == 'да':
play_cards(mode, deck, incorrect_cards)
else:
finish_time = time.time()
time_diff = time.gmtime(finish_time - start_time)
avg_time = time.gmtime((finish_time - start_time) / num_cards)
print('Total Time: {}'.format(time.strftime('%H:%M:%S', time_diff)))
print('Time per card: {}'.format(time.strftime('%H:%M:%S', avg_time)))
def quiz(fc, source_language, source_word, target_language, target_word, i,
number_of_cards):
print('Card {}/{}'.format(i + 1, number_of_cards))
print('{} word: {}'.format(source_language, source_word))
answer = input('Enter {} translation: '.format(target_language))
if is_correct(answer, target_word):
fc.correct = True
print('Correct!')
else:
print('Incorrect! Correct answer was: {}'.format(target_word))
n = input('Enter {} translation for {}: '.format(target_language,
source_word))
def is_correct(answer, target):
return format_for_comparison(answer) == format_for_comparison(target)
def format_for_comparison(word):
word = word.strip().lower()
word = word.split('(')
word[0] = word[0].split(', ')
word[0].sort()
word[0] = ', '.join(word[0])
word = '('.join(word)
return word
def learn_words(deck, first_letter, start_index, number_of_cards):
flashcards = get_cards_from_deck(deck, first_letter, start_index,
number_of_cards)
for i, card in enumerate(flashcards):
print('Card {}/{}'.format(i + 1, number_of_cards))
input('{}\nPractice: '.format(card.show_card()))
input('{}\nPractice: '.format(card.show_front()))
input('{}\nPractice: '.format(card.show_back()))
print('Done! Review learned words:')
for card in flashcards:
print('{}'.format(card.show_card()))
def main(filename, first_letter, start_index, number_of_cards, mode):
num_cols = 9
deck = create_deck(filename, num_cols)
print('Welcome to The Flashcard Learner!')
print("Okay! Let's play!")
if mode == 4:
learn_words(deck, first_letter, start_index, number_of_cards)
else:
play_game(deck, mode, first_letter, start_index, number_of_cards)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Learn flashcards')
parser.add_argument('filename', help='name of .xlsx file with vocab',
default='RussianVocab.xlsx')
parser.add_argument('category', type=int, help=
'e.g. which letter are you learning? (-1: all, 0:a, 1:б, 2:в, etc.)')
parser.add_argument('start', type=int, help=
'start index (lists are 0-indexed)')
parser.add_argument('num', type=int, help=
"number of cards you'd like to see")
parser.add_argument('mode', type=int)
args = parser.parse_args()
main(args.filename, args.category, args.start, args.num, args.mode)
| import argparse
import pandas as pd
import random
import time
class Deck:
def __init__(self, num_cols, front, back):
self.flashcards = []
self.num_cols = num_cols
self.front = front
self.back = back
class Flashcard:
def __init__(self, deck, front, back, column, row):
self.deck = deck
self.front = front
self.back = back
self.column = column
self.row = row
self.correct = False
def show_front(self):
r = "{}: {}".format(self.deck.front, self.front)
return r
def show_back(self):
return "{}: {}".format(self.deck.back, self.back)
def show_card(self):
return "{}: {}, {}: {}".format(self.deck.front, self.front, self.deck.back, self.back)
def show_reverse(self):
return "{}: {}, {}: {}".format(self.deck.back, self.back, self.deck.front, self.front)
def create_deck(filename, num_cols):
df = pd.read_excel(filename)
front = df.columns.values[0]
back = df.columns.values[1]
deck = Deck(num_cols, front, back)
for i in range(num_cols):
front_column = "{}.{}".format(front, i) if i else front
back_column = "{}.{}".format(back, i) if i else back
for row in range(df[front_column].size):
f = df[front_column][row]
b = df[back_column][row]
if not (pd.isnull(f) or pd.isnull(b)):
fc = Flashcard(deck, f.strip(), b.strip(), i, row)
deck.flashcards.append(fc)
return deck
def get_cards_from_deck(deck, first_letter, start_index, number_of_cards):
flashcards = [fc for fc in deck.flashcards if fc.column == first_letter or first_letter == -1]
return flashcards[start_index:number_of_cards+start_index]
def play_game(deck, mode, first_letter, start_index, number_of_cards):
flashcards = get_cards_from_deck(deck, first_letter, start_index, number_of_cards)
play_cards(mode, deck, flashcards)
def play_cards(mode, deck, cards):
source = deck.front if mode%2 == 0 else deck.back
target = deck.back if mode%2 == 0 else deck.front
if mode >= 2:
random.shuffle(cards)
num_cards = len(cards)
start_time = time.time()
for i, fc in enumerate(cards):
source_word = fc.front if mode%2==0 else fc.back
target_word = fc.back if mode%2==0 else fc.front
quiz(fc, source, source_word, target, target_word, i, num_cards)
print("All Done!")
correct = sum(fc.correct == True for fc in cards)
incorrect = len(cards) - correct
print("Correct: {}".format(correct))
print("Incorrect: {}".format(incorrect))
if (incorrect):
incorrect_cards = [fc for fc in cards if not fc.correct]
print("\n".join([fc.show_card() for fc in incorrect_cards]))
again = input("review incorrect words (y/n): ")
if again == 'y' or again == '1' or again == 'да':
play_cards(mode, deck, incorrect_cards)
else:
finish_time = time.time()
time_diff = time.gmtime(finish_time - start_time)
avg_time = time.gmtime((finish_time - start_time) / num_cards)
print("Total Time: {}".format(time.strftime("%H:%M:%S", time_diff)))
print("Time per card: {}".format(time.strftime("%H:%M:%S", avg_time)))
def quiz(fc, source_language, source_word, target_language, target_word, i, number_of_cards):
print("Card {}/{}".format(i+1, number_of_cards))
print("{} word: {}".format(source_language, source_word))
answer = input("Enter {} translation: ".format(target_language))
if is_correct(answer, target_word):
fc.correct = True
print("Correct!")
else:
print("Incorrect! Correct answer was: {}".format(target_word))
n = input("Enter {} translation for {}: ".format(target_language, source_word))
def is_correct(answer, target):
return format_for_comparison(answer) == format_for_comparison(target)
def format_for_comparison(word):
# strip whitespace and lowercase
word = word.strip().lower()
# pop off the declensions from the end
word = word.split('(')
# sort the list of meanings
word[0] = word[0].split(', ')
word[0].sort()
# join the first part back together:
word[0] = ', '.join(word[0])
# now add the declensions back on
word = '('.join(word)
return word
def learn_words(deck, first_letter, start_index, number_of_cards):
flashcards = get_cards_from_deck(deck, first_letter, start_index, number_of_cards)
for i, card in enumerate(flashcards):
print("Card {}/{}".format(i+1, number_of_cards))
input("{}\nPractice: ".format(card.show_card()))
input("{}\nPractice: ".format(card.show_front()))
input("{}\nPractice: ".format(card.show_back()))
print("Done! Review learned words:")
for card in flashcards:
print("{}".format(card.show_card()))
def main(filename, first_letter, start_index, number_of_cards, mode):
num_cols = 9
deck = create_deck(filename, num_cols)
print("Welcome to The Flashcard Learner!")
# print("Available Modes:")
# print("0: Quiz - Given a word in {}, provide {} translation".format(deck.front.lower(), deck.back.lower()))
# print("1: Quiz - Given a word in {}, provide {} translation".format(deck.back.lower(), deck.front.lower()))
# print("2: Mode 0 with cards given in random order")
# print("3: Mode 1 with cards given in random order")
# print("4: Learning - Shown {} and {} side by side, practice typing both".format(deck.front.lower(), deck.back.lower()))
# mode = int(input("Enter mode: "))
print("Okay! Let's play!")
if mode == 4:
learn_words(deck, first_letter, start_index, number_of_cards)
else:
play_game(deck, mode, first_letter, start_index, number_of_cards)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Learn flashcards")
parser.add_argument("filename", help="name of .xlsx file with vocab", default="RussianVocab.xlsx")
parser.add_argument("category", type=int, help="e.g. which letter are you learning? (-1: all, 0:a, 1:б, 2:в, etc.)")
parser.add_argument("start", type=int, help="start index (lists are 0-indexed)")
parser.add_argument("num", type=int, help="number of cards you'd like to see")
parser.add_argument("mode", type=int)
args = parser.parse_args()
main(args.filename, args.category, args.start, args.num, args.mode)
| [
8,
17,
18,
19,
20
] |
955 | 48291ab3deb1ca1ba672d3e642d55635a7270171 | <mask token>
class CommunicationController:
<mask token>
def sendCommand(self, right, back, left):
self.count += 1
if self.count >= BUFFER_RESET_BOUND:
board.reset_output_buffer()
board.reset_input_buffer()
self.count = 0
command = ':'.join(('sd', str(right), str(left), str(back)))
if board.is_open:
board.write(command + '\n')
<mask token>
| <mask token>
class CommunicationController:
def __init__(self):
global board
board = serial.Serial(ROBOT_SERIAL, BAUDRATE, serial.EIGHTBITS,
timeout=0)
self.count = 0
print('Communication controller')
def sendCommand(self, right, back, left):
self.count += 1
if self.count >= BUFFER_RESET_BOUND:
board.reset_output_buffer()
board.reset_input_buffer()
self.count = 0
command = ':'.join(('sd', str(right), str(left), str(back)))
if board.is_open:
board.write(command + '\n')
<mask token>
| <mask token>
class CommunicationController:
def __init__(self):
global board
board = serial.Serial(ROBOT_SERIAL, BAUDRATE, serial.EIGHTBITS,
timeout=0)
self.count = 0
print('Communication controller')
def sendCommand(self, right, back, left):
self.count += 1
if self.count >= BUFFER_RESET_BOUND:
board.reset_output_buffer()
board.reset_input_buffer()
self.count = 0
command = ':'.join(('sd', str(right), str(left), str(back)))
if board.is_open:
board.write(command + '\n')
def throwBall(self, value):
if board.is_open:
command = ':'.join(('d', str(value)))
print(command)
board.write(command + '\r\n')
print('Throw')
else:
print('No board')
| import serial
from settings import *
class CommunicationController:
def __init__(self):
global board
board = serial.Serial(ROBOT_SERIAL, BAUDRATE, serial.EIGHTBITS,
timeout=0)
self.count = 0
print('Communication controller')
def sendCommand(self, right, back, left):
self.count += 1
if self.count >= BUFFER_RESET_BOUND:
board.reset_output_buffer()
board.reset_input_buffer()
self.count = 0
command = ':'.join(('sd', str(right), str(left), str(back)))
if board.is_open:
board.write(command + '\n')
def throwBall(self, value):
if board.is_open:
command = ':'.join(('d', str(value)))
print(command)
board.write(command + '\r\n')
print('Throw')
else:
print('No board')
| import serial
from settings import *
class CommunicationController:
def __init__(self):
global board
board = serial.Serial(ROBOT_SERIAL, BAUDRATE, serial.EIGHTBITS, timeout=0)
self.count = 0
print("Communication controller")
def sendCommand(self, right, back, left):
self.count += 1
if self.count >= BUFFER_RESET_BOUND:
board.reset_output_buffer()
board.reset_input_buffer()
self.count = 0
#format:
#sd:BACKWHEEL:RIGHTWHEEL:LEFTWHEEL\n
command = ":".join(("sd", str(right), str(left), str(back) ))
if board.is_open:
board.write(command + '\n')
# print(command)
def throwBall(self, value):
if board.is_open:
command = ":".join(("d",str(value)))
print(command)
board.write(command + '\r\n')
print("Throw")
else:
print("No board")
| [
2,
3,
4,
5,
6
] |
956 | d7b0ff6549d854d21ad1d2d0f5a9e7f75f4ac1d5 | <mask token>
class CompetenceTest(TestCase):
<mask token>
def test_translation(self):
competence = Competence.objects.first()
self.assertEqual(competence.name, 'mining')
competence.set_current_language('sv')
self.assertEqual(competence.name, 'gruvarbete')
<mask token>
| <mask token>
class CompetenceTest(TestCase):
<mask token>
def test_translation(self):
competence = Competence.objects.first()
self.assertEqual(competence.name, 'mining')
competence.set_current_language('sv')
self.assertEqual(competence.name, 'gruvarbete')
def test_translation_fallback(self):
competence = Competence.objects.first()
competence.set_current_language('fi')
self.assertEqual(competence.name, 'mining')
| <mask token>
class CompetenceTest(TestCase):
def setUp(self):
self.competence = Competence.objects.create(name='mining')
self.competence.set_current_language('sv')
self.competence.name = 'gruvarbete'
self.competence.save()
def test_translation(self):
competence = Competence.objects.first()
self.assertEqual(competence.name, 'mining')
competence.set_current_language('sv')
self.assertEqual(competence.name, 'gruvarbete')
def test_translation_fallback(self):
competence = Competence.objects.first()
competence.set_current_language('fi')
self.assertEqual(competence.name, 'mining')
| from django.test import TestCase
from recruitmentapp.apps.core.models import Competence
class CompetenceTest(TestCase):
def setUp(self):
self.competence = Competence.objects.create(name='mining')
self.competence.set_current_language('sv')
self.competence.name = 'gruvarbete'
self.competence.save()
def test_translation(self):
competence = Competence.objects.first()
self.assertEqual(competence.name, 'mining')
competence.set_current_language('sv')
self.assertEqual(competence.name, 'gruvarbete')
def test_translation_fallback(self):
competence = Competence.objects.first()
competence.set_current_language('fi')
self.assertEqual(competence.name, 'mining')
| null | [
2,
3,
4,
5
] |
957 | 8457cdde8f8ad069505c7729b8217e5d272be41e | <mask token>
| <mask token>
class GameMother:
async def a_game(self, num_slots: int, num_colors: int, max_guesses:
int, secret_code: list[Color], reference: (str | None)=None) ->Game:
async with provide(MongoUnitOfWork) as uow:
game = Game.new(id=uow.games.next_id(), num_slots=num_slots,
num_colors=num_colors, max_guesses=max_guesses)
game.secret_code = secret_code
if reference:
game.reference = reference
await uow.games.asave(game)
await uow.commit()
return game
| from apps.mastermind.core.domain.domain import Color, Game
from apps.mastermind.infrastructure.mongo_persistence.uow import MongoUnitOfWork
from composite_root.container import provide
class GameMother:
async def a_game(self, num_slots: int, num_colors: int, max_guesses:
int, secret_code: list[Color], reference: (str | None)=None) ->Game:
async with provide(MongoUnitOfWork) as uow:
game = Game.new(id=uow.games.next_id(), num_slots=num_slots,
num_colors=num_colors, max_guesses=max_guesses)
game.secret_code = secret_code
if reference:
game.reference = reference
await uow.games.asave(game)
await uow.commit()
return game
| from apps.mastermind.core.domain.domain import Color, Game
from apps.mastermind.infrastructure.mongo_persistence.uow import MongoUnitOfWork
from composite_root.container import provide
class GameMother:
async def a_game(
self,
num_slots: int,
num_colors: int,
max_guesses: int,
secret_code: list[Color],
reference: str | None = None,
) -> Game:
async with provide(MongoUnitOfWork) as uow:
game = Game.new(
id=uow.games.next_id(),
num_slots=num_slots,
num_colors=num_colors,
max_guesses=max_guesses,
)
game.secret_code = secret_code
if reference:
game.reference = reference
await uow.games.asave(game)
await uow.commit()
return game
| null | [
0,
1,
2,
3
] |
958 | 690e7cc9047b3a445bf330524df52e2b359f1f13 | <mask token>
| AuthorPath = 'data/Author.csv'
PaperPath = 'buff/Paper.TitleCut.csv'
PaperAuthorPath = 'data/PaperAuthor.csv'
AffilListPath = 'buff/AffilList2.csv'
StopwordPath = 'InternalData/en.lst'
| null | null | null | [
0,
1
] |
959 | 7a9515b1f8cc196eb7551137a1418d5a387e7fd3 | <mask token>
def get_close(x):
if len(x) == 0:
return ''
return x[0]
<mask token>
| <mask token>
def get_close(x):
if len(x) == 0:
return ''
return x[0]
<mask token>
result.to_csv(output_file, sep=',', encoding='utf-8')
| <mask token>
def get_close(x):
if len(x) == 0:
return ''
return x[0]
list_file = sys.argv[1]
rating_file = sys.argv[2]
output_file = sys.argv[3]
movie_list = open(list_file).read().splitlines()
movie_data = pd.DataFrame({'movie': movie_list})
rating_data = pd.read_csv(rating_file)
rating_data['rating'] = rating_data['rating'].astype(str).astype(float)
rating_data['counts'] = pd.Series(1, index=rating_data.index)
rating_data = rating_data.groupby(['title'])['counts', 'rating'].sum(
).reset_index()
rating_data['average_rating'] = pd.Series(rating_data['rating'] /
rating_data['counts'], index=rating_data.index)
movie_data['closed'] = pd.Series(movie_data['movie'], index=movie_data.index)
movie_data['closed'] = movie_data['closed'].apply(lambda x: dl.
get_close_matches(x, rating_data['title'], n=1))
movie_data['closed'] = movie_data['closed'].apply(get_close)
result = movie_data.set_index('closed').join(rating_data.set_index('title')
).reset_index()
result['average_rating'] = result['average_rating'].apply(lambda x: round(x, 2)
)
result = result.drop(['closed', 'rating', 'counts'], axis=1)
result = result.set_index('movie')
result.to_csv(output_file, sep=',', encoding='utf-8')
| import pandas as pd
import numpy as np
import difflib as dl
import sys
def get_close(x):
if len(x) == 0:
return ''
return x[0]
list_file = sys.argv[1]
rating_file = sys.argv[2]
output_file = sys.argv[3]
movie_list = open(list_file).read().splitlines()
movie_data = pd.DataFrame({'movie': movie_list})
rating_data = pd.read_csv(rating_file)
rating_data['rating'] = rating_data['rating'].astype(str).astype(float)
rating_data['counts'] = pd.Series(1, index=rating_data.index)
rating_data = rating_data.groupby(['title'])['counts', 'rating'].sum(
).reset_index()
rating_data['average_rating'] = pd.Series(rating_data['rating'] /
rating_data['counts'], index=rating_data.index)
movie_data['closed'] = pd.Series(movie_data['movie'], index=movie_data.index)
movie_data['closed'] = movie_data['closed'].apply(lambda x: dl.
get_close_matches(x, rating_data['title'], n=1))
movie_data['closed'] = movie_data['closed'].apply(get_close)
result = movie_data.set_index('closed').join(rating_data.set_index('title')
).reset_index()
result['average_rating'] = result['average_rating'].apply(lambda x: round(x, 2)
)
result = result.drop(['closed', 'rating', 'counts'], axis=1)
result = result.set_index('movie')
result.to_csv(output_file, sep=',', encoding='utf-8')
| import pandas as pd
import numpy as np
import difflib as dl
import sys
def get_close(x):
if len(x) == 0:
return ""
return x[0]
list_file = sys.argv[1]
rating_file = sys.argv[2]
output_file = sys.argv[3]
movie_list = open(list_file).read().splitlines()
movie_data = pd.DataFrame({'movie': movie_list})
rating_data = pd.read_csv(rating_file)
rating_data['rating'] = rating_data['rating'].astype(str).astype(float)
rating_data['counts'] = pd.Series(1, index=rating_data.index)
rating_data = rating_data.groupby(['title'])['counts', 'rating'].sum().reset_index()
rating_data['average_rating'] = pd.Series(rating_data['rating']/rating_data['counts'], index=rating_data.index)
movie_data['closed'] = pd.Series(movie_data['movie'], index=movie_data.index)
movie_data['closed'] = movie_data['closed'].apply(lambda x: dl.get_close_matches(x, rating_data['title'], n=1))
movie_data['closed'] = movie_data['closed'].apply(get_close)
result = movie_data.set_index('closed').join(rating_data.set_index('title')).reset_index()
result['average_rating'] = result['average_rating'].apply(lambda x: round(x, 2))
result = result.drop(['closed', 'rating', 'counts'], axis=1)
result = result.set_index('movie')
result.to_csv(output_file, sep=',', encoding='utf-8')
| [
1,
2,
3,
4,
5
] |
960 | 19aad7d45416e311530aa2ce3e854cf1f65d18f5 | <mask token>
def foo():
time.sleep(0.1)
<mask token>
| <mask token>
def foo():
time.sleep(0.1)
<mask token>
p.start()
print('process running: ', p, p.is_alive())
p.terminate()
print('process running: ', p, p.is_alive())
p.join()
print('process running: ', p, p.is_alive())
print('process exit code:', p.exitcode)
| <mask token>
def foo():
time.sleep(0.1)
p = multiprocessing.Process(target=foo)
p.start()
print('process running: ', p, p.is_alive())
p.terminate()
print('process running: ', p, p.is_alive())
p.join()
print('process running: ', p, p.is_alive())
print('process exit code:', p.exitcode)
| import multiprocessing
import time
def foo():
time.sleep(0.1)
p = multiprocessing.Process(target=foo)
p.start()
print('process running: ', p, p.is_alive())
p.terminate()
print('process running: ', p, p.is_alive())
p.join()
print('process running: ', p, p.is_alive())
print('process exit code:', p.exitcode)
| import multiprocessing
import time
def foo():
time.sleep(0.1)
p = multiprocessing.Process(target=foo)
p.start()
print("process running: ", p, p.is_alive())
p.terminate()
print("process running: ", p, p.is_alive())
p.join()
print("process running: ", p, p.is_alive())
print("process exit code:", p.exitcode)
| [
1,
2,
3,
4,
5
] |
961 | 623bd858923d5f9cc109af586fdda01cd3d5fff3 | <mask token>
class CreateRoomForm(FlaskForm):
<mask token>
<mask token>
<mask token>
class CreateUsernameForm(FlaskForm):
username = StringField('username', validators=[InputRequired(message=
'Username Required')])
submit_button = SubmitField('Join Room')
class RegistrationForm(FlaskForm):
""" Registration form """
username = StringField('username_label', validators=[InputRequired(
message='Username required'), Length(min=4, max=25, message=
'Username must be between 4 and 25 characters')])
password = PasswordField('password_label', validators=[InputRequired(
message='Password required'), Length(min=4, message=
'Password must be at least 4 characters')])
confirm_pswd = PasswordField('confirm_pswd_label', validators=[
InputRequired(message='Please retype your password'), EqualTo(
'password', message='Passwords must match')])
submit_button = SubmitField('Click Here to Start')
def validate_username(self, username):
user_object = User.query.filter_by(username=username.data).first()
if user_object:
raise ValidationError(
'Username already exists. Select a different username')
class LoginForm(FlaskForm):
""" Login form """
username = StringField('username_label', validators=[InputRequired(
message='Username required')])
password = PasswordField('password_label', validators=[InputRequired(
message='Password required'), invalid_credentials])
submit_button = SubmitField('Login')
| <mask token>
class CreateRoomForm(FlaskForm):
username = StringField('username', validators=[InputRequired(message=
'Username Required')])
numplayers = StringField('numplayers', validators=[InputRequired(
message='Username Required')])
submit_button = SubmitField('Join Room')
class CreateUsernameForm(FlaskForm):
username = StringField('username', validators=[InputRequired(message=
'Username Required')])
submit_button = SubmitField('Join Room')
class RegistrationForm(FlaskForm):
""" Registration form """
username = StringField('username_label', validators=[InputRequired(
message='Username required'), Length(min=4, max=25, message=
'Username must be between 4 and 25 characters')])
password = PasswordField('password_label', validators=[InputRequired(
message='Password required'), Length(min=4, message=
'Password must be at least 4 characters')])
confirm_pswd = PasswordField('confirm_pswd_label', validators=[
InputRequired(message='Please retype your password'), EqualTo(
'password', message='Passwords must match')])
submit_button = SubmitField('Click Here to Start')
def validate_username(self, username):
user_object = User.query.filter_by(username=username.data).first()
if user_object:
raise ValidationError(
'Username already exists. Select a different username')
class LoginForm(FlaskForm):
""" Login form """
username = StringField('username_label', validators=[InputRequired(
message='Username required')])
password = PasswordField('password_label', validators=[InputRequired(
message='Password required'), invalid_credentials])
submit_button = SubmitField('Login')
| <mask token>
class JoinRoomForm(FlaskForm):
room_code = StringField('room_code', validators=[InputRequired(message=
'Room Code Required')])
username = StringField('username', validators=[InputRequired(message=
'Username Required')])
submit_button = SubmitField('Join Room')
class CreateRoomForm(FlaskForm):
username = StringField('username', validators=[InputRequired(message=
'Username Required')])
numplayers = StringField('numplayers', validators=[InputRequired(
message='Username Required')])
submit_button = SubmitField('Join Room')
class CreateUsernameForm(FlaskForm):
username = StringField('username', validators=[InputRequired(message=
'Username Required')])
submit_button = SubmitField('Join Room')
class RegistrationForm(FlaskForm):
""" Registration form """
username = StringField('username_label', validators=[InputRequired(
message='Username required'), Length(min=4, max=25, message=
'Username must be between 4 and 25 characters')])
password = PasswordField('password_label', validators=[InputRequired(
message='Password required'), Length(min=4, message=
'Password must be at least 4 characters')])
confirm_pswd = PasswordField('confirm_pswd_label', validators=[
InputRequired(message='Please retype your password'), EqualTo(
'password', message='Passwords must match')])
submit_button = SubmitField('Click Here to Start')
def validate_username(self, username):
user_object = User.query.filter_by(username=username.data).first()
if user_object:
raise ValidationError(
'Username already exists. Select a different username')
class LoginForm(FlaskForm):
""" Login form """
username = StringField('username_label', validators=[InputRequired(
message='Username required')])
password = PasswordField('password_label', validators=[InputRequired(
message='Password required'), invalid_credentials])
submit_button = SubmitField('Login')
| <mask token>
def invalid_credentials(form, field):
""" Username and password checker """
username_entered = form.username.data
password_entered = field.data
user_object = User.query.filter_by(username=username_entered).first()
if user_object is None:
raise ValidationError('Username or password is incorrect')
elif not pbkdf2_sha256.verify(password_entered, user_object.password):
raise ValidationError('Username or password is incorrect')
class JoinRoomForm(FlaskForm):
room_code = StringField('room_code', validators=[InputRequired(message=
'Room Code Required')])
username = StringField('username', validators=[InputRequired(message=
'Username Required')])
submit_button = SubmitField('Join Room')
class CreateRoomForm(FlaskForm):
username = StringField('username', validators=[InputRequired(message=
'Username Required')])
numplayers = StringField('numplayers', validators=[InputRequired(
message='Username Required')])
submit_button = SubmitField('Join Room')
class CreateUsernameForm(FlaskForm):
username = StringField('username', validators=[InputRequired(message=
'Username Required')])
submit_button = SubmitField('Join Room')
class RegistrationForm(FlaskForm):
""" Registration form """
username = StringField('username_label', validators=[InputRequired(
message='Username required'), Length(min=4, max=25, message=
'Username must be between 4 and 25 characters')])
password = PasswordField('password_label', validators=[InputRequired(
message='Password required'), Length(min=4, message=
'Password must be at least 4 characters')])
confirm_pswd = PasswordField('confirm_pswd_label', validators=[
InputRequired(message='Please retype your password'), EqualTo(
'password', message='Passwords must match')])
submit_button = SubmitField('Click Here to Start')
def validate_username(self, username):
user_object = User.query.filter_by(username=username.data).first()
if user_object:
raise ValidationError(
'Username already exists. Select a different username')
class LoginForm(FlaskForm):
""" Login form """
username = StringField('username_label', validators=[InputRequired(
message='Username required')])
password = PasswordField('password_label', validators=[InputRequired(
message='Password required'), invalid_credentials])
submit_button = SubmitField('Login')
| from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, SubmitField
from wtforms.validators import InputRequired, Length, EqualTo, ValidationError
from passlib.hash import pbkdf2_sha256
from models import User
def invalid_credentials(form, field):
''' Username and password checker '''
username_entered = form.username.data
password_entered = field.data
user_object = User.query.filter_by(username=username_entered).first()
if user_object is None:
raise ValidationError("Username or password is incorrect")
elif not pbkdf2_sha256.verify(password_entered, user_object.password):
raise ValidationError("Username or password is incorrect")
class JoinRoomForm(FlaskForm):
room_code = StringField('room_code', validators=[InputRequired(message="Room Code Required")])
username = StringField('username', validators=[InputRequired(message="Username Required")])
# room_code = StringField('room_code')
# username = StringField('username')
submit_button = SubmitField('Join Room')
class CreateRoomForm(FlaskForm):
username = StringField('username', validators=[InputRequired(message="Username Required")])
numplayers = StringField('numplayers', validators=[InputRequired(message="Username Required")])
# room_code = StringField('room_code')
# username = StringField('username')
submit_button = SubmitField('Join Room')
class CreateUsernameForm(FlaskForm):
username = StringField('username', validators=[InputRequired(message="Username Required")])
# room_code = StringField('room_code')
# username = StringField('username')
submit_button = SubmitField('Join Room')
class RegistrationForm(FlaskForm):
''' Registration form '''
username = StringField('username_label', validators=[InputRequired(message="Username required"),
Length(min=4, max=25, message="Username must be between 4 and 25 characters")])
password = PasswordField('password_label', validators=[InputRequired(message="Password required"),
Length(min=4, message="Password must be at least 4 characters")])
confirm_pswd = PasswordField('confirm_pswd_label', validators=[InputRequired(message="Please retype your password"),
EqualTo('password', message="Passwords must match")])
submit_button = SubmitField('Click Here to Start')
def validate_username(self, username):
user_object = User.query.filter_by(username=username.data).first()
if user_object:
raise ValidationError("Username already exists. Select a different username")
class LoginForm(FlaskForm):
''' Login form '''
username = StringField('username_label', validators=[InputRequired(message="Username required")])
password = PasswordField('password_label', validators=[InputRequired(message="Password required"), invalid_credentials])
submit_button = SubmitField('Login')
| [
10,
11,
13,
14,
16
] |
962 | 9abc5f18e2eb07afe6bc31d6bd27298350707d1d | <mask token>
| <mask token>
def fairCandySwap(A, B):
sumA, sumB = sum(A), sum(B)
setA, setB = set(A), set(B)
delta = (sumA - sumB) // 2
for j in setB:
if j + delta in setA:
return j + delta, j
<mask token>
| <mask token>
def fairCandySwap(A, B):
sumA, sumB = sum(A), sum(B)
setA, setB = set(A), set(B)
delta = (sumA - sumB) // 2
for j in setB:
if j + delta in setA:
return j + delta, j
print(fairCandySwap(A=[1, 1], B=[2, 2]))
print(fairCandySwap(A=[1, 2], B=[2, 3]))
print(fairCandySwap(A=[2], B=[1, 3]))
print(fairCandySwap(A=[1, 2, 5], B=[2, 4]))
| """
爱丽丝和鲍勃有不同大小的糖果棒:A[i] 是爱丽丝拥有的第 i 根糖果棒的大小,B[j] 是鲍勃拥有的第 j 根糖果棒的大小。
因为他们是朋友,所以他们想交换一根糖果棒,这样交换后,他们都有相同的糖果总量。(一个人拥有的糖果总量是他们拥有的糖果棒大小的总和。)
返回一个整数数组 ans,其中 ans[0] 是爱丽丝必须交换的糖果棒的大小,ans[1] 是 Bob 必须交换的糖果棒的大小。
如果有多个答案,你可以返回其中任何一个。保证答案存在。
"""
def fairCandySwap(A, B):
sumA, sumB = sum(A), sum(B)
setA, setB = set(A), set(B)
delta = (sumA -sumB) // 2
for j in setB:
if j + delta in setA:
return (j+delta, j)
print(fairCandySwap(A = [1,1], B = [2,2]))
print(fairCandySwap(A = [1,2], B = [2,3]))
print(fairCandySwap(A = [2], B = [1,3]))
print(fairCandySwap(A = [1,2,5], B = [2,4]))
| null | [
0,
1,
2,
3
] |
963 | 6f698196e9391d73bd99cda0a098a5bf7a3832ff | <mask token>
| <mask token>
while True:
n = input('Right or left? ')
if n == 'right':
right(60)
forward(100)
elif n == 'left':
left(60)
forward(100)
| from turtle import *
while True:
n = input('Right or left? ')
if n == 'right':
right(60)
forward(100)
elif n == 'left':
left(60)
forward(100)
| from turtle import *
while True:
n=input("Right or left? ")
if n == 'right':
right(60)
forward(100)
elif n == 'left':
left(60)
forward(100)
| null | [
0,
1,
2,
3
] |
964 | b4d412e8b45722a855a16dd64b7bce9b303d0ffe | <mask token>
class Graph:
def __init__(self):
self._graph = defaultdict(list)
self._odd_vertices = []
def add_vertex(self, v):
if not v in self._graph:
self._graph[v] = list()
<mask token>
<mask token>
def check_eulerian(self):
odd_deg = {v: e for e, v in self._graph.iteritems() if len(v) % 2 != 0}
return odd_deg or False
def check_semi_eulerian(self):
odd_deg = {v: e for e, v in self._graph.iteritems() if len(v) % 2 != 0}
return odd_deg in [0, 2] or False
| <mask token>
class Graph:
def __init__(self):
self._graph = defaultdict(list)
self._odd_vertices = []
def add_vertex(self, v):
if not v in self._graph:
self._graph[v] = list()
def add_edge(self, v1, v2):
self._graph[v1].append(v2)
self._check_odd_vertex(v1)
self._graph[v2].append(v1)
self._check_odd_vertex(v2)
<mask token>
def check_eulerian(self):
odd_deg = {v: e for e, v in self._graph.iteritems() if len(v) % 2 != 0}
return odd_deg or False
def check_semi_eulerian(self):
odd_deg = {v: e for e, v in self._graph.iteritems() if len(v) % 2 != 0}
return odd_deg in [0, 2] or False
| <mask token>
class Graph:
def __init__(self):
self._graph = defaultdict(list)
self._odd_vertices = []
def add_vertex(self, v):
if not v in self._graph:
self._graph[v] = list()
def add_edge(self, v1, v2):
self._graph[v1].append(v2)
self._check_odd_vertex(v1)
self._graph[v2].append(v1)
self._check_odd_vertex(v2)
def _check_odd_vertex(self, v):
return len(self._graph[v]) % 2
def check_eulerian(self):
odd_deg = {v: e for e, v in self._graph.iteritems() if len(v) % 2 != 0}
return odd_deg or False
def check_semi_eulerian(self):
odd_deg = {v: e for e, v in self._graph.iteritems() if len(v) % 2 != 0}
return odd_deg in [0, 2] or False
| from collections import defaultdict
class Graph:
def __init__(self):
self._graph = defaultdict(list)
self._odd_vertices = []
def add_vertex(self, v):
if not v in self._graph:
self._graph[v] = list()
def add_edge(self, v1, v2):
self._graph[v1].append(v2)
self._check_odd_vertex(v1)
self._graph[v2].append(v1)
self._check_odd_vertex(v2)
def _check_odd_vertex(self, v):
return len(self._graph[v]) % 2
def check_eulerian(self):
odd_deg = {v: e for e, v in self._graph.iteritems() if len(v) % 2 != 0}
return odd_deg or False
def check_semi_eulerian(self):
odd_deg = {v: e for e, v in self._graph.iteritems() if len(v) % 2 != 0}
return odd_deg in [0, 2] or False
| from collections import defaultdict
class Graph:
def __init__(self):
self._graph = defaultdict(list)
self._odd_vertices = []
def add_vertex(self, v):
if not v in self._graph:
self._graph[v] = list()
def add_edge(self, v1, v2):
self._graph[v1].append(v2)
self._check_odd_vertex(v1)
self._graph[v2].append(v1)
self._check_odd_vertex(v2)
def _check_odd_vertex(self, v):
return len(self._graph[v]) % 2
def check_eulerian(self):
odd_deg = {v:e for (e, v) in self._graph.iteritems() if len(v) % 2 != 0}
return odd_deg or False
def check_semi_eulerian(self):
odd_deg = {v:e for (e, v) in self._graph.iteritems() if len(v) % 2 != 0}
return odd_deg in [0, 2] or False
| [
5,
6,
7,
8,
9
] |
965 | bab78e8a88f9a26cc13fe0c301f82880cee2b680 | <mask token>
| <mask token>
@admin.register(Predictions)
class PredictionsAdmin(admin.ModelAdmin):
pass
| from django.contrib import admin
from .models import Predictions
@admin.register(Predictions)
class PredictionsAdmin(admin.ModelAdmin):
pass
| null | null | [
0,
1,
2
] |
966 | fd450b5454b65ed69b411028788c587f9674760c | <mask token>
def eff_param_string():
"""
The parametrization of the efficiencies from AN-2015-11 as a string that can
be used in a TF1 constructor.
p0 * (1 - p1 * (Erf(pT + p2) - p1 / alpha * (pT - p3 * (pT^2 - p3 / beta * pT^3))))
"""
return (
'[0] * (1 - [1] * (TMath::Erf(x[0] + [2]) - [1] / [4] * (x[0] - [3] * (pow(x[0], 2) - [3] / [5] * pow(x[0], 3)))))'
)
def eff_param():
"""
Get the parametrization as ROOT.TF1
"""
return r.TF1('photon_eff_param', eff_param_string(), 0, 7)
def eff_param_sym():
"""
Get the parametrization as sympy symbolic expression by doing some string
manipulation on the parametrization and then using sympy.sympify
"""
param_str = eff_param_string()
param_str = param_str.replace('TMath::Erf', 'erf').replace('x[0]', 'x')
param_str = re.sub('\\[([0-9])\\]', 'p\\1', param_str)
param_str = re.sub('pow\\((.*?)\\s*?,\\s*?([0-9])\\)', '\\1**\\2',
param_str)
return sp.sympify(param_str)
<mask token>
def get_cov_func(params, corr):
"""
Get the uncertainty function where only pT is left as a free parameter.
This will return a python function that can be evaluated at any given point
"""
eff = eff_param_sym()
free_params = []
for sym in eff.free_symbols:
if sym.name in params and params[sym.name][1] != 0:
free_params.append(sym)
free_params.sort(key=lambda x: int(x.name.replace('p', '')))
cov_eff = func_cov(eff, free_params)
subst_vals = {p: v[0] for p, v in params.iteritems()}
subst_vals.update({('sigma_' + p): v[1] for p, v in params.iteritems()})
subst_vals.update(get_corr_subs_values(corr))
return sp.lambdify(sp.symbols('x'), cov_eff.subs(subst_vals))
<mask token>
def set_params_errors(func, *params):
"""
Set all the parameters as pairs of value and uncertainty (in the order they)
are in the params list. If uncertainty = 0, the parameter is fixed
"""
central = np.array([p[0] for p in params])
uncer = np.array([p[1] for p in params])
func.SetParameters(central)
func.SetParErrors(uncer)
for idx, err in enumerate(uncer):
if err == 0:
func.FixParameter(idx, func.GetParameter(idx))
def load_params(param_file):
"""
Load the parameter file and return the list of dicts stored in it
"""
with open(param_file, 'r') as pfile:
eff_params = json.load(pfile)
return eff_params
def create_param(params, sigma_shift, uncorrelated):
"""
Create the function from the passed params and give it an appropriate name
"""
if sigma_shift == 0:
func = eff_param()
set_params_errors(func, params['p0'], params['p1'], params['p2'],
params['p3'], params['alpha'], params['beta'])
func.SetName(get_name(params['eta'], 'photon_eff_pt'))
return func
params['p4'] = params['alpha']
params['p5'] = params['beta']
corr = np.identity(4) if uncorrelated else CORRELATIONS
graph = get_graph_err(params, corr, np.abs(sigma_shift), 200)
if sigma_shift < 0:
graph = get_lower_band(graph)
else:
graph = get_upper_band(graph)
graph.SetName(get_name(params['eta'], 'photon_eff_pt'))
return graph
def main(args):
"""Main"""
file_option = 'update' if args.update else 'recreate'
outfile = r.TFile.Open(args.outfile, file_option)
all_params = load_params(args.paramfile)
for params in all_params:
eff = create_param(params, args.sigma, args.uncorrelated)
eff.Write()
outfile.Close()
<mask token>
| <mask token>
def eff_param_string():
"""
The parametrization of the efficiencies from AN-2015-11 as a string that can
be used in a TF1 constructor.
p0 * (1 - p1 * (Erf(pT + p2) - p1 / alpha * (pT - p3 * (pT^2 - p3 / beta * pT^3))))
"""
return (
'[0] * (1 - [1] * (TMath::Erf(x[0] + [2]) - [1] / [4] * (x[0] - [3] * (pow(x[0], 2) - [3] / [5] * pow(x[0], 3)))))'
)
def eff_param():
"""
Get the parametrization as ROOT.TF1
"""
return r.TF1('photon_eff_param', eff_param_string(), 0, 7)
def eff_param_sym():
"""
Get the parametrization as sympy symbolic expression by doing some string
manipulation on the parametrization and then using sympy.sympify
"""
param_str = eff_param_string()
param_str = param_str.replace('TMath::Erf', 'erf').replace('x[0]', 'x')
param_str = re.sub('\\[([0-9])\\]', 'p\\1', param_str)
param_str = re.sub('pow\\((.*?)\\s*?,\\s*?([0-9])\\)', '\\1**\\2',
param_str)
return sp.sympify(param_str)
<mask token>
def get_cov_func(params, corr):
"""
Get the uncertainty function where only pT is left as a free parameter.
This will return a python function that can be evaluated at any given point
"""
eff = eff_param_sym()
free_params = []
for sym in eff.free_symbols:
if sym.name in params and params[sym.name][1] != 0:
free_params.append(sym)
free_params.sort(key=lambda x: int(x.name.replace('p', '')))
cov_eff = func_cov(eff, free_params)
subst_vals = {p: v[0] for p, v in params.iteritems()}
subst_vals.update({('sigma_' + p): v[1] for p, v in params.iteritems()})
subst_vals.update(get_corr_subs_values(corr))
return sp.lambdify(sp.symbols('x'), cov_eff.subs(subst_vals))
def get_graph_err(params, corr, n_sigma=1.0, n_points=100):
"""
Get the function evaluated at n_points with uncertainties taking into
account correlations between the parameters
"""
eff_f = eff_param_sym()
eff_f = eff_f.subs({p: v[0] for p, v in params.iteritems()})
eff_f = sp.lambdify(sp.symbols('x'), eff_f)
var_f = get_cov_func(params, corr)
x_bins = np.linspace(0.4, 7, n_points + 1)
x_cent = 0.5 * (x_bins[1:] + x_bins[:-1])
x_err = np.diff(x_bins)
y_cent = np.array([eff_f(x) for x in x_cent])
y_err = np.sqrt(np.array([var_f(x) for x in x_cent])) * n_sigma
return r.TGraphErrors(len(x_cent), x_cent, y_cent, x_err, y_err)
def set_params_errors(func, *params):
"""
Set all the parameters as pairs of value and uncertainty (in the order they)
are in the params list. If uncertainty = 0, the parameter is fixed
"""
central = np.array([p[0] for p in params])
uncer = np.array([p[1] for p in params])
func.SetParameters(central)
func.SetParErrors(uncer)
for idx, err in enumerate(uncer):
if err == 0:
func.FixParameter(idx, func.GetParameter(idx))
def load_params(param_file):
"""
Load the parameter file and return the list of dicts stored in it
"""
with open(param_file, 'r') as pfile:
eff_params = json.load(pfile)
return eff_params
def create_param(params, sigma_shift, uncorrelated):
"""
Create the function from the passed params and give it an appropriate name
"""
if sigma_shift == 0:
func = eff_param()
set_params_errors(func, params['p0'], params['p1'], params['p2'],
params['p3'], params['alpha'], params['beta'])
func.SetName(get_name(params['eta'], 'photon_eff_pt'))
return func
params['p4'] = params['alpha']
params['p5'] = params['beta']
corr = np.identity(4) if uncorrelated else CORRELATIONS
graph = get_graph_err(params, corr, np.abs(sigma_shift), 200)
if sigma_shift < 0:
graph = get_lower_band(graph)
else:
graph = get_upper_band(graph)
graph.SetName(get_name(params['eta'], 'photon_eff_pt'))
return graph
def main(args):
"""Main"""
file_option = 'update' if args.update else 'recreate'
outfile = r.TFile.Open(args.outfile, file_option)
all_params = load_params(args.paramfile)
for params in all_params:
eff = create_param(params, args.sigma, args.uncorrelated)
eff.Write()
outfile.Close()
<mask token>
| <mask token>
def eff_param_string():
"""
The parametrization of the efficiencies from AN-2015-11 as a string that can
be used in a TF1 constructor.
p0 * (1 - p1 * (Erf(pT + p2) - p1 / alpha * (pT - p3 * (pT^2 - p3 / beta * pT^3))))
"""
return (
'[0] * (1 - [1] * (TMath::Erf(x[0] + [2]) - [1] / [4] * (x[0] - [3] * (pow(x[0], 2) - [3] / [5] * pow(x[0], 3)))))'
)
def eff_param():
"""
Get the parametrization as ROOT.TF1
"""
return r.TF1('photon_eff_param', eff_param_string(), 0, 7)
def eff_param_sym():
"""
Get the parametrization as sympy symbolic expression by doing some string
manipulation on the parametrization and then using sympy.sympify
"""
param_str = eff_param_string()
param_str = param_str.replace('TMath::Erf', 'erf').replace('x[0]', 'x')
param_str = re.sub('\\[([0-9])\\]', 'p\\1', param_str)
param_str = re.sub('pow\\((.*?)\\s*?,\\s*?([0-9])\\)', '\\1**\\2',
param_str)
return sp.sympify(param_str)
def get_corr_subs_values(corr):
"""
Get the dictionary of substitution values for the correlation matrix
"""
subs_dict = {}
n_dim = corr.shape[0]
for irow in xrange(0, n_dim):
for icol in xrange(irow + 1, n_dim):
subs_dict['rho_p{}p{}'.format(irow, icol)] = corr[irow, icol]
return subs_dict
def get_cov_func(params, corr):
"""
Get the uncertainty function where only pT is left as a free parameter.
This will return a python function that can be evaluated at any given point
"""
eff = eff_param_sym()
free_params = []
for sym in eff.free_symbols:
if sym.name in params and params[sym.name][1] != 0:
free_params.append(sym)
free_params.sort(key=lambda x: int(x.name.replace('p', '')))
cov_eff = func_cov(eff, free_params)
subst_vals = {p: v[0] for p, v in params.iteritems()}
subst_vals.update({('sigma_' + p): v[1] for p, v in params.iteritems()})
subst_vals.update(get_corr_subs_values(corr))
return sp.lambdify(sp.symbols('x'), cov_eff.subs(subst_vals))
def get_graph_err(params, corr, n_sigma=1.0, n_points=100):
"""
Get the function evaluated at n_points with uncertainties taking into
account correlations between the parameters
"""
eff_f = eff_param_sym()
eff_f = eff_f.subs({p: v[0] for p, v in params.iteritems()})
eff_f = sp.lambdify(sp.symbols('x'), eff_f)
var_f = get_cov_func(params, corr)
x_bins = np.linspace(0.4, 7, n_points + 1)
x_cent = 0.5 * (x_bins[1:] + x_bins[:-1])
x_err = np.diff(x_bins)
y_cent = np.array([eff_f(x) for x in x_cent])
y_err = np.sqrt(np.array([var_f(x) for x in x_cent])) * n_sigma
return r.TGraphErrors(len(x_cent), x_cent, y_cent, x_err, y_err)
def set_params_errors(func, *params):
"""
Set all the parameters as pairs of value and uncertainty (in the order they)
are in the params list. If uncertainty = 0, the parameter is fixed
"""
central = np.array([p[0] for p in params])
uncer = np.array([p[1] for p in params])
func.SetParameters(central)
func.SetParErrors(uncer)
for idx, err in enumerate(uncer):
if err == 0:
func.FixParameter(idx, func.GetParameter(idx))
def load_params(param_file):
"""
Load the parameter file and return the list of dicts stored in it
"""
with open(param_file, 'r') as pfile:
eff_params = json.load(pfile)
return eff_params
def create_param(params, sigma_shift, uncorrelated):
"""
Create the function from the passed params and give it an appropriate name
"""
if sigma_shift == 0:
func = eff_param()
set_params_errors(func, params['p0'], params['p1'], params['p2'],
params['p3'], params['alpha'], params['beta'])
func.SetName(get_name(params['eta'], 'photon_eff_pt'))
return func
params['p4'] = params['alpha']
params['p5'] = params['beta']
corr = np.identity(4) if uncorrelated else CORRELATIONS
graph = get_graph_err(params, corr, np.abs(sigma_shift), 200)
if sigma_shift < 0:
graph = get_lower_band(graph)
else:
graph = get_upper_band(graph)
graph.SetName(get_name(params['eta'], 'photon_eff_pt'))
return graph
def main(args):
"""Main"""
file_option = 'update' if args.update else 'recreate'
outfile = r.TFile.Open(args.outfile, file_option)
all_params = load_params(args.paramfile)
for params in all_params:
eff = create_param(params, args.sigma, args.uncorrelated)
eff.Write()
outfile.Close()
<mask token>
| <mask token>
r.PyConfig.IgnoreCommandLineOptions = True
<mask token>
COVARIANCE = np.array([[1.181e-06, 1.545e-06, -4.328e-06, 4.156e-06], [
1.545e-06, 7.215e-06, -1.714e-05, 5.177e-06], [-4.328e-06, -1.714e-05,
4.228e-05, -1.481e-05], [4.156e-06, 5.177e-06, -1.481e-05, 1.506e-05]])
CORRELATIONS = np.matmul(np.matmul(np.diag(1 / np.sqrt(np.diag(COVARIANCE))
), COVARIANCE), np.diag(1 / np.sqrt(np.diag(COVARIANCE))))
def eff_param_string():
"""
The parametrization of the efficiencies from AN-2015-11 as a string that can
be used in a TF1 constructor.
p0 * (1 - p1 * (Erf(pT + p2) - p1 / alpha * (pT - p3 * (pT^2 - p3 / beta * pT^3))))
"""
return (
'[0] * (1 - [1] * (TMath::Erf(x[0] + [2]) - [1] / [4] * (x[0] - [3] * (pow(x[0], 2) - [3] / [5] * pow(x[0], 3)))))'
)
def eff_param():
"""
Get the parametrization as ROOT.TF1
"""
return r.TF1('photon_eff_param', eff_param_string(), 0, 7)
def eff_param_sym():
"""
Get the parametrization as sympy symbolic expression by doing some string
manipulation on the parametrization and then using sympy.sympify
"""
param_str = eff_param_string()
param_str = param_str.replace('TMath::Erf', 'erf').replace('x[0]', 'x')
param_str = re.sub('\\[([0-9])\\]', 'p\\1', param_str)
param_str = re.sub('pow\\((.*?)\\s*?,\\s*?([0-9])\\)', '\\1**\\2',
param_str)
return sp.sympify(param_str)
def get_corr_subs_values(corr):
"""
Get the dictionary of substitution values for the correlation matrix
"""
subs_dict = {}
n_dim = corr.shape[0]
for irow in xrange(0, n_dim):
for icol in xrange(irow + 1, n_dim):
subs_dict['rho_p{}p{}'.format(irow, icol)] = corr[irow, icol]
return subs_dict
def get_cov_func(params, corr):
"""
Get the uncertainty function where only pT is left as a free parameter.
This will return a python function that can be evaluated at any given point
"""
eff = eff_param_sym()
free_params = []
for sym in eff.free_symbols:
if sym.name in params and params[sym.name][1] != 0:
free_params.append(sym)
free_params.sort(key=lambda x: int(x.name.replace('p', '')))
cov_eff = func_cov(eff, free_params)
subst_vals = {p: v[0] for p, v in params.iteritems()}
subst_vals.update({('sigma_' + p): v[1] for p, v in params.iteritems()})
subst_vals.update(get_corr_subs_values(corr))
return sp.lambdify(sp.symbols('x'), cov_eff.subs(subst_vals))
def get_graph_err(params, corr, n_sigma=1.0, n_points=100):
"""
Get the function evaluated at n_points with uncertainties taking into
account correlations between the parameters
"""
eff_f = eff_param_sym()
eff_f = eff_f.subs({p: v[0] for p, v in params.iteritems()})
eff_f = sp.lambdify(sp.symbols('x'), eff_f)
var_f = get_cov_func(params, corr)
x_bins = np.linspace(0.4, 7, n_points + 1)
x_cent = 0.5 * (x_bins[1:] + x_bins[:-1])
x_err = np.diff(x_bins)
y_cent = np.array([eff_f(x) for x in x_cent])
y_err = np.sqrt(np.array([var_f(x) for x in x_cent])) * n_sigma
return r.TGraphErrors(len(x_cent), x_cent, y_cent, x_err, y_err)
def set_params_errors(func, *params):
"""
Set all the parameters as pairs of value and uncertainty (in the order they)
are in the params list. If uncertainty = 0, the parameter is fixed
"""
central = np.array([p[0] for p in params])
uncer = np.array([p[1] for p in params])
func.SetParameters(central)
func.SetParErrors(uncer)
for idx, err in enumerate(uncer):
if err == 0:
func.FixParameter(idx, func.GetParameter(idx))
def load_params(param_file):
"""
Load the parameter file and return the list of dicts stored in it
"""
with open(param_file, 'r') as pfile:
eff_params = json.load(pfile)
return eff_params
def create_param(params, sigma_shift, uncorrelated):
"""
Create the function from the passed params and give it an appropriate name
"""
if sigma_shift == 0:
func = eff_param()
set_params_errors(func, params['p0'], params['p1'], params['p2'],
params['p3'], params['alpha'], params['beta'])
func.SetName(get_name(params['eta'], 'photon_eff_pt'))
return func
params['p4'] = params['alpha']
params['p5'] = params['beta']
corr = np.identity(4) if uncorrelated else CORRELATIONS
graph = get_graph_err(params, corr, np.abs(sigma_shift), 200)
if sigma_shift < 0:
graph = get_lower_band(graph)
else:
graph = get_upper_band(graph)
graph.SetName(get_name(params['eta'], 'photon_eff_pt'))
return graph
def main(args):
"""Main"""
file_option = 'update' if args.update else 'recreate'
outfile = r.TFile.Open(args.outfile, file_option)
all_params = load_params(args.paramfile)
for params in all_params:
eff = create_param(params, args.sigma, args.uncorrelated)
eff.Write()
outfile.Close()
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser(description=
'script to generate TF1 photon efficiency parametrizations from json file holding the fit parameters'
)
parser.add_argument('paramfile', help=
'json file containing the fitted parameters')
parser.add_argument('-o', '--outfile', help=
'root file into which the TF1 should be stored', default=
'photon_effs_param.root')
parser.add_argument('-u', '--update', help=
'update the output file instead of recreating it', default=False,
action='store_true')
parser.add_argument('-s', '--sigma', help=
'Use the central value + [sigma] * uncertainty for each parameter',
type=float, default=0)
parser.add_argument('--uncorrelated', default=False, action=
'store_true', help=
'Assume that the free parameters are uncorrelated instead of using correlation parameters from a global fit'
)
clargs = parser.parse_args()
main(clargs)
| #!/usr/bin/env python
"""
Script that generates the photon efficiency curves and stores them in a root
file.
For the moment only the pT curves for the different eta bins are created
"""
import re
import json
import ROOT as r
r.PyConfig.IgnoreCommandLineOptions = True
import numpy as np
import sympy as sp
from utils.symbolic import func_cov
from utils.graph_utils import get_lower_band, get_upper_band
from common_func import get_name
# Covariance matrix from the fit integrated over the whole eta range, where
# alpha and beta were fixed. This will be used to calculate the correlation
# coefficients between the fitted parameters, which will then be used to get
# the uncertainty bands for the parametrization
COVARIANCE = np.array([
[1.181e-06, 1.545e-06, -4.328e-06, 4.156e-06],
[1.545e-06, 7.215e-06, -1.714e-05, 5.177e-06],
[-4.328e-06, -1.714e-05, 4.228e-05, -1.481e-05],
[4.156e-06, 5.177e-06, -1.481e-05, 1.506e-05],
])
# corr = diag(cov)^{-1/2} * cov * diag(cov)^{-1/2}
CORRELATIONS = np.matmul(
np.matmul(
np.diag(1/np.sqrt(np.diag(COVARIANCE))), COVARIANCE,
), np.diag(1/np.sqrt(np.diag(COVARIANCE)))
)
def eff_param_string():
"""
The parametrization of the efficiencies from AN-2015-11 as a string that can
be used in a TF1 constructor.
p0 * (1 - p1 * (Erf(pT + p2) - p1 / alpha * (pT - p3 * (pT^2 - p3 / beta * pT^3))))
"""
return '[0] * (1 - [1] * (TMath::Erf(x[0] + [2]) - [1] / [4] * (x[0] - [3] * (pow(x[0], 2) - [3] / [5] * pow(x[0], 3)))))'
def eff_param():
"""
Get the parametrization as ROOT.TF1
"""
return r.TF1('photon_eff_param', eff_param_string(), 0, 7)
def eff_param_sym():
"""
Get the parametrization as sympy symbolic expression by doing some string
manipulation on the parametrization and then using sympy.sympify
"""
param_str = eff_param_string()
# replace call to ROOTs erf and give x[0] a parseable name
param_str = param_str.replace('TMath::Erf', 'erf').replace('x[0]', 'x')
# convert parameters from [x] notation to px notation
param_str = re.sub(r'\[([0-9])\]', r'p\1', param_str)
# replace pow(x, y) with x**y (pythonic) syntax
param_str = re.sub(r'pow\((.*?)\s*?,\s*?([0-9])\)', r'\1**\2', param_str)
return sp.sympify(param_str)
def get_corr_subs_values(corr):
"""
Get the dictionary of substitution values for the correlation matrix
"""
subs_dict = {}
n_dim = corr.shape[0]
for irow in xrange(0, n_dim):
for icol in xrange(irow + 1, n_dim):
subs_dict['rho_p{}p{}'.format(irow, icol)] = corr[irow, icol]
return subs_dict
def get_cov_func(params, corr):
"""
Get the uncertainty function where only pT is left as a free parameter.
This will return a python function that can be evaluated at any given point
"""
eff = eff_param_sym()
# get the list of free parameters
free_params = []
for sym in eff.free_symbols:
if sym.name in params and params[sym.name][1] != 0:
free_params.append(sym)
# sort the parameters according to their name, such that the correlation
# coefficients actually match
free_params.sort(key=lambda x: int(x.name.replace('p', '')))
cov_eff = func_cov(eff, free_params)
# build up the dictionary of symbol -> value that will be substituted.
# In the end the returned function will only have one free parameter left
subst_vals = {
p: v[0] for p, v in params.iteritems()
}
subst_vals.update({
'sigma_' + p: v[1] for p, v in params.iteritems()
})
subst_vals.update(
get_corr_subs_values(corr)
)
# NOTE: here it is assumed that 'x' is the only free parameter left
return sp.lambdify(sp.symbols('x'), cov_eff.subs(subst_vals))
def get_graph_err(params, corr, n_sigma=1.0, n_points=100):
"""
Get the function evaluated at n_points with uncertainties taking into
account correlations between the parameters
"""
# central function
eff_f = eff_param_sym()
eff_f = eff_f.subs({p: v[0] for p, v in params.iteritems()})
# NOTE: assume that 'x' is the only free parameter left
eff_f = sp.lambdify(sp.symbols('x'), eff_f)
# uncertainty function (as function of pT)
var_f = get_cov_func(params, corr)
x_bins = np.linspace(0.4, 7, n_points + 1)
x_cent = 0.5 * (x_bins[1:] + x_bins[:-1]) # bin centers
x_err = np.diff(x_bins) # "uncertainties" in x
y_cent = np.array([eff_f(x) for x in x_cent])
y_err = np.sqrt(np.array([var_f(x) for x in x_cent])) * n_sigma
return r.TGraphErrors(len(x_cent), x_cent, y_cent, x_err, y_err)
def set_params_errors(func, *params):
"""
Set all the parameters as pairs of value and uncertainty (in the order they)
are in the params list. If uncertainty = 0, the parameter is fixed
"""
central = np.array([p[0] for p in params])
uncer = np.array([p[1] for p in params])
func.SetParameters(central)
func.SetParErrors(uncer)
for idx, err in enumerate(uncer):
if err == 0:
func.FixParameter(idx, func.GetParameter(idx))
def load_params(param_file):
"""
Load the parameter file and return the list of dicts stored in it
"""
with open(param_file, 'r') as pfile:
eff_params = json.load(pfile)
return eff_params
def create_param(params, sigma_shift, uncorrelated):
    """
    Build the efficiency object for one parameter set and name it after the
    set's eta value.

    For sigma_shift == 0 the exact parametrization is returned as a TF1.
    Otherwise the function is sampled at 200 points and the upper
    (sigma_shift > 0) or lower (sigma_shift < 0) uncertainty band is
    returned as a graph.
    """
    if sigma_shift == 0:
        # central result: use the exact parametrization as a TF1
        func = eff_param()
        set_params_errors(func, params['p0'], params['p1'], params['p2'],
                          params['p3'], params['alpha'], params['beta'])
        func.SetName(get_name(params['eta'], 'photon_eff_pt'))
        return func
    # The symbolic machinery (eff_param_sym) cannot use the names
    # alpha/beta (they clash with sympy.sympify), so it expects them as
    # p4/p5. Aliasing them here is safe: the original keys are ignored by
    # the substitution downstream.
    params['p4'] = params['alpha']
    params['p5'] = params['beta']
    # identity matrix == uncorrelated parameters; otherwise use the global
    # correlation matrix obtained from the fit
    corr = np.identity(4) if uncorrelated else CORRELATIONS
    band = get_graph_err(params, corr, np.abs(sigma_shift), 200)
    band = get_lower_band(band) if sigma_shift < 0 else get_upper_band(band)
    band.SetName(get_name(params['eta'], 'photon_eff_pt'))
    return band
def main(args):
    """Write one efficiency object per parameter set into the output file."""
    open_mode = 'update' if args.update else 'recreate'
    outfile = r.TFile.Open(args.outfile, open_mode)
    for params in load_params(args.paramfile):
        create_param(params, args.sigma, args.uncorrelated).Write()
    outfile.Close()
# Command-line entry point: build the CLI (one required positional JSON
# parameter file plus options for output file, file mode and the sigma band)
# and delegate to main().
if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser(description='script to generate TF1 '
                                     'photon efficiency parametrizations from '
                                     'json file holding the fit parameters')
    parser.add_argument('paramfile', help='json file containing the fitted '
                        'parameters')
    parser.add_argument('-o', '--outfile', help='root file into which the TF1 '
                        'should be stored', default='photon_effs_param.root')
    parser.add_argument('-u', '--update', help='update the output file instead '
                        'of recreating it', default=False, action='store_true')
    parser.add_argument('-s', '--sigma', help='Use the central value + [sigma] '
                        '* uncertainty for each parameter', type=float,
                        default=0)
    parser.add_argument('--uncorrelated', default=False, action='store_true',
                        help='Assume that the free parameters are uncorrelated '
                        'instead of using correlation parameters from a global '
                        'fit')
    clargs = parser.parse_args()
    main(clargs)
| [
8,
9,
10,
12,
14
] |
967 | 176ffac7ad47f5c43a24acc664631f8353ec5100 | <mask token>
| <mask token>
with open('txt.txt', 'r') as f:
data = f.readlines()
line = 0
for i in range(10, 110, 10):
agg = 0
for j in range(num_tests):
agg += int(data[line])
line += 1
res.append(agg / num_tests)
<mask token>
plt.plot(x, y, 'o')
plt.plot(x, p(x), label='Best fit 2 degree polynomial')
plt.title('#messages vs. #nodes in graph (GHS algo.) (Averaged over 100 runs)')
plt.xlabel('Number of nodes in fully connected graph')
plt.ylabel('Number of messages')
plt.legend()
plt.savefig('Messages.svg')
plt.clf()
<mask token>
with open('txt2.txt', 'r') as f:
data = f.readlines()
line = 0
for procs in range(1, 13):
times = []
for i in range(10, 110, 10):
temp = 0
for num in range(num_tests):
temp += float(data[line].split()[1])
line += 3
times.append(temp / num_tests)
res.append(times)
<mask token>
plt.title('Time taken vs. number of cores used (Averaged over 10 runs)')
plt.xlabel('Number of nodes in fully connected graph')
plt.ylabel('Time taken (in seconds)')
for procs in [1, 2, 4, 8, 12]:
plt.plot(x, res[procs - 1], label=str(procs) + ' Cores')
plt.legend()
plt.savefig('Time.svg')
| <mask token>
steps = 10
num_tests = 100
res = []
with open('txt.txt', 'r') as f:
data = f.readlines()
line = 0
for i in range(10, 110, 10):
agg = 0
for j in range(num_tests):
agg += int(data[line])
line += 1
res.append(agg / num_tests)
x = list(range(10, 110, steps))
y = res
z = np.polyfit(x, res, 2)
p = np.poly1d(z)
plt.plot(x, y, 'o')
plt.plot(x, p(x), label='Best fit 2 degree polynomial')
plt.title('#messages vs. #nodes in graph (GHS algo.) (Averaged over 100 runs)')
plt.xlabel('Number of nodes in fully connected graph')
plt.ylabel('Number of messages')
plt.legend()
plt.savefig('Messages.svg')
plt.clf()
steps = 10
num_tests = 10
res = []
with open('txt2.txt', 'r') as f:
data = f.readlines()
line = 0
for procs in range(1, 13):
times = []
for i in range(10, 110, 10):
temp = 0
for num in range(num_tests):
temp += float(data[line].split()[1])
line += 3
times.append(temp / num_tests)
res.append(times)
x = list(range(10, 110, steps))
y = res
plt.title('Time taken vs. number of cores used (Averaged over 10 runs)')
plt.xlabel('Number of nodes in fully connected graph')
plt.ylabel('Time taken (in seconds)')
for procs in [1, 2, 4, 8, 12]:
plt.plot(x, res[procs - 1], label=str(procs) + ' Cores')
plt.legend()
plt.savefig('Time.svg')
import matplotlib.pyplot as plt
import numpy as np
# --- Plot 1: message count vs. graph size -----------------------------------
# txt.txt holds num_tests integer message counts per graph size
# (10, 20, ..., 100 nodes), one value per line, in that order.
steps = 10
num_tests = 100
res = []
with open('txt.txt', 'r') as f:
    data = f.readlines()
line = 0
for i in range(10, 110, 10):
    agg = 0
    # average the message count over num_tests runs for this graph size
    for j in range(num_tests):
        agg += int(data[line])
        line += 1
    res.append(agg / num_tests)
x = list(range(10, 110, steps))
y = res
# least-squares quadratic fit to the averaged counts
z = np.polyfit(x, res, 2)
p = np.poly1d(z)
plt.plot(x, y, 'o')
plt.plot(x, p(x), label='Best fit 2 degree polynomial')
plt.title('#messages vs. #nodes in graph (GHS algo.) (Averaged over 100 runs)')
plt.xlabel('Number of nodes in fully connected graph')
plt.ylabel('Number of messages')
plt.legend()
plt.savefig('Messages.svg')
plt.clf()
# --- Plot 2: wall-clock time vs. core count ---------------------------------
# txt2.txt is read in 3-line records; the second whitespace-separated field
# of each record's first line is the elapsed time (presumably seconds, per
# the axis label -- confirm against the producer of txt2.txt).
steps = 10
num_tests = 10
res = []
with open('txt2.txt', 'r') as f:
    data = f.readlines()
line = 0
for procs in range(1, 13):
    times = []
    for i in range(10, 110, 10):
        temp = 0
        for num in range(num_tests):
            temp += float(data[line].split()[1])
            line += 3  # skip ahead to the next 3-line record
        times.append(temp / num_tests)
    res.append(times)
x = list(range(10, 110, steps))
y = res
plt.title('Time taken vs. number of cores used (Averaged over 10 runs)')
plt.xlabel('Number of nodes in fully connected graph')
plt.ylabel('Time taken (in seconds)')
# plot only a representative subset of core counts
for procs in [1, 2, 4, 8, 12]:
    plt.plot(x, res[procs - 1], label=str(procs) + ' Cores')
plt.legend()
plt.savefig('Time.svg')
| import matplotlib.pyplot as plt
import numpy as np
steps = 10
num_tests = 100
res = []
with open('txt.txt', 'r') as f:
data = f.readlines()
line = 0
for i in range(10, 110, 10):
agg = 0
for j in range(num_tests):
agg += int(data[line])
line += 1
res.append(agg/num_tests)
x = list(range(10, 110, steps))
y = res
z = np.polyfit(x, res, 2)
# print(z)
p = np.poly1d(z)
plt.plot(x, y, 'o')
plt.plot(x, p(x),label = "Best fit 2 degree polynomial")
plt.title("#messages vs. #nodes in graph (GHS algo.) (Averaged over 100 runs)")
plt.xlabel("Number of nodes in fully connected graph")
plt.ylabel("Number of messages")
plt.legend()
# plt.show()
plt.savefig("Messages.svg")
plt.clf()
steps = 10
num_tests = 10
res = []
with open('txt2.txt', 'r') as f:
data = f.readlines()
line = 0
for procs in range(1,13):
times = []
for i in range(10, 110, 10):
temp = 0
for num in range(num_tests):
temp += float(data[line].split()[1])
line += 3
times.append(temp/num_tests)
res.append(times)
x = list(range(10, 110, steps))
y = res
# z = np.polyfit(x, res, 2)
# print(z)
# p = np.poly1d(z)
# plt.plot(x, y, 'o')
# plt.plot(x, p(x),label = "Best fit 2 degree polynomial")
plt.title("Time taken vs. number of cores used (Averaged over 10 runs)")
plt.xlabel("Number of nodes in fully connected graph")
plt.ylabel("Time taken (in seconds)")
# for procs in range(1,13):
for procs in [1,2,4,8,12]:
plt.plot(x,res[procs-1],label = str((procs))+' Cores')
plt.legend()
# plt.show()
plt.savefig("Time.svg")
| [
0,
1,
2,
3,
4
] |
968 | b3d26d01d45c073192d06c8e94c06f7eae267b14 | <mask token>
| <mask token>
for line in old_file.readlines():
cleaned_line = line.replace(',', '.')
new_file.write(cleaned_line)
old_file.close
new_file.close
| old_file = open('new.csv', 'r')
new_file = open('new1,csv', 'w')
for line in old_file.readlines():
cleaned_line = line.replace(',', '.')
new_file.write(cleaned_line)
old_file.close
new_file.close
# Replace decimal commas with dots line by line, writing a cleaned copy.
# BUG FIX: the original called 'old_file.close' / 'new_file.close' without
# parentheses, which references the bound method but never invokes it, so
# neither file was ever closed (the output could stay partially flushed).
# Using 'with' guarantees both files are closed even on error.
# NOTE(review): the output name 'new1,csv' contains a comma and is likely a
# typo for 'new1.csv' -- kept as-is to preserve behavior; confirm intent.
with open("new.csv", "r") as old_file, open("new1,csv", "w") as new_file:
    for line in old_file:
        cleaned_line = line.replace(',', '.')
        new_file.write(cleaned_line)
0,
1,
2,
3
] |
969 | aa13278a4686e9bab7948c2f212f87f9bd6eee00 | <mask token>
def recvall(sock):
BUFF_SIZE = 4096
data = b''
while True:
part = sock.recv(BUFF_SIZE)
data += part
if len(part) < BUFF_SIZE:
break
return data
<mask token>
def extract_start(data):
i = 0
while True:
if data[i] == END[0] and data[i + 1] == END[0] and data[i + 2] == END[0
]:
return i + 3
i += 1
def bytes_to_bits(data, begin):
bits = ''
for i in range(begin, len(data)):
bits += format(data[i], '08b')
return bits
def data_to_extract(data, dict):
begin = extract_start(data)
print(begin)
data = bytes_to_bits(data, begin)
dict = {y: x for x, y in dict.items()}
text = ''
temp_code = ''
for i in range(len(data)):
temp_code += data[i]
if temp_code in dict:
text += dict[temp_code]
temp_code = ''
return text
def recieve_data(codedpath, decodedpath, ip, port):
port = int(port)
sock = socket.socket()
sock.bind((ip, int(port)))
sock.listen()
conn, addr = sock.accept()
print('Połączono:', addr)
rec_data = recvall(conn)
rec_dict = create_dict(rec_data)
extracted = data_to_extract(rec_data, rec_dict)
print('ODEBRANY SLOWNIK\n')
print(rec_dict)
print(extracted)
f = open(codedpath, 'wb')
f.write(rec_data)
f.close()
f = open(decodedpath, 'w')
f.write(extracted)
f.close()
return 0
| <mask token>
def recvall(sock):
BUFF_SIZE = 4096
data = b''
while True:
part = sock.recv(BUFF_SIZE)
data += part
if len(part) < BUFF_SIZE:
break
return data
def create_dict(data):
dict = {}
i = 0
while True:
dict[chr(data[i])] = ''
j = 1
while data[i + j] != END[0]:
dict[chr(data[i])] += str(chr(data[i + j]))
j += 1
i += 1 + j
if data[i] == END[0] and data[i + 1] == END[0]:
break
return dict
def extract_start(data):
i = 0
while True:
if data[i] == END[0] and data[i + 1] == END[0] and data[i + 2] == END[0
]:
return i + 3
i += 1
def bytes_to_bits(data, begin):
bits = ''
for i in range(begin, len(data)):
bits += format(data[i], '08b')
return bits
def data_to_extract(data, dict):
begin = extract_start(data)
print(begin)
data = bytes_to_bits(data, begin)
dict = {y: x for x, y in dict.items()}
text = ''
temp_code = ''
for i in range(len(data)):
temp_code += data[i]
if temp_code in dict:
text += dict[temp_code]
temp_code = ''
return text
def recieve_data(codedpath, decodedpath, ip, port):
port = int(port)
sock = socket.socket()
sock.bind((ip, int(port)))
sock.listen()
conn, addr = sock.accept()
print('Połączono:', addr)
rec_data = recvall(conn)
rec_dict = create_dict(rec_data)
extracted = data_to_extract(rec_data, rec_dict)
print('ODEBRANY SLOWNIK\n')
print(rec_dict)
print(extracted)
f = open(codedpath, 'wb')
f.write(rec_data)
f.close()
f = open(decodedpath, 'w')
f.write(extracted)
f.close()
return 0
| <mask token>
END = bytearray()
END.append(255)
print(END[0])
def recvall(sock):
BUFF_SIZE = 4096
data = b''
while True:
part = sock.recv(BUFF_SIZE)
data += part
if len(part) < BUFF_SIZE:
break
return data
def create_dict(data):
dict = {}
i = 0
while True:
dict[chr(data[i])] = ''
j = 1
while data[i + j] != END[0]:
dict[chr(data[i])] += str(chr(data[i + j]))
j += 1
i += 1 + j
if data[i] == END[0] and data[i + 1] == END[0]:
break
return dict
def extract_start(data):
i = 0
while True:
if data[i] == END[0] and data[i + 1] == END[0] and data[i + 2] == END[0
]:
return i + 3
i += 1
def bytes_to_bits(data, begin):
bits = ''
for i in range(begin, len(data)):
bits += format(data[i], '08b')
return bits
def data_to_extract(data, dict):
begin = extract_start(data)
print(begin)
data = bytes_to_bits(data, begin)
dict = {y: x for x, y in dict.items()}
text = ''
temp_code = ''
for i in range(len(data)):
temp_code += data[i]
if temp_code in dict:
text += dict[temp_code]
temp_code = ''
return text
def recieve_data(codedpath, decodedpath, ip, port):
port = int(port)
sock = socket.socket()
sock.bind((ip, int(port)))
sock.listen()
conn, addr = sock.accept()
print('Połączono:', addr)
rec_data = recvall(conn)
rec_dict = create_dict(rec_data)
extracted = data_to_extract(rec_data, rec_dict)
print('ODEBRANY SLOWNIK\n')
print(rec_dict)
print(extracted)
f = open(codedpath, 'wb')
f.write(rec_data)
f.close()
f = open(decodedpath, 'w')
f.write(extracted)
f.close()
return 0
| import socket
END = bytearray()
END.append(255)
print(END[0])
def recvall(sock):
    """
    Receive the full transmission from *sock* in 4 KiB chunks and return it
    as a single bytes object. A chunk shorter than the buffer size (which
    includes an empty chunk on connection close) ends the read.
    """
    chunk_size = 4096  # 4 KiB per read
    received = b''
    while True:
        chunk = sock.recv(chunk_size)
        received += chunk
        if len(chunk) < chunk_size:
            # short (or empty) read: sender is done
            return received
def create_dict(data):
    """
    Rebuild the symbol -> code dictionary from the received header bytes.

    Header layout, repeated per entry: one symbol byte, followed by the
    code's characters (bytes, presumably '0'/'1' -- see data_to_extract),
    terminated by a single END (0xFF) byte. Two consecutive END bytes right
    after an entry mark the end of the whole header.
    """
    dict = {}  # NOTE(review): shadows the builtin 'dict'; kept for compatibility
    i = 0
    while True:
        dict[chr(data[i])] = ''  # data[i] is the symbol byte of this entry
        j = 1
        # collect code characters until the entry terminator (END byte)
        while data[i + j] != END[0]:
            dict[chr(data[i])] += str(chr(data[i + j]))
            j += 1
        i += 1 + j  # jump past the symbol, its code and the terminator
        if data[i] == END[0] and data[i + 1] == END[0]:
            break  # double END byte: header is complete
    return dict
def extract_start(data):
    """
    Return the index of the first payload byte, i.e. the position right
    after the first run of three consecutive END (0xFF) marker bytes.
    """
    idx = 0
    while not (data[idx] == END[0] and data[idx + 1] == END[0]
               and data[idx + 2] == END[0]):
        idx += 1
    return idx + 3
def bytes_to_bits(data, begin):
    """
    Return the bits of data[begin:] as a '0'/'1' string, eight characters
    per byte (most significant bit first).
    """
    return ''.join(format(byte, '08b') for byte in data[begin:])
def data_to_extract(data, dict):
    """
    Decode the payload section of *data* using the symbol -> code mapping
    in *dict* and return the decoded text.

    The payload starts after the header terminator (see extract_start) and
    is decoded bit by bit against the inverted (code -> symbol) mapping.
    """
    begin = extract_start(data)
    print(begin)
    bits = bytes_to_bits(data, begin)
    # invert the mapping so symbols can be looked up by their code
    decode_map = {code: symbol for symbol, code in dict.items()}
    text = ''
    pending = ''
    for bit in bits:
        pending += bit
        if pending in decode_map:
            text += decode_map[pending]
            pending = ''
    return text
def recieve_data(codedpath, decodedpath, ip, port):
    """
    Listen on (ip, port) for a single connection, receive the encoded
    transmission, decode it, and write both the raw bytes and the decoded
    text to disk. Returns 0 on completion.

    NOTE(review): the misspelled name ('recieve') is kept because callers
    use it; renaming would break the interface.
    """
    port = int(port)
    # open a TCP server socket and wait for exactly one client
    sock = socket.socket()
    sock.bind((ip, int(port)))
    sock.listen()
    conn, addr = sock.accept()
    print('Połączono:', addr)
    rec_data = recvall(conn)  # full transmission: header + payload
    rec_dict = create_dict(rec_data)  # symbol -> code table from the header
    extracted = data_to_extract(rec_data, rec_dict)  # decoded text
    print('ODEBRANY SLOWNIK\n')
    print(rec_dict)
    print(extracted)
    # persist the raw encoded bytes ...
    f = open(codedpath, 'wb')
    f.write(rec_data)
    f.close()
    # ... and the decoded text
    f = open(decodedpath, 'w')
    f.write(extracted)
    f.close()
    return 0
| import socket
END = bytearray()
END.append(255)
print(END[0])
def recvall(sock): # Odbiór danych
BUFF_SIZE = 4096 # 4 KiB
data = b''
while True: # odbieramy dane, pakiety 4KiB
part = sock.recv(BUFF_SIZE)
data += part
if len(part) < BUFF_SIZE:
# 0 lub koniec danych
break
return data
def create_dict(data): # Odczytuje otrzymany słownik
dict = {}
i = 0
while True:
dict[chr(data[i])] = ''
j = 1
while data[i + j] != END[0]: # Dopóki nie znajdzie FF, uznaje bajty za 'kod' slowa
dict[chr(data[i])] += str(chr(data[i + j]))
j += 1
i += 1 + j
if data[i] == END[0] and data[i + 1] == END[0]: # Gdy znajdzie 3x FF, kończy słownik
break
return dict
def extract_start(data): # Poszukuje pącztka segmentu danych
i = 0
while True:
if data[i] == END[0] and data[i + 1] == END[0] and data[i + 2] == END[0]:
return i + 3
i += 1
def bytes_to_bits(data, begin): # Zamienia bajty na znakowy odpowiednik w bitach
bits = ''
for i in range(begin, len(data)):
bits += format(data[i], "08b")
return bits
def data_to_extract(data, dict): # Otrzymane dane na podstawie slownika odczytuje do tekstu
begin = extract_start(data) # Szukamy początku tekstu
print(begin)
data = bytes_to_bits(data, begin)
dict = {y: x for x, y in dict.items()} # Zamiana kluczy z wartością w słowniku
text = ''
temp_code = ''
for i in range(len(data)): # Dla kazdego bitu
temp_code += data[i]
if temp_code in dict: # Szukamy czy utworzona tymczasowo zmienna nie zawiera się
# w słowniku
text += dict[temp_code]
temp_code = ''
return text
def recieve_data(codedpath, decodedpath, ip, port):
port = int(port) #Segment odpowiedzialny za utworzenie połaczenia przy użyciu gniazda
sock = socket.socket()
sock.bind((ip, int(port)))
sock.listen()
conn, addr = sock.accept()
print('Połączono:', addr)
rec_data = recvall(conn) #Odbierz dane
rec_dict = create_dict(rec_data) #Utwórz słownik z danych
extracted = data_to_extract(rec_data, rec_dict) #Na podstawie słownika, odkoduj tekst
print("ODEBRANY SLOWNIK\n")
print(rec_dict)
print(extracted)
f = open(codedpath, "wb") #Zapis otrzymanych danych
f.write(rec_data)
f.close()
f = open(decodedpath, "w")
f.write(extracted)
f.close()
return 0
| [
5,
6,
8,
9,
10
] |
970 | 736fee6f9a46b8568b2dd217b81d54d689306630 | <mask token>
class bcolors:
HEADER = '\x1b[95m'
OKBLUE = '\x1b[94m'
OKGREEN = '\x1b[92m'
WARNING = '\x1b[93m'
FAIL = '\x1b[91m'
ENDC = '\x1b[0m'
BOLD = '\x1b[1m'
UNDERLINE = '\x1b[4m'
def get_image(f_sdss):
img = f_sdss[0].data
return img
<mask token>
| <mask token>
class bcolors:
HEADER = '\x1b[95m'
OKBLUE = '\x1b[94m'
OKGREEN = '\x1b[92m'
WARNING = '\x1b[93m'
FAIL = '\x1b[91m'
ENDC = '\x1b[0m'
BOLD = '\x1b[1m'
UNDERLINE = '\x1b[4m'
def get_image(f_sdss):
img = f_sdss[0].data
return img
<mask token>
sex.config['PARAMETERS_LIST'].append('FLUX_ISO')
sex.config['PARAMETERS_LIST'].append('MAG_ISOCOR')
sex.config['PARAMETERS_LIST'].append('MAG_AUTO')
sex.config['PARAMETERS_LIST'].append('PETRO_RADIUS')
sex.config['PARAMETERS_LIST'].append('ISOAREA_IMAGE')
sex.config['PARAMETERS_LIST'].append('ALPHA_J2000')
sex.config['PARAMETERS_LIST'].append('DELTA_J2000')
sex.config['PARAMETERS_LIST'].append('FWHM_WORLD')
sex.config['PARAMETERS_LIST'].append('CLASS_STAR')
<mask token>
sex.run(fname)
<mask token>
for i_object in range(13, 14):
window_size = 250
filter_seg = 'rSDSS'
ra = df_cat['ra']
dec = df_cat['dec']
image_r = fits.open('data/frame-r-002507-4-0226.fits')
wcsys = wcs.WCS(header=image_r[0].header)
y, x = wcsys.wcs_world2pix(ra, dec, 1)
interval = int(round(x[i_object] - window_size / 2)), int(round(x[
i_object] + window_size / 2)), int(round(y[i_object] - window_size / 2)
), int(round(y[i_object] + window_size / 2))
df = pd.DataFrame()
df_sky = pd.DataFrame()
seg_sex = segmap[interval[0]:interval[1], interval[2]:interval[3]]
for i_gal in range(len(df_fit)):
f_sdss = fits.open('data/frame-%s-%s' % (df_fit['filter'][i_gal],
df_fit['name'][i_gal]))
img = get_image(f_sdss)
img_cut = img[interval[0]:interval[1], interval[2]:interval[3]]
plt.figure(1)
plt.clf()
plt.imshow(100 * np.log10(img_cut / 255), cmap='spectral')
plt.colorbar()
band = df_fit['filter'][i_gal]
nrows, ncols = img_cut.shape
xx, yy = np.meshgrid(*np.ogrid[:ncols, :nrows])
table = np.column_stack((xx.flatten(), yy.flatten(), img_cut.flatten())
)
temp = pd.DataFrame(table, columns=['x', 'y', band])
df = pd.concat([df, temp], axis=1)
sky_r = fits.open('data/frame-%s-%s' % (df_fit['filter'][i_gal],
df_fit['name'][i_gal]))
sky = get_image(sky_r)
wcsys = wcs.WCS(header=sky_r[0].header)
yc, xc = wcsys.wcs_world2pix(351.101, 14.737636, 1)
delta_x = 85
delta_y = 85
interval_sky = int(round(xc - delta_x / 2)), int(round(xc + delta_x /
2)), int(round(yc - delta_y / 2)), int(round(yc + delta_y / 2))
img_sky = sky[interval_sky[0]:interval_sky[1], interval_sky[2]:
interval_sky[3]]
sky_nrows, sky_ncols = img_sky.shape
xxc, yyc = np.meshgrid(*np.ogrid[:sky_ncols, :sky_nrows])
table_sky = np.column_stack((xxc.flatten(), yyc.flatten(), img_sky.
flatten()))
temp_sky = pd.DataFrame(table_sky, columns=['x', 'y', band])
df_sky = pd.concat([df_sky, temp_sky], axis=1)
df = df.ix[:, [0, 1, 2, 5, 8, 11, 14]]
df_sky = df_sky.ix[:, [0, 1, 2, 5, 8, 11, 14]]
"""
Imagem da galaxia, na banda r.
"""
plt.figure(1)
plt.clf()
r_sdss = fits.open('data/frame-r-%s' % df_fit['name'][i_gal])
img_r = get_image(r_sdss)
img_cut_r = img_r[interval[0]:interval[1], interval[2]:interval[3]]
cx = cubehelix.cmap(reverse=True, start=0.0, rot=-0.5)
imgplot = plt.imshow(100 * np.log10(img_cut_r / 255), cmap='spectral')
titulo = 'Galaxy #%s - banda r' % df_cat['num'][i_object]
plt.title(titulo)
plt.colorbar()
figura = 'figures/galaxy_#%s' % df_cat['num'][i_object]
plt.savefig(figura)
"""
Imagem segmentada da galaxia, na banda r.
"""
plt.figure(1)
plt.clf()
cx = cubehelix.cmap(reverse=True, start=0.0, rot=-0.5)
imgplot = plt.imshow(seg_sex, cmap='spectral')
titulo = 'Segmentation Galaxy #%s - banda r' % df_cat['num'][i_object]
plt.title(titulo)
plt.colorbar()
figura = 'figures/seg_galaxy_#%s' % df_cat['num'][i_object]
plt.savefig(figura)
"""
================================================================================
Salvando os fluxos de cada galaxia em um arquivo txt
================================================================================
"""
saida_fluxes = 'data/all_band_fluxes_%s.txt' % df_cat['num'][i_object]
formats = ['%d', '%d', '%5.4f', '%5.4f', '%5.4f', '%5.4f', '%5.4f']
headers2 = 'x\ty\tu\tg\tr\ti\tz'
np.savetxt(saida_fluxes, df, delimiter='\t', header=headers2, fmt=formats)
print('')
print('>> Os dados estao em: "%s".' % saida_fluxes)
"""
================================================================================
Subtraindo o ceu, na banda r
================================================================================
"""
df_aux = df.ix[:, 2:]
df_aux1 = df.ix[:, :2]
df_sky_aux = df_sky.ix[:, 2:]
df_aux3 = df_aux - df_sky_aux.mean()
df_rss = df_aux1.join(df_aux3)
"""
A segmentacao consiste de usar um limiar para separar o objeto do fundo.
No nosso caso, usamos limiar = alpha*std_ceu
"""
"""
================================================================================
SEGMENTACAO
================================================================================
"""
limiar = 2.5 * df_sky.r.std()
df_seg = df_rss.ix[df_rss['r'] > limiar]
print('Pixeis acima do limiar: %d' % len(df_seg))
np.savetxt('fof2.txt', df_seg, delimiter='\t')
<mask token>
print('')
print(bcolors.HEADER + 'tempo de processamento: %fs' % time_proc + bcolors.ENDC
)
| <mask token>
__author__ = 'pnovais'
ini = time.time()
class bcolors:
HEADER = '\x1b[95m'
OKBLUE = '\x1b[94m'
OKGREEN = '\x1b[92m'
WARNING = '\x1b[93m'
FAIL = '\x1b[91m'
ENDC = '\x1b[0m'
BOLD = '\x1b[1m'
UNDERLINE = '\x1b[4m'
def get_image(f_sdss):
img = f_sdss[0].data
return img
df_fit = pd.read_csv('data/arquivo_fits.csv')
<mask token>
fname = 'data/frame-r-002507-4-0226.fits'
sex = SExtractor()
sex.config['PARAMETERS_LIST'].append('FLUX_ISO')
sex.config['PARAMETERS_LIST'].append('MAG_ISOCOR')
sex.config['PARAMETERS_LIST'].append('MAG_AUTO')
sex.config['PARAMETERS_LIST'].append('PETRO_RADIUS')
sex.config['PARAMETERS_LIST'].append('ISOAREA_IMAGE')
sex.config['PARAMETERS_LIST'].append('ALPHA_J2000')
sex.config['PARAMETERS_LIST'].append('DELTA_J2000')
sex.config['PARAMETERS_LIST'].append('FWHM_WORLD')
sex.config['PARAMETERS_LIST'].append('CLASS_STAR')
sex.config['CHECKIMAGE_TYPE'] = 'SEGMENTATION'
sex.run(fname)
segmap = fits.open('check.fits')[0].data
df_cat = pd.read_table('py-sextractor.cat', delim_whitespace=True, header=16)
df_cat.columns = ['num', 'flux_best', 'fluxerr_best', 'x', 'y', 'flags',
'fwhm_image', 'flux_iso', 'mag_isocor', 'mag_auto', 'petro_radius',
'ISO_AREA', 'ra', 'dec', 'fwhm_world', 'class_star']
df_cat = df_cat.ix[(df_cat['fwhm_image'] > 4.5) & (df_cat['mag_auto'] < -7)]
df_cat = df_cat.reset_index()
df_cat = df_cat.ix[:, 1:15]
<mask token>
df = pd.DataFrame()
df_sky = pd.DataFrame()
for i_object in range(13, 14):
window_size = 250
filter_seg = 'rSDSS'
ra = df_cat['ra']
dec = df_cat['dec']
image_r = fits.open('data/frame-r-002507-4-0226.fits')
wcsys = wcs.WCS(header=image_r[0].header)
y, x = wcsys.wcs_world2pix(ra, dec, 1)
interval = int(round(x[i_object] - window_size / 2)), int(round(x[
i_object] + window_size / 2)), int(round(y[i_object] - window_size / 2)
), int(round(y[i_object] + window_size / 2))
df = pd.DataFrame()
df_sky = pd.DataFrame()
seg_sex = segmap[interval[0]:interval[1], interval[2]:interval[3]]
for i_gal in range(len(df_fit)):
f_sdss = fits.open('data/frame-%s-%s' % (df_fit['filter'][i_gal],
df_fit['name'][i_gal]))
img = get_image(f_sdss)
img_cut = img[interval[0]:interval[1], interval[2]:interval[3]]
plt.figure(1)
plt.clf()
plt.imshow(100 * np.log10(img_cut / 255), cmap='spectral')
plt.colorbar()
band = df_fit['filter'][i_gal]
nrows, ncols = img_cut.shape
xx, yy = np.meshgrid(*np.ogrid[:ncols, :nrows])
table = np.column_stack((xx.flatten(), yy.flatten(), img_cut.flatten())
)
temp = pd.DataFrame(table, columns=['x', 'y', band])
df = pd.concat([df, temp], axis=1)
sky_r = fits.open('data/frame-%s-%s' % (df_fit['filter'][i_gal],
df_fit['name'][i_gal]))
sky = get_image(sky_r)
wcsys = wcs.WCS(header=sky_r[0].header)
yc, xc = wcsys.wcs_world2pix(351.101, 14.737636, 1)
delta_x = 85
delta_y = 85
interval_sky = int(round(xc - delta_x / 2)), int(round(xc + delta_x /
2)), int(round(yc - delta_y / 2)), int(round(yc + delta_y / 2))
img_sky = sky[interval_sky[0]:interval_sky[1], interval_sky[2]:
interval_sky[3]]
sky_nrows, sky_ncols = img_sky.shape
xxc, yyc = np.meshgrid(*np.ogrid[:sky_ncols, :sky_nrows])
table_sky = np.column_stack((xxc.flatten(), yyc.flatten(), img_sky.
flatten()))
temp_sky = pd.DataFrame(table_sky, columns=['x', 'y', band])
df_sky = pd.concat([df_sky, temp_sky], axis=1)
df = df.ix[:, [0, 1, 2, 5, 8, 11, 14]]
df_sky = df_sky.ix[:, [0, 1, 2, 5, 8, 11, 14]]
"""
Imagem da galaxia, na banda r.
"""
plt.figure(1)
plt.clf()
r_sdss = fits.open('data/frame-r-%s' % df_fit['name'][i_gal])
img_r = get_image(r_sdss)
img_cut_r = img_r[interval[0]:interval[1], interval[2]:interval[3]]
cx = cubehelix.cmap(reverse=True, start=0.0, rot=-0.5)
imgplot = plt.imshow(100 * np.log10(img_cut_r / 255), cmap='spectral')
titulo = 'Galaxy #%s - banda r' % df_cat['num'][i_object]
plt.title(titulo)
plt.colorbar()
figura = 'figures/galaxy_#%s' % df_cat['num'][i_object]
plt.savefig(figura)
"""
Imagem segmentada da galaxia, na banda r.
"""
plt.figure(1)
plt.clf()
cx = cubehelix.cmap(reverse=True, start=0.0, rot=-0.5)
imgplot = plt.imshow(seg_sex, cmap='spectral')
titulo = 'Segmentation Galaxy #%s - banda r' % df_cat['num'][i_object]
plt.title(titulo)
plt.colorbar()
figura = 'figures/seg_galaxy_#%s' % df_cat['num'][i_object]
plt.savefig(figura)
"""
================================================================================
Salvando os fluxos de cada galaxia em um arquivo txt
================================================================================
"""
saida_fluxes = 'data/all_band_fluxes_%s.txt' % df_cat['num'][i_object]
formats = ['%d', '%d', '%5.4f', '%5.4f', '%5.4f', '%5.4f', '%5.4f']
headers2 = 'x\ty\tu\tg\tr\ti\tz'
np.savetxt(saida_fluxes, df, delimiter='\t', header=headers2, fmt=formats)
print('')
print('>> Os dados estao em: "%s".' % saida_fluxes)
"""
================================================================================
Subtraindo o ceu, na banda r
================================================================================
"""
df_aux = df.ix[:, 2:]
df_aux1 = df.ix[:, :2]
df_sky_aux = df_sky.ix[:, 2:]
df_aux3 = df_aux - df_sky_aux.mean()
df_rss = df_aux1.join(df_aux3)
"""
A segmentacao consiste de usar um limiar para separar o objeto do fundo.
No nosso caso, usamos limiar = alpha*std_ceu
"""
"""
================================================================================
SEGMENTACAO
================================================================================
"""
limiar = 2.5 * df_sky.r.std()
df_seg = df_rss.ix[df_rss['r'] > limiar]
print('Pixeis acima do limiar: %d' % len(df_seg))
np.savetxt('fof2.txt', df_seg, delimiter='\t')
fim = time.time()
time_proc = fim - ini
print('')
print(bcolors.HEADER + 'tempo de processamento: %fs' % time_proc + bcolors.ENDC
)
| import pandas as pd
import numpy as np
import datetime
import time
from sys import exit
from matplotlib import colors, pyplot as plt
from functools import reduce
import matplotlib.cm as cm
import seaborn as sns
from astropy.io import ascii, fits
from astropy.wcs import wcs
from matplotlib import cm
from matplotlib.ticker import LinearLocator, FormatStrFormatter
from scipy.interpolate import interp2d
import matplotlib.mlab as mlab
import scipy, pylab
import rpy2
import cubehelix
import math
from pysextractor import SExtractor
__author__ = 'pnovais'
ini = time.time()
class bcolors:
    """ANSI escape sequences for colored/styled terminal output."""
    HEADER = '\x1b[95m'  # bright magenta
    OKBLUE = '\x1b[94m'  # bright blue
    OKGREEN = '\x1b[92m'  # bright green
    WARNING = '\x1b[93m'  # bright yellow
    FAIL = '\x1b[91m'  # bright red
    ENDC = '\x1b[0m'  # reset all attributes
    BOLD = '\x1b[1m'  # bold text
    UNDERLINE = '\x1b[4m'  # underlined text
def get_image(f_sdss):
    """Return the pixel data stored in the primary HDU of an opened FITS file."""
    return f_sdss[0].data
df_fit = pd.read_csv('data/arquivo_fits.csv')
<mask token>
fname = 'data/frame-r-002507-4-0226.fits'
sex = SExtractor()
sex.config['PARAMETERS_LIST'].append('FLUX_ISO')
sex.config['PARAMETERS_LIST'].append('MAG_ISOCOR')
sex.config['PARAMETERS_LIST'].append('MAG_AUTO')
sex.config['PARAMETERS_LIST'].append('PETRO_RADIUS')
sex.config['PARAMETERS_LIST'].append('ISOAREA_IMAGE')
sex.config['PARAMETERS_LIST'].append('ALPHA_J2000')
sex.config['PARAMETERS_LIST'].append('DELTA_J2000')
sex.config['PARAMETERS_LIST'].append('FWHM_WORLD')
sex.config['PARAMETERS_LIST'].append('CLASS_STAR')
sex.config['CHECKIMAGE_TYPE'] = 'SEGMENTATION'
sex.run(fname)
segmap = fits.open('check.fits')[0].data
df_cat = pd.read_table('py-sextractor.cat', delim_whitespace=True, header=16)
df_cat.columns = ['num', 'flux_best', 'fluxerr_best', 'x', 'y', 'flags',
'fwhm_image', 'flux_iso', 'mag_isocor', 'mag_auto', 'petro_radius',
'ISO_AREA', 'ra', 'dec', 'fwhm_world', 'class_star']
df_cat = df_cat.ix[(df_cat['fwhm_image'] > 4.5) & (df_cat['mag_auto'] < -7)]
df_cat = df_cat.reset_index()
df_cat = df_cat.ix[:, 1:15]
<mask token>
df = pd.DataFrame()
df_sky = pd.DataFrame()
for i_object in range(13, 14):
window_size = 250
filter_seg = 'rSDSS'
ra = df_cat['ra']
dec = df_cat['dec']
image_r = fits.open('data/frame-r-002507-4-0226.fits')
wcsys = wcs.WCS(header=image_r[0].header)
y, x = wcsys.wcs_world2pix(ra, dec, 1)
interval = int(round(x[i_object] - window_size / 2)), int(round(x[
i_object] + window_size / 2)), int(round(y[i_object] - window_size / 2)
), int(round(y[i_object] + window_size / 2))
df = pd.DataFrame()
df_sky = pd.DataFrame()
seg_sex = segmap[interval[0]:interval[1], interval[2]:interval[3]]
for i_gal in range(len(df_fit)):
f_sdss = fits.open('data/frame-%s-%s' % (df_fit['filter'][i_gal],
df_fit['name'][i_gal]))
img = get_image(f_sdss)
img_cut = img[interval[0]:interval[1], interval[2]:interval[3]]
plt.figure(1)
plt.clf()
plt.imshow(100 * np.log10(img_cut / 255), cmap='spectral')
plt.colorbar()
band = df_fit['filter'][i_gal]
nrows, ncols = img_cut.shape
xx, yy = np.meshgrid(*np.ogrid[:ncols, :nrows])
table = np.column_stack((xx.flatten(), yy.flatten(), img_cut.flatten())
)
temp = pd.DataFrame(table, columns=['x', 'y', band])
df = pd.concat([df, temp], axis=1)
sky_r = fits.open('data/frame-%s-%s' % (df_fit['filter'][i_gal],
df_fit['name'][i_gal]))
sky = get_image(sky_r)
wcsys = wcs.WCS(header=sky_r[0].header)
yc, xc = wcsys.wcs_world2pix(351.101, 14.737636, 1)
delta_x = 85
delta_y = 85
interval_sky = int(round(xc - delta_x / 2)), int(round(xc + delta_x /
2)), int(round(yc - delta_y / 2)), int(round(yc + delta_y / 2))
img_sky = sky[interval_sky[0]:interval_sky[1], interval_sky[2]:
interval_sky[3]]
sky_nrows, sky_ncols = img_sky.shape
xxc, yyc = np.meshgrid(*np.ogrid[:sky_ncols, :sky_nrows])
table_sky = np.column_stack((xxc.flatten(), yyc.flatten(), img_sky.
flatten()))
temp_sky = pd.DataFrame(table_sky, columns=['x', 'y', band])
df_sky = pd.concat([df_sky, temp_sky], axis=1)
df = df.ix[:, [0, 1, 2, 5, 8, 11, 14]]
df_sky = df_sky.ix[:, [0, 1, 2, 5, 8, 11, 14]]
"""
Imagem da galaxia, na banda r.
"""
plt.figure(1)
plt.clf()
r_sdss = fits.open('data/frame-r-%s' % df_fit['name'][i_gal])
img_r = get_image(r_sdss)
img_cut_r = img_r[interval[0]:interval[1], interval[2]:interval[3]]
cx = cubehelix.cmap(reverse=True, start=0.0, rot=-0.5)
imgplot = plt.imshow(100 * np.log10(img_cut_r / 255), cmap='spectral')
titulo = 'Galaxy #%s - banda r' % df_cat['num'][i_object]
plt.title(titulo)
plt.colorbar()
figura = 'figures/galaxy_#%s' % df_cat['num'][i_object]
plt.savefig(figura)
"""
Imagem segmentada da galaxia, na banda r.
"""
plt.figure(1)
plt.clf()
cx = cubehelix.cmap(reverse=True, start=0.0, rot=-0.5)
imgplot = plt.imshow(seg_sex, cmap='spectral')
titulo = 'Segmentation Galaxy #%s - banda r' % df_cat['num'][i_object]
plt.title(titulo)
plt.colorbar()
figura = 'figures/seg_galaxy_#%s' % df_cat['num'][i_object]
plt.savefig(figura)
"""
================================================================================
Salvando os fluxos de cada galaxia em um arquivo txt
================================================================================
"""
saida_fluxes = 'data/all_band_fluxes_%s.txt' % df_cat['num'][i_object]
formats = ['%d', '%d', '%5.4f', '%5.4f', '%5.4f', '%5.4f', '%5.4f']
headers2 = 'x\ty\tu\tg\tr\ti\tz'
np.savetxt(saida_fluxes, df, delimiter='\t', header=headers2, fmt=formats)
print('')
print('>> Os dados estao em: "%s".' % saida_fluxes)
"""
================================================================================
Subtraindo o ceu, na banda r
================================================================================
"""
df_aux = df.ix[:, 2:]
df_aux1 = df.ix[:, :2]
df_sky_aux = df_sky.ix[:, 2:]
df_aux3 = df_aux - df_sky_aux.mean()
df_rss = df_aux1.join(df_aux3)
"""
A segmentacao consiste de usar um limiar para separar o objeto do fundo.
No nosso caso, usamos limiar = alpha*std_ceu
"""
"""
================================================================================
SEGMENTACAO
================================================================================
"""
limiar = 2.5 * df_sky.r.std()
df_seg = df_rss.ix[df_rss['r'] > limiar]
print('Pixeis acima do limiar: %d' % len(df_seg))
np.savetxt('fof2.txt', df_seg, delimiter='\t')
fim = time.time()
time_proc = fim - ini
print('')
print(bcolors.HEADER + 'tempo de processamento: %fs' % time_proc + bcolors.ENDC
)
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import pandas as pd
import numpy as np
import datetime
import time
from sys import exit
from matplotlib import colors, pyplot as plt
from functools import reduce
import matplotlib.cm as cm
import seaborn as sns
from astropy.io import ascii, fits
from astropy.wcs import wcs
from matplotlib import cm
from matplotlib.ticker import LinearLocator, FormatStrFormatter
from scipy.interpolate import interp2d
import matplotlib.mlab as mlab
import scipy, pylab
import rpy2
import cubehelix
import math
from pysextractor import SExtractor
__author__ = 'pnovais'
ini=time.time()
class bcolors:
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
#definindo a classe que ira ler as imagens fits
def get_image(f_sdss):
img = f_sdss[0].data
# sky = f_sdss[2].data
return img
#abertura do arquivo com o nome das imagens, nas n bandas
df_fit = pd.read_csv('data/arquivo_fits.csv')
'''
================================================================================
Rodando o SExtractor na imagem na banda r, criando uma segmentacao e um catalogo
com os objetos obtidos
ATUALIZAR NOME DA BANDA DE SEGMENTACAO
================================================================================
'''
fname = 'data/frame-r-002507-4-0226.fits'
sex = SExtractor()
sex.config['PARAMETERS_LIST'].append('FLUX_ISO')
sex.config['PARAMETERS_LIST'].append('MAG_ISOCOR')
sex.config['PARAMETERS_LIST'].append('MAG_AUTO')
sex.config['PARAMETERS_LIST'].append('PETRO_RADIUS')
sex.config['PARAMETERS_LIST'].append('ISOAREA_IMAGE')
sex.config['PARAMETERS_LIST'].append('ALPHA_J2000')
sex.config['PARAMETERS_LIST'].append('DELTA_J2000')
sex.config['PARAMETERS_LIST'].append('FWHM_WORLD')
sex.config['PARAMETERS_LIST'].append('CLASS_STAR')
sex.config['CHECKIMAGE_TYPE'] = 'SEGMENTATION'
sex.run(fname)
segmap = fits.open('check.fits')[0].data
df_cat = pd.read_table('py-sextractor.cat', delim_whitespace=True, header=16)
df_cat.columns = ['num','flux_best','fluxerr_best', 'x','y','flags',
'fwhm_image', 'flux_iso','mag_isocor','mag_auto',
'petro_radius','ISO_AREA','ra','dec',
'fwhm_world','class_star']
#selecao dos objetos que devem ser galaxias
df_cat = df_cat.ix[(df_cat['fwhm_image'] > 4.5) & (df_cat['mag_auto'] < -7)]
df_cat = df_cat.reset_index()
df_cat = df_cat.ix[:,1:15]
'''
================================================================================
Lendo as imagens, em todas as bandas, e gerando um dataframe para cada galaxia
utilizando astropy
Calculando o ceu em todas as bandas
ATUALIZAR NOME DA BANDA DE SEGMENTACAO
================================================================================
'''
df = pd.DataFrame()
df_sky = pd.DataFrame()
for i_object in range(13,14):
window_size = 250
filter_seg = 'rSDSS'
ra = df_cat['ra']
dec = df_cat['dec']
image_r = fits.open('data/frame-r-002507-4-0226.fits')
wcsys = wcs.WCS(header=image_r[0].header)
y, x = wcsys.wcs_world2pix(ra, dec, 1)
interval = (int(round(x[i_object] - window_size / 2)), int(round(x[i_object] + window_size / 2)),
int(round(y[i_object] - window_size / 2)), int(round(y[i_object] + window_size / 2)))
df = pd.DataFrame()
df_sky = pd.DataFrame()
seg_sex = segmap[interval[0]:interval[1], interval[2]:interval[3]]
for i_gal in range(len(df_fit)):
f_sdss = fits.open('data/frame-%s-%s' %(df_fit['filter'][i_gal],
df_fit['name'][i_gal]))
img = get_image(f_sdss)
img_cut = img[interval[0]:interval[1], interval[2]:interval[3]]
plt.figure(1)
plt.clf()
plt.imshow(100*np.log10(img_cut/255), cmap='spectral')
plt.colorbar()
band=df_fit['filter'][i_gal]
nrows, ncols = img_cut.shape
xx, yy = np.meshgrid( *np.ogrid[:ncols, :nrows] )
table = np.column_stack(( xx.flatten(), yy.flatten(), img_cut.flatten() ))
temp = pd.DataFrame(table, columns=['x','y',band])
df = pd.concat([df,temp], axis=1)
sky_r = fits.open('data/frame-%s-%s' %(df_fit['filter'][i_gal],
df_fit['name'][i_gal]))
sky = get_image(sky_r)
wcsys = wcs.WCS(header=sky_r[0].header)
yc, xc = wcsys.wcs_world2pix(351.101, 14.737636, 1)
delta_x = 85
delta_y = 85
interval_sky = (int(round(xc - delta_x / 2)), int(round(xc + delta_x / 2)), int(round(yc - delta_y / 2)),
int(round(yc + delta_y / 2)))
img_sky = sky[interval_sky[0]:interval_sky[1], interval_sky[2]:interval_sky[3]]
sky_nrows, sky_ncols = img_sky.shape
xxc, yyc = np.meshgrid( *np.ogrid[:sky_ncols, :sky_nrows] )
table_sky = np.column_stack(( xxc.flatten(), yyc.flatten(), img_sky.flatten() ))
temp_sky = pd.DataFrame(table_sky, columns=['x','y',band])
df_sky = pd.concat([df_sky,temp_sky], axis=1)
df = df.ix[:, [0,1,2,5,8,11,14]]
df_sky = df_sky.ix[:, [0,1,2,5,8,11,14]]
'''
Imagem da galaxia, na banda r.
'''
plt.figure(1)
plt.clf()
r_sdss = fits.open('data/frame-r-%s' %(df_fit['name'][i_gal]))
img_r = get_image(r_sdss)
img_cut_r = img_r[interval[0]:interval[1], interval[2]:interval[3]]
cx = cubehelix.cmap(reverse=True, start=0., rot=-0.5)
imgplot = plt.imshow(100*np.log10(img_cut_r/255), cmap='spectral')
titulo='Galaxy #%s - banda r' %(df_cat['num'][i_object])
plt.title(titulo)
plt.colorbar()
figura = 'figures/galaxy_#%s' %df_cat['num'][i_object]
plt.savefig(figura)
'''
Imagem segmentada da galaxia, na banda r.
'''
plt.figure(1)
plt.clf()
cx = cubehelix.cmap(reverse=True, start=0., rot=-0.5)
imgplot = plt.imshow(seg_sex, cmap='spectral')
titulo='Segmentation Galaxy #%s - banda r' %(df_cat['num'][i_object])
plt.title(titulo)
plt.colorbar()
figura = 'figures/seg_galaxy_#%s' %df_cat['num'][i_object]
plt.savefig(figura)
'''
================================================================================
Salvando os fluxos de cada galaxia em um arquivo txt
================================================================================
'''
saida_fluxes = 'data/all_band_fluxes_%s.txt' %df_cat['num'][i_object]
formats=['%d','%d','%5.4f','%5.4f','%5.4f','%5.4f','%5.4f']
headers2='x\ty\tu\tg\tr\ti\tz'
np.savetxt(saida_fluxes,df, delimiter='\t',header=headers2, fmt = formats)
print('')
print('>> Os dados estao em: "%s".' %saida_fluxes)
'''
================================================================================
Subtraindo o ceu, na banda r
================================================================================
'''
df_aux=df.ix[:,2:]
df_aux1=df.ix[:,:2]
df_sky_aux = df_sky.ix[:,2:]
df_aux3 = (df_aux - df_sky_aux.mean())
df_rss=df_aux1.join(df_aux3)
"""
A segmentacao consiste de usar um limiar para separar o objeto do fundo.
No nosso caso, usamos limiar = alpha*std_ceu
"""
'''
================================================================================
SEGMENTACAO
================================================================================
'''
#SELECAO DOS PIXEIS ACIMA DO LIMIAR
limiar = 2.5*df_sky.r.std()
df_seg = df_rss.ix[df_rss['r'] > limiar]
print('Pixeis acima do limiar: %d' %len(df_seg))
np.savetxt('fof2.txt',df_seg,delimiter='\t')
fim = time.time()
time_proc = fim - ini
print('')
print(bcolors.HEADER + 'tempo de processamento: %fs' %time_proc + bcolors.ENDC)
| [
3,
4,
5,
6,
7
] |
971 | dbe3aa107de8e62822803d1740773a4b22f41edf | <mask token>
| <mask token>
sys.path.append(os.pardir)
<mask token>
for i in range(iters_num):
print(i)
batch_mask = np.random.choice(train_size, batch_size)
x_batch = x_train[batch_mask]
t_batch = t_train[batch_mask]
grad = network.gradient(x_batch, t_batch)
for key in ('W1', 'b1', 'W2', 'b2'):
network.params[key] -= learning_rate * grad[key]
loss = network.loss(x_batch, t_batch)
train_loss_list.append(loss)
print('{} : {}'.format(i, train_loss_list[i]))
print(train_loss_list)
| <mask token>
sys.path.append(os.pardir)
<mask token>
(x_train, t_train), (x_test, t_test) = load_mnist(normalize=True,
one_hot_label=True)
train_loss_list = []
iters_num = 1000
train_size = x_train.shape[0]
batch_size = 100
learning_rate = 0.1
network = TwoLayerNet(input_size=784, hidden_size=50, output_size=10)
for i in range(iters_num):
print(i)
batch_mask = np.random.choice(train_size, batch_size)
x_batch = x_train[batch_mask]
t_batch = t_train[batch_mask]
grad = network.gradient(x_batch, t_batch)
for key in ('W1', 'b1', 'W2', 'b2'):
network.params[key] -= learning_rate * grad[key]
loss = network.loss(x_batch, t_batch)
train_loss_list.append(loss)
print('{} : {}'.format(i, train_loss_list[i]))
print(train_loss_list)
| import sys, os
sys.path.append(os.pardir)
import numpy as np
from dataset.mnist import load_mnist
from two_layer_net import TwoLayerNet
(x_train, t_train), (x_test, t_test) = load_mnist(normalize=True,
one_hot_label=True)
train_loss_list = []
iters_num = 1000
train_size = x_train.shape[0]
batch_size = 100
learning_rate = 0.1
network = TwoLayerNet(input_size=784, hidden_size=50, output_size=10)
for i in range(iters_num):
print(i)
batch_mask = np.random.choice(train_size, batch_size)
x_batch = x_train[batch_mask]
t_batch = t_train[batch_mask]
grad = network.gradient(x_batch, t_batch)
for key in ('W1', 'b1', 'W2', 'b2'):
network.params[key] -= learning_rate * grad[key]
loss = network.loss(x_batch, t_batch)
train_loss_list.append(loss)
print('{} : {}'.format(i, train_loss_list[i]))
print(train_loss_list)
| import sys, os
sys.path.append(os.pardir)
import numpy as np
from dataset.mnist import load_mnist
from two_layer_net import TwoLayerNet
(x_train, t_train), (x_test, t_test) = load_mnist(normalize=True, one_hot_label = True)
train_loss_list = []
#hiper param
iters_num = 1000
train_size = x_train.shape[0]
batch_size = 100
learning_rate = 0.1
network = TwoLayerNet(input_size = 784, hidden_size=50, output_size=10)
for i in range(iters_num):
print(i)
#get batch
batch_mask = np.random.choice(train_size, batch_size)
x_batch = x_train[batch_mask]
t_batch = t_train[batch_mask]
#calc gradient
grad = network.gradient(x_batch, t_batch)
#update param
for key in ('W1', 'b1', 'W2', 'b2'):
network.params[key] -= learning_rate * grad[key]
#recode
loss = network.loss(x_batch, t_batch)
train_loss_list.append(loss)
print("{} : {}".format(i, train_loss_list[i]))
print(train_loss_list)
| [
0,
1,
2,
3,
4
] |
972 | 3f9be81c86852a758440c6a144b8caba736b3868 | <mask token>
| <mask token>
class Migration(migrations.Migration):
<mask token>
<mask token>
| <mask token>
class Migration(migrations.Migration):
dependencies = [('usuarios', '0001_initial'), ('plataforma',
'0005_auto_20210219_2343')]
operations = [migrations.AlterField(model_name='plataforma', name=
'usuario', field=models.ForeignKey(on_delete=django.db.models.
deletion.CASCADE, to='usuarios.usuario'))]
| from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [('usuarios', '0001_initial'), ('plataforma',
'0005_auto_20210219_2343')]
operations = [migrations.AlterField(model_name='plataforma', name=
'usuario', field=models.ForeignKey(on_delete=django.db.models.
deletion.CASCADE, to='usuarios.usuario'))]
| # Generated by Django 3.1.7 on 2021-02-20 02:52
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('usuarios', '0001_initial'),
('plataforma', '0005_auto_20210219_2343'),
]
operations = [
migrations.AlterField(
model_name='plataforma',
name='usuario',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='usuarios.usuario'),
),
]
| [
0,
1,
2,
3,
4
] |
973 | e06b740f27e41b9f120c962fd76a38a29d54af3c | <mask token>
| <mask token>
def test_body() ->None:
for func in (weight, shower, food, water):
assert ilen(func()) >= 1
| from more_itertools import ilen
from my.body import weight, shower, food, water
def test_body() ->None:
for func in (weight, shower, food, water):
assert ilen(func()) >= 1
| null | null | [
0,
1,
2
] |
974 | 1fd4d1a44270ef29512e601af737accb916dc441 | from estmd import ESTMD
input_directory = "test.avi"
e = ESTMD()
e.open_movie(input_directory)
e.run(by_frame=True)
r = e.create_list_of_arrays()
print "Done testing!"
| null | null | null | null | [
0
] |
975 | c7c412fe4e2d53af1b4f2a55bd3453496767890d | <mask token>
@pytest.mark.usefixtures('driver')
class Test_001_ShedulePage:
<mask token>
<mask token>
def test_001_elements_exists(self, driver):
"""тапнуть на фичерс,
тапнуть на смотреть расписание,
найти кнопку отмены, кнопку карты, поле поиска"""
with allure.step('MoviesPage'):
self.movie_page = MoviesPage(driver)
self.movie_page.set_custom_wait(10)
self.movie_page.act.click_by_coords(50, 30)
with allure.step('EventDetailsPage'):
self.event_detail_page = MoviesDetailsPage(driver)
self.event_detail_page.set_custom_wait(10)
self.event_detail_page.click(*self.event_detail_page_locators.
btn_view_timetable)
with allure.step('ShedulePage'):
self.shedule_page = ShedulePage(driver)
self.shedule_page.set_custom_wait(10)
self.shedule_page.find_element(*self.shedule_locators.btn_back)
self.shedule_page.find_element(*self.shedule_locators.btn_map)
self.shedule_page.find_element(*self.shedule_locators.search_field)
def test_002_valid_filters(self, driver):
"""тапнуть на фичерс,
тапнуть на смотреть расписание,
проверить соответствие фильтров и ответа сервера
проверить порядок фильтров"""
dbg_api = DebugAPI.run(request=False, mapi_handler=HandlersAPI.
url_creations_movie_schedule_filter)
try:
with allure.step('MoviesPage'):
self.movie_page = MoviesPage(driver)
self.movie_page.set_custom_wait(10)
sleep(5)
self.movie_page.act.click_by_coords(50, 30)
with allure.step('EventDetailsPage'):
self.event_detail_page = MoviesDetailsPage(driver)
self.event_detail_page.set_custom_wait(10)
sleep(5)
self.event_detail_page.click(*self.
event_detail_page_locators.btn_view_timetable)
with allure.step('ShedulePage'):
self.shedule_page = ShedulePage(driver)
self.shedule_page.set_custom_wait(10)
sleep(5)
self.shedule_page.check_rows_filters(dbg_api)
finally:
dbg_api.kill()
def test_003_check_time_ticket_filter(self, driver):
"""тапнуть на фичерс,
тапнуть на смотреть расписание,
проверять соответствие времени на билетах с выставленными фильтрами"""
dbg_api = DebugAPI.run(request=False, mapi_handler=HandlersAPI.
url_creations_movie_schedule_filter)
try:
with allure.step('MoviesPage'):
self.movie_page = MoviesPage(driver)
self.movie_page.set_custom_wait(10)
sleep(10)
self.movie_page.act.click_by_coords(50, 30)
with allure.step('EventDetailsPage'):
self.event_detail_page = MoviesDetailsPage(driver)
self.event_detail_page.set_custom_wait(10)
self.event_detail_page.click(*self.
event_detail_page_locators.btn_view_timetable)
with allure.step('ShedulePage'):
self.shedule_page = ShedulePage(driver)
self.shedule_page.set_custom_wait(10)
sleep(2)
self.shedule_page.compare_tickets_datetime_options_second_filter(
dbg_api)
finally:
dbg_api.kill()
| <mask token>
@pytest.mark.usefixtures('driver')
class Test_001_ShedulePage:
<mask token>
@staticmethod
def teardown_class(cls):
enable_proxy(mode=False)
def test_001_elements_exists(self, driver):
"""тапнуть на фичерс,
тапнуть на смотреть расписание,
найти кнопку отмены, кнопку карты, поле поиска"""
with allure.step('MoviesPage'):
self.movie_page = MoviesPage(driver)
self.movie_page.set_custom_wait(10)
self.movie_page.act.click_by_coords(50, 30)
with allure.step('EventDetailsPage'):
self.event_detail_page = MoviesDetailsPage(driver)
self.event_detail_page.set_custom_wait(10)
self.event_detail_page.click(*self.event_detail_page_locators.
btn_view_timetable)
with allure.step('ShedulePage'):
self.shedule_page = ShedulePage(driver)
self.shedule_page.set_custom_wait(10)
self.shedule_page.find_element(*self.shedule_locators.btn_back)
self.shedule_page.find_element(*self.shedule_locators.btn_map)
self.shedule_page.find_element(*self.shedule_locators.search_field)
def test_002_valid_filters(self, driver):
"""тапнуть на фичерс,
тапнуть на смотреть расписание,
проверить соответствие фильтров и ответа сервера
проверить порядок фильтров"""
dbg_api = DebugAPI.run(request=False, mapi_handler=HandlersAPI.
url_creations_movie_schedule_filter)
try:
with allure.step('MoviesPage'):
self.movie_page = MoviesPage(driver)
self.movie_page.set_custom_wait(10)
sleep(5)
self.movie_page.act.click_by_coords(50, 30)
with allure.step('EventDetailsPage'):
self.event_detail_page = MoviesDetailsPage(driver)
self.event_detail_page.set_custom_wait(10)
sleep(5)
self.event_detail_page.click(*self.
event_detail_page_locators.btn_view_timetable)
with allure.step('ShedulePage'):
self.shedule_page = ShedulePage(driver)
self.shedule_page.set_custom_wait(10)
sleep(5)
self.shedule_page.check_rows_filters(dbg_api)
finally:
dbg_api.kill()
def test_003_check_time_ticket_filter(self, driver):
"""тапнуть на фичерс,
тапнуть на смотреть расписание,
проверять соответствие времени на билетах с выставленными фильтрами"""
dbg_api = DebugAPI.run(request=False, mapi_handler=HandlersAPI.
url_creations_movie_schedule_filter)
try:
with allure.step('MoviesPage'):
self.movie_page = MoviesPage(driver)
self.movie_page.set_custom_wait(10)
sleep(10)
self.movie_page.act.click_by_coords(50, 30)
with allure.step('EventDetailsPage'):
self.event_detail_page = MoviesDetailsPage(driver)
self.event_detail_page.set_custom_wait(10)
self.event_detail_page.click(*self.
event_detail_page_locators.btn_view_timetable)
with allure.step('ShedulePage'):
self.shedule_page = ShedulePage(driver)
self.shedule_page.set_custom_wait(10)
sleep(2)
self.shedule_page.compare_tickets_datetime_options_second_filter(
dbg_api)
finally:
dbg_api.kill()
| <mask token>
@pytest.mark.usefixtures('driver')
class Test_001_ShedulePage:
@classmethod
def setup_class(cls):
cls.movies_locators = MoviesPageLocators()
cls.shedule_locators = ShedulePageLocators()
cls.event_detail_page_locators = MoviesDetailsPageLocators()
@staticmethod
def teardown_class(cls):
enable_proxy(mode=False)
def test_001_elements_exists(self, driver):
"""тапнуть на фичерс,
тапнуть на смотреть расписание,
найти кнопку отмены, кнопку карты, поле поиска"""
with allure.step('MoviesPage'):
self.movie_page = MoviesPage(driver)
self.movie_page.set_custom_wait(10)
self.movie_page.act.click_by_coords(50, 30)
with allure.step('EventDetailsPage'):
self.event_detail_page = MoviesDetailsPage(driver)
self.event_detail_page.set_custom_wait(10)
self.event_detail_page.click(*self.event_detail_page_locators.
btn_view_timetable)
with allure.step('ShedulePage'):
self.shedule_page = ShedulePage(driver)
self.shedule_page.set_custom_wait(10)
self.shedule_page.find_element(*self.shedule_locators.btn_back)
self.shedule_page.find_element(*self.shedule_locators.btn_map)
self.shedule_page.find_element(*self.shedule_locators.search_field)
def test_002_valid_filters(self, driver):
"""тапнуть на фичерс,
тапнуть на смотреть расписание,
проверить соответствие фильтров и ответа сервера
проверить порядок фильтров"""
dbg_api = DebugAPI.run(request=False, mapi_handler=HandlersAPI.
url_creations_movie_schedule_filter)
try:
with allure.step('MoviesPage'):
self.movie_page = MoviesPage(driver)
self.movie_page.set_custom_wait(10)
sleep(5)
self.movie_page.act.click_by_coords(50, 30)
with allure.step('EventDetailsPage'):
self.event_detail_page = MoviesDetailsPage(driver)
self.event_detail_page.set_custom_wait(10)
sleep(5)
self.event_detail_page.click(*self.
event_detail_page_locators.btn_view_timetable)
with allure.step('ShedulePage'):
self.shedule_page = ShedulePage(driver)
self.shedule_page.set_custom_wait(10)
sleep(5)
self.shedule_page.check_rows_filters(dbg_api)
finally:
dbg_api.kill()
def test_003_check_time_ticket_filter(self, driver):
"""тапнуть на фичерс,
тапнуть на смотреть расписание,
проверять соответствие времени на билетах с выставленными фильтрами"""
dbg_api = DebugAPI.run(request=False, mapi_handler=HandlersAPI.
url_creations_movie_schedule_filter)
try:
with allure.step('MoviesPage'):
self.movie_page = MoviesPage(driver)
self.movie_page.set_custom_wait(10)
sleep(10)
self.movie_page.act.click_by_coords(50, 30)
with allure.step('EventDetailsPage'):
self.event_detail_page = MoviesDetailsPage(driver)
self.event_detail_page.set_custom_wait(10)
self.event_detail_page.click(*self.
event_detail_page_locators.btn_view_timetable)
with allure.step('ShedulePage'):
self.shedule_page = ShedulePage(driver)
self.shedule_page.set_custom_wait(10)
sleep(2)
self.shedule_page.compare_tickets_datetime_options_second_filter(
dbg_api)
finally:
dbg_api.kill()
| from time import sleep
import pytest
import allure
from app.debug_api import DebugAPI
from app.check_api import HandlersAPI
from locators.movies_details_locators import MoviesDetailsPageLocators
from locators.movies_locators import MoviesPageLocators
from locators.shedule_locators import ShedulePageLocators
from screens.MoviesPage import MoviesPage
from screens.MoviesDetailsPage import MoviesDetailsPage
from screens.ShedulePage import ShedulePage
from utils.internet import enable_proxy
@pytest.mark.usefixtures('driver')
class Test_001_ShedulePage:
@classmethod
def setup_class(cls):
cls.movies_locators = MoviesPageLocators()
cls.shedule_locators = ShedulePageLocators()
cls.event_detail_page_locators = MoviesDetailsPageLocators()
@staticmethod
def teardown_class(cls):
enable_proxy(mode=False)
def test_001_elements_exists(self, driver):
"""тапнуть на фичерс,
тапнуть на смотреть расписание,
найти кнопку отмены, кнопку карты, поле поиска"""
with allure.step('MoviesPage'):
self.movie_page = MoviesPage(driver)
self.movie_page.set_custom_wait(10)
self.movie_page.act.click_by_coords(50, 30)
with allure.step('EventDetailsPage'):
self.event_detail_page = MoviesDetailsPage(driver)
self.event_detail_page.set_custom_wait(10)
self.event_detail_page.click(*self.event_detail_page_locators.
btn_view_timetable)
with allure.step('ShedulePage'):
self.shedule_page = ShedulePage(driver)
self.shedule_page.set_custom_wait(10)
self.shedule_page.find_element(*self.shedule_locators.btn_back)
self.shedule_page.find_element(*self.shedule_locators.btn_map)
self.shedule_page.find_element(*self.shedule_locators.search_field)
def test_002_valid_filters(self, driver):
"""тапнуть на фичерс,
тапнуть на смотреть расписание,
проверить соответствие фильтров и ответа сервера
проверить порядок фильтров"""
dbg_api = DebugAPI.run(request=False, mapi_handler=HandlersAPI.
url_creations_movie_schedule_filter)
try:
with allure.step('MoviesPage'):
self.movie_page = MoviesPage(driver)
self.movie_page.set_custom_wait(10)
sleep(5)
self.movie_page.act.click_by_coords(50, 30)
with allure.step('EventDetailsPage'):
self.event_detail_page = MoviesDetailsPage(driver)
self.event_detail_page.set_custom_wait(10)
sleep(5)
self.event_detail_page.click(*self.
event_detail_page_locators.btn_view_timetable)
with allure.step('ShedulePage'):
self.shedule_page = ShedulePage(driver)
self.shedule_page.set_custom_wait(10)
sleep(5)
self.shedule_page.check_rows_filters(dbg_api)
finally:
dbg_api.kill()
def test_003_check_time_ticket_filter(self, driver):
"""тапнуть на фичерс,
тапнуть на смотреть расписание,
проверять соответствие времени на билетах с выставленными фильтрами"""
dbg_api = DebugAPI.run(request=False, mapi_handler=HandlersAPI.
url_creations_movie_schedule_filter)
try:
with allure.step('MoviesPage'):
self.movie_page = MoviesPage(driver)
self.movie_page.set_custom_wait(10)
sleep(10)
self.movie_page.act.click_by_coords(50, 30)
with allure.step('EventDetailsPage'):
self.event_detail_page = MoviesDetailsPage(driver)
self.event_detail_page.set_custom_wait(10)
self.event_detail_page.click(*self.
event_detail_page_locators.btn_view_timetable)
with allure.step('ShedulePage'):
self.shedule_page = ShedulePage(driver)
self.shedule_page.set_custom_wait(10)
sleep(2)
self.shedule_page.compare_tickets_datetime_options_second_filter(
dbg_api)
finally:
dbg_api.kill()
| null | [
4,
5,
6,
7
] |
976 | 327371d373819273a2f77f63e0cedee6950dbc46 | <mask token>
class RiskAnalysis(gtk.VPaned):
<mask token>
<mask token>
def create_risk_analysis_page(self, notebook):
"""
Method to create the development environment risk analysis page and add
it to the risk analysis gtk.Notebook().
:param gtk.Notebook notebook: the gtk.Notebook() instance that will
hold the development environment risk
analysis questions.
:return: False if successful or True if an error is encountered.
:rtype: bool
"""
_hpaned = gtk.HPaned()
self.pack1(_hpaned, resize=True, shrink=True)
_fixed = gtk.Fixed()
_scrollwindow = gtk.ScrolledWindow()
_scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
_scrollwindow.add_with_viewport(_fixed)
_frame = Widgets.make_frame(label=_(u'Organization'))
_frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)
_frame.add(_scrollwindow)
_hpaned.pack1(_frame, True, True)
_labels = [_(
u'1. There are separate design and coding organizations.'), _(
u'2. There is an independent software test organization.'), _(
u'3. There is an independent software quality assurance organization.'
), _(
u'4. There is an independent software configuration management organization.'
), _(
u'5. There is an independent software verification and validation organization.'
), _(
u'6. A structured programming team will develop the software.'),
_(
u'7. The educational level of the software team members is above average.'
), _(
u'8. The experience level of the software team members is above average.'
)]
_x_pos, _y_pos = Widgets.make_labels(_labels, _fixed, 5, 5, wrap=False)
_x_pos += 125
_fixed.put(self.chkDevEnvQ1, _x_pos, _y_pos[0])
_fixed.put(self.chkDevEnvQ2, _x_pos, _y_pos[1])
_fixed.put(self.chkDevEnvQ3, _x_pos, _y_pos[2])
_fixed.put(self.chkDevEnvQ4, _x_pos, _y_pos[3])
_fixed.put(self.chkDevEnvQ5, _x_pos, _y_pos[4])
_fixed.put(self.chkDevEnvQ6, _x_pos, _y_pos[5])
_fixed.put(self.chkDevEnvQ7, _x_pos, _y_pos[6])
_fixed.put(self.chkDevEnvQ8, _x_pos, _y_pos[7])
_fixed = gtk.Fixed()
_scrollwindow = gtk.ScrolledWindow()
_scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
_scrollwindow.add_with_viewport(_fixed)
_frame = Widgets.make_frame(label=_(u'Methods'))
_frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)
_frame.add(_scrollwindow)
_hpaned.pack2(_frame, True, True)
_labels = [_(u'1. Standards are defined and will be enforced.'), _(
u'2. Software will be developed using a higher order language.'
), _(
u'3. The development process will include formal reviews (PDR, CDR, etc.).'
), _(
u'4. The development process will include frequent walkthroughs.'
), _(
u'5. Development will take a top-down and structured approach.'
), _(u'6. Unit development folders will be used.'), _(
u'7. A software development library will be used.'), _(
u'8. A formal change and error reporting process will be used.'
), _(u'9. Progress and status will routinely be reported.')]
__, _y_pos = Widgets.make_labels(_labels, _fixed, 5, 5, wrap=False)
_fixed.put(self.chkDevEnvQ9, _x_pos, _y_pos[0])
_fixed.put(self.chkDevEnvQ10, _x_pos, _y_pos[1])
_fixed.put(self.chkDevEnvQ11, _x_pos, _y_pos[2])
_fixed.put(self.chkDevEnvQ12, _x_pos, _y_pos[3])
_fixed.put(self.chkDevEnvQ13, _x_pos, _y_pos[4])
_fixed.put(self.chkDevEnvQ14, _x_pos, _y_pos[5])
_fixed.put(self.chkDevEnvQ15, _x_pos, _y_pos[6])
_fixed.put(self.chkDevEnvQ16, _x_pos, _y_pos[7])
_fixed.put(self.chkDevEnvQ17, _x_pos, _y_pos[8])
_hpaned = gtk.HPaned()
self.pack2(_hpaned, resize=True, shrink=True)
_fixed = gtk.Fixed()
_scrollwindow = gtk.ScrolledWindow()
_scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
_scrollwindow.add_with_viewport(_fixed)
_frame = Widgets.make_frame(label=_(u'Documentation'))
_frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)
_frame.add(_scrollwindow)
_hpaned.pack1(_frame, True, True)
_labels = [_(
u' 1. System requirements specifications will be documented.'),
_(
u' 2. Software requirements specifications will be documented.'
), _(u' 3. Interface design specifications will be documented.'
), _(u' 4. Software design specification will be documented.'),
_(
u' 5. Test plans, procedures, and reports will be documented.'),
_(u' 6. The software development plan will be documented.'), _(
u' 7. The software quality assurance plan will be documented.'),
_(
u' 8. The software configuration management plan will be documented.'
), _(u' 9. A requirements traceability matrix will be used.'),
_(u'10. The software version description will be documented.'),
_(u'11. All software discrepancies will be documented.')]
__, _y_pos = Widgets.make_labels(_labels, _fixed, 5, 5, wrap=False)
_fixed.put(self.chkDevEnvQ18, _x_pos, _y_pos[0])
_fixed.put(self.chkDevEnvQ19, _x_pos, _y_pos[1])
_fixed.put(self.chkDevEnvQ20, _x_pos, _y_pos[2])
_fixed.put(self.chkDevEnvQ21, _x_pos, _y_pos[3])
_fixed.put(self.chkDevEnvQ22, _x_pos, _y_pos[4])
_fixed.put(self.chkDevEnvQ23, _x_pos, _y_pos[5])
_fixed.put(self.chkDevEnvQ24, _x_pos, _y_pos[6])
_fixed.put(self.chkDevEnvQ25, _x_pos, _y_pos[7])
_fixed.put(self.chkDevEnvQ26, _x_pos, _y_pos[8])
_fixed.put(self.chkDevEnvQ27, _x_pos, _y_pos[9])
_fixed.put(self.chkDevEnvQ28, _x_pos, _y_pos[10])
_fixed = gtk.Fixed()
_scrollwindow = gtk.ScrolledWindow()
_scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
_scrollwindow.add_with_viewport(_fixed)
_frame = Widgets.make_frame(label=_(u'Tools & Test Techniques'))
_frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)
_frame.add(_scrollwindow)
_hpaned.pack2(_frame, True, True)
_labels = [_(
u' 1. The software language requirements will be specified.'),
_(u' 2. Formal program design language will be used.'), _(
u' 3. Program design graphical techniques (flowcharts, HIPO, etc.) will be used.'
), _(u' 4. Simulation/emulation tools will be used.'), _(
u' 5. Configuration management tools will be used.'), _(
u' 6. A code auditing tool will be used.'), _(
u' 7. A data flow analyzer will be used.'), _(
u" 8. A programmer's workbench will be used."), _(
u' 9. Measurement tools will be used.'), _(
u'10. Software code reviews will be used.'), _(
u'11. Software branch testing will be used.'), _(
u'12. Random testing will be used.'), _(
u'13. Functional testing will be used.'), _(
u'14. Error and anomaly detection testing will be used.'), _(
u'15. Structure analysis will be used.')]
__, _y_pos = Widgets.make_labels(_labels, _fixed, 5, 5, wrap=False)
_fixed.put(self.chkDevEnvQ29, _x_pos, _y_pos[0])
_fixed.put(self.chkDevEnvQ30, _x_pos, _y_pos[1])
_fixed.put(self.chkDevEnvQ31, _x_pos, _y_pos[2])
_fixed.put(self.chkDevEnvQ32, _x_pos, _y_pos[3])
_fixed.put(self.chkDevEnvQ33, _x_pos, _y_pos[4])
_fixed.put(self.chkDevEnvQ34, _x_pos, _y_pos[5])
_fixed.put(self.chkDevEnvQ35, _x_pos, _y_pos[6])
_fixed.put(self.chkDevEnvQ36, _x_pos, _y_pos[7])
_fixed.put(self.chkDevEnvQ37, _x_pos, _y_pos[8])
_fixed.put(self.chkDevEnvQ38, _x_pos, _y_pos[9])
_fixed.put(self.chkDevEnvQ39, _x_pos, _y_pos[10])
_fixed.put(self.chkDevEnvQ40, _x_pos, _y_pos[11])
_fixed.put(self.chkDevEnvQ41, _x_pos, _y_pos[12])
_fixed.put(self.chkDevEnvQ42, _x_pos, _y_pos[13])
_fixed.put(self.chkDevEnvQ43, _x_pos, _y_pos[14])
_label = gtk.Label()
_label.set_markup("<span weight='bold'>" + _(
u'Development\nEnvironment') + '</span>')
_label.set_alignment(xalign=0.5, yalign=0.5)
_label.set_justify(gtk.JUSTIFY_CENTER)
_label.set_angle(0)
_label.show_all()
_label.set_tooltip_text(_(
u'Assesses risk due to the development environment.'))
notebook.insert_page(self, tab_label=_label, position=-1)
return False
def load(self, model):
"""
Method to load the Development Environment Risk Analysis answers.
:param `rtk.software.Software` model: the Software data model to load
the gtk.ToggleButton() from.
:return: False if successful or True if an error is encountered.
:rtype: bool
"""
self._software_model = model
self.chkDevEnvQ1.set_active(model.lst_development[0])
self.chkDevEnvQ2.set_active(model.lst_development[1])
self.chkDevEnvQ3.set_active(model.lst_development[2])
self.chkDevEnvQ4.set_active(model.lst_development[3])
self.chkDevEnvQ5.set_active(model.lst_development[4])
self.chkDevEnvQ6.set_active(model.lst_development[5])
self.chkDevEnvQ7.set_active(model.lst_development[6])
self.chkDevEnvQ8.set_active(model.lst_development[7])
self.chkDevEnvQ9.set_active(model.lst_development[8])
self.chkDevEnvQ10.set_active(model.lst_development[9])
self.chkDevEnvQ11.set_active(model.lst_development[10])
self.chkDevEnvQ12.set_active(model.lst_development[11])
self.chkDevEnvQ13.set_active(model.lst_development[12])
self.chkDevEnvQ14.set_active(model.lst_development[13])
self.chkDevEnvQ15.set_active(model.lst_development[14])
self.chkDevEnvQ16.set_active(model.lst_development[15])
self.chkDevEnvQ17.set_active(model.lst_development[16])
self.chkDevEnvQ18.set_active(model.lst_development[17])
self.chkDevEnvQ19.set_active(model.lst_development[18])
self.chkDevEnvQ20.set_active(model.lst_development[19])
self.chkDevEnvQ21.set_active(model.lst_development[20])
self.chkDevEnvQ22.set_active(model.lst_development[21])
self.chkDevEnvQ23.set_active(model.lst_development[22])
self.chkDevEnvQ24.set_active(model.lst_development[23])
self.chkDevEnvQ25.set_active(model.lst_development[24])
self.chkDevEnvQ26.set_active(model.lst_development[25])
self.chkDevEnvQ27.set_active(model.lst_development[26])
self.chkDevEnvQ28.set_active(model.lst_development[27])
self.chkDevEnvQ29.set_active(model.lst_development[28])
self.chkDevEnvQ30.set_active(model.lst_development[29])
self.chkDevEnvQ31.set_active(model.lst_development[30])
self.chkDevEnvQ32.set_active(model.lst_development[31])
self.chkDevEnvQ33.set_active(model.lst_development[32])
self.chkDevEnvQ34.set_active(model.lst_development[33])
self.chkDevEnvQ35.set_active(model.lst_development[34])
self.chkDevEnvQ36.set_active(model.lst_development[35])
self.chkDevEnvQ37.set_active(model.lst_development[36])
self.chkDevEnvQ38.set_active(model.lst_development[37])
self.chkDevEnvQ39.set_active(model.lst_development[38])
self.chkDevEnvQ40.set_active(model.lst_development[39])
self.chkDevEnvQ41.set_active(model.lst_development[40])
self.chkDevEnvQ42.set_active(model.lst_development[41])
self.chkDevEnvQ43.set_active(model.lst_development[42])
return False
def _on_toggled(self, check, index):
"""
Callback method for gtk.CheckButton() 'toggled' event.
:param gtk.CheckButton check: the gtk.CheckButton() that called this
method.
:param int index: the index of the Development Environment question
associated with the gtk.CheckButton() that was
toggled.
:return: False if successful or True if an error is encountered.
:rtype: bool
"""
check.handler_block(self._lst_handler_id[index])
self._software_model.lst_development[index] = int(check.get_active())
check.handler_unblock(self._lst_handler_id[index])
return False
| <mask token>
class RiskAnalysis(gtk.VPaned):
<mask token>
def __init__(self):
"""
Method to initialize the development environment risk analysis
questions Work Book page.
"""
gtk.VPaned.__init__(self)
self._lst_handler_id = []
self._software_model = None
self.chkDevEnvQ1 = Widgets.make_check_button()
self.chkDevEnvQ2 = Widgets.make_check_button()
self.chkDevEnvQ3 = Widgets.make_check_button()
self.chkDevEnvQ4 = Widgets.make_check_button()
self.chkDevEnvQ5 = Widgets.make_check_button()
self.chkDevEnvQ6 = Widgets.make_check_button()
self.chkDevEnvQ7 = Widgets.make_check_button()
self.chkDevEnvQ8 = Widgets.make_check_button()
self.chkDevEnvQ9 = Widgets.make_check_button()
self.chkDevEnvQ10 = Widgets.make_check_button()
self.chkDevEnvQ11 = Widgets.make_check_button()
self.chkDevEnvQ12 = Widgets.make_check_button()
self.chkDevEnvQ13 = Widgets.make_check_button()
self.chkDevEnvQ14 = Widgets.make_check_button()
self.chkDevEnvQ15 = Widgets.make_check_button()
self.chkDevEnvQ16 = Widgets.make_check_button()
self.chkDevEnvQ17 = Widgets.make_check_button()
self.chkDevEnvQ18 = Widgets.make_check_button()
self.chkDevEnvQ19 = Widgets.make_check_button()
self.chkDevEnvQ20 = Widgets.make_check_button()
self.chkDevEnvQ21 = Widgets.make_check_button()
self.chkDevEnvQ22 = Widgets.make_check_button()
self.chkDevEnvQ23 = Widgets.make_check_button()
self.chkDevEnvQ24 = Widgets.make_check_button()
self.chkDevEnvQ25 = Widgets.make_check_button()
self.chkDevEnvQ26 = Widgets.make_check_button()
self.chkDevEnvQ27 = Widgets.make_check_button()
self.chkDevEnvQ28 = Widgets.make_check_button()
self.chkDevEnvQ29 = Widgets.make_check_button()
self.chkDevEnvQ30 = Widgets.make_check_button()
self.chkDevEnvQ31 = Widgets.make_check_button()
self.chkDevEnvQ32 = Widgets.make_check_button()
self.chkDevEnvQ33 = Widgets.make_check_button()
self.chkDevEnvQ34 = Widgets.make_check_button()
self.chkDevEnvQ35 = Widgets.make_check_button()
self.chkDevEnvQ36 = Widgets.make_check_button()
self.chkDevEnvQ37 = Widgets.make_check_button()
self.chkDevEnvQ38 = Widgets.make_check_button()
self.chkDevEnvQ39 = Widgets.make_check_button()
self.chkDevEnvQ40 = Widgets.make_check_button()
self.chkDevEnvQ41 = Widgets.make_check_button()
self.chkDevEnvQ42 = Widgets.make_check_button()
self.chkDevEnvQ43 = Widgets.make_check_button()
self._lst_handler_id.append(self.chkDevEnvQ1.connect('toggled',
self._on_toggled, 0))
self._lst_handler_id.append(self.chkDevEnvQ2.connect('toggled',
self._on_toggled, 1))
self._lst_handler_id.append(self.chkDevEnvQ3.connect('toggled',
self._on_toggled, 2))
self._lst_handler_id.append(self.chkDevEnvQ4.connect('toggled',
self._on_toggled, 3))
self._lst_handler_id.append(self.chkDevEnvQ5.connect('toggled',
self._on_toggled, 4))
self._lst_handler_id.append(self.chkDevEnvQ6.connect('toggled',
self._on_toggled, 5))
self._lst_handler_id.append(self.chkDevEnvQ7.connect('toggled',
self._on_toggled, 6))
self._lst_handler_id.append(self.chkDevEnvQ8.connect('toggled',
self._on_toggled, 7))
self._lst_handler_id.append(self.chkDevEnvQ9.connect('toggled',
self._on_toggled, 8))
self._lst_handler_id.append(self.chkDevEnvQ10.connect('toggled',
self._on_toggled, 9))
self._lst_handler_id.append(self.chkDevEnvQ11.connect('toggled',
self._on_toggled, 10))
self._lst_handler_id.append(self.chkDevEnvQ12.connect('toggled',
self._on_toggled, 11))
self._lst_handler_id.append(self.chkDevEnvQ13.connect('toggled',
self._on_toggled, 12))
self._lst_handler_id.append(self.chkDevEnvQ14.connect('toggled',
self._on_toggled, 13))
self._lst_handler_id.append(self.chkDevEnvQ15.connect('toggled',
self._on_toggled, 14))
self._lst_handler_id.append(self.chkDevEnvQ16.connect('toggled',
self._on_toggled, 15))
self._lst_handler_id.append(self.chkDevEnvQ17.connect('toggled',
self._on_toggled, 16))
self._lst_handler_id.append(self.chkDevEnvQ18.connect('toggled',
self._on_toggled, 17))
self._lst_handler_id.append(self.chkDevEnvQ19.connect('toggled',
self._on_toggled, 18))
self._lst_handler_id.append(self.chkDevEnvQ20.connect('toggled',
self._on_toggled, 19))
self._lst_handler_id.append(self.chkDevEnvQ21.connect('toggled',
self._on_toggled, 20))
self._lst_handler_id.append(self.chkDevEnvQ22.connect('toggled',
self._on_toggled, 21))
self._lst_handler_id.append(self.chkDevEnvQ23.connect('toggled',
self._on_toggled, 22))
self._lst_handler_id.append(self.chkDevEnvQ24.connect('toggled',
self._on_toggled, 23))
self._lst_handler_id.append(self.chkDevEnvQ25.connect('toggled',
self._on_toggled, 24))
self._lst_handler_id.append(self.chkDevEnvQ26.connect('toggled',
self._on_toggled, 25))
self._lst_handler_id.append(self.chkDevEnvQ27.connect('toggled',
self._on_toggled, 26))
self._lst_handler_id.append(self.chkDevEnvQ28.connect('toggled',
self._on_toggled, 27))
self._lst_handler_id.append(self.chkDevEnvQ29.connect('toggled',
self._on_toggled, 28))
self._lst_handler_id.append(self.chkDevEnvQ30.connect('toggled',
self._on_toggled, 29))
self._lst_handler_id.append(self.chkDevEnvQ31.connect('toggled',
self._on_toggled, 30))
self._lst_handler_id.append(self.chkDevEnvQ32.connect('toggled',
self._on_toggled, 31))
self._lst_handler_id.append(self.chkDevEnvQ33.connect('toggled',
self._on_toggled, 32))
self._lst_handler_id.append(self.chkDevEnvQ34.connect('toggled',
self._on_toggled, 33))
self._lst_handler_id.append(self.chkDevEnvQ35.connect('toggled',
self._on_toggled, 34))
self._lst_handler_id.append(self.chkDevEnvQ36.connect('toggled',
self._on_toggled, 35))
self._lst_handler_id.append(self.chkDevEnvQ37.connect('toggled',
self._on_toggled, 36))
self._lst_handler_id.append(self.chkDevEnvQ38.connect('toggled',
self._on_toggled, 37))
self._lst_handler_id.append(self.chkDevEnvQ39.connect('toggled',
self._on_toggled, 38))
self._lst_handler_id.append(self.chkDevEnvQ40.connect('toggled',
self._on_toggled, 39))
self._lst_handler_id.append(self.chkDevEnvQ41.connect('toggled',
self._on_toggled, 40))
self._lst_handler_id.append(self.chkDevEnvQ42.connect('toggled',
self._on_toggled, 41))
self._lst_handler_id.append(self.chkDevEnvQ43.connect('toggled',
self._on_toggled, 42))
def create_risk_analysis_page(self, notebook):
"""
Method to create the development environment risk analysis page and add
it to the risk analysis gtk.Notebook().
:param gtk.Notebook notebook: the gtk.Notebook() instance that will
hold the development environment risk
analysis questions.
:return: False if successful or True if an error is encountered.
:rtype: bool
"""
_hpaned = gtk.HPaned()
self.pack1(_hpaned, resize=True, shrink=True)
_fixed = gtk.Fixed()
_scrollwindow = gtk.ScrolledWindow()
_scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
_scrollwindow.add_with_viewport(_fixed)
_frame = Widgets.make_frame(label=_(u'Organization'))
_frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)
_frame.add(_scrollwindow)
_hpaned.pack1(_frame, True, True)
_labels = [_(
u'1. There are separate design and coding organizations.'), _(
u'2. There is an independent software test organization.'), _(
u'3. There is an independent software quality assurance organization.'
), _(
u'4. There is an independent software configuration management organization.'
), _(
u'5. There is an independent software verification and validation organization.'
), _(
u'6. A structured programming team will develop the software.'),
_(
u'7. The educational level of the software team members is above average.'
), _(
u'8. The experience level of the software team members is above average.'
)]
_x_pos, _y_pos = Widgets.make_labels(_labels, _fixed, 5, 5, wrap=False)
_x_pos += 125
_fixed.put(self.chkDevEnvQ1, _x_pos, _y_pos[0])
_fixed.put(self.chkDevEnvQ2, _x_pos, _y_pos[1])
_fixed.put(self.chkDevEnvQ3, _x_pos, _y_pos[2])
_fixed.put(self.chkDevEnvQ4, _x_pos, _y_pos[3])
_fixed.put(self.chkDevEnvQ5, _x_pos, _y_pos[4])
_fixed.put(self.chkDevEnvQ6, _x_pos, _y_pos[5])
_fixed.put(self.chkDevEnvQ7, _x_pos, _y_pos[6])
_fixed.put(self.chkDevEnvQ8, _x_pos, _y_pos[7])
_fixed = gtk.Fixed()
_scrollwindow = gtk.ScrolledWindow()
_scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
_scrollwindow.add_with_viewport(_fixed)
_frame = Widgets.make_frame(label=_(u'Methods'))
_frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)
_frame.add(_scrollwindow)
_hpaned.pack2(_frame, True, True)
_labels = [_(u'1. Standards are defined and will be enforced.'), _(
u'2. Software will be developed using a higher order language.'
), _(
u'3. The development process will include formal reviews (PDR, CDR, etc.).'
), _(
u'4. The development process will include frequent walkthroughs.'
), _(
u'5. Development will take a top-down and structured approach.'
), _(u'6. Unit development folders will be used.'), _(
u'7. A software development library will be used.'), _(
u'8. A formal change and error reporting process will be used.'
), _(u'9. Progress and status will routinely be reported.')]
__, _y_pos = Widgets.make_labels(_labels, _fixed, 5, 5, wrap=False)
_fixed.put(self.chkDevEnvQ9, _x_pos, _y_pos[0])
_fixed.put(self.chkDevEnvQ10, _x_pos, _y_pos[1])
_fixed.put(self.chkDevEnvQ11, _x_pos, _y_pos[2])
_fixed.put(self.chkDevEnvQ12, _x_pos, _y_pos[3])
_fixed.put(self.chkDevEnvQ13, _x_pos, _y_pos[4])
_fixed.put(self.chkDevEnvQ14, _x_pos, _y_pos[5])
_fixed.put(self.chkDevEnvQ15, _x_pos, _y_pos[6])
_fixed.put(self.chkDevEnvQ16, _x_pos, _y_pos[7])
_fixed.put(self.chkDevEnvQ17, _x_pos, _y_pos[8])
_hpaned = gtk.HPaned()
self.pack2(_hpaned, resize=True, shrink=True)
_fixed = gtk.Fixed()
_scrollwindow = gtk.ScrolledWindow()
_scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
_scrollwindow.add_with_viewport(_fixed)
_frame = Widgets.make_frame(label=_(u'Documentation'))
_frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)
_frame.add(_scrollwindow)
_hpaned.pack1(_frame, True, True)
_labels = [_(
u' 1. System requirements specifications will be documented.'),
_(
u' 2. Software requirements specifications will be documented.'
), _(u' 3. Interface design specifications will be documented.'
), _(u' 4. Software design specification will be documented.'),
_(
u' 5. Test plans, procedures, and reports will be documented.'),
_(u' 6. The software development plan will be documented.'), _(
u' 7. The software quality assurance plan will be documented.'),
_(
u' 8. The software configuration management plan will be documented.'
), _(u' 9. A requirements traceability matrix will be used.'),
_(u'10. The software version description will be documented.'),
_(u'11. All software discrepancies will be documented.')]
__, _y_pos = Widgets.make_labels(_labels, _fixed, 5, 5, wrap=False)
_fixed.put(self.chkDevEnvQ18, _x_pos, _y_pos[0])
_fixed.put(self.chkDevEnvQ19, _x_pos, _y_pos[1])
_fixed.put(self.chkDevEnvQ20, _x_pos, _y_pos[2])
_fixed.put(self.chkDevEnvQ21, _x_pos, _y_pos[3])
_fixed.put(self.chkDevEnvQ22, _x_pos, _y_pos[4])
_fixed.put(self.chkDevEnvQ23, _x_pos, _y_pos[5])
_fixed.put(self.chkDevEnvQ24, _x_pos, _y_pos[6])
_fixed.put(self.chkDevEnvQ25, _x_pos, _y_pos[7])
_fixed.put(self.chkDevEnvQ26, _x_pos, _y_pos[8])
_fixed.put(self.chkDevEnvQ27, _x_pos, _y_pos[9])
_fixed.put(self.chkDevEnvQ28, _x_pos, _y_pos[10])
_fixed = gtk.Fixed()
_scrollwindow = gtk.ScrolledWindow()
_scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
_scrollwindow.add_with_viewport(_fixed)
_frame = Widgets.make_frame(label=_(u'Tools & Test Techniques'))
_frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)
_frame.add(_scrollwindow)
_hpaned.pack2(_frame, True, True)
_labels = [_(
u' 1. The software language requirements will be specified.'),
_(u' 2. Formal program design language will be used.'), _(
u' 3. Program design graphical techniques (flowcharts, HIPO, etc.) will be used.'
), _(u' 4. Simulation/emulation tools will be used.'), _(
u' 5. Configuration management tools will be used.'), _(
u' 6. A code auditing tool will be used.'), _(
u' 7. A data flow analyzer will be used.'), _(
u" 8. A programmer's workbench will be used."), _(
u' 9. Measurement tools will be used.'), _(
u'10. Software code reviews will be used.'), _(
u'11. Software branch testing will be used.'), _(
u'12. Random testing will be used.'), _(
u'13. Functional testing will be used.'), _(
u'14. Error and anomaly detection testing will be used.'), _(
u'15. Structure analysis will be used.')]
__, _y_pos = Widgets.make_labels(_labels, _fixed, 5, 5, wrap=False)
_fixed.put(self.chkDevEnvQ29, _x_pos, _y_pos[0])
_fixed.put(self.chkDevEnvQ30, _x_pos, _y_pos[1])
_fixed.put(self.chkDevEnvQ31, _x_pos, _y_pos[2])
_fixed.put(self.chkDevEnvQ32, _x_pos, _y_pos[3])
_fixed.put(self.chkDevEnvQ33, _x_pos, _y_pos[4])
_fixed.put(self.chkDevEnvQ34, _x_pos, _y_pos[5])
_fixed.put(self.chkDevEnvQ35, _x_pos, _y_pos[6])
_fixed.put(self.chkDevEnvQ36, _x_pos, _y_pos[7])
_fixed.put(self.chkDevEnvQ37, _x_pos, _y_pos[8])
_fixed.put(self.chkDevEnvQ38, _x_pos, _y_pos[9])
_fixed.put(self.chkDevEnvQ39, _x_pos, _y_pos[10])
_fixed.put(self.chkDevEnvQ40, _x_pos, _y_pos[11])
_fixed.put(self.chkDevEnvQ41, _x_pos, _y_pos[12])
_fixed.put(self.chkDevEnvQ42, _x_pos, _y_pos[13])
_fixed.put(self.chkDevEnvQ43, _x_pos, _y_pos[14])
_label = gtk.Label()
_label.set_markup("<span weight='bold'>" + _(
u'Development\nEnvironment') + '</span>')
_label.set_alignment(xalign=0.5, yalign=0.5)
_label.set_justify(gtk.JUSTIFY_CENTER)
_label.set_angle(0)
_label.show_all()
_label.set_tooltip_text(_(
u'Assesses risk due to the development environment.'))
notebook.insert_page(self, tab_label=_label, position=-1)
return False
def load(self, model):
"""
Method to load the Development Environment Risk Analysis answers.
:param `rtk.software.Software` model: the Software data model to load
the gtk.ToggleButton() from.
:return: False if successful or True if an error is encountered.
:rtype: bool
"""
self._software_model = model
self.chkDevEnvQ1.set_active(model.lst_development[0])
self.chkDevEnvQ2.set_active(model.lst_development[1])
self.chkDevEnvQ3.set_active(model.lst_development[2])
self.chkDevEnvQ4.set_active(model.lst_development[3])
self.chkDevEnvQ5.set_active(model.lst_development[4])
self.chkDevEnvQ6.set_active(model.lst_development[5])
self.chkDevEnvQ7.set_active(model.lst_development[6])
self.chkDevEnvQ8.set_active(model.lst_development[7])
self.chkDevEnvQ9.set_active(model.lst_development[8])
self.chkDevEnvQ10.set_active(model.lst_development[9])
self.chkDevEnvQ11.set_active(model.lst_development[10])
self.chkDevEnvQ12.set_active(model.lst_development[11])
self.chkDevEnvQ13.set_active(model.lst_development[12])
self.chkDevEnvQ14.set_active(model.lst_development[13])
self.chkDevEnvQ15.set_active(model.lst_development[14])
self.chkDevEnvQ16.set_active(model.lst_development[15])
self.chkDevEnvQ17.set_active(model.lst_development[16])
self.chkDevEnvQ18.set_active(model.lst_development[17])
self.chkDevEnvQ19.set_active(model.lst_development[18])
self.chkDevEnvQ20.set_active(model.lst_development[19])
self.chkDevEnvQ21.set_active(model.lst_development[20])
self.chkDevEnvQ22.set_active(model.lst_development[21])
self.chkDevEnvQ23.set_active(model.lst_development[22])
self.chkDevEnvQ24.set_active(model.lst_development[23])
self.chkDevEnvQ25.set_active(model.lst_development[24])
self.chkDevEnvQ26.set_active(model.lst_development[25])
self.chkDevEnvQ27.set_active(model.lst_development[26])
self.chkDevEnvQ28.set_active(model.lst_development[27])
self.chkDevEnvQ29.set_active(model.lst_development[28])
self.chkDevEnvQ30.set_active(model.lst_development[29])
self.chkDevEnvQ31.set_active(model.lst_development[30])
self.chkDevEnvQ32.set_active(model.lst_development[31])
self.chkDevEnvQ33.set_active(model.lst_development[32])
self.chkDevEnvQ34.set_active(model.lst_development[33])
self.chkDevEnvQ35.set_active(model.lst_development[34])
self.chkDevEnvQ36.set_active(model.lst_development[35])
self.chkDevEnvQ37.set_active(model.lst_development[36])
self.chkDevEnvQ38.set_active(model.lst_development[37])
self.chkDevEnvQ39.set_active(model.lst_development[38])
self.chkDevEnvQ40.set_active(model.lst_development[39])
self.chkDevEnvQ41.set_active(model.lst_development[40])
self.chkDevEnvQ42.set_active(model.lst_development[41])
self.chkDevEnvQ43.set_active(model.lst_development[42])
return False
def _on_toggled(self, check, index):
"""
Callback method for gtk.CheckButton() 'toggled' event.
:param gtk.CheckButton check: the gtk.CheckButton() that called this
method.
:param int index: the index of the Development Environment question
associated with the gtk.CheckButton() that was
toggled.
:return: False if successful or True if an error is encountered.
:rtype: bool
"""
check.handler_block(self._lst_handler_id[index])
self._software_model.lst_development[index] = int(check.get_active())
check.handler_unblock(self._lst_handler_id[index])
return False
| <mask token>
# Import the PyGTK stack.  These are hard requirements for the GUI, so bail
# out immediately if any of them cannot be loaded.
try:
    import pygtk
    pygtk.require('2.0')
except ImportError:
    sys.exit(1)
try:
    import gtk
except ImportError:
    sys.exit(1)
try:
    import gtk.glade
except ImportError:
    sys.exit(1)
# Import other RTK modules.  The import paths differ depending on whether RTK
# is being run from the source tree or from an installed package.
try:
    import Configuration
    import gui.gtk.Widgets as Widgets
except ImportError:
    import rtk.Configuration as Configuration
    import rtk.gui.gtk.Widgets as Widgets
<mask token>
# Apply the locale from the RTK configuration; fall back to the system
# default locale if the configured one is not supported on this machine.
try:
    locale.setlocale(locale.LC_ALL, Configuration.LOCALE)
except locale.Error:
    locale.setlocale(locale.LC_ALL, '')
<mask token>
class RiskAnalysis(gtk.VPaned):
"""
The Work Book view for analyzing and displaying the risk associated with
the development environment. The attributes of a development environment
Work Book view are:
:ivar list _lst_handler_id: the list of gtk.Widget() signal handler IDs.
:ivar _software_model: the :py:class:`rtk.software.Software.Model` to
display.
"""
def __init__(self):
"""
Method to initialize the development environment risk analysis
questions Work Book page.
"""
gtk.VPaned.__init__(self)
self._lst_handler_id = []
self._software_model = None
self.chkDevEnvQ1 = Widgets.make_check_button()
self.chkDevEnvQ2 = Widgets.make_check_button()
self.chkDevEnvQ3 = Widgets.make_check_button()
self.chkDevEnvQ4 = Widgets.make_check_button()
self.chkDevEnvQ5 = Widgets.make_check_button()
self.chkDevEnvQ6 = Widgets.make_check_button()
self.chkDevEnvQ7 = Widgets.make_check_button()
self.chkDevEnvQ8 = Widgets.make_check_button()
self.chkDevEnvQ9 = Widgets.make_check_button()
self.chkDevEnvQ10 = Widgets.make_check_button()
self.chkDevEnvQ11 = Widgets.make_check_button()
self.chkDevEnvQ12 = Widgets.make_check_button()
self.chkDevEnvQ13 = Widgets.make_check_button()
self.chkDevEnvQ14 = Widgets.make_check_button()
self.chkDevEnvQ15 = Widgets.make_check_button()
self.chkDevEnvQ16 = Widgets.make_check_button()
self.chkDevEnvQ17 = Widgets.make_check_button()
self.chkDevEnvQ18 = Widgets.make_check_button()
self.chkDevEnvQ19 = Widgets.make_check_button()
self.chkDevEnvQ20 = Widgets.make_check_button()
self.chkDevEnvQ21 = Widgets.make_check_button()
self.chkDevEnvQ22 = Widgets.make_check_button()
self.chkDevEnvQ23 = Widgets.make_check_button()
self.chkDevEnvQ24 = Widgets.make_check_button()
self.chkDevEnvQ25 = Widgets.make_check_button()
self.chkDevEnvQ26 = Widgets.make_check_button()
self.chkDevEnvQ27 = Widgets.make_check_button()
self.chkDevEnvQ28 = Widgets.make_check_button()
self.chkDevEnvQ29 = Widgets.make_check_button()
self.chkDevEnvQ30 = Widgets.make_check_button()
self.chkDevEnvQ31 = Widgets.make_check_button()
self.chkDevEnvQ32 = Widgets.make_check_button()
self.chkDevEnvQ33 = Widgets.make_check_button()
self.chkDevEnvQ34 = Widgets.make_check_button()
self.chkDevEnvQ35 = Widgets.make_check_button()
self.chkDevEnvQ36 = Widgets.make_check_button()
self.chkDevEnvQ37 = Widgets.make_check_button()
self.chkDevEnvQ38 = Widgets.make_check_button()
self.chkDevEnvQ39 = Widgets.make_check_button()
self.chkDevEnvQ40 = Widgets.make_check_button()
self.chkDevEnvQ41 = Widgets.make_check_button()
self.chkDevEnvQ42 = Widgets.make_check_button()
self.chkDevEnvQ43 = Widgets.make_check_button()
self._lst_handler_id.append(self.chkDevEnvQ1.connect('toggled',
self._on_toggled, 0))
self._lst_handler_id.append(self.chkDevEnvQ2.connect('toggled',
self._on_toggled, 1))
self._lst_handler_id.append(self.chkDevEnvQ3.connect('toggled',
self._on_toggled, 2))
self._lst_handler_id.append(self.chkDevEnvQ4.connect('toggled',
self._on_toggled, 3))
self._lst_handler_id.append(self.chkDevEnvQ5.connect('toggled',
self._on_toggled, 4))
self._lst_handler_id.append(self.chkDevEnvQ6.connect('toggled',
self._on_toggled, 5))
self._lst_handler_id.append(self.chkDevEnvQ7.connect('toggled',
self._on_toggled, 6))
self._lst_handler_id.append(self.chkDevEnvQ8.connect('toggled',
self._on_toggled, 7))
self._lst_handler_id.append(self.chkDevEnvQ9.connect('toggled',
self._on_toggled, 8))
self._lst_handler_id.append(self.chkDevEnvQ10.connect('toggled',
self._on_toggled, 9))
self._lst_handler_id.append(self.chkDevEnvQ11.connect('toggled',
self._on_toggled, 10))
self._lst_handler_id.append(self.chkDevEnvQ12.connect('toggled',
self._on_toggled, 11))
self._lst_handler_id.append(self.chkDevEnvQ13.connect('toggled',
self._on_toggled, 12))
self._lst_handler_id.append(self.chkDevEnvQ14.connect('toggled',
self._on_toggled, 13))
self._lst_handler_id.append(self.chkDevEnvQ15.connect('toggled',
self._on_toggled, 14))
self._lst_handler_id.append(self.chkDevEnvQ16.connect('toggled',
self._on_toggled, 15))
self._lst_handler_id.append(self.chkDevEnvQ17.connect('toggled',
self._on_toggled, 16))
self._lst_handler_id.append(self.chkDevEnvQ18.connect('toggled',
self._on_toggled, 17))
self._lst_handler_id.append(self.chkDevEnvQ19.connect('toggled',
self._on_toggled, 18))
self._lst_handler_id.append(self.chkDevEnvQ20.connect('toggled',
self._on_toggled, 19))
self._lst_handler_id.append(self.chkDevEnvQ21.connect('toggled',
self._on_toggled, 20))
self._lst_handler_id.append(self.chkDevEnvQ22.connect('toggled',
self._on_toggled, 21))
self._lst_handler_id.append(self.chkDevEnvQ23.connect('toggled',
self._on_toggled, 22))
self._lst_handler_id.append(self.chkDevEnvQ24.connect('toggled',
self._on_toggled, 23))
self._lst_handler_id.append(self.chkDevEnvQ25.connect('toggled',
self._on_toggled, 24))
self._lst_handler_id.append(self.chkDevEnvQ26.connect('toggled',
self._on_toggled, 25))
self._lst_handler_id.append(self.chkDevEnvQ27.connect('toggled',
self._on_toggled, 26))
self._lst_handler_id.append(self.chkDevEnvQ28.connect('toggled',
self._on_toggled, 27))
self._lst_handler_id.append(self.chkDevEnvQ29.connect('toggled',
self._on_toggled, 28))
self._lst_handler_id.append(self.chkDevEnvQ30.connect('toggled',
self._on_toggled, 29))
self._lst_handler_id.append(self.chkDevEnvQ31.connect('toggled',
self._on_toggled, 30))
self._lst_handler_id.append(self.chkDevEnvQ32.connect('toggled',
self._on_toggled, 31))
self._lst_handler_id.append(self.chkDevEnvQ33.connect('toggled',
self._on_toggled, 32))
self._lst_handler_id.append(self.chkDevEnvQ34.connect('toggled',
self._on_toggled, 33))
self._lst_handler_id.append(self.chkDevEnvQ35.connect('toggled',
self._on_toggled, 34))
self._lst_handler_id.append(self.chkDevEnvQ36.connect('toggled',
self._on_toggled, 35))
self._lst_handler_id.append(self.chkDevEnvQ37.connect('toggled',
self._on_toggled, 36))
self._lst_handler_id.append(self.chkDevEnvQ38.connect('toggled',
self._on_toggled, 37))
self._lst_handler_id.append(self.chkDevEnvQ39.connect('toggled',
self._on_toggled, 38))
self._lst_handler_id.append(self.chkDevEnvQ40.connect('toggled',
self._on_toggled, 39))
self._lst_handler_id.append(self.chkDevEnvQ41.connect('toggled',
self._on_toggled, 40))
self._lst_handler_id.append(self.chkDevEnvQ42.connect('toggled',
self._on_toggled, 41))
self._lst_handler_id.append(self.chkDevEnvQ43.connect('toggled',
self._on_toggled, 42))
def create_risk_analysis_page(self, notebook):
"""
Method to create the development environment risk analysis page and add
it to the risk analysis gtk.Notebook().
:param gtk.Notebook notebook: the gtk.Notebook() instance that will
hold the development environment risk
analysis questions.
:return: False if successful or True if an error is encountered.
:rtype: bool
"""
_hpaned = gtk.HPaned()
self.pack1(_hpaned, resize=True, shrink=True)
_fixed = gtk.Fixed()
_scrollwindow = gtk.ScrolledWindow()
_scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
_scrollwindow.add_with_viewport(_fixed)
_frame = Widgets.make_frame(label=_(u'Organization'))
_frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)
_frame.add(_scrollwindow)
_hpaned.pack1(_frame, True, True)
_labels = [_(
u'1. There are separate design and coding organizations.'), _(
u'2. There is an independent software test organization.'), _(
u'3. There is an independent software quality assurance organization.'
), _(
u'4. There is an independent software configuration management organization.'
), _(
u'5. There is an independent software verification and validation organization.'
), _(
u'6. A structured programming team will develop the software.'),
_(
u'7. The educational level of the software team members is above average.'
), _(
u'8. The experience level of the software team members is above average.'
)]
_x_pos, _y_pos = Widgets.make_labels(_labels, _fixed, 5, 5, wrap=False)
_x_pos += 125
_fixed.put(self.chkDevEnvQ1, _x_pos, _y_pos[0])
_fixed.put(self.chkDevEnvQ2, _x_pos, _y_pos[1])
_fixed.put(self.chkDevEnvQ3, _x_pos, _y_pos[2])
_fixed.put(self.chkDevEnvQ4, _x_pos, _y_pos[3])
_fixed.put(self.chkDevEnvQ5, _x_pos, _y_pos[4])
_fixed.put(self.chkDevEnvQ6, _x_pos, _y_pos[5])
_fixed.put(self.chkDevEnvQ7, _x_pos, _y_pos[6])
_fixed.put(self.chkDevEnvQ8, _x_pos, _y_pos[7])
_fixed = gtk.Fixed()
_scrollwindow = gtk.ScrolledWindow()
_scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
_scrollwindow.add_with_viewport(_fixed)
_frame = Widgets.make_frame(label=_(u'Methods'))
_frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)
_frame.add(_scrollwindow)
_hpaned.pack2(_frame, True, True)
_labels = [_(u'1. Standards are defined and will be enforced.'), _(
u'2. Software will be developed using a higher order language.'
), _(
u'3. The development process will include formal reviews (PDR, CDR, etc.).'
), _(
u'4. The development process will include frequent walkthroughs.'
), _(
u'5. Development will take a top-down and structured approach.'
), _(u'6. Unit development folders will be used.'), _(
u'7. A software development library will be used.'), _(
u'8. A formal change and error reporting process will be used.'
), _(u'9. Progress and status will routinely be reported.')]
__, _y_pos = Widgets.make_labels(_labels, _fixed, 5, 5, wrap=False)
_fixed.put(self.chkDevEnvQ9, _x_pos, _y_pos[0])
_fixed.put(self.chkDevEnvQ10, _x_pos, _y_pos[1])
_fixed.put(self.chkDevEnvQ11, _x_pos, _y_pos[2])
_fixed.put(self.chkDevEnvQ12, _x_pos, _y_pos[3])
_fixed.put(self.chkDevEnvQ13, _x_pos, _y_pos[4])
_fixed.put(self.chkDevEnvQ14, _x_pos, _y_pos[5])
_fixed.put(self.chkDevEnvQ15, _x_pos, _y_pos[6])
_fixed.put(self.chkDevEnvQ16, _x_pos, _y_pos[7])
_fixed.put(self.chkDevEnvQ17, _x_pos, _y_pos[8])
_hpaned = gtk.HPaned()
self.pack2(_hpaned, resize=True, shrink=True)
_fixed = gtk.Fixed()
_scrollwindow = gtk.ScrolledWindow()
_scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
_scrollwindow.add_with_viewport(_fixed)
_frame = Widgets.make_frame(label=_(u'Documentation'))
_frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)
_frame.add(_scrollwindow)
_hpaned.pack1(_frame, True, True)
_labels = [_(
u' 1. System requirements specifications will be documented.'),
_(
u' 2. Software requirements specifications will be documented.'
), _(u' 3. Interface design specifications will be documented.'
), _(u' 4. Software design specification will be documented.'),
_(
u' 5. Test plans, procedures, and reports will be documented.'),
_(u' 6. The software development plan will be documented.'), _(
u' 7. The software quality assurance plan will be documented.'),
_(
u' 8. The software configuration management plan will be documented.'
), _(u' 9. A requirements traceability matrix will be used.'),
_(u'10. The software version description will be documented.'),
_(u'11. All software discrepancies will be documented.')]
__, _y_pos = Widgets.make_labels(_labels, _fixed, 5, 5, wrap=False)
_fixed.put(self.chkDevEnvQ18, _x_pos, _y_pos[0])
_fixed.put(self.chkDevEnvQ19, _x_pos, _y_pos[1])
_fixed.put(self.chkDevEnvQ20, _x_pos, _y_pos[2])
_fixed.put(self.chkDevEnvQ21, _x_pos, _y_pos[3])
_fixed.put(self.chkDevEnvQ22, _x_pos, _y_pos[4])
_fixed.put(self.chkDevEnvQ23, _x_pos, _y_pos[5])
_fixed.put(self.chkDevEnvQ24, _x_pos, _y_pos[6])
_fixed.put(self.chkDevEnvQ25, _x_pos, _y_pos[7])
_fixed.put(self.chkDevEnvQ26, _x_pos, _y_pos[8])
_fixed.put(self.chkDevEnvQ27, _x_pos, _y_pos[9])
_fixed.put(self.chkDevEnvQ28, _x_pos, _y_pos[10])
_fixed = gtk.Fixed()
_scrollwindow = gtk.ScrolledWindow()
_scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
_scrollwindow.add_with_viewport(_fixed)
_frame = Widgets.make_frame(label=_(u'Tools & Test Techniques'))
_frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)
_frame.add(_scrollwindow)
_hpaned.pack2(_frame, True, True)
_labels = [_(
u' 1. The software language requirements will be specified.'),
_(u' 2. Formal program design language will be used.'), _(
u' 3. Program design graphical techniques (flowcharts, HIPO, etc.) will be used.'
), _(u' 4. Simulation/emulation tools will be used.'), _(
u' 5. Configuration management tools will be used.'), _(
u' 6. A code auditing tool will be used.'), _(
u' 7. A data flow analyzer will be used.'), _(
u" 8. A programmer's workbench will be used."), _(
u' 9. Measurement tools will be used.'), _(
u'10. Software code reviews will be used.'), _(
u'11. Software branch testing will be used.'), _(
u'12. Random testing will be used.'), _(
u'13. Functional testing will be used.'), _(
u'14. Error and anomaly detection testing will be used.'), _(
u'15. Structure analysis will be used.')]
__, _y_pos = Widgets.make_labels(_labels, _fixed, 5, 5, wrap=False)
_fixed.put(self.chkDevEnvQ29, _x_pos, _y_pos[0])
_fixed.put(self.chkDevEnvQ30, _x_pos, _y_pos[1])
_fixed.put(self.chkDevEnvQ31, _x_pos, _y_pos[2])
_fixed.put(self.chkDevEnvQ32, _x_pos, _y_pos[3])
_fixed.put(self.chkDevEnvQ33, _x_pos, _y_pos[4])
_fixed.put(self.chkDevEnvQ34, _x_pos, _y_pos[5])
_fixed.put(self.chkDevEnvQ35, _x_pos, _y_pos[6])
_fixed.put(self.chkDevEnvQ36, _x_pos, _y_pos[7])
_fixed.put(self.chkDevEnvQ37, _x_pos, _y_pos[8])
_fixed.put(self.chkDevEnvQ38, _x_pos, _y_pos[9])
_fixed.put(self.chkDevEnvQ39, _x_pos, _y_pos[10])
_fixed.put(self.chkDevEnvQ40, _x_pos, _y_pos[11])
_fixed.put(self.chkDevEnvQ41, _x_pos, _y_pos[12])
_fixed.put(self.chkDevEnvQ42, _x_pos, _y_pos[13])
_fixed.put(self.chkDevEnvQ43, _x_pos, _y_pos[14])
_label = gtk.Label()
_label.set_markup("<span weight='bold'>" + _(
u'Development\nEnvironment') + '</span>')
_label.set_alignment(xalign=0.5, yalign=0.5)
_label.set_justify(gtk.JUSTIFY_CENTER)
_label.set_angle(0)
_label.show_all()
_label.set_tooltip_text(_(
u'Assesses risk due to the development environment.'))
notebook.insert_page(self, tab_label=_label, position=-1)
return False
def load(self, model):
"""
Method to load the Development Environment Risk Analysis answers.
:param `rtk.software.Software` model: the Software data model to load
the gtk.ToggleButton() from.
:return: False if successful or True if an error is encountered.
:rtype: bool
"""
self._software_model = model
self.chkDevEnvQ1.set_active(model.lst_development[0])
self.chkDevEnvQ2.set_active(model.lst_development[1])
self.chkDevEnvQ3.set_active(model.lst_development[2])
self.chkDevEnvQ4.set_active(model.lst_development[3])
self.chkDevEnvQ5.set_active(model.lst_development[4])
self.chkDevEnvQ6.set_active(model.lst_development[5])
self.chkDevEnvQ7.set_active(model.lst_development[6])
self.chkDevEnvQ8.set_active(model.lst_development[7])
self.chkDevEnvQ9.set_active(model.lst_development[8])
self.chkDevEnvQ10.set_active(model.lst_development[9])
self.chkDevEnvQ11.set_active(model.lst_development[10])
self.chkDevEnvQ12.set_active(model.lst_development[11])
self.chkDevEnvQ13.set_active(model.lst_development[12])
self.chkDevEnvQ14.set_active(model.lst_development[13])
self.chkDevEnvQ15.set_active(model.lst_development[14])
self.chkDevEnvQ16.set_active(model.lst_development[15])
self.chkDevEnvQ17.set_active(model.lst_development[16])
self.chkDevEnvQ18.set_active(model.lst_development[17])
self.chkDevEnvQ19.set_active(model.lst_development[18])
self.chkDevEnvQ20.set_active(model.lst_development[19])
self.chkDevEnvQ21.set_active(model.lst_development[20])
self.chkDevEnvQ22.set_active(model.lst_development[21])
self.chkDevEnvQ23.set_active(model.lst_development[22])
self.chkDevEnvQ24.set_active(model.lst_development[23])
self.chkDevEnvQ25.set_active(model.lst_development[24])
self.chkDevEnvQ26.set_active(model.lst_development[25])
self.chkDevEnvQ27.set_active(model.lst_development[26])
self.chkDevEnvQ28.set_active(model.lst_development[27])
self.chkDevEnvQ29.set_active(model.lst_development[28])
self.chkDevEnvQ30.set_active(model.lst_development[29])
self.chkDevEnvQ31.set_active(model.lst_development[30])
self.chkDevEnvQ32.set_active(model.lst_development[31])
self.chkDevEnvQ33.set_active(model.lst_development[32])
self.chkDevEnvQ34.set_active(model.lst_development[33])
self.chkDevEnvQ35.set_active(model.lst_development[34])
self.chkDevEnvQ36.set_active(model.lst_development[35])
self.chkDevEnvQ37.set_active(model.lst_development[36])
self.chkDevEnvQ38.set_active(model.lst_development[37])
self.chkDevEnvQ39.set_active(model.lst_development[38])
self.chkDevEnvQ40.set_active(model.lst_development[39])
self.chkDevEnvQ41.set_active(model.lst_development[40])
self.chkDevEnvQ42.set_active(model.lst_development[41])
self.chkDevEnvQ43.set_active(model.lst_development[42])
return False
def _on_toggled(self, check, index):
"""
Callback method for gtk.CheckButton() 'toggled' event.
:param gtk.CheckButton check: the gtk.CheckButton() that called this
method.
:param int index: the index of the Development Environment question
associated with the gtk.CheckButton() that was
toggled.
:return: False if successful or True if an error is encountered.
:rtype: bool
"""
check.handler_block(self._lst_handler_id[index])
self._software_model.lst_development[index] = int(check.get_active())
check.handler_unblock(self._lst_handler_id[index])
return False
| <mask token>
import sys

# Modules for localization support.
import gettext
import locale

# GUI toolkit imports; this is a PyGTK 2 application, so abort immediately
# if the toolkit is unavailable.
try:
    import pygtk
    pygtk.require('2.0')
except ImportError:
    sys.exit(1)
try:
    import gtk
except ImportError:
    sys.exit(1)
try:
    import gtk.glade
except ImportError:
    sys.exit(1)

# Other RTK modules; support both the in-tree and the installed package
# layout.
try:
    import Configuration
    import gui.gtk.Widgets as Widgets
except ImportError:
    import rtk.Configuration as Configuration
    import rtk.gui.gtk.Widgets as Widgets

__author__ = 'Andrew Rowland'
__email__ = '[email protected]'
__organization__ = 'ReliaQual Associates, LLC'
__copyright__ = 'Copyright 2007 - 2015 Andrew "weibullguy" Rowland'

# Fall back to the system default locale when the configured one is invalid.
try:
    locale.setlocale(locale.LC_ALL, Configuration.LOCALE)
except locale.Error:
    locale.setlocale(locale.LC_ALL, '')

# Shorthand for the gettext translation function.
_ = gettext.gettext
class RiskAnalysis(gtk.VPaned):
    """
    The Work Book view for analyzing and displaying the risk associated with
    the development environment.  The attributes of a development environment
    Work Book view are:

    :ivar list _lst_handler_id: the list of gtk.Widget() signal handler IDs.
    :ivar _software_model: the :py:class:`rtk.software.Software.Model` to
                           display.
    """

    # Total number of development environment risk analysis questions; each
    # question i (zero-based) is answered by the gtk.CheckButton() attribute
    # chkDevEnvQ(i + 1).
    _N_QUESTIONS = 43

    def __init__(self):
        """
        Method to initialize the development environment risk analysis
        questions Work Book page.
        """
        gtk.VPaned.__init__(self)

        # Define private list attributes.
        self._lst_handler_id = []

        # Define private scalar attributes.
        self._software_model = None

        # Create one gtk.CheckButton() per question (chkDevEnvQ1 through
        # chkDevEnvQ43) and connect its 'toggled' signal to the common
        # callback.  The loop replaces 86 hand-written statements and keeps
        # the widget set, the handler ID list, and the question indices in
        # lock-step by construction.
        for _index in range(self._N_QUESTIONS):
            _check = Widgets.make_check_button()
            setattr(self, 'chkDevEnvQ{0:d}'.format(_index + 1), _check)
            self._lst_handler_id.append(
                _check.connect('toggled', self._on_toggled, _index))

    def _get_check(self, question):
        """
        Method to retrieve the gtk.CheckButton() for a question number.

        :param int question: the one-based question number (1 - 43).
        :return: the gtk.CheckButton() associated with the question.
        """
        return getattr(self, 'chkDevEnvQ{0:d}'.format(question))

    def _build_question_pane(self, title, questions, first_question,
                             x_pos=None):
        """
        Method to build one framed, scrollable pane of risk questions.

        :param str title: the (translated) frame label.
        :param list questions: the question text for each row of the pane.
        :param int first_question: the one-based number of the first question
                                   shown in this pane.
        :param int x_pos: the x-coordinate at which to place the
                          gtk.CheckButton()s.  If None, it is derived from
                          the width of this pane's labels plus 125 pixels.
        :return: (_frame, x_pos) the assembled gtk.Frame() and the
                 x-coordinate actually used for the check buttons.
        :rtype: tuple
        """
        _fixed = gtk.Fixed()

        _scrollwindow = gtk.ScrolledWindow()
        _scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
        _scrollwindow.add_with_viewport(_fixed)

        _frame = Widgets.make_frame(label=title)
        _frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)
        _frame.add(_scrollwindow)

        (_lbl_x, _y_pos) = Widgets.make_labels(questions, _fixed, 5, 5,
                                               wrap=False)
        if x_pos is None:
            x_pos = _lbl_x + 125

        # Place one check button per question row, numbered consecutively
        # from first_question.
        for _offset, _y in enumerate(_y_pos):
            _fixed.put(self._get_check(first_question + _offset), x_pos, _y)

        return _frame, x_pos

    def create_risk_analysis_page(self, notebook):
        """
        Method to create the development environment risk analysis page and add
        it to the risk analysis gtk.Notebook().

        :param gtk.Notebook notebook: the gtk.Notebook() instance that will
                                      hold the development environment risk
                                      analysis questions.
        :return: False if successful or True if an error is encountered.
        :rtype: bool
        """
        # Upper gtk.HPaned(): organization and methods questions.
        _hpaned = gtk.HPaned()
        self.pack1(_hpaned, resize=True, shrink=True)

        # Questions 1 - 8: the development organization.  This first pane
        # also establishes the common x-coordinate for every pane's check
        # buttons.
        _labels = [_(u"1. There are separate design and coding "
                     u"organizations."),
                   _(u"2. There is an independent software test "
                     u"organization."),
                   _(u"3. There is an independent software quality "
                     u"assurance organization."),
                   _(u"4. There is an independent software configuration "
                     u"management organization."),
                   _(u"5. There is an independent software verification "
                     u"and validation organization."),
                   _(u"6. A structured programming team will develop the "
                     u"software."),
                   _(u"7. The educational level of the software team members "
                     u"is above average."),
                   _(u"8. The experience level of the software team members "
                     u"is above average.")]
        (_frame, _x_pos) = self._build_question_pane(_(u"Organization"),
                                                     _labels, 1)
        _hpaned.pack1(_frame, True, True)

        # Questions 9 - 17: the development methods.
        _labels = [_(u"1. Standards are defined and will be enforced."),
                   _(u"2. Software will be developed using a higher order "
                     u"language."),
                   _(u"3. The development process will include formal "
                     u"reviews (PDR, CDR, etc.)."),
                   _(u"4. The development process will include frequent "
                     u"walkthroughs."),
                   _(u"5. Development will take a top-down and "
                     u"structured approach."),
                   _(u"6. Unit development folders will be used."),
                   _(u"7. A software development library will be used."),
                   _(u"8. A formal change and error reporting process "
                     u"will be used."),
                   _(u"9. Progress and status will routinely be reported.")]
        (_frame, __) = self._build_question_pane(_(u"Methods"), _labels, 9,
                                                 _x_pos)
        _hpaned.pack2(_frame, True, True)

        # Lower gtk.HPaned(): documentation and tools/test questions.
        _hpaned = gtk.HPaned()
        self.pack2(_hpaned, resize=True, shrink=True)

        # Questions 18 - 28: the project documentation.
        _labels = [_(u" 1. System requirements specifications will be "
                     u"documented."),
                   _(u" 2. Software requirements specifications will be "
                     u"documented."),
                   _(u" 3. Interface design specifications will be "
                     u"documented."),
                   _(u" 4. Software design specification will be "
                     u"documented."),
                   _(u" 5. Test plans, procedures, and reports will be "
                     u"documented."),
                   _(u" 6. The software development plan will be "
                     u"documented."),
                   _(u" 7. The software quality assurance plan will be "
                     u"documented."),
                   _(u" 8. The software configuration management plan will "
                     u"be documented."),
                   _(u" 9. A requirements traceability matrix will be "
                     u"used."),
                   _(u"10. The software version description will be "
                     u"documented."),
                   _(u"11. All software discrepancies will be documented.")]
        (_frame, __) = self._build_question_pane(_(u"Documentation"),
                                                 _labels, 18, _x_pos)
        _hpaned.pack1(_frame, True, True)

        # Questions 29 - 43: the tools and test techniques.
        _labels = [_(u" 1. The software language requirements will be "
                     u"specified."),
                   _(u" 2. Formal program design language will be used."),
                   _(u" 3. Program design graphical techniques (flowcharts, "
                     u"HIPO, etc.) will be used."),
                   _(u" 4. Simulation/emulation tools will be used."),
                   _(u" 5. Configuration management tools will be used."),
                   _(u" 6. A code auditing tool will be used."),
                   _(u" 7. A data flow analyzer will be used."),
                   _(u" 8. A programmer's workbench will be used."),
                   _(u" 9. Measurement tools will be used."),
                   _(u"10. Software code reviews will be used."),
                   _(u"11. Software branch testing will be used."),
                   _(u"12. Random testing will be used."),
                   _(u"13. Functional testing will be used."),
                   _(u"14. Error and anomaly detection testing will be "
                     u"used."),
                   _(u"15. Structure analysis will be used.")]
        (_frame, __) = self._build_question_pane(
            _(u"Tools & Test Techniques"), _labels, 29, _x_pos)
        _hpaned.pack2(_frame, True, True)

        # Build the notebook tab label for this page.
        _label = gtk.Label()
        _label.set_markup("<span weight='bold'>" +
                          _(u"Development\nEnvironment") +
                          "</span>")
        _label.set_alignment(xalign=0.5, yalign=0.5)
        _label.set_justify(gtk.JUSTIFY_CENTER)
        _label.set_angle(0)
        _label.show_all()
        _label.set_tooltip_text(_(u"Assesses risk due to the development "
                                  u"environment."))
        notebook.insert_page(self, tab_label=_label, position=-1)

        return False

    def load(self, model):
        """
        Method to load the Development Environment Risk Analysis answers.

        :param `rtk.software.Software` model: the Software data model to load
                                              the gtk.ToggleButton() from.
        :return: False if successful or True if an error is encountered.
        :rtype: bool
        """
        self._software_model = model

        # Question i (zero-based) is answered by chkDevEnvQ(i + 1); walk
        # them in declaration order.
        for _index in range(self._N_QUESTIONS):
            self._get_check(_index + 1).set_active(
                model.lst_development[_index])

        return False

    def _on_toggled(self, check, index):
        """
        Callback method for gtk.CheckButton() 'toggled' event.

        :param gtk.CheckButton check: the gtk.CheckButton() that called this
                                      method.
        :param int index: the index of the Development Environment question
                          associated with the gtk.CheckButton() that was
                          toggled.
        :return: False if successful or True if an error is encountered.
        :rtype: bool
        """
        # Block this handler so writing the model cannot re-trigger it.
        check.handler_block(self._lst_handler_id[index])

        self._software_model.lst_development[index] = int(check.get_active())

        check.handler_unblock(self._lst_handler_id[index])

        return False
| #!/usr/bin/env python
"""
##############################################################################
Software Package Risk Analysis Development Environment Specific Work Book View
##############################################################################
"""
# -*- coding: utf-8 -*-
#
# rtk.software.__gui.gtk.DevelopmentEnvironment.py is part of The RTK
# Project
#
# All rights reserved.
import sys
# Import modules for localization support.
import gettext
import locale
# Modules required for the GUI.
try:
import pygtk
pygtk.require('2.0')
except ImportError:
sys.exit(1)
try:
import gtk
except ImportError:
sys.exit(1)
try:
import gtk.glade
except ImportError:
sys.exit(1)
# Import other RTK modules.
try:
import Configuration
import gui.gtk.Widgets as Widgets
except ImportError:
import rtk.Configuration as Configuration
import rtk.gui.gtk.Widgets as Widgets
__author__ = 'Andrew Rowland'
__email__ = '[email protected]'
__organization__ = 'ReliaQual Associates, LLC'
__copyright__ = 'Copyright 2007 - 2015 Andrew "weibullguy" Rowland'
try:
locale.setlocale(locale.LC_ALL, Configuration.LOCALE)
except locale.Error:
locale.setlocale(locale.LC_ALL, '')
_ = gettext.gettext
class RiskAnalysis(gtk.VPaned):
"""
The Work Book view for analyzing and displaying the risk associated with
the development environment. The attributes of a development environment
Work Book view are:
:ivar list _lst_handler_id: the list of gtk.Widget() signal handler IDs.
:ivar _software_model: the :py:class:`rtk.software.Software.Model` to
display.
"""
def __init__(self):
"""
Method to initialize the development environment risk analysis
questions Work Book page.
"""
gtk.VPaned.__init__(self)
# Define private dictionary attributes.
# Define private list attributes.
self._lst_handler_id = []
# Define private scalar attributes.
self._software_model = None
# Define public dictionary attributes.
# Define public list attributes.
# Define public scalar attributes.
self.chkDevEnvQ1 = Widgets.make_check_button()
self.chkDevEnvQ2 = Widgets.make_check_button()
self.chkDevEnvQ3 = Widgets.make_check_button()
self.chkDevEnvQ4 = Widgets.make_check_button()
self.chkDevEnvQ5 = Widgets.make_check_button()
self.chkDevEnvQ6 = Widgets.make_check_button()
self.chkDevEnvQ7 = Widgets.make_check_button()
self.chkDevEnvQ8 = Widgets.make_check_button()
self.chkDevEnvQ9 = Widgets.make_check_button()
self.chkDevEnvQ10 = Widgets.make_check_button()
self.chkDevEnvQ11 = Widgets.make_check_button()
self.chkDevEnvQ12 = Widgets.make_check_button()
self.chkDevEnvQ13 = Widgets.make_check_button()
self.chkDevEnvQ14 = Widgets.make_check_button()
self.chkDevEnvQ15 = Widgets.make_check_button()
self.chkDevEnvQ16 = Widgets.make_check_button()
self.chkDevEnvQ17 = Widgets.make_check_button()
self.chkDevEnvQ18 = Widgets.make_check_button()
self.chkDevEnvQ19 = Widgets.make_check_button()
self.chkDevEnvQ20 = Widgets.make_check_button()
self.chkDevEnvQ21 = Widgets.make_check_button()
self.chkDevEnvQ22 = Widgets.make_check_button()
self.chkDevEnvQ23 = Widgets.make_check_button()
self.chkDevEnvQ24 = Widgets.make_check_button()
self.chkDevEnvQ25 = Widgets.make_check_button()
self.chkDevEnvQ26 = Widgets.make_check_button()
self.chkDevEnvQ27 = Widgets.make_check_button()
self.chkDevEnvQ28 = Widgets.make_check_button()
self.chkDevEnvQ29 = Widgets.make_check_button()
self.chkDevEnvQ30 = Widgets.make_check_button()
self.chkDevEnvQ31 = Widgets.make_check_button()
self.chkDevEnvQ32 = Widgets.make_check_button()
self.chkDevEnvQ33 = Widgets.make_check_button()
self.chkDevEnvQ34 = Widgets.make_check_button()
self.chkDevEnvQ35 = Widgets.make_check_button()
self.chkDevEnvQ36 = Widgets.make_check_button()
self.chkDevEnvQ37 = Widgets.make_check_button()
self.chkDevEnvQ38 = Widgets.make_check_button()
self.chkDevEnvQ39 = Widgets.make_check_button()
self.chkDevEnvQ40 = Widgets.make_check_button()
self.chkDevEnvQ41 = Widgets.make_check_button()
self.chkDevEnvQ42 = Widgets.make_check_button()
self.chkDevEnvQ43 = Widgets.make_check_button()
# Connect gtk.Widget() signals to callback methods.
self._lst_handler_id.append(
self.chkDevEnvQ1.connect('toggled', self._on_toggled, 0))
self._lst_handler_id.append(
self.chkDevEnvQ2.connect('toggled', self._on_toggled, 1))
self._lst_handler_id.append(
self.chkDevEnvQ3.connect('toggled', self._on_toggled, 2))
self._lst_handler_id.append(
self.chkDevEnvQ4.connect('toggled', self._on_toggled, 3))
self._lst_handler_id.append(
self.chkDevEnvQ5.connect('toggled', self._on_toggled, 4))
self._lst_handler_id.append(
self.chkDevEnvQ6.connect('toggled', self._on_toggled, 5))
self._lst_handler_id.append(
self.chkDevEnvQ7.connect('toggled', self._on_toggled, 6))
self._lst_handler_id.append(
self.chkDevEnvQ8.connect('toggled', self._on_toggled, 7))
self._lst_handler_id.append(
self.chkDevEnvQ9.connect('toggled', self._on_toggled, 8))
self._lst_handler_id.append(
self.chkDevEnvQ10.connect('toggled', self._on_toggled, 9))
self._lst_handler_id.append(
self.chkDevEnvQ11.connect('toggled', self._on_toggled, 10))
self._lst_handler_id.append(
self.chkDevEnvQ12.connect('toggled', self._on_toggled, 11))
self._lst_handler_id.append(
self.chkDevEnvQ13.connect('toggled', self._on_toggled, 12))
self._lst_handler_id.append(
self.chkDevEnvQ14.connect('toggled', self._on_toggled, 13))
self._lst_handler_id.append(
self.chkDevEnvQ15.connect('toggled', self._on_toggled, 14))
self._lst_handler_id.append(
self.chkDevEnvQ16.connect('toggled', self._on_toggled, 15))
self._lst_handler_id.append(
self.chkDevEnvQ17.connect('toggled', self._on_toggled, 16))
self._lst_handler_id.append(
self.chkDevEnvQ18.connect('toggled', self._on_toggled, 17))
self._lst_handler_id.append(
self.chkDevEnvQ19.connect('toggled', self._on_toggled, 18))
self._lst_handler_id.append(
self.chkDevEnvQ20.connect('toggled', self._on_toggled, 19))
self._lst_handler_id.append(
self.chkDevEnvQ21.connect('toggled', self._on_toggled, 20))
self._lst_handler_id.append(
self.chkDevEnvQ22.connect('toggled', self._on_toggled, 21))
self._lst_handler_id.append(
self.chkDevEnvQ23.connect('toggled', self._on_toggled, 22))
self._lst_handler_id.append(
self.chkDevEnvQ24.connect('toggled', self._on_toggled, 23))
self._lst_handler_id.append(
self.chkDevEnvQ25.connect('toggled', self._on_toggled, 24))
self._lst_handler_id.append(
self.chkDevEnvQ26.connect('toggled', self._on_toggled, 25))
self._lst_handler_id.append(
self.chkDevEnvQ27.connect('toggled', self._on_toggled, 26))
self._lst_handler_id.append(
self.chkDevEnvQ28.connect('toggled', self._on_toggled, 27))
self._lst_handler_id.append(
self.chkDevEnvQ29.connect('toggled', self._on_toggled, 28))
self._lst_handler_id.append(
self.chkDevEnvQ30.connect('toggled', self._on_toggled, 29))
self._lst_handler_id.append(
self.chkDevEnvQ31.connect('toggled', self._on_toggled, 30))
self._lst_handler_id.append(
self.chkDevEnvQ32.connect('toggled', self._on_toggled, 31))
self._lst_handler_id.append(
self.chkDevEnvQ33.connect('toggled', self._on_toggled, 32))
self._lst_handler_id.append(
self.chkDevEnvQ34.connect('toggled', self._on_toggled, 33))
self._lst_handler_id.append(
self.chkDevEnvQ35.connect('toggled', self._on_toggled, 34))
self._lst_handler_id.append(
self.chkDevEnvQ36.connect('toggled', self._on_toggled, 35))
self._lst_handler_id.append(
self.chkDevEnvQ37.connect('toggled', self._on_toggled, 36))
self._lst_handler_id.append(
self.chkDevEnvQ38.connect('toggled', self._on_toggled, 37))
self._lst_handler_id.append(
self.chkDevEnvQ39.connect('toggled', self._on_toggled, 38))
self._lst_handler_id.append(
self.chkDevEnvQ40.connect('toggled', self._on_toggled, 39))
self._lst_handler_id.append(
self.chkDevEnvQ41.connect('toggled', self._on_toggled, 40))
self._lst_handler_id.append(
self.chkDevEnvQ42.connect('toggled', self._on_toggled, 41))
self._lst_handler_id.append(
self.chkDevEnvQ43.connect('toggled', self._on_toggled, 42))
def create_risk_analysis_page(self, notebook):
"""
Method to create the development environment risk analysis page and add
it to the risk analysis gtk.Notebook().
:param gtk.Notebook notebook: the gtk.Notebook() instance that will
hold the development environment risk
analysis questions.
:return: False if successful or True if an error is encountered.
:rtype: bool
"""
# +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ #
# Build-up the containers for the tab. #
# +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ #
_hpaned = gtk.HPaned()
self.pack1(_hpaned, resize=True, shrink=True)
# Create the organizational risk pane.
_fixed = gtk.Fixed()
_scrollwindow = gtk.ScrolledWindow()
_scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
_scrollwindow.add_with_viewport(_fixed)
_frame = Widgets.make_frame(label=_(u"Organization"))
_frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)
_frame.add(_scrollwindow)
_hpaned.pack1(_frame, True, True)
_labels = [_(u"1. There are separate design and coding "
u"organizations."),
_(u"2. There is an independent software test "
u"organization."),
_(u"3. There is an independent software quality "
u"assurance organization."),
_(u"4. There is an independent software configuration "
u"management organization."),
_(u"5. There is an independent software verification "
u"and validation organization."),
_(u"6. A structured programming team will develop the "
u"software."),
_(u"7. The educational level of the software team members "
u"is above average."),
_(u"8. The experience level of the software team members "
u"is above average.")]
(_x_pos,
_y_pos) = Widgets.make_labels(_labels, _fixed, 5, 5, wrap=False)
_x_pos += 125
_fixed.put(self.chkDevEnvQ1, _x_pos, _y_pos[0])
_fixed.put(self.chkDevEnvQ2, _x_pos, _y_pos[1])
_fixed.put(self.chkDevEnvQ3, _x_pos, _y_pos[2])
_fixed.put(self.chkDevEnvQ4, _x_pos, _y_pos[3])
_fixed.put(self.chkDevEnvQ5, _x_pos, _y_pos[4])
_fixed.put(self.chkDevEnvQ6, _x_pos, _y_pos[5])
_fixed.put(self.chkDevEnvQ7, _x_pos, _y_pos[6])
_fixed.put(self.chkDevEnvQ8, _x_pos, _y_pos[7])
# Create the methods risk pane.
_fixed = gtk.Fixed()
_scrollwindow = gtk.ScrolledWindow()
_scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
_scrollwindow.add_with_viewport(_fixed)
_frame = Widgets.make_frame(label=_(u"Methods"))
_frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)
_frame.add(_scrollwindow)
_hpaned.pack2(_frame, True, True)
_labels = [_(u"1. Standards are defined and will be enforced."),
_(u"2. Software will be developed using a higher order "
u"language."),
_(u"3. The development process will include formal "
u"reviews (PDR, CDR, etc.)."),
_(u"4. The development process will include frequent "
u"walkthroughs."),
_(u"5. Development will take a top-down and "
u"structured approach."),
_(u"6. Unit development folders will be used."),
_(u"7. A software development library will be used."),
_(u"8. A formal change and error reporting process "
u"will be used."),
_(u"9. Progress and status will routinely be "
u"reported.")]
(__, _y_pos) = Widgets.make_labels(_labels, _fixed, 5, 5, wrap=False)
_fixed.put(self.chkDevEnvQ9, _x_pos, _y_pos[0])
_fixed.put(self.chkDevEnvQ10, _x_pos, _y_pos[1])
_fixed.put(self.chkDevEnvQ11, _x_pos, _y_pos[2])
_fixed.put(self.chkDevEnvQ12, _x_pos, _y_pos[3])
_fixed.put(self.chkDevEnvQ13, _x_pos, _y_pos[4])
_fixed.put(self.chkDevEnvQ14, _x_pos, _y_pos[5])
_fixed.put(self.chkDevEnvQ15, _x_pos, _y_pos[6])
_fixed.put(self.chkDevEnvQ16, _x_pos, _y_pos[7])
_fixed.put(self.chkDevEnvQ17, _x_pos, _y_pos[8])
# Create the documentation risk pane.
_hpaned = gtk.HPaned()
self.pack2(_hpaned, resize=True, shrink=True)
_fixed = gtk.Fixed()
_scrollwindow = gtk.ScrolledWindow()
_scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
_scrollwindow.add_with_viewport(_fixed)
_frame = Widgets.make_frame(label=_(u"Documentation"))
_frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)
_frame.add(_scrollwindow)
_hpaned.pack1(_frame, True, True)
_labels = [_(u" 1. System requirements specifications will be "
u"documented."),
_(u" 2. Software requirements specifications will be "
u"documented."),
_(u" 3. Interface design specifications will be "
u"documented."),
_(u" 4. Software design specification will be "
u"documented."),
_(u" 5. Test plans, procedures, and reports will be "
u"documented."),
_(u" 6. The software development plan will be "
u"documented."),
_(u" 7. The software quality assurance plan will be "
u"documented."),
_(u" 8. The software configuration management plan will "
u"be documented."),
_(u" 9. A requirements traceability matrix will be "
u"used."),
_(u"10. The software version description will be "
u"documented."),
_(u"11. All software discrepancies will be "
u"documented.")]
(__, _y_pos) = Widgets.make_labels(_labels, _fixed, 5, 5, wrap=False)
_fixed.put(self.chkDevEnvQ18, _x_pos, _y_pos[0])
_fixed.put(self.chkDevEnvQ19, _x_pos, _y_pos[1])
_fixed.put(self.chkDevEnvQ20, _x_pos, _y_pos[2])
_fixed.put(self.chkDevEnvQ21, _x_pos, _y_pos[3])
_fixed.put(self.chkDevEnvQ22, _x_pos, _y_pos[4])
_fixed.put(self.chkDevEnvQ23, _x_pos, _y_pos[5])
_fixed.put(self.chkDevEnvQ24, _x_pos, _y_pos[6])
_fixed.put(self.chkDevEnvQ25, _x_pos, _y_pos[7])
_fixed.put(self.chkDevEnvQ26, _x_pos, _y_pos[8])
_fixed.put(self.chkDevEnvQ27, _x_pos, _y_pos[9])
_fixed.put(self.chkDevEnvQ28, _x_pos, _y_pos[10])
# Create the tools and test techniques risk pane.
_fixed = gtk.Fixed()
_scrollwindow = gtk.ScrolledWindow()
_scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
_scrollwindow.add_with_viewport(_fixed)
_frame = Widgets.make_frame(label=_(u"Tools & Test Techniques"))
_frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)
_frame.add(_scrollwindow)
_hpaned.pack2(_frame, True, True)
_labels = [_(u" 1. The software language requirements will be "
u"specified."),
_(u" 2. Formal program design language will be used."),
_(u" 3. Program design graphical techniques "
u"(flowcharts, HIPO, etc.) will be used."),
_(u" 4. Simulation/emulation tools will be used."),
_(u" 5. Configuration management tools will be used."),
_(u" 6. A code auditing tool will be used."),
_(u" 7. A data flow analyzer will be used."),
_(u" 8. A programmer's workbench will be used."),
_(u" 9. Measurement tools will be used."),
_(u"10. Software code reviews will be used."),
_(u"11. Software branch testing will be used."),
_(u"12. Random testing will be used."),
_(u"13. Functional testing will be used."),
_(u"14. Error and anomaly detection testing will be "
u"used."),
_(u"15. Structure analysis will be used.")]
(__, _y_pos) = Widgets.make_labels(_labels, _fixed, 5, 5, wrap=False)
_fixed.put(self.chkDevEnvQ29, _x_pos, _y_pos[0])
_fixed.put(self.chkDevEnvQ30, _x_pos, _y_pos[1])
_fixed.put(self.chkDevEnvQ31, _x_pos, _y_pos[2])
_fixed.put(self.chkDevEnvQ32, _x_pos, _y_pos[3])
_fixed.put(self.chkDevEnvQ33, _x_pos, _y_pos[4])
_fixed.put(self.chkDevEnvQ34, _x_pos, _y_pos[5])
_fixed.put(self.chkDevEnvQ35, _x_pos, _y_pos[6])
_fixed.put(self.chkDevEnvQ36, _x_pos, _y_pos[7])
_fixed.put(self.chkDevEnvQ37, _x_pos, _y_pos[8])
_fixed.put(self.chkDevEnvQ38, _x_pos, _y_pos[9])
_fixed.put(self.chkDevEnvQ39, _x_pos, _y_pos[10])
_fixed.put(self.chkDevEnvQ40, _x_pos, _y_pos[11])
_fixed.put(self.chkDevEnvQ41, _x_pos, _y_pos[12])
_fixed.put(self.chkDevEnvQ42, _x_pos, _y_pos[13])
_fixed.put(self.chkDevEnvQ43, _x_pos, _y_pos[14])
_label = gtk.Label()
_label.set_markup("<span weight='bold'>" +
_(u"Development\nEnvironment") +
"</span>")
_label.set_alignment(xalign=0.5, yalign=0.5)
_label.set_justify(gtk.JUSTIFY_CENTER)
_label.set_angle(0)
_label.show_all()
_label.set_tooltip_text(_(u"Assesses risk due to the development "
u"environment."))
notebook.insert_page(self, tab_label=_label, position=-1)
return False
def load(self, model):
"""
Method to load the Development Environment Risk Analysis answers.
:param `rtk.software.Software` model: the Software data model to load
the gtk.ToggleButton() from.
:return: False if successful or True if an error is encountered.
:rtype: bool
"""
self._software_model = model
self.chkDevEnvQ1.set_active(model.lst_development[0])
self.chkDevEnvQ2.set_active(model.lst_development[1])
self.chkDevEnvQ3.set_active(model.lst_development[2])
self.chkDevEnvQ4.set_active(model.lst_development[3])
self.chkDevEnvQ5.set_active(model.lst_development[4])
self.chkDevEnvQ6.set_active(model.lst_development[5])
self.chkDevEnvQ7.set_active(model.lst_development[6])
self.chkDevEnvQ8.set_active(model.lst_development[7])
self.chkDevEnvQ9.set_active(model.lst_development[8])
self.chkDevEnvQ10.set_active(model.lst_development[9])
self.chkDevEnvQ11.set_active(model.lst_development[10])
self.chkDevEnvQ12.set_active(model.lst_development[11])
self.chkDevEnvQ13.set_active(model.lst_development[12])
self.chkDevEnvQ14.set_active(model.lst_development[13])
self.chkDevEnvQ15.set_active(model.lst_development[14])
self.chkDevEnvQ16.set_active(model.lst_development[15])
self.chkDevEnvQ17.set_active(model.lst_development[16])
self.chkDevEnvQ18.set_active(model.lst_development[17])
self.chkDevEnvQ19.set_active(model.lst_development[18])
self.chkDevEnvQ20.set_active(model.lst_development[19])
self.chkDevEnvQ21.set_active(model.lst_development[20])
self.chkDevEnvQ22.set_active(model.lst_development[21])
self.chkDevEnvQ23.set_active(model.lst_development[22])
self.chkDevEnvQ24.set_active(model.lst_development[23])
self.chkDevEnvQ25.set_active(model.lst_development[24])
self.chkDevEnvQ26.set_active(model.lst_development[25])
self.chkDevEnvQ27.set_active(model.lst_development[26])
self.chkDevEnvQ28.set_active(model.lst_development[27])
self.chkDevEnvQ29.set_active(model.lst_development[28])
self.chkDevEnvQ30.set_active(model.lst_development[29])
self.chkDevEnvQ31.set_active(model.lst_development[30])
self.chkDevEnvQ32.set_active(model.lst_development[31])
self.chkDevEnvQ33.set_active(model.lst_development[32])
self.chkDevEnvQ34.set_active(model.lst_development[33])
self.chkDevEnvQ35.set_active(model.lst_development[34])
self.chkDevEnvQ36.set_active(model.lst_development[35])
self.chkDevEnvQ37.set_active(model.lst_development[36])
self.chkDevEnvQ38.set_active(model.lst_development[37])
self.chkDevEnvQ39.set_active(model.lst_development[38])
self.chkDevEnvQ40.set_active(model.lst_development[39])
self.chkDevEnvQ41.set_active(model.lst_development[40])
self.chkDevEnvQ42.set_active(model.lst_development[41])
self.chkDevEnvQ43.set_active(model.lst_development[42])
return False
def _on_toggled(self, check, index):
"""
Callback method for gtk.CheckButton() 'toggled' event.
:param gtk.CheckButton check: the gtk.CheckButton() that called this
method.
:param int index: the index of the Development Environment question
associated with the gtk.CheckButton() that was
toggled.
:return: False if successful or True if an error is encountered.
:rtype: bool
"""
check.handler_block(self._lst_handler_id[index])
self._software_model.lst_development[index] = int(check.get_active())
check.handler_unblock(self._lst_handler_id[index])
return False
| [
4,
5,
7,
9,
10
] |
977 | 136215a3ba99f74160373181c458db9bec4bb6b7 | <mask token>
| <mask token>
def decode(hash):
hash = base64.b64decode(hash.encode('utf-8'))
key = DesKey(b'7ly6UznJ')
return key.decrypt(hash, initial=b'XuVUm5fR', padding=True).decode('utf-8')
<mask token>
| <mask token>
def decode(hash):
hash = base64.b64decode(hash.encode('utf-8'))
key = DesKey(b'7ly6UznJ')
return key.decrypt(hash, initial=b'XuVUm5fR', padding=True).decode('utf-8')
print(decode('XXXXXXXXXXXXXXXXXXXXXX'))
| import json
import base64
from des import *
import sys
def decode(hash):
hash = base64.b64decode(hash.encode('utf-8'))
key = DesKey(b'7ly6UznJ')
return key.decrypt(hash, initial=b'XuVUm5fR', padding=True).decode('utf-8')
print(decode('XXXXXXXXXXXXXXXXXXXXXX'))
| #PortableKanban 4.3.6578.38136 - Encrypted Password Retrieval
#Python3 -m pip install des
#or
#pip install des
import json
import base64
from des import * #python3 -m pip install des, pip install des
import sys
def decode(hash):
hash = base64.b64decode(hash.encode('utf-8'))
key = DesKey(b"7ly6UznJ")
return key.decrypt(hash,initial=b"XuVUm5fR",padding=True).decode('utf-8')
print(decode('XXXXXXXXXXXXXXXXXXXXXX'))
#change this to your encrypted key
| [
0,
1,
2,
3,
4
] |
978 | cc46485a3b5c68e4f77a2f9a033fd2ee2859b52b | <mask token>
| <mask token>
for c in range(1, 11):
tree = DecisionTreeClassifier(max_depth=4, random_state=c)
model.append(tree.fit(X_train, y_train))
<mask token>
for a in model:
in_sample_accuracy.append(a.score(X_train, y_train))
out_of_sample_accuracy.append(a.score(X_test, y_test))
<mask token>
a.append('mean')
a.append('standard')
in_sample_accuracy.append(np.mean(in_sample_accuracy))
in_sample_accuracy.append(np.std(in_sample_accuracy[:-1]))
out_of_sample_accuracy.append(np.mean(out_of_sample_accuracy))
out_of_sample_accuracy.append(np.std(out_of_sample_accuracy[:-1]))
<mask token>
pd.set_option('precision', 3)
b
<mask token>
CVS.append(score)
pd.set_option('precision', 3)
<mask token>
dt.fit(X_train, y_train)
<mask token>
c
print('My name is Fengkai Xu')
print('My NetID is: fengkai4')
print(
'I hereby certify that I have read the University policy on Academic Integrity and that I am not in violation.'
)
| <mask token>
iris_dataset = load_iris()
X = iris_dataset['data']
y = iris_dataset['target']
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.1,
stratify=y, random_state=42)
model = []
for c in range(1, 11):
tree = DecisionTreeClassifier(max_depth=4, random_state=c)
model.append(tree.fit(X_train, y_train))
in_sample_accuracy = []
out_of_sample_accuracy = []
for a in model:
in_sample_accuracy.append(a.score(X_train, y_train))
out_of_sample_accuracy.append(a.score(X_test, y_test))
a = list(range(1, 11))
a.append('mean')
a.append('standard')
in_sample_accuracy.append(np.mean(in_sample_accuracy))
in_sample_accuracy.append(np.std(in_sample_accuracy[:-1]))
out_of_sample_accuracy.append(np.mean(out_of_sample_accuracy))
out_of_sample_accuracy.append(np.std(out_of_sample_accuracy[:-1]))
b = pd.DataFrame([in_sample_accuracy, out_of_sample_accuracy], columns=a,
index=['in_sample_accuracy', 'out_of_sample_accuracy'])
pd.set_option('precision', 3)
b
CVS = []
score = cross_val_score(DecisionTreeClassifier(max_depth=4), X_train,
y_train, cv=10)
CVS.append(score)
pd.set_option('precision', 3)
c = pd.DataFrame(CVS, columns=['result1', 'result2', 'result3', 'result4',
'result5', 'result6', 'result7', 'result8', 'result9', 'result 10'])
c['mean'] = c.mean(1)
c['standard'] = c.std(1)
dt = DecisionTreeClassifier(max_depth=4)
dt.fit(X_train, y_train)
c['Out-of-sample-accuracy'] = dt.score(X_test, y_test)
c
print('My name is Fengkai Xu')
print('My NetID is: fengkai4')
print(
'I hereby certify that I have read the University policy on Academic Integrity and that I am not in violation.'
)
| from sklearn.tree import DecisionTreeClassifier
from sklearn.model_selection import train_test_split
import pandas as pd
import numpy as np
from sklearn.datasets import load_iris
from sklearn.model_selection import cross_val_score
iris_dataset = load_iris()
X = iris_dataset['data']
y = iris_dataset['target']
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.1,
stratify=y, random_state=42)
model = []
for c in range(1, 11):
tree = DecisionTreeClassifier(max_depth=4, random_state=c)
model.append(tree.fit(X_train, y_train))
in_sample_accuracy = []
out_of_sample_accuracy = []
for a in model:
in_sample_accuracy.append(a.score(X_train, y_train))
out_of_sample_accuracy.append(a.score(X_test, y_test))
a = list(range(1, 11))
a.append('mean')
a.append('standard')
in_sample_accuracy.append(np.mean(in_sample_accuracy))
in_sample_accuracy.append(np.std(in_sample_accuracy[:-1]))
out_of_sample_accuracy.append(np.mean(out_of_sample_accuracy))
out_of_sample_accuracy.append(np.std(out_of_sample_accuracy[:-1]))
b = pd.DataFrame([in_sample_accuracy, out_of_sample_accuracy], columns=a,
index=['in_sample_accuracy', 'out_of_sample_accuracy'])
pd.set_option('precision', 3)
b
CVS = []
score = cross_val_score(DecisionTreeClassifier(max_depth=4), X_train,
y_train, cv=10)
CVS.append(score)
pd.set_option('precision', 3)
c = pd.DataFrame(CVS, columns=['result1', 'result2', 'result3', 'result4',
'result5', 'result6', 'result7', 'result8', 'result9', 'result 10'])
c['mean'] = c.mean(1)
c['standard'] = c.std(1)
dt = DecisionTreeClassifier(max_depth=4)
dt.fit(X_train, y_train)
c['Out-of-sample-accuracy'] = dt.score(X_test, y_test)
c
print('My name is Fengkai Xu')
print('My NetID is: fengkai4')
print(
'I hereby certify that I have read the University policy on Academic Integrity and that I am not in violation.'
)
|
from sklearn.tree import DecisionTreeClassifier
from sklearn.model_selection import train_test_split
import pandas as pd
import numpy as np
from sklearn.datasets import load_iris
from sklearn.model_selection import cross_val_score
iris_dataset=load_iris()
X=iris_dataset['data']
y=iris_dataset['target']
X_train,X_test,y_train,y_test=train_test_split(X,y,test_size=0.1,stratify=y,random_state=42)
model=[]
for c in range(1,11):
tree=DecisionTreeClassifier(max_depth=4,random_state=c)
model.append(tree.fit(X_train,y_train))
in_sample_accuracy=[]
out_of_sample_accuracy=[]
for a in model:
in_sample_accuracy.append(a.score(X_train,y_train))
out_of_sample_accuracy.append(a.score(X_test,y_test))
a=list(range(1,11))
a.append('mean')
a.append('standard')
in_sample_accuracy.append(np.mean(in_sample_accuracy))
in_sample_accuracy.append(np.std(in_sample_accuracy[:-1]))
out_of_sample_accuracy.append(np.mean(out_of_sample_accuracy))
out_of_sample_accuracy.append(np.std(out_of_sample_accuracy[:-1]))
b=pd.DataFrame([in_sample_accuracy,out_of_sample_accuracy,],
columns=a,index=['in_sample_accuracy','out_of_sample_accuracy'])
pd.set_option('precision',3)
b
#cross validation
CVS=[]
score=cross_val_score(DecisionTreeClassifier(max_depth=4),X_train,y_train,cv=10)
CVS.append(score)
pd.set_option('precision',3)
c=pd.DataFrame(CVS,columns=['result1','result2','result3','result4','result5','result6','result7','result8','result9','result 10'],)
c['mean']=c.mean(1)
c['standard']=c.std(1)
dt=DecisionTreeClassifier(max_depth=4)
dt.fit(X_train,y_train)
c['Out-of-sample-accuracy']=dt.score(X_test,y_test)
c
print("My name is Fengkai Xu")
print("My NetID is: fengkai4")
print("I hereby certify that I have read the University policy on Academic Integrity and that I am not in violation.") | [
0,
1,
2,
3,
4
] |
979 | ce98c13555c474de0a9cb12e99a97b2316312b00 | <mask token>
| <mask token>
for i in range(3):
ans += min(yuki[i], enemy[(i + 1) % 3]) * 3
yuki[i], enemy[(i + 1) % 3] = max(0, yuki[i] - enemy[(i + 1) % 3]), max(
0, enemy[(i + 1) % 3] - yuki[i])
for i in range(3):
ans += min(yuki[i], enemy[i])
print(ans)
| yuki = list(map(int, input().split()))
S = input()
enemy = [S.count('G'), S.count('C'), S.count('P')]
ans = 0
for i in range(3):
ans += min(yuki[i], enemy[(i + 1) % 3]) * 3
yuki[i], enemy[(i + 1) % 3] = max(0, yuki[i] - enemy[(i + 1) % 3]), max(
0, enemy[(i + 1) % 3] - yuki[i])
for i in range(3):
ans += min(yuki[i], enemy[i])
print(ans)
| null | null | [
0,
1,
2
] |
980 | 3f3ed0165120dc135a4ce1f282dbdf9dad57adf8 | <mask token>
| <mask token>
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
EMOTICONS = {'O:)': 'angel', 'o:)': 'angel', 'O:-)': 'angel', 'o:-)':
'angel', 'o:-3': 'angel', 'o:3': 'angel', 'O;^)': 'angel', '>:[':
'annoyed/disappointed', ':-(': 'annoyed/disappointed', ':(':
'annoyed/disappointed', ':((': 'annoyed/disappointed', ':-((':
'annoyed/disappointed', ':-c': 'annoyed/disappointed', ':-<':
'annoyed/disappointed', ':?C': 'annoyed/disappointed', ':<':
'annoyed/disappointed', ':[': 'annoyed/disappointed', ':{':
'annoyed/disappointed', ':=||': 'annoyed/disappointed', ':@':
'annoyed/disappointed', '>:(': 'annoyed/disappointed', ':/':
'annoyed/disappointed', ':\\': 'annoyed/disappointed', '=/':
'annoyed/disappointed', '=\\': 'annoyed/disappointed', '>:/':
'annoyed/disappointed', '>:\\': 'annoyed/disappointed', ':S':
'annoyed/disappointed', ':s': 'annoyed/disappointed', ':-S':
'annoyed/disappointed', ':-s': 'annoyed/disappointed', ':|':
'annoyed/disappointed', ':-|': 'annoyed/disappointed', ':$':
'annoyed/disappointed', '?_?': 'annoyed/disappointed', '(>_<)':
'annoyed/disappointed', '>_<': 'annoyed/disappointed', '>__<':
'annoyed/disappointed', '(>__<)': 'annoyed/disappointed', '(-.-)':
'annoyed/disappointed', '(-_-)': 'annoyed/disappointed', '(._.)':
'annoyed/disappointed', '/:)': 'annoyed/disappointed', ':-$':
'annoyed/disappointed', '>:P': 'annoyed/disappointed', 'K':
'annoyed/disappointed', '3:)': 'devilish', '3:-)': 'devilish', '}:-)':
'devilish', '}:)': 'devilish', '>:)': 'devilish', 'B-)': 'happy', ':-)':
'happy', ':)': 'happy', ':o)': 'happy', ':]': 'happy', ':3': 'happy',
':c)': 'happy', ':>': 'happy', '=]': 'happy', '8)': 'happy', '=)':
'happy', ':}': 'happy', ':^)': 'happy', ':?)': 'happy', ':-))': 'happy',
'<:-P': 'happy', '<:P': 'happy', '<:-p': 'happy', '<:p': 'happy', ';;)':
'happy', 'J': 'happy', '<3': 'heart', '^5': 'high-five', '>_>^':
'high-five', '^<_<': 'high-five', ':*': 'kiss', ':*)': 'kiss', ':^*':
'kiss', '}{': 'kiss', "('}{')": 'kiss', ':-D': 'laughing', ':D':
'laughing', '8-D': 'laughing', '8D': 'laughing', 'x-D': 'laughing',
'xD': 'laughing', 'X-D': 'laughing', 'XD': 'laughing', '=-D':
'laughing', '=D': 'laughing', ';D': 'laughing', '-3': 'laughing', '3':
'laughing', 'B^D': 'laughing', 'D:<': 'laughing', 'D:': 'laughing',
'D8': 'laughing', 'D;': 'laughing', 'D=': 'laughing', 'DX': 'laughing',
':-B': 'nerd', '8-)': 'nerd', '8)': 'nerd', '</3': 'sad', ":'(": 'sad',
":'-(": 'sad', 'QQ': 'sad', 'L': 'sad', ':#': 'sealed mouth', ':-#':
'sealed mouth', ':-X': 'sealed mouth', ':-x': 'sealed mouth', ':X':
'sealed mouth', ':x': 'sealed mouth', '??': 'shooting star', '??':
'shooting star', '~?': 'shooting star', '>:O': 'suprprised/shocked',
'>:o': 'suprprised/shocked', ':-O': 'suprprised/shocked', ':-o':
'suprprised/shocked', ':O': 'suprprised/shocked', ':o':
'suprprised/shocked', 'O_o': 'suprprised/shocked', 'o_O':
'suprprised/shocked', 'O.o': 'suprprised/shocked', 'o.O':
'suprprised/shocked', '(O_o)': 'suprprised/shocked', '(o_O)':
'suprprised/shocked', '(O.o)': 'suprprised/shocked', '(o.O)':
'suprprised/shocked', ":'-)": 'tears of happines', ":')":
'tears of happines', ':P': 'teasing/playful', ':p': 'teasing/playful',
'>:P': 'teasing/playful', '>:p': 'teasing/playful', 'X-P':
'teasing/playful', 'x-p': 'teasing/playful', 'xp': 'teasing/playful',
'XP': 'teasing/playful', ':-P': 'teasing/playful', ':-p':
'teasing/playful', '=P': 'teasing/playful', '=P': 'teasing/playful',
':-?': 'teasing/playful', ':-b': 'teasing/playful', ':b':
'teasing/playful', ';)': 'wink', u'º)': 'wink', ';-)': 'wink', ';]':
'wink', u'^Ü^': 'happy'}
special_tokens = EMOTICONS
<mask token>
EASY_WORDS = {u'ليا': [(Prefix(u'ل'), u'يا', Suffix(u''))], u'لي': [(Prefix
(u'ل'), u'ي', Suffix(u''))], u'لكم': [(Prefix(u'ل'), u'كم', Suffix(u'')
)], u'لكما': [(Prefix(u'ل'), u'كما', Suffix(u''))], u'له': [(Prefix(
u'ل'), u'ه', Suffix(u''))], u'لها': [(Prefix(u'ل'), u'ها', Suffix(u''))
], u'لهم': [(Prefix(u'ل'), u'هم', Suffix(u''))], u'لهما': [(Prefix(u'ل'
), u'هما', Suffix(u''))], u'لهن': [(Prefix(u'ل'), u'هم', Suffix(u''))],
u'بيا': [(Prefix(u'ب'), u'يا', Suffix(u''))], u'بي': [(Prefix(u'ب'),
u'ي', Suffix(u''))], u'بك': [(Prefix(u'ب'), u'ك', Suffix(u''))], u'بكم':
[(Prefix(u'ب'), u'كم', Suffix(u''))], u'بكما': [(Prefix(u'ب'), u'كما',
Suffix(u''))], u'به': [(Prefix(u'ب'), u'ه', Suffix(u''))], u'بها': [(
Prefix(u'ب'), u'ها', Suffix(u''))], u'بهما': [(Prefix(u'ب'), u'هما',
Suffix(u''))], u'بهم': [(Prefix(u'ب'), u'هم', Suffix(u''))], u'بهن': [(
Prefix(u'ب'), u'هن', Suffix(u''))], u'عليا': [(Prefix(u''), u'على',
Suffix(u'يا'))], u'فيا': [(Prefix(u'ف'), u'يا', Suffix(u''))]}
EMOTICONS_TAG = 'EMO'
PUNCTUATION_TAG = 'PUNC'
DIGIT_TAG = 'CD'
NOTDEFINED_TAG = 'NN'
| import os.path
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
EMOTICONS = {'O:)': 'angel', 'o:)': 'angel', 'O:-)': 'angel', 'o:-)':
'angel', 'o:-3': 'angel', 'o:3': 'angel', 'O;^)': 'angel', '>:[':
'annoyed/disappointed', ':-(': 'annoyed/disappointed', ':(':
'annoyed/disappointed', ':((': 'annoyed/disappointed', ':-((':
'annoyed/disappointed', ':-c': 'annoyed/disappointed', ':-<':
'annoyed/disappointed', ':?C': 'annoyed/disappointed', ':<':
'annoyed/disappointed', ':[': 'annoyed/disappointed', ':{':
'annoyed/disappointed', ':=||': 'annoyed/disappointed', ':@':
'annoyed/disappointed', '>:(': 'annoyed/disappointed', ':/':
'annoyed/disappointed', ':\\': 'annoyed/disappointed', '=/':
'annoyed/disappointed', '=\\': 'annoyed/disappointed', '>:/':
'annoyed/disappointed', '>:\\': 'annoyed/disappointed', ':S':
'annoyed/disappointed', ':s': 'annoyed/disappointed', ':-S':
'annoyed/disappointed', ':-s': 'annoyed/disappointed', ':|':
'annoyed/disappointed', ':-|': 'annoyed/disappointed', ':$':
'annoyed/disappointed', '?_?': 'annoyed/disappointed', '(>_<)':
'annoyed/disappointed', '>_<': 'annoyed/disappointed', '>__<':
'annoyed/disappointed', '(>__<)': 'annoyed/disappointed', '(-.-)':
'annoyed/disappointed', '(-_-)': 'annoyed/disappointed', '(._.)':
'annoyed/disappointed', '/:)': 'annoyed/disappointed', ':-$':
'annoyed/disappointed', '>:P': 'annoyed/disappointed', 'K':
'annoyed/disappointed', '3:)': 'devilish', '3:-)': 'devilish', '}:-)':
'devilish', '}:)': 'devilish', '>:)': 'devilish', 'B-)': 'happy', ':-)':
'happy', ':)': 'happy', ':o)': 'happy', ':]': 'happy', ':3': 'happy',
':c)': 'happy', ':>': 'happy', '=]': 'happy', '8)': 'happy', '=)':
'happy', ':}': 'happy', ':^)': 'happy', ':?)': 'happy', ':-))': 'happy',
'<:-P': 'happy', '<:P': 'happy', '<:-p': 'happy', '<:p': 'happy', ';;)':
'happy', 'J': 'happy', '<3': 'heart', '^5': 'high-five', '>_>^':
'high-five', '^<_<': 'high-five', ':*': 'kiss', ':*)': 'kiss', ':^*':
'kiss', '}{': 'kiss', "('}{')": 'kiss', ':-D': 'laughing', ':D':
'laughing', '8-D': 'laughing', '8D': 'laughing', 'x-D': 'laughing',
'xD': 'laughing', 'X-D': 'laughing', 'XD': 'laughing', '=-D':
'laughing', '=D': 'laughing', ';D': 'laughing', '-3': 'laughing', '3':
'laughing', 'B^D': 'laughing', 'D:<': 'laughing', 'D:': 'laughing',
'D8': 'laughing', 'D;': 'laughing', 'D=': 'laughing', 'DX': 'laughing',
':-B': 'nerd', '8-)': 'nerd', '8)': 'nerd', '</3': 'sad', ":'(": 'sad',
":'-(": 'sad', 'QQ': 'sad', 'L': 'sad', ':#': 'sealed mouth', ':-#':
'sealed mouth', ':-X': 'sealed mouth', ':-x': 'sealed mouth', ':X':
'sealed mouth', ':x': 'sealed mouth', '??': 'shooting star', '??':
'shooting star', '~?': 'shooting star', '>:O': 'suprprised/shocked',
'>:o': 'suprprised/shocked', ':-O': 'suprprised/shocked', ':-o':
'suprprised/shocked', ':O': 'suprprised/shocked', ':o':
'suprprised/shocked', 'O_o': 'suprprised/shocked', 'o_O':
'suprprised/shocked', 'O.o': 'suprprised/shocked', 'o.O':
'suprprised/shocked', '(O_o)': 'suprprised/shocked', '(o_O)':
'suprprised/shocked', '(O.o)': 'suprprised/shocked', '(o.O)':
'suprprised/shocked', ":'-)": 'tears of happines', ":')":
'tears of happines', ':P': 'teasing/playful', ':p': 'teasing/playful',
'>:P': 'teasing/playful', '>:p': 'teasing/playful', 'X-P':
'teasing/playful', 'x-p': 'teasing/playful', 'xp': 'teasing/playful',
'XP': 'teasing/playful', ':-P': 'teasing/playful', ':-p':
'teasing/playful', '=P': 'teasing/playful', '=P': 'teasing/playful',
':-?': 'teasing/playful', ':-b': 'teasing/playful', ':b':
'teasing/playful', ';)': 'wink', u'º)': 'wink', ';-)': 'wink', ';]':
'wink', u'^Ü^': 'happy'}
special_tokens = EMOTICONS
from DAPOS.data.variation import Prefix, Suffix
EASY_WORDS = {u'ليا': [(Prefix(u'ل'), u'يا', Suffix(u''))], u'لي': [(Prefix
(u'ل'), u'ي', Suffix(u''))], u'لكم': [(Prefix(u'ل'), u'كم', Suffix(u'')
)], u'لكما': [(Prefix(u'ل'), u'كما', Suffix(u''))], u'له': [(Prefix(
u'ل'), u'ه', Suffix(u''))], u'لها': [(Prefix(u'ل'), u'ها', Suffix(u''))
], u'لهم': [(Prefix(u'ل'), u'هم', Suffix(u''))], u'لهما': [(Prefix(u'ل'
), u'هما', Suffix(u''))], u'لهن': [(Prefix(u'ل'), u'هم', Suffix(u''))],
u'بيا': [(Prefix(u'ب'), u'يا', Suffix(u''))], u'بي': [(Prefix(u'ب'),
u'ي', Suffix(u''))], u'بك': [(Prefix(u'ب'), u'ك', Suffix(u''))], u'بكم':
[(Prefix(u'ب'), u'كم', Suffix(u''))], u'بكما': [(Prefix(u'ب'), u'كما',
Suffix(u''))], u'به': [(Prefix(u'ب'), u'ه', Suffix(u''))], u'بها': [(
Prefix(u'ب'), u'ها', Suffix(u''))], u'بهما': [(Prefix(u'ب'), u'هما',
Suffix(u''))], u'بهم': [(Prefix(u'ب'), u'هم', Suffix(u''))], u'بهن': [(
Prefix(u'ب'), u'هن', Suffix(u''))], u'عليا': [(Prefix(u''), u'على',
Suffix(u'يا'))], u'فيا': [(Prefix(u'ف'), u'يا', Suffix(u''))]}
EMOTICONS_TAG = 'EMO'
PUNCTUATION_TAG = 'PUNC'
DIGIT_TAG = 'CD'
NOTDEFINED_TAG = 'NN'
| # coding: UTF-8 -*-
import os.path
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
EMOTICONS = {
"O:)": "angel",
"o:)": "angel",
"O:-)": "angel",
"o:-)": "angel",
"o:-3": "angel",
"o:3": "angel",
"O;^)": "angel",
">:[": "annoyed/disappointed",
":-(": "annoyed/disappointed",
":(": "annoyed/disappointed",
":((": "annoyed/disappointed",
":-((": "annoyed/disappointed",
":-c": "annoyed/disappointed",
":-<": "annoyed/disappointed",
":?C": "annoyed/disappointed",
":<": "annoyed/disappointed",
":[": "annoyed/disappointed",
":{": "annoyed/disappointed",
":=||": "annoyed/disappointed",
":@": "annoyed/disappointed",
">:(": "annoyed/disappointed",
":/": "annoyed/disappointed",
":\\": "annoyed/disappointed",
"=/": "annoyed/disappointed",
"=\\": "annoyed/disappointed",
">:/": "annoyed/disappointed",
">:\\": "annoyed/disappointed",
":S": "annoyed/disappointed",
":s": "annoyed/disappointed",
":-S": "annoyed/disappointed",
":-s": "annoyed/disappointed",
":|": "annoyed/disappointed",
":-|": "annoyed/disappointed",
":$": "annoyed/disappointed",
"?_?": "annoyed/disappointed",
"(>_<)": "annoyed/disappointed",
">_<": "annoyed/disappointed",
">__<": "annoyed/disappointed",
"(>__<)": "annoyed/disappointed",
"(-.-)": "annoyed/disappointed",
"(-_-)": "annoyed/disappointed",
"(._.)": "annoyed/disappointed",
"/:)": "annoyed/disappointed",
":-$": "annoyed/disappointed",
">:P": "annoyed/disappointed",
"K": "annoyed/disappointed",
"3:)": "devilish",
"3:-)": "devilish",
"}:-)": "devilish",
"}:)": "devilish",
">:)": "devilish",
"B-)": "happy",
":-)": "happy",
":)": "happy",
":o)": "happy",
":]": "happy",
":3": "happy",
":c)": "happy",
":>": "happy",
"=]": "happy",
"8)": "happy",
"=)": "happy",
":}": "happy",
":^)": "happy",
":?)": "happy",
":-))": "happy",
"<:-P": "happy",
"<:P": "happy",
"<:-p": "happy",
"<:p": "happy",
";;)": "happy",
"J": "happy",
"<3": "heart",
"^5": "high-five",
">_>^": "high-five",
"^<_<": "high-five",
":*": "kiss",
":*)": "kiss",
":^*": "kiss",
"}{": "kiss",
"('}{')": "kiss",
":-D": "laughing",
":D": "laughing",
"8-D": "laughing",
"8D": "laughing",
"x-D": "laughing",
"xD": "laughing",
"X-D": "laughing",
"XD": "laughing",
"=-D": "laughing",
"=D": "laughing",
";D": "laughing",
"-3": "laughing",
"3": "laughing",
"B^D": "laughing",
"D:<": "laughing",
"D:": "laughing",
"D8": "laughing",
"D;": "laughing",
"D=": "laughing",
"DX": "laughing",
":-B": "nerd",
"8-)": "nerd",
"8)": "nerd",
"</3": "sad",
":'(": "sad",
":'-(": "sad",
"QQ": "sad",
"L": "sad",
":#": "sealed mouth",
":-#": "sealed mouth",
":-X": "sealed mouth",
":-x": "sealed mouth",
":X": "sealed mouth",
":x": "sealed mouth",
"??": "shooting star",
"??": "shooting star",
"~?": "shooting star",
">:O": "suprprised/shocked",
">:o": "suprprised/shocked",
":-O": "suprprised/shocked",
":-o": "suprprised/shocked",
":O": "suprprised/shocked",
":o": "suprprised/shocked",
"O_o": "suprprised/shocked",
"o_O": "suprprised/shocked",
"O.o": "suprprised/shocked",
"o.O": "suprprised/shocked",
"(O_o)": "suprprised/shocked",
"(o_O)": "suprprised/shocked",
"(O.o)": "suprprised/shocked",
"(o.O)": "suprprised/shocked",
":'-)": "tears of happines",
":')": "tears of happines",
":P": "teasing/playful",
":p": "teasing/playful",
">:P": "teasing/playful",
">:p": "teasing/playful",
"X-P": "teasing/playful",
"x-p": "teasing/playful",
"xp": "teasing/playful",
"XP": "teasing/playful",
":-P": "teasing/playful",
":-p": "teasing/playful",
"=P": "teasing/playful",
"=P": "teasing/playful",
":-?": "teasing/playful",
":-b": "teasing/playful",
":b": "teasing/playful",
";)": "wink",
u"º)": "wink",
";-)": "wink",
";]": "wink",
u"^Ü^": "happy",
}
special_tokens = EMOTICONS
from DAPOS.data.variation import Prefix, Suffix
EASY_WORDS = {
u"ليا": [(Prefix(u"ل"), u"يا", Suffix(u""))],
u"لي": [(Prefix(u"ل"), u"ي", Suffix(u""))],
u"لكم": [(Prefix(u"ل"), u"كم", Suffix(u""))],
u"لكما": [(Prefix(u"ل"), u"كما", Suffix(u""))],
u"له": [(Prefix(u"ل"), u"ه", Suffix(u""))],
u"لها": [(Prefix(u"ل"), u"ها", Suffix(u""))],
u"لهم": [(Prefix(u"ل"), u"هم", Suffix(u""))],
u"لهما": [(Prefix(u"ل"), u"هما", Suffix(u""))],
u"لهن": [(Prefix(u"ل"), u"هم", Suffix(u""))],
u"بيا": [(Prefix(u"ب"), u"يا", Suffix(u""))],
u"بي": [(Prefix(u"ب"), u"ي", Suffix(u""))],
u"بك": [(Prefix(u"ب"), u"ك", Suffix(u""))],
u"بكم": [(Prefix(u"ب"), u"كم", Suffix(u""))],
u"بكما": [(Prefix(u"ب"), u"كما", Suffix(u""))],
u"به": [(Prefix(u"ب"), u"ه", Suffix(u""))],
u"بها": [(Prefix(u"ب"), u"ها", Suffix(u""))],
u"بهما": [(Prefix(u"ب"), u"هما", Suffix(u""))],
u"بهم": [(Prefix(u"ب"), u"هم", Suffix(u""))],
u"بهن": [(Prefix(u"ب"), u"هن", Suffix(u""))],
u"عليا": [(Prefix(u""), u"على", Suffix(u"يا"))],
u"فيا": [(Prefix(u"ف"), u"يا", Suffix(u""))],
}
EMOTICONS_TAG = 'EMO'
PUNCTUATION_TAG = 'PUNC'
DIGIT_TAG = 'CD'
NOTDEFINED_TAG = 'NN'
| null | [
0,
1,
2,
3
] |
981 | 3cc894570189fe545f5db3150d0b69c16dc211dc | class player:
<mask token>
<mask token>
<mask token>
| class player:
def __init__(self, name: str, symbol: str):
self._name = name
self._symbol = symbol
<mask token>
<mask token>
| class player:
def __init__(self, name: str, symbol: str):
self._name = name
self._symbol = symbol
<mask token>
def get_next_move(self):
"""
Asks user for next move
:return: board position
"""
return int(input('Enter your move: '))
| class player:
def __init__(self, name: str, symbol: str):
self._name = name
self._symbol = symbol
def decide_next_move(self):
"""
Checks all possible combinations to decide best next move
:return: board position
"""
pass
def get_next_move(self):
"""
Asks user for next move
:return: board position
"""
return int(input('Enter your move: '))
| null | [
1,
2,
3,
4
] |
982 | 3f8b8b8cfbe712f09734d0fb7302073187d65a73 | <mask token>
| <mask token>
def Sort(a):
i = 1
n = len(a)
while i < len(a):
j = i
print(i - 1, '\t', i)
while a[j - 1] > a[j] and j >= 0:
j -= 1
print('Key : ', a[i], ' inserting at: ', j, '\t in ', a)
if n > 2:
j1 = n - 2
temp = arr[n - 1]
while arr[j1] > temp and j1 >= 0:
arr[j1 + 1] = arr[j1]
j1 -= 1
print(' '.join(list(map(str, arr))))
arr[j1 + 1] = temp
print(' '.join(list(map(str, arr))))
elif n == 1:
return arr
else:
temp = arr[1]
arr[1] = arr[0]
print(' '.join(list(map(str, arr))))
arr[0] = temp
print(' '.join(list(map(str, arr))))
i += 1
return a
| '''
def Sort(a):
i=1
while i<len(a):
j=i
while j>0 and a[j-1] > a[j]:
temp = a[j-1]
a[j-1] = a[j]
a[j] = temp
j-=1
i+=1
return a
'''
def Sort(a):
i=1
n=len(a)
while i<len(a):
j=i
print(i-1,'\t',i)
while a[j-1]>a[j] and j>=0:
j-=1
print('Key : ',a[i],' inserting at: ',j, '\t in ',a)
if n>2:
j1=n-2
temp = arr[n-1]
while arr[j1] > temp and j1>=0:
arr[j1+1] = arr[j1]
j1-=1
print(' '.join(list(map(str, arr))))
arr[j1+1] = temp
print(' '.join(list(map(str, arr))))
elif n==1:
return arr
else: # len(arr) =2
temp = arr[1]
arr[1]=arr[0]
print(' '.join(list(map(str, arr))))
arr[0] = temp
print(' '.join(list(map(str, arr))))
i+=1
return a
| null | null | [
0,
1,
2
] |
983 | e95de58828c63dc8ae24efff314665a308f6ce0c | <mask token>
| <mask token>
class Migration(migrations.Migration):
<mask token>
<mask token>
<mask token>
| <mask token>
class Migration(migrations.Migration):
initial = True
dependencies = [('stores', '0001_initial')]
operations = [migrations.CreateModel(name='Assistants', fields=[('id',
models.UUIDField(default=uuid.uuid4, editable=False, primary_key=
True, serialize=False)), ('name_assistants', models.CharField(
max_length=255)), ('phone_assistants', models.IntegerField()), (
'email_assistants', models.EmailField(max_length=254)), (
'address_assistants', models.TextField()), ('timestamp', models.
DateField(auto_now=True)), ('fkstore', models.ForeignKey(on_delete=
django.db.models.deletion.CASCADE, related_name='assistants', to=
'stores.Store'))])]
| from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import uuid
class Migration(migrations.Migration):
initial = True
dependencies = [('stores', '0001_initial')]
operations = [migrations.CreateModel(name='Assistants', fields=[('id',
models.UUIDField(default=uuid.uuid4, editable=False, primary_key=
True, serialize=False)), ('name_assistants', models.CharField(
max_length=255)), ('phone_assistants', models.IntegerField()), (
'email_assistants', models.EmailField(max_length=254)), (
'address_assistants', models.TextField()), ('timestamp', models.
DateField(auto_now=True)), ('fkstore', models.ForeignKey(on_delete=
django.db.models.deletion.CASCADE, related_name='assistants', to=
'stores.Store'))])]
| # -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2017-12-13 02:06
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import uuid
class Migration(migrations.Migration):
initial = True
dependencies = [
('stores', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Assistants',
fields=[
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('name_assistants', models.CharField(max_length=255)),
('phone_assistants', models.IntegerField()),
('email_assistants', models.EmailField(max_length=254)),
('address_assistants', models.TextField()),
('timestamp', models.DateField(auto_now=True)),
('fkstore', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='assistants', to='stores.Store')),
],
),
]
| [
0,
1,
2,
3,
4
] |
984 | a406efcab62b2af67484da776f01fc4e6d20b697 | #!/usr/bin/env python3
def twoNumberSum(array, targetSum):
# Write your code here.
# O(n^2) time | O(1) space
''' Double for loop, quadratic run time
No variables increase as the input size increases,
therefore constant space complexity.
'''
for i in range(len(array) - 1):
firstNum = array[i]
for j in range(i + 1, len(array)):
secondNum = array[j]
if firstNum + secondNum == targetSum:
return [firstNum, secondNum]
return []
# Testing
if __name__ == '__main__':
import json
import debug_v1
with open('test.json', 'r') as t:
load_test = json.load(t)
test_case = load_test['test']
correct_output = load_test['answer']
for count, case in enumerate(test_case):
print(f'Test Case {count+1}:', end=' ')
value = twoNumberSum(case['array'], case['targetSum'])
debug_v1.debug(case, value, correct_output[count])
| null | null | null | null | [
0
] |
985 | d265781c6b618752a1afcf65ac137052c26388a6 | import xarray as xr
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import pickle
import seaborn as sns
%load_ext autoreload
%autoreload 2
%matplotlib
data_dir = Path('/Volumes/Lees_Extend/data/ecmwf_sowc/data/')
# READ in model (maybe want to do more predictions on historical data)
from src.models import load_model, Persistence
ealstm_path = data_dir / 'models/one_month_forecast/ealstm/model.pt'
assert ealstm_path.exists(), \
'Expected the unzipped file to have the model.pt file saved'
persistence = Persistence(data_folder=data_dir)
ealstm = load_model(model_path=ealstm_path)
# TODO: need to predict from X variables in other files
ealstm.evaluate_train_timesteps(year=np.arange(1990, 2010), month=3)
| null | null | null | null | [
0
] |
986 | 15edb1c051ccbc6f927c0a859288511f94a3d853 | <mask token>
class Encoder(Protocol):
<mask token>
class Decoder(Protocol):
def __call__(self, packed: EncodedData, **kwargs) ->Dict[Any, Any]:
...
class DataClassYAMLMixin(DataClassDictMixin):
def to_yaml(self: T, encoder: Encoder=yaml.dump, dict_params: Mapping=
MappingProxyType({}), **encoder_kwargs) ->EncodedData:
return encoder(self.to_dict(**dict(DEFAULT_DICT_PARAMS, **
dict_params)), **encoder_kwargs)
@classmethod
def from_yaml(cls: Type[T], data: EncodedData, decoder: Decoder=yaml.
safe_load, dict_params: Mapping=MappingProxyType({}), **decoder_kwargs
) ->T:
return cls.from_dict(decoder(data, **decoder_kwargs), **dict(
DEFAULT_DICT_PARAMS, **dict_params))
| <mask token>
class Encoder(Protocol):
def __call__(self, o, **kwargs) ->EncodedData:
...
class Decoder(Protocol):
def __call__(self, packed: EncodedData, **kwargs) ->Dict[Any, Any]:
...
class DataClassYAMLMixin(DataClassDictMixin):
def to_yaml(self: T, encoder: Encoder=yaml.dump, dict_params: Mapping=
MappingProxyType({}), **encoder_kwargs) ->EncodedData:
return encoder(self.to_dict(**dict(DEFAULT_DICT_PARAMS, **
dict_params)), **encoder_kwargs)
@classmethod
def from_yaml(cls: Type[T], data: EncodedData, decoder: Decoder=yaml.
safe_load, dict_params: Mapping=MappingProxyType({}), **decoder_kwargs
) ->T:
return cls.from_dict(decoder(data, **decoder_kwargs), **dict(
DEFAULT_DICT_PARAMS, **dict_params))
| <mask token>
DEFAULT_DICT_PARAMS = {'use_bytes': False, 'use_enum': False,
'use_datetime': False}
EncodedData = Union[str, bytes]
T = TypeVar('T', bound='DataClassYAMLMixin')
class Encoder(Protocol):
def __call__(self, o, **kwargs) ->EncodedData:
...
class Decoder(Protocol):
def __call__(self, packed: EncodedData, **kwargs) ->Dict[Any, Any]:
...
class DataClassYAMLMixin(DataClassDictMixin):
def to_yaml(self: T, encoder: Encoder=yaml.dump, dict_params: Mapping=
MappingProxyType({}), **encoder_kwargs) ->EncodedData:
return encoder(self.to_dict(**dict(DEFAULT_DICT_PARAMS, **
dict_params)), **encoder_kwargs)
@classmethod
def from_yaml(cls: Type[T], data: EncodedData, decoder: Decoder=yaml.
safe_load, dict_params: Mapping=MappingProxyType({}), **decoder_kwargs
) ->T:
return cls.from_dict(decoder(data, **decoder_kwargs), **dict(
DEFAULT_DICT_PARAMS, **dict_params))
| from types import MappingProxyType
from typing import Any, Dict, Mapping, Type, TypeVar, Union
import yaml
from typing_extensions import Protocol
from mashumaro.serializer.base import DataClassDictMixin
DEFAULT_DICT_PARAMS = {'use_bytes': False, 'use_enum': False,
'use_datetime': False}
EncodedData = Union[str, bytes]
T = TypeVar('T', bound='DataClassYAMLMixin')
class Encoder(Protocol):
def __call__(self, o, **kwargs) ->EncodedData:
...
class Decoder(Protocol):
def __call__(self, packed: EncodedData, **kwargs) ->Dict[Any, Any]:
...
class DataClassYAMLMixin(DataClassDictMixin):
def to_yaml(self: T, encoder: Encoder=yaml.dump, dict_params: Mapping=
MappingProxyType({}), **encoder_kwargs) ->EncodedData:
return encoder(self.to_dict(**dict(DEFAULT_DICT_PARAMS, **
dict_params)), **encoder_kwargs)
@classmethod
def from_yaml(cls: Type[T], data: EncodedData, decoder: Decoder=yaml.
safe_load, dict_params: Mapping=MappingProxyType({}), **decoder_kwargs
) ->T:
return cls.from_dict(decoder(data, **decoder_kwargs), **dict(
DEFAULT_DICT_PARAMS, **dict_params))
| from types import MappingProxyType
from typing import Any, Dict, Mapping, Type, TypeVar, Union
import yaml
from typing_extensions import Protocol
from mashumaro.serializer.base import DataClassDictMixin
DEFAULT_DICT_PARAMS = {
"use_bytes": False,
"use_enum": False,
"use_datetime": False,
}
EncodedData = Union[str, bytes]
T = TypeVar("T", bound="DataClassYAMLMixin")
class Encoder(Protocol): # pragma no cover
def __call__(self, o, **kwargs) -> EncodedData:
...
class Decoder(Protocol): # pragma no cover
def __call__(self, packed: EncodedData, **kwargs) -> Dict[Any, Any]:
...
class DataClassYAMLMixin(DataClassDictMixin):
def to_yaml(
self: T,
encoder: Encoder = yaml.dump, # type: ignore
dict_params: Mapping = MappingProxyType({}),
**encoder_kwargs,
) -> EncodedData:
return encoder(
self.to_dict(**dict(DEFAULT_DICT_PARAMS, **dict_params)),
**encoder_kwargs,
)
@classmethod
def from_yaml(
cls: Type[T],
data: EncodedData,
decoder: Decoder = yaml.safe_load, # type: ignore
dict_params: Mapping = MappingProxyType({}),
**decoder_kwargs,
) -> T:
return cls.from_dict(
decoder(data, **decoder_kwargs),
**dict(DEFAULT_DICT_PARAMS, **dict_params),
)
| [
6,
7,
8,
9,
10
] |
987 | 274185896ab5c11256d69699df69fc2c0dde4f2d | <mask token>
def get_links_from_markdown(path, name):
try:
with open(path, 'r') as file:
md = file.read()
html = markdown.markdown(md)
soup = BeautifulSoup(html, 'html.parser')
return soup.find_all('a')
except PermissionError:
print('Could not open "%s"' % path)
except UnicodeDecodeError:
print('Could not proccess "%s"' % path)
return []
def get_guide_packages(src_dir='content'):
if len(sys.argv) > 1:
src_dir = sys.argv[1]
subjects = defaultdict(list)
for entry in os.scandir(src_dir):
name = entry.name[:-3]
for link in get_links_from_markdown(entry.path, name):
if len(link.text.split(':')) == 2:
subjects[name].append(link.text)
return subjects
<mask token>
| <mask token>
def get_links_from_markdown(path, name):
try:
with open(path, 'r') as file:
md = file.read()
html = markdown.markdown(md)
soup = BeautifulSoup(html, 'html.parser')
return soup.find_all('a')
except PermissionError:
print('Could not open "%s"' % path)
except UnicodeDecodeError:
print('Could not proccess "%s"' % path)
return []
def get_guide_packages(src_dir='content'):
if len(sys.argv) > 1:
src_dir = sys.argv[1]
subjects = defaultdict(list)
for entry in os.scandir(src_dir):
name = entry.name[:-3]
for link in get_links_from_markdown(entry.path, name):
if len(link.text.split(':')) == 2:
subjects[name].append(link.text)
return subjects
def write_packages(packages, path='packages-guide'):
with open(path, 'w') as out:
out.write('\n# packages from http://guide.meteor.com\n')
for subject, links in packages.items():
out.write('\n# %s\n' % subject)
for link in links:
out.write('%s\n' % link)
<mask token>
| <mask token>
def get_links_from_markdown(path, name):
try:
with open(path, 'r') as file:
md = file.read()
html = markdown.markdown(md)
soup = BeautifulSoup(html, 'html.parser')
return soup.find_all('a')
except PermissionError:
print('Could not open "%s"' % path)
except UnicodeDecodeError:
print('Could not proccess "%s"' % path)
return []
def get_guide_packages(src_dir='content'):
if len(sys.argv) > 1:
src_dir = sys.argv[1]
subjects = defaultdict(list)
for entry in os.scandir(src_dir):
name = entry.name[:-3]
for link in get_links_from_markdown(entry.path, name):
if len(link.text.split(':')) == 2:
subjects[name].append(link.text)
return subjects
def write_packages(packages, path='packages-guide'):
with open(path, 'w') as out:
out.write('\n# packages from http://guide.meteor.com\n')
for subject, links in packages.items():
out.write('\n# %s\n' % subject)
for link in links:
out.write('%s\n' % link)
if __name__ == '__main__':
GUIDE = get_guide_packages()
write_packages(GUIDE)
| <mask token>
from collections import defaultdict
import os
import sys
import markdown
from bs4 import BeautifulSoup
def get_links_from_markdown(path, name):
try:
with open(path, 'r') as file:
md = file.read()
html = markdown.markdown(md)
soup = BeautifulSoup(html, 'html.parser')
return soup.find_all('a')
except PermissionError:
print('Could not open "%s"' % path)
except UnicodeDecodeError:
print('Could not proccess "%s"' % path)
return []
def get_guide_packages(src_dir='content'):
if len(sys.argv) > 1:
src_dir = sys.argv[1]
subjects = defaultdict(list)
for entry in os.scandir(src_dir):
name = entry.name[:-3]
for link in get_links_from_markdown(entry.path, name):
if len(link.text.split(':')) == 2:
subjects[name].append(link.text)
return subjects
def write_packages(packages, path='packages-guide'):
with open(path, 'w') as out:
out.write('\n# packages from http://guide.meteor.com\n')
for subject, links in packages.items():
out.write('\n# %s\n' % subject)
for link in links:
out.write('%s\n' % link)
if __name__ == '__main__':
GUIDE = get_guide_packages()
write_packages(GUIDE)
| ''' extract package names from the Meteor guide and write them to packages-guide
Uses the content folder of https://github.com/meteor/guide '''
from collections import defaultdict
import os
import sys
import markdown
from bs4 import BeautifulSoup
def get_links_from_markdown(path, name):
try:
with open(path, 'r') as file:
md = file.read()
html = markdown.markdown(md)
soup = BeautifulSoup(html, 'html.parser')
return soup.find_all('a')
except PermissionError:
print('Could not open "%s"' % path)
except UnicodeDecodeError:
print('Could not proccess "%s"' % path)
return []
def get_guide_packages(src_dir='content'):
if len(sys.argv) > 1:
src_dir = sys.argv[1]
subjects = defaultdict(list)
for entry in os.scandir(src_dir):
name = entry.name[:-3]
for link in get_links_from_markdown(entry.path, name):
if len(link.text.split(':')) == 2: # packages only
subjects[name].append(link.text)
return subjects
def write_packages(packages, path='packages-guide'):
with open(path, 'w') as out:
out.write('\n# packages from http://guide.meteor.com\n')
for subject, links in packages.items():
out.write('\n# %s\n' % subject)
for link in links:
out.write('%s\n' % link)
if __name__ == '__main__':
GUIDE = get_guide_packages()
write_packages(GUIDE)
| [
2,
3,
4,
5,
6
] |
988 | 21e86e4719cda5c40f780aca6e56eb13c8c9b8e5 | <mask token>
class StepName(Enum):
<mask token>
null = 'null'
unitTest = 'unitTest'
integrationTest = 'integrationTest'
changeLog = 'changeLog'
requirements = 'requirements'
docs = 'docs'
build = 'build'
githubRelease = 'githubRelease'
artifactPublication = 'artifactPublication'
docPublication = 'docPublication'
class NullStep(Step):
"""This is a "null" or "no-op" step that does nothing."""
def execute(self):
pass
class ChangeLogStep(Step):
"""This step generates a PDS-style changelog"""
_sections = (
'{"improvements":{"prefix":"**Improvements:**","labels":["Epic"]},"defects":{"prefix":"**Defects:**","labels":["bug"]},"deprecations":{"prefix":"**Deprecations:**","labels":["deprecation"]}}'
)
def execute(self):
token = self.getToken()
if not token:
_logger.info(
'🤷\u200d♀️ No GitHub administrative token; cannot generate changelog'
)
return
invoke(['github_changelog_generator', '--user', self.getOwner(),
'--project', self.getRepository(), '--output', 'CHANGELOG.md',
'--token', token, '--configure-sections', self._sections,
'--no-pull-requests', '--issues-label',
'**Other closed issues:**', '--issue-line-labels',
'high,low,medium'])
commit('CHANGELOG.md', 'Update changelog')
class RequirementsStep(Step):
"""This step generates a PDS-style requirements file"""
def execute(self):
token = self.getToken()
if not token:
_logger.info(
'🤷\u200d♀️ No GitHub administrative token; cannot generate requirements'
)
return
argv = ['requirement-report', '--format', 'md', '--organization',
self.getOwner(), '--repository', self.getRepository(),
'--output', 'docs/requirements/', '--token', token]
if not self.assembly.isStable():
argv.append('--dev')
generatedFile = invoke(argv).strip()
if not generatedFile:
_logger.warn(
'🤨 Did not get a requirements file from the requirement-report; will skip it'
)
return
commit(generatedFile, 'Update requirements')
class DocPublicationStep(Step):
def getDocDir(self):
raise NotImplementedError('Subclasses must implement ``getDocDir``')
def execute(self):
token = self.getToken()
if not token:
_logger.info(
'🤷\u200d♀️ No GitHub administrative token; cannot send doc artifacts to GitHub'
)
return
github = github3.login(token=token)
repo = github.repository(self.getOwner(), self.getRepository())
tmpFileName = None
try:
release = repo.releases().next()
fd, tmpFileName = tempfile.mkstemp('.zip')
with zipfile.ZipFile(os.fdopen(fd, 'wb'), 'w') as zf:
for folder, subdirs, filenames in os.walk(self.getDocDir()):
for fn in filenames:
path = os.path.join(folder, fn)
if os.path.isfile(path):
zf.write(path, path[len(self.getDocDir()) + 1:])
for asset in release.assets():
if asset.name == 'documentation.zip':
asset.delete()
break
with open(tmpFileName, 'rb') as tmpFile:
release.upload_asset('application/zip', 'documentation.zip',
tmpFile, 'Documentation (zip)')
except StopIteration:
_logger.info(
'🧐 No releases found at all, so I cannot publish documentation assets to them'
)
return
finally:
if tmpFileName is not None:
os.remove(tmpFileName)
| <mask token>
class Step(object):
<mask token>
def __init__(self, assembly):
"""Initialize a step with the given ``assembly``"""
self.assembly = assembly
def __repr__(self):
return f'<{self.__class__.__name__}()>'
def execute(self):
raise NotImplementedError('Subclasses must implement ``execute``')
<mask token>
<mask token>
<mask token>
class StepName(Enum):
"""Enumerated identifiers for each of the possible steps of a roundup"""
null = 'null'
unitTest = 'unitTest'
integrationTest = 'integrationTest'
changeLog = 'changeLog'
requirements = 'requirements'
docs = 'docs'
build = 'build'
githubRelease = 'githubRelease'
artifactPublication = 'artifactPublication'
docPublication = 'docPublication'
class NullStep(Step):
"""This is a "null" or "no-op" step that does nothing."""
def execute(self):
pass
class ChangeLogStep(Step):
"""This step generates a PDS-style changelog"""
_sections = (
'{"improvements":{"prefix":"**Improvements:**","labels":["Epic"]},"defects":{"prefix":"**Defects:**","labels":["bug"]},"deprecations":{"prefix":"**Deprecations:**","labels":["deprecation"]}}'
)
def execute(self):
token = self.getToken()
if not token:
_logger.info(
'🤷\u200d♀️ No GitHub administrative token; cannot generate changelog'
)
return
invoke(['github_changelog_generator', '--user', self.getOwner(),
'--project', self.getRepository(), '--output', 'CHANGELOG.md',
'--token', token, '--configure-sections', self._sections,
'--no-pull-requests', '--issues-label',
'**Other closed issues:**', '--issue-line-labels',
'high,low,medium'])
commit('CHANGELOG.md', 'Update changelog')
class RequirementsStep(Step):
"""This step generates a PDS-style requirements file"""
def execute(self):
token = self.getToken()
if not token:
_logger.info(
'🤷\u200d♀️ No GitHub administrative token; cannot generate requirements'
)
return
argv = ['requirement-report', '--format', 'md', '--organization',
self.getOwner(), '--repository', self.getRepository(),
'--output', 'docs/requirements/', '--token', token]
if not self.assembly.isStable():
argv.append('--dev')
generatedFile = invoke(argv).strip()
if not generatedFile:
_logger.warn(
'🤨 Did not get a requirements file from the requirement-report; will skip it'
)
return
commit(generatedFile, 'Update requirements')
class DocPublicationStep(Step):
def getDocDir(self):
raise NotImplementedError('Subclasses must implement ``getDocDir``')
def execute(self):
token = self.getToken()
if not token:
_logger.info(
'🤷\u200d♀️ No GitHub administrative token; cannot send doc artifacts to GitHub'
)
return
github = github3.login(token=token)
repo = github.repository(self.getOwner(), self.getRepository())
tmpFileName = None
try:
release = repo.releases().next()
fd, tmpFileName = tempfile.mkstemp('.zip')
with zipfile.ZipFile(os.fdopen(fd, 'wb'), 'w') as zf:
for folder, subdirs, filenames in os.walk(self.getDocDir()):
for fn in filenames:
path = os.path.join(folder, fn)
if os.path.isfile(path):
zf.write(path, path[len(self.getDocDir()) + 1:])
for asset in release.assets():
if asset.name == 'documentation.zip':
asset.delete()
break
with open(tmpFileName, 'rb') as tmpFile:
release.upload_asset('application/zip', 'documentation.zip',
tmpFile, 'Documentation (zip)')
except StopIteration:
_logger.info(
'🧐 No releases found at all, so I cannot publish documentation assets to them'
)
return
finally:
if tmpFileName is not None:
os.remove(tmpFileName)
| <mask token>
class Step(object):
<mask token>
def __init__(self, assembly):
"""Initialize a step with the given ``assembly``"""
self.assembly = assembly
def __repr__(self):
return f'<{self.__class__.__name__}()>'
def execute(self):
raise NotImplementedError('Subclasses must implement ``execute``')
def getRepository(self):
"""Utility: get the name of the GitHub repository"""
return self.assembly.context.environ.get('GITHUB_REPOSITORY').split('/'
)[1]
<mask token>
<mask token>
class StepName(Enum):
"""Enumerated identifiers for each of the possible steps of a roundup"""
null = 'null'
unitTest = 'unitTest'
integrationTest = 'integrationTest'
changeLog = 'changeLog'
requirements = 'requirements'
docs = 'docs'
build = 'build'
githubRelease = 'githubRelease'
artifactPublication = 'artifactPublication'
docPublication = 'docPublication'
class NullStep(Step):
"""This is a "null" or "no-op" step that does nothing."""
def execute(self):
pass
class ChangeLogStep(Step):
"""This step generates a PDS-style changelog"""
_sections = (
'{"improvements":{"prefix":"**Improvements:**","labels":["Epic"]},"defects":{"prefix":"**Defects:**","labels":["bug"]},"deprecations":{"prefix":"**Deprecations:**","labels":["deprecation"]}}'
)
def execute(self):
token = self.getToken()
if not token:
_logger.info(
'🤷\u200d♀️ No GitHub administrative token; cannot generate changelog'
)
return
invoke(['github_changelog_generator', '--user', self.getOwner(),
'--project', self.getRepository(), '--output', 'CHANGELOG.md',
'--token', token, '--configure-sections', self._sections,
'--no-pull-requests', '--issues-label',
'**Other closed issues:**', '--issue-line-labels',
'high,low,medium'])
commit('CHANGELOG.md', 'Update changelog')
class RequirementsStep(Step):
"""This step generates a PDS-style requirements file"""
def execute(self):
token = self.getToken()
if not token:
_logger.info(
'🤷\u200d♀️ No GitHub administrative token; cannot generate requirements'
)
return
argv = ['requirement-report', '--format', 'md', '--organization',
self.getOwner(), '--repository', self.getRepository(),
'--output', 'docs/requirements/', '--token', token]
if not self.assembly.isStable():
argv.append('--dev')
generatedFile = invoke(argv).strip()
if not generatedFile:
_logger.warn(
'🤨 Did not get a requirements file from the requirement-report; will skip it'
)
return
commit(generatedFile, 'Update requirements')
class DocPublicationStep(Step):
def getDocDir(self):
raise NotImplementedError('Subclasses must implement ``getDocDir``')
def execute(self):
token = self.getToken()
if not token:
_logger.info(
'🤷\u200d♀️ No GitHub administrative token; cannot send doc artifacts to GitHub'
)
return
github = github3.login(token=token)
repo = github.repository(self.getOwner(), self.getRepository())
tmpFileName = None
try:
release = repo.releases().next()
fd, tmpFileName = tempfile.mkstemp('.zip')
with zipfile.ZipFile(os.fdopen(fd, 'wb'), 'w') as zf:
for folder, subdirs, filenames in os.walk(self.getDocDir()):
for fn in filenames:
path = os.path.join(folder, fn)
if os.path.isfile(path):
zf.write(path, path[len(self.getDocDir()) + 1:])
for asset in release.assets():
if asset.name == 'documentation.zip':
asset.delete()
break
with open(tmpFileName, 'rb') as tmpFile:
release.upload_asset('application/zip', 'documentation.zip',
tmpFile, 'Documentation (zip)')
except StopIteration:
_logger.info(
'🧐 No releases found at all, so I cannot publish documentation assets to them'
)
return
finally:
if tmpFileName is not None:
os.remove(tmpFileName)
| <mask token>
_logger = logging.getLogger(__name__)
class Step(object):
"""An abstract step; executing steps comprises a roundup"""
def __init__(self, assembly):
"""Initialize a step with the given ``assembly``"""
self.assembly = assembly
def __repr__(self):
return f'<{self.__class__.__name__}()>'
def execute(self):
raise NotImplementedError('Subclasses must implement ``execute``')
def getRepository(self):
"""Utility: get the name of the GitHub repository"""
return self.assembly.context.environ.get('GITHUB_REPOSITORY').split('/'
)[1]
def getToken(self):
"""Utility: get the administrative GitHub token"""
return self.assembly.context.environ.get('ADMIN_GITHUB_TOKEN')
def getOwner(self):
"""Utility: return the owning user/organization of the repository in use"""
return self.assembly.context.environ.get('GITHUB_REPOSITORY').split('/'
)[0]
class StepName(Enum):
"""Enumerated identifiers for each of the possible steps of a roundup"""
null = 'null'
unitTest = 'unitTest'
integrationTest = 'integrationTest'
changeLog = 'changeLog'
requirements = 'requirements'
docs = 'docs'
build = 'build'
githubRelease = 'githubRelease'
artifactPublication = 'artifactPublication'
docPublication = 'docPublication'
class NullStep(Step):
"""This is a "null" or "no-op" step that does nothing."""
def execute(self):
pass
class ChangeLogStep(Step):
"""This step generates a PDS-style changelog"""
_sections = (
'{"improvements":{"prefix":"**Improvements:**","labels":["Epic"]},"defects":{"prefix":"**Defects:**","labels":["bug"]},"deprecations":{"prefix":"**Deprecations:**","labels":["deprecation"]}}'
)
def execute(self):
token = self.getToken()
if not token:
_logger.info(
'🤷\u200d♀️ No GitHub administrative token; cannot generate changelog'
)
return
invoke(['github_changelog_generator', '--user', self.getOwner(),
'--project', self.getRepository(), '--output', 'CHANGELOG.md',
'--token', token, '--configure-sections', self._sections,
'--no-pull-requests', '--issues-label',
'**Other closed issues:**', '--issue-line-labels',
'high,low,medium'])
commit('CHANGELOG.md', 'Update changelog')
class RequirementsStep(Step):
"""This step generates a PDS-style requirements file"""
def execute(self):
token = self.getToken()
if not token:
_logger.info(
'🤷\u200d♀️ No GitHub administrative token; cannot generate requirements'
)
return
argv = ['requirement-report', '--format', 'md', '--organization',
self.getOwner(), '--repository', self.getRepository(),
'--output', 'docs/requirements/', '--token', token]
if not self.assembly.isStable():
argv.append('--dev')
generatedFile = invoke(argv).strip()
if not generatedFile:
_logger.warn(
'🤨 Did not get a requirements file from the requirement-report; will skip it'
)
return
commit(generatedFile, 'Update requirements')
class DocPublicationStep(Step):
def getDocDir(self):
raise NotImplementedError('Subclasses must implement ``getDocDir``')
def execute(self):
token = self.getToken()
if not token:
_logger.info(
'🤷\u200d♀️ No GitHub administrative token; cannot send doc artifacts to GitHub'
)
return
github = github3.login(token=token)
repo = github.repository(self.getOwner(), self.getRepository())
tmpFileName = None
try:
release = repo.releases().next()
fd, tmpFileName = tempfile.mkstemp('.zip')
with zipfile.ZipFile(os.fdopen(fd, 'wb'), 'w') as zf:
for folder, subdirs, filenames in os.walk(self.getDocDir()):
for fn in filenames:
path = os.path.join(folder, fn)
if os.path.isfile(path):
zf.write(path, path[len(self.getDocDir()) + 1:])
for asset in release.assets():
if asset.name == 'documentation.zip':
asset.delete()
break
with open(tmpFileName, 'rb') as tmpFile:
release.upload_asset('application/zip', 'documentation.zip',
tmpFile, 'Documentation (zip)')
except StopIteration:
_logger.info(
'🧐 No releases found at all, so I cannot publish documentation assets to them'
)
return
finally:
if tmpFileName is not None:
os.remove(tmpFileName)
| # encoding: utf-8
'''🤠 PDS Roundup: A step takes you further towards a complete roundup'''
from enum import Enum
from .util import commit, invoke
import logging, github3, tempfile, zipfile, os
_logger = logging.getLogger(__name__)
class Step(object):
'''An abstract step; executing steps comprises a roundup'''
def __init__(self, assembly):
'''Initialize a step with the given ``assembly``'''
self.assembly = assembly
def __repr__(self):
return f'<{self.__class__.__name__}()>'
def execute(self):
raise NotImplementedError('Subclasses must implement ``execute``')
def getRepository(self):
'''Utility: get the name of the GitHub repository'''
return self.assembly.context.environ.get('GITHUB_REPOSITORY').split('/')[1]
def getToken(self):
'''Utility: get the administrative GitHub token'''
return self.assembly.context.environ.get('ADMIN_GITHUB_TOKEN')
def getOwner(self):
'''Utility: return the owning user/organization of the repository in use'''
return self.assembly.context.environ.get('GITHUB_REPOSITORY').split('/')[0]
class StepName(Enum):
'''Enumerated identifiers for each of the possible steps of a roundup'''
null = 'null'
unitTest = 'unitTest'
integrationTest = 'integrationTest'
changeLog = 'changeLog'
requirements = 'requirements'
docs = 'docs'
build = 'build'
githubRelease = 'githubRelease'
artifactPublication = 'artifactPublication'
docPublication = 'docPublication'
# Common Steps
# ============
#
# The folowing are concrete Step classes that are shared between contexts;
# i.e., they're independent of Python, Maven, etc.
class NullStep(Step):
'''This is a "null" or "no-op" step that does nothing.'''
def execute(self):
pass
# But for development, this sure is handy:
# import pdb;pdb.set_trace()
# import subprocess
# subprocess.run('/bin/sh')
class ChangeLogStep(Step):
'''This step generates a PDS-style changelog'''
_sections = '{"improvements":{"prefix":"**Improvements:**","labels":["Epic"]},"defects":{"prefix":"**Defects:**","labels":["bug"]},"deprecations":{"prefix":"**Deprecations:**","labels":["deprecation"]}}'
def execute(self):
token = self.getToken()
if not token:
_logger.info('🤷♀️ No GitHub administrative token; cannot generate changelog')
return
invoke([
'github_changelog_generator',
'--user',
self.getOwner(),
'--project',
self.getRepository(),
'--output',
'CHANGELOG.md',
'--token',
token,
'--configure-sections',
self._sections,
'--no-pull-requests',
'--issues-label',
'**Other closed issues:**',
'--issue-line-labels',
'high,low,medium'
])
commit('CHANGELOG.md', 'Update changelog')
class RequirementsStep(Step):
'''This step generates a PDS-style requirements file'''
def execute(self):
token = self.getToken()
if not token:
_logger.info('🤷♀️ No GitHub administrative token; cannot generate requirements')
return
argv = [
'requirement-report',
'--format',
'md',
'--organization',
self.getOwner(),
'--repository',
self.getRepository(),
'--output',
'docs/requirements/',
'--token',
token
]
if not self.assembly.isStable():
argv.append('--dev')
generatedFile = invoke(argv).strip()
if not generatedFile:
_logger.warn('🤨 Did not get a requirements file from the requirement-report; will skip it')
return
commit(generatedFile, 'Update requirements')
class DocPublicationStep(Step):
def getDocDir(self):
raise NotImplementedError('Subclasses must implement ``getDocDir``')
def execute(self):
token = self.getToken()
if not token:
_logger.info('🤷♀️ No GitHub administrative token; cannot send doc artifacts to GitHub')
return
github = github3.login(token=token)
repo = github.repository(self.getOwner(), self.getRepository())
# 😮 TODO: There's a race here. This code is looking for the *latest* release, which
# we assume was made by the earlier ``StepName.githubRelease`` step. It's possible someone
# could create another release in between these steps! It'd be better if we fetched the
# release being worked on directly.
tmpFileName = None
try:
release = repo.releases().next() # ← here
# Make a ZIP archive of the docs
fd, tmpFileName = tempfile.mkstemp('.zip')
with zipfile.ZipFile(os.fdopen(fd, 'wb'), 'w') as zf:
for folder, subdirs, filenames in os.walk(self.getDocDir()):
for fn in filenames:
path = os.path.join(folder, fn)
# Avoid things like Unix-domain sockets if they just happen to appear:
if os.path.isfile(path):
zf.write(path, path[len(self.getDocDir()) + 1:])
# Remove any existing ``documentation.zip``
for asset in release.assets():
if asset.name == 'documentation.zip':
asset.delete()
break
# Add the new ZIP file as a downloadable asset
with open(tmpFileName, 'rb') as tmpFile:
release.upload_asset('application/zip', 'documentation.zip', tmpFile, 'Documentation (zip)')
except StopIteration:
_logger.info('🧐 No releases found at all, so I cannot publish documentation assets to them')
return
finally:
if tmpFileName is not None: os.remove(tmpFileName)
| [
15,
20,
21,
25,
27
] |
989 | 333914f99face050376e4713ca118f2347e50018 | <mask token>
| <mask token>
urlpatterns.append(path('sub/', include(
'sandbox.staticpages_testapp.sub_urls')))
| <mask token>
staticpages_loader = StaticpagesLoader()
urlpatterns = [path('admin/', admin.site.urls), *staticpages_loader.
build_urls(['index', {'template_path': 'index.html', 'name': 'foo',
'extra': 'free for use'}])]
urlpatterns.append(path('sub/', include(
'sandbox.staticpages_testapp.sub_urls')))
| <mask token>
from django.contrib import admin
from django.urls import include, path
from staticpages.loader import StaticpagesLoader
staticpages_loader = StaticpagesLoader()
urlpatterns = [path('admin/', admin.site.urls), *staticpages_loader.
build_urls(['index', {'template_path': 'index.html', 'name': 'foo',
'extra': 'free for use'}])]
urlpatterns.append(path('sub/', include(
'sandbox.staticpages_testapp.sub_urls')))
| """
URL Configuration to test mounting created urls from registries
"""
from django.contrib import admin
from django.urls import include, path
from staticpages.loader import StaticpagesLoader
staticpages_loader = StaticpagesLoader()
urlpatterns = [
path("admin/", admin.site.urls),
# Add base pages urls using the same template
*staticpages_loader.build_urls([
"index",
{
"template_path": "index.html",
"name": "foo",
"extra": "free for use",
},
])
]
# Include another urls map on a sub path
urlpatterns.append(
path("sub/", include("sandbox.staticpages_testapp.sub_urls")),
)
| [
0,
1,
2,
3,
4
] |
990 | 5ef7c838d8e9a05a09bd974790a85ff36d56a336 | import mock
def exc():
print 'here should raise'
def recursion():
try:
print 'here'
return exc()
except StandardError:
print 'exc'
return recursion()
def test_recursion():
global exc
exc = mock.Mock(side_effect = [StandardError, StandardError, mock.DEFAULT])
recursion()
test_recursion() | null | null | null | null | [
0
] |
991 | 88b3dd7414a68de65bafb317fbd4da2b1bc933fc | <mask token>
def corec_unlock(lock):
locks_fn = 'corec_locks.json'
with open(locks_fn) as f:
locks = json.load(f)
locks[lock] = False
with open(locks_fn, 'w') as f:
json.dump(locks, f, indent=4)
| <mask token>
def corec_set(parameter, value):
params_fn = 'corec_parameters.json'
with open(params_fn) as f:
params = json.load(f)
params[parameter] = value
with open(params_fn, 'w') as f:
json.dump(params, f, indent=4)
def corec_get(parameter):
params_fn = 'corec_parameters.json'
with open(params_fn) as f:
params = json.load(f)
if parameter in params:
return params[parameter]
return None
<mask token>
def corec_unlock(lock):
locks_fn = 'corec_locks.json'
with open(locks_fn) as f:
locks = json.load(f)
locks[lock] = False
with open(locks_fn, 'w') as f:
json.dump(locks, f, indent=4)
| <mask token>
def corec_set(parameter, value):
params_fn = 'corec_parameters.json'
with open(params_fn) as f:
params = json.load(f)
params[parameter] = value
with open(params_fn, 'w') as f:
json.dump(params, f, indent=4)
def corec_get(parameter):
params_fn = 'corec_parameters.json'
with open(params_fn) as f:
params = json.load(f)
if parameter in params:
return params[parameter]
return None
def corec_lock(lock):
locks_fn = 'corec_locks.json'
with open(locks_fn) as f:
locks = json.load(f)
locks[lock] = True
with open(locks_fn, 'w') as f:
json.dump(locks, f, indent=4)
def corec_unlock(lock):
locks_fn = 'corec_locks.json'
with open(locks_fn) as f:
locks = json.load(f)
locks[lock] = False
with open(locks_fn, 'w') as f:
json.dump(locks, f, indent=4)
| import json
def corec_set(parameter, value):
params_fn = 'corec_parameters.json'
with open(params_fn) as f:
params = json.load(f)
params[parameter] = value
with open(params_fn, 'w') as f:
json.dump(params, f, indent=4)
def corec_get(parameter):
params_fn = 'corec_parameters.json'
with open(params_fn) as f:
params = json.load(f)
if parameter in params:
return params[parameter]
return None
def corec_lock(lock):
locks_fn = 'corec_locks.json'
with open(locks_fn) as f:
locks = json.load(f)
locks[lock] = True
with open(locks_fn, 'w') as f:
json.dump(locks, f, indent=4)
def corec_unlock(lock):
locks_fn = 'corec_locks.json'
with open(locks_fn) as f:
locks = json.load(f)
locks[lock] = False
with open(locks_fn, 'w') as f:
json.dump(locks, f, indent=4)
|
import json
def corec_set(parameter, value):
params_fn = "corec_parameters.json"
with open(params_fn) as f:
params = json.load(f)
params[parameter] = value
with open(params_fn, 'w') as f:
json.dump(params, f, indent=4)
def corec_get(parameter):
params_fn = "corec_parameters.json"
with open(params_fn) as f:
params = json.load(f)
if parameter in params:
return params[parameter]
return None
def corec_lock(lock):
locks_fn = "corec_locks.json"
with open(locks_fn) as f:
locks = json.load(f)
locks[lock] = True
with open(locks_fn, 'w') as f:
json.dump(locks, f, indent=4)
def corec_unlock(lock):
locks_fn = "corec_locks.json"
with open(locks_fn) as f:
locks = json.load(f)
locks[lock] = False
with open(locks_fn, 'w') as f:
json.dump(locks, f, indent=4)
| [
1,
3,
4,
5,
6
] |
992 | 095374aa7613f163fedbd7d253219478108d4f42 | <mask token>
| broker_url = 'redis://120.78.168.67/10'
CELERY_RESULT_BACKEND = 'redis://120.78.168.67/0'
CELERY_TIMEZONE = 'Asia/Shanghai'
| # Celery配置文件
# 指定消息队列为Redis
broker_url = "redis://120.78.168.67/10"
CELERY_RESULT_BACKEND = "redis://120.78.168.67/0"
CELERY_TIMEZONE = 'Asia/Shanghai'
| null | null | [
0,
1,
2
] |
993 | 2c1de638ac25a9f27b1af94fa075b7c1b9df6884 | <mask token>
class Migration(SchemaMigration):
def forwards(self, orm):
db.add_column(u'smslink_phoneuser', 'last_contacted', self.gf(
'django.db.models.fields.DateTimeField')(null=True, blank=True),
keep_default=False)
<mask token>
<mask token>
<mask token>
| <mask token>
class Migration(SchemaMigration):
def forwards(self, orm):
db.add_column(u'smslink_phoneuser', 'last_contacted', self.gf(
'django.db.models.fields.DateTimeField')(null=True, blank=True),
keep_default=False)
def backwards(self, orm):
db.delete_column(u'smslink_phoneuser', 'last_contacted')
<mask token>
<mask token>
| <mask token>
class Migration(SchemaMigration):
def forwards(self, orm):
db.add_column(u'smslink_phoneuser', 'last_contacted', self.gf(
'django.db.models.fields.DateTimeField')(null=True, blank=True),
keep_default=False)
def backwards(self, orm):
db.delete_column(u'smslink_phoneuser', 'last_contacted')
models = {u'foodproviders.entryrequirement': {'Meta': {'object_name':
'EntryRequirement'}, u'id': ('django.db.models.fields.AutoField', [
], {'primary_key': 'True'}), 'requirement': (
'django.db.models.fields.CharField', [], {'unique': 'True',
'max_length': '2'})}, u'foodproviders.postcode': {'Meta': {
'unique_together': "(('outward', 'inward'),)", 'object_name':
'PostCode'}, u'id': ('django.db.models.fields.AutoField', [], {
'primary_key': 'True'}), 'inward': (
'django.db.models.fields.CharField', [], {'max_length': '5'}),
'location': ('django.contrib.gis.db.models.fields.PointField', [],
{'null': 'True', 'blank': 'True'}), 'outward': (
'django.db.models.fields.CharField', [], {'max_length': '5',
'db_index': 'True'})}, u'smslink.phoneuser': {'Meta': {
'object_name': 'PhoneUser'}, u'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_contacted': ('django.db.models.fields.DateTimeField', [], {
'null': 'True', 'blank': 'True'}), 'number': (
'django.db.models.fields.CharField', [], {'max_length': '20',
'db_index': 'True'}), 'post_code': (
'django.db.models.fields.related.ForeignKey', [], {'to':
u"orm['foodproviders.PostCode']", 'null': 'True', 'blank': 'True'}),
'requirements_satisfied': (
'django.db.models.fields.related.ManyToManyField', [], {'to':
u"orm['foodproviders.EntryRequirement']", 'symmetrical': 'False'})}}
complete_apps = ['smslink']
| import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
db.add_column(u'smslink_phoneuser', 'last_contacted', self.gf(
'django.db.models.fields.DateTimeField')(null=True, blank=True),
keep_default=False)
def backwards(self, orm):
db.delete_column(u'smslink_phoneuser', 'last_contacted')
models = {u'foodproviders.entryrequirement': {'Meta': {'object_name':
'EntryRequirement'}, u'id': ('django.db.models.fields.AutoField', [
], {'primary_key': 'True'}), 'requirement': (
'django.db.models.fields.CharField', [], {'unique': 'True',
'max_length': '2'})}, u'foodproviders.postcode': {'Meta': {
'unique_together': "(('outward', 'inward'),)", 'object_name':
'PostCode'}, u'id': ('django.db.models.fields.AutoField', [], {
'primary_key': 'True'}), 'inward': (
'django.db.models.fields.CharField', [], {'max_length': '5'}),
'location': ('django.contrib.gis.db.models.fields.PointField', [],
{'null': 'True', 'blank': 'True'}), 'outward': (
'django.db.models.fields.CharField', [], {'max_length': '5',
'db_index': 'True'})}, u'smslink.phoneuser': {'Meta': {
'object_name': 'PhoneUser'}, u'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_contacted': ('django.db.models.fields.DateTimeField', [], {
'null': 'True', 'blank': 'True'}), 'number': (
'django.db.models.fields.CharField', [], {'max_length': '20',
'db_index': 'True'}), 'post_code': (
'django.db.models.fields.related.ForeignKey', [], {'to':
u"orm['foodproviders.PostCode']", 'null': 'True', 'blank': 'True'}),
'requirements_satisfied': (
'django.db.models.fields.related.ManyToManyField', [], {'to':
u"orm['foodproviders.EntryRequirement']", 'symmetrical': 'False'})}}
complete_apps = ['smslink']
| # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'PhoneUser.last_contacted'
db.add_column(u'smslink_phoneuser', 'last_contacted',
self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'PhoneUser.last_contacted'
db.delete_column(u'smslink_phoneuser', 'last_contacted')
models = {
u'foodproviders.entryrequirement': {
'Meta': {'object_name': 'EntryRequirement'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'requirement': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '2'})
},
u'foodproviders.postcode': {
'Meta': {'unique_together': "(('outward', 'inward'),)", 'object_name': 'PostCode'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'inward': ('django.db.models.fields.CharField', [], {'max_length': '5'}),
'location': ('django.contrib.gis.db.models.fields.PointField', [], {'null': 'True', 'blank': 'True'}),
'outward': ('django.db.models.fields.CharField', [], {'max_length': '5', 'db_index': 'True'})
},
u'smslink.phoneuser': {
'Meta': {'object_name': 'PhoneUser'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_contacted': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'number': ('django.db.models.fields.CharField', [], {'max_length': '20', 'db_index': 'True'}),
'post_code': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['foodproviders.PostCode']", 'null': 'True', 'blank': 'True'}),
'requirements_satisfied': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['foodproviders.EntryRequirement']", 'symmetrical': 'False'})
}
}
complete_apps = ['smslink'] | [
2,
3,
4,
5,
6
] |
994 | 18bad56ff6d230e63e83174672b8aa8625c1ebb4 |
RANGES = {
# Intervalles de la gamme majeure
0: [1, 0, 1, 0, 1, 1, 0, 1, 0, 1, 0, 1],
# Intervalles de la gamme mineure naturelle
1: [1, 0, 1, 1, 0, 1, 0, 1, 1, 0, 1, 0],
# Intervalles de la gamme mineure harmonique
2: [1, 0, 1, 1, 0, 1, 0, 1, 1, 0, 0, 1]
}
RANGES_NAMES = {
'fr': ['Majeur', 'Mineur naturel', 'Mineur harmonique']
}
# Nombre total de notes
N = 12
# Nombre de nombre par gamme
N_T = 7
NOTES = {
'fr': ['DO', 'DO#', 'RE', 'RE#', 'MI', 'FA', 'FA#', 'SOL', 'SOL#', 'LA', 'LA#', 'SI']
}
CHORDS = {
'fr': {
0: ['', 'm', 'm', '', '', 'm', 'dim'],
1: ['m', 'dim', '', 'm', 'm', '', ''],
2: ['', 'm', 'm', '', '', 'm', 'dim']
}
}
def get_notes_from_range(r, t):
""" Return all notes from a given range"""
# calcul du tableau de notes
tab = []
for i in range(N):
n = (i - t)%N
tab.append(RANGES[r][n])
return tab
def get_range_chords(r):
return []
def export_range(res, lg):
notes = [NOTES[lg][(n + res['keynote'] )% 12] for n in range(N) if res['notes'][(n + res['keynote'] )% 12]]
return {
'keynote': NOTES[lg][res['keynote']],
'range': RANGES_NAMES[lg][res['range']],
'notes': notes,
'pourcentage': res['pourcentage']
# 'Accords': [notes[i] + CHORDS[lg][res['range']][i] for i in range(N_T)]
}
def print_range(r):
print r['Tonique'] + ' ' + r['Gamme']
print r['Accords']
print
## traitement
def range_ranking(given_notes):
result = []
# pour chaque tonique:
for t in range(N):
# pour chaque mode:
#for m in range(0, 12):
# pour chaque gamme:
for r in range(len(RANGES)):
# re-initialisation du pourcentage
pourcentage = 0.0
# obtention de toutes les notes de la gamme consideree
range_notes = get_notes_from_range(r, t)
# pour chaque note connue:
for i in given_notes:
# si la note connue est dans la gamme:
if range_notes[i] == 1:
#alors pourcentage += 1
pourcentage += 1
else:
pourcentage -= 1
pourcentage = (pourcentage/len(given_notes)) * 100
result.append({'keynote': t,
# 'mode': m,
'range': r,
'notes': range_notes,
'pourcentage': pourcentage})
return result
def main(notes, lg):
# Compute pourcentage for every registered ranges
unsorted_ranking = range_ranking(notes)
sorted_ranking = sorted(unsorted_ranking, key=lambda g: g['pourcentage'], reverse=True)
best_results = [r for r in sorted_ranking if r['pourcentage'] == sorted_ranking[0]['pourcentage']]
return best_results
def get_ranges(given_notes, lg='fr'):
errors = {}
results = []
# Clean user entry
print 'g' + str(given_notes)
notes = [NOTES['fr'].index(n) for n in given_notes]
print 'n' + str(notes)
try:
best_results = main(notes, lg)
except Exception as e:
errors['status'] = 'error'
errors['message'] = e
return errors
errors['status'] = 'success'
errors['message'] = ''
errors['result'] = [export_range(r, lg) for r in best_results]
return errors
if __name__ == '__main__':
#TODO: Test that arrays have consistents length
# Get entry from user
notes = [0, 2, 4, 5, 7, 9, 11]
lg = 'fr'
print [NOTES[lg][i] for i in notes]
print
print "Ces notes correspondent a la gamme:"
#TODO: Clean user entry
best_results = main(notes, lg)
for r in best_results:
print export_range(r, lg)
| null | null | null | null | [
0
] |
995 | 364ac79e0f885c67f2fff57dfe3ddde63f0c269e | <mask token>
class TriviaTestCase(unittest.TestCase):
<mask token>
def setUp(self):
"""Define test variables and initialize app."""
self.app = create_app()
self.client = self.app.test_client
self.database_path = DB_PATH
setup_db(self.app, self.database_path)
self.question_to_delete = Question(question='What?', answer='huh!',
category=1, difficulty=1)
self.new_question = {'question': 'What?', 'answer': 'What',
'category': 1, 'difficulty': 1}
self.quizz = {'previous_questions': [1, 3], 'quiz_category': {'id':
1, 'type': 'Science'}}
with self.app.app_context():
self.db = SQLAlchemy()
self.db.init_app(self.app)
self.db.create_all()
def tearDown(self):
"""Executed after reach test"""
pass
def test_get_categories_if_success(self):
res = self.client().get('/categories')
data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(data['success'], True)
self.assertTrue(data['categories'])
def test_get_categories_if_non_existing_category(self):
res = self.client().get('/categories/10000')
data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(data['success'], False)
self.assertEqual(data['message'], 'not found')
def test_get_questions_if_success(self):
res = self.client().get('/questions')
data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(data['success'], True)
self.assertTrue(data['questions'])
self.assertTrue(data['total_questions'])
self.assertTrue(data['categories'])
self.assertIsNone(data['current_category'])
def test_get_questions_if_invalid_page(self):
res = self.client().get('/questions?page=10000')
data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(data['success'], False)
self.assertEqual(data['message'], 'not found')
def test_delete_question_if_success(self):
self.question_to_delete.insert()
res = self.client().delete(f'/questions/{self.question_to_delete.id}')
data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(data['success'], True)
self.assertEqual(data['deleted_question'], self.question_to_delete.id)
self.assertTrue(data['questions'])
self.assertTrue(data['total_questions'])
<mask token>
def test_create_question_if_success(self):
res = self.client().post('/questions', json=self.new_question)
data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(data['success'], True)
self.assertTrue(data['created_question'])
def test_create_question_if_bad_endpoint(self):
res = self.client().post('/questions/45', json=self.new_question)
data = json.loads(res.data)
self.assertEqual(res.status_code, 405)
self.assertEqual(data['success'], False)
self.assertEqual(data['message'], 'method not allowed')
def test_search_questions_with_results(self):
res = self.client().post('/questions/search', json={'search':
'Van Gogh'})
data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(data['success'], True)
self.assertTrue(data['total_questions'])
self.assertEqual(len(data['questions']), 1)
def test_search_questions_without_results(self):
res = self.client().post('/questions/search', json={'search':
'Weird search'})
data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(data['success'], True)
self.assertEqual(data['total_questions'], 0)
self.assertEqual(len(data['questions']), 0)
<mask token>
def test_get_questions_by_category_if_success(self):
res = self.client().get('/categories/1/questions')
data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(data['success'], True)
self.assertTrue(data['questions'])
self.assertTrue(data['total_questions'])
self.assertEqual(data['current_category'], 1)
def test_get_questions_by_category_if_failure(self):
res = self.client().get('/categories/10000/questions')
data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(data['success'], False)
self.assertEqual(data['message'], 'not found')
<mask token>
def test_get_quizz_question_if_bad_request(self):
res = self.client().post('/quizzes/4', json=self.quizz)
data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(data['success'], False)
self.assertEqual(data['message'], 'not found')
<mask token>
<mask token>
| <mask token>
class TriviaTestCase(unittest.TestCase):
<mask token>
def setUp(self):
"""Define test variables and initialize app."""
self.app = create_app()
self.client = self.app.test_client
self.database_path = DB_PATH
setup_db(self.app, self.database_path)
self.question_to_delete = Question(question='What?', answer='huh!',
category=1, difficulty=1)
self.new_question = {'question': 'What?', 'answer': 'What',
'category': 1, 'difficulty': 1}
self.quizz = {'previous_questions': [1, 3], 'quiz_category': {'id':
1, 'type': 'Science'}}
with self.app.app_context():
self.db = SQLAlchemy()
self.db.init_app(self.app)
self.db.create_all()
def tearDown(self):
"""Executed after reach test"""
pass
def test_get_categories_if_success(self):
res = self.client().get('/categories')
data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(data['success'], True)
self.assertTrue(data['categories'])
def test_get_categories_if_non_existing_category(self):
res = self.client().get('/categories/10000')
data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(data['success'], False)
self.assertEqual(data['message'], 'not found')
def test_get_questions_if_success(self):
res = self.client().get('/questions')
data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(data['success'], True)
self.assertTrue(data['questions'])
self.assertTrue(data['total_questions'])
self.assertTrue(data['categories'])
self.assertIsNone(data['current_category'])
def test_get_questions_if_invalid_page(self):
res = self.client().get('/questions?page=10000')
data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(data['success'], False)
self.assertEqual(data['message'], 'not found')
def test_delete_question_if_success(self):
self.question_to_delete.insert()
res = self.client().delete(f'/questions/{self.question_to_delete.id}')
data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(data['success'], True)
self.assertEqual(data['deleted_question'], self.question_to_delete.id)
self.assertTrue(data['questions'])
self.assertTrue(data['total_questions'])
<mask token>
def test_create_question_if_success(self):
res = self.client().post('/questions', json=self.new_question)
data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(data['success'], True)
self.assertTrue(data['created_question'])
def test_create_question_if_bad_endpoint(self):
res = self.client().post('/questions/45', json=self.new_question)
data = json.loads(res.data)
self.assertEqual(res.status_code, 405)
self.assertEqual(data['success'], False)
self.assertEqual(data['message'], 'method not allowed')
def test_search_questions_with_results(self):
res = self.client().post('/questions/search', json={'search':
'Van Gogh'})
data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(data['success'], True)
self.assertTrue(data['total_questions'])
self.assertEqual(len(data['questions']), 1)
def test_search_questions_without_results(self):
res = self.client().post('/questions/search', json={'search':
'Weird search'})
data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(data['success'], True)
self.assertEqual(data['total_questions'], 0)
self.assertEqual(len(data['questions']), 0)
<mask token>
def test_get_questions_by_category_if_success(self):
res = self.client().get('/categories/1/questions')
data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(data['success'], True)
self.assertTrue(data['questions'])
self.assertTrue(data['total_questions'])
self.assertEqual(data['current_category'], 1)
def test_get_questions_by_category_if_failure(self):
res = self.client().get('/categories/10000/questions')
data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(data['success'], False)
self.assertEqual(data['message'], 'not found')
def test_get_quizz_question_if_success(self):
res = self.client().post('/quizzes', json=self.quizz)
data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(data['success'], True)
self.assertTrue(data['question'])
def test_get_quizz_question_if_bad_request(self):
res = self.client().post('/quizzes/4', json=self.quizz)
data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(data['success'], False)
self.assertEqual(data['message'], 'not found')
<mask token>
<mask token>
| <mask token>
class TriviaTestCase(unittest.TestCase):
<mask token>
def setUp(self):
"""Define test variables and initialize app."""
self.app = create_app()
self.client = self.app.test_client
self.database_path = DB_PATH
setup_db(self.app, self.database_path)
self.question_to_delete = Question(question='What?', answer='huh!',
category=1, difficulty=1)
self.new_question = {'question': 'What?', 'answer': 'What',
'category': 1, 'difficulty': 1}
self.quizz = {'previous_questions': [1, 3], 'quiz_category': {'id':
1, 'type': 'Science'}}
with self.app.app_context():
self.db = SQLAlchemy()
self.db.init_app(self.app)
self.db.create_all()
def tearDown(self):
"""Executed after reach test"""
pass
def test_get_categories_if_success(self):
res = self.client().get('/categories')
data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(data['success'], True)
self.assertTrue(data['categories'])
def test_get_categories_if_non_existing_category(self):
res = self.client().get('/categories/10000')
data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(data['success'], False)
self.assertEqual(data['message'], 'not found')
def test_get_questions_if_success(self):
res = self.client().get('/questions')
data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(data['success'], True)
self.assertTrue(data['questions'])
self.assertTrue(data['total_questions'])
self.assertTrue(data['categories'])
self.assertIsNone(data['current_category'])
def test_get_questions_if_invalid_page(self):
res = self.client().get('/questions?page=10000')
data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(data['success'], False)
self.assertEqual(data['message'], 'not found')
def test_delete_question_if_success(self):
self.question_to_delete.insert()
res = self.client().delete(f'/questions/{self.question_to_delete.id}')
data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(data['success'], True)
self.assertEqual(data['deleted_question'], self.question_to_delete.id)
self.assertTrue(data['questions'])
self.assertTrue(data['total_questions'])
def test_delete_questions_if_non_existing_book(self):
res = self.client().delete('/questions/100000')
data = json.loads(res.data)
self.assertEqual(res.status_code, 422)
self.assertEqual(data['success'], False)
self.assertEqual(data['message'], 'unprocessable')
def test_create_question_if_success(self):
res = self.client().post('/questions', json=self.new_question)
data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(data['success'], True)
self.assertTrue(data['created_question'])
def test_create_question_if_bad_endpoint(self):
res = self.client().post('/questions/45', json=self.new_question)
data = json.loads(res.data)
self.assertEqual(res.status_code, 405)
self.assertEqual(data['success'], False)
self.assertEqual(data['message'], 'method not allowed')
def test_search_questions_with_results(self):
res = self.client().post('/questions/search', json={'search':
'Van Gogh'})
data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(data['success'], True)
self.assertTrue(data['total_questions'])
self.assertEqual(len(data['questions']), 1)
def test_search_questions_without_results(self):
res = self.client().post('/questions/search', json={'search':
'Weird search'})
data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(data['success'], True)
self.assertEqual(data['total_questions'], 0)
self.assertEqual(len(data['questions']), 0)
def test_search_questions_failure(self):
res = self.client().post('/questions/search', json={'wrong_key':
'Van Gogh'})
data = json.loads(res.data)
self.assertEqual(res.status_code, 400)
self.assertEqual(data['success'], False)
self.assertEqual(data['message'], 'bad request')
def test_get_questions_by_category_if_success(self):
res = self.client().get('/categories/1/questions')
data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(data['success'], True)
self.assertTrue(data['questions'])
self.assertTrue(data['total_questions'])
self.assertEqual(data['current_category'], 1)
def test_get_questions_by_category_if_failure(self):
res = self.client().get('/categories/10000/questions')
data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(data['success'], False)
self.assertEqual(data['message'], 'not found')
def test_get_quizz_question_if_success(self):
res = self.client().post('/quizzes', json=self.quizz)
data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(data['success'], True)
self.assertTrue(data['question'])
def test_get_quizz_question_if_bad_request(self):
res = self.client().post('/quizzes/4', json=self.quizz)
data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(data['success'], False)
self.assertEqual(data['message'], 'not found')
<mask token>
<mask token>
| <mask token>
class TriviaTestCase(unittest.TestCase):
"""This class represents the trivia test case"""
def setUp(self):
"""Define test variables and initialize app."""
self.app = create_app()
self.client = self.app.test_client
self.database_path = DB_PATH
setup_db(self.app, self.database_path)
self.question_to_delete = Question(question='What?', answer='huh!',
category=1, difficulty=1)
self.new_question = {'question': 'What?', 'answer': 'What',
'category': 1, 'difficulty': 1}
self.quizz = {'previous_questions': [1, 3], 'quiz_category': {'id':
1, 'type': 'Science'}}
with self.app.app_context():
self.db = SQLAlchemy()
self.db.init_app(self.app)
self.db.create_all()
def tearDown(self):
"""Executed after reach test"""
pass
def test_get_categories_if_success(self):
res = self.client().get('/categories')
data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(data['success'], True)
self.assertTrue(data['categories'])
def test_get_categories_if_non_existing_category(self):
res = self.client().get('/categories/10000')
data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(data['success'], False)
self.assertEqual(data['message'], 'not found')
def test_get_questions_if_success(self):
res = self.client().get('/questions')
data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(data['success'], True)
self.assertTrue(data['questions'])
self.assertTrue(data['total_questions'])
self.assertTrue(data['categories'])
self.assertIsNone(data['current_category'])
def test_get_questions_if_invalid_page(self):
res = self.client().get('/questions?page=10000')
data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(data['success'], False)
self.assertEqual(data['message'], 'not found')
def test_delete_question_if_success(self):
self.question_to_delete.insert()
res = self.client().delete(f'/questions/{self.question_to_delete.id}')
data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(data['success'], True)
self.assertEqual(data['deleted_question'], self.question_to_delete.id)
self.assertTrue(data['questions'])
self.assertTrue(data['total_questions'])
def test_delete_questions_if_non_existing_book(self):
res = self.client().delete('/questions/100000')
data = json.loads(res.data)
self.assertEqual(res.status_code, 422)
self.assertEqual(data['success'], False)
self.assertEqual(data['message'], 'unprocessable')
def test_create_question_if_success(self):
res = self.client().post('/questions', json=self.new_question)
data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(data['success'], True)
self.assertTrue(data['created_question'])
def test_create_question_if_bad_endpoint(self):
res = self.client().post('/questions/45', json=self.new_question)
data = json.loads(res.data)
self.assertEqual(res.status_code, 405)
self.assertEqual(data['success'], False)
self.assertEqual(data['message'], 'method not allowed')
def test_search_questions_with_results(self):
res = self.client().post('/questions/search', json={'search':
'Van Gogh'})
data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(data['success'], True)
self.assertTrue(data['total_questions'])
self.assertEqual(len(data['questions']), 1)
def test_search_questions_without_results(self):
res = self.client().post('/questions/search', json={'search':
'Weird search'})
data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(data['success'], True)
self.assertEqual(data['total_questions'], 0)
self.assertEqual(len(data['questions']), 0)
def test_search_questions_failure(self):
res = self.client().post('/questions/search', json={'wrong_key':
'Van Gogh'})
data = json.loads(res.data)
self.assertEqual(res.status_code, 400)
self.assertEqual(data['success'], False)
self.assertEqual(data['message'], 'bad request')
def test_get_questions_by_category_if_success(self):
res = self.client().get('/categories/1/questions')
data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(data['success'], True)
self.assertTrue(data['questions'])
self.assertTrue(data['total_questions'])
self.assertEqual(data['current_category'], 1)
def test_get_questions_by_category_if_failure(self):
res = self.client().get('/categories/10000/questions')
data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(data['success'], False)
self.assertEqual(data['message'], 'not found')
def test_get_quizz_question_if_success(self):
res = self.client().post('/quizzes', json=self.quizz)
data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(data['success'], True)
self.assertTrue(data['question'])
def test_get_quizz_question_if_bad_request(self):
res = self.client().post('/quizzes/4', json=self.quizz)
data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(data['success'], False)
self.assertEqual(data['message'], 'not found')
"""
TODO
Write at least one test for each test for successful
operation and for expected errors.
"""
<mask token>
| import os
import unittest
import json
from flask_sqlalchemy import SQLAlchemy
from flaskr import create_app
from models import setup_db, Question
DB_HOST = os.getenv('DB_HOST', '127.0.0.1:5432')
DB_USER = os.getenv('DB_USER', 'postgres')
DB_PASSWORD = os.getenv('DB_PASSWORD', 'postgres')
DB_NAME = os.getenv('DB_NAME', 'trivia_test')
DB_PATH = 'postgresql+psycopg2://{}:{}@{}/{}'.\
format(DB_USER, DB_PASSWORD, DB_HOST, DB_NAME)
class TriviaTestCase(unittest.TestCase):
    """This class represents the trivia test case"""

    def setUp(self):
        """Define test variables and initialize app."""
        self.app = create_app()
        self.client = self.app.test_client
        self.database_path = DB_PATH
        setup_db(self.app, self.database_path)
        # Fixture question inserted on demand by the delete-success test.
        self.question_to_delete = Question(
            question='What?',
            answer='huh!',
            category=1,
            difficulty=1
        )
        # JSON payload for POST /questions.
        self.new_question = {
            'question': 'What?',
            'answer': 'What',
            'category': 1,
            'difficulty': 1
        }
        # JSON payload for POST /quizzes: ids already asked plus a category.
        self.quizz = {
            'previous_questions': [1, 3],
            'quiz_category': {'id': 1, 'type': 'Science'}
        }
        # binds the app to the current context
        with self.app.app_context():
            self.db = SQLAlchemy()
            self.db.init_app(self.app)
            # create all tables
            self.db.create_all()

    def tearDown(self):
        """Executed after reach test"""
        pass

    def test_get_categories_if_success(self):
        """GET /categories returns 200 with a non-empty category map."""
        res = self.client().get('/categories')
        data = json.loads(res.data)
        self.assertEqual(res.status_code, 200)
        self.assertEqual(data['success'], True)
        self.assertTrue(data['categories'])

    def test_get_categories_if_non_existing_category(self):
        """GET /categories/<bad id> returns a 404 error payload."""
        res = self.client().get('/categories/10000')
        data = json.loads(res.data)
        self.assertEqual(res.status_code, 404)
        self.assertEqual(data['success'], False)
        self.assertEqual(data['message'], 'not found')

    def test_get_questions_if_success(self):
        """GET /questions returns paginated questions plus categories."""
        res = self.client().get('/questions')
        data = json.loads(res.data)
        self.assertEqual(res.status_code, 200)
        self.assertEqual(data['success'], True)
        self.assertTrue(data['questions'])
        self.assertTrue(data['total_questions'])
        self.assertTrue(data['categories'])
        # No category filter applied on the plain listing endpoint.
        self.assertIsNone(data['current_category'])

    def test_get_questions_if_invalid_page(self):
        """GET /questions with an out-of-range page returns 404."""
        res = self.client().get('/questions?page=10000')
        data = json.loads(res.data)
        self.assertEqual(res.status_code, 404)
        self.assertEqual(data['success'], False)
        self.assertEqual(data['message'], 'not found')

    def test_delete_question_if_success(self):
        """DELETE /questions/<id> removes the fixture question."""
        self.question_to_delete.insert()
        res = self.client().delete(f'/questions/{self.question_to_delete.id}')
        data = json.loads(res.data)
        self.assertEqual(res.status_code, 200)
        self.assertEqual(data['success'], True)
        self.assertEqual(data['deleted_question'], self.question_to_delete.id)
        self.assertTrue(data['questions'])
        self.assertTrue(data['total_questions'])

    def test_delete_questions_if_non_existing_book(self):
        """DELETE on a non-existent question id returns 422."""
        res = self.client().delete('/questions/100000')
        data = json.loads(res.data)
        self.assertEqual(res.status_code, 422)
        self.assertEqual(data['success'], False)
        self.assertEqual(data['message'], 'unprocessable')

    def test_create_question_if_success(self):
        """POST /questions with a valid payload creates a question."""
        res = self.client().post('/questions', json=self.new_question)
        data = json.loads(res.data)
        self.assertEqual(res.status_code, 200)
        self.assertEqual(data['success'], True)
        self.assertTrue(data['created_question'])

    def test_create_question_if_bad_endpoint(self):
        """POST to /questions/<id> is not allowed (405)."""
        res = self.client().post('/questions/45', json=self.new_question)
        data = json.loads(res.data)
        self.assertEqual(res.status_code, 405)
        self.assertEqual(data['success'], False)
        self.assertEqual(data['message'], 'method not allowed')

    def test_search_questions_with_results(self):
        """POST /questions/search matching one seeded question."""
        res = self.client().post(
            '/questions/search', json={'search': 'Van Gogh'}
        )
        data = json.loads(res.data)
        self.assertEqual(res.status_code, 200)
        self.assertEqual(data['success'], True)
        self.assertTrue(data['total_questions'])
        self.assertEqual(len(data['questions']), 1)

    def test_search_questions_without_results(self):
        """POST /questions/search with no matches returns empty results."""
        res = self.client().post(
            '/questions/search', json={'search': 'Weird search'}
        )
        data = json.loads(res.data)
        self.assertEqual(res.status_code, 200)
        self.assertEqual(data['success'], True)
        self.assertEqual(data['total_questions'], 0)
        self.assertEqual(len(data['questions']), 0)

    def test_search_questions_failure(self):
        """POST /questions/search without the 'search' key returns 400."""
        res = self.client().post(
            '/questions/search', json={'wrong_key': 'Van Gogh'}
        )
        data = json.loads(res.data)
        self.assertEqual(res.status_code, 400)
        self.assertEqual(data['success'], False)
        self.assertEqual(data['message'], 'bad request')

    def test_get_questions_by_category_if_success(self):
        """GET /categories/<id>/questions filters by that category."""
        res = self.client().get('/categories/1/questions')
        data = json.loads(res.data)
        self.assertEqual(res.status_code, 200)
        self.assertEqual(data['success'], True)
        self.assertTrue(data['questions'])
        self.assertTrue(data['total_questions'])
        self.assertEqual(data['current_category'], 1)

    def test_get_questions_by_category_if_failure(self):
        """GET questions for a non-existent category returns 404."""
        res = self.client().get('/categories/10000/questions')
        data = json.loads(res.data)
        self.assertEqual(res.status_code, 404)
        self.assertEqual(data['success'], False)
        self.assertEqual(data['message'], 'not found')

    def test_get_quizz_question_if_success(self):
        """POST /quizzes returns a next question for the quiz payload."""
        res = self.client().post('/quizzes', json=self.quizz)
        data = json.loads(res.data)
        self.assertEqual(res.status_code, 200)
        self.assertEqual(data['success'], True)
        self.assertTrue(data['question'])

    def test_get_quizz_question_if_bad_request(self):
        """POST to /quizzes/<id> is not a valid route (404)."""
        res = self.client().post('/quizzes/4', json=self.quizz)
        data = json.loads(res.data)
        self.assertEqual(res.status_code, 404)
        self.assertEqual(data['success'], False)
        self.assertEqual(data['message'], 'not found')
    """
    TODO
    Write at least one test for each test for successful
    operation and for expected errors.
    """
# Make the tests conveniently executable
if __name__ == "__main__":
unittest.main()
| [
15,
16,
18,
19,
23
] |
996 | 34c81b9318d978305748d413c869a86ee6709e2c | <mask token>
| <mask token>
class YumiConstants:
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
| <mask token>
class YumiConstants:
T_gripper_gripperV = rt.RigidTransform(rotation=[[-1, 0, 0], [0, 1, 0],
[0, 0, -1]], from_frame='gripper', to_frame='obj')
T_rightH_yumi_1 = rt.RigidTransform(rotation=[[0, 0, 1], [1, 0, 0], [0,
1, 0]], translation=[0.6256, -0.15060002, 0.3616], from_frame=
'home', to_frame='yumi')
T_rightH_yumi_2 = rt.RigidTransform(rotation=[[0, 0, 1], [1, 0, 0], [0,
1, 0]], translation=[0.6256 - 0.1, -0.15060002 + 0.1, 0.3616],
from_frame='home', to_frame='yumi')
T_rightH_yumi_3 = rt.RigidTransform(rotation=[[0, 0, 1], [1, 0, 0], [0,
1, 0]], translation=[0.6256 - 0.1, -0.15060002 + 0.1, 0.3616 - 0.05
], from_frame='home', to_frame='yumi')
T_leftH_yumi_1 = rt.RigidTransform(rotation=[[1, 0, 0], [0, 0, -1], [0,
1, 0]], translation=[0.52070004, 0.07340001, 0.3574], from_frame=
'home', to_frame='yumi')
T_leftH_yumi_2 = rt.RigidTransform(rotation=[[1, 0, 0], [0, 0, -1], [0,
1, 0]], translation=[0.67080003 - 0.15, -0.12650001 + 0.2,
0.35720003], from_frame='home', to_frame='yumi')
T_board_yumi = rt.RigidTransform(translation=[0.3984, 0, 0.0837],
from_frame='board', to_frame='yumi')
board_center = rt.RigidTransform(rotation=[[-1, 0, 0], [0, 1, 0], [0, 0,
-1]], translation=[0.42971, -0.004, -0.057], from_frame='yumi',
to_frame='world')
T_rightH_yumi = rt.RigidTransform(rotation=[[-1, 0, 0], [0, 1, 0], [0,
0, -1]], translation=[0.3984, 0 - 8 * 0.0375, 0.0837], from_frame=
'home', to_frame='yumi')
T_leftH_yumi = rt.RigidTransform(rotation=[[-1, 0, 0], [0, 1, 0], [0, 0,
-1]], translation=[0.3984, 0 + 8 * 0.0375, 0.0837], from_frame=
'home', to_frame='yumi')
right_threading_home = YuMiState([101.34, -83.3, 54.01, -44.34, -82.32,
-26.22, -76.76])
left_threading_home = YuMiState([-74.73, -70.63, 9.62, 15.86, 65.74, -
169.18, 50.61])
right_pickup_home = YuMiState([80.92, -118.47, 39.2, -139.35, 107.91,
4.83, -26.93])
left_pickup_home = YuMiState([-75.32, -114.45, 37.59, 134.52, 102.66, -
8.73, 42.77])
| from autolab_core import rigid_transformations as rt
from yumipy import YuMiState
class YumiConstants:
T_gripper_gripperV = rt.RigidTransform(rotation=[[-1, 0, 0], [0, 1, 0],
[0, 0, -1]], from_frame='gripper', to_frame='obj')
T_rightH_yumi_1 = rt.RigidTransform(rotation=[[0, 0, 1], [1, 0, 0], [0,
1, 0]], translation=[0.6256, -0.15060002, 0.3616], from_frame=
'home', to_frame='yumi')
T_rightH_yumi_2 = rt.RigidTransform(rotation=[[0, 0, 1], [1, 0, 0], [0,
1, 0]], translation=[0.6256 - 0.1, -0.15060002 + 0.1, 0.3616],
from_frame='home', to_frame='yumi')
T_rightH_yumi_3 = rt.RigidTransform(rotation=[[0, 0, 1], [1, 0, 0], [0,
1, 0]], translation=[0.6256 - 0.1, -0.15060002 + 0.1, 0.3616 - 0.05
], from_frame='home', to_frame='yumi')
T_leftH_yumi_1 = rt.RigidTransform(rotation=[[1, 0, 0], [0, 0, -1], [0,
1, 0]], translation=[0.52070004, 0.07340001, 0.3574], from_frame=
'home', to_frame='yumi')
T_leftH_yumi_2 = rt.RigidTransform(rotation=[[1, 0, 0], [0, 0, -1], [0,
1, 0]], translation=[0.67080003 - 0.15, -0.12650001 + 0.2,
0.35720003], from_frame='home', to_frame='yumi')
T_board_yumi = rt.RigidTransform(translation=[0.3984, 0, 0.0837],
from_frame='board', to_frame='yumi')
board_center = rt.RigidTransform(rotation=[[-1, 0, 0], [0, 1, 0], [0, 0,
-1]], translation=[0.42971, -0.004, -0.057], from_frame='yumi',
to_frame='world')
T_rightH_yumi = rt.RigidTransform(rotation=[[-1, 0, 0], [0, 1, 0], [0,
0, -1]], translation=[0.3984, 0 - 8 * 0.0375, 0.0837], from_frame=
'home', to_frame='yumi')
T_leftH_yumi = rt.RigidTransform(rotation=[[-1, 0, 0], [0, 1, 0], [0, 0,
-1]], translation=[0.3984, 0 + 8 * 0.0375, 0.0837], from_frame=
'home', to_frame='yumi')
right_threading_home = YuMiState([101.34, -83.3, 54.01, -44.34, -82.32,
-26.22, -76.76])
left_threading_home = YuMiState([-74.73, -70.63, 9.62, 15.86, 65.74, -
169.18, 50.61])
right_pickup_home = YuMiState([80.92, -118.47, 39.2, -139.35, 107.91,
4.83, -26.93])
left_pickup_home = YuMiState([-75.32, -114.45, 37.59, 134.52, 102.66, -
8.73, 42.77])
| # import visual_servoing_utils_main as utils
from autolab_core import rigid_transformations as rt
from yumipy import YuMiState
class YumiConstants:
    """Rigid-transform and joint-state constants for a YuMi robot setup.

    Naming convention (taken from the RigidTransform arguments): a
    constant named ``T_a_b`` has ``from_frame='a'`` and ``to_frame='b'``.
    NOTE(review): translations look like meters and YuMiState values like
    joint angles in degrees -- confirm against the yumipy documentation.
    """

    # Gripper pose relative to the object frame (180-degree flip about y).
    T_gripper_gripperV = rt.RigidTransform(rotation=[[-1, 0, 0], [0, 1, 0], [0, 0, -1]],
                                           from_frame='gripper', to_frame='obj')

    # Candidate right-arm "home" poses in the yumi frame.
    T_rightH_yumi_1 = rt.RigidTransform(rotation=[[0, 0, 1], [1, 0, 0], [0, 1, 0]],
                                        translation=[0.6256, -0.15060002, 0.3616],
                                        from_frame='home', to_frame='yumi')
    # Variant of pose 1 shifted by (-0.1, +0.1, 0).
    T_rightH_yumi_2 = rt.RigidTransform(rotation=[[0, 0, 1], [1, 0, 0], [0, 1, 0]],
                                        translation=[0.6256 - 0.1, -0.15060002 + 0.1, 0.3616],
                                        from_frame='home', to_frame='yumi')
    # Same as variant 2 but 5 cm lower in z.
    T_rightH_yumi_3 = rt.RigidTransform(rotation=[[0, 0, 1], [1, 0, 0], [0, 1, 0]],
                                        translation=[0.6256 - 0.1, -0.15060002 + 0.1, 0.3616 - 0.05],
                                        from_frame='home', to_frame='yumi')

    # Candidate left-arm "home" poses in the yumi frame.
    T_leftH_yumi_1 = rt.RigidTransform(rotation=[[1, 0, 0], [0, 0, -1], [0, 1, 0]],
                                       translation=[0.52070004, 0.07340001, 0.3574],
                                       from_frame='home', to_frame='yumi')
    T_leftH_yumi_2 = rt.RigidTransform(rotation=[[1, 0, 0], [0, 0, -1], [0, 1, 0]],
                                       translation=[0.67080003 - 0.15, -0.12650001 + 0.2, 0.35720003],
                                       from_frame='home', to_frame='yumi')

    # Board origin in the yumi frame (identity rotation).
    T_board_yumi = rt.RigidTransform(translation=[0.3984, 0, 0.0837],
                                     from_frame='board', to_frame='yumi')

    # Center of the board expressed in the world frame.
    board_center = rt.RigidTransform(rotation=[[-1, 0, 0], [0, 1, 0], [0, 0, -1]],
                                     translation=[0.42971, -0.004, -0.057],
                                     from_frame='yumi', to_frame='world')

    # Right/left home poses: the board-origin translation offset by
    # 8 * 0.0375 along y (negative for the right arm, positive for left).
    T_rightH_yumi = rt.RigidTransform(rotation=[[-1, 0, 0], [0, 1, 0], [0, 0, -1]],
                                      translation=[0.3984, 0 - 8 * 0.0375, 0.0837],
                                      from_frame='home', to_frame='yumi')
    T_leftH_yumi = rt.RigidTransform(rotation=[[-1, 0, 0], [0, 1, 0], [0, 0, -1]],
                                     translation=[0.3984, 0 + 8 * 0.0375, 0.0837],
                                     # translation=[0.3984, 0 + 8*0.0375, 0.0837],
                                     from_frame='home', to_frame='yumi')

    # Joint-space home configurations (7 values per arm).
    right_threading_home = YuMiState([101.34, -83.3, 54.01, -44.34, -82.32, -26.22, -76.76])
    left_threading_home = YuMiState([-74.73, -70.63, 9.62, 15.86, 65.74, -169.18, 50.61])
    right_pickup_home = YuMiState([80.92, -118.47, 39.2, -139.35, 107.91, 4.83, -26.93])
    left_pickup_home = YuMiState([-75.32, -114.45, 37.59, 134.52, 102.66, -8.73, 42.77])
| [
0,
1,
2,
3,
4
] |
997 | 04099c46c029af37a08b3861809da13b3cc3153b | <mask token>
| <mask token>
def quick_sort(array: list) -> list:
    """Return a new list with the elements of *array* sorted ascending.

    Implements quicksort: pick a pivot, partition into smaller / equal /
    larger buckets, and recursively sort the two outer buckets. Lists of
    size 0 or 1 are already sorted and form the base case. The input
    list is not modified.
    """
    if len(array) <= 1:
        return list(array)
    pivot = array[len(array) // 2]
    # Keeping an explicit "equal" bucket makes duplicates cheap: they
    # never recurse.
    smaller = [x for x in array if x < pivot]
    equal = [x for x in array if x == pivot]
    larger = [x for x in array if x > pivot]
    return quick_sort(smaller) + equal + quick_sort(larger)
| """
OBJECTIVE: Given a list, sort it from low to high using the QUICK SORT algorithm
Quicksort first divides a large array into two smaller sub-arrays: the low elements and the high elements.
Quicksort can then recursively sort the sub-arrays.
The steps are:
1. Pick an element, called a pivot, from the array.
2. Partitioning: reorder the array so that all elements with values less than the pivot come before the pivot,
while all elements with values greater than the pivot come after it (equal values can go either way).
After this partitioning, the pivot is in its final position. This is called the partition operation.
3. Recursively apply the above steps to the sub-array of elements with smaller values
and separately to the sub-array of elements with greater values.
The base case of the recursion is arrays of size zero or one, which are in order by definition,
so they never need to be sorted.
https://www.geeksforgeeks.org/quick-sort/
"""
def quick_sort(array: list) -> list:
    """Return a new list with the elements of *array* sorted low to high.

    Quicksort as described in the module docstring: choose a pivot,
    partition the remaining elements around it, then recursively sort
    the sub-arrays. Size-0/1 arrays are the base case. Does not mutate
    the input.
    """
    if len(array) <= 1:
        return list(array)
    pivot = array[len(array) // 2]
    # Three-way partition; elements equal to the pivot are already in
    # their final relative position.
    less = [x for x in array if x < pivot]
    equal = [x for x in array if x == pivot]
    greater = [x for x in array if x > pivot]
    return quick_sort(less) + equal + quick_sort(greater)
| null | null | [
0,
1,
2
] |
998 | 9a6d6637cd4ecf2f6e9c8eb8e702be06e83beea4 | <mask token>
| <mask token>
if __name__ == '__main__':
app.run(debug=app.config['DEBUG'])
| <mask token>
__author__ = '七月'
app = create_app()
if __name__ == '__main__':
app.run(debug=app.config['DEBUG'])
| from app import create_app
__author__ = '七月'
app = create_app()
if __name__ == '__main__':
app.run(debug=app.config['DEBUG'])
| null | [
0,
1,
2,
3
] |
999 | f405a3e9ccabbba6719f632eb9c51809b8deb319 | <mask token>
def upload_file(file_name, object_name=None):
RESULT_BUCKET_NAME = 'worm4047bucket2'
s3_client = get_client('s3')
max_retries = 5
while max_retries > 0:
try:
response = s3_client.upload_file(file_name, RESULT_BUCKET_NAME,
object_name, Callback=ProgressPercentage(file_name))
break
except ClientError as e:
logging.error(e)
max_retries -= 1
return max_retries > 0
def upload_results(object_name, results):
file_name = object_name
with open(file_name, 'w+') as f:
f.write(results)
return upload_file(file_name, object_name)
<mask token>
def get_client(type):
global ACCESS_KEY
global SECRET_KEY
global SESSION_TOKEN
global REGION
return boto3.client(type, aws_access_key_id=ACCESS_KEY,
aws_secret_access_key=SECRET_KEY, aws_session_token=SESSION_TOKEN,
region_name=REGION)
def get_objects(FILENAME):
logging.info(os.getcwd())
result = dict()
object_set = set()
try:
f = open(FILENAME, 'r')
temp_data = f.read().split('\n')
data = dict()
currfps = 0
obj_in_frame = []
for lines in temp_data:
lines = lines.replace('\n', '')
if 'FPS' in lines:
if currfps > 0 and len(obj_in_frame) > 0:
data[currfps] = obj_in_frame
obj_in_frame = []
currfps += 1
elif '%' in lines:
obj_in_frame.append(lines)
for key in data:
object_map = []
for obj in data[key]:
obj_name, obj_conf = obj.split()
obj_name = obj_name.replace(':', '')
object_set.add(obj_name)
obj_conf = int(obj_conf.replace('%', ''))
object_map.append({obj_name: obj_conf * 1.0 / 100})
result[key] = object_map
except Exception as e:
pass
return list(object_set)
<mask token>
| <mask token>
def upload_file(file_name, object_name=None):
RESULT_BUCKET_NAME = 'worm4047bucket2'
s3_client = get_client('s3')
max_retries = 5
while max_retries > 0:
try:
response = s3_client.upload_file(file_name, RESULT_BUCKET_NAME,
object_name, Callback=ProgressPercentage(file_name))
break
except ClientError as e:
logging.error(e)
max_retries -= 1
return max_retries > 0
def upload_results(object_name, results):
file_name = object_name
with open(file_name, 'w+') as f:
f.write(results)
return upload_file(file_name, object_name)
def get_creds():
global ACCESS_KEY
global SECRET_KEY
global SESSION_TOKEN
global REGION
cred_file = 'cred.json'
with open(cred_file) as f:
data = json.load(f)
ACCESS_KEY = data['aws_access_key_id']
SECRET_KEY = data['aws_secret_access_key']
SESSION_TOKEN = data['aws_session_token']
REGION = data['region']
def get_client(type):
global ACCESS_KEY
global SECRET_KEY
global SESSION_TOKEN
global REGION
return boto3.client(type, aws_access_key_id=ACCESS_KEY,
aws_secret_access_key=SECRET_KEY, aws_session_token=SESSION_TOKEN,
region_name=REGION)
def get_objects(FILENAME):
logging.info(os.getcwd())
result = dict()
object_set = set()
try:
f = open(FILENAME, 'r')
temp_data = f.read().split('\n')
data = dict()
currfps = 0
obj_in_frame = []
for lines in temp_data:
lines = lines.replace('\n', '')
if 'FPS' in lines:
if currfps > 0 and len(obj_in_frame) > 0:
data[currfps] = obj_in_frame
obj_in_frame = []
currfps += 1
elif '%' in lines:
obj_in_frame.append(lines)
for key in data:
object_map = []
for obj in data[key]:
obj_name, obj_conf = obj.split()
obj_name = obj_name.replace(':', '')
object_set.add(obj_name)
obj_conf = int(obj_conf.replace('%', ''))
object_map.append({obj_name: obj_conf * 1.0 / 100})
result[key] = object_map
except Exception as e:
pass
return list(object_set)
<mask token>
| <mask token>
def upload_file(file_name, object_name=None):
RESULT_BUCKET_NAME = 'worm4047bucket2'
s3_client = get_client('s3')
max_retries = 5
while max_retries > 0:
try:
response = s3_client.upload_file(file_name, RESULT_BUCKET_NAME,
object_name, Callback=ProgressPercentage(file_name))
break
except ClientError as e:
logging.error(e)
max_retries -= 1
return max_retries > 0
def upload_results(object_name, results):
file_name = object_name
with open(file_name, 'w+') as f:
f.write(results)
return upload_file(file_name, object_name)
def get_creds():
global ACCESS_KEY
global SECRET_KEY
global SESSION_TOKEN
global REGION
cred_file = 'cred.json'
with open(cred_file) as f:
data = json.load(f)
ACCESS_KEY = data['aws_access_key_id']
SECRET_KEY = data['aws_secret_access_key']
SESSION_TOKEN = data['aws_session_token']
REGION = data['region']
def get_client(type):
global ACCESS_KEY
global SECRET_KEY
global SESSION_TOKEN
global REGION
return boto3.client(type, aws_access_key_id=ACCESS_KEY,
aws_secret_access_key=SECRET_KEY, aws_session_token=SESSION_TOKEN,
region_name=REGION)
def get_objects(FILENAME):
logging.info(os.getcwd())
result = dict()
object_set = set()
try:
f = open(FILENAME, 'r')
temp_data = f.read().split('\n')
data = dict()
currfps = 0
obj_in_frame = []
for lines in temp_data:
lines = lines.replace('\n', '')
if 'FPS' in lines:
if currfps > 0 and len(obj_in_frame) > 0:
data[currfps] = obj_in_frame
obj_in_frame = []
currfps += 1
elif '%' in lines:
obj_in_frame.append(lines)
for key in data:
object_map = []
for obj in data[key]:
obj_name, obj_conf = obj.split()
obj_name = obj_name.replace(':', '')
object_set.add(obj_name)
obj_conf = int(obj_conf.replace('%', ''))
object_map.append({obj_name: obj_conf * 1.0 / 100})
result[key] = object_map
except Exception as e:
pass
return list(object_set)
if __name__ == '__main__':
ACCESS_KEY, SECRET_KEY, SESSION_TOKEN, REGION = '', '', '', ''
OUTPUT_FILENAME = 'results.txt'
PATH_DARKNET = '/home/pi/darknet/'
get_creds()
object_list = get_objects(PATH_DARKNET + OUTPUT_FILENAME)
object_name = sys.argv[1]
results = ''
if len(object_list) == 0:
results = 'no object detected'
else:
results = ', '.join(object_list)
upload_results(object_name, results)
| import boto3
from botocore.exceptions import ClientError
import logging
import subprocess
import string
import random
import time
import os
import sys
import time
import json
from ProgressPercentage import *
import logging
def upload_file(file_name, object_name=None):
RESULT_BUCKET_NAME = 'worm4047bucket2'
s3_client = get_client('s3')
max_retries = 5
while max_retries > 0:
try:
response = s3_client.upload_file(file_name, RESULT_BUCKET_NAME,
object_name, Callback=ProgressPercentage(file_name))
break
except ClientError as e:
logging.error(e)
max_retries -= 1
return max_retries > 0
def upload_results(object_name, results):
file_name = object_name
with open(file_name, 'w+') as f:
f.write(results)
return upload_file(file_name, object_name)
def get_creds():
global ACCESS_KEY
global SECRET_KEY
global SESSION_TOKEN
global REGION
cred_file = 'cred.json'
with open(cred_file) as f:
data = json.load(f)
ACCESS_KEY = data['aws_access_key_id']
SECRET_KEY = data['aws_secret_access_key']
SESSION_TOKEN = data['aws_session_token']
REGION = data['region']
def get_client(type):
global ACCESS_KEY
global SECRET_KEY
global SESSION_TOKEN
global REGION
return boto3.client(type, aws_access_key_id=ACCESS_KEY,
aws_secret_access_key=SECRET_KEY, aws_session_token=SESSION_TOKEN,
region_name=REGION)
def get_objects(FILENAME):
logging.info(os.getcwd())
result = dict()
object_set = set()
try:
f = open(FILENAME, 'r')
temp_data = f.read().split('\n')
data = dict()
currfps = 0
obj_in_frame = []
for lines in temp_data:
lines = lines.replace('\n', '')
if 'FPS' in lines:
if currfps > 0 and len(obj_in_frame) > 0:
data[currfps] = obj_in_frame
obj_in_frame = []
currfps += 1
elif '%' in lines:
obj_in_frame.append(lines)
for key in data:
object_map = []
for obj in data[key]:
obj_name, obj_conf = obj.split()
obj_name = obj_name.replace(':', '')
object_set.add(obj_name)
obj_conf = int(obj_conf.replace('%', ''))
object_map.append({obj_name: obj_conf * 1.0 / 100})
result[key] = object_map
except Exception as e:
pass
return list(object_set)
if __name__ == '__main__':
ACCESS_KEY, SECRET_KEY, SESSION_TOKEN, REGION = '', '', '', ''
OUTPUT_FILENAME = 'results.txt'
PATH_DARKNET = '/home/pi/darknet/'
get_creds()
object_list = get_objects(PATH_DARKNET + OUTPUT_FILENAME)
object_name = sys.argv[1]
results = ''
if len(object_list) == 0:
results = 'no object detected'
else:
results = ', '.join(object_list)
upload_results(object_name, results)
| import boto3
from botocore.exceptions import ClientError
import logging
import subprocess
import string
import random
import time
import os
import sys
import time
import json
from ProgressPercentage import *
import logging
def upload_file(file_name, object_name=None):
    """Upload a local file to the results S3 bucket, retrying on failure.

    :param file_name: path of the local file to upload
    :param object_name: S3 key to store the file under; defaults to
        file_name (the original passed None straight to boto3, which
        requires an explicit key)
    :return: True if the upload succeeded within 5 attempts
    """
    RESULT_BUCKET_NAME = "worm4047bucket2"
    if object_name is None:
        # boto3's upload_file needs a key; mirror the local name.
        object_name = file_name
    s3_client = get_client('s3')
    max_retries = 5
    while max_retries > 0:
        try:
            s3_client.upload_file(file_name, RESULT_BUCKET_NAME, object_name,
                                  Callback=ProgressPercentage(file_name))
            break
        except ClientError as e:
            logging.error(e)
            max_retries -= 1
    # max_retries hits 0 only if every attempt raised.
    return max_retries > 0
def upload_results(object_name, results):
    """Persist *results* to a local file named *object_name*, then push
    that file to S3 under the same key via upload_file()."""
    local_path = object_name
    with open(local_path, 'w+') as out_file:
        out_file.write(results)
    return upload_file(local_path, object_name)
def get_creds():
    """Load AWS credentials from cred.json into the module-level globals
    consumed by get_client()."""
    global ACCESS_KEY
    global SECRET_KEY
    global SESSION_TOKEN
    global REGION
    cred_file = "cred.json"
    with open(cred_file) as cred_handle:
        creds = json.load(cred_handle)
    # Assignments happen after the file is closed; same observable effect.
    ACCESS_KEY = creds['aws_access_key_id']
    SECRET_KEY = creds['aws_secret_access_key']
    SESSION_TOKEN = creds['aws_session_token']
    REGION = creds['region']
def get_client(type):
    """Return a boto3 client for service *type*, authenticated with the
    module-level credentials populated by get_creds()."""
    # Reading module globals needs no `global` declarations; the original
    # ones were no-ops for read-only access.
    return boto3.client(
        type,
        aws_access_key_id=ACCESS_KEY,
        aws_secret_access_key=SECRET_KEY,
        aws_session_token=SESSION_TOKEN,
        region_name=REGION,
    )
def get_objects(FILENAME):
    """Parse a darknet detection log and return the distinct object labels.

    The log interleaves per-frame header lines (containing 'FPS') with
    detection lines of the form '<label>: <NN>%'.

    :param FILENAME: path to the darknet results file
    :return: list of unique label strings; empty if the file is missing
        or unparseable (best-effort -- errors are logged, not raised)

    Bug fix vs. the original: detections seen after the LAST 'FPS'
    header were never flushed into the per-frame map, so labels that
    appeared only in the final frame were silently dropped. The file
    handle is now also closed deterministically, and parse errors are
    logged instead of swallowed. The unused per-frame confidence map the
    original built (and discarded) has been removed.
    """
    logging.info(os.getcwd())
    object_set = set()
    try:
        with open(FILENAME, 'r') as f:
            raw_lines = f.read().split('\n')
        frame_objects = dict()  # frame number -> raw detection lines
        current_frame = 0
        pending = []
        for raw_line in raw_lines:
            raw_line = raw_line.replace('\n', "")
            if 'FPS' in raw_line:
                # New frame header: flush the previous frame's detections.
                if current_frame > 0 and len(pending) > 0:
                    frame_objects[current_frame] = pending
                    pending = []
                current_frame += 1
            elif '%' in raw_line:
                pending.append(raw_line)
        # Flush detections accumulated after the final header (the fix).
        if current_frame > 0 and len(pending) > 0:
            frame_objects[current_frame] = pending
        for detections in frame_objects.values():
            for detection in detections:
                # 'person: 85%' -> label 'person' (confidence is unused).
                obj_name, _obj_conf = detection.split()
                object_set.add(obj_name.replace(':', ''))
    except Exception as e:
        # Best-effort parser: return whatever was collected, but log why.
        logging.error(e)
    return list(object_set)
# Script entry point: parse the darknet output file and publish the
# detected object names to S3 under the key given as argv[1].
if __name__ == '__main__':
    ACCESS_KEY, SECRET_KEY, SESSION_TOKEN, REGION = "", "", "", ""
    OUTPUT_FILENAME = "results.txt"
    PATH_DARKNET = "/home/pi/darknet/"
    get_creds()  # populate the AWS credential globals from cred.json
    object_list = get_objects(PATH_DARKNET + OUTPUT_FILENAME)
    object_name = sys.argv[1]  # S3 object key for this run's results
    results = ""
    if len(object_list) == 0:
        results = "no object detected"
    else:
        # Comma-separated label list, e.g. "person, dog, cat".
        results = ", ".join(object_list)
    # results[sys.argv[1]] = object_list
    upload_results(object_name, results)
4,
5,
6,
7,
8
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.