index (int64, 0-10k) | blob_id (string, length 40) | step-1 (string, 13-984k) | step-2 (string, 6-1.23M, ⌀) | step-3 (string, 15-1.34M, ⌀) | step-4 (string, 30-1.34M, ⌀) | step-5 (string, 64-1.2M, ⌀) | step-ids (sequence, length 1-5)
---|---|---|---|---|---|---|---
(⌀ marks columns that contain null cells)
1,900 | 1219f7b7ac335f3a69e289d1ab2b6318a2aef23f | <mask token>
| <mask token>
if len(sys.argv) != 2:
print('usage: part2.py puzzle_input')
exit(1)
<mask token>
for i in range(sys.maxsize):
digest = hashlib.md5(puzzle_input.encode('utf-8') + str(i).encode('utf-8')
).hexdigest()
if digest.startswith('000000'):
input_num = i
break
print(f'puzzle_input: {puzzle_input} solved with {input_num}')
print('\ndone.')
| <mask token>
if len(sys.argv) != 2:
print('usage: part2.py puzzle_input')
exit(1)
puzzle_input = sys.argv[1]
input_num = 0
for i in range(sys.maxsize):
digest = hashlib.md5(puzzle_input.encode('utf-8') + str(i).encode('utf-8')
).hexdigest()
if digest.startswith('000000'):
input_num = i
break
print(f'puzzle_input: {puzzle_input} solved with {input_num}')
print('\ndone.')
| import sys
import hashlib
if len(sys.argv) != 2:
print('usage: part2.py puzzle_input')
exit(1)
puzzle_input = sys.argv[1]
input_num = 0
for i in range(sys.maxsize):
digest = hashlib.md5(puzzle_input.encode('utf-8') + str(i).encode('utf-8')
).hexdigest()
if digest.startswith('000000'):
input_num = i
break
print(f'puzzle_input: {puzzle_input} solved with {input_num}')
print('\ndone.')
| #!/usr/bin/env python3
import sys
import hashlib
# Usage
if len(sys.argv) != 2:
print("usage: part2.py puzzle_input")
exit(1)
# Get Secret
puzzle_input = sys.argv[1]
input_num = 0
# Calculate
for i in range(sys.maxsize):
digest = hashlib.md5(puzzle_input.encode('utf-8')+str(i).encode('utf-8')).hexdigest()
if (digest.startswith('000000')): # must start with 6 zeros
input_num = i
        break
# Print Results
print(f'puzzle_input: {puzzle_input} solved with {input_num}')
print("\ndone.");
| [
0,
1,
2,
3,
4
] |
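Step-5 of row 1,900 is the original file: a brute-force search for the smallest integer i such that the MD5 digest of puzzle_input + str(i) starts with six hex zeros, i.e. the proof-of-work puzzle from Advent of Code 2015, day 4, part 2. A minimal standalone sketch of the same search follows; find_suffix is an illustrative name, and the abcdef/609043 pair is the published five-zero example from that puzzle:

# Illustrative sketch, not part of the dataset row above.
import hashlib
from itertools import count

def find_suffix(secret, zeros=6):
    # Smallest non-negative i such that md5(secret + str(i)) starts
    # with the requested number of leading hex zeros.
    target = '0' * zeros
    for i in count():
        digest = hashlib.md5((secret + str(i)).encode('utf-8')).hexdigest()
        if digest.startswith(target):
            return i

print(find_suffix('abcdef', zeros=5))  # -> 609043 in the five-zero variant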
1,901 | 6743a4f3c9118e790e52b586a36d71a735101702 | <mask token>
class CompanyInfo(object):
def __init__(self):
self._alter_list = None
self._basic_info = None
self._case_info_list = None
self._entinv_list = None
self._fr_position_list = None
self._frinv_list = None
self._person_list = None
self._share_holder_list = None
<mask token>
@alter_list.setter
def alter_list(self, value):
if isinstance(value, list):
self._alter_list = list()
for i in value:
if isinstance(i, EpInfo):
self._alter_list.append(i)
else:
self._alter_list.append(EpInfo.from_alipay_dict(i))
@property
def basic_info(self):
return self._basic_info
<mask token>
@property
def case_info_list(self):
return self._case_info_list
@case_info_list.setter
def case_info_list(self, value):
if isinstance(value, list):
self._case_info_list = list()
for i in value:
if isinstance(i, EpInfo):
self._case_info_list.append(i)
else:
self._case_info_list.append(EpInfo.from_alipay_dict(i))
<mask token>
<mask token>
@property
def fr_position_list(self):
return self._fr_position_list
<mask token>
<mask token>
@frinv_list.setter
def frinv_list(self, value):
if isinstance(value, list):
self._frinv_list = list()
for i in value:
if isinstance(i, EpInfo):
self._frinv_list.append(i)
else:
self._frinv_list.append(EpInfo.from_alipay_dict(i))
@property
def person_list(self):
return self._person_list
@person_list.setter
def person_list(self, value):
if isinstance(value, list):
self._person_list = list()
for i in value:
if isinstance(i, EpInfo):
self._person_list.append(i)
else:
self._person_list.append(EpInfo.from_alipay_dict(i))
@property
def share_holder_list(self):
return self._share_holder_list
@share_holder_list.setter
def share_holder_list(self, value):
if isinstance(value, list):
self._share_holder_list = list()
for i in value:
if isinstance(i, EpInfo):
self._share_holder_list.append(i)
else:
self._share_holder_list.append(EpInfo.from_alipay_dict(i))
def to_alipay_dict(self):
params = dict()
if self.alter_list:
if isinstance(self.alter_list, list):
for i in range(0, len(self.alter_list)):
element = self.alter_list[i]
if hasattr(element, 'to_alipay_dict'):
self.alter_list[i] = element.to_alipay_dict()
if hasattr(self.alter_list, 'to_alipay_dict'):
params['alter_list'] = self.alter_list.to_alipay_dict()
else:
params['alter_list'] = self.alter_list
if self.basic_info:
if hasattr(self.basic_info, 'to_alipay_dict'):
params['basic_info'] = self.basic_info.to_alipay_dict()
else:
params['basic_info'] = self.basic_info
if self.case_info_list:
if isinstance(self.case_info_list, list):
for i in range(0, len(self.case_info_list)):
element = self.case_info_list[i]
if hasattr(element, 'to_alipay_dict'):
self.case_info_list[i] = element.to_alipay_dict()
if hasattr(self.case_info_list, 'to_alipay_dict'):
params['case_info_list'] = self.case_info_list.to_alipay_dict()
else:
params['case_info_list'] = self.case_info_list
if self.entinv_list:
if isinstance(self.entinv_list, list):
for i in range(0, len(self.entinv_list)):
element = self.entinv_list[i]
if hasattr(element, 'to_alipay_dict'):
self.entinv_list[i] = element.to_alipay_dict()
if hasattr(self.entinv_list, 'to_alipay_dict'):
params['entinv_list'] = self.entinv_list.to_alipay_dict()
else:
params['entinv_list'] = self.entinv_list
if self.fr_position_list:
if isinstance(self.fr_position_list, list):
for i in range(0, len(self.fr_position_list)):
element = self.fr_position_list[i]
if hasattr(element, 'to_alipay_dict'):
self.fr_position_list[i] = element.to_alipay_dict()
if hasattr(self.fr_position_list, 'to_alipay_dict'):
params['fr_position_list'
] = self.fr_position_list.to_alipay_dict()
else:
params['fr_position_list'] = self.fr_position_list
if self.frinv_list:
if isinstance(self.frinv_list, list):
for i in range(0, len(self.frinv_list)):
element = self.frinv_list[i]
if hasattr(element, 'to_alipay_dict'):
self.frinv_list[i] = element.to_alipay_dict()
if hasattr(self.frinv_list, 'to_alipay_dict'):
params['frinv_list'] = self.frinv_list.to_alipay_dict()
else:
params['frinv_list'] = self.frinv_list
if self.person_list:
if isinstance(self.person_list, list):
for i in range(0, len(self.person_list)):
element = self.person_list[i]
if hasattr(element, 'to_alipay_dict'):
self.person_list[i] = element.to_alipay_dict()
if hasattr(self.person_list, 'to_alipay_dict'):
params['person_list'] = self.person_list.to_alipay_dict()
else:
params['person_list'] = self.person_list
if self.share_holder_list:
if isinstance(self.share_holder_list, list):
for i in range(0, len(self.share_holder_list)):
element = self.share_holder_list[i]
if hasattr(element, 'to_alipay_dict'):
self.share_holder_list[i] = element.to_alipay_dict()
if hasattr(self.share_holder_list, 'to_alipay_dict'):
params['share_holder_list'
] = self.share_holder_list.to_alipay_dict()
else:
params['share_holder_list'] = self.share_holder_list
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = CompanyInfo()
if 'alter_list' in d:
o.alter_list = d['alter_list']
if 'basic_info' in d:
o.basic_info = d['basic_info']
if 'case_info_list' in d:
o.case_info_list = d['case_info_list']
if 'entinv_list' in d:
o.entinv_list = d['entinv_list']
if 'fr_position_list' in d:
o.fr_position_list = d['fr_position_list']
if 'frinv_list' in d:
o.frinv_list = d['frinv_list']
if 'person_list' in d:
o.person_list = d['person_list']
if 'share_holder_list' in d:
o.share_holder_list = d['share_holder_list']
return o
| <mask token>
class CompanyInfo(object):
def __init__(self):
self._alter_list = None
self._basic_info = None
self._case_info_list = None
self._entinv_list = None
self._fr_position_list = None
self._frinv_list = None
self._person_list = None
self._share_holder_list = None
<mask token>
@alter_list.setter
def alter_list(self, value):
if isinstance(value, list):
self._alter_list = list()
for i in value:
if isinstance(i, EpInfo):
self._alter_list.append(i)
else:
self._alter_list.append(EpInfo.from_alipay_dict(i))
@property
def basic_info(self):
return self._basic_info
@basic_info.setter
def basic_info(self, value):
if isinstance(value, EpInfo):
self._basic_info = value
else:
self._basic_info = EpInfo.from_alipay_dict(value)
@property
def case_info_list(self):
return self._case_info_list
@case_info_list.setter
def case_info_list(self, value):
if isinstance(value, list):
self._case_info_list = list()
for i in value:
if isinstance(i, EpInfo):
self._case_info_list.append(i)
else:
self._case_info_list.append(EpInfo.from_alipay_dict(i))
@property
def entinv_list(self):
return self._entinv_list
@entinv_list.setter
def entinv_list(self, value):
if isinstance(value, list):
self._entinv_list = list()
for i in value:
if isinstance(i, EpInfo):
self._entinv_list.append(i)
else:
self._entinv_list.append(EpInfo.from_alipay_dict(i))
@property
def fr_position_list(self):
return self._fr_position_list
@fr_position_list.setter
def fr_position_list(self, value):
if isinstance(value, list):
self._fr_position_list = list()
for i in value:
if isinstance(i, EpInfo):
self._fr_position_list.append(i)
else:
self._fr_position_list.append(EpInfo.from_alipay_dict(i))
<mask token>
@frinv_list.setter
def frinv_list(self, value):
if isinstance(value, list):
self._frinv_list = list()
for i in value:
if isinstance(i, EpInfo):
self._frinv_list.append(i)
else:
self._frinv_list.append(EpInfo.from_alipay_dict(i))
@property
def person_list(self):
return self._person_list
@person_list.setter
def person_list(self, value):
if isinstance(value, list):
self._person_list = list()
for i in value:
if isinstance(i, EpInfo):
self._person_list.append(i)
else:
self._person_list.append(EpInfo.from_alipay_dict(i))
@property
def share_holder_list(self):
return self._share_holder_list
@share_holder_list.setter
def share_holder_list(self, value):
if isinstance(value, list):
self._share_holder_list = list()
for i in value:
if isinstance(i, EpInfo):
self._share_holder_list.append(i)
else:
self._share_holder_list.append(EpInfo.from_alipay_dict(i))
def to_alipay_dict(self):
params = dict()
if self.alter_list:
if isinstance(self.alter_list, list):
for i in range(0, len(self.alter_list)):
element = self.alter_list[i]
if hasattr(element, 'to_alipay_dict'):
self.alter_list[i] = element.to_alipay_dict()
if hasattr(self.alter_list, 'to_alipay_dict'):
params['alter_list'] = self.alter_list.to_alipay_dict()
else:
params['alter_list'] = self.alter_list
if self.basic_info:
if hasattr(self.basic_info, 'to_alipay_dict'):
params['basic_info'] = self.basic_info.to_alipay_dict()
else:
params['basic_info'] = self.basic_info
if self.case_info_list:
if isinstance(self.case_info_list, list):
for i in range(0, len(self.case_info_list)):
element = self.case_info_list[i]
if hasattr(element, 'to_alipay_dict'):
self.case_info_list[i] = element.to_alipay_dict()
if hasattr(self.case_info_list, 'to_alipay_dict'):
params['case_info_list'] = self.case_info_list.to_alipay_dict()
else:
params['case_info_list'] = self.case_info_list
if self.entinv_list:
if isinstance(self.entinv_list, list):
for i in range(0, len(self.entinv_list)):
element = self.entinv_list[i]
if hasattr(element, 'to_alipay_dict'):
self.entinv_list[i] = element.to_alipay_dict()
if hasattr(self.entinv_list, 'to_alipay_dict'):
params['entinv_list'] = self.entinv_list.to_alipay_dict()
else:
params['entinv_list'] = self.entinv_list
if self.fr_position_list:
if isinstance(self.fr_position_list, list):
for i in range(0, len(self.fr_position_list)):
element = self.fr_position_list[i]
if hasattr(element, 'to_alipay_dict'):
self.fr_position_list[i] = element.to_alipay_dict()
if hasattr(self.fr_position_list, 'to_alipay_dict'):
params['fr_position_list'
] = self.fr_position_list.to_alipay_dict()
else:
params['fr_position_list'] = self.fr_position_list
if self.frinv_list:
if isinstance(self.frinv_list, list):
for i in range(0, len(self.frinv_list)):
element = self.frinv_list[i]
if hasattr(element, 'to_alipay_dict'):
self.frinv_list[i] = element.to_alipay_dict()
if hasattr(self.frinv_list, 'to_alipay_dict'):
params['frinv_list'] = self.frinv_list.to_alipay_dict()
else:
params['frinv_list'] = self.frinv_list
if self.person_list:
if isinstance(self.person_list, list):
for i in range(0, len(self.person_list)):
element = self.person_list[i]
if hasattr(element, 'to_alipay_dict'):
self.person_list[i] = element.to_alipay_dict()
if hasattr(self.person_list, 'to_alipay_dict'):
params['person_list'] = self.person_list.to_alipay_dict()
else:
params['person_list'] = self.person_list
if self.share_holder_list:
if isinstance(self.share_holder_list, list):
for i in range(0, len(self.share_holder_list)):
element = self.share_holder_list[i]
if hasattr(element, 'to_alipay_dict'):
self.share_holder_list[i] = element.to_alipay_dict()
if hasattr(self.share_holder_list, 'to_alipay_dict'):
params['share_holder_list'
] = self.share_holder_list.to_alipay_dict()
else:
params['share_holder_list'] = self.share_holder_list
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = CompanyInfo()
if 'alter_list' in d:
o.alter_list = d['alter_list']
if 'basic_info' in d:
o.basic_info = d['basic_info']
if 'case_info_list' in d:
o.case_info_list = d['case_info_list']
if 'entinv_list' in d:
o.entinv_list = d['entinv_list']
if 'fr_position_list' in d:
o.fr_position_list = d['fr_position_list']
if 'frinv_list' in d:
o.frinv_list = d['frinv_list']
if 'person_list' in d:
o.person_list = d['person_list']
if 'share_holder_list' in d:
o.share_holder_list = d['share_holder_list']
return o
| <mask token>
class CompanyInfo(object):
def __init__(self):
self._alter_list = None
self._basic_info = None
self._case_info_list = None
self._entinv_list = None
self._fr_position_list = None
self._frinv_list = None
self._person_list = None
self._share_holder_list = None
@property
def alter_list(self):
return self._alter_list
@alter_list.setter
def alter_list(self, value):
if isinstance(value, list):
self._alter_list = list()
for i in value:
if isinstance(i, EpInfo):
self._alter_list.append(i)
else:
self._alter_list.append(EpInfo.from_alipay_dict(i))
@property
def basic_info(self):
return self._basic_info
@basic_info.setter
def basic_info(self, value):
if isinstance(value, EpInfo):
self._basic_info = value
else:
self._basic_info = EpInfo.from_alipay_dict(value)
@property
def case_info_list(self):
return self._case_info_list
@case_info_list.setter
def case_info_list(self, value):
if isinstance(value, list):
self._case_info_list = list()
for i in value:
if isinstance(i, EpInfo):
self._case_info_list.append(i)
else:
self._case_info_list.append(EpInfo.from_alipay_dict(i))
@property
def entinv_list(self):
return self._entinv_list
@entinv_list.setter
def entinv_list(self, value):
if isinstance(value, list):
self._entinv_list = list()
for i in value:
if isinstance(i, EpInfo):
self._entinv_list.append(i)
else:
self._entinv_list.append(EpInfo.from_alipay_dict(i))
@property
def fr_position_list(self):
return self._fr_position_list
@fr_position_list.setter
def fr_position_list(self, value):
if isinstance(value, list):
self._fr_position_list = list()
for i in value:
if isinstance(i, EpInfo):
self._fr_position_list.append(i)
else:
self._fr_position_list.append(EpInfo.from_alipay_dict(i))
<mask token>
@frinv_list.setter
def frinv_list(self, value):
if isinstance(value, list):
self._frinv_list = list()
for i in value:
if isinstance(i, EpInfo):
self._frinv_list.append(i)
else:
self._frinv_list.append(EpInfo.from_alipay_dict(i))
@property
def person_list(self):
return self._person_list
@person_list.setter
def person_list(self, value):
if isinstance(value, list):
self._person_list = list()
for i in value:
if isinstance(i, EpInfo):
self._person_list.append(i)
else:
self._person_list.append(EpInfo.from_alipay_dict(i))
@property
def share_holder_list(self):
return self._share_holder_list
@share_holder_list.setter
def share_holder_list(self, value):
if isinstance(value, list):
self._share_holder_list = list()
for i in value:
if isinstance(i, EpInfo):
self._share_holder_list.append(i)
else:
self._share_holder_list.append(EpInfo.from_alipay_dict(i))
def to_alipay_dict(self):
params = dict()
if self.alter_list:
if isinstance(self.alter_list, list):
for i in range(0, len(self.alter_list)):
element = self.alter_list[i]
if hasattr(element, 'to_alipay_dict'):
self.alter_list[i] = element.to_alipay_dict()
if hasattr(self.alter_list, 'to_alipay_dict'):
params['alter_list'] = self.alter_list.to_alipay_dict()
else:
params['alter_list'] = self.alter_list
if self.basic_info:
if hasattr(self.basic_info, 'to_alipay_dict'):
params['basic_info'] = self.basic_info.to_alipay_dict()
else:
params['basic_info'] = self.basic_info
if self.case_info_list:
if isinstance(self.case_info_list, list):
for i in range(0, len(self.case_info_list)):
element = self.case_info_list[i]
if hasattr(element, 'to_alipay_dict'):
self.case_info_list[i] = element.to_alipay_dict()
if hasattr(self.case_info_list, 'to_alipay_dict'):
params['case_info_list'] = self.case_info_list.to_alipay_dict()
else:
params['case_info_list'] = self.case_info_list
if self.entinv_list:
if isinstance(self.entinv_list, list):
for i in range(0, len(self.entinv_list)):
element = self.entinv_list[i]
if hasattr(element, 'to_alipay_dict'):
self.entinv_list[i] = element.to_alipay_dict()
if hasattr(self.entinv_list, 'to_alipay_dict'):
params['entinv_list'] = self.entinv_list.to_alipay_dict()
else:
params['entinv_list'] = self.entinv_list
if self.fr_position_list:
if isinstance(self.fr_position_list, list):
for i in range(0, len(self.fr_position_list)):
element = self.fr_position_list[i]
if hasattr(element, 'to_alipay_dict'):
self.fr_position_list[i] = element.to_alipay_dict()
if hasattr(self.fr_position_list, 'to_alipay_dict'):
params['fr_position_list'
] = self.fr_position_list.to_alipay_dict()
else:
params['fr_position_list'] = self.fr_position_list
if self.frinv_list:
if isinstance(self.frinv_list, list):
for i in range(0, len(self.frinv_list)):
element = self.frinv_list[i]
if hasattr(element, 'to_alipay_dict'):
self.frinv_list[i] = element.to_alipay_dict()
if hasattr(self.frinv_list, 'to_alipay_dict'):
params['frinv_list'] = self.frinv_list.to_alipay_dict()
else:
params['frinv_list'] = self.frinv_list
if self.person_list:
if isinstance(self.person_list, list):
for i in range(0, len(self.person_list)):
element = self.person_list[i]
if hasattr(element, 'to_alipay_dict'):
self.person_list[i] = element.to_alipay_dict()
if hasattr(self.person_list, 'to_alipay_dict'):
params['person_list'] = self.person_list.to_alipay_dict()
else:
params['person_list'] = self.person_list
if self.share_holder_list:
if isinstance(self.share_holder_list, list):
for i in range(0, len(self.share_holder_list)):
element = self.share_holder_list[i]
if hasattr(element, 'to_alipay_dict'):
self.share_holder_list[i] = element.to_alipay_dict()
if hasattr(self.share_holder_list, 'to_alipay_dict'):
params['share_holder_list'
] = self.share_holder_list.to_alipay_dict()
else:
params['share_holder_list'] = self.share_holder_list
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = CompanyInfo()
if 'alter_list' in d:
o.alter_list = d['alter_list']
if 'basic_info' in d:
o.basic_info = d['basic_info']
if 'case_info_list' in d:
o.case_info_list = d['case_info_list']
if 'entinv_list' in d:
o.entinv_list = d['entinv_list']
if 'fr_position_list' in d:
o.fr_position_list = d['fr_position_list']
if 'frinv_list' in d:
o.frinv_list = d['frinv_list']
if 'person_list' in d:
o.person_list = d['person_list']
if 'share_holder_list' in d:
o.share_holder_list = d['share_holder_list']
return o
| <mask token>
class CompanyInfo(object):
def __init__(self):
self._alter_list = None
self._basic_info = None
self._case_info_list = None
self._entinv_list = None
self._fr_position_list = None
self._frinv_list = None
self._person_list = None
self._share_holder_list = None
@property
def alter_list(self):
return self._alter_list
@alter_list.setter
def alter_list(self, value):
if isinstance(value, list):
self._alter_list = list()
for i in value:
if isinstance(i, EpInfo):
self._alter_list.append(i)
else:
self._alter_list.append(EpInfo.from_alipay_dict(i))
@property
def basic_info(self):
return self._basic_info
@basic_info.setter
def basic_info(self, value):
if isinstance(value, EpInfo):
self._basic_info = value
else:
self._basic_info = EpInfo.from_alipay_dict(value)
@property
def case_info_list(self):
return self._case_info_list
@case_info_list.setter
def case_info_list(self, value):
if isinstance(value, list):
self._case_info_list = list()
for i in value:
if isinstance(i, EpInfo):
self._case_info_list.append(i)
else:
self._case_info_list.append(EpInfo.from_alipay_dict(i))
@property
def entinv_list(self):
return self._entinv_list
@entinv_list.setter
def entinv_list(self, value):
if isinstance(value, list):
self._entinv_list = list()
for i in value:
if isinstance(i, EpInfo):
self._entinv_list.append(i)
else:
self._entinv_list.append(EpInfo.from_alipay_dict(i))
@property
def fr_position_list(self):
return self._fr_position_list
@fr_position_list.setter
def fr_position_list(self, value):
if isinstance(value, list):
self._fr_position_list = list()
for i in value:
if isinstance(i, EpInfo):
self._fr_position_list.append(i)
else:
self._fr_position_list.append(EpInfo.from_alipay_dict(i))
@property
def frinv_list(self):
return self._frinv_list
@frinv_list.setter
def frinv_list(self, value):
if isinstance(value, list):
self._frinv_list = list()
for i in value:
if isinstance(i, EpInfo):
self._frinv_list.append(i)
else:
self._frinv_list.append(EpInfo.from_alipay_dict(i))
@property
def person_list(self):
return self._person_list
@person_list.setter
def person_list(self, value):
if isinstance(value, list):
self._person_list = list()
for i in value:
if isinstance(i, EpInfo):
self._person_list.append(i)
else:
self._person_list.append(EpInfo.from_alipay_dict(i))
@property
def share_holder_list(self):
return self._share_holder_list
@share_holder_list.setter
def share_holder_list(self, value):
if isinstance(value, list):
self._share_holder_list = list()
for i in value:
if isinstance(i, EpInfo):
self._share_holder_list.append(i)
else:
self._share_holder_list.append(EpInfo.from_alipay_dict(i))
def to_alipay_dict(self):
params = dict()
if self.alter_list:
if isinstance(self.alter_list, list):
for i in range(0, len(self.alter_list)):
element = self.alter_list[i]
if hasattr(element, 'to_alipay_dict'):
self.alter_list[i] = element.to_alipay_dict()
if hasattr(self.alter_list, 'to_alipay_dict'):
params['alter_list'] = self.alter_list.to_alipay_dict()
else:
params['alter_list'] = self.alter_list
if self.basic_info:
if hasattr(self.basic_info, 'to_alipay_dict'):
params['basic_info'] = self.basic_info.to_alipay_dict()
else:
params['basic_info'] = self.basic_info
if self.case_info_list:
if isinstance(self.case_info_list, list):
for i in range(0, len(self.case_info_list)):
element = self.case_info_list[i]
if hasattr(element, 'to_alipay_dict'):
self.case_info_list[i] = element.to_alipay_dict()
if hasattr(self.case_info_list, 'to_alipay_dict'):
params['case_info_list'] = self.case_info_list.to_alipay_dict()
else:
params['case_info_list'] = self.case_info_list
if self.entinv_list:
if isinstance(self.entinv_list, list):
for i in range(0, len(self.entinv_list)):
element = self.entinv_list[i]
if hasattr(element, 'to_alipay_dict'):
self.entinv_list[i] = element.to_alipay_dict()
if hasattr(self.entinv_list, 'to_alipay_dict'):
params['entinv_list'] = self.entinv_list.to_alipay_dict()
else:
params['entinv_list'] = self.entinv_list
if self.fr_position_list:
if isinstance(self.fr_position_list, list):
for i in range(0, len(self.fr_position_list)):
element = self.fr_position_list[i]
if hasattr(element, 'to_alipay_dict'):
self.fr_position_list[i] = element.to_alipay_dict()
if hasattr(self.fr_position_list, 'to_alipay_dict'):
params['fr_position_list'
] = self.fr_position_list.to_alipay_dict()
else:
params['fr_position_list'] = self.fr_position_list
if self.frinv_list:
if isinstance(self.frinv_list, list):
for i in range(0, len(self.frinv_list)):
element = self.frinv_list[i]
if hasattr(element, 'to_alipay_dict'):
self.frinv_list[i] = element.to_alipay_dict()
if hasattr(self.frinv_list, 'to_alipay_dict'):
params['frinv_list'] = self.frinv_list.to_alipay_dict()
else:
params['frinv_list'] = self.frinv_list
if self.person_list:
if isinstance(self.person_list, list):
for i in range(0, len(self.person_list)):
element = self.person_list[i]
if hasattr(element, 'to_alipay_dict'):
self.person_list[i] = element.to_alipay_dict()
if hasattr(self.person_list, 'to_alipay_dict'):
params['person_list'] = self.person_list.to_alipay_dict()
else:
params['person_list'] = self.person_list
if self.share_holder_list:
if isinstance(self.share_holder_list, list):
for i in range(0, len(self.share_holder_list)):
element = self.share_holder_list[i]
if hasattr(element, 'to_alipay_dict'):
self.share_holder_list[i] = element.to_alipay_dict()
if hasattr(self.share_holder_list, 'to_alipay_dict'):
params['share_holder_list'
] = self.share_holder_list.to_alipay_dict()
else:
params['share_holder_list'] = self.share_holder_list
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = CompanyInfo()
if 'alter_list' in d:
o.alter_list = d['alter_list']
if 'basic_info' in d:
o.basic_info = d['basic_info']
if 'case_info_list' in d:
o.case_info_list = d['case_info_list']
if 'entinv_list' in d:
o.entinv_list = d['entinv_list']
if 'fr_position_list' in d:
o.fr_position_list = d['fr_position_list']
if 'frinv_list' in d:
o.frinv_list = d['frinv_list']
if 'person_list' in d:
o.person_list = d['person_list']
if 'share_holder_list' in d:
o.share_holder_list = d['share_holder_list']
return o
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.EpInfo import EpInfo
class CompanyInfo(object):
def __init__(self):
self._alter_list = None
self._basic_info = None
self._case_info_list = None
self._entinv_list = None
self._fr_position_list = None
self._frinv_list = None
self._person_list = None
self._share_holder_list = None
@property
def alter_list(self):
return self._alter_list
@alter_list.setter
def alter_list(self, value):
if isinstance(value, list):
self._alter_list = list()
for i in value:
if isinstance(i, EpInfo):
self._alter_list.append(i)
else:
self._alter_list.append(EpInfo.from_alipay_dict(i))
@property
def basic_info(self):
return self._basic_info
@basic_info.setter
def basic_info(self, value):
if isinstance(value, EpInfo):
self._basic_info = value
else:
self._basic_info = EpInfo.from_alipay_dict(value)
@property
def case_info_list(self):
return self._case_info_list
@case_info_list.setter
def case_info_list(self, value):
if isinstance(value, list):
self._case_info_list = list()
for i in value:
if isinstance(i, EpInfo):
self._case_info_list.append(i)
else:
self._case_info_list.append(EpInfo.from_alipay_dict(i))
@property
def entinv_list(self):
return self._entinv_list
@entinv_list.setter
def entinv_list(self, value):
if isinstance(value, list):
self._entinv_list = list()
for i in value:
if isinstance(i, EpInfo):
self._entinv_list.append(i)
else:
self._entinv_list.append(EpInfo.from_alipay_dict(i))
@property
def fr_position_list(self):
return self._fr_position_list
@fr_position_list.setter
def fr_position_list(self, value):
if isinstance(value, list):
self._fr_position_list = list()
for i in value:
if isinstance(i, EpInfo):
self._fr_position_list.append(i)
else:
self._fr_position_list.append(EpInfo.from_alipay_dict(i))
@property
def frinv_list(self):
return self._frinv_list
@frinv_list.setter
def frinv_list(self, value):
if isinstance(value, list):
self._frinv_list = list()
for i in value:
if isinstance(i, EpInfo):
self._frinv_list.append(i)
else:
self._frinv_list.append(EpInfo.from_alipay_dict(i))
@property
def person_list(self):
return self._person_list
@person_list.setter
def person_list(self, value):
if isinstance(value, list):
self._person_list = list()
for i in value:
if isinstance(i, EpInfo):
self._person_list.append(i)
else:
self._person_list.append(EpInfo.from_alipay_dict(i))
@property
def share_holder_list(self):
return self._share_holder_list
@share_holder_list.setter
def share_holder_list(self, value):
if isinstance(value, list):
self._share_holder_list = list()
for i in value:
if isinstance(i, EpInfo):
self._share_holder_list.append(i)
else:
self._share_holder_list.append(EpInfo.from_alipay_dict(i))
def to_alipay_dict(self):
params = dict()
if self.alter_list:
if isinstance(self.alter_list, list):
for i in range(0, len(self.alter_list)):
element = self.alter_list[i]
if hasattr(element, 'to_alipay_dict'):
self.alter_list[i] = element.to_alipay_dict()
if hasattr(self.alter_list, 'to_alipay_dict'):
params['alter_list'] = self.alter_list.to_alipay_dict()
else:
params['alter_list'] = self.alter_list
if self.basic_info:
if hasattr(self.basic_info, 'to_alipay_dict'):
params['basic_info'] = self.basic_info.to_alipay_dict()
else:
params['basic_info'] = self.basic_info
if self.case_info_list:
if isinstance(self.case_info_list, list):
for i in range(0, len(self.case_info_list)):
element = self.case_info_list[i]
if hasattr(element, 'to_alipay_dict'):
self.case_info_list[i] = element.to_alipay_dict()
if hasattr(self.case_info_list, 'to_alipay_dict'):
params['case_info_list'] = self.case_info_list.to_alipay_dict()
else:
params['case_info_list'] = self.case_info_list
if self.entinv_list:
if isinstance(self.entinv_list, list):
for i in range(0, len(self.entinv_list)):
element = self.entinv_list[i]
if hasattr(element, 'to_alipay_dict'):
self.entinv_list[i] = element.to_alipay_dict()
if hasattr(self.entinv_list, 'to_alipay_dict'):
params['entinv_list'] = self.entinv_list.to_alipay_dict()
else:
params['entinv_list'] = self.entinv_list
if self.fr_position_list:
if isinstance(self.fr_position_list, list):
for i in range(0, len(self.fr_position_list)):
element = self.fr_position_list[i]
if hasattr(element, 'to_alipay_dict'):
self.fr_position_list[i] = element.to_alipay_dict()
if hasattr(self.fr_position_list, 'to_alipay_dict'):
params['fr_position_list'] = self.fr_position_list.to_alipay_dict()
else:
params['fr_position_list'] = self.fr_position_list
if self.frinv_list:
if isinstance(self.frinv_list, list):
for i in range(0, len(self.frinv_list)):
element = self.frinv_list[i]
if hasattr(element, 'to_alipay_dict'):
self.frinv_list[i] = element.to_alipay_dict()
if hasattr(self.frinv_list, 'to_alipay_dict'):
params['frinv_list'] = self.frinv_list.to_alipay_dict()
else:
params['frinv_list'] = self.frinv_list
if self.person_list:
if isinstance(self.person_list, list):
for i in range(0, len(self.person_list)):
element = self.person_list[i]
if hasattr(element, 'to_alipay_dict'):
self.person_list[i] = element.to_alipay_dict()
if hasattr(self.person_list, 'to_alipay_dict'):
params['person_list'] = self.person_list.to_alipay_dict()
else:
params['person_list'] = self.person_list
if self.share_holder_list:
if isinstance(self.share_holder_list, list):
for i in range(0, len(self.share_holder_list)):
element = self.share_holder_list[i]
if hasattr(element, 'to_alipay_dict'):
self.share_holder_list[i] = element.to_alipay_dict()
if hasattr(self.share_holder_list, 'to_alipay_dict'):
params['share_holder_list'] = self.share_holder_list.to_alipay_dict()
else:
params['share_holder_list'] = self.share_holder_list
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = CompanyInfo()
if 'alter_list' in d:
o.alter_list = d['alter_list']
if 'basic_info' in d:
o.basic_info = d['basic_info']
if 'case_info_list' in d:
o.case_info_list = d['case_info_list']
if 'entinv_list' in d:
o.entinv_list = d['entinv_list']
if 'fr_position_list' in d:
o.fr_position_list = d['fr_position_list']
if 'frinv_list' in d:
o.frinv_list = d['frinv_list']
if 'person_list' in d:
o.person_list = d['person_list']
if 'share_holder_list' in d:
o.share_holder_list = d['share_holder_list']
return o
| [
14,
18,
19,
20,
22
] |
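Row 1,901 is auto-generated Alipay SDK boilerplate, and every list-valued property repeats one idiom: the setter accepts either EpInfo instances or raw dicts and normalizes both to instances via EpInfo.from_alipay_dict. A stripped-down, runnable sketch of that coercion pattern, with Model standing in for EpInfo:

# Illustrative sketch of the setter idiom above; Model stands in for EpInfo.
class Model:
    def __init__(self, name=None):
        self.name = name

    @staticmethod
    def from_alipay_dict(d):
        if not d:
            return None
        o = Model()
        o.name = d.get('name')
        return o

class Holder:
    def __init__(self):
        self._items = None

    @property
    def items(self):
        return self._items

    @items.setter
    def items(self, value):
        if isinstance(value, list):
            # Normalize a mixed list of instances and dicts to instances.
            self._items = [v if isinstance(v, Model) else Model.from_alipay_dict(v)
                           for v in value]

h = Holder()
h.items = [Model('a'), {'name': 'b'}]
print([m.name for m in h.items])  # -> ['a', 'b']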
1,902 | 9e950f6fe895cfd497e94139397e8a0f19725dc0 | <mask token>
| <mask token>
urlpatterns += [url('^api-auth/', include('rest_framework.urls', namespace=
'rest_framework'))]
| <mask token>
urlpatterns = [url('^admin/', admin.site.urls), url('^', include(
'books.urls')), url('^', include('borrowed_books.urls')), url('^',
include('reviews.urls')), url('^', include('api_root.urls')), url(
'^api-token-auth/', obtain_jwt_token), url('^', include(
'django.contrib.auth.urls')), url('^account/', include('rest_auth.urls'
)), url('^account/registration/', include('rest_auth.registration.urls'))]
urlpatterns += [url('^api-auth/', include('rest_framework.urls', namespace=
'rest_framework'))]
| <mask token>
from django.conf.urls import url, include
from django.contrib import admin
from rest_framework_jwt.views import obtain_jwt_token
urlpatterns = [url('^admin/', admin.site.urls), url('^', include(
'books.urls')), url('^', include('borrowed_books.urls')), url('^',
include('reviews.urls')), url('^', include('api_root.urls')), url(
'^api-token-auth/', obtain_jwt_token), url('^', include(
'django.contrib.auth.urls')), url('^account/', include('rest_auth.urls'
)), url('^account/registration/', include('rest_auth.registration.urls'))]
urlpatterns += [url('^api-auth/', include('rest_framework.urls', namespace=
'rest_framework'))]
| """lendbooks URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
from rest_framework_jwt.views import obtain_jwt_token
urlpatterns = [
url(r'^admin/', admin.site.urls), # Django Admin
url(r'^', include('books.urls')), # Books Management
url(r'^', include('borrowed_books.urls')), # Borrow Books
url(r'^', include('reviews.urls')), # Reviews
url(r'^', include('api_root.urls')),
url(r'^api-token-auth/', obtain_jwt_token), # JWT
    url(r'^', include('django.contrib.auth.urls')), # Django's own auth
url(r'^account/', include('rest_auth.urls')), # Account Management
url(r'^account/registration/', include('rest_auth.registration.urls')), # Account Registration
]
urlpatterns += [
url(r'^api-auth/', include('rest_framework.urls',
namespace='rest_framework')),
]
| [
0,
1,
2,
3,
4
] |
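Step-5 of row 1,902 mounts rest_framework_jwt's obtain_jwt_token view at ^api-token-auth/. A client-side sketch of using that endpoint; the host, port, and credentials are assumptions, while 'JWT' is rest_framework_jwt's default Authorization header prefix:

# Client-side sketch only; assumes a dev server and a valid user exist.
import requests

BASE = 'http://localhost:8000'
resp = requests.post(BASE + '/api-token-auth/',
                     json={'username': 'alice', 'password': 'secret'})
token = resp.json()['token']                      # issued by obtain_jwt_token
books = requests.get(BASE + '/',
                     headers={'Authorization': 'JWT ' + token})
print(books.status_code)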
1,903 | b76b188dc77077ae70f320d01e9410d44b171974 | <mask token>
| <mask token>
for i in range(200):
zeros[i, i] = s[i]
for n in range(1, 7):
    r = 2 ** n  # was 2 ** i: a stale index left over from the diagonal-fill loop above
p = np.dot(u, zeros[:, :r])
svd = np.dot(p, v[:r, :])
fig.add_subplot(3, 2, n)
plt.imshow(svd, 'gray')
plt.show()
| <mask token>
fig = plt.figure()
img = plt.imread('clown.bmp')
u, s, v = np.linalg.svd(img)
zeros = np.zeros((200, 320))
for i in range(200):
zeros[i, i] = s[i]
for n in range(1, 7):
    r = 2 ** n  # was 2 ** i: a stale index left over from the diagonal-fill loop above
p = np.dot(u, zeros[:, :r])
svd = np.dot(p, v[:r, :])
fig.add_subplot(3, 2, n)
plt.imshow(svd, 'gray')
plt.show()
| import numpy as np
import matplotlib.pyplot as plt
fig = plt.figure()
img = plt.imread('clown.bmp')
u, s, v = np.linalg.svd(img)
zeros = np.zeros((200, 320))
for i in range(200):
zeros[i, i] = s[i]
for n in range(1, 7):
    r = 2 ** n  # was 2 ** i: a stale index left over from the diagonal-fill loop above
p = np.dot(u, zeros[:, :r])
svd = np.dot(p, v[:r, :])
fig.add_subplot(3, 2, n)
plt.imshow(svd, 'gray')
plt.show()
| # Ömer Malik Kalembaşı 150180112
import numpy as np
import matplotlib.pyplot as plt
fig = plt.figure()
img = plt.imread("clown.bmp")
u, s, v = np.linalg.svd(img)
zeros = np.zeros((200, 320))
for i in range(200):
zeros[i, i] = s[i]
for n in range(1, 7):
    r = 2**n  # was 2**i: a stale index left over from the diagonal-fill loop above
p = np.dot(u, zeros[:, :r])
svd = np.dot(p, v[:r, :])
fig.add_subplot(3, 2, n)
plt.imshow(svd, "gray")
plt.show()
| [
0,
1,
2,
3,
4
] |
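Row 1,903 builds a rank-r reconstruction by zero-padding the singular values into a 200x320 diagonal block and multiplying back; with the r = 2 ** n fix, panel n shows ranks 2, 4, 8, 16, 32, 64. The same approximation written directly against the thin SVD; random data stands in for clown.bmp, which is not part of this dump:

# Standalone sketch; rng data stands in for the 200x320 grayscale image.
import numpy as np

rng = np.random.default_rng(0)
a = rng.standard_normal((200, 320))
u, s, v = np.linalg.svd(a, full_matrices=False)

def rank_r(u, s, v, r):
    # Best rank-r approximation in Frobenius norm (Eckart-Young).
    return (u[:, :r] * s[:r]) @ v[:r, :]

# The residual equals the l2 norm of the discarded singular values.
err = np.linalg.norm(a - rank_r(u, s, v, 64))
print(np.isclose(err, np.sqrt((s[64:] ** 2).sum())))  # -> True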
1,904 | b68cc09347584dfc613b2e38d036b124c9af7952 | <mask token>
| <mask token>
auth.set_access_token('956917059287375875-EThit80MxgQPTJlh7ZObqyHsoV8Q2D7',
'eLv893meGppqfX3xOr8SJ93kpsbZpoOiRsVM3XTgJryZM')
<mask token>
auth.set_access_token('956917059287375875-EThit80MxgQPTJlh7ZObqyHsoV8Q2D7',
'eLv893meGppqfX3xOr8SJ93kpsbZpoOiRsVM3XTgJryZM')
<mask token>
for tweet in tweepy.Cursor(api.search, q='#ootd', count=100,
include_entities=True, lang='en', since='2018-11-01').items():
if 'media' in tweet.entities:
for image in tweet.entities['media']:
favs = tweet.favorite_count
if favs > 30:
csvWriter.writerow([favs, image['media_url'], tweet.created_at]
)
| <mask token>
auth = OAuthHandler('WNUpykrIjiGF0NKoV7qk7uiNj',
'Nhe0GjOkbaQKbPMLTqcAYQnqMnz3Edpdup28h2R2KqRLa6iBDN')
auth.set_access_token('956917059287375875-EThit80MxgQPTJlh7ZObqyHsoV8Q2D7',
'eLv893meGppqfX3xOr8SJ93kpsbZpoOiRsVM3XTgJryZM')
auth = tweepy.OAuthHandler('WNUpykrIjiGF0NKoV7qk7uiNj',
'Nhe0GjOkbaQKbPMLTqcAYQnqMnz3Edpdup28h2R2KqRLa6iBDN')
auth.set_access_token('956917059287375875-EThit80MxgQPTJlh7ZObqyHsoV8Q2D7',
'eLv893meGppqfX3xOr8SJ93kpsbZpoOiRsVM3XTgJryZM')
api = tweepy.API(auth, wait_on_rate_limit=True)
csvFile = open('data.csv', 'a')
csvWriter = csv.writer(csvFile)
for tweet in tweepy.Cursor(api.search, q='#ootd', count=100,
include_entities=True, lang='en', since='2018-11-01').items():
if 'media' in tweet.entities:
for image in tweet.entities['media']:
favs = tweet.favorite_count
if favs > 30:
csvWriter.writerow([favs, image['media_url'], tweet.created_at]
)
| import csv
import tweepy
import pandas as pd
from tweepy.auth import OAuthHandler
auth = OAuthHandler('WNUpykrIjiGF0NKoV7qk7uiNj',
'Nhe0GjOkbaQKbPMLTqcAYQnqMnz3Edpdup28h2R2KqRLa6iBDN')
auth.set_access_token('956917059287375875-EThit80MxgQPTJlh7ZObqyHsoV8Q2D7',
'eLv893meGppqfX3xOr8SJ93kpsbZpoOiRsVM3XTgJryZM')
auth = tweepy.OAuthHandler('WNUpykrIjiGF0NKoV7qk7uiNj',
'Nhe0GjOkbaQKbPMLTqcAYQnqMnz3Edpdup28h2R2KqRLa6iBDN')
auth.set_access_token('956917059287375875-EThit80MxgQPTJlh7ZObqyHsoV8Q2D7',
'eLv893meGppqfX3xOr8SJ93kpsbZpoOiRsVM3XTgJryZM')
api = tweepy.API(auth, wait_on_rate_limit=True)
csvFile = open('data.csv', 'a')
csvWriter = csv.writer(csvFile)
for tweet in tweepy.Cursor(api.search, q='#ootd', count=100,
include_entities=True, lang='en', since='2018-11-01').items():
if 'media' in tweet.entities:
for image in tweet.entities['media']:
favs = tweet.favorite_count
if favs > 30:
csvWriter.writerow([favs, image['media_url'], tweet.created_at]
)
| import csv
import tweepy
import pandas as pd
# Input your credentials here
from tweepy.auth import OAuthHandler
auth = OAuthHandler('WNUpykrIjiGF0NKoV7qk7uiNj', 'Nhe0GjOkbaQKbPMLTqcAYQnqMnz3Edpdup28h2R2KqRLa6iBDN')
auth.set_access_token('956917059287375875-EThit80MxgQPTJlh7ZObqyHsoV8Q2D7', 'eLv893meGppqfX3xOr8SJ93kpsbZpoOiRsVM3XTgJryZM')
auth = tweepy.OAuthHandler('WNUpykrIjiGF0NKoV7qk7uiNj', 'Nhe0GjOkbaQKbPMLTqcAYQnqMnz3Edpdup28h2R2KqRLa6iBDN')
auth.set_access_token('956917059287375875-EThit80MxgQPTJlh7ZObqyHsoV8Q2D7', 'eLv893meGppqfX3xOr8SJ93kpsbZpoOiRsVM3XTgJryZM')
api = tweepy.API(auth,wait_on_rate_limit=True)
csvFile = open('data.csv', 'a')
csvWriter = csv.writer(csvFile)
for tweet in tweepy.Cursor(api.search,q="#ootd",count=100,
include_entities=True,
lang="en",
since="2018-11-01").items():
if 'media' in tweet.entities:
for image in tweet.entities['media']:
favs = tweet.favorite_count
if favs > 30:
csvWriter.writerow([favs, image['media_url'], tweet.created_at]) | [
0,
1,
2,
3,
4
] |
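Row 1,904 pages through Twitter search results with tweepy.Cursor and keeps only tweets that have more than 30 favorites. The filter-and-write half of that logic, isolated with a stub Tweet dataclass so it runs without credentials (Tweet here is a stand-in, not tweepy's model):

# Self-contained sketch of the row's filter-and-write logic on stub data.
import csv
import io
from dataclasses import dataclass

@dataclass
class Tweet:
    favorite_count: int
    media_url: str
    created_at: str

tweets = [Tweet(45, 'http://example.com/a.jpg', '2018-11-02'),
          Tweet(12, 'http://example.com/b.jpg', '2018-11-03')]

buf = io.StringIO()
writer = csv.writer(buf)
for t in tweets:
    if t.favorite_count > 30:               # same threshold as the script
        writer.writerow([t.favorite_count, t.media_url, t.created_at])
print(buf.getvalue().strip())               # -> 45,http://example.com/a.jpg,2018-11-02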
1,905 | 4ba722e685c7608fcfd5111131c96847c0408a02 | import wfdb as wf
import numpy as np
from scipy import signal as ss
from datasets import mitdb as dm
from matplotlib import pyplot as plt
def show_path(path):
""" As a plot """
# Read in the data
record = wf.rdsamp(path)
annotation = wf.rdann(path, 'atr')
data = record.p_signals
cha = data[:, 0]
print 'Channel type:', record.signame[0]
times = np.arange(len(cha), dtype = float)
times /= record.fs
plt.plot(times, cha)
plt.xlabel('Time [s]')
plt.show()
def show_annotations(path):
""" Exemplary code """
record = wf.rdsamp(path)
annotation = wf.rdann(path, 'atr')
# Get data and annotations for the first 2000 samples
howmany = 2000
channel = record.p_signals[:howmany, 0]
    # Extract all of the annotation related information
where = annotation.annsamp < howmany
samp = annotation.annsamp[where]
# Convert to numpy.array to get fancy indexing access
types = np.array(annotation.anntype)
types = types[where]
times = np.arange(howmany, dtype = 'float') / record.fs
plt.plot(times, channel)
# Prepare qrs information for the plot
qrs_times = times[samp]
# Scale to show markers at the top
qrs_values = np.ones_like(qrs_times)
qrs_values *= channel.max() * 1.4
plt.plot(qrs_times, qrs_values, 'ro')
# Also show annotation code
# And their words
for it, sam in enumerate(samp):
# Get the annotation position
xa = times[sam]
ya = channel.max() * 1.1
# Use just the first letter
a_txt = types[it]
plt.annotate(a_txt, xy = (xa, ya))
plt.xlim([0, 4])
plt.xlabel('Time [s]')
plt.show()
def show_objective():
""" For the model """
# Choose a record
records = dm.get_records()
path = records[17]
record = wf.rdsamp(path)
ann = wf.rdann(path, 'atr')
chid = 0
print 'Channel:', record.signame[chid]
cha = record.p_signals[:, chid]
# These were found manually
sta = 184000
end = sta + 1000
times = np.arange(end-sta, dtype = 'float')
times /= record.fs
# Extract the annotations for that fragment
where = (sta < ann.annsamp) & (ann.annsamp < end)
samples = ann.annsamp[where] - sta
print samples
# Prepare dirac-comb type of labels
qrs_values = np.zeros_like(times)
qrs_values[samples] = 1
# Prepare gaussian-comb type of labels
kernel = ss.hamming(36)
qrs_gauss = np.convolve(kernel,
qrs_values,
mode = 'same')
# Make the plots
fig = plt.figure()
ax1 = fig.add_subplot(3,1,1)
ax1.plot(times, cha[sta : end])
ax2 = fig.add_subplot(3,1,2, sharex=ax1)
ax2.plot(times,
qrs_values,
'C1',
lw = 4,
alpha = 0.888)
ax3 = fig.add_subplot(3,1,3, sharex=ax1)
ax3.plot(times,
qrs_gauss,
'C3',
lw = 4,
alpha = 0.888)
plt.setp(ax1.get_xticklabels(), visible=False)
plt.setp(ax2.get_xticklabels(), visible=False)
plt.xlabel('Time [s]')
plt.xlim([0, 2.5])
plt.show()
def show_objective_part2():
""" For the model """
# Choose a record
records = dm.get_records()
path = records[13]
record = wf.rdsamp(path)
ann = wf.rdann(path, 'atr')
chid = 0
print 'File:', path
print 'Channel:', record.signame[chid]
cha = record.p_signals[:, chid]
# These were found manually
sta = 184000
end = sta + 1000
times = np.arange(end-sta, dtype = 'float')
times /= record.fs
# Extract the annotations for that fragment
where = (sta < ann.annsamp) & (ann.annsamp < end)
samples = ann.annsamp[where] - sta
print samples
# Prepare dirac-comb type of labels
qrs_values = np.zeros_like(times)
qrs_values[samples] = 1
# Prepare gaussian-comb type of labels
kernel = ss.hamming(36)
qrs_gauss = np.convolve(kernel,
qrs_values,
mode = 'same')
# Make the plots
fig = plt.figure()
ax1 = fig.add_subplot(2,1,1)
ax1.plot(times, cha[sta : end])
ax1.set_title('Input', loc = 'left')
ax2 = fig.add_subplot(2,1,2, sharex=ax1)
ax2.plot(times,
qrs_gauss,
'C3',
lw = 4,
alpha = 0.888)
ax2.set_title('Output', loc = 'left')
ax1.grid()
ax2.grid()
plt.setp(ax1.get_xticklabels(), visible=False)
plt.xlabel('Time [s]')
plt.xlim([0, 2.5])
plt.show()
| null | null | null | null | [
0
] |
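Row 1,905 has only a step-1 cell (steps 2 through 5 are null). Its show_objective functions turn beat annotations into trainable targets by convolving a dirac comb (1 at each annotated sample) with a Hamming window. That smoothing step on its own, free of the ECG plumbing; ss.windows.hamming is the current scipy spelling of the ss.hamming call used above:

# Standalone sketch of the qrs_values -> qrs_gauss step above.
import numpy as np
from scipy import signal as ss

labels = np.zeros(50)
labels[[10, 30, 42]] = 1                    # spike train: one 1 per beat
kernel = ss.windows.hamming(9)
soft = np.convolve(kernel, labels, mode='same')
print(int(np.argmax(soft[:20])))            # -> 10: bumps stay centred on the beats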
1,906 | efbfe95acbe0b97e863c8788bca4a71633da36b3 | <mask token>
class Location:
<mask token>
<mask token>
def update_overall_average_value(self):
value_sum = 0
for event in self.events:
value_sum += event.value
value_count = len(self.events)
if value_count > 0:
self.overall_average_value = value_sum / value_count
| <mask token>
class Location:
def __init__(self, location_dict):
self.x = location_dict['x']
self.y = location_dict['y']
self.id = location_dict['id']
self.events = []
self.latest_average_value = 0
self.latest_event_count = 0
self.average_value_at_time_dict = {}
self.overall_average_value = 0
<mask token>
def update_overall_average_value(self):
value_sum = 0
for event in self.events:
value_sum += event.value
value_count = len(self.events)
if value_count > 0:
self.overall_average_value = value_sum / value_count
| <mask token>
class Location:
def __init__(self, location_dict):
self.x = location_dict['x']
self.y = location_dict['y']
self.id = location_dict['id']
self.events = []
self.latest_average_value = 0
self.latest_event_count = 0
self.average_value_at_time_dict = {}
self.overall_average_value = 0
def update_average_values_at_time(self, time_to_calculate):
self.latest_event_count = 0
sum_of_values = 0
for event in self.events:
if event.time_rounded_to_minute == time_to_calculate:
sum_of_values += event.value
self.latest_event_count += 1
self.latest_average_value = 0
if self.latest_event_count > 0:
self.latest_average_value = sum_of_values / self.latest_event_count
formatted_time = datetime.strftime(datetime.utcfromtimestamp(
time_to_calculate + 3600), '%d/%m/%Y %H:%M:%S')
self.average_value_at_time_dict[formatted_time
] = self.latest_average_value
def update_overall_average_value(self):
value_sum = 0
for event in self.events:
value_sum += event.value
value_count = len(self.events)
if value_count > 0:
self.overall_average_value = value_sum / value_count
| from datetime import datetime
class Location:
def __init__(self, location_dict):
self.x = location_dict['x']
self.y = location_dict['y']
self.id = location_dict['id']
self.events = []
self.latest_average_value = 0
self.latest_event_count = 0
self.average_value_at_time_dict = {}
self.overall_average_value = 0
def update_average_values_at_time(self, time_to_calculate):
self.latest_event_count = 0
sum_of_values = 0
for event in self.events:
if event.time_rounded_to_minute == time_to_calculate:
sum_of_values += event.value
self.latest_event_count += 1
self.latest_average_value = 0
if self.latest_event_count > 0:
self.latest_average_value = sum_of_values / self.latest_event_count
formatted_time = datetime.strftime(datetime.utcfromtimestamp(
time_to_calculate + 3600), '%d/%m/%Y %H:%M:%S')
self.average_value_at_time_dict[formatted_time
] = self.latest_average_value
def update_overall_average_value(self):
value_sum = 0
for event in self.events:
value_sum += event.value
value_count = len(self.events)
if value_count > 0:
self.overall_average_value = value_sum / value_count
| from datetime import datetime
class Location:
def __init__(self, location_dict):
self.x = location_dict['x']
self.y = location_dict['y']
self.id = location_dict['id']
self.events = []
self.latest_average_value = 0
self.latest_event_count = 0
self.average_value_at_time_dict = {}
self.overall_average_value = 0
def update_average_values_at_time(self, time_to_calculate):
self.latest_event_count = 0
sum_of_values = 0
for event in self.events:
if event.time_rounded_to_minute == time_to_calculate:
# remove event from self.events
# remove event id from event_id_set in main
sum_of_values += event.value
self.latest_event_count += 1
self.latest_average_value = 0
if self.latest_event_count > 0:
self.latest_average_value = sum_of_values / self.latest_event_count
formatted_time = datetime.strftime(datetime.utcfromtimestamp(time_to_calculate + 3600), "%d/%m/%Y %H:%M:%S")
self.average_value_at_time_dict[formatted_time] = self.latest_average_value
def update_overall_average_value(self):
value_sum = 0
for event in self.events:
value_sum += event.value
value_count = len(self.events)
if value_count > 0:
self.overall_average_value = value_sum / value_count
| [
2,
3,
4,
5,
6
] |
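Row 1,906's Location accumulates events and maintains per-minute and overall averages. A usage sketch against the fully unmasked step-5 class; Event is a stub whose two attributes are inferred from what Location reads, since the real Event type is not in this row:

# Assumes the step-5 Location class above is in scope; Event is a stub.
from dataclasses import dataclass

@dataclass
class Event:
    value: float
    time_rounded_to_minute: int

loc = Location({'x': 1, 'y': 2, 'id': 'a'})
loc.events = [Event(10, 60), Event(20, 60), Event(30, 120)]
loc.update_overall_average_value()
print(loc.overall_average_value)        # -> 20.0
loc.update_average_values_at_time(60)
print(loc.latest_average_value)         # -> 15.0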
1,907 | 6c2699ff8e739595a2648d53745dc3c788536d7b | <mask token>
def noOfStepsDP(n, k):
dp = [0] * max(n + 1, 3)
dp[0] = 1
dp[1] = 1
dp[2] = 2
for i in range(3, n + 1):
dp[i] = dp[i - 1] + dp[i - 2] + dp[i - 3]
return dp[n]
<mask token>
| def noOfSteps(n, k):
if n < 0:
return 0
if n == 0:
return 1
t_steps = 0
for i in range(1, k + 1):
t_steps += noOfSteps(n - i, k)
return t_steps
def noOfStepsDP(n, k):
dp = [0] * max(n + 1, 3)
dp[0] = 1
dp[1] = 1
dp[2] = 2
for i in range(3, n + 1):
dp[i] = dp[i - 1] + dp[i - 2] + dp[i - 3]
return dp[n]
<mask token>
| def noOfSteps(n, k):
if n < 0:
return 0
if n == 0:
return 1
t_steps = 0
for i in range(1, k + 1):
t_steps += noOfSteps(n - i, k)
return t_steps
def noOfStepsDP(n, k):
dp = [0] * max(n + 1, 3)
dp[0] = 1
dp[1] = 1
dp[2] = 2
for i in range(3, n + 1):
dp[i] = dp[i - 1] + dp[i - 2] + dp[i - 3]
return dp[n]
<mask token>
noOfSteps(n, 3), noOfStepsDP(n, 3)
| def noOfSteps(n, k):
if n < 0:
return 0
if n == 0:
return 1
t_steps = 0
for i in range(1, k + 1):
t_steps += noOfSteps(n - i, k)
return t_steps
def noOfStepsDP(n, k):
dp = [0] * max(n + 1, 3)
dp[0] = 1
dp[1] = 1
dp[2] = 2
for i in range(3, n + 1):
dp[i] = dp[i - 1] + dp[i - 2] + dp[i - 3]
return dp[n]
n = 10
noOfSteps(n, 3), noOfStepsDP(n, 3)
| # Q. In How many ways N stair can be climb if allowesd steps are 1, 2 or 3.
# triple Sort
def noOfSteps(n, k):
if n<0: return 0
if n == 0: return 1
t_steps = 0
for i in range(1, k+1):
t_steps += noOfSteps(n-i, k)
return t_steps
def noOfStepsDP(n,k):
dp = [0]*max((n+1),3)
dp[0] = 1
dp[1] = 1
dp[2] = 2
for i in range(3, n+1):
dp[i] = dp[i-1]+dp[i-2]+dp[i-3]
return dp[n]
n = 10
noOfSteps(n,3), noOfStepsDP(n,3) | [
1,
2,
3,
4,
5
] |
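Row 1,907 pairs the exponential recursion with an O(n) table; note that noOfStepsDP ignores its k argument and hard-codes the three-step recurrence. A standalone check of that recurrence (dp[3] = 2 + 1 + 1 = 4, dp[4] = 4 + 2 + 1 = 7):

# Standalone check; mirrors the DP above with k fixed at 3.
def count_ways(n):
    dp = [0] * max(n + 1, 3)
    dp[0], dp[1], dp[2] = 1, 1, 2
    for i in range(3, n + 1):
        dp[i] = dp[i - 1] + dp[i - 2] + dp[i - 3]
    return dp[n]

assert [count_ways(n) for n in range(5)] == [1, 1, 2, 4, 7]
print('ok')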
1,908 | c7f26978333c7e6cccf7451ea5d10511a66b62c2 | <mask token>
| <mask token>
eval(compile(base64.b64decode(code), '<string>', 'exec'))
| <mask token>
code = (
b'CmltcG9ydCBweW1vbmdvCmltcG9ydCByYW5kb20KaW1wb3J0IHJlCmltcG9ydCBzdHJpbmcKaW1wb3J0IHN5cwppbXBvcnQgZ2V0b3B0CmltcG9ydCBwcHJpbnQKCiMgQ29weXJpZ2h0IDIwMTUKIyBNb25nb0RCLCBJbmMuCiMgQXV0aG9yOiBBbmRyZXcgRXJsaWNoc29uICAgYWplQDEwZ2VuLmNvbQojCiMgSWYgeW91IGFyZSBhIHN0dWRlbnQgYW5kIHJlYWRpbmcgdGhpcyBjb2RlLCB0dXJuIGJhY2sgbm93LCBiZWZvcmUKIyB0aGUgTW9uZ29EQiBnb2RzIHNtaXRlIHlvdS4KCmNvbm5lY3Rpb24gPSBOb25lCmRiID0gTm9uZQptb25nb3N0ciA9ICJtb25nb2RiOi8vbG9jYWxob3N0OjI3MDE3IgpkYl9uYW1lID0gImFkbWluIgpyc19uYW1lID0gIm0xMDEiCgojIHRoaXMgc2NyaXB0IHdpbGwgY2hlY2sgdGhhdCBhIHJlcGxpY2Egc2V0IHdpdGggdGhyZWUgbm9kZXMgaXMgcnVubmluZyBvbiBhIGhvc3QKCiMgY29tbWFuZCBsaW5lIGFyZyBwYXJzaW5nIHRvIG1ha2UgZm9sa3MgaGFwcHkgd2hvIHdhbnQgdG8gcnVuIGF0IG1vbmdvbGFicyBvciBtb25nb2hxCiMgdGhpcyBmdW5jdGlvbnMgdXNlcyBnbG9iYWwgdmFycyB0byBjb21tdW5pY2F0ZS4gZm9yZ2l2ZSBtZS4KZGVmIGFyZ19wYXJzaW5nKGFyZ3YpOgoKICAgIGdsb2JhbCB3ZWJob3N0CiAgICBnbG9iYWwgbW9uZ29zdHIKICAgIGdsb2JhbCBkYl9uYW1lCgogICAgdHJ5OgogICAgICAgIG9wdHMsIGFyZ3MgPSBnZXRvcHQuZ2V0b3B0KGFyZ3YsICItcDotbTotZDoiKQogICAgZXhjZXB0IGdldG9wdC5HZXRvcHRFcnJvcjoKICAgICAgICBwcmludCgidXNhZ2UgdmFsaWRhdGUucHkgLW0gbW9uZ29Db25uZWN0U3RyaW5nIikKICAgICAgICBwcmludCgiXHRtb25nb0Nvbm5lY3Rpb25TdHJpbmcgZGVmYXVsdCB0byB7MH0iLmZvcm1hdChtb25nb3N0cikpCiAgICAgICAgcHJpbnQoIlx0ZGF0YWJhc2VOYW1lIGRlZmF1bHRzIHRvIHswfSIuZm9ybWF0KGRiX25hbWUpKQogICAgICAgIHN5cy5leGl0KDIpCiAgICBmb3Igb3B0LCBhcmcgaW4gb3B0czoKICAgICAgICBpZiAob3B0ID09ICctaCcpOgogICAgICAgICAgICBwcmludCgidXNhZ2UgdmFsaWRhdGUucHkgLW0gbW9uZ29Db25uZWN0U3RyaW5nIC1kIGRhdGFiYXNlTmFtZSIpCiAgICAgICAgICAgIHN5cy5leGl0KDIpCiAgICAgICAgZWxpZiBvcHQgaW4gKCItbSIpOgogICAgICAgICAgICBtb25nb3N0ciA9IGFyZwogICAgICAgICAgICBwcmludCgiT3ZlcnJpZGluZyBNb25nb0RCIGNvbm5lY3Rpb24gc3RyaW5nIHRvIGJlICIsIG1vbmdvc3RyKQogICAgICAgIGVsaWYgb3B0IGluICgiLWQiKToKICAgICAgICAgICAgZGJfbmFtZSA9IGFyZwogICAgICAgICAgICBwcmludCgiT3ZlcnJpZGluZyBNb25nb0RCIGRhdGFiYXNlIHRvIGJlICIsIGRiX25hbWUpCgojIGdldHMgdGhlIHJlcGxpY2Egc2V0IHN0YXR1cwpkZWYgZ2V0X3JzX3N0YXR1cygpOgogICAgZGIgPSBjb25uZWN0aW9uLmFkbWluCiAgICBycyA9IGRiLmNvbW1hbmQoInJlcGxTZXRHZXRTdGF0dXMiKQogICAgcmV0dXJuIHJzCgojIGdldHMgdGhlIHJlcGxpY2Egc3RhdGUgY29uZmlnCmRlZiBnZXRfcnNfY29uZmlndXJhdGlvbigpOgogICAgZGIgPSBjb25uZWN0aW9uLmxvY2FsCiAgICBjb2xsID0gZGIuc3lzdGVtLnJlcGxzZXQKICAgIHJldHVybiBjb2xsLmZpbmRfb25lKCkKCmRlZiByZXBsX3NldF9ydW5uaW5nKG51bV9ub2Rlcyk6CgogICAgdHJ5OgogICAgICAgIHJzID0gZ2V0X3JzX3N0YXR1cygpCiAgICAgICAgY29uZiA9IGdldF9yc19jb25maWd1cmF0aW9uKCkKICAgICAgICBob3N0cyAgPSBjb25uZWN0aW9uLmhvc3RzCiAgICBleGNlcHQ6CiAgICAgICAgcHJpbnQoImNhbid0IHF1ZXJ5IE1vbmdvREIuLmlzIGl0IHJ1bm5pbmc/IikKICAgICAgICByYWlzZQogICAgICAgIHJldHVybiBGYWxzZQoKICAgIGlmIChyc1snb2snXSAhPSAxKToKICAgICAgICBwcmludCgiU29ycnksIG9rIGlzIG5vdCAxIGZvciBycy5zdGF0dXMoKSIpCiAgICAgICAgcHJpbnQoIkhlcmUgaXMgd2hhdCBJIGdldDoiKQogICAgICAgIHBwID0gcHByaW50LlByZXR0eVByaW50ZXIoZGVwdGg9NikKICAgICAgICBwcC5wcHJpbnQocnMpCiAgICAgICAgcmV0dXJuIEZhbHNlCgogICAgaWYgKGxlbihyc1snbWVtYmVycyddKSAhPSBudW1fbm9kZXMpOgogICAgICAgIHByaW50KCJTb3JyeSwgdGhlcmUgbmVlZCB0byBiZSB0aHJlZSBtZW1iZXJzIG9mIHRoZSByZXBsaWNhIHNldC4iKQogICAgICAgIHByaW50KCJoZXJlIGlzIHRoZSBtZW1iZXJzIGFycmF5IEkgc2VlIikKCiAgICAgICAgcHAgPSBwcHJpbnQuUHJldHR5UHJpbnRlcihkZXB0aD02KQogICAgICAgIHBwLnBwcmludChyc1snbWVtYmVycyddKQogICAgICAgIHJldHVybiBGYWxzZQogICAgCiAgICBwcmludCgiTG9va3MgZ29vZC4gUmVwbGljYSBzZXQgd2l0aCB0aHJlZSBub2RlcyBydW5uaW5nIikKICAgIHJldHVybiBUcnVlCgpkZWYgZ3JhY2VmdWxfZXhpdChpKToKICAgIGNvbm5lY3Rpb24uY2xvc2UoKQogICAgc3lzLmV4aXQoaSkKCiMgbWFpbiBzZWN0aW9uIG9mIHRoZSBjb2RlCmRlZiBtYWluKGFyZ3YpOgogICAgICAgICAgICAKICAgIGFyZ19wYXJzaW5nKGFyZ3YpCiAgICBnbG9iYWwgY29ubmVjdGlvbgogICAgZ2xvYmFsIGRiCgogICAgcHJpbnQoIldlbGNvbWUgd
G8gdGhlIEhXIDYueCByZXBsaWNhIENoZWNrZXIuIE15IGpvYiBpcyB0byBtYWtlIHN1cmUgeW91IHN0YXJ0ZWQgYSByZXBsaWNhIHNldCB3aXRoIHRocmVlIG5vZGVzIikKCiAgICAjIGNvbm5lY3QgdG8gdGhlIGRiIChtb25nb3N0ciB3YXMgc2V0IGluIGFyZ19wYXJzaW5nKQogICAgdHJ5OgogICAgICAgIGNvbm5lY3Rpb24gPSBweW1vbmdvLk1vbmdvQ2xpZW50KG1vbmdvc3RyLCByZXBsaWNhU2V0PXJzX25hbWUpCiAgICAgICAgZGIgPSBjb25uZWN0aW9uW2RiX25hbWVdCiAgICBleGNlcHQ6CiAgICAgICAgcHJpbnQoImNhbid0IGNvbm5lY3QgdG8gTW9uZ29EQiByZXBsaWNhIixyc19uYW1lLCIgc2V0IHVzaW5nIiwgbW9uZ29zdHIsICIuIElzIGl0IHJ1bm5pbmc/IikKICAgICAgICBleGl0KDIpICAgICAjIG5vIGdyYWNlZnVsIGV4aXQgaWYgaXQgaXMgbm90IGNvbm5lY3RlZAogICAgICAgIAogICAgaWYgKG5vdCByZXBsX3NldF9ydW5uaW5nKDMpKToKICAgICAgICBwcmludCgiU29ycnksIHRoZSByZXBsaWNhIHNldCBkb2VzIG5vdCBzZWVtIHRvIGJlIHJ1bm5pbmciKQogICAgICAgIGdyYWNlZnVsX2V4aXQoMSkKICAgIAogICAgIyBpZiB5b3UgYXJlIHJlYWRpbmcgdGhpcyBpbiBjbGVhcnRleHQsIHlvdSBhcmUgdmlvbGF0aW5nIHRoZSBob25vciBjb2RlLgogICAgIyBZb3UgY2FuIHN0aWxsIHJlZGVlbSB5b3Vyc2VsZi4gR2V0IGl0IHdvcmtpbmcgYW5kIGRvbid0IHN1Ym1pdCB0aGUgdmFsaWRhdGlvbiBjb2RlIHVudGlsIHlvdSBkby4KICAgICMgQWxsIGEgbWFuIGhhcyBhdCB0aGUgZW5kIG9mIHRoZSBkYXkgaXMgaGlzIHdvcmQuCiAgICBwcmludCgiVGVzdHMgUGFzc2VkIGZvciBIVyA2LjUuIFlvdXIgSFcgNi41IHZhbGlkYXRpb24gY29kZSBpcyBranZqa2wzMjkwbWYwbTIwZjJramp2IikKICAgIGdyYWNlZnVsX2V4aXQoMCkKCmlmIF9fbmFtZV9fID09ICJfX21haW5fXyI6CiAgICBtYWluKHN5cy5hcmd2WzE6XSkKCgoKCgoKCgoK'
)
eval(compile(base64.b64decode(code), '<string>', 'exec'))
| import base64
code = (
b'CmltcG9ydCBweW1vbmdvCmltcG9ydCByYW5kb20KaW1wb3J0IHJlCmltcG9ydCBzdHJpbmcKaW1wb3J0IHN5cwppbXBvcnQgZ2V0b3B0CmltcG9ydCBwcHJpbnQKCiMgQ29weXJpZ2h0IDIwMTUKIyBNb25nb0RCLCBJbmMuCiMgQXV0aG9yOiBBbmRyZXcgRXJsaWNoc29uICAgYWplQDEwZ2VuLmNvbQojCiMgSWYgeW91IGFyZSBhIHN0dWRlbnQgYW5kIHJlYWRpbmcgdGhpcyBjb2RlLCB0dXJuIGJhY2sgbm93LCBiZWZvcmUKIyB0aGUgTW9uZ29EQiBnb2RzIHNtaXRlIHlvdS4KCmNvbm5lY3Rpb24gPSBOb25lCmRiID0gTm9uZQptb25nb3N0ciA9ICJtb25nb2RiOi8vbG9jYWxob3N0OjI3MDE3IgpkYl9uYW1lID0gImFkbWluIgpyc19uYW1lID0gIm0xMDEiCgojIHRoaXMgc2NyaXB0IHdpbGwgY2hlY2sgdGhhdCBhIHJlcGxpY2Egc2V0IHdpdGggdGhyZWUgbm9kZXMgaXMgcnVubmluZyBvbiBhIGhvc3QKCiMgY29tbWFuZCBsaW5lIGFyZyBwYXJzaW5nIHRvIG1ha2UgZm9sa3MgaGFwcHkgd2hvIHdhbnQgdG8gcnVuIGF0IG1vbmdvbGFicyBvciBtb25nb2hxCiMgdGhpcyBmdW5jdGlvbnMgdXNlcyBnbG9iYWwgdmFycyB0byBjb21tdW5pY2F0ZS4gZm9yZ2l2ZSBtZS4KZGVmIGFyZ19wYXJzaW5nKGFyZ3YpOgoKICAgIGdsb2JhbCB3ZWJob3N0CiAgICBnbG9iYWwgbW9uZ29zdHIKICAgIGdsb2JhbCBkYl9uYW1lCgogICAgdHJ5OgogICAgICAgIG9wdHMsIGFyZ3MgPSBnZXRvcHQuZ2V0b3B0KGFyZ3YsICItcDotbTotZDoiKQogICAgZXhjZXB0IGdldG9wdC5HZXRvcHRFcnJvcjoKICAgICAgICBwcmludCgidXNhZ2UgdmFsaWRhdGUucHkgLW0gbW9uZ29Db25uZWN0U3RyaW5nIikKICAgICAgICBwcmludCgiXHRtb25nb0Nvbm5lY3Rpb25TdHJpbmcgZGVmYXVsdCB0byB7MH0iLmZvcm1hdChtb25nb3N0cikpCiAgICAgICAgcHJpbnQoIlx0ZGF0YWJhc2VOYW1lIGRlZmF1bHRzIHRvIHswfSIuZm9ybWF0KGRiX25hbWUpKQogICAgICAgIHN5cy5leGl0KDIpCiAgICBmb3Igb3B0LCBhcmcgaW4gb3B0czoKICAgICAgICBpZiAob3B0ID09ICctaCcpOgogICAgICAgICAgICBwcmludCgidXNhZ2UgdmFsaWRhdGUucHkgLW0gbW9uZ29Db25uZWN0U3RyaW5nIC1kIGRhdGFiYXNlTmFtZSIpCiAgICAgICAgICAgIHN5cy5leGl0KDIpCiAgICAgICAgZWxpZiBvcHQgaW4gKCItbSIpOgogICAgICAgICAgICBtb25nb3N0ciA9IGFyZwogICAgICAgICAgICBwcmludCgiT3ZlcnJpZGluZyBNb25nb0RCIGNvbm5lY3Rpb24gc3RyaW5nIHRvIGJlICIsIG1vbmdvc3RyKQogICAgICAgIGVsaWYgb3B0IGluICgiLWQiKToKICAgICAgICAgICAgZGJfbmFtZSA9IGFyZwogICAgICAgICAgICBwcmludCgiT3ZlcnJpZGluZyBNb25nb0RCIGRhdGFiYXNlIHRvIGJlICIsIGRiX25hbWUpCgojIGdldHMgdGhlIHJlcGxpY2Egc2V0IHN0YXR1cwpkZWYgZ2V0X3JzX3N0YXR1cygpOgogICAgZGIgPSBjb25uZWN0aW9uLmFkbWluCiAgICBycyA9IGRiLmNvbW1hbmQoInJlcGxTZXRHZXRTdGF0dXMiKQogICAgcmV0dXJuIHJzCgojIGdldHMgdGhlIHJlcGxpY2Egc3RhdGUgY29uZmlnCmRlZiBnZXRfcnNfY29uZmlndXJhdGlvbigpOgogICAgZGIgPSBjb25uZWN0aW9uLmxvY2FsCiAgICBjb2xsID0gZGIuc3lzdGVtLnJlcGxzZXQKICAgIHJldHVybiBjb2xsLmZpbmRfb25lKCkKCmRlZiByZXBsX3NldF9ydW5uaW5nKG51bV9ub2Rlcyk6CgogICAgdHJ5OgogICAgICAgIHJzID0gZ2V0X3JzX3N0YXR1cygpCiAgICAgICAgY29uZiA9IGdldF9yc19jb25maWd1cmF0aW9uKCkKICAgICAgICBob3N0cyAgPSBjb25uZWN0aW9uLmhvc3RzCiAgICBleGNlcHQ6CiAgICAgICAgcHJpbnQoImNhbid0IHF1ZXJ5IE1vbmdvREIuLmlzIGl0IHJ1bm5pbmc/IikKICAgICAgICByYWlzZQogICAgICAgIHJldHVybiBGYWxzZQoKICAgIGlmIChyc1snb2snXSAhPSAxKToKICAgICAgICBwcmludCgiU29ycnksIG9rIGlzIG5vdCAxIGZvciBycy5zdGF0dXMoKSIpCiAgICAgICAgcHJpbnQoIkhlcmUgaXMgd2hhdCBJIGdldDoiKQogICAgICAgIHBwID0gcHByaW50LlByZXR0eVByaW50ZXIoZGVwdGg9NikKICAgICAgICBwcC5wcHJpbnQocnMpCiAgICAgICAgcmV0dXJuIEZhbHNlCgogICAgaWYgKGxlbihyc1snbWVtYmVycyddKSAhPSBudW1fbm9kZXMpOgogICAgICAgIHByaW50KCJTb3JyeSwgdGhlcmUgbmVlZCB0byBiZSB0aHJlZSBtZW1iZXJzIG9mIHRoZSByZXBsaWNhIHNldC4iKQogICAgICAgIHByaW50KCJoZXJlIGlzIHRoZSBtZW1iZXJzIGFycmF5IEkgc2VlIikKCiAgICAgICAgcHAgPSBwcHJpbnQuUHJldHR5UHJpbnRlcihkZXB0aD02KQogICAgICAgIHBwLnBwcmludChyc1snbWVtYmVycyddKQogICAgICAgIHJldHVybiBGYWxzZQogICAgCiAgICBwcmludCgiTG9va3MgZ29vZC4gUmVwbGljYSBzZXQgd2l0aCB0aHJlZSBub2RlcyBydW5uaW5nIikKICAgIHJldHVybiBUcnVlCgpkZWYgZ3JhY2VmdWxfZXhpdChpKToKICAgIGNvbm5lY3Rpb24uY2xvc2UoKQogICAgc3lzLmV4aXQoaSkKCiMgbWFpbiBzZWN0aW9uIG9mIHRoZSBjb2RlCmRlZiBtYWluKGFyZ3YpOgogICAgICAgICAgICAKICAgIGFyZ19wYXJzaW5nKGFyZ3YpCiAgICBnbG9iYWwgY29ubmVjdGlvbgogICAgZ2xvYmFsIGRiCgogICAgcHJpbnQoIldlbGNvbWUgd
G8gdGhlIEhXIDYueCByZXBsaWNhIENoZWNrZXIuIE15IGpvYiBpcyB0byBtYWtlIHN1cmUgeW91IHN0YXJ0ZWQgYSByZXBsaWNhIHNldCB3aXRoIHRocmVlIG5vZGVzIikKCiAgICAjIGNvbm5lY3QgdG8gdGhlIGRiIChtb25nb3N0ciB3YXMgc2V0IGluIGFyZ19wYXJzaW5nKQogICAgdHJ5OgogICAgICAgIGNvbm5lY3Rpb24gPSBweW1vbmdvLk1vbmdvQ2xpZW50KG1vbmdvc3RyLCByZXBsaWNhU2V0PXJzX25hbWUpCiAgICAgICAgZGIgPSBjb25uZWN0aW9uW2RiX25hbWVdCiAgICBleGNlcHQ6CiAgICAgICAgcHJpbnQoImNhbid0IGNvbm5lY3QgdG8gTW9uZ29EQiByZXBsaWNhIixyc19uYW1lLCIgc2V0IHVzaW5nIiwgbW9uZ29zdHIsICIuIElzIGl0IHJ1bm5pbmc/IikKICAgICAgICBleGl0KDIpICAgICAjIG5vIGdyYWNlZnVsIGV4aXQgaWYgaXQgaXMgbm90IGNvbm5lY3RlZAogICAgICAgIAogICAgaWYgKG5vdCByZXBsX3NldF9ydW5uaW5nKDMpKToKICAgICAgICBwcmludCgiU29ycnksIHRoZSByZXBsaWNhIHNldCBkb2VzIG5vdCBzZWVtIHRvIGJlIHJ1bm5pbmciKQogICAgICAgIGdyYWNlZnVsX2V4aXQoMSkKICAgIAogICAgIyBpZiB5b3UgYXJlIHJlYWRpbmcgdGhpcyBpbiBjbGVhcnRleHQsIHlvdSBhcmUgdmlvbGF0aW5nIHRoZSBob25vciBjb2RlLgogICAgIyBZb3UgY2FuIHN0aWxsIHJlZGVlbSB5b3Vyc2VsZi4gR2V0IGl0IHdvcmtpbmcgYW5kIGRvbid0IHN1Ym1pdCB0aGUgdmFsaWRhdGlvbiBjb2RlIHVudGlsIHlvdSBkby4KICAgICMgQWxsIGEgbWFuIGhhcyBhdCB0aGUgZW5kIG9mIHRoZSBkYXkgaXMgaGlzIHdvcmQuCiAgICBwcmludCgiVGVzdHMgUGFzc2VkIGZvciBIVyA2LjUuIFlvdXIgSFcgNi41IHZhbGlkYXRpb24gY29kZSBpcyBranZqa2wzMjkwbWYwbTIwZjJramp2IikKICAgIGdyYWNlZnVsX2V4aXQoMCkKCmlmIF9fbmFtZV9fID09ICJfX21haW5fXyI6CiAgICBtYWluKHN5cy5hcmd2WzE6XSkKCgoKCgoKCgoK'
)
eval(compile(base64.b64decode(code), '<string>', 'exec'))
| import base64
code=b'CmltcG9ydCBweW1vbmdvCmltcG9ydCByYW5kb20KaW1wb3J0IHJlCmltcG9ydCBzdHJpbmcKaW1wb3J0IHN5cwppbXBvcnQgZ2V0b3B0CmltcG9ydCBwcHJpbnQKCiMgQ29weXJpZ2h0IDIwMTUKIyBNb25nb0RCLCBJbmMuCiMgQXV0aG9yOiBBbmRyZXcgRXJsaWNoc29uICAgYWplQDEwZ2VuLmNvbQojCiMgSWYgeW91IGFyZSBhIHN0dWRlbnQgYW5kIHJlYWRpbmcgdGhpcyBjb2RlLCB0dXJuIGJhY2sgbm93LCBiZWZvcmUKIyB0aGUgTW9uZ29EQiBnb2RzIHNtaXRlIHlvdS4KCmNvbm5lY3Rpb24gPSBOb25lCmRiID0gTm9uZQptb25nb3N0ciA9ICJtb25nb2RiOi8vbG9jYWxob3N0OjI3MDE3IgpkYl9uYW1lID0gImFkbWluIgpyc19uYW1lID0gIm0xMDEiCgojIHRoaXMgc2NyaXB0IHdpbGwgY2hlY2sgdGhhdCBhIHJlcGxpY2Egc2V0IHdpdGggdGhyZWUgbm9kZXMgaXMgcnVubmluZyBvbiBhIGhvc3QKCiMgY29tbWFuZCBsaW5lIGFyZyBwYXJzaW5nIHRvIG1ha2UgZm9sa3MgaGFwcHkgd2hvIHdhbnQgdG8gcnVuIGF0IG1vbmdvbGFicyBvciBtb25nb2hxCiMgdGhpcyBmdW5jdGlvbnMgdXNlcyBnbG9iYWwgdmFycyB0byBjb21tdW5pY2F0ZS4gZm9yZ2l2ZSBtZS4KZGVmIGFyZ19wYXJzaW5nKGFyZ3YpOgoKICAgIGdsb2JhbCB3ZWJob3N0CiAgICBnbG9iYWwgbW9uZ29zdHIKICAgIGdsb2JhbCBkYl9uYW1lCgogICAgdHJ5OgogICAgICAgIG9wdHMsIGFyZ3MgPSBnZXRvcHQuZ2V0b3B0KGFyZ3YsICItcDotbTotZDoiKQogICAgZXhjZXB0IGdldG9wdC5HZXRvcHRFcnJvcjoKICAgICAgICBwcmludCgidXNhZ2UgdmFsaWRhdGUucHkgLW0gbW9uZ29Db25uZWN0U3RyaW5nIikKICAgICAgICBwcmludCgiXHRtb25nb0Nvbm5lY3Rpb25TdHJpbmcgZGVmYXVsdCB0byB7MH0iLmZvcm1hdChtb25nb3N0cikpCiAgICAgICAgcHJpbnQoIlx0ZGF0YWJhc2VOYW1lIGRlZmF1bHRzIHRvIHswfSIuZm9ybWF0KGRiX25hbWUpKQogICAgICAgIHN5cy5leGl0KDIpCiAgICBmb3Igb3B0LCBhcmcgaW4gb3B0czoKICAgICAgICBpZiAob3B0ID09ICctaCcpOgogICAgICAgICAgICBwcmludCgidXNhZ2UgdmFsaWRhdGUucHkgLW0gbW9uZ29Db25uZWN0U3RyaW5nIC1kIGRhdGFiYXNlTmFtZSIpCiAgICAgICAgICAgIHN5cy5leGl0KDIpCiAgICAgICAgZWxpZiBvcHQgaW4gKCItbSIpOgogICAgICAgICAgICBtb25nb3N0ciA9IGFyZwogICAgICAgICAgICBwcmludCgiT3ZlcnJpZGluZyBNb25nb0RCIGNvbm5lY3Rpb24gc3RyaW5nIHRvIGJlICIsIG1vbmdvc3RyKQogICAgICAgIGVsaWYgb3B0IGluICgiLWQiKToKICAgICAgICAgICAgZGJfbmFtZSA9IGFyZwogICAgICAgICAgICBwcmludCgiT3ZlcnJpZGluZyBNb25nb0RCIGRhdGFiYXNlIHRvIGJlICIsIGRiX25hbWUpCgojIGdldHMgdGhlIHJlcGxpY2Egc2V0IHN0YXR1cwpkZWYgZ2V0X3JzX3N0YXR1cygpOgogICAgZGIgPSBjb25uZWN0aW9uLmFkbWluCiAgICBycyA9IGRiLmNvbW1hbmQoInJlcGxTZXRHZXRTdGF0dXMiKQogICAgcmV0dXJuIHJzCgojIGdldHMgdGhlIHJlcGxpY2Egc3RhdGUgY29uZmlnCmRlZiBnZXRfcnNfY29uZmlndXJhdGlvbigpOgogICAgZGIgPSBjb25uZWN0aW9uLmxvY2FsCiAgICBjb2xsID0gZGIuc3lzdGVtLnJlcGxzZXQKICAgIHJldHVybiBjb2xsLmZpbmRfb25lKCkKCmRlZiByZXBsX3NldF9ydW5uaW5nKG51bV9ub2Rlcyk6CgogICAgdHJ5OgogICAgICAgIHJzID0gZ2V0X3JzX3N0YXR1cygpCiAgICAgICAgY29uZiA9IGdldF9yc19jb25maWd1cmF0aW9uKCkKICAgICAgICBob3N0cyAgPSBjb25uZWN0aW9uLmhvc3RzCiAgICBleGNlcHQ6CiAgICAgICAgcHJpbnQoImNhbid0IHF1ZXJ5IE1vbmdvREIuLmlzIGl0IHJ1bm5pbmc/IikKICAgICAgICByYWlzZQogICAgICAgIHJldHVybiBGYWxzZQoKICAgIGlmIChyc1snb2snXSAhPSAxKToKICAgICAgICBwcmludCgiU29ycnksIG9rIGlzIG5vdCAxIGZvciBycy5zdGF0dXMoKSIpCiAgICAgICAgcHJpbnQoIkhlcmUgaXMgd2hhdCBJIGdldDoiKQogICAgICAgIHBwID0gcHByaW50LlByZXR0eVByaW50ZXIoZGVwdGg9NikKICAgICAgICBwcC5wcHJpbnQocnMpCiAgICAgICAgcmV0dXJuIEZhbHNlCgogICAgaWYgKGxlbihyc1snbWVtYmVycyddKSAhPSBudW1fbm9kZXMpOgogICAgICAgIHByaW50KCJTb3JyeSwgdGhlcmUgbmVlZCB0byBiZSB0aHJlZSBtZW1iZXJzIG9mIHRoZSByZXBsaWNhIHNldC4iKQogICAgICAgIHByaW50KCJoZXJlIGlzIHRoZSBtZW1iZXJzIGFycmF5IEkgc2VlIikKCiAgICAgICAgcHAgPSBwcHJpbnQuUHJldHR5UHJpbnRlcihkZXB0aD02KQogICAgICAgIHBwLnBwcmludChyc1snbWVtYmVycyddKQogICAgICAgIHJldHVybiBGYWxzZQogICAgCiAgICBwcmludCgiTG9va3MgZ29vZC4gUmVwbGljYSBzZXQgd2l0aCB0aHJlZSBub2RlcyBydW5uaW5nIikKICAgIHJldHVybiBUcnVlCgpkZWYgZ3JhY2VmdWxfZXhpdChpKToKICAgIGNvbm5lY3Rpb24uY2xvc2UoKQogICAgc3lzLmV4aXQoaSkKCiMgbWFpbiBzZWN0aW9uIG9mIHRoZSBjb2RlCmRlZiBtYWluKGFyZ3YpOgogICAgICAgICAgICAKICAgIGFyZ19wYXJzaW5nKGFyZ3YpCiAgICBnbG9iYWwgY29ubmVjdGlvbgogICAgZ2xvYmFsIGRiCgogICAgcHJpbnQoIldlbGNv
bWUgdG8gdGhlIEhXIDYueCByZXBsaWNhIENoZWNrZXIuIE15IGpvYiBpcyB0byBtYWtlIHN1cmUgeW91IHN0YXJ0ZWQgYSByZXBsaWNhIHNldCB3aXRoIHRocmVlIG5vZGVzIikKCiAgICAjIGNvbm5lY3QgdG8gdGhlIGRiIChtb25nb3N0ciB3YXMgc2V0IGluIGFyZ19wYXJzaW5nKQogICAgdHJ5OgogICAgICAgIGNvbm5lY3Rpb24gPSBweW1vbmdvLk1vbmdvQ2xpZW50KG1vbmdvc3RyLCByZXBsaWNhU2V0PXJzX25hbWUpCiAgICAgICAgZGIgPSBjb25uZWN0aW9uW2RiX25hbWVdCiAgICBleGNlcHQ6CiAgICAgICAgcHJpbnQoImNhbid0IGNvbm5lY3QgdG8gTW9uZ29EQiByZXBsaWNhIixyc19uYW1lLCIgc2V0IHVzaW5nIiwgbW9uZ29zdHIsICIuIElzIGl0IHJ1bm5pbmc/IikKICAgICAgICBleGl0KDIpICAgICAjIG5vIGdyYWNlZnVsIGV4aXQgaWYgaXQgaXMgbm90IGNvbm5lY3RlZAogICAgICAgIAogICAgaWYgKG5vdCByZXBsX3NldF9ydW5uaW5nKDMpKToKICAgICAgICBwcmludCgiU29ycnksIHRoZSByZXBsaWNhIHNldCBkb2VzIG5vdCBzZWVtIHRvIGJlIHJ1bm5pbmciKQogICAgICAgIGdyYWNlZnVsX2V4aXQoMSkKICAgIAogICAgIyBpZiB5b3UgYXJlIHJlYWRpbmcgdGhpcyBpbiBjbGVhcnRleHQsIHlvdSBhcmUgdmlvbGF0aW5nIHRoZSBob25vciBjb2RlLgogICAgIyBZb3UgY2FuIHN0aWxsIHJlZGVlbSB5b3Vyc2VsZi4gR2V0IGl0IHdvcmtpbmcgYW5kIGRvbid0IHN1Ym1pdCB0aGUgdmFsaWRhdGlvbiBjb2RlIHVudGlsIHlvdSBkby4KICAgICMgQWxsIGEgbWFuIGhhcyBhdCB0aGUgZW5kIG9mIHRoZSBkYXkgaXMgaGlzIHdvcmQuCiAgICBwcmludCgiVGVzdHMgUGFzc2VkIGZvciBIVyA2LjUuIFlvdXIgSFcgNi41IHZhbGlkYXRpb24gY29kZSBpcyBranZqa2wzMjkwbWYwbTIwZjJramp2IikKICAgIGdyYWNlZnVsX2V4aXQoMCkKCmlmIF9fbmFtZV9fID09ICJfX21haW5fXyI6CiAgICBtYWluKHN5cy5hcmd2WzE6XSkKCgoKCgoKCgoK'
eval(compile(base64.b64decode(code), "<string>", 'exec'))
| [
0,
1,
2,
3,
4
] |
1,909 | fee2ddca5888c9db00d2d7a4fe11ba20c4e31685 | <mask token>
def enter():
global Start_menu
Start_menu = Menu()
menu_world.add_object(Start_menu, 0)
<mask token>
def handle_events():
global Start_menu, menu_time
events = get_events()
for event in events:
if event.type == SDL_QUIT:
game_framework.quit()
elif event.type == SDL_KEYDOWN and event.key == SDLK_ESCAPE:
game_framework.quit()
elif Start_menu.start == 1:
menu_time = get_time()
game_framework.change_state(game_state)
else:
Start_menu.handle_event(event)
def update():
for game_object in menu_world.all_objects():
game_object.update()
def draw():
clear_canvas()
for game_object in menu_world.all_objects():
game_object.draw()
update_canvas()
| <mask token>
def enter():
global Start_menu
Start_menu = Menu()
menu_world.add_object(Start_menu, 0)
<mask token>
def pause():
pass
def resume():
pass
def handle_events():
global Start_menu, menu_time
events = get_events()
for event in events:
if event.type == SDL_QUIT:
game_framework.quit()
elif event.type == SDL_KEYDOWN and event.key == SDLK_ESCAPE:
game_framework.quit()
elif Start_menu.start == 1:
menu_time = get_time()
game_framework.change_state(game_state)
else:
Start_menu.handle_event(event)
def update():
for game_object in menu_world.all_objects():
game_object.update()
def draw():
clear_canvas()
for game_object in menu_world.all_objects():
game_object.draw()
update_canvas()
| <mask token>
def enter():
global Start_menu
Start_menu = Menu()
menu_world.add_object(Start_menu, 0)
def exit():
menu_world.clear()
def pause():
pass
def resume():
pass
def handle_events():
global Start_menu, menu_time
events = get_events()
for event in events:
if event.type == SDL_QUIT:
game_framework.quit()
elif event.type == SDL_KEYDOWN and event.key == SDLK_ESCAPE:
game_framework.quit()
elif Start_menu.start == 1:
menu_time = get_time()
game_framework.change_state(game_state)
else:
Start_menu.handle_event(event)
def update():
for game_object in menu_world.all_objects():
game_object.update()
def draw():
clear_canvas()
for game_object in menu_world.all_objects():
game_object.draw()
update_canvas()
| <mask token>
name = 'MenuState'
boy = None
Start_menu = None
menu_time = None
def enter():
global Start_menu
Start_menu = Menu()
menu_world.add_object(Start_menu, 0)
def exit():
menu_world.clear()
def pause():
pass
def resume():
pass
def handle_events():
global Start_menu, menu_time
events = get_events()
for event in events:
if event.type == SDL_QUIT:
game_framework.quit()
elif event.type == SDL_KEYDOWN and event.key == SDLK_ESCAPE:
game_framework.quit()
elif Start_menu.start == 1:
menu_time = get_time()
game_framework.change_state(game_state)
else:
Start_menu.handle_event(event)
def update():
for game_object in menu_world.all_objects():
game_object.update()
def draw():
clear_canvas()
for game_object in menu_world.all_objects():
game_object.draw()
update_canvas()
| import random
import json
import os
from pico2d import *
import game_framework
import game_world
import menu_world
import game_state
from Start_menu import Menu
name = "MenuState"
boy = None
Start_menu = None
menu_time = None
def enter():
global Start_menu
Start_menu = Menu()
menu_world.add_object(Start_menu, 0)
def exit():
menu_world.clear()
def pause():
pass
def resume():
pass
def handle_events():
global Start_menu,menu_time
events = get_events()
for event in events:
if event.type == SDL_QUIT:
game_framework.quit()
elif event.type == SDL_KEYDOWN and event.key == SDLK_ESCAPE:
game_framework.quit()
elif Start_menu.start ==1:
menu_time =get_time()
game_framework.change_state(game_state)
#game_framework.quit()
else:
Start_menu.handle_event(event)
def update():
for game_object in menu_world.all_objects():
game_object.update()
def draw():
clear_canvas()
for game_object in menu_world.all_objects():
game_object.draw()
update_canvas()
| [
4,
6,
7,
8,
10
] |
1,910 | 8ebf031cb294c69bf744d543b18783d6ac5ef257 | <mask token>
def bellman_ford(graph, start):
distance = {}
predecessor = {}
for node in graph:
distance[node] = INF
predecessor[node] = None
distance[start] = 0
for _ in range(len(graph) - 1):
for node in graph:
for neigbor in graph[node]:
if distance[neigbor] > distance[node] + graph[node][neigbor]:
distance[neigbor] = distance[node] + graph[node][neigbor]
predecessor[neigbor] = node
for node in graph:
for neigbor in graph[node]:
if distance[neigbor] > distance[node] + graph[node][neigbor]:
                return -1, 'The graph contains a negative cycle.'
    return distance, predecessor
<mask token>
| <mask token>
def bellman_ford(graph, start):
distance = {}
predecessor = {}
for node in graph:
distance[node] = INF
predecessor[node] = None
distance[start] = 0
for _ in range(len(graph) - 1):
for node in graph:
for neigbor in graph[node]:
if distance[neigbor] > distance[node] + graph[node][neigbor]:
distance[neigbor] = distance[node] + graph[node][neigbor]
predecessor[neigbor] = node
for node in graph:
for neigbor in graph[node]:
if distance[neigbor] > distance[node] + graph[node][neigbor]:
                return -1, 'The graph contains a negative cycle.'
    return distance, predecessor
<mask token>
print(distance)
print(predecessor)
<mask token>
print(distance)
print(predecessor)
| <mask token>
INF = sys.maxsize
def bellman_ford(graph, start):
distance = {}
predecessor = {}
for node in graph:
distance[node] = INF
predecessor[node] = None
distance[start] = 0
for _ in range(len(graph) - 1):
for node in graph:
for neigbor in graph[node]:
if distance[neigbor] > distance[node] + graph[node][neigbor]:
distance[neigbor] = distance[node] + graph[node][neigbor]
predecessor[neigbor] = node
for node in graph:
for neigbor in graph[node]:
if distance[neigbor] > distance[node] + graph[node][neigbor]:
                return -1, 'The graph contains a negative cycle.'
    return distance, predecessor
graph = {'A': {'B': -1, 'C': 4}, 'B': {'C': 3, 'D': 2, 'E': 2}, 'C': {},
'D': {'B': 1, 'C': 5}, 'E': {'D': -3}}
distance, predecessor = bellman_ford(graph, start='A')
print(distance)
print(predecessor)
graph = {'A': {'B': -1, 'C': 4}, 'B': {'C': 3, 'D': 2, 'E': 2}, 'C': {'A':
-5}, 'D': {'B': 1, 'C': 5}, 'E': {'D': -3}}
distance, predecessor = bellman_ford(graph, start='A')
print(distance)
print(predecessor)
| import sys
INF = sys.maxsize
def bellman_ford(graph, start):
distance = {}
predecessor = {}
for node in graph:
distance[node] = INF
predecessor[node] = None
distance[start] = 0
for _ in range(len(graph) - 1):
for node in graph:
for neigbor in graph[node]:
if distance[neigbor] > distance[node] + graph[node][neigbor]:
distance[neigbor] = distance[node] + graph[node][neigbor]
predecessor[neigbor] = node
for node in graph:
for neigbor in graph[node]:
if distance[neigbor] > distance[node] + graph[node][neigbor]:
                return -1, 'The graph contains a negative cycle.'
    return distance, predecessor
graph = {'A': {'B': -1, 'C': 4}, 'B': {'C': 3, 'D': 2, 'E': 2}, 'C': {},
'D': {'B': 1, 'C': 5}, 'E': {'D': -3}}
distance, predecessor = bellman_ford(graph, start='A')
print(distance)
print(predecessor)
graph = {'A': {'B': -1, 'C': 4}, 'B': {'C': 3, 'D': 2, 'E': 2}, 'C': {'A':
-5}, 'D': {'B': 1, 'C': 5}, 'E': {'D': -3}}
distance, predecessor = bellman_ford(graph, start='A')
print(distance)
print(predecessor)
| import sys
INF = sys.maxsize
def bellman_ford(graph,start):
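    # Bellman-Ford: single-source shortest paths that tolerates negative edge weights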
distance = {}
predecessor = {}
    # Initialize distance values and predecessors
for node in graph:
distance[node] = INF
predecessor[node] = None
distance[start] = 0
    # Repeat V-1 times
for _ in range(len(graph)-1):
for node in graph:
for neigbor in graph[node]:
if distance[neigbor] > distance[node] + graph[node][neigbor]:
distance[neigbor] = distance[node] + graph[node][neigbor]
predecessor[neigbor] = node
    # Check for a negative cycle: relax once more; if any distance still
    # improves after the V-1 passes, a negative cycle exists
    for node in graph:
        for neigbor in graph[node]:
            if distance[neigbor] > distance[node] + graph[node][neigbor]:
                return -1, "The graph contains a negative cycle."
    return distance, predecessor
# A graph without a negative cycle
graph = {
'A': {'B': -1, 'C': 4},
'B': {'C': 3, 'D': 2, 'E': 2},
'C': {},
'D': {'B': 1, 'C': 5},
'E': {'D': -3}
}
# Pass the graph and the start vertex
distance, predecessor = bellman_ford(graph, start='A')
print(distance)
print(predecessor)
# A graph with a negative cycle
graph = {
'A': {'B': -1, 'C': 4},
'B': {'C': 3, 'D': 2, 'E': 2},
'C': {'A': -5},
'D': {'B': 1, 'C': 5},
'E': {'D': -3}
}
distance, predecessor = bellman_ford(graph, start='A')
print(distance)
print(predecessor) | [
1,
2,
3,
4,
5
] |
1,911 | cda7595e46528739cad49a5d62a80bc7b2087157 | <mask token>
class Point:
def __init__(self, x: int, y: int):
self.x = x
self.y = y
def create_point(self):
point = [self.x, self.y]
return point
@staticmethod
def calculate_distance(point_1: [], point_2: []):
side_a = abs(point_1.x - point_2.x)
side_b = abs(point_1.y - point_2.y)
side_c = math.sqrt(side_a ** 2 + side_b ** 2)
return side_c
<mask token>
| <mask token>
class Point:
def __init__(self, x: int, y: int):
self.x = x
self.y = y
def create_point(self):
point = [self.x, self.y]
return point
@staticmethod
def calculate_distance(point_1: [], point_2: []):
side_a = abs(point_1.x - point_2.x)
side_b = abs(point_1.y - point_2.y)
side_c = math.sqrt(side_a ** 2 + side_b ** 2)
return side_c
<mask token>
while n > 0:
n -= 1
a, b = [int(x) for x in input().split()]
point = Point(a, b).create_point()
total_points.append(point)
<mask token>
for index_1 in range(len(total_points)):
for index_2 in range(len(total_points)):
if index_1 != index_2:
segment = Point(total_points[index_1][0], total_points[index_2][0])
segment_list.append(segment)
| <mask token>
class Point:
def __init__(self, x: int, y: int):
self.x = x
self.y = y
def create_point(self):
point = [self.x, self.y]
return point
@staticmethod
def calculate_distance(point_1: [], point_2: []):
side_a = abs(point_1.x - point_2.x)
side_b = abs(point_1.y - point_2.y)
side_c = math.sqrt(side_a ** 2 + side_b ** 2)
return side_c
n = int(input())
total_points = []
while n > 0:
n -= 1
a, b = [int(x) for x in input().split()]
point = Point(a, b).create_point()
total_points.append(point)
segment_list = []
for index_1 in range(len(total_points)):
for index_2 in range(len(total_points)):
if index_1 != index_2:
segment = Point(total_points[index_1][0], total_points[index_2][0])
segment_list.append(segment)
| import math
class Point:
def __init__(self, x: int, y: int):
self.x = x
self.y = y
def create_point(self):
point = [self.x, self.y]
return point
@staticmethod
def calculate_distance(point_1: [], point_2: []):
side_a = abs(point_1.x - point_2.x)
side_b = abs(point_1.y - point_2.y)
side_c = math.sqrt(side_a ** 2 + side_b ** 2)
return side_c
n = int(input())
total_points = []
while n > 0:
n -= 1
a, b = [int(x) for x in input().split()]
point = Point(a, b).create_point()
total_points.append(point)
segment_list = []
for index_1 in range(len(total_points)):
for index_2 in range(len(total_points)):
if index_1 != index_2:
segment = Point(total_points[index_1][0], total_points[index_2][0])
segment_list.append(segment)
| null | [
4,
5,
6,
7
] |
1,912 | 1a78d9e0807824263fd46547d5b75c61610456d4 | <mask token>
| <mask token>
while True:
det, frame_with_color = camera.read()
frame = cv2.cvtColor(frame_with_color, cv2.COLOR_BGR2GRAY)
frame_kp, frame_desc = sift.detectAndCompute(frame, None)
matches = bf.knnMatch(frame_desc, train_desc, k=2)
good = []
for m, n in matches:
if m.distance < 0.75 * n.distance:
good.append(m)
if len(good) > LOWEST_MATCHES_NUMBER:
train_points = []
frame_points = []
for m in good:
train_points.append(train_kp[m.trainIdx].pt)
frame_points.append(frame_kp[m.queryIdx].pt)
train_points, frame_points = np.float32((train_points, frame_points))
H, status = cv2.findHomography(train_points, frame_points, cv2.
RANSAC, 3.0)
h, w = train_img.shape
trainBorder = np.float32([[[0, 0], [0, h - 1], [w - 1, h - 1], [w -
1, 0]]])
queryBorder = cv2.perspectiveTransform(trainBorder, H)
cv2.polylines(frame_with_color, [np.int32(queryBorder)], True, (0,
0, 255), 5)
else:
print('FOUND LOW MATCHES NUMBER {} / {}'.format(len(good),
LOWEST_MATCHES_NUMBER))
cv2.imshow('result', frame_with_color)
if cv2.waitKey(5) == ord('q'):
break
camera.release()
cv2.destroyAllWindows()
| <mask token>
LOWEST_MATCHES_NUMBER = 30
sift = cv2.xfeatures2d.SIFT_create()
bf = cv2.BFMatcher()
train_img = cv2.imread('Photo/demo2.jpg', 0)
train_kp, train_desc = sift.detectAndCompute(train_img, None)
camera = cv2.VideoCapture(0)
while True:
det, frame_with_color = camera.read()
frame = cv2.cvtColor(frame_with_color, cv2.COLOR_BGR2GRAY)
frame_kp, frame_desc = sift.detectAndCompute(frame, None)
matches = bf.knnMatch(frame_desc, train_desc, k=2)
good = []
for m, n in matches:
if m.distance < 0.75 * n.distance:
good.append(m)
if len(good) > LOWEST_MATCHES_NUMBER:
train_points = []
frame_points = []
for m in good:
train_points.append(train_kp[m.trainIdx].pt)
frame_points.append(frame_kp[m.queryIdx].pt)
train_points, frame_points = np.float32((train_points, frame_points))
H, status = cv2.findHomography(train_points, frame_points, cv2.
RANSAC, 3.0)
h, w = train_img.shape
trainBorder = np.float32([[[0, 0], [0, h - 1], [w - 1, h - 1], [w -
1, 0]]])
queryBorder = cv2.perspectiveTransform(trainBorder, H)
cv2.polylines(frame_with_color, [np.int32(queryBorder)], True, (0,
0, 255), 5)
else:
print('FOUND LOW MATCHES NUMBER {} / {}'.format(len(good),
LOWEST_MATCHES_NUMBER))
cv2.imshow('result', frame_with_color)
if cv2.waitKey(5) == ord('q'):
break
camera.release()
cv2.destroyAllWindows()
| import cv2
import numpy as np
LOWEST_MATCHES_NUMBER = 30
sift = cv2.xfeatures2d.SIFT_create()
bf = cv2.BFMatcher()
train_img = cv2.imread('Photo/demo2.jpg', 0)
train_kp, train_desc = sift.detectAndCompute(train_img, None)
camera = cv2.VideoCapture(0)
while True:
det, frame_with_color = camera.read()
frame = cv2.cvtColor(frame_with_color, cv2.COLOR_BGR2GRAY)
frame_kp, frame_desc = sift.detectAndCompute(frame, None)
matches = bf.knnMatch(frame_desc, train_desc, k=2)
good = []
for m, n in matches:
if m.distance < 0.75 * n.distance:
good.append(m)
if len(good) > LOWEST_MATCHES_NUMBER:
train_points = []
frame_points = []
for m in good:
train_points.append(train_kp[m.trainIdx].pt)
frame_points.append(frame_kp[m.queryIdx].pt)
train_points, frame_points = np.float32((train_points, frame_points))
H, status = cv2.findHomography(train_points, frame_points, cv2.
RANSAC, 3.0)
h, w = train_img.shape
trainBorder = np.float32([[[0, 0], [0, h - 1], [w - 1, h - 1], [w -
1, 0]]])
queryBorder = cv2.perspectiveTransform(trainBorder, H)
cv2.polylines(frame_with_color, [np.int32(queryBorder)], True, (0,
0, 255), 5)
else:
print('FOUND LOW MATCHES NUMBER {} / {}'.format(len(good),
LOWEST_MATCHES_NUMBER))
cv2.imshow('result', frame_with_color)
if cv2.waitKey(5) == ord('q'):
break
camera.release()
cv2.destroyAllWindows()
| import cv2
import numpy as np
LOWEST_MATCHES_NUMBER = 30
sift = cv2.xfeatures2d.SIFT_create()
bf = cv2.BFMatcher();
train_img = cv2.imread('Photo/demo2.jpg', 0)
train_kp, train_desc = sift.detectAndCompute(train_img, None);
camera = cv2.VideoCapture(0);
while (True):
det, frame_with_color = camera.read();
frame = cv2.cvtColor(frame_with_color,cv2.COLOR_BGR2GRAY)
frame_kp, frame_desc = sift.detectAndCompute(frame,None)
matches=bf.knnMatch(frame_desc,train_desc,k=2)
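    # Lowe's ratio test: keep a match only when it is clearly closer than the second-best candidate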
good = []
for m,n in matches:
if(m.distance < 0.75*n.distance):
good.append(m)
if(len(good)> LOWEST_MATCHES_NUMBER):
train_points = []
frame_points = []
for m in good:
train_points.append(train_kp[m.trainIdx].pt)
frame_points.append(frame_kp[m.queryIdx].pt)
train_points, frame_points=np.float32((train_points,frame_points))
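        # Estimate a homography with RANSAC and project the training image border into the frame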
H,status=cv2.findHomography(train_points,frame_points,cv2.RANSAC,3.0)
h,w=train_img.shape
trainBorder=np.float32([[[0,0],[0,h-1],[w-1,h-1],[w-1,0]]])
queryBorder=cv2.perspectiveTransform(trainBorder,H)
cv2.polylines(frame_with_color,[np.int32(queryBorder)],True,(0,0,255),5)
else:
print('FOUND LOW MATCHES NUMBER {} / {}'.format(len(good), LOWEST_MATCHES_NUMBER))
cv2.imshow('result',frame_with_color)
if cv2.waitKey(5)==ord('q'):
break
camera.release()
cv2.destroyAllWindows()
| [
0,
1,
2,
3,
4
] |
1,913 | 88f5aa56eca6b61ba2b428bff0efdf4ec7f5f5d9 | <mask token>
| <mask token>
setup(name='testcov-plugin', version='1.0', packages=['testcov'],
namespace_packages=['testcov'], entry_points={'plugins': [
'testp = testcov.plugin:testp']}, description='Test for coverage bug')
| import io
import os
from setuptools import setup
setup(name='testcov-plugin', version='1.0', packages=['testcov'],
namespace_packages=['testcov'], entry_points={'plugins': [
'testp = testcov.plugin:testp']}, description='Test for coverage bug')
| import io
import os
from setuptools import setup
setup(name='testcov-plugin',
version='1.0',
packages=['testcov'],
namespace_packages=['testcov'],
entry_points={
'plugins': ['testp = testcov.plugin:testp'],
},
description="Test for coverage bug")
| null | [
0,
1,
2,
3
] |
1,914 | bcab83e0ae6ee4925393b50bdefdfeb85c42ad2c | <mask token>
class SENDMAIL(object):
<mask token>
<mask token>
<mask token>
| <mask token>
class SENDMAIL(object):
def __init__(self):
self.smtpserver = 'smtp.qq.com'
self.username = '[email protected]'
self.password = 'xxxxxxxxxxxxxxxx'
self.sender = '[email protected]'
def sendmail(self, receiver, lch, type, cfsj):
        subject = '[ Ticket Alert Notification ]'
receiver = ['%s' % receiver]
msg = MIMEMultipart('mixed')
msg['Subject'] = subject
msg['From'] = 'Ncoreqp-Server <[email protected]>'
msg['To'] = ';'.join(receiver)
text = (
"""Hi!
        Extremely urgent - a scout reports!
        Right now, train %s has tickets available for %s! - Departure time: [ %s ]
        Go pay for your ticket on the 12306 site!! Quick link: https://www.12306.cn/index/
        http://www.northcorezh.com
        NorthCore (北芯众合), changing life!
"""
% (lch, type, cfsj))
text_plain = MIMEText(text, 'plain', 'utf-8')
msg.attach(text_plain)
smtp = smtplib.SMTP()
smtp.connect('smtp.qq.com')
smtp.login(self.username, self.password)
smtp.sendmail(self.sender, receiver, msg.as_string())
smtp.quit()
        print('Email sent successfully !!!')
<mask token>
| <mask token>
class SENDMAIL(object):
def __init__(self):
self.smtpserver = 'smtp.qq.com'
self.username = '[email protected]'
self.password = 'xxxxxxxxxxxxxxxx'
self.sender = '[email protected]'
def sendmail(self, receiver, lch, type, cfsj):
        subject = '[ Ticket Alert Notification ]'
receiver = ['%s' % receiver]
msg = MIMEMultipart('mixed')
msg['Subject'] = subject
msg['From'] = 'Ncoreqp-Server <[email protected]>'
msg['To'] = ';'.join(receiver)
text = (
"""Hi!
        Extremely urgent - a scout reports!
        Right now, train %s has tickets available for %s! - Departure time: [ %s ]
        Go pay for your ticket on the 12306 site!! Quick link: https://www.12306.cn/index/
        http://www.northcorezh.com
        NorthCore (北芯众合), changing life!
"""
% (lch, type, cfsj))
text_plain = MIMEText(text, 'plain', 'utf-8')
msg.attach(text_plain)
smtp = smtplib.SMTP()
smtp.connect('smtp.qq.com')
smtp.login(self.username, self.password)
smtp.sendmail(self.sender, receiver, msg.as_string())
smtp.quit()
        print('Email sent successfully !!!')
def send_email_by_smtp(self):
sender_email_address = '[email protected]'
sender_email_password = 'xxxxxxxxxxxxxxxxxx'
smtp_server_host = 'smtp.qq.com'
smtp_server_port = 465
receiver_email = '[email protected]'
        message_subject = 'Python smtp test email'
        message_context = 'This is a test email sent via Python smtp...'
message = MIMEText(message_context, 'plain', 'utf-8')
message['From'] = Header(sender_email_address, 'utf-8')
message['To'] = Header(receiver_email, 'utf-8')
message['Subject'] = Header(message_subject, 'utf-8')
email_client = smtplib.SMTP_SSL(smtp_server_host, smtp_server_port)
try:
email_client.login(sender_email_address, sender_email_password)
            print(f'smtp----login success, now will send an email to {receiver_email}')
except Exception:
print(
'smtp----sorry, username or password not correct or another problem occur'
)
else:
email_client.sendmail(sender_email_address, receiver_email,
message.as_string())
print(f'smtp----send email to {receiver_email} finish')
finally:
email_client.close()
| import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.header import Header
class SENDMAIL(object):
def __init__(self):
self.smtpserver = 'smtp.qq.com'
self.username = '[email protected]'
self.password = 'xxxxxxxxxxxxxxxx'
self.sender = '[email protected]'
def sendmail(self, receiver, lch, type, cfsj):
        subject = '[ Ticket Alert Notification ]'
receiver = ['%s' % receiver]
msg = MIMEMultipart('mixed')
msg['Subject'] = subject
msg['From'] = 'Ncoreqp-Server <[email protected]>'
msg['To'] = ';'.join(receiver)
text = (
"""Hi!
        Extremely urgent - a scout reports!
        Right now, train %s has tickets available for %s! - Departure time: [ %s ]
        Go pay for your ticket on the 12306 site!! Quick link: https://www.12306.cn/index/
        http://www.northcorezh.com
        NorthCore (北芯众合), changing life!
"""
% (lch, type, cfsj))
text_plain = MIMEText(text, 'plain', 'utf-8')
msg.attach(text_plain)
smtp = smtplib.SMTP()
smtp.connect('smtp.qq.com')
smtp.login(self.username, self.password)
smtp.sendmail(self.sender, receiver, msg.as_string())
smtp.quit()
        print('Email sent successfully !!!')
def send_email_by_smtp(self):
sender_email_address = '[email protected]'
sender_email_password = 'xxxxxxxxxxxxxxxxxx'
smtp_server_host = 'smtp.qq.com'
smtp_server_port = 465
receiver_email = '[email protected]'
        message_subject = 'Python smtp test email'
        message_context = 'This is a test email sent via Python smtp...'
message = MIMEText(message_context, 'plain', 'utf-8')
message['From'] = Header(sender_email_address, 'utf-8')
message['To'] = Header(receiver_email, 'utf-8')
message['Subject'] = Header(message_subject, 'utf-8')
email_client = smtplib.SMTP_SSL(smtp_server_host, smtp_server_port)
try:
email_client.login(sender_email_address, sender_email_password)
            print(f'smtp----login success, now will send an email to {receiver_email}')
except Exception:
print(
'smtp----sorry, username or password not correct or another problem occur'
)
else:
email_client.sendmail(sender_email_address, receiver_email,
message.as_string())
print(f'smtp----send email to {receiver_email} finish')
finally:
email_client.close()
| import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.header import Header
class SENDMAIL(object):
def __init__(self):
self.smtpserver = 'smtp.qq.com'
        self.username = '[email protected]' # e.g. a QQ mailbox
        self.password = 'xxxxxxxxxxxxxxxx' # generated authorization code
self.sender = '[email protected]'
def sendmail(self, receiver, lch, type, cfsj):
        subject = '[ Ticket Alert Notification ]'
receiver = ['%s' % receiver]
msg = MIMEMultipart('mixed')
msg['Subject'] = subject
msg['From'] = 'Ncoreqp-Server <[email protected]>'
msg['To'] = ";".join(receiver)
        # Build the text content
text = """Hi!\n
        Extremely urgent - a scout reports! \n
        Right now, train %s has tickets available for %s! - Departure time: [ %s ]
        Go pay for your ticket on the 12306 site!! Quick link: https://www.12306.cn/index/\n
        http://www.northcorezh.com\n
        NorthCore (北芯众合), changing life!
""" % (lch, type, cfsj)
text_plain = MIMEText(text, 'plain', 'utf-8')
msg.attach(text_plain)
        # Send the email
smtp = smtplib.SMTP()
smtp.connect('smtp.qq.com')
        # set_debuglevel(1) prints every exchange with the SMTP server
# smtp.set_debuglevel(1)
smtp.login(self.username, self.password)
smtp.sendmail(self.sender, receiver, msg.as_string())
smtp.quit()
        print('Email sent successfully !!!')
def send_email_by_smtp(self):
        # Mailbox used to send the email. Change to your own mailbox
        sender_email_address = "[email protected]"
        # Password of the sending mailbox. Change to your own mailbox's password
        sender_email_password = "xxxxxxxxxxxxxxxxxx"
        # The smtp server of the sending mailbox; an IP address also works
        # Change to your own mailbox's smtp server address; no change needed for QQ mail
        smtp_server_host = "smtp.qq.com"
        # Change to the port your mailbox's smtp server listens on; no change needed for QQ mail
        smtp_server_port = 465
        # The destination mailbox
        receiver_email = "[email protected]"
        # Subject of the email to send
        message_subject = "Python smtp test email"
        # Body of the email to send
        message_context = "This is a test email sent via Python smtp..."
        # Message object used to build the email
message = MIMEText(message_context, 'plain', 'utf-8')
        # Set the (claimed) sender
        message["From"] = Header(sender_email_address, "utf-8")
        # Set the (claimed) recipient
        message["To"] = Header(receiver_email, "utf-8")
        # Set the email subject
        message["Subject"] = Header(message_subject, "utf-8")
        # Connect to the smtp server. If you are not using SSL, just change SMTP_SSL() to SMTP(); nothing else needs to change
email_client = smtplib.SMTP_SSL(smtp_server_host, smtp_server_port)
try:
            # Verify that the mailbox and password are correct
            email_client.login(sender_email_address, sender_email_password)
            print(f"smtp----login success, now will send an email to {receiver_email}")
except Exception:
print("smtp----sorry, username or password not correct or another problem occur")
else:
            # Send the email
email_client.sendmail(sender_email_address, receiver_email, message.as_string())
print(f"smtp----send email to {receiver_email} finish")
finally:
            # Close the connection
email_client.close()
| [
1,
3,
4,
5,
6
] |
1,915 | 393af07fa7a5c265dbdd3047ef33a77130edf259 | <mask token>
def inicio():
global P, M, G, en
    B1 = Button(ventana, text='SMALL BOXES', command=A, state='normal',
        bg='yellow').grid(column=1, row=1)
    B2 = Button(ventana, text='MEDIUM BOXES', command=B, state='normal',
        bg='orange').grid(column=2, row=1)
    B3 = Button(ventana, text='LARGE BOXES', command=C, state='normal',
        bg='red').grid(column=3, row=1)
    B4 = Button(ventana, text='TOTAL BOXES', command=D, state='normal',
        bg='green').grid(column=4, row=1)
def A():
global P
P = P + 1
def B():
global M
M = M + 1
def C():
global G
G = G + 1
def D():
global P, M, G
    l = Label(ventana, text='The total of SMALL BOXES is:' + str(P)).grid(
        column=0, row=2)
    l = Label(ventana, text='The total of MEDIUM BOXES is:' + str(M)).grid(
        column=0, row=3)
    l = Label(ventana, text='The total of LARGE BOXES is:' + str(G)).grid(
        column=0, row=4)
    l = Label(ventana, text='THE TOTAL OF COUNTED BOXES IS:' + str(P + M + G)
        ).grid(column=0, row=5)
if en == 1:
inicio()
<mask token>
| <mask token>
global P, M, G, en
<mask token>
def inicio():
global P, M, G, en
    B1 = Button(ventana, text='SMALL BOXES', command=A, state='normal',
        bg='yellow').grid(column=1, row=1)
    B2 = Button(ventana, text='MEDIUM BOXES', command=B, state='normal',
        bg='orange').grid(column=2, row=1)
    B3 = Button(ventana, text='LARGE BOXES', command=C, state='normal',
        bg='red').grid(column=3, row=1)
    B4 = Button(ventana, text='TOTAL BOXES', command=D, state='normal',
        bg='green').grid(column=4, row=1)
def A():
global P
P = P + 1
def B():
global M
M = M + 1
def C():
global G
G = G + 1
def D():
global P, M, G
    l = Label(ventana, text='The total of SMALL BOXES is:' + str(P)).grid(
        column=0, row=2)
    l = Label(ventana, text='The total of MEDIUM BOXES is:' + str(M)).grid(
        column=0, row=3)
    l = Label(ventana, text='The total of LARGE BOXES is:' + str(G)).grid(
        column=0, row=4)
    l = Label(ventana, text='THE TOTAL OF COUNTED BOXES IS:' + str(P + M + G)
        ).grid(column=0, row=5)
if en == 1:
inicio()
<mask token>
inicio()
ventana.mainloop()
| <mask token>
global P, M, G, en
P = 0
M = 0
G = 0
en = 1
def inicio():
global P, M, G, en
    B1 = Button(ventana, text='SMALL BOXES', command=A, state='normal',
        bg='yellow').grid(column=1, row=1)
    B2 = Button(ventana, text='MEDIUM BOXES', command=B, state='normal',
        bg='orange').grid(column=2, row=1)
    B3 = Button(ventana, text='LARGE BOXES', command=C, state='normal',
        bg='red').grid(column=3, row=1)
    B4 = Button(ventana, text='TOTAL BOXES', command=D, state='normal',
        bg='green').grid(column=4, row=1)
def A():
global P
P = P + 1
def B():
global M
M = M + 1
def C():
global G
G = G + 1
def D():
global P, M, G
    l = Label(ventana, text='The total of SMALL BOXES is:' + str(P)).grid(
        column=0, row=2)
    l = Label(ventana, text='The total of MEDIUM BOXES is:' + str(M)).grid(
        column=0, row=3)
    l = Label(ventana, text='The total of LARGE BOXES is:' + str(G)).grid(
        column=0, row=4)
    l = Label(ventana, text='THE TOTAL OF COUNTED BOXES IS:' + str(P + M + G)
        ).grid(column=0, row=5)
if en == 1:
inicio()
ventana = Tk()
inicio()
ventana.mainloop()
| from tkinter import *
global P, M, G, en
P = 0
M = 0
G = 0
en = 1
def inicio():
global P, M, G, en
    B1 = Button(ventana, text='SMALL BOXES', command=A, state='normal',
        bg='yellow').grid(column=1, row=1)
    B2 = Button(ventana, text='MEDIUM BOXES', command=B, state='normal',
        bg='orange').grid(column=2, row=1)
    B3 = Button(ventana, text='LARGE BOXES', command=C, state='normal',
        bg='red').grid(column=3, row=1)
    B4 = Button(ventana, text='TOTAL BOXES', command=D, state='normal',
        bg='green').grid(column=4, row=1)
def A():
global P
P = P + 1
def B():
global M
M = M + 1
def C():
global G
G = G + 1
def D():
global P, M, G
    l = Label(ventana, text='The total of SMALL BOXES is:' + str(P)).grid(
        column=0, row=2)
    l = Label(ventana, text='The total of MEDIUM BOXES is:' + str(M)).grid(
        column=0, row=3)
    l = Label(ventana, text='The total of LARGE BOXES is:' + str(G)).grid(
        column=0, row=4)
    l = Label(ventana, text='THE TOTAL OF COUNTED BOXES IS:' + str(P + M + G)
        ).grid(column=0, row=5)
if en == 1:
inicio()
ventana = Tk()
inicio()
ventana.mainloop()
| from tkinter import *
global P,M,G,en
P=0
M=0
G=0
en=1
def inicio():
global P,M,G,en
B1=Button(ventana,text="CAJAS PEQUEÑAS",command=A,state="normal",bg="yellow").grid(column=1,row=1)
B2=Button(ventana,text="CAJAS MEDIANAS",command=B,state="normal",bg="orange").grid(column=2,row=1)
B3=Button(ventana,text="CAJAS GRANDES",command=C,state="normal",bg="red").grid(column=3,row=1)
B4=Button(ventana,text="TOTAL DE CAJAS",command=D,state="normal",bg="green").grid(column=4,row=1)
def A ():
global P
P=P+1
def B ():
global M
M=M+1
def C ():
global G
G=G+1
def D ():
global P,M,G
l=Label(ventana,text="El total de CAJAS PEQUEÑAS es:"+str(P)).grid(column=0,row=2)
l=Label(ventana,text="El total de CAJAS MEDIANAS es:"+str(M)).grid(column=0,row=3)
l=Label(ventana,text="El total de CAJAS GRANDES es:"+str(G)).grid(column=0,row=4)
l=Label(ventana,text="EL TOTAL DE CAJAS CONTADAS ES:"+str(P+M+G)).grid(column=0,row=5)
if(en==1):
inicio()
ventana=Tk()
inicio()
ventana.mainloop()
| [
5,
6,
7,
8,
9
] |
1,916 | 65b7a14c54cd988185bac54fd8a31330966f8ba9 | <mask token>
| <mask token>
config.read('dwh.cfg')
<mask token>
| <mask token>
config = configparser.ConfigParser()
config.read('dwh.cfg')
schema = """CREATE SCHEMA IF NOT EXISTS public;
SET search_path TO public;"""
staging_events_table_drop = 'DROP TABLE IF EXISTS staging_events;'
staging_songs_table_drop = 'DROP TABLE IF EXISTS staging_songs;'
songplay_table_drop = 'DROP TABLE IF EXISTS songplay;'
users_table_drop = 'DROP TABLE IF EXISTS users;'
song_table_drop = 'DROP TABLE IF EXISTS song;'
artist_table_drop = 'DROP TABLE IF EXISTS artist;'
time_table_drop = 'DROP TABLE IF EXISTS time;'
staging_events_table_create = """CREATE TABLE staging_events(
artist VARCHAR,
auth VARCHAR,
first_name VARCHAR,
gender CHAR(16),
item_in_session INTEGER,
last_name VARCHAR,
length FLOAT,
level VARCHAR(10),
location VARCHAR,
method VARCHAR(4),
page VARCHAR(16),
registration VARCHAR,
session_id INTEGER,
song VARCHAR,
status INTEGER,
ts BIGINT,
user_agent VARCHAR,
user_id INTEGER);"""
staging_songs_table_create = """CREATE TABLE staging_songs(
song_id VARCHAR,
num_songs INTEGER,
artist_id VARCHAR,
artist_latitude VARCHAR,
artist_longitude VARCHAR,
artist_location VARCHAR,
artist_name VARCHAR,
title VARCHAR,
duration FLOAT,
year INTEGER);"""
songplay_table_create = """CREATE TABLE songplay (
songplay_id INTEGER IDENTITY(0,1) sortkey,
start_time TIMESTAMP,
user_id INTEGER,
level VARCHAR(10),
song_id VARCHAR distkey,
artist_id VARCHAR,
session_id INTEGER,
location VARCHAR,
user_agent VARCHAR);"""
users_table_create = """CREATE TABLE users (
user_id INTEGER sortkey distkey,
first_name VARCHAR,
last_name VARCHAR,
gender CHAR(16),
level VARCHAR(10));"""
song_table_create = """CREATE TABLE song (
song_id VARCHAR sortkey distkey,
title VARCHAR,
artist_id VARCHAR,
year INTEGER,
duration FLOAT);"""
artist_table_create = """CREATE TABLE artist (
artist_id VARCHAR sortkey distkey,
artist_name VARCHAR,
artist_location VARCHAR,
artist_latitude VARCHAR,
artist_longitude VARCHAR);"""
time_table_create = """CREATE TABLE time (
start_time TIMESTAMP sortkey distkey,
hour INTEGER,
day INTEGER,
week INTEGER,
month INTEGER,
year INTEGER,
weekday INTEGER);"""
staging_events_copy = (
"""COPY staging_events
FROM 's3://udacity-dend/log_data'
CREDENTIALS 'aws_iam_role={}'
COMPUPDATE OFF REGION 'us-west-2'
FORMAT AS JSON 's3://udacity-dend/log_json_path.json';
"""
.format('IAM ARN'))
staging_songs_copy = (
"""COPY staging_songs
FROM 's3://udacity-dend/song_data'
CREDENTIALS 'aws_iam_role={}'
COMPUPDATE OFF REGION 'us-west-2'
JSON 'auto' truncatecolumns;
"""
.format('IAM ARN'))
songplay_table_insert = """INSERT INTO songplay(start_time, user_id, level,
song_id, artist_id, session_id,location, user_agent)
SELECT DISTINCT TIMESTAMP 'epoch' + ts/1000 *INTERVAL '1second' as start_time,
se.user_id,
se.level,
ss.song_id,
ss.artist_id,
se.session_id,
se.location,
se.user_agent
FROM staging_events se, staging_songs ss
WHERE se.page = 'NextSong'
AND se.artist = ss.artist_name
AND se.length = ss.duration"""
users_table_insert = """INSERT INTO users (user_id, first_name, last_name, gender, level)
SELECT
se.user_id,
se.first_name,
se.last_name,
se.gender,
se.level
FROM staging_events se"""
song_table_insert = """INSERT INTO song (song_id, title, artist_id, year, duration)
SELECT
ss.song_id,
ss.title,
ss.artist_id,
ss.year,
ss.duration
FROM staging_songs ss"""
artist_table_insert = """INSERT INTO artist (artist_id, artist_name, artist_location, artist_latitude, artist_longitude)
SELECT
ss.artist_id,
ss.artist_name,
ss.artist_location,
ss.artist_latitude,
ss.artist_longitude
FROM staging_songs ss"""
time_table_insert = """INSERT INTO time(start_time, hour, day, week, month, year, weekday)
SELECT start_time,
EXTRACT(hour from start_time),
EXTRACT(day from start_time),
EXTRACT(week from start_time),
EXTRACT(month from start_time),
EXTRACT(year from start_time),
EXTRACT(dayofweek from start_time)
FROM songplay"""
test1 = 'SELECT * FROM songplay LIMIT 1; '
test2 = 'SELECT * FROM users LIMIT 1; '
test3 = 'SELECT * FROM song LIMIT 1; '
test4 = 'SELECT * FROM artist LIMIT 1; '
test5 = 'SELECT * FROM time LIMIT 1; '
create_table_queries = [staging_events_table_create,
staging_songs_table_create, songplay_table_create, users_table_create,
song_table_create, artist_table_create, time_table_create]
drop_table_queries = [staging_events_table_drop, staging_songs_table_drop,
songplay_table_drop, users_table_drop, song_table_drop,
artist_table_drop, time_table_drop]
copy_table_queries = [staging_events_copy, staging_songs_copy]
insert_table_queries = [songplay_table_insert, users_table_insert,
song_table_insert, artist_table_insert, time_table_insert]
test_queries = [test1, test2, test3, test4, test5]
| import configparser
config = configparser.ConfigParser()
config.read('dwh.cfg')
schema = """CREATE SCHEMA IF NOT EXISTS public;
SET search_path TO public;"""
staging_events_table_drop = 'DROP TABLE IF EXISTS staging_events;'
staging_songs_table_drop = 'DROP TABLE IF EXISTS staging_songs;'
songplay_table_drop = 'DROP TABLE IF EXISTS songplay;'
users_table_drop = 'DROP TABLE IF EXISTS users;'
song_table_drop = 'DROP TABLE IF EXISTS song;'
artist_table_drop = 'DROP TABLE IF EXISTS artist;'
time_table_drop = 'DROP TABLE IF EXISTS time;'
staging_events_table_create = """CREATE TABLE staging_events(
artist VARCHAR,
auth VARCHAR,
first_name VARCHAR,
gender CHAR(16),
item_in_session INTEGER,
last_name VARCHAR,
length FLOAT,
level VARCHAR(10),
location VARCHAR,
method VARCHAR(4),
page VARCHAR(16),
registration VARCHAR,
session_id INTEGER,
song VARCHAR,
status INTEGER,
ts BIGINT,
user_agent VARCHAR,
user_id INTEGER);"""
staging_songs_table_create = """CREATE TABLE staging_songs(
song_id VARCHAR,
num_songs INTEGER,
artist_id VARCHAR,
artist_latitude VARCHAR,
artist_longitude VARCHAR,
artist_location VARCHAR,
artist_name VARCHAR,
title VARCHAR,
duration FLOAT,
year INTEGER);"""
songplay_table_create = """CREATE TABLE songplay (
songplay_id INTEGER IDENTITY(0,1) sortkey,
start_time TIMESTAMP,
user_id INTEGER,
level VARCHAR(10),
song_id VARCHAR distkey,
artist_id VARCHAR,
session_id INTEGER,
location VARCHAR,
user_agent VARCHAR);"""
users_table_create = """CREATE TABLE users (
user_id INTEGER sortkey distkey,
first_name VARCHAR,
last_name VARCHAR,
gender CHAR(16),
level VARCHAR(10));"""
song_table_create = """CREATE TABLE song (
song_id VARCHAR sortkey distkey,
title VARCHAR,
artist_id VARCHAR,
year INTEGER,
duration FLOAT);"""
artist_table_create = """CREATE TABLE artist (
artist_id VARCHAR sortkey distkey,
artist_name VARCHAR,
artist_location VARCHAR,
artist_latitude VARCHAR,
artist_longitude VARCHAR);"""
time_table_create = """CREATE TABLE time (
start_time TIMESTAMP sortkey distkey,
hour INTEGER,
day INTEGER,
week INTEGER,
month INTEGER,
year INTEGER,
weekday INTEGER);"""
staging_events_copy = (
"""COPY staging_events
FROM 's3://udacity-dend/log_data'
CREDENTIALS 'aws_iam_role={}'
COMPUPDATE OFF REGION 'us-west-2'
FORMAT AS JSON 's3://udacity-dend/log_json_path.json';
"""
.format('IAM ARN'))
staging_songs_copy = (
"""COPY staging_songs
FROM 's3://udacity-dend/song_data'
CREDENTIALS 'aws_iam_role={}'
COMPUPDATE OFF REGION 'us-west-2'
JSON 'auto' truncatecolumns;
"""
.format('IAM ARN'))
songplay_table_insert = """INSERT INTO songplay(start_time, user_id, level,
song_id, artist_id, session_id,location, user_agent)
SELECT DISTINCT TIMESTAMP 'epoch' + ts/1000 *INTERVAL '1second' as start_time,
se.user_id,
se.level,
ss.song_id,
ss.artist_id,
se.session_id,
se.location,
se.user_agent
FROM staging_events se, staging_songs ss
WHERE se.page = 'NextSong'
AND se.artist = ss.artist_name
AND se.length = ss.duration"""
users_table_insert = """INSERT INTO users (user_id, first_name, last_name, gender, level)
SELECT
se.user_id,
se.first_name,
se.last_name,
se.gender,
se.level
FROM staging_events se"""
song_table_insert = """INSERT INTO song (song_id, title, artist_id, year, duration)
SELECT
ss.song_id,
ss.title,
ss.artist_id,
ss.year,
ss.duration
FROM staging_songs ss"""
artist_table_insert = """INSERT INTO artist (artist_id, artist_name, artist_location, artist_latitude, artist_longitude)
SELECT
ss.artist_id,
ss.artist_name,
ss.artist_location,
ss.artist_latitude,
ss.artist_longitude
FROM staging_songs ss"""
time_table_insert = """INSERT INTO time(start_time, hour, day, week, month, year, weekday)
SELECT start_time,
EXTRACT(hour from start_time),
EXTRACT(day from start_time),
EXTRACT(week from start_time),
EXTRACT(month from start_time),
EXTRACT(year from start_time),
EXTRACT(dayofweek from start_time)
FROM songplay"""
test1 = 'SELECT * FROM songplay LIMIT 1; '
test2 = 'SELECT * FROM users LIMIT 1; '
test3 = 'SELECT * FROM song LIMIT 1; '
test4 = 'SELECT * FROM artist LIMIT 1; '
test5 = 'SELECT * FROM time LIMIT 1; '
create_table_queries = [staging_events_table_create,
staging_songs_table_create, songplay_table_create, users_table_create,
song_table_create, artist_table_create, time_table_create]
drop_table_queries = [staging_events_table_drop, staging_songs_table_drop,
songplay_table_drop, users_table_drop, song_table_drop,
artist_table_drop, time_table_drop]
copy_table_queries = [staging_events_copy, staging_songs_copy]
insert_table_queries = [songplay_table_insert, users_table_insert,
song_table_insert, artist_table_insert, time_table_insert]
test_queries = [test1, test2, test3, test4, test5]
| import configparser
# CONFIG
config = configparser.ConfigParser()
config.read('dwh.cfg')
# DISTRIBUTION SCHEMA
schema = ("""CREATE SCHEMA IF NOT EXISTS public;
SET search_path TO public;""")
# DROP TABLES
staging_events_table_drop = ("DROP TABLE IF EXISTS staging_events;")
staging_songs_table_drop = ("DROP TABLE IF EXISTS staging_songs;")
songplay_table_drop = ("DROP TABLE IF EXISTS songplay;")
users_table_drop = ("DROP TABLE IF EXISTS users;")
song_table_drop = ("DROP TABLE IF EXISTS song;")
artist_table_drop = ("DROP TABLE IF EXISTS artist;")
time_table_drop = ("DROP TABLE IF EXISTS time;")
# CREATE STAGING TABLES
staging_events_table_create = ("""CREATE TABLE staging_events(
artist VARCHAR,
auth VARCHAR,
first_name VARCHAR,
gender CHAR(16),
item_in_session INTEGER,
last_name VARCHAR,
length FLOAT,
level VARCHAR(10),
location VARCHAR,
method VARCHAR(4),
page VARCHAR(16),
registration VARCHAR,
session_id INTEGER,
song VARCHAR,
status INTEGER,
ts BIGINT,
user_agent VARCHAR,
user_id INTEGER);""")
staging_songs_table_create = ("""CREATE TABLE staging_songs(
song_id VARCHAR,
num_songs INTEGER,
artist_id VARCHAR,
artist_latitude VARCHAR,
artist_longitude VARCHAR,
artist_location VARCHAR,
artist_name VARCHAR,
title VARCHAR,
duration FLOAT,
year INTEGER);""")
# CREATE FACT TABLE
songplay_table_create = ("""CREATE TABLE songplay (
songplay_id INTEGER IDENTITY(0,1) sortkey,
start_time TIMESTAMP,
user_id INTEGER,
level VARCHAR(10),
song_id VARCHAR distkey,
artist_id VARCHAR,
session_id INTEGER,
location VARCHAR,
user_agent VARCHAR);""")
# CREATE DIMENSION TABLES
users_table_create = ("""CREATE TABLE users (
user_id INTEGER sortkey distkey,
first_name VARCHAR,
last_name VARCHAR,
gender CHAR(16),
level VARCHAR(10));""")
song_table_create = ("""CREATE TABLE song (
song_id VARCHAR sortkey distkey,
title VARCHAR,
artist_id VARCHAR,
year INTEGER,
duration FLOAT);""")
artist_table_create = ("""CREATE TABLE artist (
artist_id VARCHAR sortkey distkey,
artist_name VARCHAR,
artist_location VARCHAR,
artist_latitude VARCHAR,
artist_longitude VARCHAR);""")
time_table_create = ("""CREATE TABLE time (
start_time TIMESTAMP sortkey distkey,
hour INTEGER,
day INTEGER,
week INTEGER,
month INTEGER,
year INTEGER,
weekday INTEGER);""")
# COPY FROM S3 INTO STAGING TABLES
staging_events_copy = ("""COPY staging_events
FROM 's3://udacity-dend/log_data'
CREDENTIALS 'aws_iam_role={}'
COMPUPDATE OFF REGION 'us-west-2'
FORMAT AS JSON 's3://udacity-dend/log_json_path.json';
""").format("IAM ARN")
# note: the full song_data path below copies the entire dataset; to cut execution time, narrow it with a prefix such as /A/A/
staging_songs_copy = ("""COPY staging_songs
FROM 's3://udacity-dend/song_data'
CREDENTIALS 'aws_iam_role={}'
COMPUPDATE OFF REGION 'us-west-2'
JSON 'auto' truncatecolumns;
""").format("IAM ARN")
# INSERT FROM STAGING TO FINAL TABLES
songplay_table_insert =("""INSERT INTO songplay(start_time, user_id, level,
song_id, artist_id, session_id,location, user_agent)
SELECT DISTINCT TIMESTAMP 'epoch' + ts/1000 *INTERVAL '1second' as start_time,
se.user_id,
se.level,
ss.song_id,
ss.artist_id,
se.session_id,
se.location,
se.user_agent
FROM staging_events se, staging_songs ss
WHERE se.page = 'NextSong'
AND se.artist = ss.artist_name
AND se.length = ss.duration""")
users_table_insert = ("""INSERT INTO users (user_id, first_name, last_name, gender, level)
SELECT
se.user_id,
se.first_name,
se.last_name,
se.gender,
se.level
FROM staging_events se""")
song_table_insert = ("""INSERT INTO song (song_id, title, artist_id, year, duration)
SELECT
ss.song_id,
ss.title,
ss.artist_id,
ss.year,
ss.duration
FROM staging_songs ss""")
artist_table_insert = ("""INSERT INTO artist (artist_id, artist_name, artist_location, artist_latitude, artist_longitude)
SELECT
ss.artist_id,
ss.artist_name,
ss.artist_location,
ss.artist_latitude,
ss.artist_longitude
FROM staging_songs ss""")
time_table_insert = ("""INSERT INTO time(start_time, hour, day, week, month, year, weekday)
SELECT start_time,
EXTRACT(hour from start_time),
EXTRACT(day from start_time),
EXTRACT(week from start_time),
EXTRACT(month from start_time),
EXTRACT(year from start_time),
EXTRACT(dayofweek from start_time)
FROM songplay""")
#TEST QUERIES
test1 = ("""SELECT * FROM songplay LIMIT 1; """)
test2 = ("""SELECT * FROM users LIMIT 1; """)
test3 = ("""SELECT * FROM song LIMIT 1; """)
test4 = ("""SELECT * FROM artist LIMIT 1; """)
test5 = ("""SELECT * FROM time LIMIT 1; """)
# QUERY LISTS
create_table_queries = [staging_events_table_create, staging_songs_table_create, songplay_table_create, users_table_create,
song_table_create, artist_table_create, time_table_create]
drop_table_queries = [staging_events_table_drop, staging_songs_table_drop, songplay_table_drop, users_table_drop,
song_table_drop, artist_table_drop, time_table_drop]
copy_table_queries = [staging_events_copy, staging_songs_copy]
insert_table_queries = [songplay_table_insert, users_table_insert, song_table_insert, artist_table_insert, time_table_insert]
test_queries = [test1, test2, test3, test4, test5] | [
0,
1,
2,
3,
4
] |
1,917 | 214aadb7b3fc125da12f098bde87fce295349fdf | #!/usr/bin/python2
#
# Author: Victor Ananjevsky, 2007 - 2010
# based on xdg-menu.py, written by Piotr Zielinski (http://www.cl.cam.ac.uk/~pz215/)
# License: GPL
#
# This script takes names of menu files conforming to the XDG Desktop
# Menu Specification, and outputs their FVWM equivalents to the
# standard output.
#
# http://standards.freedesktop.org/menu-spec/latest/
#
# Requirements:
# pyxdg, pygtk, gnome-menus
#
# Syntax:
# fvwm-xdg-menu.py [-d Menu] menufile1 menufile2 menufile3 ...
#
# Each menufile is an XDG menu description file.
# Icons of menu entries cached in $XDG_CACHE_HOME/fvwm/icons/menu
#
# For menufile name `recent' will be generated menu of recently opened files
#
# -d mean not print headers for toplevel menu (useful in DynamicPopupAction)
#
# Example:
# fvwm-xdg-menu.py /etc/xdg/menus/applications.menu
# fvwm-xdg-menu.py applications
#
import sys
import os
from optparse import OptionParser
import xdg.Menu
from xdg.DesktopEntry import *
from xdg.RecentFiles import *
from xdg.BaseDirectory import xdg_config_dirs, xdg_cache_home
import gtk
# fix for correct output of unicode chars without terminal
sys.stdout = codecs.getwriter('utf-8')(sys.stdout)
def cache_icon (icon):
''' cache an icon '''
icon_file = "%s/%s.png" % (cache_path, os.path.basename(icon))
if os.path.exists(icon_file):
return
full_icon = "%s.png" % icon
if os.path.exists(full_icon):
gtk.gdk.pixbuf_new_from_file_at_size(full_icon, options.icon_size, options.icon_size).save(icon_file, 'png')
return
try:
icon_theme.load_icon(icon, options.icon_size, gtk.ICON_LOOKUP_NO_SVG).save(icon_file, "png")
except:
pass
def parse_menu (menu, fvwm_menu = None):
''' parse menu file '''
prefix = "+"
if fvwm_menu == None:
print ''
print 'DestroyMenu "%s"' % menu
print 'AddToMenu "%s"' % menu
else:
print 'DestroyMenu recreate %s' % fvwm_menu
prefix = "AddToMenu %s" % fvwm_menu
for entry in menu.getEntries():
if isinstance(entry, xdg.Menu.Menu):
icon = entry.getIcon()
print u'%s "%s%%menu/folder.png%%" Popup "%s"' % (prefix, entry.getName(), entry)
elif isinstance(entry, xdg.Menu.MenuEntry):
desktop = DesktopEntry(entry.DesktopEntry.getFileName())
icon = desktop.getIcon()
ind = icon.rfind('.')
if ind != -1:
icon = icon[0:ind]
cmd = desktop.getExec().rstrip('%FUfu')
cache_icon(icon)
print u'%s "%s%%menu/%s.png%%" Exec exec %s' % (prefix, desktop.getName(), os.path.basename(icon), cmd)
else:
pass
for entry in menu.getEntries():
if isinstance(entry, xdg.Menu.Menu):
parse_menu(entry)
def parse_recent (fvwm_menu = None):
''' parse recently opened files '''
prefix = "+"
if fvwm_menu == None:
print ''
print 'DestroyMenu "Recent"'
print 'AddToMenu "Recent"'
else:
print 'DestroyMenu recreate %s' % fvwm_menu
prefix="AddToMenu %s" % fvwm_menu
rm = gtk.RecentManager()
for rf in rm.get_items():
print '%s "%s" Exec exec xdg-open "%s"' % (prefix, rf.get_display_name(), rf.get_uri())
# Start
cache_path = "%s/fvwm/menu" % xdg_cache_home
icon_theme = gtk.icon_theme_get_default()
if not os.path.exists(cache_path):
os.makedirs(cache_path)
# Parse commandline
parser = OptionParser()
parser.add_option("-d", "--dynamic", dest="fvwm_menu", default=None, help="Use in DynamicPopupAction", metavar="MENU")
parser.add_option("-i", "--icons", dest="icon_size", default=16, help="Set icons size", metavar="SIZE")
(options, args) = parser.parse_args()
for arg in args:
filename = ""
if os.path.exists(arg) or arg == "recent":
filename = arg
else:
tmpfile = "%s/menus/%s.menu" % (xdg_config_home, arg)
if os.path.exists(tmpfile):
filename = tmpfile
else:
for dir in xdg_config_dirs:
tmpfile = "%s/menus/%s.menu" % (dir, arg)
if os.path.exists(tmpfile):
filename = tmpfile
break
if filename == "":
continue
elif filename == "recent":
parse_recent (options.fvwm_menu)
else:
parse_menu(xdg.Menu.parse(filename), options.fvwm_menu)
| null | null | null | null | [
0
] |
1,918 | 4437075901751adeaf3df63345e270a9b0090c14 | <mask token>
| <mask token>
parser.add_argument('-pred_path', type=str, required=True)
parser.add_argument('-n_list_path', type=str, required=True)
parser.add_argument('-refer_path', type=str, required=True)
<mask token>
with open(args.pred_path, 'r') as f:
preds = f.readlines()
with open(args.n_list_path, 'r') as f:
for line in f:
n_list.append(int(line.strip()))
with open(args.refer_path, 'r') as f:
golds = f.readlines()
<mask token>
for idx, pred in enumerate(preds):
if idx == sum(n_list[:gold_idx + 1]):
gold_idx += 1
gold = golds[gold_idx].strip()
refs = [[gold.split()]]
pred = [pred.strip().split()]
sent_bleu = bleu.bleu(pred, refs, smooth=True)
print('%s : %s : %f' % (pred, refs, sent_bleu))
f_summary.write(' '.join(pred[0]) + '|||' + str(sent_bleu) + '\n')
f_summary.close()
| <mask token>
parser = argparse.ArgumentParser('Compute sentence bleu.')
parser.add_argument('-pred_path', type=str, required=True)
parser.add_argument('-n_list_path', type=str, required=True)
parser.add_argument('-refer_path', type=str, required=True)
args = parser.parse_args()
n_list = []
with open(args.pred_path, 'r') as f:
preds = f.readlines()
with open(args.n_list_path, 'r') as f:
for line in f:
n_list.append(int(line.strip()))
with open(args.refer_path, 'r') as f:
golds = f.readlines()
f_summary = open(args.pred_path + '.sent-bleu', 'w')
gold_idx = 0
for idx, pred in enumerate(preds):
if idx == sum(n_list[:gold_idx + 1]):
gold_idx += 1
gold = golds[gold_idx].strip()
refs = [[gold.split()]]
pred = [pred.strip().split()]
sent_bleu = bleu.bleu(pred, refs, smooth=True)
print('%s : %s : %f' % (pred, refs, sent_bleu))
f_summary.write(' '.join(pred[0]) + '|||' + str(sent_bleu) + '\n')
f_summary.close()
| import thumt.utils.bleu as bleu
import argparse
parser = argparse.ArgumentParser('Compute sentence bleu.')
parser.add_argument('-pred_path', type=str, required=True)
parser.add_argument('-n_list_path', type=str, required=True)
parser.add_argument('-refer_path', type=str, required=True)
args = parser.parse_args()
n_list = []
with open(args.pred_path, 'r') as f:
preds = f.readlines()
with open(args.n_list_path, 'r') as f:
for line in f:
n_list.append(int(line.strip()))
with open(args.refer_path, 'r') as f:
golds = f.readlines()
f_summary = open(args.pred_path + '.sent-bleu', 'w')
gold_idx = 0
for idx, pred in enumerate(preds):
if idx == sum(n_list[:gold_idx + 1]):
gold_idx += 1
gold = golds[gold_idx].strip()
refs = [[gold.split()]]
pred = [pred.strip().split()]
sent_bleu = bleu.bleu(pred, refs, smooth=True)
print('%s : %s : %f' % (pred, refs, sent_bleu))
f_summary.write(' '.join(pred[0]) + '|||' + str(sent_bleu) + '\n')
f_summary.close()
| import thumt.utils.bleu as bleu
import argparse
parser = argparse.ArgumentParser("Compute sentence bleu.")
parser.add_argument("-pred_path", type=str, required=True)
parser.add_argument("-n_list_path", type=str, required=True)
parser.add_argument("-refer_path", type=str, required=True)
args = parser.parse_args()
n_list = []
with open(args.pred_path, 'r') as f:
preds = f.readlines()
with open(args.n_list_path, 'r') as f:
for line in f:
n_list.append(int(line.strip()))
with open(args.refer_path, 'r') as f:
golds = f.readlines()
f_summary = open(args.pred_path + ".sent-bleu", 'w')
gold_idx = 0
for idx, pred in enumerate(preds):
#import ipdb; ipdb.set_trace()
if idx == sum(n_list[:gold_idx + 1]):
gold_idx += 1
gold = golds[gold_idx].strip() # remove `\n`
#refs = [gold.split()]
refs = [[gold.split()]]
pred = [pred.strip().split()]
#import ipdb; ipdb.set_trace()
sent_bleu = bleu.bleu(pred, refs, smooth=True)
print("%s : %s : %f" % (pred, refs, sent_bleu))
f_summary.write(" ".join(pred[0]) + "|||" + str(sent_bleu) + "\n")
f_summary.close()
| [0, 1, 2, 3, 4] |
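In the sentence-BLEU script above, n_list records how many candidate lines belong to each reference line, and the idx == sum(n_list[:gold_idx + 1]) check advances the reference pointer. A tiny illustration of that bookkeeping with hypothetical in-memory data (no thumt required):

n_list = [2, 1]                       # gold 0 owns preds 0-1; gold 1 owns pred 2
golds = ["a b c", "d e"]
preds = ["a b", "a b c", "d"]

gold_idx = 0
for idx, pred in enumerate(preds):
    if idx == sum(n_list[:gold_idx + 1]):
        gold_idx += 1
    print(idx, "->", golds[gold_idx])  # 0 and 1 map to "a b c"; 2 maps to "d e"

Note the double nesting in the script itself: bleu.bleu(pred, refs, smooth=True) receives pred as a list of token lists and refs as a list of reference sets, hence refs = [[gold.split()]].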
1,919 | 07452795a677836b89eef85b6fb25b33eb464d91 | <mask token>
class TestGroupInfoService:
<mask token>
def test_upsert_group_info_adds_a_new_if_none_exists(self, db_session,
svc, params):
course = factories.Course(authority_provided_id=self.AUTHORITY)
svc.upsert_group_info(course, params=params)
group_info = self.get_inserted_group_info(db_session)
assert group_info.application_instance == course.application_instance
assert group_info.context_title == params['context_title']
assert group_info.context_label == params['context_label']
assert group_info.type == 'course_group'
<mask token>
def test_upsert_group_info_ignores_non_metadata_params(self, db_session,
svc, params):
svc.upsert_group_info(factories.Course(authority_provided_id=self.
AUTHORITY), params=dict(params, id='IGNORE ME 1',
authority_provided_id='IGNORE ME 2', something_unrelated=
'IGNORED ME 3'))
group_info = self.get_inserted_group_info(db_session)
assert group_info.authority_provided_id == self.AUTHORITY
assert group_info.id != 'IGNORE ME 1'
@pytest.mark.usefixtures('user_is_instructor')
def test_upsert_group_info_records_instructors_with_group_info(self,
db_session, svc, pyramid_request):
svc.upsert_group_info(factories.Course(authority_provided_id=self.
AUTHORITY), params={})
group_info = self.get_inserted_group_info(db_session)
assert len(group_info.instructors) == 1
assert group_info.instructors[0]['username'
] == pyramid_request.lti_user.h_user.username
assert group_info.instructors[0]['email'] == 'test_email'
<mask token>
def get_inserted_group_info(self, db_session):
return db_session.query(GroupInfo).filter_by(authority_provided_id=
self.AUTHORITY).one()
@pytest.fixture
def svc(self, pyramid_request):
return GroupInfoService(mock.sentinel.context, pyramid_request)
<mask token>
<mask token>
@pytest.fixture(autouse=True)
def with_existing_group_infos(self):
factories.GroupInfo.build_batch(3)
<mask token>
| <mask token>
class TestGroupInfoService:
<mask token>
def test_upsert_group_info_adds_a_new_if_none_exists(self, db_session,
svc, params):
course = factories.Course(authority_provided_id=self.AUTHORITY)
svc.upsert_group_info(course, params=params)
group_info = self.get_inserted_group_info(db_session)
assert group_info.application_instance == course.application_instance
assert group_info.context_title == params['context_title']
assert group_info.context_label == params['context_label']
assert group_info.type == 'course_group'
def test_upsert_group_info_updates_an_existing_if_one_already_exists(self,
db_session, svc, params, pre_existing_group):
db_session.add(pre_existing_group)
new_application_instance = factories.ApplicationInstance()
assert pre_existing_group.application_instance != new_application_instance
svc.upsert_group_info(factories.Course(authority_provided_id=self.
AUTHORITY, application_instance=new_application_instance),
params=dict(params, context_title='NEW_TITLE'))
group_info = self.get_inserted_group_info(db_session)
assert group_info.application_instance == new_application_instance
assert group_info.context_label == params['context_label']
assert group_info.context_title == 'NEW_TITLE'
assert group_info.type == 'course_group'
def test_upsert_group_info_ignores_non_metadata_params(self, db_session,
svc, params):
svc.upsert_group_info(factories.Course(authority_provided_id=self.
AUTHORITY), params=dict(params, id='IGNORE ME 1',
authority_provided_id='IGNORE ME 2', something_unrelated=
'IGNORED ME 3'))
group_info = self.get_inserted_group_info(db_session)
assert group_info.authority_provided_id == self.AUTHORITY
assert group_info.id != 'IGNORE ME 1'
@pytest.mark.usefixtures('user_is_instructor')
def test_upsert_group_info_records_instructors_with_group_info(self,
db_session, svc, pyramid_request):
svc.upsert_group_info(factories.Course(authority_provided_id=self.
AUTHORITY), params={})
group_info = self.get_inserted_group_info(db_session)
assert len(group_info.instructors) == 1
assert group_info.instructors[0]['username'
] == pyramid_request.lti_user.h_user.username
assert group_info.instructors[0]['email'] == 'test_email'
<mask token>
def get_inserted_group_info(self, db_session):
return db_session.query(GroupInfo).filter_by(authority_provided_id=
self.AUTHORITY).one()
@pytest.fixture
def svc(self, pyramid_request):
return GroupInfoService(mock.sentinel.context, pyramid_request)
@pytest.fixture
def params(self):
return {column: f'TEST_{column.upper()}' for column in GroupInfo.
columns() if column not in ('consumer_key', '_info',
'application_instance_id')}
@pytest.fixture(params=(True, False), ids=['GroupInfo w/o info',
'GroupInfo w/info'])
def pre_existing_group(self, application_instance, request, params):
pre_existing_group = GroupInfo(**dict(params, id=None,
authority_provided_id=self.AUTHORITY, application_instance_id=
application_instance.id))
if request.param:
pre_existing_group.info = None
return pre_existing_group
@pytest.fixture(autouse=True)
def with_existing_group_infos(self):
factories.GroupInfo.build_batch(3)
@pytest.fixture
def pyramid_request(self, pyramid_request):
pyramid_request.lti_user.email = 'test_email'
return pyramid_request
| <mask token>
class TestGroupInfoService:
AUTHORITY = 'TEST_AUTHORITY_PROVIDED_ID'
def test_upsert_group_info_adds_a_new_if_none_exists(self, db_session,
svc, params):
course = factories.Course(authority_provided_id=self.AUTHORITY)
svc.upsert_group_info(course, params=params)
group_info = self.get_inserted_group_info(db_session)
assert group_info.application_instance == course.application_instance
assert group_info.context_title == params['context_title']
assert group_info.context_label == params['context_label']
assert group_info.type == 'course_group'
def test_upsert_group_info_updates_an_existing_if_one_already_exists(self,
db_session, svc, params, pre_existing_group):
db_session.add(pre_existing_group)
new_application_instance = factories.ApplicationInstance()
assert pre_existing_group.application_instance != new_application_instance
svc.upsert_group_info(factories.Course(authority_provided_id=self.
AUTHORITY, application_instance=new_application_instance),
params=dict(params, context_title='NEW_TITLE'))
group_info = self.get_inserted_group_info(db_session)
assert group_info.application_instance == new_application_instance
assert group_info.context_label == params['context_label']
assert group_info.context_title == 'NEW_TITLE'
assert group_info.type == 'course_group'
def test_upsert_group_info_ignores_non_metadata_params(self, db_session,
svc, params):
svc.upsert_group_info(factories.Course(authority_provided_id=self.
AUTHORITY), params=dict(params, id='IGNORE ME 1',
authority_provided_id='IGNORE ME 2', something_unrelated=
'IGNORED ME 3'))
group_info = self.get_inserted_group_info(db_session)
assert group_info.authority_provided_id == self.AUTHORITY
assert group_info.id != 'IGNORE ME 1'
@pytest.mark.usefixtures('user_is_instructor')
def test_upsert_group_info_records_instructors_with_group_info(self,
db_session, svc, pyramid_request):
svc.upsert_group_info(factories.Course(authority_provided_id=self.
AUTHORITY), params={})
group_info = self.get_inserted_group_info(db_session)
assert len(group_info.instructors) == 1
assert group_info.instructors[0]['username'
] == pyramid_request.lti_user.h_user.username
assert group_info.instructors[0]['email'] == 'test_email'
@pytest.mark.usefixtures('user_is_learner')
def test_upsert_group_info_doesnt_record_learners_with_group_info(self,
db_session, svc):
svc.upsert_group_info(factories.Course(authority_provided_id=self.
AUTHORITY), params={})
group_info = self.get_inserted_group_info(db_session)
assert group_info.instructors == []
def get_inserted_group_info(self, db_session):
return db_session.query(GroupInfo).filter_by(authority_provided_id=
self.AUTHORITY).one()
@pytest.fixture
def svc(self, pyramid_request):
return GroupInfoService(mock.sentinel.context, pyramid_request)
@pytest.fixture
def params(self):
return {column: f'TEST_{column.upper()}' for column in GroupInfo.
columns() if column not in ('consumer_key', '_info',
'application_instance_id')}
@pytest.fixture(params=(True, False), ids=['GroupInfo w/o info',
'GroupInfo w/info'])
def pre_existing_group(self, application_instance, request, params):
pre_existing_group = GroupInfo(**dict(params, id=None,
authority_provided_id=self.AUTHORITY, application_instance_id=
application_instance.id))
if request.param:
pre_existing_group.info = None
return pre_existing_group
@pytest.fixture(autouse=True)
def with_existing_group_infos(self):
factories.GroupInfo.build_batch(3)
@pytest.fixture
def pyramid_request(self, pyramid_request):
pyramid_request.lti_user.email = 'test_email'
return pyramid_request
| from unittest import mock
import pytest
from lms.models import GroupInfo
from lms.services.group_info import GroupInfoService
from tests import factories
class TestGroupInfoService:
AUTHORITY = 'TEST_AUTHORITY_PROVIDED_ID'
def test_upsert_group_info_adds_a_new_if_none_exists(self, db_session,
svc, params):
course = factories.Course(authority_provided_id=self.AUTHORITY)
svc.upsert_group_info(course, params=params)
group_info = self.get_inserted_group_info(db_session)
assert group_info.application_instance == course.application_instance
assert group_info.context_title == params['context_title']
assert group_info.context_label == params['context_label']
assert group_info.type == 'course_group'
def test_upsert_group_info_updates_an_existing_if_one_already_exists(self,
db_session, svc, params, pre_existing_group):
db_session.add(pre_existing_group)
new_application_instance = factories.ApplicationInstance()
assert pre_existing_group.application_instance != new_application_instance
svc.upsert_group_info(factories.Course(authority_provided_id=self.
AUTHORITY, application_instance=new_application_instance),
params=dict(params, context_title='NEW_TITLE'))
group_info = self.get_inserted_group_info(db_session)
assert group_info.application_instance == new_application_instance
assert group_info.context_label == params['context_label']
assert group_info.context_title == 'NEW_TITLE'
assert group_info.type == 'course_group'
def test_upsert_group_info_ignores_non_metadata_params(self, db_session,
svc, params):
svc.upsert_group_info(factories.Course(authority_provided_id=self.
AUTHORITY), params=dict(params, id='IGNORE ME 1',
authority_provided_id='IGNORE ME 2', something_unrelated=
'IGNORED ME 3'))
group_info = self.get_inserted_group_info(db_session)
assert group_info.authority_provided_id == self.AUTHORITY
assert group_info.id != 'IGNORE ME 1'
@pytest.mark.usefixtures('user_is_instructor')
def test_upsert_group_info_records_instructors_with_group_info(self,
db_session, svc, pyramid_request):
svc.upsert_group_info(factories.Course(authority_provided_id=self.
AUTHORITY), params={})
group_info = self.get_inserted_group_info(db_session)
assert len(group_info.instructors) == 1
assert group_info.instructors[0]['username'
] == pyramid_request.lti_user.h_user.username
assert group_info.instructors[0]['email'] == 'test_email'
@pytest.mark.usefixtures('user_is_learner')
def test_upsert_group_info_doesnt_record_learners_with_group_info(self,
db_session, svc):
svc.upsert_group_info(factories.Course(authority_provided_id=self.
AUTHORITY), params={})
group_info = self.get_inserted_group_info(db_session)
assert group_info.instructors == []
def get_inserted_group_info(self, db_session):
return db_session.query(GroupInfo).filter_by(authority_provided_id=
self.AUTHORITY).one()
@pytest.fixture
def svc(self, pyramid_request):
return GroupInfoService(mock.sentinel.context, pyramid_request)
@pytest.fixture
def params(self):
return {column: f'TEST_{column.upper()}' for column in GroupInfo.
columns() if column not in ('consumer_key', '_info',
'application_instance_id')}
@pytest.fixture(params=(True, False), ids=['GroupInfo w/o info',
'GroupInfo w/info'])
def pre_existing_group(self, application_instance, request, params):
pre_existing_group = GroupInfo(**dict(params, id=None,
authority_provided_id=self.AUTHORITY, application_instance_id=
application_instance.id))
if request.param:
pre_existing_group.info = None
return pre_existing_group
@pytest.fixture(autouse=True)
def with_existing_group_infos(self):
factories.GroupInfo.build_batch(3)
@pytest.fixture
def pyramid_request(self, pyramid_request):
pyramid_request.lti_user.email = 'test_email'
return pyramid_request
| from unittest import mock
import pytest
from lms.models import GroupInfo
from lms.services.group_info import GroupInfoService
from tests import factories
class TestGroupInfoService:
AUTHORITY = "TEST_AUTHORITY_PROVIDED_ID"
def test_upsert_group_info_adds_a_new_if_none_exists(self, db_session, svc, params):
course = factories.Course(authority_provided_id=self.AUTHORITY)
svc.upsert_group_info(course, params=params)
group_info = self.get_inserted_group_info(db_session)
assert group_info.application_instance == course.application_instance
assert group_info.context_title == params["context_title"]
assert group_info.context_label == params["context_label"]
assert group_info.type == "course_group"
def test_upsert_group_info_updates_an_existing_if_one_already_exists(
self, db_session, svc, params, pre_existing_group
):
db_session.add(pre_existing_group)
new_application_instance = factories.ApplicationInstance()
# Sanity check that we can change the application instance
assert pre_existing_group.application_instance != new_application_instance
svc.upsert_group_info(
factories.Course(
authority_provided_id=self.AUTHORITY,
application_instance=new_application_instance,
),
params=dict(params, context_title="NEW_TITLE"),
)
group_info = self.get_inserted_group_info(db_session)
# This is very strange, but you can "steal" a group info row from
# another application instance
assert group_info.application_instance == new_application_instance
assert group_info.context_label == params["context_label"]
assert group_info.context_title == "NEW_TITLE"
assert group_info.type == "course_group"
def test_upsert_group_info_ignores_non_metadata_params(
self, db_session, svc, params
):
svc.upsert_group_info(
factories.Course(authority_provided_id=self.AUTHORITY),
params=dict(
params,
id="IGNORE ME 1",
authority_provided_id="IGNORE ME 2",
something_unrelated="IGNORED ME 3",
),
)
group_info = self.get_inserted_group_info(db_session)
assert group_info.authority_provided_id == self.AUTHORITY
assert group_info.id != "IGNORE ME 1"
@pytest.mark.usefixtures("user_is_instructor")
def test_upsert_group_info_records_instructors_with_group_info(
self, db_session, svc, pyramid_request
):
svc.upsert_group_info(
factories.Course(authority_provided_id=self.AUTHORITY), params={}
)
group_info = self.get_inserted_group_info(db_session)
assert len(group_info.instructors) == 1
assert (
group_info.instructors[0]["username"]
== pyramid_request.lti_user.h_user.username
)
assert group_info.instructors[0]["email"] == "test_email"
@pytest.mark.usefixtures("user_is_learner")
def test_upsert_group_info_doesnt_record_learners_with_group_info(
self, db_session, svc
):
svc.upsert_group_info(
factories.Course(authority_provided_id=self.AUTHORITY), params={}
)
group_info = self.get_inserted_group_info(db_session)
assert group_info.instructors == []
def get_inserted_group_info(self, db_session):
return (
db_session.query(GroupInfo)
.filter_by(authority_provided_id=self.AUTHORITY)
.one()
)
@pytest.fixture
def svc(self, pyramid_request):
return GroupInfoService(mock.sentinel.context, pyramid_request)
@pytest.fixture
def params(self):
return {
column: f"TEST_{column.upper()}"
for column in GroupInfo.columns()
if column not in ("consumer_key", "_info", "application_instance_id")
}
@pytest.fixture(
params=(True, False), ids=["GroupInfo w/o info", "GroupInfo w/info"]
)
def pre_existing_group(self, application_instance, request, params):
pre_existing_group = GroupInfo(
**dict(
params,
id=None,
authority_provided_id=self.AUTHORITY,
application_instance_id=application_instance.id,
)
)
if request.param:
pre_existing_group.info = None
return pre_existing_group
@pytest.fixture(autouse=True)
def with_existing_group_infos(self):
# Add some "noise" GroupInfo to make the tests more realistic
factories.GroupInfo.build_batch(3)
@pytest.fixture
def pyramid_request(self, pyramid_request):
pyramid_request.lti_user.email = "test_email"
return pyramid_request
| [7, 11, 13, 14, 15] |
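The tests above pin down an upsert contract: create a GroupInfo row when none matches the authority_provided_id, otherwise update it in place (even "stealing" it across application instances), copying only known metadata columns. A generic sketch of that pattern, inferred from the assertions rather than taken from the real GroupInfoService body:

from lms.models import GroupInfo

def upsert_group_info(db, course, params, metadata_columns):
    # Find-or-create keyed on the authority-provided id.
    group_info = (db.query(GroupInfo)
                  .filter_by(authority_provided_id=course.authority_provided_id)
                  .one_or_none())
    if group_info is None:
        group_info = GroupInfo(authority_provided_id=course.authority_provided_id)
        db.add(group_info)
    group_info.application_instance = course.application_instance
    for column, value in params.items():
        if column in metadata_columns:  # "id" and unrelated keys are ignored
            setattr(group_info, column, value)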
1,920 | 7726f8cc9adf15823cccdaa4ba316800bb134460 | <mask token>
class YieldPeriodicCallback(object):
<mask token>
def __init__(self, callback, callback_time, io_loop=None, faststart=False):
"""Init method it can be used like tornado periodic callback, but it has
        an extra parameter
:param faststart: if true callback will be run after application start
"""
self.callback = callback
if callback_time <= 0:
raise ValueError(
'Periodic callback must have a positive callback_time')
self.callback_time = callback_time
self.io_loop = io_loop or tornado.ioloop.IOLoop.current()
self._running = False
self._timeout = None
if faststart:
self._running = True
self._next_timeout = self.io_loop.time()
self._timeout = self.io_loop.add_timeout(self._next_timeout,
self._run)
def start(self):
"""Starts the timer"""
if self._running:
return
self._running = True
self._next_timeout = self.io_loop.time()
self._schedule_next()
def stop(self):
"""Stops the timer"""
self._running = False
if self._timeout is not None:
self.io_loop.remove_timeout(self._timeout)
self._timeout = None
<mask token>
def _schedule_next(self):
"""Schedule next callback method"""
if self._running:
current_time = self.io_loop.time()
while self._next_timeout <= current_time:
self._next_timeout += self.callback_time / 1000.0
self._timeout = self.io_loop.add_timeout(self._next_timeout,
self._run)
| <mask token>
class YieldPeriodicCallback(object):
<mask token>
def __init__(self, callback, callback_time, io_loop=None, faststart=False):
"""Init method it can be used like tornado periodic callback, but it has
        an extra parameter
:param faststart: if true callback will be run after application start
"""
self.callback = callback
if callback_time <= 0:
raise ValueError(
'Periodic callback must have a positive callback_time')
self.callback_time = callback_time
self.io_loop = io_loop or tornado.ioloop.IOLoop.current()
self._running = False
self._timeout = None
if faststart:
self._running = True
self._next_timeout = self.io_loop.time()
self._timeout = self.io_loop.add_timeout(self._next_timeout,
self._run)
def start(self):
"""Starts the timer"""
if self._running:
return
self._running = True
self._next_timeout = self.io_loop.time()
self._schedule_next()
def stop(self):
"""Stops the timer"""
self._running = False
if self._timeout is not None:
self.io_loop.remove_timeout(self._timeout)
self._timeout = None
@tornado.gen.coroutine
def _run(self):
"""Run the run method and schedule next time"""
if not self._running:
return
try:
yield self.callback()
except Exception:
logging.error('Error in periodic callback', exc_info=True)
self._schedule_next()
def _schedule_next(self):
"""Schedule next callback method"""
if self._running:
current_time = self.io_loop.time()
while self._next_timeout <= current_time:
self._next_timeout += self.callback_time / 1000.0
self._timeout = self.io_loop.add_timeout(self._next_timeout,
self._run)
| <mask token>
class YieldPeriodicCallback(object):
"""Class for better periodic call"""
def __init__(self, callback, callback_time, io_loop=None, faststart=False):
"""Init method it can be used like tornado periodic callback, but it has
        an extra parameter
:param faststart: if true callback will be run after application start
"""
self.callback = callback
if callback_time <= 0:
raise ValueError(
'Periodic callback must have a positive callback_time')
self.callback_time = callback_time
self.io_loop = io_loop or tornado.ioloop.IOLoop.current()
self._running = False
self._timeout = None
if faststart:
self._running = True
self._next_timeout = self.io_loop.time()
self._timeout = self.io_loop.add_timeout(self._next_timeout,
self._run)
def start(self):
"""Starts the timer"""
if self._running:
return
self._running = True
self._next_timeout = self.io_loop.time()
self._schedule_next()
def stop(self):
"""Stops the timer"""
self._running = False
if self._timeout is not None:
self.io_loop.remove_timeout(self._timeout)
self._timeout = None
@tornado.gen.coroutine
def _run(self):
"""Run the run method and schedule next time"""
if not self._running:
return
try:
yield self.callback()
except Exception:
logging.error('Error in periodic callback', exc_info=True)
self._schedule_next()
def _schedule_next(self):
"""Schedule next callback method"""
if self._running:
current_time = self.io_loop.time()
while self._next_timeout <= current_time:
self._next_timeout += self.callback_time / 1000.0
self._timeout = self.io_loop.add_timeout(self._next_timeout,
self._run)
| <mask token>
import tornado
import tornado.gen
import logging
class YieldPeriodicCallback(object):
"""Class for better periodic call"""
def __init__(self, callback, callback_time, io_loop=None, faststart=False):
"""Init method it can be used like tornado periodic callback, but it has
        an extra parameter
:param faststart: if true callback will be run after application start
"""
self.callback = callback
if callback_time <= 0:
raise ValueError(
'Periodic callback must have a positive callback_time')
self.callback_time = callback_time
self.io_loop = io_loop or tornado.ioloop.IOLoop.current()
self._running = False
self._timeout = None
if faststart:
self._running = True
self._next_timeout = self.io_loop.time()
self._timeout = self.io_loop.add_timeout(self._next_timeout,
self._run)
def start(self):
"""Starts the timer"""
if self._running:
return
self._running = True
self._next_timeout = self.io_loop.time()
self._schedule_next()
def stop(self):
"""Stops the timer"""
self._running = False
if self._timeout is not None:
self.io_loop.remove_timeout(self._timeout)
self._timeout = None
@tornado.gen.coroutine
def _run(self):
"""Run the run method and schedule next time"""
if not self._running:
return
try:
yield self.callback()
except Exception:
logging.error('Error in periodic callback', exc_info=True)
self._schedule_next()
def _schedule_next(self):
"""Schedule next callback method"""
if self._running:
current_time = self.io_loop.time()
while self._next_timeout <= current_time:
self._next_timeout += self.callback_time / 1000.0
self._timeout = self.io_loop.add_timeout(self._next_timeout,
self._run)
| """Class for better periodic call handling"""
import tornado
import tornado.gen
import logging
class YieldPeriodicCallback(object):
"""Class for better periodic call"""
def __init__(self, callback, callback_time, io_loop=None, faststart=False):
"""Init method it can be used like tornado periodic callback, but it has
        an extra parameter
:param faststart: if true callback will be run after application start
"""
self.callback = callback
if callback_time <= 0:
raise ValueError("Periodic callback must have a positive callback_time")
self.callback_time = callback_time
self.io_loop = io_loop or tornado.ioloop.IOLoop.current()
self._running = False
self._timeout = None
if faststart:
self._running = True
self._next_timeout = self.io_loop.time()
self._timeout = self.io_loop.add_timeout(self._next_timeout, self._run)
def start(self):
"""Starts the timer"""
if self._running:
return
self._running = True
self._next_timeout = self.io_loop.time()
self._schedule_next()
def stop(self):
"""Stops the timer"""
self._running = False
if self._timeout is not None:
self.io_loop.remove_timeout(self._timeout)
self._timeout = None
@tornado.gen.coroutine
def _run(self):
"""Run the run method and schedule next time"""
if not self._running:
return
try:
yield self.callback()
except Exception: # pylint: disable=W0703
logging.error("Error in periodic callback", exc_info=True)
self._schedule_next()
def _schedule_next(self):
"""Schedule next callback method"""
if self._running:
current_time = self.io_loop.time()
while self._next_timeout <= current_time:
self._next_timeout += self.callback_time / 1000.0
self._timeout = self.io_loop.add_timeout(self._next_timeout, self._run)
| [5, 6, 7, 8, 9] |
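YieldPeriodicCallback above takes a coroutine callback and a period in milliseconds, and _schedule_next keeps the cadence anchored to the original start time even when a run overshoots its slot. A minimal usage sketch (the poll body is hypothetical):

import tornado.gen
import tornado.ioloop

@tornado.gen.coroutine
def poll():
    # yield an async operation here; exceptions are logged, not fatal
    yield tornado.gen.moment

pcb = YieldPeriodicCallback(poll, 1000)  # run every 1000 ms
pcb.start()                              # or pass faststart=True to fire immediately
tornado.ioloop.IOLoop.current().start()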
1,921 | 0545aff80e19e47cb9e5b1941e92ff5cb109f9e6 | <mask token>
class BBoxSelector:
def __init__(self, bbox, zoom=8, resolution=10):
center = (bbox.min_y + bbox.max_y) / 2, (bbox.min_x + bbox.max_x) / 2
self.map = Map(center=center, zoom=zoom, scroll_wheel_zoom=True)
self.resolution = resolution
control = DrawControl()
control.rectangle = {'shapeOptions': {'fillColor': '#fabd14',
'color': '#fa6814', 'fillOpacity': 0.2}}
control.polyline = {}
control.circle = {}
control.circlemarker = {}
control.polygon = {}
control.edit = False
control.remove = False
control.on_draw(self._handle_draw)
self.map.add_control(control)
self.bbox = None
self.size = None
self.rectangle = None
self.add_rectangle(bbox.min_x, bbox.min_y, bbox.max_x, bbox.max_y)
def add_rectangle(self, min_x, min_y, max_x, max_y):
if self.rectangle:
self.map.remove_layer(self.rectangle)
self.rectangle = Rectangle(bounds=((min_y, min_x), (max_y, max_x)),
color='#fa6814', fill=True, fill_color='#fabd14', fill_opacity=
0.2, weight=1)
self.map.add_layer(self.rectangle)
self.bbox = BBox(((min_x, min_y), (max_x, max_y)), CRS.WGS84
).transform(CRS.POP_WEB)
size_x = abs(int((self.bbox.max_x - self.bbox.min_x) / self.resolution)
)
size_y = abs(int((self.bbox.max_y - self.bbox.min_y) / self.resolution)
)
self.size = size_x, size_y
<mask token>
<mask token>
| <mask token>
class BBoxSelector:
def __init__(self, bbox, zoom=8, resolution=10):
center = (bbox.min_y + bbox.max_y) / 2, (bbox.min_x + bbox.max_x) / 2
self.map = Map(center=center, zoom=zoom, scroll_wheel_zoom=True)
self.resolution = resolution
control = DrawControl()
control.rectangle = {'shapeOptions': {'fillColor': '#fabd14',
'color': '#fa6814', 'fillOpacity': 0.2}}
control.polyline = {}
control.circle = {}
control.circlemarker = {}
control.polygon = {}
control.edit = False
control.remove = False
control.on_draw(self._handle_draw)
self.map.add_control(control)
self.bbox = None
self.size = None
self.rectangle = None
self.add_rectangle(bbox.min_x, bbox.min_y, bbox.max_x, bbox.max_y)
def add_rectangle(self, min_x, min_y, max_x, max_y):
if self.rectangle:
self.map.remove_layer(self.rectangle)
self.rectangle = Rectangle(bounds=((min_y, min_x), (max_y, max_x)),
color='#fa6814', fill=True, fill_color='#fabd14', fill_opacity=
0.2, weight=1)
self.map.add_layer(self.rectangle)
self.bbox = BBox(((min_x, min_y), (max_x, max_y)), CRS.WGS84
).transform(CRS.POP_WEB)
size_x = abs(int((self.bbox.max_x - self.bbox.min_x) / self.resolution)
)
size_y = abs(int((self.bbox.max_y - self.bbox.min_y) / self.resolution)
)
self.size = size_x, size_y
<mask token>
def show(self):
return self.map
| <mask token>
class BBoxSelector:
def __init__(self, bbox, zoom=8, resolution=10):
center = (bbox.min_y + bbox.max_y) / 2, (bbox.min_x + bbox.max_x) / 2
self.map = Map(center=center, zoom=zoom, scroll_wheel_zoom=True)
self.resolution = resolution
control = DrawControl()
control.rectangle = {'shapeOptions': {'fillColor': '#fabd14',
'color': '#fa6814', 'fillOpacity': 0.2}}
control.polyline = {}
control.circle = {}
control.circlemarker = {}
control.polygon = {}
control.edit = False
control.remove = False
control.on_draw(self._handle_draw)
self.map.add_control(control)
self.bbox = None
self.size = None
self.rectangle = None
self.add_rectangle(bbox.min_x, bbox.min_y, bbox.max_x, bbox.max_y)
def add_rectangle(self, min_x, min_y, max_x, max_y):
if self.rectangle:
self.map.remove_layer(self.rectangle)
self.rectangle = Rectangle(bounds=((min_y, min_x), (max_y, max_x)),
color='#fa6814', fill=True, fill_color='#fabd14', fill_opacity=
0.2, weight=1)
self.map.add_layer(self.rectangle)
self.bbox = BBox(((min_x, min_y), (max_x, max_y)), CRS.WGS84
).transform(CRS.POP_WEB)
size_x = abs(int((self.bbox.max_x - self.bbox.min_x) / self.resolution)
)
size_y = abs(int((self.bbox.max_y - self.bbox.min_y) / self.resolution)
)
self.size = size_x, size_y
def _handle_draw(self, control, action, geo_json):
control.clear_rectangles()
bbox_geom = geo_json['geometry']['coordinates'][0]
min_x, min_y = bbox_geom[0]
max_x, max_y = bbox_geom[2]
self.add_rectangle(min_x, min_y, max_x, max_y)
def show(self):
return self.map
| from ipyleaflet import Map, DrawControl, Marker, Rectangle
from sentinelhub import BBox, CRS
from ipywidgets import widgets as w
class BBoxSelector:
def __init__(self, bbox, zoom=8, resolution=10):
center = (bbox.min_y + bbox.max_y) / 2, (bbox.min_x + bbox.max_x) / 2
self.map = Map(center=center, zoom=zoom, scroll_wheel_zoom=True)
self.resolution = resolution
control = DrawControl()
control.rectangle = {'shapeOptions': {'fillColor': '#fabd14',
'color': '#fa6814', 'fillOpacity': 0.2}}
control.polyline = {}
control.circle = {}
control.circlemarker = {}
control.polygon = {}
control.edit = False
control.remove = False
control.on_draw(self._handle_draw)
self.map.add_control(control)
self.bbox = None
self.size = None
self.rectangle = None
self.add_rectangle(bbox.min_x, bbox.min_y, bbox.max_x, bbox.max_y)
def add_rectangle(self, min_x, min_y, max_x, max_y):
if self.rectangle:
self.map.remove_layer(self.rectangle)
self.rectangle = Rectangle(bounds=((min_y, min_x), (max_y, max_x)),
color='#fa6814', fill=True, fill_color='#fabd14', fill_opacity=
0.2, weight=1)
self.map.add_layer(self.rectangle)
self.bbox = BBox(((min_x, min_y), (max_x, max_y)), CRS.WGS84
).transform(CRS.POP_WEB)
size_x = abs(int((self.bbox.max_x - self.bbox.min_x) / self.resolution)
)
size_y = abs(int((self.bbox.max_y - self.bbox.min_y) / self.resolution)
)
self.size = size_x, size_y
def _handle_draw(self, control, action, geo_json):
control.clear_rectangles()
bbox_geom = geo_json['geometry']['coordinates'][0]
min_x, min_y = bbox_geom[0]
max_x, max_y = bbox_geom[2]
self.add_rectangle(min_x, min_y, max_x, max_y)
def show(self):
return self.map
| from ipyleaflet import Map, DrawControl, Marker, Rectangle
from sentinelhub import BBox, CRS
from ipywidgets import widgets as w
class BBoxSelector:
def __init__(self, bbox, zoom=8, resolution=10):
center = (bbox.min_y + bbox.max_y) / 2, (bbox.min_x + bbox.max_x) / 2
self.map = Map(center=center, zoom=zoom, scroll_wheel_zoom=True)
self.resolution = resolution
control = DrawControl()
control.rectangle = {
"shapeOptions": {
"fillColor": "#fabd14",
"color": "#fa6814",
"fillOpacity": 0.2
}
}
#Disable the rest of draw options
control.polyline = {}
control.circle = {}
control.circlemarker = {}
control.polygon = {}
control.edit = False
control.remove = False
control.on_draw(self._handle_draw)
self.map.add_control(control)
self.bbox = None
self.size = None
self.rectangle = None
self.add_rectangle(bbox.min_x, bbox.min_y, bbox.max_x, bbox.max_y)
# self.out = w.Output(layout=w.Layout(width='100%', height='50px', overflow_y='scroll'))
# self.vbox = w.VBox([self.map, self.out])
def add_rectangle(self, min_x, min_y, max_x, max_y):
if self.rectangle:
self.map.remove_layer(self.rectangle)
self.rectangle = Rectangle(
bounds=((min_y, min_x), (max_y, max_x)),
color="#fa6814",
fill=True,
fill_color="#fabd14",
fill_opacity=0.2,
weight=1
)
self.map.add_layer(self.rectangle)
self.bbox = BBox(((min_x, min_y), (max_x, max_y)), CRS.WGS84).transform(CRS.POP_WEB)
# self.out.append_display_data((min_x, min_y, max_x, max_y))
size_x = abs(int((self.bbox.max_x - self.bbox.min_x) / self.resolution))
size_y = abs(int((self.bbox.max_y - self.bbox.min_y) / self.resolution))
self.size = size_x, size_y
def _handle_draw(self, control, action, geo_json):
control.clear_rectangles()
bbox_geom = geo_json['geometry']['coordinates'][0]
min_x, min_y = bbox_geom[0]
max_x, max_y = bbox_geom[2]
self.add_rectangle(min_x, min_y, max_x, max_y)
def show(self):
return self.map
# return self.vbox
| [3, 4, 5, 6, 7] |
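BBoxSelector above expects a sentinelhub BBox in WGS84: it centers the ipyleaflet map on it and converts drawn rectangles to CRS.POP_WEB. A short usage sketch for a Jupyter cell; the coordinates are hypothetical:

from sentinelhub import BBox, CRS

aoi = BBox(((14.2, 45.9), (14.6, 46.2)), CRS.WGS84)  # ((min_x, min_y), (max_x, max_y))
selector = BBoxSelector(aoi, zoom=10, resolution=10)
selector.show()  # renders the map; draw a rectangle to update the selection
# afterwards: selector.bbox is the selection in CRS.POP_WEB, and
# selector.size is its (width, height) in pixels at the given resolution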
1,922 | 1c1673b5e54bafef9f36a2583115f8135c112ab4 | <mask token>
class GraphNN(nn.Module):
def __init__(self, dim_in=7, dim_act=6, dim_h=8, dropout=0.0):
super(GraphNN, self).__init__()
self.ligand_dim = dim_in
self.dim_h = dim_h
self.dim_act = dim_act
self.model_name = 'DockRLGraphNN'
self.bond_cutoff = 3.6
self.number_updates = 16
self.dropout = dropout
self.initialize_gnn()
self.reset()
my_params = self.get_params()
self.num_params = my_params.shape[0]
def initialize_gnn(self):
self.edge_model = nn.Sequential(nn.Linear(self.ligand_dim, self.
dim_h), nn.LeakyReLU(), nn.Linear(self.dim_h, self.dim_h), nn.
LeakyReLU(), nn.Dropout(p=self.dropout), nn.Linear(self.dim_h,
self.ligand_dim + 2 * self.dim_h))
self.encoder = nn.Sequential(nn.Linear(2 * self.ligand_dim, self.
ligand_dim), ArcTan())
self.action_layer = nn.Sequential(nn.Linear(self.ligand_dim, self.
dim_h), nn.LeakyReLU(), nn.Linear(self.dim_h, self.dim_act))
def get_distance(self, node_0, node_1):
return torch.sum(torch.sqrt(torch.abs(node_0 - node_1) ** 2))
def build_graph(self, x):
self.graph = torch.zeros(x.shape[0], x.shape[0])
for ii in range(x.shape[0]):
node_ii = x[ii, 0:3]
for jj in range(x.shape[0]):
node_jj = x[jj, 0:3]
distance = self.get_distance(node_ii, node_jj)
if distance <= self.bond_cutoff:
self.graph[ii, jj] = 1.0
self.graph = self.graph * (1 - torch.eye(self.graph.shape[0]))
def forward(self, x, return_codes=False, template=None):
if type(x) != torch.Tensor:
x = torch.Tensor(x)
if template is not None:
self.build_graph(template.detach())
else:
self.build_graph(x.detach())
new_graph = torch.Tensor()
codes = torch.Tensor()
temp_input = [torch.Tensor()]
for kk in range(x.shape[0]):
for ll in range(x.shape[0]):
if self.graph[kk, ll]:
temp_input[-1] = torch.cat([temp_input[-1], self.
edge_model(x[ll]).unsqueeze(0)])
keys = temp_input[-1][:, -self.dim_h * 2:-self.dim_h]
queries = temp_input[-1][:, -self.dim_h:]
attention = torch.zeros(1, keys.shape[0])
for mm in range(keys.shape[0]):
attention[:, mm] = torch.matmul(queries[mm], keys[mm].T)
attention = torch.softmax(attention, dim=1)
my_input = torch.sum(attention.T * temp_input[-1][:, :self.
ligand_dim], dim=0)
my_input = torch.cat([x[kk], my_input])
codes = torch.cat([codes, self.encoder(my_input).unsqueeze(0)])
new_graph = torch.cat([new_graph, codes[-1].unsqueeze(0)])
if return_codes:
return codes, new_graph
else:
return new_graph
def get_actions(self, x):
if type(x) != torch.Tensor:
x = torch.Tensor(x)
my_template = x
for ii in range(self.number_updates):
x = self.forward(x, template=my_template)
x = torch.mean(x, dim=0)
x = self.action_layer(x)
return x
def get_params(self):
params = np.array([])
for param in self.edge_model.named_parameters():
params = np.append(params, param[1].detach().numpy().ravel())
for param in self.encoder.named_parameters():
params = np.append(params, param[1].detach().numpy().ravel())
for param in self.action_layer.named_parameters():
params = np.append(params, param[1].detach().numpy().ravel())
return params
def set_params(self, my_params):
if my_params is None:
my_params = self.init_mean + torch.randn(self.num_params
) * torch.sqrt(torch.tensor(self.var))
param_start = 0
for name, param in self.edge_model.named_parameters():
param_stop = param_start + reduce(lambda x, y: x * y, param.shape)
param[:] = torch.nn.Parameter(torch.Tensor(my_params[
param_start:param_stop].reshape(param.shape)))
for name, param in self.encoder.named_parameters():
param_stop = param_start + reduce(lambda x, y: x * y, param.shape)
param[:] = torch.nn.Parameter(torch.Tensor(my_params[
param_start:param_stop].reshape(param.shape)))
for name, param in self.action_layer.named_parameters():
param_stop = param_start + reduce(lambda x, y: x * y, param.shape)
param[:] = torch.nn.Parameter(torch.Tensor(my_params[
param_start:param_stop].reshape(param.shape)))
def reset(self):
pass
class MLP(nn.Module):
def __init__(self, dim_in=6, dim_act=5, dim_h=32, dropout=0.0):
super(MLP, self).__init__()
self.dim_in = dim_in
self.dim_act = dim_act
self.dim_h = 32
self.dropout = dropout
self.model_name = 'DockRLMLP'
self.init_params()
def init_params(self):
self.model = nn.Sequential(nn.Linear(self.dim_in, self.dim_h), nn.
ReLU(), nn.Linear(self.dim_h, self.dim_h), nn.ReLU(), nn.
Dropout(p=self.dropout), nn.Linear(self.dim_h, self.dim_act))
self.num_params = self.get_params().shape[0]
def forward(self, x):
x = torch.Tensor(x)
if len(x.shape) == 1:
x = x.unsqueeze(0)
x = self.model(x)
return x
def get_actions(self, x):
act = self.forward(x)
act = torch.mean(act, dim=0, keepdim=True)
return act
def get_params(self):
params = np.array([])
for param in self.model.named_parameters():
params = np.append(params, param[1].detach().numpy().ravel())
return params
def set_params(self, my_params):
if my_params is None:
my_params = self.init_mean + torch.randn(self.num_params
) * torch.sqrt(torch.tensor(self.var))
param_start = 0
for name, param in self.model.named_parameters():
param_stop = param_start + reduce(lambda x, y: x * y, param.shape)
param[:] = torch.nn.Parameter(torch.Tensor(my_params[
param_start:param_stop].reshape(param.shape)))
def reset(self):
pass
class MRNN(nn.Module):
def __init__(self, dim_in=6, dim_act=5):
super(MRNN, self).__init__()
self.dim_in = dim_in
self.dim_act = dim_act
self.dim_h = 8
self.init_params()
def init_params(self):
self.g = nn.Sequential(OrderedDict([('g', nn.Linear(self.dim_h +
self.dim_in, self.dim_h)), ('act_g', nn.Sigmoid())]))
self.j = nn.Sequential(OrderedDict([('j', nn.Linear(self.dim_h +
self.dim_in, self.dim_h)), ('act_j', nn.Tanh())]))
self.w_h2y = nn.Sequential(OrderedDict([('w_h2y', nn.Linear(self.
dim_h, self.dim_act))]))
self.cell_state = torch.zeros((1, self.dim_h))
self.num_params = self.get_params().shape[0]
def forward(self, x):
x = torch.Tensor(x)
if len(x.shape) == 1:
x = x.unsqueeze(0)
x = torch.cat((self.cell_state, x), axis=-1)
g_out = self.g(x)
j_out = (1.0 - g_out) * self.j(x)
self.cell_state = g_out * self.cell_state + j_out
y = self.w_h2y(self.cell_state)
return y
def get_action(self, x):
act = self.forward(x)
return act.detach().cpu().numpy()
def get_params(self):
params = np.array([])
for param in self.g.named_parameters():
params = np.append(params, param[1].detach().numpy().ravel())
for param in self.j.named_parameters():
params = np.append(params, param[1].detach().numpy().ravel())
for param in self.w_h2y.named_parameters():
params = np.append(params, param[1].detach().numpy().ravel())
return params
def set_params(self, my_params):
if my_params is None:
my_params = self.init_mean + torch.randn(self.num_params
) * torch.sqrt(torch.tensor(self.var))
param_start = 0
for name, param in self.g.named_parameters():
param_stop = param_start + reduce(lambda x, y: x * y, param.shape)
param[:] = torch.nn.Parameter(torch.Tensor(my_params[
param_start:param_stop].reshape(param.shape)))
for name, param in self.j.named_parameters():
param_stop = param_start + reduce(lambda x, y: x * y, param.shape)
param[:] = torch.nn.Parameter(torch.Tensor(my_params[
param_start:param_stop].reshape(param.shape)))
for name, param in self.w_h2y.named_parameters():
param_stop = param_start + reduce(lambda x, y: x * y, param.shape)
param[:] = torch.nn.Parameter(torch.Tensor(my_params[
param_start:param_stop].reshape(param.shape)))
def reset(self):
self.cell_state *= 0.0
<mask token>
| <mask token>
class Params:
<mask token>
def init_params(self):
self.params = np.random.randn(self.dim_act)
self.num_params = self.dim_act
def forward(self, obs):
return self.get_params()
def get_params(self):
return self.params
<mask token>
def reset(self):
pass
class GraphNN(nn.Module):
def __init__(self, dim_in=7, dim_act=6, dim_h=8, dropout=0.0):
super(GraphNN, self).__init__()
self.ligand_dim = dim_in
self.dim_h = dim_h
self.dim_act = dim_act
self.model_name = 'DockRLGraphNN'
self.bond_cutoff = 3.6
self.number_updates = 16
self.dropout = dropout
self.initialize_gnn()
self.reset()
my_params = self.get_params()
self.num_params = my_params.shape[0]
def initialize_gnn(self):
self.edge_model = nn.Sequential(nn.Linear(self.ligand_dim, self.
dim_h), nn.LeakyReLU(), nn.Linear(self.dim_h, self.dim_h), nn.
LeakyReLU(), nn.Dropout(p=self.dropout), nn.Linear(self.dim_h,
self.ligand_dim + 2 * self.dim_h))
self.encoder = nn.Sequential(nn.Linear(2 * self.ligand_dim, self.
ligand_dim), ArcTan())
self.action_layer = nn.Sequential(nn.Linear(self.ligand_dim, self.
dim_h), nn.LeakyReLU(), nn.Linear(self.dim_h, self.dim_act))
def get_distance(self, node_0, node_1):
return torch.sum(torch.sqrt(torch.abs(node_0 - node_1) ** 2))
def build_graph(self, x):
self.graph = torch.zeros(x.shape[0], x.shape[0])
for ii in range(x.shape[0]):
node_ii = x[ii, 0:3]
for jj in range(x.shape[0]):
node_jj = x[jj, 0:3]
distance = self.get_distance(node_ii, node_jj)
if distance <= self.bond_cutoff:
self.graph[ii, jj] = 1.0
self.graph = self.graph * (1 - torch.eye(self.graph.shape[0]))
def forward(self, x, return_codes=False, template=None):
if type(x) != torch.Tensor:
x = torch.Tensor(x)
if template is not None:
self.build_graph(template.detach())
else:
self.build_graph(x.detach())
new_graph = torch.Tensor()
codes = torch.Tensor()
temp_input = [torch.Tensor()]
for kk in range(x.shape[0]):
for ll in range(x.shape[0]):
if self.graph[kk, ll]:
temp_input[-1] = torch.cat([temp_input[-1], self.
edge_model(x[ll]).unsqueeze(0)])
keys = temp_input[-1][:, -self.dim_h * 2:-self.dim_h]
queries = temp_input[-1][:, -self.dim_h:]
attention = torch.zeros(1, keys.shape[0])
for mm in range(keys.shape[0]):
attention[:, mm] = torch.matmul(queries[mm], keys[mm].T)
attention = torch.softmax(attention, dim=1)
my_input = torch.sum(attention.T * temp_input[-1][:, :self.
ligand_dim], dim=0)
my_input = torch.cat([x[kk], my_input])
codes = torch.cat([codes, self.encoder(my_input).unsqueeze(0)])
new_graph = torch.cat([new_graph, codes[-1].unsqueeze(0)])
if return_codes:
return codes, new_graph
else:
return new_graph
def get_actions(self, x):
if type(x) != torch.Tensor:
x = torch.Tensor(x)
my_template = x
for ii in range(self.number_updates):
x = self.forward(x, template=my_template)
x = torch.mean(x, dim=0)
x = self.action_layer(x)
return x
def get_params(self):
params = np.array([])
for param in self.edge_model.named_parameters():
params = np.append(params, param[1].detach().numpy().ravel())
for param in self.encoder.named_parameters():
params = np.append(params, param[1].detach().numpy().ravel())
for param in self.action_layer.named_parameters():
params = np.append(params, param[1].detach().numpy().ravel())
return params
def set_params(self, my_params):
if my_params is None:
my_params = self.init_mean + torch.randn(self.num_params
) * torch.sqrt(torch.tensor(self.var))
param_start = 0
for name, param in self.edge_model.named_parameters():
param_stop = param_start + reduce(lambda x, y: x * y, param.shape)
param[:] = torch.nn.Parameter(torch.Tensor(my_params[
param_start:param_stop].reshape(param.shape)))
for name, param in self.encoder.named_parameters():
param_stop = param_start + reduce(lambda x, y: x * y, param.shape)
param[:] = torch.nn.Parameter(torch.Tensor(my_params[
param_start:param_stop].reshape(param.shape)))
for name, param in self.action_layer.named_parameters():
param_stop = param_start + reduce(lambda x, y: x * y, param.shape)
param[:] = torch.nn.Parameter(torch.Tensor(my_params[
param_start:param_stop].reshape(param.shape)))
def reset(self):
pass
class MLP(nn.Module):
def __init__(self, dim_in=6, dim_act=5, dim_h=32, dropout=0.0):
super(MLP, self).__init__()
self.dim_in = dim_in
self.dim_act = dim_act
self.dim_h = 32
self.dropout = dropout
self.model_name = 'DockRLMLP'
self.init_params()
def init_params(self):
self.model = nn.Sequential(nn.Linear(self.dim_in, self.dim_h), nn.
ReLU(), nn.Linear(self.dim_h, self.dim_h), nn.ReLU(), nn.
Dropout(p=self.dropout), nn.Linear(self.dim_h, self.dim_act))
self.num_params = self.get_params().shape[0]
def forward(self, x):
x = torch.Tensor(x)
if len(x.shape) == 1:
x = x.unsqueeze(0)
x = self.model(x)
return x
def get_actions(self, x):
act = self.forward(x)
act = torch.mean(act, dim=0, keepdim=True)
return act
def get_params(self):
params = np.array([])
for param in self.model.named_parameters():
params = np.append(params, param[1].detach().numpy().ravel())
return params
def set_params(self, my_params):
if my_params is None:
my_params = self.init_mean + torch.randn(self.num_params
) * torch.sqrt(torch.tensor(self.var))
param_start = 0
for name, param in self.model.named_parameters():
param_stop = param_start + reduce(lambda x, y: x * y, param.shape)
param[:] = torch.nn.Parameter(torch.Tensor(my_params[
param_start:param_stop].reshape(param.shape)))
def reset(self):
pass
class MRNN(nn.Module):
def __init__(self, dim_in=6, dim_act=5):
super(MRNN, self).__init__()
self.dim_in = dim_in
self.dim_act = dim_act
self.dim_h = 8
self.init_params()
def init_params(self):
self.g = nn.Sequential(OrderedDict([('g', nn.Linear(self.dim_h +
self.dim_in, self.dim_h)), ('act_g', nn.Sigmoid())]))
self.j = nn.Sequential(OrderedDict([('j', nn.Linear(self.dim_h +
self.dim_in, self.dim_h)), ('act_j', nn.Tanh())]))
self.w_h2y = nn.Sequential(OrderedDict([('w_h2y', nn.Linear(self.
dim_h, self.dim_act))]))
self.cell_state = torch.zeros((1, self.dim_h))
self.num_params = self.get_params().shape[0]
def forward(self, x):
x = torch.Tensor(x)
if len(x.shape) == 1:
x = x.unsqueeze(0)
x = torch.cat((self.cell_state, x), axis=-1)
g_out = self.g(x)
j_out = (1.0 - g_out) * self.j(x)
self.cell_state = g_out * self.cell_state + j_out
y = self.w_h2y(self.cell_state)
return y
def get_action(self, x):
act = self.forward(x)
return act.detach().cpu().numpy()
def get_params(self):
params = np.array([])
for param in self.g.named_parameters():
params = np.append(params, param[1].detach().numpy().ravel())
for param in self.j.named_parameters():
params = np.append(params, param[1].detach().numpy().ravel())
for param in self.w_h2y.named_parameters():
params = np.append(params, param[1].detach().numpy().ravel())
return params
def set_params(self, my_params):
if my_params is None:
my_params = self.init_mean + torch.randn(self.num_params
) * torch.sqrt(torch.tensor(self.var))
param_start = 0
for name, param in self.g.named_parameters():
param_stop = param_start + reduce(lambda x, y: x * y, param.shape)
param[:] = torch.nn.Parameter(torch.Tensor(my_params[
param_start:param_stop].reshape(param.shape)))
for name, param in self.j.named_parameters():
param_stop = param_start + reduce(lambda x, y: x * y, param.shape)
param[:] = torch.nn.Parameter(torch.Tensor(my_params[
param_start:param_stop].reshape(param.shape)))
for name, param in self.w_h2y.named_parameters():
param_stop = param_start + reduce(lambda x, y: x * y, param.shape)
param[:] = torch.nn.Parameter(torch.Tensor(my_params[
param_start:param_stop].reshape(param.shape)))
def reset(self):
self.cell_state *= 0.0
<mask token>
| <mask token>
class ArcTan(nn.Module):
<mask token>
<mask token>
class Params:
def __init__(self, dim_in=7, dim_act=6, dim_h=0, dropout=0.0):
self.dim_act = dim_act
self.dim_in = 0
self.dim_h = 0
self.dropout = 0.0
self.model_name = 'DockRLParams'
self.init_params()
self.act = ArcTan()
def init_params(self):
self.params = np.random.randn(self.dim_act)
self.num_params = self.dim_act
def forward(self, obs):
return self.get_params()
def get_params(self):
return self.params
def set_params(self, params):
assert params.shape == self.params.shape
self.params = params
def reset(self):
pass
class GraphNN(nn.Module):
def __init__(self, dim_in=7, dim_act=6, dim_h=8, dropout=0.0):
super(GraphNN, self).__init__()
self.ligand_dim = dim_in
self.dim_h = dim_h
self.dim_act = dim_act
self.model_name = 'DockRLGraphNN'
self.bond_cutoff = 3.6
self.number_updates = 16
self.dropout = dropout
self.initialize_gnn()
self.reset()
my_params = self.get_params()
self.num_params = my_params.shape[0]
def initialize_gnn(self):
self.edge_model = nn.Sequential(nn.Linear(self.ligand_dim, self.
dim_h), nn.LeakyReLU(), nn.Linear(self.dim_h, self.dim_h), nn.
LeakyReLU(), nn.Dropout(p=self.dropout), nn.Linear(self.dim_h,
self.ligand_dim + 2 * self.dim_h))
self.encoder = nn.Sequential(nn.Linear(2 * self.ligand_dim, self.
ligand_dim), ArcTan())
self.action_layer = nn.Sequential(nn.Linear(self.ligand_dim, self.
dim_h), nn.LeakyReLU(), nn.Linear(self.dim_h, self.dim_act))
def get_distance(self, node_0, node_1):
return torch.sum(torch.sqrt(torch.abs(node_0 - node_1) ** 2))
def build_graph(self, x):
self.graph = torch.zeros(x.shape[0], x.shape[0])
for ii in range(x.shape[0]):
node_ii = x[ii, 0:3]
for jj in range(x.shape[0]):
node_jj = x[jj, 0:3]
distance = self.get_distance(node_ii, node_jj)
if distance <= self.bond_cutoff:
self.graph[ii, jj] = 1.0
self.graph = self.graph * (1 - torch.eye(self.graph.shape[0]))
def forward(self, x, return_codes=False, template=None):
if type(x) != torch.Tensor:
x = torch.Tensor(x)
if template is not None:
self.build_graph(template.detach())
else:
self.build_graph(x.detach())
new_graph = torch.Tensor()
codes = torch.Tensor()
temp_input = [torch.Tensor()]
for kk in range(x.shape[0]):
for ll in range(x.shape[0]):
if self.graph[kk, ll]:
temp_input[-1] = torch.cat([temp_input[-1], self.
edge_model(x[ll]).unsqueeze(0)])
keys = temp_input[-1][:, -self.dim_h * 2:-self.dim_h]
queries = temp_input[-1][:, -self.dim_h:]
attention = torch.zeros(1, keys.shape[0])
for mm in range(keys.shape[0]):
attention[:, mm] = torch.matmul(queries[mm], keys[mm].T)
attention = torch.softmax(attention, dim=1)
my_input = torch.sum(attention.T * temp_input[-1][:, :self.
ligand_dim], dim=0)
my_input = torch.cat([x[kk], my_input])
codes = torch.cat([codes, self.encoder(my_input).unsqueeze(0)])
new_graph = torch.cat([new_graph, codes[-1].unsqueeze(0)])
if return_codes:
return codes, new_graph
else:
return new_graph
def get_actions(self, x):
if type(x) != torch.Tensor:
x = torch.Tensor(x)
my_template = x
for ii in range(self.number_updates):
x = self.forward(x, template=my_template)
x = torch.mean(x, dim=0)
x = self.action_layer(x)
return x
def get_params(self):
params = np.array([])
for param in self.edge_model.named_parameters():
params = np.append(params, param[1].detach().numpy().ravel())
for param in self.encoder.named_parameters():
params = np.append(params, param[1].detach().numpy().ravel())
for param in self.action_layer.named_parameters():
params = np.append(params, param[1].detach().numpy().ravel())
return params
def set_params(self, my_params):
if my_params is None:
my_params = self.init_mean + torch.randn(self.num_params
) * torch.sqrt(torch.tensor(self.var))
param_start = 0
for name, param in self.edge_model.named_parameters():
param_stop = param_start + reduce(lambda x, y: x * y, param.shape)
param[:] = torch.nn.Parameter(torch.Tensor(my_params[
param_start:param_stop].reshape(param.shape)))
for name, param in self.encoder.named_parameters():
param_stop = param_start + reduce(lambda x, y: x * y, param.shape)
param[:] = torch.nn.Parameter(torch.Tensor(my_params[
param_start:param_stop].reshape(param.shape)))
for name, param in self.action_layer.named_parameters():
param_stop = param_start + reduce(lambda x, y: x * y, param.shape)
param[:] = torch.nn.Parameter(torch.Tensor(my_params[
param_start:param_stop].reshape(param.shape)))
def reset(self):
pass
class MLP(nn.Module):
def __init__(self, dim_in=6, dim_act=5, dim_h=32, dropout=0.0):
super(MLP, self).__init__()
self.dim_in = dim_in
self.dim_act = dim_act
self.dim_h = 32
self.dropout = dropout
self.model_name = 'DockRLMLP'
self.init_params()
def init_params(self):
self.model = nn.Sequential(nn.Linear(self.dim_in, self.dim_h), nn.
ReLU(), nn.Linear(self.dim_h, self.dim_h), nn.ReLU(), nn.
Dropout(p=self.dropout), nn.Linear(self.dim_h, self.dim_act))
self.num_params = self.get_params().shape[0]
def forward(self, x):
x = torch.Tensor(x)
if len(x.shape) == 1:
x = x.unsqueeze(0)
x = self.model(x)
return x
def get_actions(self, x):
act = self.forward(x)
act = torch.mean(act, dim=0, keepdim=True)
return act
def get_params(self):
params = np.array([])
for param in self.model.named_parameters():
params = np.append(params, param[1].detach().numpy().ravel())
return params
def set_params(self, my_params):
if my_params is None:
my_params = self.init_mean + torch.randn(self.num_params
) * torch.sqrt(torch.tensor(self.var))
param_start = 0
for name, param in self.model.named_parameters():
param_stop = param_start + reduce(lambda x, y: x * y, param.shape)
param[:] = torch.nn.Parameter(torch.Tensor(my_params[
param_start:param_stop].reshape(param.shape)))
def reset(self):
pass
class MRNN(nn.Module):
def __init__(self, dim_in=6, dim_act=5):
super(MRNN, self).__init__()
self.dim_in = dim_in
self.dim_act = dim_act
self.dim_h = 8
self.init_params()
def init_params(self):
self.g = nn.Sequential(OrderedDict([('g', nn.Linear(self.dim_h +
self.dim_in, self.dim_h)), ('act_g', nn.Sigmoid())]))
self.j = nn.Sequential(OrderedDict([('j', nn.Linear(self.dim_h +
self.dim_in, self.dim_h)), ('act_j', nn.Tanh())]))
self.w_h2y = nn.Sequential(OrderedDict([('w_h2y', nn.Linear(self.
dim_h, self.dim_act))]))
self.cell_state = torch.zeros((1, self.dim_h))
self.num_params = self.get_params().shape[0]
def forward(self, x):
x = torch.Tensor(x)
if len(x.shape) == 1:
x = x.unsqueeze(0)
x = torch.cat((self.cell_state, x), axis=-1)
g_out = self.g(x)
j_out = (1.0 - g_out) * self.j(x)
self.cell_state = g_out * self.cell_state + j_out
y = self.w_h2y(self.cell_state)
return y
def get_action(self, x):
act = self.forward(x)
return act.detach().cpu().numpy()
def get_params(self):
params = np.array([])
for param in self.g.named_parameters():
params = np.append(params, param[1].detach().numpy().ravel())
for param in self.j.named_parameters():
params = np.append(params, param[1].detach().numpy().ravel())
for param in self.w_h2y.named_parameters():
params = np.append(params, param[1].detach().numpy().ravel())
return params
def set_params(self, my_params):
if my_params is None:
my_params = self.init_mean + torch.randn(self.num_params
) * torch.sqrt(torch.tensor(self.var))
param_start = 0
for name, param in self.g.named_parameters():
param_stop = param_start + reduce(lambda x, y: x * y, param.shape)
param[:] = torch.nn.Parameter(torch.Tensor(my_params[
param_start:param_stop].reshape(param.shape)))
for name, param in self.j.named_parameters():
param_stop = param_start + reduce(lambda x, y: x * y, param.shape)
param[:] = torch.nn.Parameter(torch.Tensor(my_params[
param_start:param_stop].reshape(param.shape)))
for name, param in self.w_h2y.named_parameters():
param_stop = param_start + reduce(lambda x, y: x * y, param.shape)
param[:] = torch.nn.Parameter(torch.Tensor(my_params[
param_start:param_stop].reshape(param.shape)))
def reset(self):
self.cell_state *= 0.0
<mask token>
| <mask token>
class ArcTan(nn.Module):
def __init__(self):
super(ArcTan, self).__init__()
<mask token>
class Params:
def __init__(self, dim_in=7, dim_act=6, dim_h=0, dropout=0.0):
self.dim_act = dim_act
self.dim_in = 0
self.dim_h = 0
self.dropout = 0.0
self.model_name = 'DockRLParams'
self.init_params()
self.act = ArcTan()
def init_params(self):
self.params = np.random.randn(self.dim_act)
self.num_params = self.dim_act
def forward(self, obs):
return self.get_params()
def get_params(self):
return self.params
def set_params(self, params):
assert params.shape == self.params.shape
self.params = params
def reset(self):
pass
class GraphNN(nn.Module):
def __init__(self, dim_in=7, dim_act=6, dim_h=8, dropout=0.0):
super(GraphNN, self).__init__()
self.ligand_dim = dim_in
self.dim_h = dim_h
self.dim_act = dim_act
self.model_name = 'DockRLGraphNN'
self.bond_cutoff = 3.6
self.number_updates = 16
self.dropout = dropout
self.initialize_gnn()
self.reset()
my_params = self.get_params()
self.num_params = my_params.shape[0]
def initialize_gnn(self):
self.edge_model = nn.Sequential(nn.Linear(self.ligand_dim, self.
dim_h), nn.LeakyReLU(), nn.Linear(self.dim_h, self.dim_h), nn.
LeakyReLU(), nn.Dropout(p=self.dropout), nn.Linear(self.dim_h,
self.ligand_dim + 2 * self.dim_h))
self.encoder = nn.Sequential(nn.Linear(2 * self.ligand_dim, self.
ligand_dim), ArcTan())
self.action_layer = nn.Sequential(nn.Linear(self.ligand_dim, self.
dim_h), nn.LeakyReLU(), nn.Linear(self.dim_h, self.dim_act))
def get_distance(self, node_0, node_1):
return torch.sum(torch.sqrt(torch.abs(node_0 - node_1) ** 2))
def build_graph(self, x):
self.graph = torch.zeros(x.shape[0], x.shape[0])
for ii in range(x.shape[0]):
node_ii = x[ii, 0:3]
for jj in range(x.shape[0]):
node_jj = x[jj, 0:3]
distance = self.get_distance(node_ii, node_jj)
if distance <= self.bond_cutoff:
self.graph[ii, jj] = 1.0
self.graph = self.graph * (1 - torch.eye(self.graph.shape[0]))
def forward(self, x, return_codes=False, template=None):
if type(x) != torch.Tensor:
x = torch.Tensor(x)
if template is not None:
self.build_graph(template.detach())
else:
self.build_graph(x.detach())
new_graph = torch.Tensor()
codes = torch.Tensor()
temp_input = [torch.Tensor()]
        for kk in range(x.shape[0]):
            temp_input.append(torch.Tensor())  # fresh neighbour buffer for node kk
for ll in range(x.shape[0]):
if self.graph[kk, ll]:
temp_input[-1] = torch.cat([temp_input[-1], self.
edge_model(x[ll]).unsqueeze(0)])
keys = temp_input[-1][:, -self.dim_h * 2:-self.dim_h]
queries = temp_input[-1][:, -self.dim_h:]
attention = torch.zeros(1, keys.shape[0])
for mm in range(keys.shape[0]):
attention[:, mm] = torch.matmul(queries[mm], keys[mm].T)
attention = torch.softmax(attention, dim=1)
my_input = torch.sum(attention.T * temp_input[-1][:, :self.
ligand_dim], dim=0)
my_input = torch.cat([x[kk], my_input])
codes = torch.cat([codes, self.encoder(my_input).unsqueeze(0)])
new_graph = torch.cat([new_graph, codes[-1].unsqueeze(0)])
if return_codes:
return codes, new_graph
else:
return new_graph
def get_actions(self, x):
if type(x) != torch.Tensor:
x = torch.Tensor(x)
my_template = x
for ii in range(self.number_updates):
x = self.forward(x, template=my_template)
x = torch.mean(x, dim=0)
x = self.action_layer(x)
return x
def get_params(self):
params = np.array([])
for param in self.edge_model.named_parameters():
params = np.append(params, param[1].detach().numpy().ravel())
for param in self.encoder.named_parameters():
params = np.append(params, param[1].detach().numpy().ravel())
for param in self.action_layer.named_parameters():
params = np.append(params, param[1].detach().numpy().ravel())
return params
def set_params(self, my_params):
if my_params is None:
my_params = self.init_mean + torch.randn(self.num_params
) * torch.sqrt(torch.tensor(self.var))
param_start = 0
        # advance the offset after each tensor so every layer reads its own slice
        for name, param in self.edge_model.named_parameters():
            param_stop = param_start + reduce(lambda x, y: x * y, param.shape)
            param.data[:] = torch.Tensor(my_params[
                param_start:param_stop].reshape(param.shape))
            param_start = param_stop
        for name, param in self.encoder.named_parameters():
            param_stop = param_start + reduce(lambda x, y: x * y, param.shape)
            param.data[:] = torch.Tensor(my_params[
                param_start:param_stop].reshape(param.shape))
            param_start = param_stop
        for name, param in self.action_layer.named_parameters():
            param_stop = param_start + reduce(lambda x, y: x * y, param.shape)
            param.data[:] = torch.Tensor(my_params[
                param_start:param_stop].reshape(param.shape))
            param_start = param_stop
def reset(self):
pass
class MLP(nn.Module):
def __init__(self, dim_in=6, dim_act=5, dim_h=32, dropout=0.0):
super(MLP, self).__init__()
self.dim_in = dim_in
self.dim_act = dim_act
        self.dim_h = dim_h
self.dropout = dropout
self.model_name = 'DockRLMLP'
self.init_params()
def init_params(self):
self.model = nn.Sequential(nn.Linear(self.dim_in, self.dim_h), nn.
ReLU(), nn.Linear(self.dim_h, self.dim_h), nn.ReLU(), nn.
Dropout(p=self.dropout), nn.Linear(self.dim_h, self.dim_act))
self.num_params = self.get_params().shape[0]
def forward(self, x):
x = torch.Tensor(x)
if len(x.shape) == 1:
x = x.unsqueeze(0)
x = self.model(x)
return x
def get_actions(self, x):
act = self.forward(x)
act = torch.mean(act, dim=0, keepdim=True)
return act
def get_params(self):
params = np.array([])
for param in self.model.named_parameters():
params = np.append(params, param[1].detach().numpy().ravel())
return params
def set_params(self, my_params):
if my_params is None:
my_params = self.init_mean + torch.randn(self.num_params
) * torch.sqrt(torch.tensor(self.var))
param_start = 0
        # advance the offset after each tensor so every layer reads its own slice
        for name, param in self.model.named_parameters():
            param_stop = param_start + reduce(lambda x, y: x * y, param.shape)
            param.data[:] = torch.Tensor(my_params[
                param_start:param_stop].reshape(param.shape))
            param_start = param_stop
def reset(self):
pass
class MRNN(nn.Module):
def __init__(self, dim_in=6, dim_act=5):
super(MRNN, self).__init__()
self.dim_in = dim_in
self.dim_act = dim_act
self.dim_h = 8
self.init_params()
def init_params(self):
self.g = nn.Sequential(OrderedDict([('g', nn.Linear(self.dim_h +
self.dim_in, self.dim_h)), ('act_g', nn.Sigmoid())]))
self.j = nn.Sequential(OrderedDict([('j', nn.Linear(self.dim_h +
self.dim_in, self.dim_h)), ('act_j', nn.Tanh())]))
self.w_h2y = nn.Sequential(OrderedDict([('w_h2y', nn.Linear(self.
dim_h, self.dim_act))]))
self.cell_state = torch.zeros((1, self.dim_h))
self.num_params = self.get_params().shape[0]
def forward(self, x):
x = torch.Tensor(x)
if len(x.shape) == 1:
x = x.unsqueeze(0)
x = torch.cat((self.cell_state, x), axis=-1)
g_out = self.g(x)
j_out = (1.0 - g_out) * self.j(x)
self.cell_state = g_out * self.cell_state + j_out
y = self.w_h2y(self.cell_state)
return y
def get_action(self, x):
act = self.forward(x)
return act.detach().cpu().numpy()
def get_params(self):
params = np.array([])
for param in self.g.named_parameters():
params = np.append(params, param[1].detach().numpy().ravel())
for param in self.j.named_parameters():
params = np.append(params, param[1].detach().numpy().ravel())
for param in self.w_h2y.named_parameters():
params = np.append(params, param[1].detach().numpy().ravel())
return params
def set_params(self, my_params):
if my_params is None:
my_params = self.init_mean + torch.randn(self.num_params
) * torch.sqrt(torch.tensor(self.var))
param_start = 0
        # advance the offset after each tensor so every layer reads its own slice
        for name, param in self.g.named_parameters():
            param_stop = param_start + reduce(lambda x, y: x * y, param.shape)
            param.data[:] = torch.Tensor(my_params[
                param_start:param_stop].reshape(param.shape))
            param_start = param_stop
        for name, param in self.j.named_parameters():
            param_stop = param_start + reduce(lambda x, y: x * y, param.shape)
            param.data[:] = torch.Tensor(my_params[
                param_start:param_stop].reshape(param.shape))
            param_start = param_stop
        for name, param in self.w_h2y.named_parameters():
            param_stop = param_start + reduce(lambda x, y: x * y, param.shape)
            param.data[:] = torch.Tensor(my_params[
                param_start:param_stop].reshape(param.shape))
            param_start = param_stop
def reset(self):
self.cell_state *= 0.0
<mask token>
| import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
from collections import OrderedDict
from functools import reduce
class ArcTan(nn.Module):
def __init__(self):
super(ArcTan,self).__init__()
def forward(self, x):
        # divide by pi/2 (~1.5708) so outputs lie in (-1, 1)
        return torch.arctan(x) / 1.5708
class Params():
def __init__(self, dim_in=7, dim_act=6, dim_h=0, dropout=0.0):
self.dim_act = dim_act
        # observations are ignored; this "model" is just a free action vector
        self.dim_in = 0
self.dim_h = 0
self.dropout = 0.0
self.model_name = "DockRLParams"
self.init_params()
self.act = ArcTan()
def init_params(self):
self.params = np.random.randn(self.dim_act)
self.num_params = self.dim_act
def forward(self, obs):
return self.get_params()
def get_params(self):
return self.params
def set_params(self, params):
assert params.shape == self.params.shape
self.params = params
def reset(self):
pass
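# Illustrative sketch (not part of the original file): every model in this
# module exposes get_params()/set_params(), so a black-box optimizer can treat
# the network as one flat parameter vector. `score_fn` below is hypothetical
# and stands in for whatever docking reward the trainer computes.
def _random_search_step(model, score_fn, sigma=0.1):
    base = model.get_params()
    base_score = score_fn(model)
    model.set_params(base + sigma * np.random.randn(base.shape[0]))
    if score_fn(model) < base_score:
        model.set_params(base)  # revert if the perturbation scored worse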
class GraphNN(nn.Module):
def __init__(self, dim_in=7, dim_act=6, dim_h=8, dropout=0.00):
super(GraphNN, self).__init__()
self.ligand_dim = dim_in
self.dim_h = dim_h
self.dim_act = dim_act
self.model_name = "DockRLGraphNN"
# This is a guesstimate based on:
# https://pymolwiki.org/index.php/Displaying_Biochemical_Properties
self.bond_cutoff = 3.6
self.number_updates = 16
self.dropout = dropout
self.initialize_gnn()
self.reset()
my_params = self.get_params()
self.num_params = my_params.shape[0]
def initialize_gnn(self):
# vertices MLP, with 8 element key and query vectors for self-attention
self.edge_model = nn.Sequential(\
nn.Linear(self.ligand_dim, self.dim_h),\
nn.LeakyReLU(),\
nn.Linear(self.dim_h, self.dim_h),\
nn.LeakyReLU(),\
nn.Dropout(p=self.dropout),\
nn.Linear(self.dim_h, self.ligand_dim + 2 * self.dim_h)
)
self.encoder = nn.Sequential(\
nn.Linear(2*self.ligand_dim, self.ligand_dim),\
ArcTan()
)
self.action_layer = nn.Sequential(\
nn.Linear(self.ligand_dim, self.dim_h),\
nn.LeakyReLU(),\
nn.Linear(self.dim_h, self.dim_act)\
)
def get_distance(self, node_0, node_1):
        # sqrt(|d|**2) == |d| per coordinate, so this sums an L1 (Manhattan) distance
        return torch.sum(torch.sqrt(torch.abs(node_0 - node_1)**2))
def build_graph(self, x):
self.graph = torch.zeros(x.shape[0],x.shape[0])
for ii in range(x.shape[0]):
node_ii = x[ii, 0:3]
for jj in range(x.shape[0]):
node_jj = x[jj, 0:3]
distance = self.get_distance(node_ii, node_jj)
if distance <= self.bond_cutoff:
self.graph[ii, jj] = 1.0
self.graph = self.graph * (1 - torch.eye(self.graph.shape[0]))
def forward(self, x, return_codes=False, template=None):
if type(x) != torch.Tensor:
x = torch.Tensor(x)
if template is not None:
self.build_graph(template.detach())
else:
self.build_graph(x.detach())
new_graph = torch.Tensor() #torch.zeros_like(x)
codes = torch.Tensor() #torch.zeros(x.shape[0], self.dim_h)
temp_input = [torch.Tensor()]
        #torch.zeros(x.shape[0], self.dim_h+8+8)
        for kk in range(x.shape[0]):
            # loop through nodes for each node, with a fresh neighbour buffer per node
            temp_input.append(torch.Tensor())
for ll in range(x.shape[0]):
if self.graph[kk,ll]:
temp_input[-1] = torch.cat([temp_input[-1],\
self.edge_model(x[ll]).unsqueeze(0)])
keys = temp_input[-1][:,-self.dim_h*2:-self.dim_h]
queries = temp_input[-1][:,-self.dim_h:]
attention = torch.zeros(1, keys.shape[0])
for mm in range(keys.shape[0]):
attention[:, mm] = torch.matmul(queries[mm], keys[mm].T)
attention = torch.softmax(attention, dim=1)
my_input = torch.sum(attention.T \
* temp_input[-1][:,:self.ligand_dim],dim=0)
my_input = torch.cat([x[kk], my_input])
#this is where the cell gating would happen (TODO)
codes = torch.cat([codes, self.encoder(my_input).unsqueeze(0)])
new_graph = torch.cat([new_graph, codes[-1].unsqueeze(0)])
#self.decoder(codes[-1]).unsqueeze(0)])
if return_codes:
return codes, new_graph
else:
return new_graph
def get_actions(self, x):
if type(x) != torch.Tensor:
x = torch.Tensor(x)
my_template = x
for ii in range(self.number_updates):
x = self.forward(x, template=my_template)
x = torch.mean(x, dim=0)
x = self.action_layer(x)
return x
def get_params(self):
params = np.array([])
for param in self.edge_model.named_parameters():
params = np.append(params, param[1].detach().numpy().ravel())
for param in self.encoder.named_parameters():
params = np.append(params, param[1].detach().numpy().ravel())
# for param in self.decoder.named_parameters():
# params = np.append(params, param[1].detach().numpy().ravel())
for param in self.action_layer.named_parameters():
params = np.append(params, param[1].detach().numpy().ravel())
return params
def set_params(self, my_params):
        if my_params is None:
            # assumes self.init_mean and self.var have been set by the trainer
            my_params = self.init_mean + torch.randn(self.num_params) * torch.sqrt(torch.tensor(self.var))
        param_start = 0
        for name, param in self.edge_model.named_parameters():
            param_stop = param_start + reduce(lambda x,y: x*y, param.shape)
            # write through .data so the in-place copy works on leaf tensors
            param.data[:] = torch.Tensor(\
                    my_params[param_start:param_stop].reshape(param.shape))
            param_start = param_stop  # advance so each tensor reads its own slice
        for name, param in self.encoder.named_parameters():
            param_stop = param_start + reduce(lambda x,y: x*y, param.shape)
            param.data[:] = torch.Tensor(\
                    my_params[param_start:param_stop].reshape(param.shape))
            param_start = param_stop
        # for name, param in self.decoder.named_parameters():
        #     param_stop = param_start + reduce(lambda x,y: x*y, param.shape)
        #     param.data[:] = torch.Tensor(\
        #             my_params[param_start:param_stop].reshape(param.shape))
        #     param_start = param_stop
        for name, param in self.action_layer.named_parameters():
            param_stop = param_start + reduce(lambda x,y: x*y, param.shape)
            param.data[:] = torch.Tensor(\
                    my_params[param_start:param_stop].reshape(param.shape))
            param_start = param_stop
def reset(self):
# initialize using gated cell states here later (maybe)
pass
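# Minimal usage sketch (my assumption about the input layout, not from the
# original file): each row of the input is one atom; columns 0-2 are x,y,z
# coordinates (used to build the bond graph via bond_cutoff) and the remaining
# columns are per-atom features.
def _demo_graphnn():
    gnn = GraphNN(dim_in=7, dim_act=6, dim_h=8)
    toy_ligand = np.random.randn(4, 7)
    toy_ligand[:, 0:3] *= 0.1  # keep all atoms well inside bond_cutoff
    return gnn.get_actions(toy_ligand)  # tensor of shape (dim_act,)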
class MLP(nn.Module):
def __init__(self, dim_in=6, dim_act=5, dim_h=32, dropout=0.0):
super(MLP, self).__init__()
self.dim_in = dim_in
self.dim_act = dim_act
        self.dim_h = dim_h
self.dropout = dropout
self.model_name = "DockRLMLP"
self.init_params()
def init_params(self):
self.model = nn.Sequential(\
nn.Linear(self.dim_in, self.dim_h),\
nn.ReLU(),\
nn.Linear(self.dim_h, self.dim_h),\
nn.ReLU(),\
nn.Dropout(p=self.dropout),\
nn.Linear(self.dim_h, self.dim_act)\
)
self.num_params = self.get_params().shape[0]
def forward(self, x):
x = torch.Tensor(x)
if len(x.shape) == 1:
x = x.unsqueeze(0)
x = self.model(x)
return x
def get_actions(self, x):
act = self.forward(x)
act = torch.mean(act, dim=0, keepdim=True)
return act
def get_params(self):
params = np.array([])
for param in self.model.named_parameters():
params = np.append(params, param[1].detach().numpy().ravel())
return params
def set_params(self, my_params):
if my_params is None:
my_params = self.init_mean + torch.randn(self.num_params) * torch.sqrt(torch.tensor(self.var))
param_start = 0
        for name, param in self.model.named_parameters():
            param_stop = param_start + reduce(lambda x,y: x*y, param.shape)
            param.data[:] = torch.Tensor(\
                    my_params[param_start:param_stop].reshape(param.shape))
            param_start = param_stop  # advance so each tensor reads its own slice
def reset(self):
pass
class MRNN(nn.Module):
def __init__(self, dim_in=6, dim_act=5):
super(MRNN, self).__init__()
self.dim_in = dim_in
self.dim_act = dim_act
self.dim_h = 8
self.init_params()
def init_params(self):
self.g = nn.Sequential(OrderedDict([\
("g", nn.Linear(self.dim_h+self.dim_in, self.dim_h)),\
("act_g", nn.Sigmoid())]))
self.j = nn.Sequential(OrderedDict([\
("j", nn.Linear(self.dim_h+self.dim_in, self.dim_h)),\
("act_j", nn.Tanh())]))
self.w_h2y = nn.Sequential(OrderedDict([\
("w_h2y", nn.Linear(self.dim_h, self.dim_act))]))
self.cell_state = torch.zeros((1,self.dim_h))
self.num_params = self.get_params().shape[0]
def forward(self, x):
x = torch.Tensor(x)
if len(x.shape) == 1:
x = x.unsqueeze(0)
x = torch.cat((self.cell_state, x), axis=-1)
g_out = self.g(x)
j_out = (1.0 - g_out) * self.j(x)
self.cell_state = g_out * self.cell_state + j_out
y = self.w_h2y(self.cell_state)
return y
def get_action(self, x):
act = self.forward(x)
return act.detach().cpu().numpy()
def get_params(self):
params = np.array([])
for param in self.g.named_parameters():
params = np.append(params, param[1].detach().numpy().ravel())
for param in self.j.named_parameters():
params = np.append(params, param[1].detach().numpy().ravel())
for param in self.w_h2y.named_parameters():
params = np.append(params, param[1].detach().numpy().ravel())
return params
def set_params(self, my_params):
if my_params is None:
my_params = self.init_mean + torch.randn(self.num_params) * torch.sqrt(torch.tensor(self.var))
param_start = 0
        for name, param in self.g.named_parameters():
            param_stop = param_start + reduce(lambda x,y: x*y, param.shape)
            param.data[:] = torch.Tensor(\
                    my_params[param_start:param_stop].reshape(param.shape))
            param_start = param_stop  # advance so each tensor reads its own slice
        for name, param in self.j.named_parameters():
            param_stop = param_start + reduce(lambda x,y: x*y, param.shape)
            param.data[:] = torch.Tensor(\
                    my_params[param_start:param_stop].reshape(param.shape))
            param_start = param_stop
        for name, param in self.w_h2y.named_parameters():
            param_stop = param_start + reduce(lambda x,y: x*y, param.shape)
            param.data[:] = torch.Tensor(\
                    my_params[param_start:param_stop].reshape(param.shape))
            param_start = param_stop
def reset(self):
self.cell_state *= 0.
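# The cell above is a minimal gated RNN (an MGU-style unit):
#   g_t = sigmoid(W_g [h_{t-1}, x_t])                     -- gate
#   h_t = g_t * h_{t-1} + (1 - g_t) * tanh(W_j [h_{t-1}, x_t])
#   y_t = W_y h_t
# Illustrative check (my addition, not from the original file): the hidden
# state persists between calls until reset() zeroes it.
def _demo_mrnn_state():
    rnn = MRNN(dim_in=6, dim_act=5)
    rnn.forward(np.random.randn(1, 6))      # perturb the hidden state
    y_with_state = rnn.forward(np.zeros((1, 6)))
    rnn.reset()
    y_fresh = rnn.forward(np.zeros((1, 6)))
    return y_with_state, y_fresh            # differ unless the state was zero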
if __name__ == "__main__":
mrnn = MRNN()
temp = mrnn.forward(np.random.randn(1,6))
print(temp)
| [
26,
31,
34,
35,
39
] |
1,923 | 8a5ade450485f9114fa91c00c7588535ccbaf0e6 | <mask token>
| <mask token>
with open('student_registrations.json', 'w') as f:
f.write(e)
| <mask token>
s1 = ex1.load_course_registrations('data.txt')
s1 = map(asdict, s1)
e = json.dumps(list(s1))
with open('student_registrations.json', 'w') as f:
f.write(e)
| <mask token>
from dataclasses import asdict
from json import dumps
from dataclasses import dataclass
from typing import List, Dict
import json
import ex1
s1 = ex1.load_course_registrations('data.txt')
s1 = map(asdict, s1)
e = json.dumps(list(s1))
with open('student_registrations.json', 'w') as f:
f.write(e)
| '''Lab01 ex4
E/16/319 Rathnayake R.P.V.N'''
from dataclasses import asdict
from json import dumps
from dataclasses import dataclass
from typing import List, Dict
import json
import ex1 #import ex1 to get the load_course_registrations function
s1=ex1.load_course_registrations("data.txt") #load the list of Student objects into s1
s1=(map(asdict,s1)) #apply asdict() to each Student using the map function
e=json.dumps(list(s1)) #convert into a JSON string
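# Round-trip sketch (my addition; the Student fields live in ex1.py): asdict()
# turned each dataclass into a plain dict, so the file written below can be
# read back with json.load as a list of dicts.
def load_back(path="student_registrations.json"):
    with open(path) as f:
        return json.load(f)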
#print(e)
with open("student_registrations.json","w") as f: #open json file and write on it
f.write(e) | [
0,
1,
2,
3,
4
] |
1,924 | 294229849dcfac8d4afeab79dae3c652c853fc47 | <mask token>
def mergeSort(original_list):
return subSort(original_list)
def subSort(sub_list):
if len(sub_list) < 2:
return sub_list
index = len(sub_list) // 2
left_list = sub_list[0:index]
right_list = sub_list[index:len(sub_list)]
left_list = subSort(left_list)
right_list = subSort(right_list)
return merge(left_list, right_list)
def merge(left_list, right_list):
sorted_list = []
while len(left_list) != 0 or len(right_list) != 0:
if len(left_list) == 0:
sorted_list.append(right_list[0])
del right_list[0]
continue
if len(right_list) == 0:
sorted_list.append(left_list[0])
del left_list[0]
continue
left = left_list[0]
right = right_list[0]
if left > right:
sorted_list.append(right)
del right_list[0]
elif right > left:
sorted_list.append(left)
del left_list[0]
else:
sorted_list.append(right)
sorted_list.append(left)
del left_list[0]
del right_list[0]
return sorted_list
def random_list(number):
original_list = []
    for x in range(number):
original_list.append(randrange(MIN, MAX + 1))
return original_list
<mask token>
| <mask token>
def mergeSort(original_list):
return subSort(original_list)
def subSort(sub_list):
if len(sub_list) < 2:
return sub_list
index = len(sub_list) // 2
left_list = sub_list[0:index]
right_list = sub_list[index:len(sub_list)]
left_list = subSort(left_list)
right_list = subSort(right_list)
return merge(left_list, right_list)
def merge(left_list, right_list):
sorted_list = []
while len(left_list) != 0 or len(right_list) != 0:
if len(left_list) == 0:
sorted_list.append(right_list[0])
del right_list[0]
continue
if len(right_list) == 0:
sorted_list.append(left_list[0])
del left_list[0]
continue
left = left_list[0]
right = right_list[0]
if left > right:
sorted_list.append(right)
del right_list[0]
elif right > left:
sorted_list.append(left)
del left_list[0]
else:
sorted_list.append(right)
sorted_list.append(left)
del left_list[0]
del right_list[0]
return sorted_list
def random_list(number):
original_list = []
    for x in range(number):
original_list.append(randrange(MIN, MAX + 1))
return original_list
<mask token>
print('Original: ' + str(original_list))
print('Sorted: ' + str(mergeSort(original_list)))
| <mask token>
def mergeSort(original_list):
return subSort(original_list)
def subSort(sub_list):
if len(sub_list) < 2:
return sub_list
index = len(sub_list) // 2
left_list = sub_list[0:index]
right_list = sub_list[index:len(sub_list)]
left_list = subSort(left_list)
right_list = subSort(right_list)
return merge(left_list, right_list)
def merge(left_list, right_list):
sorted_list = []
while len(left_list) != 0 or len(right_list) != 0:
if len(left_list) == 0:
sorted_list.append(right_list[0])
del right_list[0]
continue
if len(right_list) == 0:
sorted_list.append(left_list[0])
del left_list[0]
continue
left = left_list[0]
right = right_list[0]
if left > right:
sorted_list.append(right)
del right_list[0]
elif right > left:
sorted_list.append(left)
del left_list[0]
else:
sorted_list.append(right)
sorted_list.append(left)
del left_list[0]
del right_list[0]
return sorted_list
def random_list(number):
original_list = []
    for x in range(number):
original_list.append(randrange(MIN, MAX + 1))
return original_list
MIN = 1
MAX = 100
COUNT = 20
original_list = random_list(COUNT)
print('Original: ' + str(original_list))
print('Sorted: ' + str(mergeSort(original_list)))
| <mask token>
from random import randrange
def mergeSort(original_list):
return subSort(original_list)
def subSort(sub_list):
if len(sub_list) < 2:
return sub_list
index = len(sub_list) // 2
left_list = sub_list[0:index]
right_list = sub_list[index:len(sub_list)]
left_list = subSort(left_list)
right_list = subSort(right_list)
return merge(left_list, right_list)
def merge(left_list, right_list):
sorted_list = []
while len(left_list) != 0 or len(right_list) != 0:
if len(left_list) == 0:
sorted_list.append(right_list[0])
del right_list[0]
continue
if len(right_list) == 0:
sorted_list.append(left_list[0])
del left_list[0]
continue
left = left_list[0]
right = right_list[0]
if left > right:
sorted_list.append(right)
del right_list[0]
elif right > left:
sorted_list.append(left)
del left_list[0]
else:
sorted_list.append(right)
sorted_list.append(left)
del left_list[0]
del right_list[0]
return sorted_list
def random_list(number):
original_list = []
    for x in range(number):
original_list.append(randrange(MIN, MAX + 1))
return original_list
MIN = 1
MAX = 100
COUNT = 20
original_list = random_list(COUNT)
print('Original: ' + str(original_list))
print('Sorted: ' + str(mergeSort(original_list)))
| '''
'Daniel Moulton
'3/24/15
'Implementation of the mergesort sorting algorithm in python.
'Utilizes a series of random numbers as the initial input
'Uses a top down approach to recursively sort the original list and output the final result.
'''
from random import randrange
def mergeSort(original_list):
    #initializes the recursive sort and returns the final sorted list
return subSort(original_list)
def subSort(sub_list):
#sorts the list recursively, splitting into left and right lists
#then calling the merge function to merge back together
#returns the list if there is only one element
if (len(sub_list)<2):
return sub_list
#uses built in integer division in python to select the middle value rounded down
index = len(sub_list)//2
left_list = sub_list[0:index]
right_list = sub_list[index:len(sub_list)]
left_list = subSort(left_list)
right_list = subSort(right_list)
return merge(left_list, right_list)
def merge(left_list, right_list):
#merges the split lists back together while sorting them
sorted_list = []
while (len(left_list)!=0 or len(right_list)!=0):
if (len(left_list)==0):
sorted_list.append(right_list[0])
del right_list[0]
continue
if (len(right_list)==0):
sorted_list.append(left_list[0])
del left_list[0]
continue
left = left_list[0]
right = right_list[0]
if (left > right):
sorted_list.append(right)
del right_list[0]
elif (right > left):
sorted_list.append(left)
del left_list[0]
else:
sorted_list.append(right)
sorted_list.append(left)
del left_list[0]
del right_list[0]
return sorted_list
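#sanity check (my addition): the merge sort should agree with Python's
#built-in sorted(), including when the input contains duplicates
def check_merge_sort():
    sample = [5, 1, 4, 1, 3]
    assert mergeSort(sample) == sorted(sample)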
def random_list(number):
original_list = []
    for x in range(number):
original_list.append(randrange(MIN, MAX+1))
return original_list
#input list
MIN = 1
MAX = 100
COUNT = 20
original_list = random_list(COUNT)
print('Original: ' + str(original_list))
print('Sorted: ' + str(mergeSort(original_list)))
| [
4,
5,
6,
7,
8
] |
1,925 | 2f193cb1eaf7b5e99d20025716a248144af90b92 | <mask token>
class GeneralizedQSamplingModel(OdfModel, Cache):
def __init__(self, gtab, method='gqi2', sampling_length=1.2,
normalize_peaks=False):
""" Generalized Q-Sampling Imaging [1]_
This model has the same assumptions as the DSI method i.e. Cartesian
grid sampling in q-space and fast gradient switching.
        Implements equation 2.14 from [2]_ for standard GQI and equation 2.16
from [2]_ for GQI2. You can think of GQI2 as an analytical solution of
the DSI ODF.
Parameters
----------
gtab : object,
GradientTable
method : str,
'standard' or 'gqi2'
sampling_length : float,
diffusion sampling length (lambda in eq. 2.14 and 2.16)
References
----------
.. [1] Yeh F-C et al., "Generalized Q-Sampling Imaging", IEEE TMI, 2010
.. [2] Garyfallidis E, "Towards an accurate brain tractography", PhD
thesis, University of Cambridge, 2012.
Notes
-----
As of version 0.9, range of the sampling length in GQI2 has changed
to match the same scale used in the 'standard' method [1]_. This
means that the value of `sampling_length` should be approximately
1 - 1.3 (see [1]_, pg. 1628).
Examples
--------
Here we create an example where we provide the data, a gradient table
and a reconstruction sphere and calculate the ODF for the first
voxel in the data.
>>> from dipy.data import dsi_voxels
>>> data, gtab = dsi_voxels()
>>> from dipy.core.subdivide_octahedron import create_unit_sphere
>>> sphere = create_unit_sphere(5)
>>> from dipy.reconst.gqi import GeneralizedQSamplingModel
>>> gq = GeneralizedQSamplingModel(gtab, 'gqi2', 1.1)
>>> voxel_signal = data[0, 0, 0]
>>> odf = gq.fit(voxel_signal).odf(sphere)
See Also
--------
dipy.reconst.dsi.DiffusionSpectrumModel
"""
OdfModel.__init__(self, gtab)
self.method = method
self.Lambda = sampling_length
self.normalize_peaks = normalize_peaks
scaling = np.sqrt(self.gtab.bvals * 0.01506)
tmp = np.tile(scaling, (3, 1))
gradsT = self.gtab.bvecs.T
b_vector = gradsT * tmp
self.b_vector = b_vector.T
@multi_voxel_fit
def fit(self, data):
return GeneralizedQSamplingFit(self, data)
class GeneralizedQSamplingFit(OdfFit):
def __init__(self, model, data):
""" Calculates PDF and ODF for a single voxel
Parameters
----------
model : object,
DiffusionSpectrumModel
data : 1d ndarray,
signal values
"""
OdfFit.__init__(self, model, data)
self._gfa = None
self.npeaks = 5
self._peak_values = None
self._peak_indices = None
self._qa = None
def odf(self, sphere):
""" Calculates the discrete ODF for a given discrete sphere.
"""
self.gqi_vector = self.model.cache_get('gqi_vector', key=sphere)
if self.gqi_vector is None:
if self.model.method == 'gqi2':
H = squared_radial_component
self.gqi_vector = np.real(H(np.dot(self.model.b_vector,
sphere.vertices.T) * self.model.Lambda))
if self.model.method == 'standard':
self.gqi_vector = np.real(np.sinc(np.dot(self.model.
b_vector, sphere.vertices.T) * self.model.Lambda / np.pi))
self.model.cache_set('gqi_vector', sphere, self.gqi_vector)
return np.dot(self.data, self.gqi_vector)
<mask token>
def npa(self, odf, width=5):
""" non-parametric anisotropy
Nimmo-Smith et al. ISMRM 2011
"""
t0, t1, t2 = triple_odf_maxima(self.odf_vertices, odf, width)
psi0 = t0[1] ** 2
psi1 = t1[1] ** 2
psi2 = t2[1] ** 2
npa = np.sqrt((psi0 - psi1) ** 2 + (psi1 - psi2) ** 2 + (psi2 - psi0) ** 2
) / np.sqrt(2 * (psi0 ** 2 + psi1 ** 2 + psi2 ** 2))
return t0, t1, t2, npa
<mask token>
def polar_zone_vertices(vertices, pole, width=5):
"""
finds the 'vertices' in the equatorial band around
the 'pole' of radius 'width' degrees
"""
return [i for i, v in enumerate(vertices) if np.abs(np.dot(v, pole)) >
np.abs(np.cos(np.pi * width / 180))]
def upper_hemi_map(v):
"""
maps a 3-vector into the z-upper hemisphere
"""
return np.sign(v[2]) * v
def equatorial_maximum(vertices, odf, pole, width):
eqvert = equatorial_zone_vertices(vertices, pole, width)
if len(eqvert) == 0:
print('empty equatorial band at %s pole with width %f' % (np.
array_str(pole), width))
return None, None
eqvals = [odf[i] for i in eqvert]
eqargmax = np.argmax(eqvals)
eqvertmax = eqvert[eqargmax]
eqvalmax = eqvals[eqargmax]
return eqvertmax, eqvalmax
def patch_vertices(vertices, pole, width):
"""
find 'vertices' within the cone of 'width' degrees around 'pole'
"""
return [i for i, v in enumerate(vertices) if np.abs(np.dot(v, pole)) >
np.abs(np.cos(np.pi * width / 180))]
def patch_maximum(vertices, odf, pole, width):
eqvert = patch_vertices(vertices, pole, width)
if len(eqvert) == 0:
        print('empty cone around pole %s with width %f' % (np.
            array_str(pole), width))
        return None, None
eqvals = [odf[i] for i in eqvert]
eqargmax = np.argmax(eqvals)
eqvertmax = eqvert[eqargmax]
eqvalmax = eqvals[eqargmax]
return eqvertmax, eqvalmax
def odf_sum(odf):
return np.sum(odf)
<mask token>
def triple_odf_maxima(vertices, odf, width):
indmax1 = np.argmax([odf[i] for i, v in enumerate(vertices)])
odfmax1 = odf[indmax1]
pole = vertices[indmax1]
eqvert = equatorial_zone_vertices(vertices, pole, width)
indmax2, odfmax2 = equatorial_maximum(vertices, odf, pole, width)
indmax3 = eqvert[np.argmin([np.abs(np.dot(vertices[indmax2], vertices[p
])) for p in eqvert])]
odfmax3 = odf[indmax3]
"""
cross12 = np.cross(vertices[indmax1],vertices[indmax2])
cross12 = cross12/np.sqrt(np.sum(cross12**2))
indmax3, odfmax3 = patch_maximum(vertices, odf, cross12, 2*width)
"""
return [(indmax1, odfmax1), (indmax2, odfmax2), (indmax3, odfmax3)]
| <mask token>
class GeneralizedQSamplingModel(OdfModel, Cache):
def __init__(self, gtab, method='gqi2', sampling_length=1.2,
normalize_peaks=False):
""" Generalized Q-Sampling Imaging [1]_
This model has the same assumptions as the DSI method i.e. Cartesian
grid sampling in q-space and fast gradient switching.
        Implements equation 2.14 from [2]_ for standard GQI and equation 2.16
from [2]_ for GQI2. You can think of GQI2 as an analytical solution of
the DSI ODF.
Parameters
----------
gtab : object,
GradientTable
method : str,
'standard' or 'gqi2'
sampling_length : float,
diffusion sampling length (lambda in eq. 2.14 and 2.16)
References
----------
.. [1] Yeh F-C et al., "Generalized Q-Sampling Imaging", IEEE TMI, 2010
.. [2] Garyfallidis E, "Towards an accurate brain tractography", PhD
thesis, University of Cambridge, 2012.
Notes
-----
As of version 0.9, range of the sampling length in GQI2 has changed
to match the same scale used in the 'standard' method [1]_. This
means that the value of `sampling_length` should be approximately
1 - 1.3 (see [1]_, pg. 1628).
Examples
--------
Here we create an example where we provide the data, a gradient table
and a reconstruction sphere and calculate the ODF for the first
voxel in the data.
>>> from dipy.data import dsi_voxels
>>> data, gtab = dsi_voxels()
>>> from dipy.core.subdivide_octahedron import create_unit_sphere
>>> sphere = create_unit_sphere(5)
>>> from dipy.reconst.gqi import GeneralizedQSamplingModel
>>> gq = GeneralizedQSamplingModel(gtab, 'gqi2', 1.1)
>>> voxel_signal = data[0, 0, 0]
>>> odf = gq.fit(voxel_signal).odf(sphere)
See Also
--------
dipy.reconst.dsi.DiffusionSpectrumModel
"""
OdfModel.__init__(self, gtab)
self.method = method
self.Lambda = sampling_length
self.normalize_peaks = normalize_peaks
scaling = np.sqrt(self.gtab.bvals * 0.01506)
tmp = np.tile(scaling, (3, 1))
gradsT = self.gtab.bvecs.T
b_vector = gradsT * tmp
self.b_vector = b_vector.T
@multi_voxel_fit
def fit(self, data):
return GeneralizedQSamplingFit(self, data)
class GeneralizedQSamplingFit(OdfFit):
def __init__(self, model, data):
""" Calculates PDF and ODF for a single voxel
Parameters
----------
model : object,
DiffusionSpectrumModel
data : 1d ndarray,
signal values
"""
OdfFit.__init__(self, model, data)
self._gfa = None
self.npeaks = 5
self._peak_values = None
self._peak_indices = None
self._qa = None
def odf(self, sphere):
""" Calculates the discrete ODF for a given discrete sphere.
"""
self.gqi_vector = self.model.cache_get('gqi_vector', key=sphere)
if self.gqi_vector is None:
if self.model.method == 'gqi2':
H = squared_radial_component
self.gqi_vector = np.real(H(np.dot(self.model.b_vector,
sphere.vertices.T) * self.model.Lambda))
if self.model.method == 'standard':
self.gqi_vector = np.real(np.sinc(np.dot(self.model.
b_vector, sphere.vertices.T) * self.model.Lambda / np.pi))
self.model.cache_set('gqi_vector', sphere, self.gqi_vector)
return np.dot(self.data, self.gqi_vector)
def normalize_qa(qa, max_qa=None):
""" Normalize quantitative anisotropy.
Used mostly with GQI rather than GQI2.
Parameters
----------
qa : array, shape (X, Y, Z, N)
where N is the maximum number of peaks stored
max_qa : float,
        maximum qa value. Usually found in the CSF (cerebrospinal fluid).
    Returns
    -------
    nqa : array, shape (X, Y, Z, N)
normalized quantitative anisotropy
Notes
-----
Normalized quantitative anisotropy has the very useful property
to be very small near gray matter and background areas. Therefore,
it can be used to mask out white matter areas.
"""
if max_qa is None:
return qa / qa.max()
return qa / max_qa
<mask token>
def npa(self, odf, width=5):
""" non-parametric anisotropy
Nimmo-Smith et al. ISMRM 2011
"""
t0, t1, t2 = triple_odf_maxima(self.odf_vertices, odf, width)
psi0 = t0[1] ** 2
psi1 = t1[1] ** 2
psi2 = t2[1] ** 2
npa = np.sqrt((psi0 - psi1) ** 2 + (psi1 - psi2) ** 2 + (psi2 - psi0) ** 2
) / np.sqrt(2 * (psi0 ** 2 + psi1 ** 2 + psi2 ** 2))
return t0, t1, t2, npa
<mask token>
def polar_zone_vertices(vertices, pole, width=5):
"""
finds the 'vertices' in the equatorial band around
the 'pole' of radius 'width' degrees
"""
return [i for i, v in enumerate(vertices) if np.abs(np.dot(v, pole)) >
np.abs(np.cos(np.pi * width / 180))]
def upper_hemi_map(v):
"""
maps a 3-vector into the z-upper hemisphere
"""
return np.sign(v[2]) * v
def equatorial_maximum(vertices, odf, pole, width):
eqvert = equatorial_zone_vertices(vertices, pole, width)
if len(eqvert) == 0:
print('empty equatorial band at %s pole with width %f' % (np.
array_str(pole), width))
return None, None
eqvals = [odf[i] for i in eqvert]
eqargmax = np.argmax(eqvals)
eqvertmax = eqvert[eqargmax]
eqvalmax = eqvals[eqargmax]
return eqvertmax, eqvalmax
def patch_vertices(vertices, pole, width):
"""
find 'vertices' within the cone of 'width' degrees around 'pole'
"""
return [i for i, v in enumerate(vertices) if np.abs(np.dot(v, pole)) >
np.abs(np.cos(np.pi * width / 180))]
def patch_maximum(vertices, odf, pole, width):
eqvert = patch_vertices(vertices, pole, width)
if len(eqvert) == 0:
        print('empty cone around pole %s with width %f' % (np.
            array_str(pole), width))
        return None, None
eqvals = [odf[i] for i in eqvert]
eqargmax = np.argmax(eqvals)
eqvertmax = eqvert[eqargmax]
eqvalmax = eqvals[eqargmax]
return eqvertmax, eqvalmax
def odf_sum(odf):
return np.sum(odf)
def patch_sum(vertices, odf, pole, width):
eqvert = patch_vertices(vertices, pole, width)
if len(eqvert) == 0:
        print('empty cone around pole %s with width %f' % (np.
            array_str(pole), width))
        return None
return np.sum([odf[i] for i in eqvert])
def triple_odf_maxima(vertices, odf, width):
indmax1 = np.argmax([odf[i] for i, v in enumerate(vertices)])
odfmax1 = odf[indmax1]
pole = vertices[indmax1]
eqvert = equatorial_zone_vertices(vertices, pole, width)
indmax2, odfmax2 = equatorial_maximum(vertices, odf, pole, width)
indmax3 = eqvert[np.argmin([np.abs(np.dot(vertices[indmax2], vertices[p
])) for p in eqvert])]
odfmax3 = odf[indmax3]
"""
cross12 = np.cross(vertices[indmax1],vertices[indmax2])
cross12 = cross12/np.sqrt(np.sum(cross12**2))
indmax3, odfmax3 = patch_maximum(vertices, odf, cross12, 2*width)
"""
return [(indmax1, odfmax1), (indmax2, odfmax2), (indmax3, odfmax3)]
| <mask token>
class GeneralizedQSamplingModel(OdfModel, Cache):
def __init__(self, gtab, method='gqi2', sampling_length=1.2,
normalize_peaks=False):
""" Generalized Q-Sampling Imaging [1]_
This model has the same assumptions as the DSI method i.e. Cartesian
grid sampling in q-space and fast gradient switching.
        Implements equation 2.14 from [2]_ for standard GQI and equation 2.16
from [2]_ for GQI2. You can think of GQI2 as an analytical solution of
the DSI ODF.
Parameters
----------
gtab : object,
GradientTable
method : str,
'standard' or 'gqi2'
sampling_length : float,
diffusion sampling length (lambda in eq. 2.14 and 2.16)
References
----------
.. [1] Yeh F-C et al., "Generalized Q-Sampling Imaging", IEEE TMI, 2010
.. [2] Garyfallidis E, "Towards an accurate brain tractography", PhD
thesis, University of Cambridge, 2012.
Notes
-----
As of version 0.9, range of the sampling length in GQI2 has changed
to match the same scale used in the 'standard' method [1]_. This
means that the value of `sampling_length` should be approximately
1 - 1.3 (see [1]_, pg. 1628).
Examples
--------
Here we create an example where we provide the data, a gradient table
and a reconstruction sphere and calculate the ODF for the first
voxel in the data.
>>> from dipy.data import dsi_voxels
>>> data, gtab = dsi_voxels()
>>> from dipy.core.subdivide_octahedron import create_unit_sphere
>>> sphere = create_unit_sphere(5)
>>> from dipy.reconst.gqi import GeneralizedQSamplingModel
>>> gq = GeneralizedQSamplingModel(gtab, 'gqi2', 1.1)
>>> voxel_signal = data[0, 0, 0]
>>> odf = gq.fit(voxel_signal).odf(sphere)
See Also
--------
dipy.reconst.dsi.DiffusionSpectrumModel
"""
OdfModel.__init__(self, gtab)
self.method = method
self.Lambda = sampling_length
self.normalize_peaks = normalize_peaks
scaling = np.sqrt(self.gtab.bvals * 0.01506)
tmp = np.tile(scaling, (3, 1))
gradsT = self.gtab.bvecs.T
b_vector = gradsT * tmp
self.b_vector = b_vector.T
@multi_voxel_fit
def fit(self, data):
return GeneralizedQSamplingFit(self, data)
class GeneralizedQSamplingFit(OdfFit):
def __init__(self, model, data):
""" Calculates PDF and ODF for a single voxel
Parameters
----------
model : object,
DiffusionSpectrumModel
data : 1d ndarray,
signal values
"""
OdfFit.__init__(self, model, data)
self._gfa = None
self.npeaks = 5
self._peak_values = None
self._peak_indices = None
self._qa = None
def odf(self, sphere):
""" Calculates the discrete ODF for a given discrete sphere.
"""
self.gqi_vector = self.model.cache_get('gqi_vector', key=sphere)
if self.gqi_vector is None:
if self.model.method == 'gqi2':
H = squared_radial_component
self.gqi_vector = np.real(H(np.dot(self.model.b_vector,
sphere.vertices.T) * self.model.Lambda))
if self.model.method == 'standard':
self.gqi_vector = np.real(np.sinc(np.dot(self.model.
b_vector, sphere.vertices.T) * self.model.Lambda / np.pi))
self.model.cache_set('gqi_vector', sphere, self.gqi_vector)
return np.dot(self.data, self.gqi_vector)
def normalize_qa(qa, max_qa=None):
""" Normalize quantitative anisotropy.
Used mostly with GQI rather than GQI2.
Parameters
----------
qa : array, shape (X, Y, Z, N)
where N is the maximum number of peaks stored
max_qa : float,
        maximum qa value. Usually found in the CSF (cerebrospinal fluid).
    Returns
    -------
    nqa : array, shape (X, Y, Z, N)
normalized quantitative anisotropy
Notes
-----
Normalized quantitative anisotropy has the very useful property
to be very small near gray matter and background areas. Therefore,
it can be used to mask out white matter areas.
"""
if max_qa is None:
return qa / qa.max()
return qa / max_qa
def squared_radial_component(x, tol=0.01):
""" Part of the GQI2 integral
Eq.8 in the referenced paper by Yeh et al. 2010
"""
with warnings.catch_warnings():
warnings.simplefilter('ignore')
result = (2 * x * np.cos(x) + (x * x - 2) * np.sin(x)) / x ** 3
x_near_zero = (x < tol) & (x > -tol)
return np.where(x_near_zero, 1.0 / 3, result)
def npa(self, odf, width=5):
""" non-parametric anisotropy
Nimmo-Smith et al. ISMRM 2011
"""
t0, t1, t2 = triple_odf_maxima(self.odf_vertices, odf, width)
psi0 = t0[1] ** 2
psi1 = t1[1] ** 2
psi2 = t2[1] ** 2
npa = np.sqrt((psi0 - psi1) ** 2 + (psi1 - psi2) ** 2 + (psi2 - psi0) ** 2
) / np.sqrt(2 * (psi0 ** 2 + psi1 ** 2 + psi2 ** 2))
return t0, t1, t2, npa
<mask token>
def polar_zone_vertices(vertices, pole, width=5):
"""
finds the 'vertices' in the equatorial band around
the 'pole' of radius 'width' degrees
"""
return [i for i, v in enumerate(vertices) if np.abs(np.dot(v, pole)) >
np.abs(np.cos(np.pi * width / 180))]
def upper_hemi_map(v):
"""
maps a 3-vector into the z-upper hemisphere
"""
return np.sign(v[2]) * v
def equatorial_maximum(vertices, odf, pole, width):
eqvert = equatorial_zone_vertices(vertices, pole, width)
if len(eqvert) == 0:
print('empty equatorial band at %s pole with width %f' % (np.
array_str(pole), width))
return None, None
eqvals = [odf[i] for i in eqvert]
eqargmax = np.argmax(eqvals)
eqvertmax = eqvert[eqargmax]
eqvalmax = eqvals[eqargmax]
return eqvertmax, eqvalmax
def patch_vertices(vertices, pole, width):
"""
find 'vertices' within the cone of 'width' degrees around 'pole'
"""
return [i for i, v in enumerate(vertices) if np.abs(np.dot(v, pole)) >
np.abs(np.cos(np.pi * width / 180))]
def patch_maximum(vertices, odf, pole, width):
eqvert = patch_vertices(vertices, pole, width)
if len(eqvert) == 0:
        print('empty cone around pole %s with width %f' % (np.
            array_str(pole), width))
        return None, None
eqvals = [odf[i] for i in eqvert]
eqargmax = np.argmax(eqvals)
eqvertmax = eqvert[eqargmax]
eqvalmax = eqvals[eqargmax]
return eqvertmax, eqvalmax
def odf_sum(odf):
return np.sum(odf)
def patch_sum(vertices, odf, pole, width):
eqvert = patch_vertices(vertices, pole, width)
if len(eqvert) == 0:
        print('empty cone around pole %s with width %f' % (np.
            array_str(pole), width))
        return None
return np.sum([odf[i] for i in eqvert])
def triple_odf_maxima(vertices, odf, width):
indmax1 = np.argmax([odf[i] for i, v in enumerate(vertices)])
odfmax1 = odf[indmax1]
pole = vertices[indmax1]
eqvert = equatorial_zone_vertices(vertices, pole, width)
indmax2, odfmax2 = equatorial_maximum(vertices, odf, pole, width)
indmax3 = eqvert[np.argmin([np.abs(np.dot(vertices[indmax2], vertices[p
])) for p in eqvert])]
odfmax3 = odf[indmax3]
"""
cross12 = np.cross(vertices[indmax1],vertices[indmax2])
cross12 = cross12/np.sqrt(np.sum(cross12**2))
indmax3, odfmax3 = patch_maximum(vertices, odf, cross12, 2*width)
"""
return [(indmax1, odfmax1), (indmax2, odfmax2), (indmax3, odfmax3)]
| <mask token>
import numpy as np
from dipy.reconst.odf import OdfModel, OdfFit, gfa
from dipy.reconst.cache import Cache
import warnings
from dipy.reconst.multi_voxel import multi_voxel_fit
from dipy.reconst.recspeed import local_maxima, remove_similar_vertices
class GeneralizedQSamplingModel(OdfModel, Cache):
def __init__(self, gtab, method='gqi2', sampling_length=1.2,
normalize_peaks=False):
""" Generalized Q-Sampling Imaging [1]_
This model has the same assumptions as the DSI method i.e. Cartesian
grid sampling in q-space and fast gradient switching.
        Implements equation 2.14 from [2]_ for standard GQI and equation 2.16
from [2]_ for GQI2. You can think of GQI2 as an analytical solution of
the DSI ODF.
Parameters
----------
gtab : object,
GradientTable
method : str,
'standard' or 'gqi2'
sampling_length : float,
diffusion sampling length (lambda in eq. 2.14 and 2.16)
References
----------
.. [1] Yeh F-C et al., "Generalized Q-Sampling Imaging", IEEE TMI, 2010
.. [2] Garyfallidis E, "Towards an accurate brain tractography", PhD
thesis, University of Cambridge, 2012.
Notes
-----
As of version 0.9, range of the sampling length in GQI2 has changed
to match the same scale used in the 'standard' method [1]_. This
means that the value of `sampling_length` should be approximately
1 - 1.3 (see [1]_, pg. 1628).
Examples
--------
Here we create an example where we provide the data, a gradient table
and a reconstruction sphere and calculate the ODF for the first
voxel in the data.
>>> from dipy.data import dsi_voxels
>>> data, gtab = dsi_voxels()
>>> from dipy.core.subdivide_octahedron import create_unit_sphere
>>> sphere = create_unit_sphere(5)
>>> from dipy.reconst.gqi import GeneralizedQSamplingModel
>>> gq = GeneralizedQSamplingModel(gtab, 'gqi2', 1.1)
>>> voxel_signal = data[0, 0, 0]
>>> odf = gq.fit(voxel_signal).odf(sphere)
See Also
--------
dipy.reconst.dsi.DiffusionSpectrumModel
"""
OdfModel.__init__(self, gtab)
self.method = method
self.Lambda = sampling_length
self.normalize_peaks = normalize_peaks
scaling = np.sqrt(self.gtab.bvals * 0.01506)
tmp = np.tile(scaling, (3, 1))
gradsT = self.gtab.bvecs.T
b_vector = gradsT * tmp
self.b_vector = b_vector.T
@multi_voxel_fit
def fit(self, data):
return GeneralizedQSamplingFit(self, data)
class GeneralizedQSamplingFit(OdfFit):
def __init__(self, model, data):
""" Calculates PDF and ODF for a single voxel
Parameters
----------
model : object,
DiffusionSpectrumModel
data : 1d ndarray,
signal values
"""
OdfFit.__init__(self, model, data)
self._gfa = None
self.npeaks = 5
self._peak_values = None
self._peak_indices = None
self._qa = None
def odf(self, sphere):
""" Calculates the discrete ODF for a given discrete sphere.
"""
self.gqi_vector = self.model.cache_get('gqi_vector', key=sphere)
if self.gqi_vector is None:
if self.model.method == 'gqi2':
H = squared_radial_component
self.gqi_vector = np.real(H(np.dot(self.model.b_vector,
sphere.vertices.T) * self.model.Lambda))
if self.model.method == 'standard':
self.gqi_vector = np.real(np.sinc(np.dot(self.model.
b_vector, sphere.vertices.T) * self.model.Lambda / np.pi))
self.model.cache_set('gqi_vector', sphere, self.gqi_vector)
return np.dot(self.data, self.gqi_vector)
def normalize_qa(qa, max_qa=None):
""" Normalize quantitative anisotropy.
Used mostly with GQI rather than GQI2.
Parameters
----------
qa : array, shape (X, Y, Z, N)
where N is the maximum number of peaks stored
max_qa : float,
        maximum qa value. Usually found in the CSF (cerebrospinal fluid).
    Returns
    -------
    nqa : array, shape (X, Y, Z, N)
normalized quantitative anisotropy
Notes
-----
Normalized quantitative anisotropy has the very useful property
to be very small near gray matter and background areas. Therefore,
it can be used to mask out white matter areas.
"""
if max_qa is None:
return qa / qa.max()
return qa / max_qa
def squared_radial_component(x, tol=0.01):
""" Part of the GQI2 integral
Eq.8 in the referenced paper by Yeh et al. 2010
"""
with warnings.catch_warnings():
warnings.simplefilter('ignore')
result = (2 * x * np.cos(x) + (x * x - 2) * np.sin(x)) / x ** 3
x_near_zero = (x < tol) & (x > -tol)
return np.where(x_near_zero, 1.0 / 3, result)
def npa(self, odf, width=5):
""" non-parametric anisotropy
Nimmo-Smith et al. ISMRM 2011
"""
t0, t1, t2 = triple_odf_maxima(self.odf_vertices, odf, width)
psi0 = t0[1] ** 2
psi1 = t1[1] ** 2
psi2 = t2[1] ** 2
npa = np.sqrt((psi0 - psi1) ** 2 + (psi1 - psi2) ** 2 + (psi2 - psi0) ** 2
) / np.sqrt(2 * (psi0 ** 2 + psi1 ** 2 + psi2 ** 2))
return t0, t1, t2, npa
def equatorial_zone_vertices(vertices, pole, width=5):
"""
finds the 'vertices' in the equatorial zone conjugate
to 'pole' with width half 'width' degrees
"""
return [i for i, v in enumerate(vertices) if np.abs(np.dot(v, pole)) <
np.abs(np.sin(np.pi * width / 180))]
def polar_zone_vertices(vertices, pole, width=5):
"""
finds the 'vertices' in the equatorial band around
the 'pole' of radius 'width' degrees
"""
return [i for i, v in enumerate(vertices) if np.abs(np.dot(v, pole)) >
np.abs(np.cos(np.pi * width / 180))]
def upper_hemi_map(v):
"""
maps a 3-vector into the z-upper hemisphere
"""
return np.sign(v[2]) * v
def equatorial_maximum(vertices, odf, pole, width):
eqvert = equatorial_zone_vertices(vertices, pole, width)
if len(eqvert) == 0:
print('empty equatorial band at %s pole with width %f' % (np.
array_str(pole), width))
return None, None
eqvals = [odf[i] for i in eqvert]
eqargmax = np.argmax(eqvals)
eqvertmax = eqvert[eqargmax]
eqvalmax = eqvals[eqargmax]
return eqvertmax, eqvalmax
def patch_vertices(vertices, pole, width):
"""
find 'vertices' within the cone of 'width' degrees around 'pole'
"""
return [i for i, v in enumerate(vertices) if np.abs(np.dot(v, pole)) >
np.abs(np.cos(np.pi * width / 180))]
def patch_maximum(vertices, odf, pole, width):
eqvert = patch_vertices(vertices, pole, width)
if len(eqvert) == 0:
        print('empty cone around pole %s with width %f' % (np.
            array_str(pole), width))
        return None, None
eqvals = [odf[i] for i in eqvert]
eqargmax = np.argmax(eqvals)
eqvertmax = eqvert[eqargmax]
eqvalmax = eqvals[eqargmax]
return eqvertmax, eqvalmax
def odf_sum(odf):
return np.sum(odf)
def patch_sum(vertices, odf, pole, width):
eqvert = patch_vertices(vertices, pole, width)
if len(eqvert) == 0:
        print('empty cone around pole %s with width %f' % (np.
            array_str(pole), width))
        return None
return np.sum([odf[i] for i in eqvert])
def triple_odf_maxima(vertices, odf, width):
indmax1 = np.argmax([odf[i] for i, v in enumerate(vertices)])
odfmax1 = odf[indmax1]
pole = vertices[indmax1]
eqvert = equatorial_zone_vertices(vertices, pole, width)
indmax2, odfmax2 = equatorial_maximum(vertices, odf, pole, width)
indmax3 = eqvert[np.argmin([np.abs(np.dot(vertices[indmax2], vertices[p
])) for p in eqvert])]
odfmax3 = odf[indmax3]
"""
cross12 = np.cross(vertices[indmax1],vertices[indmax2])
cross12 = cross12/np.sqrt(np.sum(cross12**2))
indmax3, odfmax3 = patch_maximum(vertices, odf, cross12, 2*width)
"""
return [(indmax1, odfmax1), (indmax2, odfmax2), (indmax3, odfmax3)]
| """ Classes and functions for generalized q-sampling """
import numpy as np
from dipy.reconst.odf import OdfModel, OdfFit, gfa
from dipy.reconst.cache import Cache
import warnings
from dipy.reconst.multi_voxel import multi_voxel_fit
from dipy.reconst.recspeed import local_maxima, remove_similar_vertices
class GeneralizedQSamplingModel(OdfModel, Cache):
def __init__(self,
gtab,
method='gqi2',
sampling_length=1.2,
normalize_peaks=False):
r""" Generalized Q-Sampling Imaging [1]_
This model has the same assumptions as the DSI method i.e. Cartesian
grid sampling in q-space and fast gradient switching.
        Implements equation 2.14 from [2]_ for standard GQI and equation 2.16
from [2]_ for GQI2. You can think of GQI2 as an analytical solution of
the DSI ODF.
Parameters
----------
gtab : object,
GradientTable
method : str,
'standard' or 'gqi2'
sampling_length : float,
diffusion sampling length (lambda in eq. 2.14 and 2.16)
References
----------
.. [1] Yeh F-C et al., "Generalized Q-Sampling Imaging", IEEE TMI, 2010
.. [2] Garyfallidis E, "Towards an accurate brain tractography", PhD
thesis, University of Cambridge, 2012.
Notes
-----
As of version 0.9, range of the sampling length in GQI2 has changed
to match the same scale used in the 'standard' method [1]_. This
means that the value of `sampling_length` should be approximately
1 - 1.3 (see [1]_, pg. 1628).
Examples
--------
Here we create an example where we provide the data, a gradient table
and a reconstruction sphere and calculate the ODF for the first
voxel in the data.
>>> from dipy.data import dsi_voxels
>>> data, gtab = dsi_voxels()
>>> from dipy.core.subdivide_octahedron import create_unit_sphere
>>> sphere = create_unit_sphere(5)
>>> from dipy.reconst.gqi import GeneralizedQSamplingModel
>>> gq = GeneralizedQSamplingModel(gtab, 'gqi2', 1.1)
>>> voxel_signal = data[0, 0, 0]
>>> odf = gq.fit(voxel_signal).odf(sphere)
See Also
--------
dipy.reconst.dsi.DiffusionSpectrumModel
"""
OdfModel.__init__(self, gtab)
self.method = method
self.Lambda = sampling_length
self.normalize_peaks = normalize_peaks
# 0.01506 = 6*D where D is the free water diffusion coefficient
# l_values sqrt(6 D tau) D free water diffusion coefficient and
# tau included in the b-value
scaling = np.sqrt(self.gtab.bvals * 0.01506)
tmp = np.tile(scaling, (3, 1))
gradsT = self.gtab.bvecs.T
b_vector = gradsT * tmp # element-wise product
self.b_vector = b_vector.T
@multi_voxel_fit
def fit(self, data):
return GeneralizedQSamplingFit(self, data)
class GeneralizedQSamplingFit(OdfFit):
def __init__(self, model, data):
""" Calculates PDF and ODF for a single voxel
Parameters
----------
model : object,
DiffusionSpectrumModel
data : 1d ndarray,
signal values
"""
OdfFit.__init__(self, model, data)
self._gfa = None
self.npeaks = 5
self._peak_values = None
self._peak_indices = None
self._qa = None
def odf(self, sphere):
""" Calculates the discrete ODF for a given discrete sphere.
"""
self.gqi_vector = self.model.cache_get('gqi_vector', key=sphere)
if self.gqi_vector is None:
if self.model.method == 'gqi2':
H = squared_radial_component
# print self.gqi_vector.shape
self.gqi_vector = np.real(H(np.dot(
self.model.b_vector, sphere.vertices.T) *
self.model.Lambda))
if self.model.method == 'standard':
self.gqi_vector = np.real(np.sinc(np.dot(
self.model.b_vector, sphere.vertices.T) *
self.model.Lambda / np.pi))
self.model.cache_set('gqi_vector', sphere, self.gqi_vector)
return np.dot(self.data, self.gqi_vector)
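# Explicit restatement (my addition, not part of dipy's API): odf() above is a
# fixed linear map of the signal, odf = data @ K, where the kernel K depends
# only on the gradient table, the sphere and the sampling length.
def _gqi_kernel(b_vector, vertices, sampling_length, method='standard'):
    q = np.dot(b_vector, vertices.T) * sampling_length
    if method == 'gqi2':
        return np.real(squared_radial_component(q))
    return np.real(np.sinc(q / np.pi))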
def normalize_qa(qa, max_qa=None):
""" Normalize quantitative anisotropy.
Used mostly with GQI rather than GQI2.
Parameters
----------
qa : array, shape (X, Y, Z, N)
where N is the maximum number of peaks stored
max_qa : float,
        maximum qa value. Usually found in the CSF (cerebrospinal fluid).
    Returns
    -------
    nqa : array, shape (X, Y, Z, N)
normalized quantitative anisotropy
Notes
-----
Normalized quantitative anisotropy has the very useful property
to be very small near gray matter and background areas. Therefore,
it can be used to mask out white matter areas.
"""
if max_qa is None:
return qa / qa.max()
return qa / max_qa
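# Usage sketch (my addition; the shapes and the 0.0239 cutoff are assumptions
# based on older dipy tracking examples, not taken from this file):
def _white_matter_mask(qa, threshold=0.0239):
    nqa = normalize_qa(qa)
    return nqa[..., 0] > threshold  # voxels whose strongest peak survives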
def squared_radial_component(x, tol=0.01):
""" Part of the GQI2 integral
Eq.8 in the referenced paper by Yeh et al. 2010
"""
with warnings.catch_warnings():
warnings.simplefilter("ignore")
result = (2 * x * np.cos(x) + (x * x - 2) * np.sin(x)) / (x ** 3)
x_near_zero = (x < tol) & (x > -tol)
return np.where(x_near_zero, 1./3, result)
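# Why the tol branch returns 1/3 (my addition): Taylor expansion gives
# 2x cos(x) + (x^2 - 2) sin(x) = x^3/3 - x^5/10 + O(x^7), so the integrand
# tends to 1/3 as x -> 0 and the special case just avoids the 0/0 division.
def _check_small_x_limit():
    assert np.allclose(squared_radial_component(np.array([1e-8, 5e-3])), 1.0 / 3)
    assert np.allclose(squared_radial_component(np.array([0.02])),
                       1.0 / 3 - 0.02 ** 2 / 10, atol=1e-6)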
def npa(self, odf, width=5):
""" non-parametric anisotropy
Nimmo-Smith et al. ISMRM 2011
"""
# odf = self.odf(s)
t0, t1, t2 = triple_odf_maxima(self.odf_vertices, odf, width)
psi0 = t0[1] ** 2
psi1 = t1[1] ** 2
psi2 = t2[1] ** 2
npa = (np.sqrt(
(psi0 - psi1) ** 2 +
(psi1 - psi2) ** 2 +
(psi2 - psi0) ** 2) /
np.sqrt(2 * (psi0 ** 2 + psi1 ** 2 + psi2 ** 2)))
# print 'tom >>>> ',t0,t1,t2,npa
return t0, t1, t2, npa
def equatorial_zone_vertices(vertices, pole, width=5):
"""
finds the 'vertices' in the equatorial zone conjugate
to 'pole' with width half 'width' degrees
"""
return [i
for i, v in enumerate(vertices)
if np.abs(np.dot(v, pole)) < np.abs(np.sin(np.pi * width / 180))]
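# Geometric check (my addition): with the pole on the z-axis and width=5, only
# vertices whose z component is within sin(5 deg) of the equator are kept.
def _demo_equatorial_zone():
    pole = np.array([0., 0., 1.])
    verts = np.array([[0., 0., 1.],   # at the pole -> excluded
                      [1., 0., 0.],   # on the equator -> included
                      [0., np.sin(np.pi / 90), np.cos(np.pi / 90)]])  # 2 deg off the pole -> excluded
    return equatorial_zone_vertices(verts, pole, width=5)  # -> [1]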
def polar_zone_vertices(vertices, pole, width=5):
"""
finds the 'vertices' in the equatorial band around
the 'pole' of radius 'width' degrees
"""
return [i
for i, v in enumerate(vertices)
if np.abs(np.dot(v, pole)) > np.abs(np.cos(np.pi * width / 180))]
def upper_hemi_map(v):
"""
maps a 3-vector into the z-upper hemisphere
"""
return np.sign(v[2])*v
def equatorial_maximum(vertices, odf, pole, width):
eqvert = equatorial_zone_vertices(vertices, pole, width)
# need to test for whether eqvert is empty or not
if len(eqvert) == 0:
print('empty equatorial band at %s pole with width %f' %
(np.array_str(pole), width))
return None, None
eqvals = [odf[i] for i in eqvert]
eqargmax = np.argmax(eqvals)
eqvertmax = eqvert[eqargmax]
eqvalmax = eqvals[eqargmax]
return eqvertmax, eqvalmax
def patch_vertices(vertices, pole, width):
"""
find 'vertices' within the cone of 'width' degrees around 'pole'
"""
return [i
for i, v in enumerate(vertices)
if np.abs(np.dot(v, pole)) > np.abs(np.cos(np.pi * width / 180))]
def patch_maximum(vertices, odf, pole, width):
eqvert = patch_vertices(vertices, pole, width)
# need to test for whether eqvert is empty or not
if len(eqvert) == 0:
        print('empty cone around pole %s with width %f' %
              (np.array_str(pole), width))
        return None, None
eqvals = [odf[i] for i in eqvert]
eqargmax = np.argmax(eqvals)
eqvertmax = eqvert[eqargmax]
eqvalmax = eqvals[eqargmax]
return eqvertmax, eqvalmax
def odf_sum(odf):
return np.sum(odf)
def patch_sum(vertices, odf, pole, width):
eqvert = patch_vertices(vertices, pole, width)
# need to test for whether eqvert is empty or not
if len(eqvert) == 0:
        print('empty cone around pole %s with width %f' %
              (np.array_str(pole), width))
        return None
return np.sum([odf[i] for i in eqvert])
def triple_odf_maxima(vertices, odf, width):
indmax1 = np.argmax([odf[i] for i, v in enumerate(vertices)])
odfmax1 = odf[indmax1]
pole = vertices[indmax1]
eqvert = equatorial_zone_vertices(vertices, pole, width)
indmax2, odfmax2 = equatorial_maximum(vertices, odf, pole, width)
indmax3 = eqvert[np.argmin([np.abs(np.dot(vertices[indmax2], vertices[p]))
for p in eqvert])]
odfmax3 = odf[indmax3]
"""
cross12 = np.cross(vertices[indmax1],vertices[indmax2])
cross12 = cross12/np.sqrt(np.sum(cross12**2))
indmax3, odfmax3 = patch_maximum(vertices, odf, cross12, 2*width)
"""
return [(indmax1, odfmax1), (indmax2, odfmax2), (indmax3, odfmax3)]
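# Toy example (my addition): on the 6 axis directions, the strongest peak is
# +/-z, the strongest equatorial peak is +/-x, and the third maximum is picked
# from the equatorial band nearly orthogonal to the second.
def _demo_triple_maxima():
    verts = np.array([[0., 0., 1.], [0., 0., -1.], [1., 0., 0.],
                      [-1., 0., 0.], [0., 1., 0.], [0., -1., 0.]])
    odf = np.array([1.0, 1.0, 0.8, 0.8, 0.5, 0.5])
    return triple_odf_maxima(verts, odf, width=5)  # [(0, 1.0), (2, 0.8), (4, 0.5)]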
| [
14,
16,
17,
19,
20
] |
1,926 | 72b086e833ab3ee4ec3102869d74513ef3657675 | <mask token>
class A2C_agent(object):
<mask token>
def act(self, state):
action_distribution = self.actor_network.forward(state)
action = np.random.choice(self.num_of_actions, p=
action_distribution.detach().numpy())
return action
def memorize(self, state, action, new_state, reward, done):
self.experience_replay_buffer.push(state, action, new_state, reward,
done)
<mask token>
| <mask token>
class A2C_agent(object):
<mask token>
def act(self, state):
action_distribution = self.actor_network.forward(state)
action = np.random.choice(self.num_of_actions, p=
action_distribution.detach().numpy())
return action
def memorize(self, state, action, new_state, reward, done):
self.experience_replay_buffer.push(state, action, new_state, reward,
done)
def learn(self, rewards_batch, states_batch, actions_batch,
new_states_batch, new_actions_batch):
states_batch = np.asarray(states_batch)
actions_batch = torch.tensor(actions_batch, dtype=torch.long)
rewards_batch = torch.tensor(rewards_batch, dtype=torch.float)
        new_states_batch = np.asarray(new_states_batch)
        new_actions_batch = torch.tensor(new_actions_batch, dtype=torch.long)
V_batch = []
V_prime_batch = []
for state, new_state, new_action in zip(states_batch,
new_states_batch, new_actions_batch):
state = torch.Tensor(state)
v_value = self.critic_network.forward(state)
V_batch.append(v_value)
new_state = torch.Tensor(new_state)
v_prime_value = self.critic_network.forward(new_state)
V_prime_batch.append(v_prime_value)
log_probs = torch.log(self.actor_network(states_batch))
selected_log_probs = rewards_batch * log_probs[np.arange(len(
actions_batch)), actions_batch]
actor_loss = -selected_log_probs.mean()
self.actor_optimizer.zero_grad()
actor_loss.backward()
self.actor_optimizer.step()
V_prime_batch = torch.stack(V_prime_batch)
V_batch = torch.stack(V_batch)
advantage = rewards_batch + self.critic_gamma * V_prime_batch - V_batch
critic_loss = (V_batch - (rewards_batch + self.critic_gamma *
V_prime_batch)).pow(2).mean()
self.critic_optimizer.zero_grad()
critic_loss.backward()
self.critic_optimizer.step()
| <mask token>
class A2C_agent(object):
def __init__(self, env, actor_hidden_size, actor_lr, actor_batch_size,
critic_gamma, mem_size, critic_hidden_size, critic_lr,
critic_batch_size):
self.env = env
self.actor_hidden_size = actor_hidden_size
self.actor_lr = actor_lr
self.actor_batch_size = actor_batch_size
self.critic_hidden_size = critic_hidden_size
self.critic_lr = critic_lr
self.critic_batch_size = critic_batch_size
self.critic_gamma = critic_gamma
self.mem_size = mem_size
self.num_of_states = env.observation_space.shape[0]
self.num_of_actions = env.action_space.n
self.experience_replay_buffer = ReplayBuffer(self.mem_size)
self.actor_network = ActorNet(self.num_of_states, self.
actor_hidden_size, self.num_of_actions)
self.actor_optimizer = optim.Adam(self.actor_network.parameters(),
lr=self.actor_lr)
self.critic_network = CriticNet(self.num_of_states, self.
critic_hidden_size, 1)
self.critic_optimizer = optim.Adam(self.critic_network.parameters(),
lr=self.critic_lr)
def act(self, state):
action_distribution = self.actor_network.forward(state)
action = np.random.choice(self.num_of_actions, p=
action_distribution.detach().numpy())
return action
def memorize(self, state, action, new_state, reward, done):
self.experience_replay_buffer.push(state, action, new_state, reward,
done)
def learn(self, rewards_batch, states_batch, actions_batch,
new_states_batch, new_actions_batch):
states_batch = np.asarray(states_batch)
actions_batch = torch.tensor(actions_batch, dtype=torch.long)
rewards_batch = torch.tensor(rewards_batch, dtype=torch.float)
        new_states_batch = np.asarray(new_states_batch)
        new_actions_batch = torch.tensor(new_actions_batch, dtype=torch.long)
V_batch = []
V_prime_batch = []
for state, new_state, new_action in zip(states_batch,
new_states_batch, new_actions_batch):
state = torch.Tensor(state)
v_value = self.critic_network.forward(state)
V_batch.append(v_value)
new_state = torch.Tensor(new_state)
v_prime_value = self.critic_network.forward(new_state)
V_prime_batch.append(v_prime_value)
log_probs = torch.log(self.actor_network(states_batch))
selected_log_probs = rewards_batch * log_probs[np.arange(len(
actions_batch)), actions_batch]
actor_loss = -selected_log_probs.mean()
self.actor_optimizer.zero_grad()
actor_loss.backward()
self.actor_optimizer.step()
V_prime_batch = torch.stack(V_prime_batch)
V_batch = torch.stack(V_batch)
advantage = rewards_batch + self.critic_gamma * V_prime_batch - V_batch
critic_loss = (V_batch - (rewards_batch + self.critic_gamma *
V_prime_batch)).pow(2).mean()
self.critic_optimizer.zero_grad()
critic_loss.backward()
self.critic_optimizer.step()
| <mask token>
import torch
import torch.optim as optim
from utilities import *
from model import *
from torch.autograd import Variable
import numpy as np
import random
class A2C_agent(object):
def __init__(self, env, actor_hidden_size, actor_lr, actor_batch_size,
critic_gamma, mem_size, critic_hidden_size, critic_lr,
critic_batch_size):
self.env = env
self.actor_hidden_size = actor_hidden_size
self.actor_lr = actor_lr
self.actor_batch_size = actor_batch_size
self.critic_hidden_size = critic_hidden_size
self.critic_lr = critic_lr
self.critic_batch_size = critic_batch_size
self.critic_gamma = critic_gamma
self.mem_size = mem_size
self.num_of_states = env.observation_space.shape[0]
self.num_of_actions = env.action_space.n
self.experience_replay_buffer = ReplayBuffer(self.mem_size)
self.actor_network = ActorNet(self.num_of_states, self.
actor_hidden_size, self.num_of_actions)
self.actor_optimizer = optim.Adam(self.actor_network.parameters(),
lr=self.actor_lr)
self.critic_network = CriticNet(self.num_of_states, self.
critic_hidden_size, 1)
self.critic_optimizer = optim.Adam(self.critic_network.parameters(),
lr=self.critic_lr)
def act(self, state):
action_distribution = self.actor_network.forward(state)
action = np.random.choice(self.num_of_actions, p=
action_distribution.detach().numpy())
return action
def memorize(self, state, action, new_state, reward, done):
self.experience_replay_buffer.push(state, action, new_state, reward,
done)
def learn(self, rewards_batch, states_batch, actions_batch,
new_states_batch, new_actions_batch):
states_batch = np.asarray(states_batch)
actions_batch = torch.tensor(actions_batch, dtype=torch.long)
rewards_batch = torch.tensor(rewards_batch, dtype=torch.float)
        new_states_batch = np.asarray(new_states_batch)
        new_actions_batch = torch.tensor(new_actions_batch, dtype=torch.long)
V_batch = []
V_prime_batch = []
for state, new_state, new_action in zip(states_batch,
new_states_batch, new_actions_batch):
state = torch.Tensor(state)
v_value = self.critic_network.forward(state)
V_batch.append(v_value)
new_state = torch.Tensor(new_state)
v_prime_value = self.critic_network.forward(new_state)
V_prime_batch.append(v_prime_value)
log_probs = torch.log(self.actor_network(states_batch))
selected_log_probs = rewards_batch * log_probs[np.arange(len(
actions_batch)), actions_batch]
actor_loss = -selected_log_probs.mean()
self.actor_optimizer.zero_grad()
actor_loss.backward()
self.actor_optimizer.step()
V_prime_batch = torch.stack(V_prime_batch)
V_batch = torch.stack(V_batch)
advantage = rewards_batch + self.critic_gamma * V_prime_batch - V_batch
critic_loss = (V_batch - (rewards_batch + self.critic_gamma *
V_prime_batch)).pow(2).mean()
self.critic_optimizer.zero_grad()
critic_loss.backward()
self.critic_optimizer.step()
| #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Oct 25 19:21:32 2019
@author: Nikos
"""
import torch
import torch.optim as optim
from utilities import *
from model import *
from torch.autograd import Variable
import numpy as np
import random
class A2C_agent(object):
def __init__(self, env, actor_hidden_size, actor_lr, actor_batch_size,
critic_gamma, mem_size, critic_hidden_size, critic_lr, critic_batch_size):
self.env = env
self.actor_hidden_size = actor_hidden_size
self.actor_lr = actor_lr
self.actor_batch_size = actor_batch_size
self.critic_hidden_size = critic_hidden_size
self.critic_lr = critic_lr
self.critic_batch_size = critic_batch_size
self.critic_gamma = critic_gamma
self.mem_size = mem_size
self.num_of_states = env.observation_space.shape[0]
self.num_of_actions = env.action_space.n
self.experience_replay_buffer = ReplayBuffer(self.mem_size)
# initialize the Actor network (policy)
self.actor_network = ActorNet(self.num_of_states, self.actor_hidden_size, self.num_of_actions)
self.actor_optimizer = optim.Adam(self.actor_network.parameters(), lr = self.actor_lr)
# initialize the Critic network (v-learning)
# The difference between the critic in A2C (here) and the
        # critic in the "vanilla" Actor-Critic version is that the
# critic in A2C models the value function, hence it needs
# to only output the value of each state and not the Q-value
# for each (state, action) pair. Therefore, the output size
# here needs to be a scalar.
self.critic_network = CriticNet(self.num_of_states, self.critic_hidden_size, 1)
self.critic_optimizer = optim.Adam(self.critic_network.parameters(), lr = self.critic_lr)
def act(self, state):
# compute the action distribution based on the current state via the policy net
action_distribution = self.actor_network.forward(state)
# pick an action based on that distribution
action = np.random.choice(self.num_of_actions, p = action_distribution.detach().numpy())
return action
def memorize(self, state, action, new_state, reward, done):
# this function takes a transition (state, action, new_state, reward, done)
# and stores it into the experience memory buffer
self.experience_replay_buffer.push(state, action, new_state, reward, done)
def learn(self, rewards_batch, states_batch, actions_batch, new_states_batch, new_actions_batch):
#states_batch = torch.tensor(states_batch, dtype=torch.float)
states_batch = np.asarray(states_batch)
actions_batch = torch.tensor(actions_batch, dtype=torch.long)
rewards_batch = torch.tensor(rewards_batch, dtype=torch.float)
        new_states_batch = np.asarray(new_states_batch)
        new_actions_batch = torch.tensor(new_actions_batch, dtype=torch.long)
V_batch = []
V_prime_batch = []
for state, new_state, new_action in zip(states_batch,\
new_states_batch, new_actions_batch):
state = torch.Tensor(state)
v_value = self.critic_network.forward(state)
# get q-value for specific action
#Q = q_values.gather(-1, action)
V_batch.append(v_value)
new_state = torch.Tensor(new_state)
v_prime_value = self.critic_network.forward(new_state)
#V_prime = q_prime_values.gather(-1, new_action)
V_prime_batch.append(v_prime_value)
# compute the log of the probabilities that the policy outputs for each state
log_probs = torch.log(self.actor_network(states_batch))
# pick those log probabilities that correspond to the actions that were selected
selected_log_probs = rewards_batch * log_probs[np.arange(len(actions_batch)), actions_batch]
# compute the monte-carlo estimate by averaging the losses and then form the optimization
# criterion, which will be the negative log probs.
actor_loss = -selected_log_probs.mean()
self.actor_optimizer.zero_grad()
actor_loss.backward()
# if we need smooth updates we clip the grads between -1 and 1
#for param in self.online_dqn_network.parameters():
# param.grad.data.clamp_(-1,1)
self.actor_optimizer.step()
# Compute TD error for V network
V_prime_batch = torch.stack(V_prime_batch)
V_batch = torch.stack(V_batch)
# A(s, a) = r_prime + gamma * V_prime - V
advantage = rewards_batch + self.critic_gamma * V_prime_batch - V_batch
#print(deltas)
critic_loss = (V_batch - (rewards_batch + self.critic_gamma * V_prime_batch)).pow(2).mean()
#print(critic_loss)
self.critic_optimizer.zero_grad()
critic_loss.backward()
self.critic_optimizer.step()
#return loss | [
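# Hedged usage sketch (editorial addition): the environment name, the
# hyper-parameters, and the classic 4-tuple gym step API below are
# illustrative assumptions, not part of this file.
#
#   env = gym.make('CartPole-v1')
#   agent = A2C_agent(env, actor_hidden_size=64, actor_lr=1e-3,
#                     actor_batch_size=32, critic_gamma=0.99, mem_size=10000,
#                     critic_hidden_size=64, critic_lr=1e-3, critic_batch_size=32)
#   state = env.reset()
#   states, actions, rewards, new_states, new_actions = [], [], [], [], []
#   for _ in range(32):
#       action = agent.act(torch.Tensor(state))
#       new_state, reward, done, _ = env.step(action)
#       agent.memorize(state, action, new_state, reward, done)
#       states.append(state); actions.append(action); rewards.append(reward)
#       new_states.append(new_state); new_actions.append(action)
#       state = env.reset() if done else new_state
#   agent.learn(rewards, states, actions, new_states, new_actions)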
3,
4,
5,
6,
7
] |
1,927 | 7ccaa15f025b2c1ba560d07c1a30b06c9ebf9ad1 | <mask token>
| <mask token>
def random():
"""Return a random parameter set for the model."""
radius = 10 ** np.random.uniform(1.3, 4)
d_factor = 10 ** np.random.uniform(-2, -0.7)
dnn_fraction = np.random.beta(a=10, b=1)
dnn = radius * 4 / np.sqrt(3) / dnn_fraction
pars = dict(dnn=dnn, d_factor=d_factor, radius=radius)
return pars
<mask token>
| <mask token>
name = 'bcc_paracrystal'
title = 'Body-centred cubic lattice with paracrystalline distortion'
description = """
Calculates the scattering from a **body-centered cubic lattice** with
paracrystalline distortion. Thermal vibrations are considered to be
negligible, and the size of the paracrystal is infinitely large.
Paracrystalline distortion is assumed to be isotropic and characterized
by a Gaussian distribution.
"""
category = 'shape:paracrystal'
single = False
parameters = [['dnn', 'Ang', 220, [-inf, inf], '',
'Nearest neighbour distance'], ['d_factor', '', 0.06, [-inf, inf], '',
'Paracrystal distortion factor'], ['radius', 'Ang', 40, [0, inf],
'volume', 'Particle radius'], ['sld', '1e-6/Ang^2', 4, [-inf, inf],
'sld', 'Particle scattering length density'], ['sld_solvent',
'1e-6/Ang^2', 1, [-inf, inf], 'sld',
'Solvent scattering length density'], ['theta', 'degrees', 60, [-360,
360], 'orientation', 'c axis to beam angle'], ['phi', 'degrees', 60, [-
360, 360], 'orientation', 'rotation about beam'], ['psi', 'degrees', 60,
[-360, 360], 'orientation', 'rotation about c axis']]
source = ['lib/sas_3j1x_x.c', 'lib/gauss150.c', 'lib/sphere_form.c',
'bcc_paracrystal.c']
def random():
"""Return a random parameter set for the model."""
radius = 10 ** np.random.uniform(1.3, 4)
d_factor = 10 ** np.random.uniform(-2, -0.7)
dnn_fraction = np.random.beta(a=10, b=1)
dnn = radius * 4 / np.sqrt(3) / dnn_fraction
pars = dict(dnn=dnn, d_factor=d_factor, radius=radius)
return pars
q = 4.0 * pi / 220.0
tests = [[{}, [0.001, q, 0.25], [0.6945817843046642, 1.6885157981411993,
0.005367008206852725]]]
| <mask token>
import numpy as np
from numpy import inf, pi
name = 'bcc_paracrystal'
title = 'Body-centred cubic lattice with paracrystalline distortion'
description = """
Calculates the scattering from a **body-centered cubic lattice** with
paracrystalline distortion. Thermal vibrations are considered to be
negligible, and the size of the paracrystal is infinitely large.
Paracrystalline distortion is assumed to be isotropic and characterized
by a Gaussian distribution.
"""
category = 'shape:paracrystal'
single = False
parameters = [['dnn', 'Ang', 220, [-inf, inf], '',
'Nearest neighbour distance'], ['d_factor', '', 0.06, [-inf, inf], '',
'Paracrystal distortion factor'], ['radius', 'Ang', 40, [0, inf],
'volume', 'Particle radius'], ['sld', '1e-6/Ang^2', 4, [-inf, inf],
'sld', 'Particle scattering length density'], ['sld_solvent',
'1e-6/Ang^2', 1, [-inf, inf], 'sld',
'Solvent scattering length density'], ['theta', 'degrees', 60, [-360,
360], 'orientation', 'c axis to beam angle'], ['phi', 'degrees', 60, [-
360, 360], 'orientation', 'rotation about beam'], ['psi', 'degrees', 60,
[-360, 360], 'orientation', 'rotation about c axis']]
source = ['lib/sas_3j1x_x.c', 'lib/gauss150.c', 'lib/sphere_form.c',
'bcc_paracrystal.c']
def random():
"""Return a random parameter set for the model."""
radius = 10 ** np.random.uniform(1.3, 4)
d_factor = 10 ** np.random.uniform(-2, -0.7)
dnn_fraction = np.random.beta(a=10, b=1)
dnn = radius * 4 / np.sqrt(3) / dnn_fraction
pars = dict(dnn=dnn, d_factor=d_factor, radius=radius)
return pars
q = 4.0 * pi / 220.0
tests = [[{}, [0.001, q, 0.25], [0.6945817843046642, 1.6885157981411993,
0.005367008206852725]]]
| r"""
Definition
----------
Calculates the scattering from a **body-centered cubic lattice** with
paracrystalline distortion. Thermal vibrations are considered to be negligible,
and the size of the paracrystal is infinitely large. Paracrystalline distortion
is assumed to be isotropic and characterized by a Gaussian distribution.
The scattering intensity $I(q)$ is calculated as
.. math::
I(q) = \frac{\text{scale}}{V_p} V_\text{lattice} P(q) Z(q) + \text{background}
where *scale* is the volume fraction of crystal in the sample volume,
$V_\text{lattice}$ is the volume fraction of spheres in the crystal, $V_p$ is
the volume of the primary particle, $P(q)$ is the form factor of the sphere
(normalized), and $Z(q)$ is the paracrystalline structure factor for a
body-centered cubic structure.
.. note::
At this point the GUI does not return $V_\text{lattice}$ separately so that
the user will need to calculate it from the equation given and the
appropriate returned parameters.
.. warning::
As per the equations below, this model will return I(q)=0 for all q if the
distortion factor is equal to 0. The model is not meant to support perfect
crystals.
.. figure:: img/bcc_geometry.jpg
Body-centered cubic (BCC) lattice taken from reference [#Matsuoka1987]_.
Following the derivation from reference [#Matsuoka1987]_, as corrected in
reference [#Matsuoka1990]_, and based on the above figure, the
primitive unit cell vectors $\vec{a_1},\vec{a_2}$, and $\vec{a_3}$, which
enclose the smallest possible unit cell for the bcc lattice, are defined below:
.. math::
\vec{a_1} &= \frac{1}{2}(-\vec{b_1} + \vec{b_2} + \vec{b_3}) \\
\vec{a_2} &= \frac{1}{2} (\vec{b_1} - \vec{b_2} + \vec{b_3}) \\
\vec{a_3} &= \frac{1}{2}(\vec{b_1} + \vec{b_2} -\vec{b_3}).
where $\vec{b_1},\vec{b_2}$, and $\vec{b_3}$ are the unit cell vectors of the
conventional unit cell, which is a unit cell that includes the full symmetry
of the lattice. As defined by reference [#Matsuoka1987]_, the constant $a$ is the
lattice parameter of the conventional unit cell with
$|\vec{b_1}|=|\vec{b_2}|=|\vec{b_3}|=a$. Using this definition, the
nearest-neighbor distance ($D$) is given by
$D=|\vec{a_1}|=|\vec{a_2}|=|\vec{a_3}|=\sqrt{(a/2)^2+(a/2)^2+(a/2)^2}=\sqrt{\frac{3a^2}{4}}=\frac{\sqrt{3}a}{2}$.
The volume of the primitive unit cell $V_u$ is then given by:
.. math::
V_u &= |(\vec{a_1}\times \vec{a_2})\cdot\vec{a_3}|\\
&= (\frac{a^2}{2},\frac{a^2}{2},0)\cdot(\frac{a}{2},\frac{a}{2},-\frac{a}{2})\\
&= a^3/2
In this case, the volume fraction ($V_{lattice}$) of spherical particles with
radius $R$ sitting on the bcc lattice is given by:
.. math::
V_{lattice} &= \frac{4/3 \pi R^3}{a^3/2}\\
&= \frac{8\pi R^3}{3a^3}\\
&= \frac{\sqrt{3} \pi R^3}{D^3}
Now, continuing to follow [#Matsuoka1987]_, the structure (lattice)
factor $Z(\vec{q})$ for a 3D paracrystal can be written as:
.. math::
Z(\vec{q}) = \prod_{k=1}^{3}Z_k(\vec{q})
with
.. math::
Z_k(\vec{q}) = \frac{1-|F_k|^2}{1-2|F_k|\cos(\vec{a_k}\cdot\vec{q})+|F_k|^2}
and where $F_k(\vec{q})$ is the structure factor of the primitive unit cell
defined as:
.. math::
F_k(\vec{q}) = e^{-\frac{1}{2} \Delta a^2_k q^2} \times e^{-i\vec{q}\cdot\vec{a_k}}.
Here, $\vec{a_k}$ are the primitive unit cell vectors $\vec{a_1}$, $\vec{a_2}$,
and $\vec{a_3}$. Furthermore, $\Delta a_k$ is the isotropic distortion of the
lattice point from its ideal position and can be defined by a constant factor
$g=\Delta a / |\vec{a_1}| = \Delta a / |\vec{a_2}| = \Delta a / |\vec{a_3}|=\Delta a/D$.
Finally, assuming the definitions presented in this document, the authors of
reference [#Matsuoka1987]_ have derived the lattice factors which are given by:
.. math::
Z_1(q,\theta,\phi)&=[1-e^{-q^2\Delta a^2}]/\{1-2e^{-\frac{1}{2}q^2\Delta a^2}\cos[\frac{qa}{2}(\sin\theta \cos\phi + \sin\theta \sin\phi + \cos\theta)] + e^{-q^2\Delta a^2}\}\\
Z_2(q,\theta,\phi)&=[1-e^{-q^2\Delta a^2}]/\{1-2e^{-\frac{1}{2}q^2\Delta a^2}\cos[\frac{qa}{2}(-\sin\theta \cos\phi - \sin\theta \sin\phi + \cos\theta)] + e^{-q^2\Delta a^2}\}\\
Z_3(q,\theta,\phi)&=[1-e^{-q^2\Delta a^2}]/\{1-2e^{-\frac{1}{2}q^2\Delta a^2}\cos[\frac{qa}{2}(-\sin\theta \cos\phi + \sin\theta \sin\phi - \cos\theta)] + e^{-q^2\Delta a^2}\}\\
Note that Sasview is using the nearest-neighbor parameter ($D$) as an input
instead of the conventional unit cell parameter $a$. In this case, using
$a=\frac{2D}{\sqrt{3}}$, we rewrite $Z_1(q)$, $Z_2(q)$, and $Z_3(q)$ in terms
of $D$ instead of $a$, which leads to:
.. math::
Z_1(q,\theta,\phi)&=[1-e^{-q^2\Delta a^2}]/\{1-2e^{-\frac{1}{2}q^2\Delta a^2}\cos[\frac{qD}{\sqrt{3}}(\sin\theta \cos\phi + \sin\theta \sin\phi + \cos\theta)] + e^{-q^2\Delta a^2}\}\\
Z_2(q,\theta,\phi)&=[1-e^{-q^2\Delta a^2}]/\{1-2e^{-\frac{1}{2}q^2\Delta a^2}\cos[\frac{qD}{\sqrt{3}}(-\sin\theta \cos\phi - \sin\theta \sin\phi + \cos\theta)] + e^{-q^2\Delta a^2}\}\\
Z_3(q,\theta,\phi)&=[1-e^{-q^2\Delta a^2}]/\{1-2e^{-\frac{1}{2}q^2\Delta a^2}\cos[\frac{qD}{\sqrt{3}}(-\sin\theta \cos\phi + \sin\theta \sin\phi - \cos\theta)] + e^{-q^2\Delta a^2}\}\\
Finally note that the position of the Bragg peaks for the bcc lattice are
indexed by (reduced q-values):
.. math::
\frac{qa}{2\pi}=\frac{qD}{\sqrt{3}\pi}=\sqrt{h^2+k^2+l^2}.
In the above equation, we used the conventional unit cell so not all
permutations of h,k, and l will produce Bragg peaks. The Bragg scattering
condition for bcc imposes that h+k+l = even. Thus the peak positions
correspond to (just the first 5)
.. math::
\begin{array}{lccccc}
q/q_o & 1 & \sqrt{2} & \sqrt{3} & \sqrt{4} & \sqrt{5} \\
\text{Indices} & (110) & (200) & (211) & (220) & (310) \\
\end{array}
.. note::
The calculation of $Z(q)$ is a double numerical integral that must be
carried out with a high density of points to properly capture the sharp
peaks of the paracrystalline scattering. So be warned that the calculation
is slow. Fitting of any experimental data must be resolution smeared for
any meaningful fit. This makes a triple integral which may be very slow.
If a double-precision GPU with OpenCL support is available this may improve
the speed of the calculation.
This example dataset is produced using 200 data points,
*qmin* = 0.001 |Ang^-1|, *qmax* = 0.1 |Ang^-1| and the above default values.
The 2D (Anisotropic model) is based on the reference below where $I(q)$ is
approximated for 1d scattering. Thus the scattering pattern for 2D may not be
accurate, particularly at low $q$. For general details of the calculation and
angular dispersions for oriented particles see :ref:`orientation`. Note that
we are not responsible for any incorrectness of the 2D model computation.
.. figure:: img/parallelepiped_angle_definition.png
Orientation of the crystal with respect to the scattering plane, when
$\theta = \phi = 0$ the $c$ axis is along the beam direction (the $z$ axis).
References
----------
.. [#Matsuoka1987] Hideki Matsuoka et. al. *Physical Review B*, 36 (1987)
1754-1765 (Original Paper)
.. [#Matsuoka1990] Hideki Matsuoka et. al. *Physical Review B*, 41 (1990)
3854-3856 (Corrections to FCC and BCC lattice structure calculation)
Authorship and Verification
---------------------------
* **Author:** NIST IGOR/DANSE **Date:** pre 2010
* **Last Modified by:** Jonathan Gaudet **Date:** September 26, 2022
* **Last Reviewed by:** Paul Butler **Date:** November 2, 2022
"""
import numpy as np
from numpy import inf, pi
name = "bcc_paracrystal"
title = "Body-centred cubic lattice with paracrystalline distortion"
description = """
Calculates the scattering from a **body-centered cubic lattice** with
paracrystalline distortion. Thermal vibrations are considered to be
negligible, and the size of the paracrystal is infinitely large.
Paracrystalline distortion is assumed to be isotropic and characterized
by a Gaussian distribution.
"""
category = "shape:paracrystal"
#note - calculation requires double precision
single = False
# pylint: disable=bad-whitespace, line-too-long
# ["name", "units", default, [lower, upper], "type","description" ],
parameters = [["dnn", "Ang", 220, [-inf, inf], "", "Nearest neighbour distance"],
["d_factor", "", 0.06, [-inf, inf], "", "Paracrystal distortion factor"],
["radius", "Ang", 40, [0, inf], "volume", "Particle radius"],
["sld", "1e-6/Ang^2", 4, [-inf, inf], "sld", "Particle scattering length density"],
["sld_solvent", "1e-6/Ang^2", 1, [-inf, inf], "sld", "Solvent scattering length density"],
["theta", "degrees", 60, [-360, 360], "orientation", "c axis to beam angle"],
["phi", "degrees", 60, [-360, 360], "orientation", "rotation about beam"],
["psi", "degrees", 60, [-360, 360], "orientation", "rotation about c axis"]
]
# pylint: enable=bad-whitespace, line-too-long
source = ["lib/sas_3j1x_x.c", "lib/gauss150.c", "lib/sphere_form.c", "bcc_paracrystal.c"]
def random():
"""Return a random parameter set for the model."""
# Define lattice spacing as a multiple of the particle radius
# using the formula a = 4 r/sqrt(3). Systems which are ordered
# are probably mostly filled, so use a distribution which goes from
# zero to one, but leaving 90% of them within 80% of the
# maximum bcc packing. Lattice distortion values are empirically
# useful between 0.01 and 0.7. Use an exponential distribution
# in this range 'cuz its easy.
radius = 10**np.random.uniform(1.3, 4)
d_factor = 10**np.random.uniform(-2, -0.7) # sigma_d in 0.01-0.7
dnn_fraction = np.random.beta(a=10, b=1)
dnn = radius*4/np.sqrt(3)/dnn_fraction
pars = dict(
#sld=1, sld_solvent=0, scale=1, background=1e-32,
dnn=dnn,
d_factor=d_factor,
radius=radius,
)
return pars
# april 6 2017, rkh add unit tests, NOT compared with any other calc method, assume correct!
# add 2d test later
# October 26, 2022 PDB updated the 1D unit test after fixing the math. The values are again
# assumed correct. It would be good to have an independent assessment. 2D tests remain
# on the todo list
# TODO: fix the 2d tests
q = 4.*pi/220.
tests = [
[{}, [0.001, q, 0.25], [0.6945817843046642, 1.6885157981411993, 0.005367008206852725]],
#[{'theta': 20.0, 'phi': 30, 'psi': 40.0}, (-0.017, 0.035), 2082.20264399],
#[{'theta': 20.0, 'phi': 30, 'psi': 40.0}, (-0.081, 0.011), 0.436323144781],
]
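# Hedged worked example (editorial addition): from the docstring relation
# q*a/(2*pi) = sqrt(h^2 + k^2 + l^2) with a = 2*dnn/sqrt(3), the default
# dnn = 220 Ang gives a ~ 254.03 Ang, so the first allowed bcc reflection
# (110) sits near q = 2*pi*sqrt(2)/a ~ 0.0350 1/Ang and (200) near
# q = 4*pi/a ~ 0.0495 1/Ang.
q_110 = 2.*pi*np.sqrt(2.)/(2.*220./np.sqrt(3.))  # ~0.0350 1/Ang, illustrative only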
| [
0,
1,
2,
3,
4
] |
1,928 | dab9b58b08b562d902ee0ae1104198cb1ebbffe5 | <mask token>
def array_to_stack(stack, source):
"""
-------------------------------------------------------
Pushes contents of source onto stack. At finish, source is empty.
Last value in source is at bottom of stack,
first value in source is on top of stack.
Use: array_to_stack(stack, source)
-------------------------------------------------------
Parameters:
stack - a Stack object (Stack)
source - a Python list (list)
Returns:
None
-------------------------------------------------------
"""
while source != []:
temp = source.pop()
stack.push(temp)
return
<mask token>
def queue_to_array(queue, target):
"""
-------------------------------------------------------
Removes contents of queue into target. At finish, queue is empty.
Front value of queue is at front of target,
rear value of queue is at end of target.
Use: queue_to_array(queue, target)
-------------------------------------------------------
Parameters:
queue - a Queue object (Queue)
target - a Python list (list)
Returns:
None
-------------------------------------------------------
"""
temp = None
while queue.is_empty() == False:
temp = queue.remove()
target.append(temp)
return
def array_to_pq(pq, source):
"""
-------------------------------------------------------
Inserts contents of source into pq. At finish, source is empty.
Last value in source is at rear of pq,
first value in source is at front of pq.
Use: array_to_pq(pq, source)
-------------------------------------------------------
Parameters:
pq - a Priority_Queue object (Priority_Queue)
source - a Python list (list)
Returns:
None
-------------------------------------------------------
"""
temp = None
while source != []:
temp = source.pop(0)
pq.insert(temp)
return
<mask token>
def list_to_array(llist, target):
"""
-------------------------------------------------------
Removes contents of llist into target. At finish, llist is empty.
Front element of llist is at front of target,
rear element of llist is at rear of target.
Use: list_to_array(llist, target)
-------------------------------------------------------
Parameters:
llist - a List object (List)
target - a Python list (list)
Returns:
None
-------------------------------------------------------
"""
while llist.is_empty() == False:
target.append(llist.pop(0))
return
<mask token>
| <mask token>
def array_to_stack(stack, source):
"""
-------------------------------------------------------
Pushes contents of source onto stack. At finish, source is empty.
Last value in source is at bottom of stack,
first value in source is on top of stack.
Use: array_to_stack(stack, source)
-------------------------------------------------------
Parameters:
stack - a Stack object (Stack)
source - a Python list (list)
Returns:
None
-------------------------------------------------------
"""
while source != []:
temp = source.pop()
stack.push(temp)
return
def stack_to_array(stack, target):
"""
-------------------------------------------------------
Pops contents of stack into target. At finish, stack is empty.
Top value of stack is at end of target,
bottom value of stack is at beginning of target.
Use: stack_to_array(stack, target)
-------------------------------------------------------
Parameters:
stack - a Stack object (Stack)
target - a Python list (list)
Returns:
None
-------------------------------------------------------
"""
while stack.is_empty() == False:
temp = stack.pop()
target.insert(0, temp)
return
def stack_test(source):
"""
-------------------------------------------------------
Tests the methods of Stack for empty and
non-empty stacks using the data in source:
is_empty, push, pop, peek
(Testing pop and peek while empty throws exceptions)
Use: stack_test(source)
-------------------------------------------------------
Parameters:
source - list of data (list of ?)
Returns:
None
-------------------------------------------------------
"""
stack = Stack()
dummy = []
if stack.is_empty() == True:
print('Stack is empty.')
array_to_stack(stack, source)
print('Converting source into a stack...')
if stack.is_empty() == False:
print('source has been transferred into stack!')
print('\nPopping stack...')
while stack.is_empty() == False:
temp = stack.pop()
print(temp)
dummy.append(temp)
print('\nstack is empty. Pushing values back into stack...')
while dummy != []:
temp = dummy.pop()
print(temp)
stack.push(temp)
print('\nPushing complete! Peeking...')
print(stack.peek())
return
def array_to_queue(queue, source):
"""
-------------------------------------------------------
Inserts contents of source into queue. At finish, source is empty.
Last value in source is at rear of queue,
first value in source is at front of queue.
Use: array_to_queue(queue, source)
-------------------------------------------------------
Parameters:
queue - a Queue object (Queue)
source - a Python list (list)
Returns:
None
-------------------------------------------------------
"""
temp = None
while source != []:
temp = source.pop(0)
queue.insert(temp)
return
def queue_to_array(queue, target):
"""
-------------------------------------------------------
Removes contents of queue into target. At finish, queue is empty.
Front value of queue is at front of target,
rear value of queue is at end of target.
Use: queue_to_array(queue, target)
-------------------------------------------------------
Parameters:
queue - a Queue object (Queue)
target - a Python list (list)
Returns:
None
-------------------------------------------------------
"""
temp = None
while queue.is_empty() == False:
temp = queue.remove()
target.append(temp)
return
def array_to_pq(pq, source):
"""
-------------------------------------------------------
Inserts contents of source into pq. At finish, source is empty.
Last value in source is at rear of pq,
first value in source is at front of pq.
Use: array_to_pq(pq, source)
-------------------------------------------------------
Parameters:
pq - a Priority_Queue object (Priority_Queue)
source - a Python list (list)
Returns:
None
-------------------------------------------------------
"""
temp = None
while source != []:
temp = source.pop(0)
pq.insert(temp)
return
<mask token>
def priority_queue_test(a):
"""
-------------------------------------------------------
Tests priority queue implementation.
Use: pq_test(a)
-------------------------------------------------------
Parameters:
a - list of data (list of ?)
Returns:
the methods of Priority_Queue are tested for both empty and
non-empty priority queues using the data in a:
is_empty, insert, remove, peek
-------------------------------------------------------
"""
pq = Priority_Queue()
dummy = []
if pq.is_empty() == True:
print('pq is empty.')
array_to_pq(pq, a)
print('Converting a into a pq...')
if pq.is_empty() == False:
print('a has been transferred into pq!')
print('\nRemoving pq...')
while pq.is_empty() == False:
temp = pq.remove()
print(temp)
dummy.append(temp)
    print('\npq is empty. Inserting values back into pq...')
while dummy != []:
temp = dummy.pop()
print(temp)
pq.insert(temp)
print('\nPushing complete! Peeking...')
print(pq.peek())
print('\npq is {} objects long!'.format(len(pq)))
return
<mask token>
def list_to_array(llist, target):
"""
-------------------------------------------------------
Removes contents of llist into target. At finish, llist is empty.
Front element of llist is at front of target,
rear element of llist is at rear of target.
Use: list_to_array(llist, target)
-------------------------------------------------------
Parameters:
llist - a List object (List)
target - a Python list (list)
Returns:
None
-------------------------------------------------------
"""
while llist.is_empty() == False:
target.append(llist.pop(0))
return
def list_test(a):
"""
-------------------------------------------------------
Tests list implementation.
The methods of List are tested for both empty and
non-empty lists using the data in a:
is_empty, insert, remove, append, index, __contains__,
find, count, max, min, __getitem__, __setitem__
Use: list_test(a)
-------------------------------------------------------
Parameters:
a - list of data (list of ?)
Returns:
None
-------------------------------------------------------
"""
lst = List()
if lst.is_empty() == True:
print('lst is empty.')
array_to_list(lst, a)
print('Converting a into a lst...')
if lst.is_empty() == False:
print('a has been transferred into lst!')
print('The movie at index 0 is {}'.format(lst[0]))
    print('\nRemoving the movie at index 0...')
    temp = lst.remove(lst[0])
    print('Now the movie at index 0 is {}'.format(lst[0]))
    print('\nInserting the movie at index 1...')
    lst.insert(1, temp)
    print('Now the movie at index 1 is {}'.format(lst[1]))
    print('\nRemoving the movie at index 0...')
    temp = lst.remove(lst[0])
    print('\nAppending the movie...')
    lst.append(temp)
    print('Peeking...')
    print(lst.peek())
    print('\nThe index of the movie is {}'.format(lst.index(temp)))
    print('\n{} appears {} time(s)'.format(temp, lst.count(temp)))
    print('\nThe max is {}'.format(lst.max()))
    print('The min is {}'.format(lst.min()))
    print('\nThe movie is at index {}'.format(lst.find(temp)))
return
| <mask token>
def array_to_stack(stack, source):
"""
-------------------------------------------------------
Pushes contents of source onto stack. At finish, source is empty.
Last value in source is at bottom of stack,
first value in source is on top of stack.
Use: array_to_stack(stack, source)
-------------------------------------------------------
Parameters:
stack - a Stack object (Stack)
source - a Python list (list)
Returns:
None
-------------------------------------------------------
"""
while source != []:
temp = source.pop()
stack.push(temp)
return
def stack_to_array(stack, target):
"""
-------------------------------------------------------
Pops contents of stack into target. At finish, stack is empty.
Top value of stack is at end of target,
bottom value of stack is at beginning of target.
Use: stack_to_array(stack, target)
-------------------------------------------------------
Parameters:
stack - a Stack object (Stack)
target - a Python list (list)
Returns:
None
-------------------------------------------------------
"""
while stack.is_empty() == False:
temp = stack.pop()
target.insert(0, temp)
return
def stack_test(source):
"""
-------------------------------------------------------
Tests the methods of Stack for empty and
non-empty stacks using the data in source:
is_empty, push, pop, peek
(Testing pop and peek while empty throws exceptions)
Use: stack_test(source)
-------------------------------------------------------
Parameters:
source - list of data (list of ?)
Returns:
None
-------------------------------------------------------
"""
stack = Stack()
dummy = []
if stack.is_empty() == True:
print('Stack is empty.')
array_to_stack(stack, source)
print('Converting source into a stack...')
if stack.is_empty() == False:
print('source has been transferred into stack!')
print('\nPopping stack...')
while stack.is_empty() == False:
temp = stack.pop()
print(temp)
dummy.append(temp)
print('\nstack is empty. Pushing values back into stack...')
while dummy != []:
temp = dummy.pop()
print(temp)
stack.push(temp)
print('\nPushing complete! Peeking...')
print(stack.peek())
return
def array_to_queue(queue, source):
"""
-------------------------------------------------------
Inserts contents of source into queue. At finish, source is empty.
Last value in source is at rear of queue,
first value in source is at front of queue.
Use: array_to_queue(queue, source)
-------------------------------------------------------
Parameters:
queue - a Queue object (Queue)
source - a Python list (list)
Returns:
None
-------------------------------------------------------
"""
temp = None
while source != []:
temp = source.pop(0)
queue.insert(temp)
return
def queue_to_array(queue, target):
"""
-------------------------------------------------------
Removes contents of queue into target. At finish, queue is empty.
Front value of queue is at front of target,
rear value of queue is at end of target.
Use: queue_to_array(queue, target)
-------------------------------------------------------
Parameters:
queue - a Queue object (Queue)
target - a Python list (list)
Returns:
None
-------------------------------------------------------
"""
temp = None
while queue.is_empty() == False:
temp = queue.remove()
target.append(temp)
return
def array_to_pq(pq, source):
"""
-------------------------------------------------------
Inserts contents of source into pq. At finish, source is empty.
Last value in source is at rear of pq,
first value in source is at front of pq.
Use: array_to_pq(pq, source)
-------------------------------------------------------
Parameters:
pq - a Priority_Queue object (Priority_Queue)
source - a Python list (list)
Returns:
None
-------------------------------------------------------
"""
temp = None
while source != []:
temp = source.pop(0)
pq.insert(temp)
return
def pq_to_array(pq, target):
"""
-------------------------------------------------------
Removes contents of pq into target. At finish, pq is empty.
Highest priority value in pq is at front of target,
lowest priority value in pq is at end of target.
Use: pq_to_array(pq, target)
-------------------------------------------------------
Parameters:
pq - a Priority_Queue object (Priority_Queue)
target - a Python list (list)
Returns:
None
-------------------------------------------------------
"""
temp = None
while pq.is_empty() == False:
temp = pq.remove()
target.append(temp)
return
def queue_test(a):
"""
-------------------------------------------------------
Tests queue implementation.
Use: queue_test(a)
-------------------------------------------------------
Parameters:
a - list of data (list of ?)
Returns:
the methods of Queue are tested for both empty and
non-empty queues using the data in a:
is_empty, insert, remove, peek, len
-------------------------------------------------------
"""
queue = Queue()
dummy = []
if queue.is_empty() == True:
print('Queue is empty.')
array_to_queue(queue, a)
print('Converting a into a queue...')
if queue.is_empty() == False:
print('a has been transferred into queue!')
print('\nRemoving queue...')
while queue.is_empty() == False:
temp = queue.remove()
print(temp)
dummy.append(temp)
print('\nqueue is empty. Inserting values back into queue...')
while dummy != []:
temp = dummy.pop()
print(temp)
queue.insert(temp)
print('\nPushing complete! Peeking...')
print(queue.peek())
print('\nqueue is {} objects long!'.format(len(queue)))
return
def priority_queue_test(a):
"""
-------------------------------------------------------
Tests priority queue implementation.
Use: pq_test(a)
-------------------------------------------------------
Parameters:
a - list of data (list of ?)
Returns:
the methods of Priority_Queue are tested for both empty and
non-empty priority queues using the data in a:
is_empty, insert, remove, peek
-------------------------------------------------------
"""
pq = Priority_Queue()
dummy = []
if pq.is_empty() == True:
print('pq is empty.')
array_to_pq(pq, a)
print('Converting a into a pq...')
if pq.is_empty() == False:
print('a has been transferred into pq!')
print('\nRemoving pq...')
while pq.is_empty() == False:
temp = pq.remove()
print(temp)
dummy.append(temp)
    print('\npq is empty. Inserting values back into pq...')
while dummy != []:
temp = dummy.pop()
print(temp)
pq.insert(temp)
print('\nPushing complete! Peeking...')
print(pq.peek())
print('\npq is {} objects long!'.format(len(pq)))
return
def array_to_list(llist, source):
"""
-------------------------------------------------------
    Appends contents of source to llist. At finish, source is empty.
Last element in source is at rear of llist,
first element in source is at front of llist.
Use: array_to_list(llist, source)
-------------------------------------------------------
Parameters:
llist - a List object (List)
source - a Python list (list)
Returns:
None
-------------------------------------------------------
"""
while source:
llist.append(source.pop(0))
return
def list_to_array(llist, target):
"""
-------------------------------------------------------
Removes contents of llist into target. At finish, llist is empty.
Front element of llist is at front of target,
rear element of llist is at rear of target.
Use: list_to_array(llist, target)
-------------------------------------------------------
Parameters:
llist - a List object (List)
target - a Python list (list)
Returns:
None
-------------------------------------------------------
"""
while llist.is_empty() == False:
target.append(llist.pop(0))
return
def list_test(a):
"""
-------------------------------------------------------
Tests list implementation.
The methods of List are tested for both empty and
non-empty lists using the data in a:
is_empty, insert, remove, append, index, __contains__,
find, count, max, min, __getitem__, __setitem__
Use: list_test(a)
-------------------------------------------------------
Parameters:
a - list of data (list of ?)
Returns:
None
-------------------------------------------------------
"""
lst = List()
if lst.is_empty() == True:
print('lst is empty.')
array_to_list(lst, a)
print('Converting a into a lst...')
if lst.is_empty() == False:
print('a has been transferred into lst!')
print('The movie at index 0 is {}'.format(lst[0]))
    print('\nRemoving the movie at index 0...')
    temp = lst.remove(lst[0])
    print('Now the movie at index 0 is {}'.format(lst[0]))
    print('\nInserting the movie at index 1...')
    lst.insert(1, temp)
    print('Now the movie at index 1 is {}'.format(lst[1]))
    print('\nRemoving the movie at index 0...')
    temp = lst.remove(lst[0])
    print('\nAppending the movie...')
    lst.append(temp)
    print('Peeking...')
    print(lst.peek())
    print('\nThe index of the movie is {}'.format(lst.index(temp)))
    print('\n{} appears {} time(s)'.format(temp, lst.count(temp)))
    print('\nThe max is {}'.format(lst.max()))
    print('The min is {}'.format(lst.min()))
    print('\nThe movie is at index {}'.format(lst.find(temp)))
return
| <mask token>
from Stack_array import Stack
from Queue_array import Queue
from Priority_Queue_array import Priority_Queue
from List_array import List
def array_to_stack(stack, source):
"""
-------------------------------------------------------
Pushes contents of source onto stack. At finish, source is empty.
Last value in source is at bottom of stack,
first value in source is on top of stack.
Use: array_to_stack(stack, source)
-------------------------------------------------------
Parameters:
stack - a Stack object (Stack)
source - a Python list (list)
Returns:
None
-------------------------------------------------------
"""
while source != []:
temp = source.pop()
stack.push(temp)
return
def stack_to_array(stack, target):
"""
-------------------------------------------------------
Pops contents of stack into target. At finish, stack is empty.
Top value of stack is at end of target,
bottom value of stack is at beginning of target.
Use: stack_to_array(stack, target)
-------------------------------------------------------
Parameters:
stack - a Stack object (Stack)
target - a Python list (list)
Returns:
None
-------------------------------------------------------
"""
while stack.is_empty() == False:
temp = stack.pop()
target.insert(0, temp)
return
def stack_test(source):
"""
-------------------------------------------------------
Tests the methods of Stack for empty and
non-empty stacks using the data in source:
is_empty, push, pop, peek
(Testing pop and peek while empty throws exceptions)
Use: stack_test(source)
-------------------------------------------------------
Parameters:
source - list of data (list of ?)
Returns:
None
-------------------------------------------------------
"""
stack = Stack()
dummy = []
if stack.is_empty() == True:
print('Stack is empty.')
array_to_stack(stack, source)
print('Converting source into a stack...')
if stack.is_empty() == False:
print('source has been transferred into stack!')
print('\nPopping stack...')
while stack.is_empty() == False:
temp = stack.pop()
print(temp)
dummy.append(temp)
print('\nstack is empty. Pushing values back into stack...')
while dummy != []:
temp = dummy.pop()
print(temp)
stack.push(temp)
print('\nPushing complete! Peeking...')
print(stack.peek())
return
def array_to_queue(queue, source):
"""
-------------------------------------------------------
Inserts contents of source into queue. At finish, source is empty.
Last value in source is at rear of queue,
first value in source is at front of queue.
Use: array_to_queue(queue, source)
-------------------------------------------------------
Parameters:
queue - a Queue object (Queue)
source - a Python list (list)
Returns:
None
-------------------------------------------------------
"""
temp = None
while source != []:
temp = source.pop(0)
queue.insert(temp)
return
def queue_to_array(queue, target):
"""
-------------------------------------------------------
Removes contents of queue into target. At finish, queue is empty.
Front value of queue is at front of target,
rear value of queue is at end of target.
Use: queue_to_array(queue, target)
-------------------------------------------------------
Parameters:
queue - a Queue object (Queue)
target - a Python list (list)
Returns:
None
-------------------------------------------------------
"""
temp = None
while queue.is_empty() == False:
temp = queue.remove()
target.append(temp)
return
def array_to_pq(pq, source):
"""
-------------------------------------------------------
Inserts contents of source into pq. At finish, source is empty.
Last value in source is at rear of pq,
first value in source is at front of pq.
Use: array_to_pq(pq, source)
-------------------------------------------------------
Parameters:
pq - a Priority_Queue object (Priority_Queue)
source - a Python list (list)
Returns:
None
-------------------------------------------------------
"""
temp = None
while source != []:
temp = source.pop(0)
pq.insert(temp)
return
def pq_to_array(pq, target):
"""
-------------------------------------------------------
Removes contents of pq into target. At finish, pq is empty.
Highest priority value in pq is at front of target,
lowest priority value in pq is at end of target.
Use: pq_to_array(pq, target)
-------------------------------------------------------
Parameters:
pq - a Priority_Queue object (Priority_Queue)
target - a Python list (list)
Returns:
None
-------------------------------------------------------
"""
temp = None
while pq.is_empty() == False:
temp = pq.remove()
target.append(temp)
return
def queue_test(a):
"""
-------------------------------------------------------
Tests queue implementation.
Use: queue_test(a)
-------------------------------------------------------
Parameters:
a - list of data (list of ?)
Returns:
the methods of Queue are tested for both empty and
non-empty queues using the data in a:
is_empty, insert, remove, peek, len
-------------------------------------------------------
"""
queue = Queue()
dummy = []
if queue.is_empty() == True:
print('Queue is empty.')
array_to_queue(queue, a)
print('Converting a into a queue...')
if queue.is_empty() == False:
print('a has been transferred into queue!')
print('\nRemoving queue...')
while queue.is_empty() == False:
temp = queue.remove()
print(temp)
dummy.append(temp)
print('\nqueue is empty. Inserting values back into queue...')
while dummy != []:
temp = dummy.pop()
print(temp)
queue.insert(temp)
print('\nPushing complete! Peeking...')
print(queue.peek())
print('\nqueue is {} objects long!'.format(len(queue)))
return
def priority_queue_test(a):
"""
-------------------------------------------------------
Tests priority queue implementation.
Use: pq_test(a)
-------------------------------------------------------
Parameters:
a - list of data (list of ?)
Returns:
the methods of Priority_Queue are tested for both empty and
non-empty priority queues using the data in a:
is_empty, insert, remove, peek
-------------------------------------------------------
"""
pq = Priority_Queue()
dummy = []
if pq.is_empty() == True:
print('pq is empty.')
array_to_pq(pq, a)
print('Converting a into a pq...')
if pq.is_empty() == False:
print('a has been transferred into pq!')
print('\nRemoving pq...')
while pq.is_empty() == False:
temp = pq.remove()
print(temp)
dummy.append(temp)
    print('\npq is empty. Inserting values back into pq...')
while dummy != []:
temp = dummy.pop()
print(temp)
pq.insert(temp)
print('\nPushing complete! Peeking...')
print(pq.peek())
print('\npq is {} objects long!'.format(len(pq)))
return
def array_to_list(llist, source):
"""
-------------------------------------------------------
    Appends contents of source to llist. At finish, source is empty.
Last element in source is at rear of llist,
first element in source is at front of llist.
Use: array_to_list(llist, source)
-------------------------------------------------------
Parameters:
llist - a List object (List)
source - a Python list (list)
Returns:
None
-------------------------------------------------------
"""
while source:
llist.append(source.pop(0))
return
def list_to_array(llist, target):
"""
-------------------------------------------------------
Removes contents of llist into target. At finish, llist is empty.
Front element of llist is at front of target,
rear element of llist is at rear of target.
Use: list_to_array(llist, target)
-------------------------------------------------------
Parameters:
llist - a List object (List)
target - a Python list (list)
Returns:
None
-------------------------------------------------------
"""
while llist.is_empty() == False:
target.append(llist.pop(0))
return
def list_test(a):
"""
-------------------------------------------------------
Tests list implementation.
The methods of List are tested for both empty and
non-empty lists using the data in a:
is_empty, insert, remove, append, index, __contains__,
find, count, max, min, __getitem__, __setitem__
Use: list_test(a)
-------------------------------------------------------
Parameters:
a - list of data (list of ?)
Returns:
None
-------------------------------------------------------
"""
lst = List()
if lst.is_empty() == True:
print('lst is empty.')
array_to_list(lst, a)
print('Converting a into a lst...')
if lst.is_empty() == False:
print('a has been transferred into lst!')
print('The movie at index 0 is {}'.format(lst[0]))
    print('\nRemoving the movie at index 0...')
    temp = lst.remove(lst[0])
    print('Now the movie at index 0 is {}'.format(lst[0]))
    print('\nInserting the movie at index 1...')
    lst.insert(1, temp)
    print('Now the movie at index 1 is {}'.format(lst[1]))
    print('\nRemoving the movie at index 0...')
    temp = lst.remove(lst[0])
    print('\nAppending the movie...')
    lst.append(temp)
    print('Peeking...')
    print(lst.peek())
    print('\nThe index of the movie is {}'.format(lst.index(temp)))
    print('\n{} appears {} time(s)'.format(temp, lst.count(temp)))
    print('\nThe max is {}'.format(lst.max()))
    print('The min is {}'.format(lst.min()))
    print('\nThe movie is at index {}'.format(lst.find(temp)))
return
| """
-------------------------------------------------------
Stack utilities
-------------------------------------------------------
Author: Evan Attfield
ID: 180817010
Email: [email protected]
__updated__ = "Jan 22, 2019"
-------------------------------------------------------
"""
from Stack_array import Stack
from Queue_array import Queue
from Priority_Queue_array import Priority_Queue
from List_array import List
def array_to_stack(stack, source):
"""
-------------------------------------------------------
Pushes contents of source onto stack. At finish, source is empty.
Last value in source is at bottom of stack,
first value in source is on top of stack.
Use: array_to_stack(stack, source)
-------------------------------------------------------
Parameters:
stack - a Stack object (Stack)
source - a Python list (list)
Returns:
None
-------------------------------------------------------
"""
while source != []:
temp = source.pop()
stack.push(temp)
return
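# Hedged usage sketch (editorial addition): Stack is the array-backed class
# imported above.
#   s = Stack()
#   data = [1, 2, 3]
#   array_to_stack(s, data)  # data is now []; s.pop() yields 1, then 2, then 3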
def stack_to_array(stack, target):
"""
-------------------------------------------------------
Pops contents of stack into target. At finish, stack is empty.
Top value of stack is at end of target,
bottom value of stack is at beginning of target.
Use: stack_to_array(stack, target)
-------------------------------------------------------
Parameters:
stack - a Stack object (Stack)
target - a Python list (list)
Returns:
None
-------------------------------------------------------
"""
while stack.is_empty() == False:
temp = stack.pop()
target.insert(0, temp) #adds temp to the beginning, while append adds temp to the end
return
def stack_test(source):
"""
-------------------------------------------------------
Tests the methods of Stack for empty and
non-empty stacks using the data in source:
is_empty, push, pop, peek
(Testing pop and peek while empty throws exceptions)
Use: stack_test(source)
-------------------------------------------------------
Parameters:
source - list of data (list of ?)
Returns:
None
-------------------------------------------------------
"""
stack = Stack()
dummy = []
if stack.is_empty() == True:
print('Stack is empty.')
array_to_stack(stack, source)
print('Converting source into a stack...')
if stack.is_empty() == False:
print('source has been transferred into stack!')
print('\nPopping stack...')
while stack.is_empty() == False:
temp = stack.pop()
print(temp)
dummy.append(temp)
print('\nstack is empty. Pushing values back into stack...')
while dummy != []:
temp = dummy.pop()
print(temp)
stack.push(temp)
print('\nPushing complete! Peeking...')
print(stack.peek())
return
def array_to_queue(queue, source):
"""
-------------------------------------------------------
Inserts contents of source into queue. At finish, source is empty.
Last value in source is at rear of queue,
first value in source is at front of queue.
Use: array_to_queue(queue, source)
-------------------------------------------------------
Parameters:
queue - a Queue object (Queue)
source - a Python list (list)
Returns:
None
-------------------------------------------------------
"""
temp = None
while source != []:
temp = source.pop(0)
queue.insert(temp)
return
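# Hedged usage sketch (editorial addition):
#   q = Queue()
#   array_to_queue(q, [1, 2, 3])  # 1 at the front, 3 at the rear
#   q.remove()  # -> 1 (FIFO order)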
def queue_to_array(queue, target):
"""
-------------------------------------------------------
Removes contents of queue into target. At finish, queue is empty.
Front value of queue is at front of target,
rear value of queue is at end of target.
Use: queue_to_array(queue, target)
-------------------------------------------------------
Parameters:
queue - a Queue object (Queue)
target - a Python list (list)
Returns:
None
-------------------------------------------------------
"""
temp = None
while queue.is_empty() == False:
temp = queue.remove()
target.append(temp)
return
def array_to_pq(pq, source):
"""
-------------------------------------------------------
Inserts contents of source into pq. At finish, source is empty.
Last value in source is at rear of pq,
first value in source is at front of pq.
Use: array_to_pq(pq, source)
-------------------------------------------------------
Parameters:
pq - a Priority_Queue object (Priority_Queue)
source - a Python list (list)
Returns:
None
-------------------------------------------------------
"""
temp = None
while source != []:
temp = source.pop(0)
pq.insert(temp)
return
def pq_to_array(pq, target):
"""
-------------------------------------------------------
Removes contents of pq into target. At finish, pq is empty.
Highest priority value in pq is at front of target,
lowest priority value in pq is at end of target.
Use: pq_to_array(pq, target)
-------------------------------------------------------
Parameters:
pq - a Priority_Queue object (Priority_Queue)
target - a Python list (list)
Returns:
None
-------------------------------------------------------
"""
temp = None
while pq.is_empty() == False:
temp = pq.remove()
target.append(temp)
return
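# Hedged usage sketch (editorial addition): the priority rule is an
# assumption here; this file does not show whether smaller or larger
# values are served first.
#   pq = Priority_Queue()
#   array_to_pq(pq, [3, 1, 2])
#   pq.remove()  # -> 1, assuming smaller values have higher priority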
def queue_test(a):
"""
-------------------------------------------------------
Tests queue implementation.
Use: queue_test(a)
-------------------------------------------------------
Parameters:
a - list of data (list of ?)
Returns:
the methods of Queue are tested for both empty and
non-empty queues using the data in a:
is_empty, insert, remove, peek, len
-------------------------------------------------------
"""
queue = Queue()
dummy = []
if queue.is_empty() == True:
print('Queue is empty.')
array_to_queue(queue, a)
print('Converting a into a queue...')
if queue.is_empty() == False:
print('a has been transferred into queue!')
print('\nRemoving queue...')
while queue.is_empty() == False:
temp = queue.remove()
print(temp)
dummy.append(temp)
print('\nqueue is empty. Inserting values back into queue...')
while dummy != []:
temp = dummy.pop()
print(temp)
queue.insert(temp)
    print('\nInserting complete! Peeking...')
print(queue.peek())
print('\nqueue is {} objects long!'.format(len(queue)))
return
def priority_queue_test(a):
"""
-------------------------------------------------------
    Tests priority queue implementation.
    The methods of Priority_Queue are tested for both empty and
    non-empty priority queues using the data in a:
    is_empty, insert, remove, peek
    Use: priority_queue_test(a)
    -------------------------------------------------------
    Parameters:
        a - list of data (list of ?)
    Returns:
        None
-------------------------------------------------------
"""
pq = Priority_Queue()
dummy = []
if pq.is_empty() == True:
print('pq is empty.')
array_to_pq(pq, a)
print('Converting a into a pq...')
if pq.is_empty() == False:
print('a has been transferred into pq!')
print('\nRemoving pq...')
while pq.is_empty() == False:
temp = pq.remove()
print(temp)
dummy.append(temp)
        print('\npq is empty. Inserting values back into pq...')
        while dummy != []:
            temp = dummy.pop()
            print(temp)
            pq.insert(temp)
        print('\nInserting complete! Peeking...')
print(pq.peek())
print('\npq is {} objects long!'.format(len(pq)))
return
def array_to_list(llist, source):
"""
-------------------------------------------------------
Appends contests of source to llist. At finish, source is empty.
Last element in source is at rear of llist,
first element in source is at front of llist.
Use: array_to_list(llist, source)
-------------------------------------------------------
Parameters:
llist - a List object (List)
source - a Python list (list)
Returns:
None
-------------------------------------------------------
"""
    while source:  # a list is considered True as long as it is not empty
llist.append(source.pop(0))
return
def list_to_array(llist, target):
"""
-------------------------------------------------------
Removes contents of llist into target. At finish, llist is empty.
Front element of llist is at front of target,
rear element of llist is at rear of target.
Use: list_to_array(llist, target)
-------------------------------------------------------
Parameters:
llist - a List object (List)
target - a Python list (list)
Returns:
None
-------------------------------------------------------
"""
while llist.is_empty() == False:
target.append(llist.pop(0))
return
def list_test(a):
"""
-------------------------------------------------------
Tests list implementation.
The methods of List are tested for both empty and
non-empty lists using the data in a:
is_empty, insert, remove, append, index, __contains__,
find, count, max, min, __getitem__, __setitem__
Use: list_test(a)
-------------------------------------------------------
Parameters:
a - list of data (list of ?)
Returns:
None
-------------------------------------------------------
"""
lst = List()
if lst.is_empty() == True:
print('lst is empty.')
array_to_list(lst, a)
print('Converting a into a lst...')
if lst.is_empty() == False:
print('a has been transferred into lst!')
print('The movie at index 0 is {}'.format(lst[0]))
    print('\nRemoving the movie at index 0...')
    temp = lst.remove(lst[0])
    print('Now the movie at index 0 is {}'.format(lst[0]))
    print('\nInserting the movie at index 1...')
    lst.insert(1, temp)
    print('Now the movie at index 1 is {}'.format(lst[1]))
    print('\nRemoving the movie at index 0...')
    temp = lst.remove(lst[0])
    print('\nAppending the movie...')
    lst.append(temp)
    print('Peeking...')
    print(lst.peek())
    print('\nThe index of the movie is {}'.format(lst.index(temp)))
    print('\n{} appears {} time(s)'.format(temp, lst.count(temp)))
    print('\nThe max is {}'.format(lst.max()))
    print('The min is {}'.format(lst.min()))
    print('\nThe movie is at index {}'.format(lst.find(temp)))
return
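# --- Illustrative smoke test (my addition, not part of the original file).
# It assumes the Stack, Queue, Priority_Queue, and List classes imported at
# the top of this module expose the methods exercised by the tests above.
if __name__ == "__main__":
    movies = ["Alien", "Blade Runner", "Casablanca"]
    stack_test(list(movies))
    queue_test(list(movies))
    priority_queue_test(list(movies))
    list_test(list(movies))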
| [
4,
9,
12,
13,
14
] |
1,929 | 0e05eed2d6bc723fd8379e436621a6eba4aa5ab2 | <mask token>
| <mask token>
print(word[0])
<mask token>
print('こんにちわ、私の名前は {} です。'.format(name))
<mask token>
print('{}/{}/{}'.format(year, month, day))
for i in range(0, 5):
print('kamyu'[i])
print('aldous Huxley was born in 1894'.capitalize())
print('when? what? who?'.split())
<mask token>
print(word)
print('A screaming comes across the sky.'.replace('s', '$'))
print('Hemingway'.index('m'))
print('ケンシロウは言った"お前はもう死んでいる"とな')
print('アタタタ,' * 10 + 'オワッター!')
print('4月の晴れた寒い日で、時計がどれも十三時を打っていた。'.split('、')[0])
| <mask token>
word = "what's up"
print(word[0])
name = 'lady gaga'
print('こんにちわ、私の名前は {} です。'.format(name))
<mask token>
year = 1990
month = 7
day = 11
print('{}/{}/{}'.format(year, month, day))
for i in range(0, 5):
print('kamyu'[i])
print('aldous Huxley was born in 1894'.capitalize())
print('when? what? who?'.split())
<mask token>
word = ['the', 'fox', 'jumped', 'over', 'the', 'fence', '.']
word = ' '.join(word)
word = word[0:-2] + '.'
print(word)
print('A screaming comes across the sky.'.replace('s', '$'))
print('Hemingway'.index('m'))
print('ケンシロウは言った"お前はもう死んでいる"とな')
print('アタタタ,' * 10 + 'オワッター!')
print('4月の晴れた寒い日で、時計がどれも十三時を打っていた。'.split('、')[0])
| # python /Users/lawrie_6strings/be_professional_pythonist/control_string.py
# -*- coding: utf-8 -*-
# When you want to write a string across three lines
"""
どないやねん。
最近の若いもんは、
ようやるやんけ。
"""
# To get a specific character from a string, specify its index.
word = "what's up"
print(word[0])
# Formatting
name = "lady gaga"
print("こんにちわ、私の名前は {} です。".format(name))
"複数の文字列を挿入することもできる。"
year = 1990
month = 7
day = 11
print("{}/{}/{}".format(year, month, day))
# Challenges
## 1
for i in range(0, 5):
print("kamyu"[i])
## 2
# what = input("what:")
# who = input("who:")
# print("I write {},I send it to {}".format(what, who))
## 3
print("aldous Huxley was born in 1894".capitalize())
## 4
print("when? what? who?".split())
## 5
"最後のピリオドを再利用しようとしすぎて、詰まってしまった。"
word = ["the", "fox", "jumped", "over", "the", "fence", "."]
word = " ".join(word)
word = word[0:-2] + "."
print(word)
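## (Alternative sketch, my addition: keep the period out of the join instead,
##  e.g. " ".join(["the", "fox", "jumped", "over", "the", "fence"]) + "."
##  gives the same sentence without the slice-and-patch above.)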
## 6
print("A screeming comes across the sky.".replace("s", "$"))
## 7
print("Hemingway".index("m"))
## 8 When you want to put quote characters inside a string.
print("ケンシロウは言った\"お前はもう死んでいる\"とな")
## 9
print("アタタタ,"*10 + "オワッター!")
## 10
print("4月の晴れた寒い日で、時計がどれも十三時を打っていた。".split("、")[0]) | null | [
0,
1,
2,
3
] |
1,930 | 8d7697a0e49dc9e966b9657171c66ccda57279d6 | <mask token>
class TestStudent(unittest.TestCase):
def setUp(self):
desired_caps = {}
desired_caps['platformName'] = 'Android'
desired_caps['platformVersion'] = '7.0'
desired_caps['automationName'] = 'UIAutomator2'
desired_caps['deviceName'] = 'PRA-AL00'
desired_caps['app'] = PATH('../VIPStudent_2.0.4.apk')
desired_caps['appPackage'
] = 'com.pnlyy.pnlclass.pnlclass_student.ceshi'
desired_caps['unicodeKeyboard'] = True
desired_caps['resetKeyboard'] = True
desired_caps['fullReset'] = True
self.driver = webdriver.Remote('http://127.0.0.1:4723/wd/hub',
desired_caps)
sleep(3)
def tearDown(self):
self.driver.quit()
def changePwd(self):
driver = self.driver
sleep(2)
now = time.strftime('%Y-%m-%d %H_%M_%S')
print('\n021:修改密码----开始:' + now)
login(self)
sleep(2)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("个人中心")').click()
sleep(3)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("修改密码")').click()
sleep(2)
old = driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etOldPass')
old.click()
old.set_value('123456')
sleep(1)
new = driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etNewPass')
new.click()
new.set_value('123456wxl')
sleep(1)
again = driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etConfirmNewPass')
again.click()
again.set_value('123456wxl')
sleep(1)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("确认")').click()
sleep(3)
driver.swipe(1000, 1600, 1000, 1250, 1000)
sleep(2)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("退出登录")').click()
sleep(2)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("确定")').click()
sleep(2)
user = driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etUserName')
user.click()
user.set_value('13923121234')
sleep(1)
pwd = driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etPassword')
pwd.click()
pwd.set_value('123456wxl')
sleep(1)
driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/btnLogin').click()
sleep(4)
now = time.strftime('%Y-%m-%d %H_%M_%S')
sf0 = './' + now + '_021b_relogin_R.png'
driver.get_screenshot_as_file(sf0)
sleep(3)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("个人中心")').click()
sleep(3)
driver.swipe(1000, 1600, 1000, 1250, 1000)
sleep(2)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("退出登录")').click()
sleep(2)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("确定")').click()
sleep(2)
now = time.strftime('%Y-%m-%d %H_%M_%S')
print('\n021:修改密码----结束:' + now)
def changePwdBack(self):
driver = self.driver
sleep(2)
now = time.strftime('%Y-%m-%d %H_%M_%S')
print('\n021:重置密码----开始:' + now)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("登录")').click()
sleep(2)
user = driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etUserName')
user.click()
user.set_value('13923121234')
sleep(1)
pwd = driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etPassword')
pwd.click()
pwd.set_value('123456wxl')
sleep(1)
driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/btnLogin').click()
sleep(3)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("始终允许")').click()
sleep(2)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("始终允许")').click()
sleep(2)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("开始测试")').click()
sleep(2)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("点击开始录音")').click()
sleep(4)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("停止录音")').click()
sleep(3)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("有听到声音")').click()
sleep(3)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("下一步")').click()
sleep(3)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("下一步")').click()
sleep(3)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("您已完成测试")').click()
sleep(3)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("个人中心")').click()
sleep(3)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("修改密码")').click()
sleep(2)
old = driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etOldPass')
old.click()
old.set_value('123456wxl')
sleep(1)
new = driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etNewPass')
new.click()
new.set_value('123456')
sleep(1)
again = driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etConfirmNewPass')
again.click()
again.set_value('123456')
sleep(1)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("确认")').click()
sleep(3)
driver.swipe(1000, 1600, 1000, 1250, 1000)
sleep(2)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("退出登录")').click()
sleep(2)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("确定")').click()
sleep(2)
now = time.strftime('%Y-%m-%d %H_%M_%S')
print('\n021:重置密码----结束:' + now)
<mask token>
| <mask token>
class TestStudent(unittest.TestCase):
def setUp(self):
desired_caps = {}
desired_caps['platformName'] = 'Android'
desired_caps['platformVersion'] = '7.0'
desired_caps['automationName'] = 'UIAutomator2'
desired_caps['deviceName'] = 'PRA-AL00'
desired_caps['app'] = PATH('../VIPStudent_2.0.4.apk')
desired_caps['appPackage'
] = 'com.pnlyy.pnlclass.pnlclass_student.ceshi'
desired_caps['unicodeKeyboard'] = True
desired_caps['resetKeyboard'] = True
desired_caps['fullReset'] = True
self.driver = webdriver.Remote('http://127.0.0.1:4723/wd/hub',
desired_caps)
sleep(3)
def tearDown(self):
self.driver.quit()
def changePwd(self):
driver = self.driver
sleep(2)
now = time.strftime('%Y-%m-%d %H_%M_%S')
print('\n021:修改密码----开始:' + now)
login(self)
sleep(2)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("个人中心")').click()
sleep(3)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("修改密码")').click()
sleep(2)
old = driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etOldPass')
old.click()
old.set_value('123456')
sleep(1)
new = driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etNewPass')
new.click()
new.set_value('123456wxl')
sleep(1)
again = driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etConfirmNewPass')
again.click()
again.set_value('123456wxl')
sleep(1)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("确认")').click()
sleep(3)
driver.swipe(1000, 1600, 1000, 1250, 1000)
sleep(2)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("退出登录")').click()
sleep(2)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("确定")').click()
sleep(2)
user = driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etUserName')
user.click()
user.set_value('13923121234')
sleep(1)
pwd = driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etPassword')
pwd.click()
pwd.set_value('123456wxl')
sleep(1)
driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/btnLogin').click()
sleep(4)
now = time.strftime('%Y-%m-%d %H_%M_%S')
sf0 = './' + now + '_021b_relogin_R.png'
driver.get_screenshot_as_file(sf0)
sleep(3)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("个人中心")').click()
sleep(3)
driver.swipe(1000, 1600, 1000, 1250, 1000)
sleep(2)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("退出登录")').click()
sleep(2)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("确定")').click()
sleep(2)
now = time.strftime('%Y-%m-%d %H_%M_%S')
print('\n021:修改密码----结束:' + now)
def changePwdBack(self):
driver = self.driver
sleep(2)
now = time.strftime('%Y-%m-%d %H_%M_%S')
print('\n021:重置密码----开始:' + now)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("登录")').click()
sleep(2)
user = driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etUserName')
user.click()
user.set_value('13923121234')
sleep(1)
pwd = driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etPassword')
pwd.click()
pwd.set_value('123456wxl')
sleep(1)
driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/btnLogin').click()
sleep(3)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("始终允许")').click()
sleep(2)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("始终允许")').click()
sleep(2)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("开始测试")').click()
sleep(2)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("点击开始录音")').click()
sleep(4)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("停止录音")').click()
sleep(3)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("有听到声音")').click()
sleep(3)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("下一步")').click()
sleep(3)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("下一步")').click()
sleep(3)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("您已完成测试")').click()
sleep(3)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("个人中心")').click()
sleep(3)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("修改密码")').click()
sleep(2)
old = driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etOldPass')
old.click()
old.set_value('123456wxl')
sleep(1)
new = driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etNewPass')
new.click()
new.set_value('123456')
sleep(1)
again = driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etConfirmNewPass')
again.click()
again.set_value('123456')
sleep(1)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("确认")').click()
sleep(3)
driver.swipe(1000, 1600, 1000, 1250, 1000)
sleep(2)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("退出登录")').click()
sleep(2)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("确定")').click()
sleep(2)
now = time.strftime('%Y-%m-%d %H_%M_%S')
print('\n021:重置密码----结束:' + now)
if __name__ == '__main__':
testunit = unittest.TestSuite()
testunit.addTest(TestStudent('changePwdBack'))
now = time.strftime('%Y-%m-%d %H_%M_%S')
filename = './' + now + '_021b_result_R.html'
fp = open(filename, 'wb')
runner = HTMLTestRunner(stream=fp, title=
'测试学生版android7.0真机(Honor8Lite)[修改密码/重置密码]测试报告by Appium',
description='自动化测试脚本运行状态:')
runner.run(testunit)
fp.close()
| <mask token>
PATH = lambda p: os.path.abspath(os.path.join(os.path.dirname(__file__), p))
class TestStudent(unittest.TestCase):
def setUp(self):
desired_caps = {}
desired_caps['platformName'] = 'Android'
desired_caps['platformVersion'] = '7.0'
desired_caps['automationName'] = 'UIAutomator2'
desired_caps['deviceName'] = 'PRA-AL00'
desired_caps['app'] = PATH('../VIPStudent_2.0.4.apk')
desired_caps['appPackage'
] = 'com.pnlyy.pnlclass.pnlclass_student.ceshi'
desired_caps['unicodeKeyboard'] = True
desired_caps['resetKeyboard'] = True
desired_caps['fullReset'] = True
self.driver = webdriver.Remote('http://127.0.0.1:4723/wd/hub',
desired_caps)
sleep(3)
def tearDown(self):
self.driver.quit()
def changePwd(self):
driver = self.driver
sleep(2)
now = time.strftime('%Y-%m-%d %H_%M_%S')
print('\n021:修改密码----开始:' + now)
login(self)
sleep(2)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("个人中心")').click()
sleep(3)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("修改密码")').click()
sleep(2)
old = driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etOldPass')
old.click()
old.set_value('123456')
sleep(1)
new = driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etNewPass')
new.click()
new.set_value('123456wxl')
sleep(1)
again = driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etConfirmNewPass')
again.click()
again.set_value('123456wxl')
sleep(1)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("确认")').click()
sleep(3)
driver.swipe(1000, 1600, 1000, 1250, 1000)
sleep(2)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("退出登录")').click()
sleep(2)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("确定")').click()
sleep(2)
user = driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etUserName')
user.click()
user.set_value('13923121234')
sleep(1)
pwd = driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etPassword')
pwd.click()
pwd.set_value('123456wxl')
sleep(1)
driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/btnLogin').click()
sleep(4)
now = time.strftime('%Y-%m-%d %H_%M_%S')
sf0 = './' + now + '_021b_relogin_R.png'
driver.get_screenshot_as_file(sf0)
sleep(3)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("个人中心")').click()
sleep(3)
driver.swipe(1000, 1600, 1000, 1250, 1000)
sleep(2)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("退出登录")').click()
sleep(2)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("确定")').click()
sleep(2)
now = time.strftime('%Y-%m-%d %H_%M_%S')
print('\n021:修改密码----结束:' + now)
def changePwdBack(self):
driver = self.driver
sleep(2)
now = time.strftime('%Y-%m-%d %H_%M_%S')
print('\n021:重置密码----开始:' + now)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("登录")').click()
sleep(2)
user = driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etUserName')
user.click()
user.set_value('13923121234')
sleep(1)
pwd = driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etPassword')
pwd.click()
pwd.set_value('123456wxl')
sleep(1)
driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/btnLogin').click()
sleep(3)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("始终允许")').click()
sleep(2)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("始终允许")').click()
sleep(2)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("开始测试")').click()
sleep(2)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("点击开始录音")').click()
sleep(4)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("停止录音")').click()
sleep(3)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("有听到声音")').click()
sleep(3)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("下一步")').click()
sleep(3)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("下一步")').click()
sleep(3)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("您已完成测试")').click()
sleep(3)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("个人中心")').click()
sleep(3)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("修改密码")').click()
sleep(2)
old = driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etOldPass')
old.click()
old.set_value('123456wxl')
sleep(1)
new = driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etNewPass')
new.click()
new.set_value('123456')
sleep(1)
again = driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etConfirmNewPass')
again.click()
again.set_value('123456')
sleep(1)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("确认")').click()
sleep(3)
driver.swipe(1000, 1600, 1000, 1250, 1000)
sleep(2)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("退出登录")').click()
sleep(2)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("确定")').click()
sleep(2)
now = time.strftime('%Y-%m-%d %H_%M_%S')
print('\n021:重置密码----结束:' + now)
if __name__ == '__main__':
testunit = unittest.TestSuite()
testunit.addTest(TestStudent('changePwdBack'))
now = time.strftime('%Y-%m-%d %H_%M_%S')
filename = './' + now + '_021b_result_R.html'
fp = open(filename, 'wb')
runner = HTMLTestRunner(stream=fp, title=
'测试学生版android7.0真机(Honor8Lite)[修改密码/重置密码]测试报告by Appium',
description='自动化测试脚本运行状态:')
runner.run(testunit)
fp.close()
| import unittest, time, os
from time import sleep
from appium import webdriver
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from HTMLTestRunner import HTMLTestRunner
from appium.webdriver.common.touch_action import TouchAction
from pub_Student import login, logout
PATH = lambda p: os.path.abspath(os.path.join(os.path.dirname(__file__), p))
class TestStudent(unittest.TestCase):
def setUp(self):
desired_caps = {}
desired_caps['platformName'] = 'Android'
desired_caps['platformVersion'] = '7.0'
desired_caps['automationName'] = 'UIAutomator2'
desired_caps['deviceName'] = 'PRA-AL00'
desired_caps['app'] = PATH('../VIPStudent_2.0.4.apk')
desired_caps['appPackage'
] = 'com.pnlyy.pnlclass.pnlclass_student.ceshi'
desired_caps['unicodeKeyboard'] = True
desired_caps['resetKeyboard'] = True
desired_caps['fullReset'] = True
self.driver = webdriver.Remote('http://127.0.0.1:4723/wd/hub',
desired_caps)
sleep(3)
def tearDown(self):
self.driver.quit()
def changePwd(self):
driver = self.driver
sleep(2)
now = time.strftime('%Y-%m-%d %H_%M_%S')
print('\n021:修改密码----开始:' + now)
login(self)
sleep(2)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("个人中心")').click()
sleep(3)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("修改密码")').click()
sleep(2)
old = driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etOldPass')
old.click()
old.set_value('123456')
sleep(1)
new = driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etNewPass')
new.click()
new.set_value('123456wxl')
sleep(1)
again = driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etConfirmNewPass')
again.click()
again.set_value('123456wxl')
sleep(1)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("确认")').click()
sleep(3)
driver.swipe(1000, 1600, 1000, 1250, 1000)
sleep(2)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("退出登录")').click()
sleep(2)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("确定")').click()
sleep(2)
user = driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etUserName')
user.click()
user.set_value('13923121234')
sleep(1)
pwd = driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etPassword')
pwd.click()
pwd.set_value('123456wxl')
sleep(1)
driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/btnLogin').click()
sleep(4)
now = time.strftime('%Y-%m-%d %H_%M_%S')
sf0 = './' + now + '_021b_relogin_R.png'
driver.get_screenshot_as_file(sf0)
sleep(3)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("个人中心")').click()
sleep(3)
driver.swipe(1000, 1600, 1000, 1250, 1000)
sleep(2)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("退出登录")').click()
sleep(2)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("确定")').click()
sleep(2)
now = time.strftime('%Y-%m-%d %H_%M_%S')
print('\n021:修改密码----结束:' + now)
def changePwdBack(self):
driver = self.driver
sleep(2)
now = time.strftime('%Y-%m-%d %H_%M_%S')
print('\n021:重置密码----开始:' + now)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("登录")').click()
sleep(2)
user = driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etUserName')
user.click()
user.set_value('13923121234')
sleep(1)
pwd = driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etPassword')
pwd.click()
pwd.set_value('123456wxl')
sleep(1)
driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/btnLogin').click()
sleep(3)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("始终允许")').click()
sleep(2)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("始终允许")').click()
sleep(2)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("开始测试")').click()
sleep(2)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("点击开始录音")').click()
sleep(4)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("停止录音")').click()
sleep(3)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("有听到声音")').click()
sleep(3)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("下一步")').click()
sleep(3)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("下一步")').click()
sleep(3)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("您已完成测试")').click()
sleep(3)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("个人中心")').click()
sleep(3)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("修改密码")').click()
sleep(2)
old = driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etOldPass')
old.click()
old.set_value('123456wxl')
sleep(1)
new = driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etNewPass')
new.click()
new.set_value('123456')
sleep(1)
again = driver.find_element_by_id(
'com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etConfirmNewPass')
again.click()
again.set_value('123456')
sleep(1)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("确认")').click()
sleep(3)
driver.swipe(1000, 1600, 1000, 1250, 1000)
sleep(2)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("退出登录")').click()
sleep(2)
driver.find_element_by_android_uiautomator(
'new UiSelector().text("确定")').click()
sleep(2)
now = time.strftime('%Y-%m-%d %H_%M_%S')
print('\n021:重置密码----结束:' + now)
if __name__ == '__main__':
testunit = unittest.TestSuite()
testunit.addTest(TestStudent('changePwdBack'))
now = time.strftime('%Y-%m-%d %H_%M_%S')
filename = './' + now + '_021b_result_R.html'
fp = open(filename, 'wb')
runner = HTMLTestRunner(stream=fp, title=
'测试学生版android7.0真机(Honor8Lite)[修改密码/重置密码]测试报告by Appium',
description='自动化测试脚本运行状态:')
runner.run(testunit)
fp.close()
| #coding=utf-8
import unittest,time,os
from time import sleep
from appium import webdriver
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from HTMLTestRunner import HTMLTestRunner
from appium.webdriver.common.touch_action import TouchAction
from pub_Student import login,logout
# Returns abs path relative to this file and not cwd
PATH = lambda p: os.path.abspath(
os.path.join(os.path.dirname(__file__), p)
)
class TestStudent(unittest.TestCase):
def setUp(self):
desired_caps = {}
desired_caps['platformName'] = 'Android'
desired_caps['platformVersion'] = '7.0'
desired_caps['automationName'] = 'UIAutomator2'
desired_caps['deviceName'] = 'PRA-AL00'
#desired_caps['udid'] = 'HMKNW17225011700'
desired_caps['app'] = PATH('../VIPStudent_2.0.4.apk')
desired_caps['appPackage'] = 'com.pnlyy.pnlclass.pnlclass_student.ceshi'
desired_caps['unicodeKeyboard'] = True
desired_caps['resetKeyboard'] = True
desired_caps['fullReset'] = True
self.driver = webdriver.Remote('http://127.0.0.1:4723/wd/hub', desired_caps)
sleep(3)
def tearDown(self):
# end the session
self.driver.quit()
def changePwd(self):
driver=self.driver
sleep(2)
now=time.strftime('%Y-%m-%d %H_%M_%S')
print('\n021:修改密码----开始:'+now)
login(self)
sleep(2)
driver.find_element_by_android_uiautomator('new UiSelector().text("个人中心")').click()
sleep(3)
driver.find_element_by_android_uiautomator('new UiSelector().text("修改密码")').click()
sleep(2)
old=driver.find_element_by_id('com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etOldPass')
old.click()
old.set_value('123456')
sleep(1)
new=driver.find_element_by_id('com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etNewPass')
new.click()
new.set_value('123456wxl')
sleep(1)
#com.android.gallery3d:id/head_select_right
again=driver.find_element_by_id('com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etConfirmNewPass')
again.click()
again.set_value('123456wxl')
sleep(1)
driver.find_element_by_android_uiautomator('new UiSelector().text("确认")').click()
sleep(3)
driver.swipe(1000,1600,1000,1250,1000)
sleep(2)
driver.find_element_by_android_uiautomator('new UiSelector().text("退出登录")').click()
sleep(2)
driver.find_element_by_android_uiautomator('new UiSelector().text("确定")').click()
sleep(2)
user=driver.find_element_by_id('com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etUserName')
user.click()
user.set_value('13923121234')
sleep(1)
pwd=driver.find_element_by_id('com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etPassword')
pwd.click()
pwd.set_value('123456wxl')
sleep(1)
driver.find_element_by_id('com.pnlyy.pnlclass.pnlclass_student.ceshi:id/btnLogin').click()
sleep(4)
now=time.strftime('%Y-%m-%d %H_%M_%S')
sf0='./'+now+'_021b_relogin_R.png'
driver.get_screenshot_as_file(sf0)
sleep(3)
driver.find_element_by_android_uiautomator('new UiSelector().text("个人中心")').click()
sleep(3)
driver.swipe(1000,1600,1000,1250,1000)
sleep(2)
driver.find_element_by_android_uiautomator('new UiSelector().text("退出登录")').click()
sleep(2)
driver.find_element_by_android_uiautomator('new UiSelector().text("确定")').click()
sleep(2)
now=time.strftime('%Y-%m-%d %H_%M_%S')
print('\n021:修改密码----结束:'+now)
def changePwdBack(self):
driver=self.driver
sleep(2)
now=time.strftime('%Y-%m-%d %H_%M_%S')
print('\n021:重置密码----开始:'+now)
driver.find_element_by_android_uiautomator('new UiSelector().text("登录")').click()
sleep(2)
user=driver.find_element_by_id('com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etUserName')
user.click()
user.set_value('13923121234')
sleep(1)
pwd=driver.find_element_by_id('com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etPassword')
pwd.click()
pwd.set_value('123456wxl')
sleep(1)
driver.find_element_by_id('com.pnlyy.pnlclass.pnlclass_student.ceshi:id/btnLogin').click()
sleep(3)
driver.find_element_by_android_uiautomator('new UiSelector().text("始终允许")').click()
sleep(2)
driver.find_element_by_android_uiautomator('new UiSelector().text("始终允许")').click()
sleep(2)
#test now
driver.find_element_by_android_uiautomator('new UiSelector().text("开始测试")').click()
sleep(2)
driver.find_element_by_android_uiautomator('new UiSelector().text("点击开始录音")').click()
sleep(4)
driver.find_element_by_android_uiautomator('new UiSelector().text("停止录音")').click()
sleep(3)
driver.find_element_by_android_uiautomator('new UiSelector().text("有听到声音")').click()
sleep(3)
driver.find_element_by_android_uiautomator('new UiSelector().text("下一步")').click()
sleep(3)
driver.find_element_by_android_uiautomator('new UiSelector().text("下一步")').click()
sleep(3)
driver.find_element_by_android_uiautomator('new UiSelector().text("您已完成测试")').click()
sleep(3)
driver.find_element_by_android_uiautomator('new UiSelector().text("个人中心")').click()
sleep(3)
driver.find_element_by_android_uiautomator('new UiSelector().text("修改密码")').click()
sleep(2)
old=driver.find_element_by_id('com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etOldPass')
old.click()
old.set_value('123456wxl')
sleep(1)
new=driver.find_element_by_id('com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etNewPass')
new.click()
new.set_value('123456')
sleep(1)
#com.android.gallery3d:id/head_select_right
again=driver.find_element_by_id('com.pnlyy.pnlclass.pnlclass_student.ceshi:id/etConfirmNewPass')
again.click()
again.set_value('123456')
sleep(1)
driver.find_element_by_android_uiautomator('new UiSelector().text("确认")').click()
sleep(3)
driver.swipe(1000,1600,1000,1250,1000)
sleep(2)
driver.find_element_by_android_uiautomator('new UiSelector().text("退出登录")').click()
sleep(2)
driver.find_element_by_android_uiautomator('new UiSelector().text("确定")').click()
sleep(2)
now=time.strftime('%Y-%m-%d %H_%M_%S')
print('\n021:重置密码----结束:'+now)
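# Reliability sketch (my addition, not part of the original suite): the fixed
# sleep() calls above could be replaced by an explicit wait, which tends to be
# less flaky. This assumes selenium's WebDriverWait, which the Appium Python
# client used here supports.
def wait_for_text(driver, text, timeout=10):
    from selenium.webdriver.support.ui import WebDriverWait
    return WebDriverWait(driver, timeout).until(
        lambda d: d.find_element_by_android_uiautomator(
            'new UiSelector().text("%s")' % text))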
if __name__ == '__main__':
testunit=unittest.TestSuite()
#testunit.addTest(TestStudent('changePwd'))
testunit.addTest(TestStudent('changePwdBack'))
now=time.strftime('%Y-%m-%d %H_%M_%S')
filename='./'+now+'_021b_result_R.html'
fp=open(filename,'wb')
runner=HTMLTestRunner(stream=fp,title='测试学生版android7.0真机(Honor8Lite)[修改密码/重置密码]测试报告by Appium',
description='自动化测试脚本运行状态:')
runner.run(testunit)
fp.close()
| [
5,
6,
7,
8,
9
] |
1,931 | e92d770f9d2176b4943653b09ac1069fa3301e46 | import glob
from PIL import Image
from PIL.ExifTags import TAGS, GPSTAGS
from pyproj import Proj
from osgeo import gdal, osr
from PyQt4.QtCore import QFile, QFileInfo
import os
from os import walk
#slika="c:\slike\Zito\DJI_0060.jpg"
#georef_slika="c:\Slike\Zito\Georeferencirana.tif"
radni_dir = 'c:/slike/Zito/testiranje/'
#-----------------Extract LAT/LONG from EXIF--------------------------------------------------------------------
def exif(img):
exif_data = {}
try:
i = Image.open(img)
tags = i._getexif()
for tag, value in tags.items():
decoded = TAGS.get(tag, tag)
exif_data[decoded] = value
except:
pass
return exif_data
def dms2dd(d, m, s, i):
sec = float((m * 60) + s)
dec = float(sec / 3600)
deg = float(d + dec)
if i.upper() == "W":
deg = deg * -1
elif i.upper() == "S":
deg = deg * -1
return float(deg)
def gps(exif):
lat = None
lon = None
if exif["GPSInfo"]:
# Lat
coords = exif["GPSInfo"]
i = coords[1]
d = coords[2][0][0]
m = coords[2][1][0]
s = coords[2][2][0]
lat = dms2dd(d, m ,s, i)
lat = float(str(d)+str(m)+str(s))/100000000
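        # (Note, my addition: this overwrite discards the dms2dd() result
        #  above, so the hemisphere sign computed there is lost; behaviour
        #  kept as in the original.)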
# Lon
i = coords[3]
d = coords[4][0][0]
m = coords[4][1][0]
s = coords[4][2][0]
lon = float(str(d)+str(m)+str(s))/100000000
return lat, lon
#------------------Convert LAT/LONG to UTM----------------------------------------------------------------------
def pretvarac(fotka):
    global UTMx, UTMy  # declared before assignment (the original declared them afterwards, which is invalid)
    Lat = gps(exif(fotka))[0]
    Lon = gps(exif(fotka))[1]
    print "Image lon/lat coordinates: ", Lon, " ", Lat
    ZoneNo = "34"  # entered manually; could be derived from the longitude (see utm_zone below)
    myProj = Proj("+proj=utm +zone=" + ZoneNo + " +north +ellps=WGS84 +datum=WGS84 +units=m +no_defs")  # north for the northern hemisphere
    UTMx, UTMy = myProj(Lon, Lat)
    UTMx = round(UTMx, 2)  # round() returns the value; the original discarded it
    UTMy = round(UTMy, 2)
    print "Image UTM coordinates: ", UTMx, " ", UTMy
    return UTMx, UTMy
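# Zone sketch (my addition, prompted by the note above): the UTM zone number
# can be derived from the longitude instead of being hard-coded.
def utm_zone(lon):
    return int((lon + 180) // 6) + 1  # e.g. lon ~ 19E -> zone 34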
#--------------------Georeferencing-----------------------------------------------------------------------------
def georeferenciranje(src_filename,dst_filename):
src_ds = gdal.Open(src_filename)
format = "GTiff"
driver = gdal.GetDriverByName(format)
dst_ds = driver.CreateCopy(dst_filename, src_ds, 0)
# Specify raster location through geotransform array
# (uperleftx, scalex, skewx, uperlefty, skewy, scaley)
# Scale = size of one pixel in units of raster projection
# this example below assumes 100x100
gt = [UTMx, 100, 0, UTMy, 0, -100]
dst_ds.SetGeoTransform(gt)
epsg = 3857
srs = osr.SpatialReference()
srs.ImportFromEPSG(epsg)
dest_wkt = srs.ExportToWkt()
dst_ds.SetProjection(dest_wkt)
dst_ds = None
src_ds = None
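# (Note, my addition: the geotransform origin above is in UTM metres while the
#  CRS written is EPSG:3857 (Web Mercator); for UTM zone 34N on WGS84 the
#  matching code would be EPSG:32634. Left as in the original.)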
#-----------------Loading into QGIS-----------------------------------------------------------------------------
def ubacivanje(fileName):
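    # (Note, my addition: QgsRasterLayer and iface are not imported in this
    #  script; this function assumes it runs inside the QGIS Python console,
    #  where both are provided.)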
print "ubacujem raster"
#fileName = dst_filename
fileInfo = QFileInfo(fileName)
baseName = fileInfo.baseName()
rlayer = QgsRasterLayer(fileName, baseName)
iface.addRasterLayer(fileName, "Raster Layer Zito")
print "raster ubacen"
#----------------Folder loop------------------------------------------------------------------------------------
li = []
l = 0
os.chdir(radni_dir)
# Builds a list of image file names (li)
for file in glob.glob("*.jpg"):
    li.append(os.path.splitext(file)[0])
    l += 1
    pretvarac(file)
    gr = os.path.dirname(file) + str(l) + '_georeferencirana' + '.tif'
    georeferenciranje(file, gr)
    ubacivanje(gr)
#pretvarac(slika)
#georeferenciranje(slika,georef_slika)
#ubacivanje(georef_slika)
| null | null | null | null | [
0
] |
1,932 | 4e10bc876797d0939c91cff5eff497b36af35dcb | <mask token>
| print('hello')
print('===================================================')
print('Nama Lengkap : Agung Dharmawan')
print('Kelas : Teknik Informatika 2018 A')
print('Kampus : Universitas Nahdlatul Ulama Sidoarjo')
print('===================================================')
| print ("hello")
print ("===================================================")
print ("Nama Lengkap : Agung Dharmawan")
print ("Kelas : Teknik Informatika 2018 A")
print ("Kampus : Universitas Nahdlatul Ulama Sidoarjo")
print ("===================================================")
| null | null | [
0,
1,
2
] |
1,933 | 7d9032b2426dbf3c285b99efa78be38d8f76ec24 | <mask token>
| <mask token>
print(list(result))
<mask token>
print(list(result))
| <mask token>
even_integers = lambda a: a % 2 == 0
input = [11, 4, 5, 8, 9, 2, 12]
result = filter(even_integers, input)
print(list(result))
input = [3, 5, 7]
result = filter(even_integers, input)
print(list(result))
| '''
filter_items = lambda a : a[0] == 'b'
fruits = ["apple", "banana", "pear", "orange"]
result = filter(filter_items, fruits)
print(list(result))
'''
'''
Given a list of integers, return the even integers in the list.
input = [11, 4, 5, 8, 9, 2, 12]
output = [4, 8, 2, 12]
input = [3, 5, 7]
output = []
'''
# even_integers = lambda a : a / 2 == 0
even_integers = lambda a : a % 2 == 0
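# (Equivalent sketch, my addition: a list comprehension gives the same result
#  and is often preferred over filter() in Python, e.g.
#  [n for n in input if n % 2 == 0])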
input = [11, 4, 5, 8, 9, 2, 12]
result = filter(even_integers, input)
print(list(result))
input = [3, 5, 7]
result = filter(even_integers, input)
print(list(result)) | null | [
0,
1,
2,
3
] |
1,934 | 11d0e84767f7e9e4687962a3a5c58dc882cc4dd2 | <mask token>
| <mask token>
for directory_path, directory_names, file_names in walk('data'):
for file_name in file_names:
package_data.append('{}/{}'.format(directory_path, file_name))
setup(name='ccal', version=VERSION, description=
'Computational Cancer Analysis Library', url=
'https://github.com/KwatME/ccal', author=
'Kwat Medetgul-Ernar (Huwate Yeerna)', author_email='[email protected]',
license='LICENSE', classifiers=('Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.7', 'Natural Language :: English',
'Topic :: Scientific/Engineering :: Bio-Informatics'), python_requires=
'>=3.6', install_requires=(), include_package_data=True, package_data={
'ccal': package_data})
| <mask token>
package_data = []
for directory_path, directory_names, file_names in walk('data'):
for file_name in file_names:
package_data.append('{}/{}'.format(directory_path, file_name))
setup(name='ccal', version=VERSION, description=
'Computational Cancer Analysis Library', url=
'https://github.com/KwatME/ccal', author=
'Kwat Medetgul-Ernar (Huwate Yeerna)', author_email='[email protected]',
license='LICENSE', classifiers=('Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.7', 'Natural Language :: English',
'Topic :: Scientific/Engineering :: Bio-Informatics'), python_requires=
'>=3.6', install_requires=(), include_package_data=True, package_data={
'ccal': package_data})
| from os import walk
from ccal import VERSION
from setuptools import setup
package_data = []
for directory_path, directory_names, file_names in walk('data'):
for file_name in file_names:
package_data.append('{}/{}'.format(directory_path, file_name))
setup(name='ccal', version=VERSION, description=
'Computational Cancer Analysis Library', url=
'https://github.com/KwatME/ccal', author=
'Kwat Medetgul-Ernar (Huwate Yeerna)', author_email='[email protected]',
license='LICENSE', classifiers=('Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.7', 'Natural Language :: English',
'Topic :: Scientific/Engineering :: Bio-Informatics'), python_requires=
'>=3.6', install_requires=(), include_package_data=True, package_data={
'ccal': package_data})
| from os import walk
from ccal import VERSION
from setuptools import setup
package_data = []
for directory_path, directory_names, file_names in walk("data"):
for file_name in file_names:
package_data.append("{}/{}".format(directory_path, file_name))
setup(
name="ccal",
version=VERSION,
description="Computational Cancer Analysis Library",
url="https://github.com/KwatME/ccal",
author="Kwat Medetgul-Ernar (Huwate Yeerna)",
author_email="[email protected]",
license="LICENSE",
classifiers=(
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3.7",
"Natural Language :: English",
"Topic :: Scientific/Engineering :: Bio-Informatics",
),
python_requires=">=3.6",
install_requires=(),
include_package_data=True,
package_data={"ccal": package_data},
)
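# Usage note (my addition): with this setup.py an editable install is
# `pip install -e .`; the walk() above is intended to bundle everything found
# under data/ as package data for the `ccal` package.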
| [
0,
1,
2,
3,
4
] |
1,935 | 495d606304e07a097033366d1a7e1d856a4cf61f | <mask token>
| <mask token>
@bp.route('/')
@bp.route('/index')
@login_required
def index():
return render_template('index.html')
| from flask import render_template, flash, redirect, url_for, request
from flask_login import current_user, login_user, logout_user, login_required
from werkzeug.urls import url_parse
from app import db
from app.models import User
from app.main import bp
@bp.route('/')
@bp.route('/index')
@login_required
def index():
return render_template('index.html')
| from flask import render_template, flash, redirect, url_for, request
from flask_login import current_user, login_user, logout_user, login_required
from werkzeug.urls import url_parse
from app import db
# from app.main.forms import [list forms here]
from app.models import User
from app.main import bp
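# (Note, my addition: for the route below to serve, this blueprint must be
#  registered on the application, typically via app.register_blueprint(bp)
#  in the app factory.)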
@bp.route('/')
@bp.route('/index')
@login_required
def index():
return render_template('index.html') | null | [
0,
1,
2,
3
] |
1,936 | c15faf9df8fa2e1ad89ea2c922ab0551eaa69d3f | <mask token>
| <mask token>
def test_pixel_pixelMatchesColor():
"""屏幕像素获取、屏幕像素匹配"""
print(pixelMatchesColor(44, 107, (148, 212, 234), tolerance=20))
print(pixelMatchesColor(44, 107, (100, 212, 234), tolerance=20))
<mask token>
| <mask token>
def test_pixel_pixelMatchesColor():
"""屏幕像素获取、屏幕像素匹配"""
print(pixelMatchesColor(44, 107, (148, 212, 234), tolerance=20))
print(pixelMatchesColor(44, 107, (100, 212, 234), tolerance=20))
test_pixel_pixelMatchesColor()
| <mask token>
from pyautogui import pixel, pixelMatchesColor, screenshot
<mask token>
def test_pixel_pixelMatchesColor():
"""屏幕像素获取、屏幕像素匹配"""
print(pixelMatchesColor(44, 107, (148, 212, 234), tolerance=20))
print(pixelMatchesColor(44, 107, (100, 212, 234), tolerance=20))
test_pixel_pixelMatchesColor()
| """
pyautogui screen-pixel reading and pixel-color matching
@author : zhouhuajian
@version : v1.0
"""
from pyautogui import pixel, pixelMatchesColor, screenshot
"""
Main contents:
1. pixel() - read a screen pixel;
2. pixelMatchesColor() - match a screen pixel against an expected color.
"""
def test_pixel_pixelMatchesColor():
"""屏幕像素获取、屏幕像素匹配"""
# img = screenshot()
# print(img)
# print(img.getpixel((44, 107)))
# (149, 212, 234)
# print(pixel(44, 107))
    # adjust the expected color based on the value returned above
# print(pixelMatchesColor(44, 107, (149, 212, 234)))
# print(pixelMatchesColor(44, 107, (148, 212, 234)))
    # simple tolerance adjustment for the color match
print(pixelMatchesColor(44, 107, (148, 212, 234), tolerance=20))
print(pixelMatchesColor(44, 107, (100, 212, 234), tolerance=20))
    # retry/wait idea for small projects (see the sketch below)
test_pixel_pixelMatchesColor()
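# Retry/wait sketch (my addition, prompted by the note above): poll until the
# pixel matches the expected color or a timeout expires. Uses only the
# pixelMatchesColor API imported above.
def wait_for_pixel(x, y, color, tolerance=20, timeout=5.0, interval=0.2):
    import time
    deadline = time.time() + timeout
    while time.time() < deadline:
        if pixelMatchesColor(x, y, color, tolerance=tolerance):
            return True
        time.sleep(interval)
    return False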
| [
0,
1,
2,
3,
4
] |
1,937 | 95f9e9a8f681679f56c3755199fba7d654af85e8 | <mask token>
def getAllMembersFromDB(**kwargs):
"""Finds and returns all the registered members"""
isResponseParsed = kwargs.get('isParsed', False)
logging.info('Trying to find all the users')
try:
rawMembersData = Member.objects()
parsedMembers = [MemberInDBSchema(**memberHelper(rawMember)) for
rawMember in rawMembersData]
logging.info('Found all the users')
if not isResponseParsed:
return parsedMembers
resp = [parsedMember.dict(exclude={'mongoDocument'}) for
parsedMember in parsedMembers]
return parseControllerResponse(data=resp, statuscode=200, message=
'Successfully found the users')
except Exception as e:
helpfulErrorMessage = "Couldn't find all the users due to " + e
logging.error(helpfulErrorMessage)
if isResponseParsed:
return parseControllerResponse(statuscode=500, message=
'Something went wrong, try again later', error=
helpfulErrorMessage)
        raise Exception(helpfulErrorMessage)
def getMemberFromDiscordHandle(discordHandle: str):
"""Finds and returns the user with the given discord handle, if
such a user doesn't exist, return None"""
try:
member_ = Member.objects(discordHandle=discordHandle).first()
assert member_
member = MemberInDBSchema(**memberHelper(member_))
return member
except AssertionError as _:
return None
except Exception as e:
raise Exception(
"Couldn't find a user with the discord handle {}, due to {}"
.format(discordHandle, e))
<mask token>
def getMemberWithGivenId(id: Union[str, ObjectId], **kwargs):
"""Finds and returns the user with the given id, if
such a user doesn't exist, return None"""
isResponseParsed = kwargs.get('isParsed', False)
rawData = kwargs.get('rawData', False)
    logging.info('Trying to find the user with the id=' + str(id))
try:
user = Member.objects(id=id).first()
assert user
logging.debug('Found a user {}, with the id={}'.format(memberHelper
(user), id))
        logging.info('Found the user with id=' + str(id))
if not isResponseParsed:
return user if rawData else MemberInDBSchema(**memberHelper(user))
return parseControllerResponse(data=MemberInDBSchema(**memberHelper
(user)).dict(exclude={'mongoDocument'}), statuscode=200,
message='Successfully found the user')
except AssertionError as _:
logging.info('A user with id={} does not exist'.format(id))
if isResponseParsed:
return parseControllerResponse(data=None, statuscode=404,
message='User not found', error=
'A user with id={} does not exist'.format(id))
return None
except Exception as e:
helpfulErrorMsg = ("Couldn't find a user with the userId {}, due to {}"
.format(id, e))
logging.error(helpfulErrorMsg)
if isResponseParsed:
return parseControllerResponse(data=None, statuscode=500,
message='Something went wrong, try again later.', error=
helpfulErrorMsg)
        raise Exception(helpfulErrorMsg)
<mask token>
| <mask token>
def getAllMembersFromDB(**kwargs):
"""Finds and returns all the registered members"""
isResponseParsed = kwargs.get('isParsed', False)
logging.info('Trying to find all the users')
try:
rawMembersData = Member.objects()
parsedMembers = [MemberInDBSchema(**memberHelper(rawMember)) for
rawMember in rawMembersData]
logging.info('Found all the users')
if not isResponseParsed:
return parsedMembers
resp = [parsedMember.dict(exclude={'mongoDocument'}) for
parsedMember in parsedMembers]
return parseControllerResponse(data=resp, statuscode=200, message=
'Successfully found the users')
except Exception as e:
helpfulErrorMessage = "Couldn't find all the users due to " + e
logging.error(helpfulErrorMessage)
if isResponseParsed:
return parseControllerResponse(statuscode=500, message=
'Something went wrong, try again later', error=
helpfulErrorMessage)
        raise Exception(helpfulErrorMessage)
def getMemberFromDiscordHandle(discordHandle: str):
"""Finds and returns the user with the given discord handle, if
such a user doesn't exist, return None"""
try:
member_ = Member.objects(discordHandle=discordHandle).first()
assert member_
member = MemberInDBSchema(**memberHelper(member_))
return member
except AssertionError as _:
return None
except Exception as e:
raise Exception(
"Couldn't find a user with the discord handle {}, due to {}"
.format(discordHandle, e))
def getMemberFromRollNumber(rollNumber: int, **kwargs):
"""Finds and returns the user with the given roll number, if
such a user doesn't exist, return None"""
isResponseParsed = kwargs.get('isParsed', False)
rawData = kwargs.get('rawData', False)
try:
user = Member.objects(rollno=rollNumber).first()
assert user
        logging.debug('Found a user {}, with the rollno={}'.format(
            memberHelper(user), rollNumber))
        logging.info('Found the user with rollNumber=' + str(rollNumber))
if not isResponseParsed:
return user if rawData else MemberInDBSchema(**memberHelper(user))
return parseControllerResponse(data=MemberInDBSchema(**memberHelper
(user)).dict(exclude={'mongoDocument'}), statuscode=200,
message='Successfully found the user')
except AssertionError as _:
        logging.info('A user with roll number={} does not exist'.format(
rollNumber))
if isResponseParsed:
return parseControllerResponse(data=None, statuscode=404,
message='User not found', error=
'A user with rollnumber={} does not exist'.format(rollNumber))
return None
except Exception as e:
helpfulErrorMsg = (
f"Couldn't find a user with the rollNumber = {rollNumber!r}, due to {e}"
)
logging.error(helpfulErrorMsg)
if isResponseParsed:
return parseControllerResponse(data=None, statuscode=500,
message='Something went wrong, try again later.', error=
helpfulErrorMsg)
        raise Exception(helpfulErrorMsg)
def getMemberWithGivenId(id: Union[str, ObjectId], **kwargs):
"""Finds and returns the user with the given id, if
such a user doesn't exist, return None"""
isResponseParsed = kwargs.get('isParsed', False)
rawData = kwargs.get('rawData', False)
    logging.info('Trying to find the user with the id=' + str(id))
try:
user = Member.objects(id=id).first()
assert user
logging.debug('Found a user {}, with the id={}'.format(memberHelper
(user), id))
        logging.info('Found the user with id=' + str(id))
if not isResponseParsed:
return user if rawData else MemberInDBSchema(**memberHelper(user))
return parseControllerResponse(data=MemberInDBSchema(**memberHelper
(user)).dict(exclude={'mongoDocument'}), statuscode=200,
message='Successfully found the user')
except AssertionError as _:
logging.info('A user with id={} does not exist'.format(id))
if isResponseParsed:
return parseControllerResponse(data=None, statuscode=404,
message='User not found', error=
'A user with id={} does not exist'.format(id))
return None
except Exception as e:
helpfulErrorMsg = ("Couldn't find a user with the userId {}, due to {}"
.format(id, e))
logging.error(helpfulErrorMsg)
if isResponseParsed:
return parseControllerResponse(data=None, statuscode=500,
message='Something went wrong, try again later.', error=
helpfulErrorMsg)
        raise Exception(helpfulErrorMsg)
<mask token>
| <mask token>
def getAllMembersFromDB(**kwargs):
"""Finds and returns all the registered members"""
isResponseParsed = kwargs.get('isParsed', False)
logging.info('Trying to find all the users')
try:
rawMembersData = Member.objects()
parsedMembers = [MemberInDBSchema(**memberHelper(rawMember)) for
rawMember in rawMembersData]
logging.info('Found all the users')
if not isResponseParsed:
return parsedMembers
resp = [parsedMember.dict(exclude={'mongoDocument'}) for
parsedMember in parsedMembers]
return parseControllerResponse(data=resp, statuscode=200, message=
'Successfully found the users')
except Exception as e:
helpfulErrorMessage = "Couldn't find all the users due to " + e
logging.error(helpfulErrorMessage)
if isResponseParsed:
return parseControllerResponse(statuscode=500, message=
'Something went wrong, try again later', error=
helpfulErrorMessage)
        raise Exception(helpfulErrorMessage)
def getMemberFromDiscordHandle(discordHandle: str):
"""Finds and returns the user with the given discord handle, if
such a user doesn't exist, return None"""
try:
member_ = Member.objects(discordHandle=discordHandle).first()
assert member_
member = MemberInDBSchema(**memberHelper(member_))
return member
except AssertionError as _:
return None
except Exception as e:
raise Exception(
"Couldn't find a user with the discord handle {}, due to {}"
.format(discordHandle, e))
def getMemberFromRollNumber(rollNumber: int, **kwargs):
"""Finds and returns the user with the given roll number, if
such a user doesn't exist, return None"""
isResponseParsed = kwargs.get('isParsed', False)
rawData = kwargs.get('rawData', False)
try:
user = Member.objects(rollno=rollNumber).first()
assert user
        logging.debug('Found a user {}, with the rollno={}'.format(
            memberHelper(user), rollNumber))
        logging.info('Found the user with rollNumber=' + str(rollNumber))
if not isResponseParsed:
return user if rawData else MemberInDBSchema(**memberHelper(user))
return parseControllerResponse(data=MemberInDBSchema(**memberHelper
(user)).dict(exclude={'mongoDocument'}), statuscode=200,
message='Successfully found the user')
except AssertionError as _:
        logging.info('A user with roll number={} does not exist'.format(
rollNumber))
if isResponseParsed:
return parseControllerResponse(data=None, statuscode=404,
message='User not found', error=
'A user with rollnumber={} does not exist'.format(rollNumber))
return None
except Exception as e:
helpfulErrorMsg = (
f"Couldn't find a user with the rollNumber = {rollNumber!r}, due to {e}"
)
logging.error(helpfulErrorMsg)
if isResponseParsed:
return parseControllerResponse(data=None, statuscode=500,
message='Something went wrong, try again later.', error=
helpfulErrorMsg)
        raise Exception(helpfulErrorMsg)
def getMemberWithGivenId(id: Union[str, ObjectId], **kwargs):
"""Finds and returns the user with the given id, if
such a user doesn't exist, return None"""
isResponseParsed = kwargs.get('isParsed', False)
rawData = kwargs.get('rawData', False)
    logging.info('Trying to find the user with the id=' + str(id))
try:
user = Member.objects(id=id).first()
assert user
logging.debug('Found a user {}, with the id={}'.format(memberHelper
(user), id))
        logging.info('Found the user with id=' + str(id))
if not isResponseParsed:
return user if rawData else MemberInDBSchema(**memberHelper(user))
return parseControllerResponse(data=MemberInDBSchema(**memberHelper
(user)).dict(exclude={'mongoDocument'}), statuscode=200,
message='Successfully found the user')
except AssertionError as _:
logging.info('A user with id={} does not exist'.format(id))
if isResponseParsed:
return parseControllerResponse(data=None, statuscode=404,
message='User not found', error=
'A user with id={} does not exist'.format(id))
return None
except Exception as e:
helpfulErrorMsg = ("Couldn't find a user with the userId {}, due to {}"
.format(id, e))
logging.error(helpfulErrorMsg)
if isResponseParsed:
return parseControllerResponse(data=None, statuscode=500,
message='Something went wrong, try again later.', error=
helpfulErrorMsg)
        raise Exception(helpfulErrorMsg)
def updateMemberWithGivenDetails(data: UpdateMemberSchema, userId: Union[
ObjectId, str], **kwargs):
"""Finds the user with the given data, and updates their details,
raises an error if the roll number is different"""
isResponseParsed = kwargs.get('isParsed', False)
try:
user: Member = getMemberWithGivenId(id=userId, rawData=True)
assert user, 'Not Found'
assert user.rollno == data.rollno, 'Roll Number Mismatch'
user.name = data.name if data.name else user.name
user.discordHandle = (data.discordHandle if data.discordHandle else
user.discordHandle)
user.batch = data.batch if data.batch else user.batch
if data.password:
user.password = CreateMemberSchema.hashGivenText(data.password)
user.save()
logging.info('successfully updated user data')
if isResponseParsed:
return parseControllerResponse(data=MemberInDBSchema(**
memberHelper(user)).dict(exclude={'mongoDocument'}),
statuscode=200, message='Successfully updated user details')
return True
except AssertionError as err:
        if str(err) == 'Not Found':
            helpfulErrorMsg = f"A user with userId = {userId!r} doesn't exist"
            logging.warning(helpfulErrorMsg)
if not isResponseParsed:
return None
return parseControllerResponse(data=None, statuscode=400,
message=helpfulErrorMsg, error=helpfulErrorMsg)
        if str(err) == 'Roll Number Mismatch':
            helpfulErrorMsg = (
                "You cannot change a user's roll number after creating it.")
if not isResponseParsed:
return None
return parseControllerResponse(data=None, statuscode=400,
message=helpfulErrorMsg, error=helpfulErrorMsg)
except Exception as e:
helpfulErrorMsg = (
f"Couldn't update user={data.dict()} data, because e={e!r}")
logging.error(helpfulErrorMsg)
if isResponseParsed:
return parseControllerResponse(data=None, statuscode=500,
message='Something went wrong, try again later.', error=
helpfulErrorMsg)
        raise Exception(helpfulErrorMsg)
| import logging
from bson import ObjectId
from typing import Union
from app.helper import parseControllerResponse
from models.members import Member
from schema.members import CreateMemberSchema, MemberInDBSchema, UpdateMemberSchema, memberHelper
def getAllMembersFromDB(**kwargs):
"""Finds and returns all the registered members"""
isResponseParsed = kwargs.get('isParsed', False)
logging.info('Trying to find all the users')
try:
rawMembersData = Member.objects()
parsedMembers = [MemberInDBSchema(**memberHelper(rawMember)) for
rawMember in rawMembersData]
logging.info('Found all the users')
if not isResponseParsed:
return parsedMembers
resp = [parsedMember.dict(exclude={'mongoDocument'}) for
parsedMember in parsedMembers]
return parseControllerResponse(data=resp, statuscode=200, message=
'Successfully found the users')
except Exception as e:
        helpfulErrorMessage = "Couldn't find all the users due to " + str(e)
logging.error(helpfulErrorMessage)
if isResponseParsed:
return parseControllerResponse(statuscode=500, message=
'Something went wrong, try again later', error=
helpfulErrorMessage)
        raise Exception(helpfulErrorMessage)
def getMemberFromDiscordHandle(discordHandle: str):
"""Finds and returns the user with the given discord handle, if
such a user doesn't exist, return None"""
try:
member_ = Member.objects(discordHandle=discordHandle).first()
assert member_
member = MemberInDBSchema(**memberHelper(member_))
return member
except AssertionError as _:
return None
except Exception as e:
raise Exception(
"Couldn't find a user with the discord handle {}, due to {}"
.format(discordHandle, e))
def getMemberFromRollNumber(rollNumber: int, **kwargs):
"""Finds and returns the user with the given roll number, if
such a user doesn't exist, return None"""
isResponseParsed = kwargs.get('isParsed', False)
rawData = kwargs.get('rawData', False)
try:
        user = Member.objects(rollno=rollNumber).first()
        assert user
        logging.debug('Found a user {}, with the rollno={}'.format(
            memberHelper(user), rollNumber))
        logging.info('Found the user with rollNumber = ' + str(rollNumber))
if not isResponseParsed:
return user if rawData else MemberInDBSchema(**memberHelper(user))
return parseControllerResponse(data=MemberInDBSchema(**memberHelper
(user)).dict(exclude={'mongoDocument'}), statuscode=200,
message='Successfully found the user')
except AssertionError as _:
        logging.info('A user with roll number={} does not exist'.format(
rollNumber))
if isResponseParsed:
return parseControllerResponse(data=None, statuscode=404,
message='User not found', error=
'A user with rollnumber={} does not exist'.format(rollNumber))
return None
except Exception as e:
helpfulErrorMsg = (
f"Couldn't find a user with the rollNumber = {rollNumber!r}, due to {e}"
)
logging.error(helpfulErrorMsg)
if isResponseParsed:
return parseControllerResponse(data=None, statuscode=500,
message='Something went wrong, try again later.', error=
helpfulErrorMsg)
        raise Exception(helpfulErrorMsg)
def getMemberWithGivenId(id: Union[str, ObjectId], **kwargs):
"""Finds and returns the user with the given id, if
such a user doesn't exist, return None"""
isResponseParsed = kwargs.get('isParsed', False)
rawData = kwargs.get('rawData', False)
    logging.info('Trying to find the user with the id=' + str(id))
try:
user = Member.objects(id=id).first()
assert user
logging.debug('Found a user {}, with the id={}'.format(memberHelper
(user), id))
        logging.info('Found the user with id=' + str(id))
if not isResponseParsed:
return user if rawData else MemberInDBSchema(**memberHelper(user))
return parseControllerResponse(data=MemberInDBSchema(**memberHelper
(user)).dict(exclude={'mongoDocument'}), statuscode=200,
message='Successfully found the user')
except AssertionError as _:
logging.info('A user with id={} does not exist'.format(id))
if isResponseParsed:
return parseControllerResponse(data=None, statuscode=404,
message='User not found', error=
'A user with id={} does not exist'.format(id))
return None
except Exception as e:
helpfulErrorMsg = ("Couldn't find a user with the userId {}, due to {}"
.format(id, e))
logging.error(helpfulErrorMsg)
if isResponseParsed:
return parseControllerResponse(data=None, statuscode=500,
message='Something went wrong, try again later.', error=
helpfulErrorMsg)
        raise Exception(helpfulErrorMsg)
def updateMemberWithGivenDetails(data: UpdateMemberSchema, userId: Union[
ObjectId, str], **kwargs):
"""Finds the user with the given data, and updates their details,
raises an error if the roll number is different"""
isResponseParsed = kwargs.get('isParsed', False)
try:
user: Member = getMemberWithGivenId(id=userId, rawData=True)
assert user, 'Not Found'
assert user.rollno == data.rollno, 'Roll Number Mismatch'
user.name = data.name if data.name else user.name
user.discordHandle = (data.discordHandle if data.discordHandle else
user.discordHandle)
user.batch = data.batch if data.batch else user.batch
if data.password:
user.password = CreateMemberSchema.hashGivenText(data.password)
user.save()
logging.info('successfully updated user data')
if isResponseParsed:
return parseControllerResponse(data=MemberInDBSchema(**
memberHelper(user)).dict(exclude={'mongoDocument'}),
statuscode=200, message='Successfully updated user details')
return True
except AssertionError as err:
        if str(err) == 'Not Found':
            helpfulErrorMsg = f"A user with userId = {userId!r} doesn't exist"
            logging.warning(helpfulErrorMsg)
if not isResponseParsed:
return None
return parseControllerResponse(data=None, statuscode=400,
message=helpfulErrorMsg, error=helpfulErrorMsg)
        if str(err) == 'Roll Number Mismatch':
            helpfulErrorMsg = (
                "You cannot change a user's roll number after creating it.")
if not isResponseParsed:
return None
return parseControllerResponse(data=None, statuscode=400,
message=helpfulErrorMsg, error=helpfulErrorMsg)
except Exception as e:
helpfulErrorMsg = (
f"Couldn't update user={data.dict()} data, because e={e!r}")
logging.error(helpfulErrorMsg)
if isResponseParsed:
return parseControllerResponse(data=None, statuscode=500,
message='Something went wrong, try again later.', error=
helpfulErrorMsg)
        raise Exception(helpfulErrorMsg)
| import logging
from bson import ObjectId
from typing import Union
from app.helper import parseControllerResponse
from models.members import Member
from schema.members import (
CreateMemberSchema,
MemberInDBSchema,
UpdateMemberSchema,
memberHelper,
)
def getAllMembersFromDB(**kwargs):
"""Finds and returns all the registered members"""
isResponseParsed = kwargs.get("isParsed", False)
logging.info("Trying to find all the users")
try:
rawMembersData = Member.objects()
parsedMembers = [
MemberInDBSchema(**memberHelper(rawMember)) for rawMember in rawMembersData
]
logging.info("Found all the users")
if not isResponseParsed:
return parsedMembers
resp = [
parsedMember.dict(exclude={"mongoDocument"})
for parsedMember in parsedMembers
]
return parseControllerResponse(
data=resp, statuscode=200, message="Successfully found the users"
)
except Exception as e:
        helpfulErrorMessage = "Couldn't find all the users due to " + str(e)
logging.error(helpfulErrorMessage)
if isResponseParsed:
return parseControllerResponse(
statuscode=500,
message="Something went wrong, try again later",
error=helpfulErrorMessage,
)
        raise Exception(helpfulErrorMessage)
def getMemberFromDiscordHandle(discordHandle: str):
"""Finds and returns the user with the given discord handle, if
such a user doesn't exist, return None"""
try:
member_ = Member.objects(discordHandle=discordHandle).first()
assert member_
member = MemberInDBSchema(**memberHelper(member_))
return member
except AssertionError as _:
# if the member is not found, raise a ValueError
return None
except Exception as e:
raise Exception(
"Couldn't find a user with the discord handle \
{}, due to {}".format(
discordHandle, e
)
)
def getMemberFromRollNumber(rollNumber: int, **kwargs):
"""Finds and returns the user with the given roll number, if
such a user doesn't exist, return None"""
isResponseParsed = kwargs.get("isParsed", False)
rawData = kwargs.get("rawData", False)
try:
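        # look up the member document by its unique roll number; .first() returns None when there is no match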
        user = Member.objects(rollno=rollNumber).first()
        assert user
        logging.debug(
            "Found a user {}, with the rollno={}".format(memberHelper(user), rollNumber)
        )
        logging.info("Found the user with rollNumber = " + str(rollNumber))
if not isResponseParsed:
return user if rawData else MemberInDBSchema(**memberHelper(user))
return parseControllerResponse(
data=(MemberInDBSchema(**memberHelper(user))).dict(
exclude={"mongoDocument"}
),
statuscode=200,
message="Successfully found the user",
)
except AssertionError as _:
# user was not found, return none or parsed response
# ! its the person who called this func's responsibility to create an error
logging.info("A user with roll numer={} does not exist".format(rollNumber))
if isResponseParsed:
return parseControllerResponse(
data=None,
statuscode=404,
message="User not found",
error="A user with rollnumber={} does not exist".format(rollNumber),
)
return None
except Exception as e:
helpfulErrorMsg = f"Couldn't find a user with the {rollNumber = }, due to {e}"
logging.error(helpfulErrorMsg)
if isResponseParsed:
return parseControllerResponse(
data=None,
statuscode=500,
message="Something went wrong, try again later.",
error=helpfulErrorMsg,
)
        raise Exception(helpfulErrorMsg)
def getMemberWithGivenId(id: Union[str, ObjectId], **kwargs):
"""Finds and returns the user with the given id, if
such a user doesn't exist, return None"""
isResponseParsed = kwargs.get("isParsed", False)
rawData = kwargs.get("rawData", False)
logging.info("Trying to find the user with the id=" + id)
try:
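        # MongoEngine matches the id keyword against the document's primary key (accepts str or ObjectId)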
user = Member.objects(id=id).first()
assert user
logging.debug("Found a user {}, with the id={}".format(memberHelper(user), id))
logging.info("Found the user with id=" + id)
if not isResponseParsed:
return user if rawData else MemberInDBSchema(**memberHelper(user))
return parseControllerResponse(
data=(MemberInDBSchema(**memberHelper(user))).dict(
exclude={"mongoDocument"}
),
statuscode=200,
message="Successfully found the user",
)
except AssertionError as _:
# user was not found, return none or parsed response
logging.info("A user with id={} does not exist".format(id))
if isResponseParsed:
return parseControllerResponse(
data=None,
statuscode=404,
message="User not found",
error="A user with id={} does not exist".format(id),
)
return None
except Exception as e:
helpfulErrorMsg = "Couldn't find a user with the userId {}, due to {}".format(
id, e
)
logging.error(helpfulErrorMsg)
if isResponseParsed:
return parseControllerResponse(
data=None,
statuscode=500,
message="Something went wrong, try again later.",
error=helpfulErrorMsg,
)
        raise Exception(helpfulErrorMsg)
def updateMemberWithGivenDetails(
data: UpdateMemberSchema, userId: Union[ObjectId, str], **kwargs
):
"""Finds the user with the given data, and updates their details,
raises an error if the roll number is different"""
isResponseParsed = kwargs.get("isParsed", False)
try:
user: Member = getMemberWithGivenId(id=userId, rawData=True)
assert user, "Not Found"
# A user cannot change roll number after creating a doc
assert user.rollno == data.rollno, "Roll Number Mismatch"
user.name = data.name if data.name else user.name
user.discordHandle = (
data.discordHandle if data.discordHandle else user.discordHandle
)
user.batch = data.batch if data.batch else user.batch
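        # only re-hash and overwrite the stored password when a new one was supplied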
if data.password:
user.password = CreateMemberSchema.hashGivenText(data.password)
user.save()
logging.info("successfully updated user data")
if isResponseParsed:
return parseControllerResponse(
data=(MemberInDBSchema(**memberHelper(user))).dict(
exclude={"mongoDocument"}
),
statuscode=200,
message="Successfully updated user details",
)
return True
except AssertionError as err:
if err == "Not Found":
helpfulErrorMsg = f"A user with {userId = } doesn't exist"
logging.warn(helpfulErrorMsg)
if not isResponseParsed:
return None
return parseControllerResponse(
data=None,
statuscode=400,
message=helpfulErrorMsg,
error=helpfulErrorMsg,
)
if err == "Roll Number Mismatch":
helpfulErrorMsg = (
f"You cannot change a user's roll number after creating it."
)
if not isResponseParsed:
return None
return parseControllerResponse(
data=None,
statuscode=400,
message=helpfulErrorMsg,
error=helpfulErrorMsg,
)
except Exception as e:
helpfulErrorMsg = f"Couldn't update user={data.dict()} data, because {e=}"
logging.error(helpfulErrorMsg)
if isResponseParsed:
return parseControllerResponse(
data=None,
statuscode=500,
message="Something went wrong, try again later.",
error=helpfulErrorMsg,
)
        raise Exception(helpfulErrorMsg)
| [
3,
4,
5,
6,
7
] |
1,938 | 979a387e29867818ffad7291511ff0be40dee118 | <mask token>
| <mask token>
urlpatterns = [path('', Home.as_view(), name='home'), path('signup', Signup
.as_view(), name='signup'), path('login', Login.as_view(), name='login')]
| from django.urls import path
from .views.home import Home
from .views.signup import Signup
from .views.login import Login
urlpatterns = [path('', Home.as_view(), name='home'), path('signup', Signup
.as_view(), name='signup'), path('login', Login.as_view(), name='login')]
| from django.urls import path
from .views.home import Home
from .views.signup import Signup
from .views.login import Login
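# wire each top-level URL to its class-based view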
urlpatterns = [
path('', Home.as_view(), name='home'),
path('signup', Signup.as_view(), name='signup'),
path('login', Login.as_view(), name='login'),
]
| null | [
0,
1,
2,
3
] |
1,939 | e7fa84dbc037253c7f852aa618e6ea88d1fda909 | <mask token>
| <mask token>
def test_template():
assert True
| import pytest
def test_template():
assert True
| null | null | [
0,
1,
2
] |
1,940 | b49696d6cac5fbf97172aa7cf16903d002262b5c | <mask token>
def AddOverflow(h):
nxbins = h.GetXaxis().GetNbins()
nybins = h.GetYaxis().GetNbins()
idxx = 0.0
idxy = nybins + 1
for ix in range(nxbins):
idxx = ix + 1
ovf_bincont = h.GetBinContent(idxx, idxy)
last_bincont = h.GetBinContent(idxx, nybins)
new_last_bincont = ovf_bincont + last_bincont
h.SetBinContent(idxx, nybins, new_last_bincont)
idxx = nxbins + 1
idxy = 0.0
for iy in range(nybins):
idxy = iy + 1
ovf_bincont = h.GetBinContent(idxx, idxy)
last_bincont = h.GetBinContent(nxbins, idxy)
new_last_bincont = ovf_bincont + last_bincont
h.SetBinContent(nxbins, idxy, new_last_bincont)
<mask token>
| <mask token>
if not '_UL' in sys.argv[1]:
if sys.argv[4] == 'remote':
from samples import *
Debug = False
else:
from samples.samples import *
Debug = True
elif sys.argv[4] == 'remote':
from samplesUL import *
Debug = False
else:
from samples.samplesUL import *
Debug = True
<mask token>
def AddOverflow(h):
nxbins = h.GetXaxis().GetNbins()
nybins = h.GetYaxis().GetNbins()
idxx = 0.0
idxy = nybins + 1
for ix in range(nxbins):
idxx = ix + 1
ovf_bincont = h.GetBinContent(idxx, idxy)
last_bincont = h.GetBinContent(idxx, nybins)
new_last_bincont = ovf_bincont + last_bincont
h.SetBinContent(idxx, nybins, new_last_bincont)
idxx = nxbins + 1
idxy = 0.0
for iy in range(nybins):
idxy = iy + 1
ovf_bincont = h.GetBinContent(idxx, idxy)
last_bincont = h.GetBinContent(nxbins, idxy)
new_last_bincont = ovf_bincont + last_bincont
h.SetBinContent(nxbins, idxy, new_last_bincont)
<mask token>
print('Starting running at ' + str(startTime))
ROOT.gROOT.SetBatch()
<mask token>
for infile in file_list:
chain.Add(infile)
print('Number of events in chain ' + str(chain.GetEntries()))
<mask token>
print('Number of entries: ' + str(tree.GetEntries()))
<mask token>
if 'Data' in sample.name:
isMC = False
<mask token>
if 'aQGC' in sample.name:
IsDim8 = True
<mask token>
if 'DataMu' in sample.name:
dataMu = True
if 'DataEle' in sample.name:
dataEle = True
<mask token>
h2_BTaggingEff_Denom_b.Sumw2()
h2_BTaggingEff_Denom_c.Sumw2()
h2_BTaggingEff_Denom_udsg.Sumw2()
h2_BTaggingEff_Num_b.Sumw2()
h2_BTaggingEff_Num_c.Sumw2()
h2_BTaggingEff_Num_udsg.Sumw2()
for i in range(tree.GetEntries()):
if Debug:
if i > 100:
break
if not Debug and i % 5000 == 0:
print('Event #', i + 1, ' out of ', tree.GetEntries())
event = Event(tree, i)
electrons = Collection(event, 'Electron')
muons = Collection(event, 'Muon')
jets = Collection(event, 'Jet')
njets = len(jets)
fatjets = Collection(event, 'FatJet')
HLT = Object(event, 'HLT')
PV = Object(event, 'PV')
Flag = Object(event, 'Flag')
tightlep = None
tightlep_p4 = None
tightlep_p4t = None
tightlep_SF = None
tightlep_SFUp = None
tightlep_SFDown = None
recomet_p4t = None
PF_SF = None
PF_SFUp = None
PF_SFDown = None
PU_SF = None
PU_SFUp = None
PU_SFDown = None
year = sample.year
if isMC:
runPeriod = ''
else:
runPeriod = sample.runP
if not isMC:
if not Flag.eeBadScFilter:
continue
passMu, passEle, passHT, noTrigger = trig_map(HLT, PV, year, runPeriod,
Flag)
if noTrigger:
continue
"""
GoodEle, ele_TightRegion = SelectLepton(electrons, False)
GoodMu, mu_TightRegion = SelectLepton(muons, True)
if GoodEle is None and GoodMu is None:
continue
ele_lepton_veto = -1
mu_lepton_veto = -1
if GoodEle != None:
ele_lepton_veto = LepVeto(GoodEle, electrons, muons)
if GoodMu != None:
mu_lepton_veto = LepVeto(GoodMu, electrons, muons)
SingleEle=False
SingleMu=False
ElMu=False
LeadLepFamily="not selected"
GoodLep = None
leptons = None
lepton_TightRegion = 0
if 'DataHT' not in sample.label:
if passEle and not passMu:
if GoodEle != None and ele_lepton_veto:
GoodLep = GoodEle
lepton_TightRegion = copy.deepcopy(ele_TightRegion)
SingleEle = True
SingleMu = False
else:
continue
elif passMu and not passEle:
if GoodMu != None and mu_lepton_veto:
GoodLep = GoodMu
lepton_TightRegion = copy.deepcopy(mu_TightRegion)
SingleEle = False
SingleMu = True
else:
continue
elif passMu and passEle:
ElMu=True
else:
continue
else:
if passHT:
ElMu = True
else:
continue
if ElMu:
if GoodMu==None and GoodEle!=None and ele_lepton_veto:
GoodLep = GoodEle
lepton_TightRegion = copy.deepcopy(ele_TightRegion)
SingleEle = True
SingleMu = False
elif GoodMu!=None and mu_lepton_veto and GoodEle==None:
GoodLep = GoodMu
lepton_TightRegion = copy.deepcopy(mu_TightRegion)
SingleMu = True
SingleEle = False
elif GoodMu!=None and GoodEle!=None:
if ele_lepton_veto and not mu_lepton_veto:
GoodLep = GoodEle
lepton_TightRegion = copy.deepcopy(ele_TightRegion)
SingleEle = True
SingleMu = False
elif not ele_lepton_veto and mu_lepton_veto:
GoodLep = GoodMu
lepton_TightRegion = copy.deepcopy(mu_TightRegion)
SingleMu = True
SingleEle = False
elif ele_lepton_veto and mu_lepton_veto:
if GoodEle.pt > GoodMu.pt:
GoodLep = GoodEle
lepton_TightRegion = copy.deepcopy(ele_TightRegion)
SingleEle = True
SingleMu = False
else:
GoodLep = GoodMu
lepton_TightRegion = copy.deepcopy(mu_TightRegion)
SingleMu = True
SingleEle = False
else:
continue
else:
continue
vTrigEle, vTrigMu, vTrigHT = trig_finder(HLT, sample.year, sample.label)
if SingleEle==True:
if isMC:
HLT_effLumi = lumiFinder("Ele", vTrigEle, sample.year)
leptons = electrons
elif SingleMu==True:
if isMC:
HLT_effLumi = lumiFinder("Mu", vTrigMu, sample.year)
leptons = muons
elif not (SingleMu or SingleEle):
continue
if SingleEle and dataMu:
continue
if SingleMu and dataEle:
continue
if GoodLep==None or (lepton_TightRegion < 1):
if Debug:
print("exiting at lepton selection (without saving)")
continue
"""
goodJets = get_Jet(jets, 30)
bjets, nobjets = bjet_filter(goodJets, 'DeepFlv', 'M')
if len(goodJets) < 2 or len(fatjets) < 2:
continue
for jet in goodJets:
if abs(jet.partonFlavour) == 5:
h2_BTaggingEff_Denom_b.Fill(jet.pt, abs(jet.eta))
if len(bjet_filter([jet], 'DeepFlv', 'M')[0]) == 1:
h2_BTaggingEff_Num_b.Fill(jet.pt, abs(jet.eta))
elif abs(jet.partonFlavour) == 4:
h2_BTaggingEff_Denom_c.Fill(jet.pt, abs(jet.eta))
if len(bjet_filter([jet], 'DeepFlv', 'M')[0]) == 1:
h2_BTaggingEff_Num_c.Fill(jet.pt, abs(jet.eta))
else:
h2_BTaggingEff_Denom_udsg.Fill(jet.pt, abs(jet.eta))
if len(bjet_filter([jet], 'DeepFlv', 'M')[0]) == 1:
h2_BTaggingEff_Num_udsg.Fill(jet.pt, abs(jet.eta))
outTreeFile.cd()
h2_BTaggingEff_Denom_b.Write()
h2_BTaggingEff_Denom_c.Write()
h2_BTaggingEff_Denom_udsg.Write()
h2_BTaggingEff_Num_b.Write()
h2_BTaggingEff_Num_c.Write()
h2_BTaggingEff_Num_udsg.Write()
<mask token>
h2_Eff_b.SetTotalHistogram(h2_BTaggingEff_Denom_b, '')
h2_Eff_b.SetPassedHistogram(h2_BTaggingEff_Num_b, '')
<mask token>
h2_Eff_c.SetTotalHistogram(h2_BTaggingEff_Denom_c, '')
h2_Eff_c.SetPassedHistogram(h2_BTaggingEff_Num_c, '')
<mask token>
h2_Eff_udsg.SetTotalHistogram(h2_BTaggingEff_Denom_udsg, '')
h2_Eff_udsg.SetPassedHistogram(h2_BTaggingEff_Num_udsg, '')
h2_Eff_b.Write()
h2_Eff_c.Write()
h2_Eff_udsg.Write()
<mask token>
print('Ending running at ' + str(endTime))
| <mask token>
if not '_UL' in sys.argv[1]:
if sys.argv[4] == 'remote':
from samples import *
Debug = False
else:
from samples.samples import *
Debug = True
elif sys.argv[4] == 'remote':
from samplesUL import *
Debug = False
else:
from samples.samplesUL import *
Debug = True
sample = sample_dict[sys.argv[1]]
part_idx = sys.argv[2]
file_list = list(map(str, sys.argv[3].strip('[]').split(',')))
def AddOverflow(h):
nxbins = h.GetXaxis().GetNbins()
nybins = h.GetYaxis().GetNbins()
idxx = 0.0
idxy = nybins + 1
for ix in range(nxbins):
idxx = ix + 1
ovf_bincont = h.GetBinContent(idxx, idxy)
last_bincont = h.GetBinContent(idxx, nybins)
new_last_bincont = ovf_bincont + last_bincont
h.SetBinContent(idxx, nybins, new_last_bincont)
idxx = nxbins + 1
idxy = 0.0
for iy in range(nybins):
idxy = iy + 1
ovf_bincont = h.GetBinContent(idxx, idxy)
last_bincont = h.GetBinContent(nxbins, idxy)
new_last_bincont = ovf_bincont + last_bincont
h.SetBinContent(nxbins, idxy, new_last_bincont)
startTime = datetime.datetime.now()
print('Starting running at ' + str(startTime))
ROOT.gROOT.SetBatch()
leadingjet_ptcut = 150.0
chain = ROOT.TChain('Events')
for infile in file_list:
chain.Add(infile)
print('Number of events in chain ' + str(chain.GetEntries()))
tree = InputTree(chain)
print('Number of entries: ' + str(tree.GetEntries()))
isMC = True
if 'Data' in sample.name:
isMC = False
IsDim8 = False
if 'aQGC' in sample.name:
IsDim8 = True
dataEle = False
dataMu = False
if 'DataMu' in sample.name:
dataMu = True
if 'DataEle' in sample.name:
dataEle = True
username = str(os.environ.get('USER'))
inituser = str(os.environ.get('USER')[0])
outTreeFile = ROOT.TFile(sample.label + '_part' + str(part_idx) + '.root',
'RECREATE')
ptNBins = 100
ptMin = 0
ptMax = 1000.0
etaNBins = 60
etaMin = -3.0
etaMax = 3.0
ptbins = array.array('d', [30, 50, 80, 140, 200, 300, 600, 1000])
etabins = array.array('d', [0.0, 0.8, 1.6, 2.4])
nptbins = len(ptbins) - 1
netabins = len(etabins) - 1
h2_BTaggingEff_Denom_b = ROOT.TH2D('h2_BTaggingEff_Denom_b',
'MC bjet;p_{T} [GeV];#eta', nptbins, ptbins, netabins, etabins)
h2_BTaggingEff_Denom_c = ROOT.TH2D('h2_BTaggingEff_Denom_c',
'MC cjet;p_{T} [GeV];#eta', nptbins, ptbins, netabins, etabins)
h2_BTaggingEff_Denom_udsg = ROOT.TH2D('h2_BTaggingEff_Denom_udsg',
'MC ljet;p_{T} [GeV];#eta', nptbins, ptbins, netabins, etabins)
h2_BTaggingEff_Num_b = ROOT.TH2D('h2_BTaggingEff_Num_b',
'Tagged bjet;p_{T} [GeV];#eta', nptbins, ptbins, netabins, etabins)
h2_BTaggingEff_Num_c = ROOT.TH2D('h2_BTaggingEff_Num_c',
'Tagged cjet;p_{T} [GeV];#eta', nptbins, ptbins, netabins, etabins)
h2_BTaggingEff_Num_udsg = ROOT.TH2D('h2_BTaggingEff_Num_udsg',
'Tagged ljet;p_{T} [GeV];#eta', nptbins, ptbins, netabins, etabins)
h2_BTaggingEff_Denom_b.Sumw2()
h2_BTaggingEff_Denom_c.Sumw2()
h2_BTaggingEff_Denom_udsg.Sumw2()
h2_BTaggingEff_Num_b.Sumw2()
h2_BTaggingEff_Num_c.Sumw2()
h2_BTaggingEff_Num_udsg.Sumw2()
for i in range(tree.GetEntries()):
if Debug:
if i > 100:
break
if not Debug and i % 5000 == 0:
print('Event #', i + 1, ' out of ', tree.GetEntries())
event = Event(tree, i)
electrons = Collection(event, 'Electron')
muons = Collection(event, 'Muon')
jets = Collection(event, 'Jet')
njets = len(jets)
fatjets = Collection(event, 'FatJet')
HLT = Object(event, 'HLT')
PV = Object(event, 'PV')
Flag = Object(event, 'Flag')
tightlep = None
tightlep_p4 = None
tightlep_p4t = None
tightlep_SF = None
tightlep_SFUp = None
tightlep_SFDown = None
recomet_p4t = None
PF_SF = None
PF_SFUp = None
PF_SFDown = None
PU_SF = None
PU_SFUp = None
PU_SFDown = None
year = sample.year
if isMC:
runPeriod = ''
else:
runPeriod = sample.runP
if not isMC:
if not Flag.eeBadScFilter:
continue
passMu, passEle, passHT, noTrigger = trig_map(HLT, PV, year, runPeriod,
Flag)
if noTrigger:
continue
"""
GoodEle, ele_TightRegion = SelectLepton(electrons, False)
GoodMu, mu_TightRegion = SelectLepton(muons, True)
if GoodEle is None and GoodMu is None:
continue
ele_lepton_veto = -1
mu_lepton_veto = -1
if GoodEle != None:
ele_lepton_veto = LepVeto(GoodEle, electrons, muons)
if GoodMu != None:
mu_lepton_veto = LepVeto(GoodMu, electrons, muons)
SingleEle=False
SingleMu=False
ElMu=False
LeadLepFamily="not selected"
GoodLep = None
leptons = None
lepton_TightRegion = 0
if 'DataHT' not in sample.label:
if passEle and not passMu:
if GoodEle != None and ele_lepton_veto:
GoodLep = GoodEle
lepton_TightRegion = copy.deepcopy(ele_TightRegion)
SingleEle = True
SingleMu = False
else:
continue
elif passMu and not passEle:
if GoodMu != None and mu_lepton_veto:
GoodLep = GoodMu
lepton_TightRegion = copy.deepcopy(mu_TightRegion)
SingleEle = False
SingleMu = True
else:
continue
elif passMu and passEle:
ElMu=True
else:
continue
else:
if passHT:
ElMu = True
else:
continue
if ElMu:
if GoodMu==None and GoodEle!=None and ele_lepton_veto:
GoodLep = GoodEle
lepton_TightRegion = copy.deepcopy(ele_TightRegion)
SingleEle = True
SingleMu = False
elif GoodMu!=None and mu_lepton_veto and GoodEle==None:
GoodLep = GoodMu
lepton_TightRegion = copy.deepcopy(mu_TightRegion)
SingleMu = True
SingleEle = False
elif GoodMu!=None and GoodEle!=None:
if ele_lepton_veto and not mu_lepton_veto:
GoodLep = GoodEle
lepton_TightRegion = copy.deepcopy(ele_TightRegion)
SingleEle = True
SingleMu = False
elif not ele_lepton_veto and mu_lepton_veto:
GoodLep = GoodMu
lepton_TightRegion = copy.deepcopy(mu_TightRegion)
SingleMu = True
SingleEle = False
elif ele_lepton_veto and mu_lepton_veto:
if GoodEle.pt > GoodMu.pt:
GoodLep = GoodEle
lepton_TightRegion = copy.deepcopy(ele_TightRegion)
SingleEle = True
SingleMu = False
else:
GoodLep = GoodMu
lepton_TightRegion = copy.deepcopy(mu_TightRegion)
SingleMu = True
SingleEle = False
else:
continue
else:
continue
vTrigEle, vTrigMu, vTrigHT = trig_finder(HLT, sample.year, sample.label)
if SingleEle==True:
if isMC:
HLT_effLumi = lumiFinder("Ele", vTrigEle, sample.year)
leptons = electrons
elif SingleMu==True:
if isMC:
HLT_effLumi = lumiFinder("Mu", vTrigMu, sample.year)
leptons = muons
elif not (SingleMu or SingleEle):
continue
if SingleEle and dataMu:
continue
if SingleMu and dataEle:
continue
if GoodLep==None or (lepton_TightRegion < 1):
if Debug:
print("exiting at lepton selection (without saving)")
continue
"""
goodJets = get_Jet(jets, 30)
bjets, nobjets = bjet_filter(goodJets, 'DeepFlv', 'M')
if len(goodJets) < 2 or len(fatjets) < 2:
continue
for jet in goodJets:
if abs(jet.partonFlavour) == 5:
h2_BTaggingEff_Denom_b.Fill(jet.pt, abs(jet.eta))
if len(bjet_filter([jet], 'DeepFlv', 'M')[0]) == 1:
h2_BTaggingEff_Num_b.Fill(jet.pt, abs(jet.eta))
elif abs(jet.partonFlavour) == 4:
h2_BTaggingEff_Denom_c.Fill(jet.pt, abs(jet.eta))
if len(bjet_filter([jet], 'DeepFlv', 'M')[0]) == 1:
h2_BTaggingEff_Num_c.Fill(jet.pt, abs(jet.eta))
else:
h2_BTaggingEff_Denom_udsg.Fill(jet.pt, abs(jet.eta))
if len(bjet_filter([jet], 'DeepFlv', 'M')[0]) == 1:
h2_BTaggingEff_Num_udsg.Fill(jet.pt, abs(jet.eta))
outTreeFile.cd()
h2_BTaggingEff_Denom_b.Write()
h2_BTaggingEff_Denom_c.Write()
h2_BTaggingEff_Denom_udsg.Write()
h2_BTaggingEff_Num_b.Write()
h2_BTaggingEff_Num_c.Write()
h2_BTaggingEff_Num_udsg.Write()
h2_Eff_b = ROOT.TEfficiency('h2_BTaggingEff_b',
'bjet efficiency;p_{T} [GeV];#eta', nptbins, ptbins, netabins, etabins)
h2_Eff_b.SetTotalHistogram(h2_BTaggingEff_Denom_b, '')
h2_Eff_b.SetPassedHistogram(h2_BTaggingEff_Num_b, '')
h2_Eff_c = ROOT.TEfficiency('h2_BTaggingEff_c',
'cjet efficiency;p_{T} [GeV];#eta', nptbins, ptbins, netabins, etabins)
h2_Eff_c.SetTotalHistogram(h2_BTaggingEff_Denom_c, '')
h2_Eff_c.SetPassedHistogram(h2_BTaggingEff_Num_c, '')
h2_Eff_udsg = ROOT.TEfficiency('h2_BTaggingEff_udsg',
'light jet efficiency;p_{T} [GeV];#eta', nptbins, ptbins, netabins, etabins
)
h2_Eff_udsg.SetTotalHistogram(h2_BTaggingEff_Denom_udsg, '')
h2_Eff_udsg.SetPassedHistogram(h2_BTaggingEff_Num_udsg, '')
h2_Eff_b.Write()
h2_Eff_c.Write()
h2_Eff_udsg.Write()
endTime = datetime.datetime.now()
print('Ending running at ' + str(endTime))
| import os
import sys
import ROOT
import math
import datetime
import copy
from array import array
from skimtree_utils_ssWW_wFakes_old import *
if not '_UL' in sys.argv[1]:
if sys.argv[4] == 'remote':
from samples import *
Debug = False
else:
from samples.samples import *
Debug = True
elif sys.argv[4] == 'remote':
from samplesUL import *
Debug = False
else:
from samples.samplesUL import *
Debug = True
sample = sample_dict[sys.argv[1]]
part_idx = sys.argv[2]
file_list = list(map(str, sys.argv[3].strip('[]').split(',')))
def AddOverflow(h):
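    # fold the x- and y-overflow bins of a 2D histogram into its last visible bins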
nxbins = h.GetXaxis().GetNbins()
nybins = h.GetYaxis().GetNbins()
idxx = 0.0
idxy = nybins + 1
for ix in range(nxbins):
idxx = ix + 1
ovf_bincont = h.GetBinContent(idxx, idxy)
last_bincont = h.GetBinContent(idxx, nybins)
new_last_bincont = ovf_bincont + last_bincont
h.SetBinContent(idxx, nybins, new_last_bincont)
idxx = nxbins + 1
idxy = 0.0
for iy in range(nybins):
idxy = iy + 1
ovf_bincont = h.GetBinContent(idxx, idxy)
last_bincont = h.GetBinContent(nxbins, idxy)
new_last_bincont = ovf_bincont + last_bincont
h.SetBinContent(nxbins, idxy, new_last_bincont)
startTime = datetime.datetime.now()
print('Starting running at ' + str(startTime))
ROOT.gROOT.SetBatch()
leadingjet_ptcut = 150.0
chain = ROOT.TChain('Events')
for infile in file_list:
chain.Add(infile)
print('Number of events in chain ' + str(chain.GetEntries()))
tree = InputTree(chain)
print('Number of entries: ' + str(tree.GetEntries()))
isMC = True
if 'Data' in sample.name:
isMC = False
IsDim8 = False
if 'aQGC' in sample.name:
IsDim8 = True
dataEle = False
dataMu = False
if 'DataMu' in sample.name:
dataMu = True
if 'DataEle' in sample.name:
dataEle = True
username = str(os.environ.get('USER'))
inituser = str(os.environ.get('USER')[0])
outTreeFile = ROOT.TFile(sample.label + '_part' + str(part_idx) + '.root',
'RECREATE')
ptNBins = 100
ptMin = 0
ptMax = 1000.0
etaNBins = 60
etaMin = -3.0
etaMax = 3.0
ptbins = array.array('d', [30, 50, 80, 140, 200, 300, 600, 1000])
etabins = array.array('d', [0.0, 0.8, 1.6, 2.4])
nptbins = len(ptbins) - 1
netabins = len(etabins) - 1
h2_BTaggingEff_Denom_b = ROOT.TH2D('h2_BTaggingEff_Denom_b',
'MC bjet;p_{T} [GeV];#eta', nptbins, ptbins, netabins, etabins)
h2_BTaggingEff_Denom_c = ROOT.TH2D('h2_BTaggingEff_Denom_c',
'MC cjet;p_{T} [GeV];#eta', nptbins, ptbins, netabins, etabins)
h2_BTaggingEff_Denom_udsg = ROOT.TH2D('h2_BTaggingEff_Denom_udsg',
'MC ljet;p_{T} [GeV];#eta', nptbins, ptbins, netabins, etabins)
h2_BTaggingEff_Num_b = ROOT.TH2D('h2_BTaggingEff_Num_b',
'Tagged bjet;p_{T} [GeV];#eta', nptbins, ptbins, netabins, etabins)
h2_BTaggingEff_Num_c = ROOT.TH2D('h2_BTaggingEff_Num_c',
'Tagged cjet;p_{T} [GeV];#eta', nptbins, ptbins, netabins, etabins)
h2_BTaggingEff_Num_udsg = ROOT.TH2D('h2_BTaggingEff_Num_udsg',
'Tagged ljet;p_{T} [GeV];#eta', nptbins, ptbins, netabins, etabins)
h2_BTaggingEff_Denom_b.Sumw2()
h2_BTaggingEff_Denom_c.Sumw2()
h2_BTaggingEff_Denom_udsg.Sumw2()
h2_BTaggingEff_Num_b.Sumw2()
h2_BTaggingEff_Num_c.Sumw2()
h2_BTaggingEff_Num_udsg.Sumw2()
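# event loop: fill per-flavour denominator histograms for all selected jets and numerators for b-tagged ones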
for i in range(tree.GetEntries()):
if Debug:
if i > 100:
break
if not Debug and i % 5000 == 0:
print('Event #', i + 1, ' out of ', tree.GetEntries())
event = Event(tree, i)
electrons = Collection(event, 'Electron')
muons = Collection(event, 'Muon')
jets = Collection(event, 'Jet')
njets = len(jets)
fatjets = Collection(event, 'FatJet')
HLT = Object(event, 'HLT')
PV = Object(event, 'PV')
Flag = Object(event, 'Flag')
tightlep = None
tightlep_p4 = None
tightlep_p4t = None
tightlep_SF = None
tightlep_SFUp = None
tightlep_SFDown = None
recomet_p4t = None
PF_SF = None
PF_SFUp = None
PF_SFDown = None
PU_SF = None
PU_SFUp = None
PU_SFDown = None
year = sample.year
if isMC:
runPeriod = ''
else:
runPeriod = sample.runP
if not isMC:
if not Flag.eeBadScFilter:
continue
passMu, passEle, passHT, noTrigger = trig_map(HLT, PV, year, runPeriod,
Flag)
if noTrigger:
continue
"""
GoodEle, ele_TightRegion = SelectLepton(electrons, False)
GoodMu, mu_TightRegion = SelectLepton(muons, True)
if GoodEle is None and GoodMu is None:
continue
ele_lepton_veto = -1
mu_lepton_veto = -1
if GoodEle != None:
ele_lepton_veto = LepVeto(GoodEle, electrons, muons)
if GoodMu != None:
mu_lepton_veto = LepVeto(GoodMu, electrons, muons)
SingleEle=False
SingleMu=False
ElMu=False
LeadLepFamily="not selected"
GoodLep = None
leptons = None
lepton_TightRegion = 0
if 'DataHT' not in sample.label:
if passEle and not passMu:
if GoodEle != None and ele_lepton_veto:
GoodLep = GoodEle
lepton_TightRegion = copy.deepcopy(ele_TightRegion)
SingleEle = True
SingleMu = False
else:
continue
elif passMu and not passEle:
if GoodMu != None and mu_lepton_veto:
GoodLep = GoodMu
lepton_TightRegion = copy.deepcopy(mu_TightRegion)
SingleEle = False
SingleMu = True
else:
continue
elif passMu and passEle:
ElMu=True
else:
continue
else:
if passHT:
ElMu = True
else:
continue
if ElMu:
if GoodMu==None and GoodEle!=None and ele_lepton_veto:
GoodLep = GoodEle
lepton_TightRegion = copy.deepcopy(ele_TightRegion)
SingleEle = True
SingleMu = False
elif GoodMu!=None and mu_lepton_veto and GoodEle==None:
GoodLep = GoodMu
lepton_TightRegion = copy.deepcopy(mu_TightRegion)
SingleMu = True
SingleEle = False
elif GoodMu!=None and GoodEle!=None:
if ele_lepton_veto and not mu_lepton_veto:
GoodLep = GoodEle
lepton_TightRegion = copy.deepcopy(ele_TightRegion)
SingleEle = True
SingleMu = False
elif not ele_lepton_veto and mu_lepton_veto:
GoodLep = GoodMu
lepton_TightRegion = copy.deepcopy(mu_TightRegion)
SingleMu = True
SingleEle = False
elif ele_lepton_veto and mu_lepton_veto:
if GoodEle.pt > GoodMu.pt:
GoodLep = GoodEle
lepton_TightRegion = copy.deepcopy(ele_TightRegion)
SingleEle = True
SingleMu = False
else:
GoodLep = GoodMu
lepton_TightRegion = copy.deepcopy(mu_TightRegion)
SingleMu = True
SingleEle = False
else:
continue
else:
continue
vTrigEle, vTrigMu, vTrigHT = trig_finder(HLT, sample.year, sample.label)
if SingleEle==True:
if isMC:
HLT_effLumi = lumiFinder("Ele", vTrigEle, sample.year)
leptons = electrons
elif SingleMu==True:
if isMC:
HLT_effLumi = lumiFinder("Mu", vTrigMu, sample.year)
leptons = muons
elif not (SingleMu or SingleEle):
continue
if SingleEle and dataMu:
continue
if SingleMu and dataEle:
continue
if GoodLep==None or (lepton_TightRegion < 1):
if Debug:
print("exiting at lepton selection (without saving)")
continue
"""
goodJets = get_Jet(jets, 30)
bjets, nobjets = bjet_filter(goodJets, 'DeepFlv', 'M')
if len(goodJets) < 2 or len(fatjets) < 2:
continue
for jet in goodJets:
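        # partonFlavour: 5 = b, 4 = c, anything else is treated as a light (udsg) jet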
if abs(jet.partonFlavour) == 5:
h2_BTaggingEff_Denom_b.Fill(jet.pt, abs(jet.eta))
if len(bjet_filter([jet], 'DeepFlv', 'M')[0]) == 1:
h2_BTaggingEff_Num_b.Fill(jet.pt, abs(jet.eta))
elif abs(jet.partonFlavour) == 4:
h2_BTaggingEff_Denom_c.Fill(jet.pt, abs(jet.eta))
if len(bjet_filter([jet], 'DeepFlv', 'M')[0]) == 1:
h2_BTaggingEff_Num_c.Fill(jet.pt, abs(jet.eta))
else:
h2_BTaggingEff_Denom_udsg.Fill(jet.pt, abs(jet.eta))
if len(bjet_filter([jet], 'DeepFlv', 'M')[0]) == 1:
h2_BTaggingEff_Num_udsg.Fill(jet.pt, abs(jet.eta))
outTreeFile.cd()
h2_BTaggingEff_Denom_b.Write()
h2_BTaggingEff_Denom_c.Write()
h2_BTaggingEff_Denom_udsg.Write()
h2_BTaggingEff_Num_b.Write()
h2_BTaggingEff_Num_c.Write()
h2_BTaggingEff_Num_udsg.Write()
h2_Eff_b = ROOT.TEfficiency('h2_BTaggingEff_b',
'bjet efficiency;p_{T} [GeV];#eta', nptbins, ptbins, netabins, etabins)
h2_Eff_b.SetTotalHistogram(h2_BTaggingEff_Denom_b, '')
h2_Eff_b.SetPassedHistogram(h2_BTaggingEff_Num_b, '')
h2_Eff_c = ROOT.TEfficiency('h2_BTaggingEff_c',
'cjet efficiency;p_{T} [GeV];#eta', nptbins, ptbins, netabins, etabins)
h2_Eff_c.SetTotalHistogram(h2_BTaggingEff_Denom_c, '')
h2_Eff_c.SetPassedHistogram(h2_BTaggingEff_Num_c, '')
h2_Eff_udsg = ROOT.TEfficiency('h2_BTaggingEff_udsg',
'light jet efficiency;p_{T} [GeV];#eta', nptbins, ptbins, netabins, etabins
)
h2_Eff_udsg.SetTotalHistogram(h2_BTaggingEff_Denom_udsg, '')
h2_Eff_udsg.SetPassedHistogram(h2_BTaggingEff_Num_udsg, '')
h2_Eff_b.Write()
h2_Eff_c.Write()
h2_Eff_udsg.Write()
endTime = datetime.datetime.now()
print('Ending running at ' + str(endTime))
| #!/bin/env python3
import os
##print(os.environ)
##print("**********************************************************************")
##print("**********************************************************************")
##print("**********************************************************************")
##print(str(os.environ.get('PYTHONPATH')))
##print(str(os.environ.get('PYTHON3PATH')))
import sys
##print("*************** This is system version info ***************************")
##print(sys.version_info)
#import platform
##print("*************** This is python version info ***************************")
##print(platform.python_version())
import ROOT
##print("Succesfully imported ROOT")
import math
import datetime
import copy
from array import array
from skimtree_utils_ssWW_wFakes_old import *
if not "_UL" in sys.argv[1]:
if sys.argv[4] == 'remote':
from samples import *
Debug = False
else:
from samples.samples import *
Debug = True
else:
if sys.argv[4] == 'remote':
from samplesUL import *
Debug = False
else:
from samples.samplesUL import *
Debug = True
sample = sample_dict[sys.argv[1]]
part_idx = sys.argv[2]
file_list = list(map(str, sys.argv[3].strip('[]').split(',')))
#print("file_list: ", file_list, "\nloop #1 over it")
#for infile in file_list:
#print(infile)
def AddOverflow(h):
nxbins = h.GetXaxis().GetNbins()
nybins = h.GetYaxis().GetNbins()
idxx = 0.
idxy = nybins + 1
for ix in range(nxbins):
idxx = ix + 1
ovf_bincont = h.GetBinContent(idxx, idxy)
last_bincont = h.GetBinContent(idxx, nybins)
new_last_bincont = ovf_bincont + last_bincont
h.SetBinContent(idxx, nybins, new_last_bincont)
idxx = nxbins + 1
idxy = 0.
for iy in range(nybins):
idxy = iy + 1
ovf_bincont = h.GetBinContent(idxx, idxy)
last_bincont = h.GetBinContent(nxbins, idxy)
new_last_bincont = ovf_bincont + last_bincont
h.SetBinContent(nxbins, idxy, new_last_bincont)
startTime = datetime.datetime.now()
print("Starting running at " + str(startTime))
ROOT.gROOT.SetBatch()
leadingjet_ptcut = 150.
chain = ROOT.TChain('Events')
#print(chain)
#print("loop #2 over file_list")
for infile in file_list:
#print("Adding %s to the chain" %(infile))
chain.Add(infile)
print("Number of events in chain " + str(chain.GetEntries()))
#print("Number of events in tree from chain " + str((chain.GetTree()).GetEntries()))
#print("Type of tree from chain " + str(type(chain.GetTree())))
#treechain = (ROOT.TTree)(chain.GetTree())
tree = InputTree(chain)
print("Number of entries: " +str(tree.GetEntries()))
#print("tree: ", tree)
isMC = True
if ('Data' in sample.name):
isMC = False
#MCReco = MCReco * isMC
IsDim8 = False
if 'aQGC' in sample.name:
IsDim8 = True
dataEle = False
dataMu = False
if 'DataMu' in sample.name:
dataMu = True
if 'DataEle' in sample.name:
dataEle = True
username = str(os.environ.get('USER'))
inituser = str(os.environ.get('USER')[0])
#folder = 'vbtag'
#if not os.path.exists("/eos/user/" + inituser + "/" + username + "/VBS/nosynch/" + folder + "/" + sample.label):
#os.makedirs("/eos/user/" + inituser + "/" + username +"/VBS/nosynch/" + folder + "/" + sample.label)
#outpath = "/eos/user/" + inituser + "/" + username +"/VBS/nosynch/" + folder + "/" + sample.label + "/"
#++++++++++++++++++++++++++++++++++
#++ branching the new trees ++
#++++++++++++++++++++++++++++++++++
#print(outpath + sample.label+"_part"+str(part_idx)+".root")
outTreeFile = ROOT.TFile(sample.label+"_part"+str(part_idx)+".root", "RECREATE") #some name of the output file
#++++++++++++++++++++++++++++++++++
#++ All category ++
#++++++++++++++++++++++++++++++++++
#++++++++++++++++++++++++++++++++++
#++ Efficiency studies ++
#++++++++++++++++++++++++++++++++++
ptNBins = 100
ptMin = 0
ptMax = 1000.
etaNBins = 60
etaMin = -3.
etaMax = 3.
ptbins = array.array('d', [30, 50, 80, 140, 200, 300, 600, 1000])
etabins = array.array('d', [0.0, 0.8, 1.6, 2.4])
nptbins = len(ptbins)-1
netabins = len(etabins)-1
h2_BTaggingEff_Denom_b = ROOT.TH2D("h2_BTaggingEff_Denom_b", "MC bjet;p_{T} [GeV];#eta", nptbins, ptbins, netabins, etabins)
h2_BTaggingEff_Denom_c = ROOT.TH2D("h2_BTaggingEff_Denom_c", "MC cjet;p_{T} [GeV];#eta", nptbins, ptbins, netabins, etabins)
h2_BTaggingEff_Denom_udsg = ROOT.TH2D("h2_BTaggingEff_Denom_udsg", "MC ljet;p_{T} [GeV];#eta", nptbins, ptbins, netabins, etabins)
h2_BTaggingEff_Num_b = ROOT.TH2D("h2_BTaggingEff_Num_b", "Tagged bjet;p_{T} [GeV];#eta", nptbins, ptbins, netabins, etabins)
h2_BTaggingEff_Num_c = ROOT.TH2D("h2_BTaggingEff_Num_c", "Tagged cjet;p_{T} [GeV];#eta", nptbins, ptbins, netabins, etabins)
h2_BTaggingEff_Num_udsg = ROOT.TH2D("h2_BTaggingEff_Num_udsg", "Tagged ljet;p_{T} [GeV];#eta", nptbins, ptbins, netabins, etabins)
h2_BTaggingEff_Denom_b.Sumw2()
h2_BTaggingEff_Denom_c.Sumw2()
h2_BTaggingEff_Denom_udsg.Sumw2()
h2_BTaggingEff_Num_b.Sumw2()
h2_BTaggingEff_Num_c.Sumw2()
h2_BTaggingEff_Num_udsg.Sumw2()
#++++++++++++++++++++++++++++++++++
#++ looping over the events ++
#++++++++++++++++++++++++++++++++++
for i in range(tree.GetEntries()):
#++++++++++++++++++++++++++++++++++
#++ taking objects ++
#++++++++++++++++++++++++++++++++++
if Debug:
if i > 100:
break
if not Debug and i%5000 == 0:
print("Event #", i+1, " out of ", tree.GetEntries())
event = Event(tree,i)
electrons = Collection(event, "Electron")
muons = Collection(event, "Muon")
jets = Collection(event, "Jet")
njets = len(jets)
fatjets = Collection(event, "FatJet")
HLT = Object(event, "HLT")
PV = Object(event, "PV")
Flag = Object(event, 'Flag')
#++++++++++++++++++++++++++++++++++
#++ defining variables ++
#++++++++++++++++++++++++++++++++++
tightlep = None
tightlep_p4 = None
tightlep_p4t = None
tightlep_SF = None
tightlep_SFUp = None
tightlep_SFDown = None
recomet_p4t = None
PF_SF = None
PF_SFUp = None
PF_SFDown = None
PU_SF = None
PU_SFUp = None
PU_SFDown = None
#++++++++++++++++++++++++++++++++++
#++ starting the analysis ++
#++++++++++++++++++++++++++++++++++
#VetoMu = get_LooseMu(muons)
#goodMu = get_Mu(muons)
#VetoEle = get_LooseEle(electrons)
#goodEle = get_Ele(electrons)
year = sample.year
if(isMC):
runPeriod = ''
else:
runPeriod = sample.runP
if not isMC:
if not Flag.eeBadScFilter:
continue
#print "------ ", i
passMu, passEle, passHT, noTrigger = trig_map(HLT, PV, year, runPeriod, Flag)
if noTrigger:
continue
'''
GoodEle, ele_TightRegion = SelectLepton(electrons, False)
GoodMu, mu_TightRegion = SelectLepton(muons, True)
if GoodEle is None and GoodMu is None:
continue
ele_lepton_veto = -1
mu_lepton_veto = -1
if GoodEle != None:
ele_lepton_veto = LepVeto(GoodEle, electrons, muons)
if GoodMu != None:
mu_lepton_veto = LepVeto(GoodMu, electrons, muons)
SingleEle=False
SingleMu=False
ElMu=False
LeadLepFamily="not selected"
GoodLep = None
leptons = None
lepton_TightRegion = 0
if 'DataHT' not in sample.label:
if passEle and not passMu:
if GoodEle != None and ele_lepton_veto:
GoodLep = GoodEle
lepton_TightRegion = copy.deepcopy(ele_TightRegion)
SingleEle = True
SingleMu = False
else:
continue
elif passMu and not passEle:
if GoodMu != None and mu_lepton_veto:
GoodLep = GoodMu
lepton_TightRegion = copy.deepcopy(mu_TightRegion)
SingleEle = False
SingleMu = True
else:
continue
elif passMu and passEle:
ElMu=True
else:
continue
else:
if passHT:
ElMu = True
else:
continue
if ElMu:
if GoodMu==None and GoodEle!=None and ele_lepton_veto:
GoodLep = GoodEle
lepton_TightRegion = copy.deepcopy(ele_TightRegion)
SingleEle = True
SingleMu = False
elif GoodMu!=None and mu_lepton_veto and GoodEle==None:
GoodLep = GoodMu
lepton_TightRegion = copy.deepcopy(mu_TightRegion)
SingleMu = True
SingleEle = False
elif GoodMu!=None and GoodEle!=None:
if ele_lepton_veto and not mu_lepton_veto:
GoodLep = GoodEle
lepton_TightRegion = copy.deepcopy(ele_TightRegion)
SingleEle = True
SingleMu = False
elif not ele_lepton_veto and mu_lepton_veto:
GoodLep = GoodMu
lepton_TightRegion = copy.deepcopy(mu_TightRegion)
SingleMu = True
SingleEle = False
elif ele_lepton_veto and mu_lepton_veto:
if GoodEle.pt > GoodMu.pt:
GoodLep = GoodEle
lepton_TightRegion = copy.deepcopy(ele_TightRegion)
SingleEle = True
SingleMu = False
else:
GoodLep = GoodMu
lepton_TightRegion = copy.deepcopy(mu_TightRegion)
SingleMu = True
SingleEle = False
else:
continue
else:
continue
vTrigEle, vTrigMu, vTrigHT = trig_finder(HLT, sample.year, sample.label)
if SingleEle==True:
if isMC:
HLT_effLumi = lumiFinder("Ele", vTrigEle, sample.year)
leptons = electrons
elif SingleMu==True:
if isMC:
HLT_effLumi = lumiFinder("Mu", vTrigMu, sample.year)
leptons = muons
elif not (SingleMu or SingleEle):
continue
if SingleEle and dataMu:
continue
if SingleMu and dataEle:
continue
if GoodLep==None or (lepton_TightRegion < 1):
if Debug:
print("exiting at lepton selection (without saving)")
continue
'''
######################################
## Selecting only jets with pt>30 ##
######################################
goodJets = get_Jet(jets, 30)
bjets, nobjets = bjet_filter(goodJets, 'DeepFlv', 'M')
if (len(goodJets) < 2 or len(fatjets) < 2):
continue
for jet in goodJets:
if(abs(jet.partonFlavour) == 5):
h2_BTaggingEff_Denom_b.Fill(jet.pt, abs(jet.eta))
if(len(bjet_filter([jet], 'DeepFlv', 'M')[0])==1):
h2_BTaggingEff_Num_b.Fill(jet.pt, abs(jet.eta))
elif(abs(jet.partonFlavour) == 4):
h2_BTaggingEff_Denom_c.Fill(jet.pt, abs(jet.eta))
if(len(bjet_filter([jet], 'DeepFlv', 'M')[0])==1):
h2_BTaggingEff_Num_c.Fill(jet.pt, abs(jet.eta))
else:
h2_BTaggingEff_Denom_udsg.Fill(jet.pt, abs(jet.eta))
if(len(bjet_filter([jet], 'DeepFlv', 'M')[0])==1):
h2_BTaggingEff_Num_udsg.Fill(jet.pt, abs(jet.eta))
outTreeFile.cd()
h2_BTaggingEff_Denom_b.Write()
h2_BTaggingEff_Denom_c.Write()
h2_BTaggingEff_Denom_udsg.Write()
h2_BTaggingEff_Num_b.Write()
h2_BTaggingEff_Num_c.Write()
h2_BTaggingEff_Num_udsg.Write()
h2_Eff_b = ROOT.TEfficiency("h2_BTaggingEff_b", "bjet efficiency;p_{T} [GeV];#eta", nptbins, ptbins, netabins, etabins)
h2_Eff_b.SetTotalHistogram(h2_BTaggingEff_Denom_b, "")
h2_Eff_b.SetPassedHistogram(h2_BTaggingEff_Num_b, "")
h2_Eff_c = ROOT.TEfficiency("h2_BTaggingEff_c", "cjet efficiency;p_{T} [GeV];#eta", nptbins, ptbins, netabins, etabins)
h2_Eff_c.SetTotalHistogram(h2_BTaggingEff_Denom_c, "")
h2_Eff_c.SetPassedHistogram(h2_BTaggingEff_Num_c, "")
h2_Eff_udsg = ROOT.TEfficiency("h2_BTaggingEff_udsg", "light jet efficiency;p_{T} [GeV];#eta", nptbins, ptbins, netabins, etabins)
h2_Eff_udsg.SetTotalHistogram(h2_BTaggingEff_Denom_udsg, "")
h2_Eff_udsg.SetPassedHistogram(h2_BTaggingEff_Num_udsg, "")
h2_Eff_b.Write()
h2_Eff_c.Write()
h2_Eff_udsg.Write()
endTime = datetime.datetime.now()
print("Ending running at " + str(endTime))
| [
1,
2,
3,
4,
5
] |
1,941 | f41ab6813fb7067089abe223b9006adde40630cd | <mask token>
def index() ->dict:
return {}
<mask token>
@pytest.fixture
def client(app):
return TestClient(app)
def test_request_id_can_be_autogenerated(client):
response = client.get('/')
assert response.headers['x-request-id']
assert RequestId.get_request_id() is None
def test_request_id_can_be_set_from_request_headers(client):
response = client.get('/', headers={'x-request-id': 'a-request-id'})
assert response.headers['x-request-id'] == 'a-request-id'
assert RequestId.get_request_id() is None
def test_request_id_can_be_set_on_error(client):
response = client.get('/fail', headers={'x-request-id': 'a-request-id'})
assert response.headers['x-request-id'] == 'a-request-id'
assert RequestId.get_request_id() is None
def test_request_id_can_be_set_on_internal_error(client):
with pytest.raises(RuntimeError):
client.get('/fail-2', headers={'x-request-id': 'a-request-id'})
assert RequestId.get_request_id() is None
| <mask token>
def index() ->dict:
return {}
def fail() ->dict:
raise exceptions.BadRequest('fail')
def fail_2() ->dict:
raise RuntimeError('fail')
<mask token>
@pytest.fixture(scope='session')
def app():
return App(routes=routes, event_hooks=event_hooks)
@pytest.fixture
def client(app):
return TestClient(app)
def test_request_id_can_be_autogenerated(client):
response = client.get('/')
assert response.headers['x-request-id']
assert RequestId.get_request_id() is None
def test_request_id_can_be_set_from_request_headers(client):
response = client.get('/', headers={'x-request-id': 'a-request-id'})
assert response.headers['x-request-id'] == 'a-request-id'
assert RequestId.get_request_id() is None
def test_request_id_can_be_set_on_error(client):
response = client.get('/fail', headers={'x-request-id': 'a-request-id'})
assert response.headers['x-request-id'] == 'a-request-id'
assert RequestId.get_request_id() is None
def test_request_id_can_be_set_on_internal_error(client):
with pytest.raises(RuntimeError):
client.get('/fail-2', headers={'x-request-id': 'a-request-id'})
assert RequestId.get_request_id() is None
| <mask token>
def index() ->dict:
return {}
def fail() ->dict:
raise exceptions.BadRequest('fail')
def fail_2() ->dict:
raise RuntimeError('fail')
routes = [Route('/', method='GET', handler=index), Route('/fail', method=
'GET', handler=fail), Route('/fail-2', method='GET', handler=fail_2)]
event_hooks = [RequestIdHooks()]
@pytest.fixture(scope='session')
def app():
return App(routes=routes, event_hooks=event_hooks)
@pytest.fixture
def client(app):
return TestClient(app)
def test_request_id_can_be_autogenerated(client):
response = client.get('/')
assert response.headers['x-request-id']
assert RequestId.get_request_id() is None
def test_request_id_can_be_set_from_request_headers(client):
response = client.get('/', headers={'x-request-id': 'a-request-id'})
assert response.headers['x-request-id'] == 'a-request-id'
assert RequestId.get_request_id() is None
def test_request_id_can_be_set_on_error(client):
response = client.get('/fail', headers={'x-request-id': 'a-request-id'})
assert response.headers['x-request-id'] == 'a-request-id'
assert RequestId.get_request_id() is None
def test_request_id_can_be_set_on_internal_error(client):
with pytest.raises(RuntimeError):
client.get('/fail-2', headers={'x-request-id': 'a-request-id'})
assert RequestId.get_request_id() is None
| import pytest
from apistar import App, Route, TestClient, exceptions
from apistar_request_id import RequestId, RequestIdHooks
def index() ->dict:
return {}
def fail() ->dict:
raise exceptions.BadRequest('fail')
def fail_2() ->dict:
raise RuntimeError('fail')
routes = [Route('/', method='GET', handler=index), Route('/fail', method=
'GET', handler=fail), Route('/fail-2', method='GET', handler=fail_2)]
event_hooks = [RequestIdHooks()]
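# the hook injects an x-request-id header into every response and clears the stored id afterwards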
@pytest.fixture(scope='session')
def app():
return App(routes=routes, event_hooks=event_hooks)
@pytest.fixture
def client(app):
return TestClient(app)
def test_request_id_can_be_autogenerated(client):
response = client.get('/')
assert response.headers['x-request-id']
assert RequestId.get_request_id() is None
def test_request_id_can_be_set_from_request_headers(client):
response = client.get('/', headers={'x-request-id': 'a-request-id'})
assert response.headers['x-request-id'] == 'a-request-id'
assert RequestId.get_request_id() is None
def test_request_id_can_be_set_on_error(client):
response = client.get('/fail', headers={'x-request-id': 'a-request-id'})
assert response.headers['x-request-id'] == 'a-request-id'
assert RequestId.get_request_id() is None
def test_request_id_can_be_set_on_internal_error(client):
with pytest.raises(RuntimeError):
client.get('/fail-2', headers={'x-request-id': 'a-request-id'})
assert RequestId.get_request_id() is None
| import pytest
from apistar import App, Route, TestClient, exceptions
from apistar_request_id import RequestId, RequestIdHooks
def index() -> dict:
return {}
def fail() -> dict:
raise exceptions.BadRequest("fail")
def fail_2() -> dict:
raise RuntimeError("fail")
routes = [
Route("/", method="GET", handler=index),
Route("/fail", method="GET", handler=fail),
Route("/fail-2", method="GET", handler=fail_2),
]
event_hooks = [
RequestIdHooks()
]
@pytest.fixture(scope="session")
def app():
return App(routes=routes, event_hooks=event_hooks)
@pytest.fixture
def client(app):
return TestClient(app)
def test_request_id_can_be_autogenerated(client):
# Given that I don't have an existing request id
# When I make a request to the app
response = client.get("/")
# Then my response should contain an autogenerated request id
assert response.headers["x-request-id"]
# And the request id for the current thread should be cleared
assert RequestId.get_request_id() is None
def test_request_id_can_be_set_from_request_headers(client):
# Given that I have an existing request id
# When I make a request to the app
response = client.get("/", headers={"x-request-id": "a-request-id"})
# Then that same request id should appear in the response headers
assert response.headers["x-request-id"] == "a-request-id"
# And the request id for the current thread should be cleared
assert RequestId.get_request_id() is None
def test_request_id_can_be_set_on_error(client):
# Given that I have an existing request id
# When I make a request to the app
response = client.get("/fail", headers={"x-request-id": "a-request-id"})
# Then that same request id should appear in the response headers
assert response.headers["x-request-id"] == "a-request-id"
# And the request id for the current thread should be cleared
assert RequestId.get_request_id() is None
def test_request_id_can_be_set_on_internal_error(client):
# Given that I have an existing request id
# When I make a request to the app
with pytest.raises(RuntimeError):
client.get("/fail-2", headers={"x-request-id": "a-request-id"})
# Then the request id should be set and subsequently cleared
assert RequestId.get_request_id() is None
| [
6,
9,
10,
11,
12
] |
1,942 | bf2a827e9c314da2ce9ad9f8f61b82c9c798e2f9 | #! /usr/bin/env python
from nutils import *
@log.title
def makeplots( domain, geom, c, psi, index ):
force = c * psi.grad(geom)
xpnt, cpnt = domain.elem_eval( [ geom, c ], ischeme='bezier5', title='mesh', separate=True )
xy, uv = domain.elem_eval( [ geom, force ], ischeme='uniform1', title='quiver', separate=False )
with plot.PyPlot( 'concentration', ndigits=4, index=index ) as plt:
plt.mesh( xpnt, cpnt )
plt.colorbar()
plt.quiver( xy[:,0], xy[:,1], uv[:,0], uv[:,1], pivot='middle' )
plt.xlim( 0, 1 )
plt.ylim( 0, 1 )
def main( nelems=40, epsilon=None, timestep=.01, init='random', mean=-.3, std=.1, tol=1e-5, maxiter=-1, plot=True ):
mineps = 2./nelems
if epsilon is None:
log.info( 'setting epsilon=%f' % mineps )
epsilon = mineps
elif epsilon < mineps:
log.warning( 'epsilon under crititical threshold: %f < %f' % ( epsilon, mineps ) )
xnodes = ynodes = numpy.linspace(0,1,nelems+1)
domain, geom = mesh.rectilinear( [ xnodes, ynodes ], periodic=[0] )
funcsp = domain.splinefunc( degree=3, neumann=(2,3), removedofs=(None,(0,-1)) )
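  # chemical potential split: psi_c and psi_m enter the matrix (implicit), psi_e the right-hand side (explicit)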
psi_c = lambda c_: (1./epsilon) * (2*c_)
psi_m = lambda c_: -epsilon * c_.laplace(geom)
psi_e = lambda c_: (1./epsilon) * (c_**3-3*c_)
psi = lambda c_: psi_c(c_) + psi_m(c_) + psi_e(c_)
A = function.outer( funcsp ) \
+ timestep * function.outer( funcsp.grad(geom), psi_c(funcsp).grad(geom) ).sum() \
- timestep * function.outer( funcsp.laplace(geom), psi_m(funcsp) )
matrix = domain.integrate( A, geometry=geom, ischeme='gauss4' )
if init == 'random':
numpy.random.seed( 0 )
c = funcsp.dot( numpy.random.normal( mean, std, funcsp.shape ) )
elif init == 'sine':
x, y = geom
c = mean + (y-.5+.5*function.sin(x*2*numpy.pi)) * std
else:
    raise Exception( 'unknown init %r' % init )
__log__ = log.count( 'timestep' )
for iiter in __log__:
if plot:
makeplots( domain, geom, c, psi(c), iiter )
if iiter == maxiter:
break
b = funcsp * c - timestep * ( funcsp.grad(geom) * psi_e(c).grad(geom) ).sum()
rhs, total = domain.integrate( [ b, c ], geometry=geom, ischeme='gauss4' )
log.user( 'total concentration:', total )
if iiter:
res = matrix.res( lhs, rhs )
log.info( 'residual:', res )
if res < tol:
break
lhs = matrix.solve( rhs, tol=1e-10 )
c = funcsp.dot( lhs )
return iiter, lhs
def unittest():
retvals = main( nelems=8, init='sine', mean=0, std=.1, maxiter=10, plot=False )
assert debug.checkdata( retvals, '''
eNqtkdsNBCEIRdvRBDY8VLCg6b+FzSBYwXyBgofLlaEN4NmhMUFDVx8PTkDbc0ayWCQSVTslVqIHFyhz
tExf+kZb297oLCsaXZJhnjdz74TJCBj9oNKqVfN9fXk1AGsiloQrKTWW6NriruVkkqyhyaJ9WLQ4WK47
WEP8sOwwiOLsQ+17XR9Z9en33UVzcywrsLzBa1aaV26WveV3h/4HtuWGNw==''' )
util.run( main, unittest )
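# A hedged usage note (added): nutils' util.run exposes main's keyword
# arguments on the command line as key=value pairs, so a run with a finer
# mesh and the sine initial condition would look roughly like
#   python thisscript.py nelems=80 timestep=.005 init=sine plot=False
# (the script name is assumed; the original filename is not shown in this dump).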
| null | null | null | null | [
0
] |
1,943 | 174f5b04f02ec0c9651d5e34c8b04df8bfd4dff4 | #!/usr/bin/env python
import sys
def solve():
numEngines = int(sys.stdin.readline())
engines = []
for _ in range(numEngines):
engine = sys.stdin.readline()
engines.append(engine)
numQueries = int(sys.stdin.readline())
queries = []
for _ in range(numQueries):
query = sys.stdin.readline()
queries.append(query)
remainingEngines = set(engines)
switches = 0
for query in queries:
remainingEngines.discard(query)
if not remainingEngines:
remainingEngines = set(engines)
remainingEngines.discard(query)
switches += 1
return switches
cases = int(sys.stdin.readline())
for case in range(cases):
print 'Case #%d: %s' % (case + 1, solve())
| null | null | null | null | [
0
] |
1,944 | 5b4651f37cdcbb13f8ddd03327ef65af0f9cf61d | <mask token>
def getDates():
dates = store.mapStore('dates')
data = store.mapStore('data')
exceptions = store.mapStore('exceptions')
if len(exceptions) > 0:
return False
try:
d0 = date(2020, 1, 13)
d1 = data[0, FIRST:]
i = 0
newdates = []
while i <= d1.shape[0] - 1:
diffday = np.datetime64(d0 + timedelta(days=i))
newdates.append(diffday)
i += 1
newdates = np.concatenate((dates, newdates))
store.updateStore(dates=newdates)
except Exception as e:
exceptions = store.mapStore('exceptions')
exceptions.append(e)
print('Problems with handling data numpy array')
print(e)
return True
def addDataToDB(conn, filterData):
data = store.mapStore('data')
dates = store.mapStore('dates')
exceptions = store.mapStore('exceptions')
if len(exceptions) > 0:
return False
dataValues = data[1:, FIRST:]
datesValues = dates
if filterData is not None:
datesValues = datesValues[filterData]
dataValues = dataValues[:, filterData]
sql = (
'INSERT INTO apple_transport(geo_type, region, transportation_type, alternative_name, date, value) VALUES(%s, %s, %s, %s, %s, %s)'
)
for ix, iy in np.ndindex(dataValues.shape):
try:
date = datesValues[iy].astype(datetime)
values = data[ix + 1, :FIRST]
values = tuple(values.tolist())
item = dataValues[ix, iy].item()
try:
item = float(item)
except:
item = None
values = values + tuple([date, item])
cursor = conn.cursor()
cursor.execute(sql, values)
conn.commit()
cursor.close()
except (Exception, psycopg2.DatabaseError) as error:
print(error)
exceptions = store.mapStore('exceptions')
exceptions.append(error)
def addPercentileMessageToDB():
data = store.mapStore('data')
states_walking = filters.filterStates(data[1:, :])
states_driving = filters.filterStates(data[1:, :], 'driving')
states_transit = filters.filterStates(data[1:, :], 'transit')
underq1, overq1, percentile_walking_25 = filters.filterPercentiles(
states_walking, 25)
undermedian, overmedian, percentile_walking_50 = filters.filterPercentiles(
states_walking, 50)
underq3, overq3, percentile_walking_75 = filters.filterPercentiles(
states_walking, 75)
underq1_driving, overq1_driving, percentile_driving_25 = (filters.
filterPercentiles(states_driving, 25))
undermedian_driving, overmedian_driving, percentile_driving_50 = (filters
.filterPercentiles(states_driving, 50))
underq3_driving, overq3_driving, percentile_driving_75 = (filters.
filterPercentiles(states_driving, 75))
underq1_transit, overq1_transit, percentile_transit_25 = (filters.
filterPercentiles(states_transit, 25))
undermedian_transit, overmedian_transit, percentile_transit_50 = (filters
.filterPercentiles(states_transit, 50))
underq3_transit, overq3_transit, percentile_transit_75 = (filters.
filterPercentiles(states_transit, 75))
    over100_walking = filters.filerOver100(states_walking)
    underq1_states = states_walking[underq1, 1]
    overq3_states = states_walking[overq3, 1]
    over100_states = states_walking[over100_walking, 1]
    over100_driving = filters.filerOver100(states_driving)
    underq1_states_driving = states_driving[underq1_driving, 1]
    overq3_states_driving = states_driving[overq3_driving, 1]
    over100_states_driving = states_driving[over100_driving, 1]
    over100_transit = filters.filerOver100(states_transit)
    underq1_states_transit = states_transit[underq1_transit, 1]
    overq3_states_transit = states_transit[overq3_transit, 1]
    over100_states_transit = states_transit[over100_transit, 1]
    print('Walking under 25 percentile (far from normal) ' +
        percentile_walking_25.astype(np.str))
    print(underq1_states)
    print('Walking over 75 percentile (over normal transportation) ' +
        percentile_walking_75.astype(np.str))
    print(overq3_states)
    print('Walking over 100 in comparison to 13.1.2020')
    print(over100_states)
    print('Median value is ' + percentile_walking_50.astype(np.str))
    print(' ')
    print('Driving under 25 percentile (far from normal) ' +
        percentile_driving_25.astype(np.str))
    print(underq1_states_driving)
    print('Driving over 75 percentile (over normal transportation) ',
        percentile_driving_75.astype(np.str))
    print(overq3_states_driving)
    print('Driving over 100% in comparison to 13.1.2020')
    print(over100_states_driving)
    print('Median value is ' + percentile_driving_50.astype(np.str))
    print(' ')
    print('Transit under 25 percentile (far from normal) ' +
        percentile_transit_25.astype(np.str))
    print(underq1_states_transit)
    print('Transit over 75 percentile (over normal transportation) ',
        percentile_transit_75.astype(np.str))
    print(overq3_states_transit.astype(np.str))
    print('Transit over 100 in comparison to 13.1.2020')
    print(over100_states_transit)
    print('Median value is ' + percentile_transit_50.astype(np.str))
print(' ')
| <mask token>
def getData():
today = store.mapStore('today')
npdata = store.mapStore('data')
filedate = np.datetime64(today - timedelta(days=2))
try:
url = (
'https://covid19-static.cdn-apple.com/covid19-mobility-data/2007HotfixDev49/v2/en-us/applemobilitytrends-{}.csv'
.format(filedate))
download = requests.get(url)
download.encoding = 'utf-8'
temp_file = open('temp/temp.csv', 'w', encoding='utf8')
temp_file.writelines(download.text)
npcsv = np.genfromtxt('temp/temp.csv', delimiter=',', dtype=np.str,
encoding='utf8', invalid_raise=False, missing_values=np.nan,
filling_values=np.nan)
temp_file.close()
store.updateStore(data=npcsv)
except Exception as e:
exceptions = store.mapStore('exceptions')
exceptions.append(e)
        print('Not possible to read csv file.')
print(e)
def getDates():
dates = store.mapStore('dates')
data = store.mapStore('data')
exceptions = store.mapStore('exceptions')
if len(exceptions) > 0:
return False
try:
d0 = date(2020, 1, 13)
d1 = data[0, FIRST:]
i = 0
newdates = []
while i <= d1.shape[0] - 1:
diffday = np.datetime64(d0 + timedelta(days=i))
newdates.append(diffday)
i += 1
newdates = np.concatenate((dates, newdates))
store.updateStore(dates=newdates)
except Exception as e:
exceptions = store.mapStore('exceptions')
exceptions.append(e)
print('Problems with handling data numpy array')
print(e)
return True
def addDataToDB(conn, filterData):
data = store.mapStore('data')
dates = store.mapStore('dates')
exceptions = store.mapStore('exceptions')
if len(exceptions) > 0:
return False
dataValues = data[1:, FIRST:]
datesValues = dates
if filterData is not None:
datesValues = datesValues[filterData]
dataValues = dataValues[:, filterData]
sql = (
'INSERT INTO apple_transport(geo_type, region, transportation_type, alternative_name, date, value) VALUES(%s, %s, %s, %s, %s, %s)'
)
for ix, iy in np.ndindex(dataValues.shape):
try:
date = datesValues[iy].astype(datetime)
values = data[ix + 1, :FIRST]
values = tuple(values.tolist())
item = dataValues[ix, iy].item()
try:
item = float(item)
except:
item = None
values = values + tuple([date, item])
cursor = conn.cursor()
cursor.execute(sql, values)
conn.commit()
cursor.close()
except (Exception, psycopg2.DatabaseError) as error:
print(error)
exceptions = store.mapStore('exceptions')
exceptions.append(error)
def addPercentileMessageToDB():
data = store.mapStore('data')
states_walking = filters.filterStates(data[1:, :])
states_driving = filters.filterStates(data[1:, :], 'driving')
states_transit = filters.filterStates(data[1:, :], 'transit')
underq1, overq1, percentile_walking_25 = filters.filterPercentiles(
states_walking, 25)
undermedian, overmedian, percentile_walking_50 = filters.filterPercentiles(
states_walking, 50)
underq3, overq3, percentile_walking_75 = filters.filterPercentiles(
states_walking, 75)
underq1_driving, overq1_driving, percentile_driving_25 = (filters.
filterPercentiles(states_driving, 25))
undermedian_driving, overmedian_driving, percentile_driving_50 = (filters
.filterPercentiles(states_driving, 50))
underq3_driving, overq3_driving, percentile_driving_75 = (filters.
filterPercentiles(states_driving, 75))
underq1_transit, overq1_transit, percentile_transit_25 = (filters.
filterPercentiles(states_transit, 25))
undermedian_transit, overmedian_transit, percentile_transit_50 = (filters
.filterPercentiles(states_transit, 50))
underq3_transit, overq3_transit, percentile_transit_75 = (filters.
filterPercentiles(states_transit, 75))
    over100_walking = filters.filerOver100(states_walking)
    underq1_states = states_walking[underq1, 1]
    overq3_states = states_walking[overq3, 1]
    over100_states = states_walking[over100_walking, 1]
    over100_driving = filters.filerOver100(states_driving)
    underq1_states_driving = states_driving[underq1_driving, 1]
    overq3_states_driving = states_driving[overq3_driving, 1]
    over100_states_driving = states_driving[over100_driving, 1]
    over100_transit = filters.filerOver100(states_transit)
    underq1_states_transit = states_transit[underq1_transit, 1]
    overq3_states_transit = states_transit[overq3_transit, 1]
    over100_states_transit = states_transit[over100_transit, 1]
    print('Walking under 25 percentile (far from normal) ' +
        percentile_walking_25.astype(np.str))
    print(underq1_states)
    print('Walking over 75 percentile (over normal transportation) ' +
        percentile_walking_75.astype(np.str))
    print(overq3_states)
    print('Walking over 100 in comparison to 13.1.2020')
    print(over100_states)
    print('Median value is ' + percentile_walking_50.astype(np.str))
    print(' ')
    print('Driving under 25 percentile (far from normal) ' +
        percentile_driving_25.astype(np.str))
    print(underq1_states_driving)
    print('Driving over 75 percentile (over normal transportation) ',
        percentile_driving_75.astype(np.str))
    print(overq3_states_driving)
    print('Driving over 100% in comparison to 13.1.2020')
    print(over100_states_driving)
    print('Median value is ' + percentile_driving_50.astype(np.str))
    print(' ')
    print('Transit under 25 percentile (far from normal) ' +
        percentile_transit_25.astype(np.str))
    print(underq1_states_transit)
    print('Transit over 75 percentile (over normal transportation) ',
        percentile_transit_75.astype(np.str))
    print(overq3_states_transit.astype(np.str))
    print('Transit over 100 in comparison to 13.1.2020')
    print(over100_states_transit)
    print('Median value is ' + percentile_transit_50.astype(np.str))
print(' ')
| <mask token>
FIRST = 4
def prepareDate():
pc_tz = timezone('US/Pacific')
n = datetime.now(pc_tz)
nd = n.date()
store.updateStore(today=nd)
def getData():
today = store.mapStore('today')
npdata = store.mapStore('data')
filedate = np.datetime64(today - timedelta(days=2))
try:
url = (
'https://covid19-static.cdn-apple.com/covid19-mobility-data/2007HotfixDev49/v2/en-us/applemobilitytrends-{}.csv'
.format(filedate))
download = requests.get(url)
download.encoding = 'utf-8'
temp_file = open('temp/temp.csv', 'w', encoding='utf8')
temp_file.writelines(download.text)
npcsv = np.genfromtxt('temp/temp.csv', delimiter=',', dtype=np.str,
encoding='utf8', invalid_raise=False, missing_values=np.nan,
filling_values=np.nan)
temp_file.close()
store.updateStore(data=npcsv)
except Exception as e:
exceptions = store.mapStore('exceptions')
exceptions.append(e)
        print('Not possible to read csv file.')
print(e)
def getDates():
dates = store.mapStore('dates')
data = store.mapStore('data')
exceptions = store.mapStore('exceptions')
if len(exceptions) > 0:
return False
try:
d0 = date(2020, 1, 13)
d1 = data[0, FIRST:]
i = 0
newdates = []
while i <= d1.shape[0] - 1:
diffday = np.datetime64(d0 + timedelta(days=i))
newdates.append(diffday)
i += 1
newdates = np.concatenate((dates, newdates))
store.updateStore(dates=newdates)
except Exception as e:
exceptions = store.mapStore('exceptions')
exceptions.append(e)
print('Problems with handling data numpy array')
print(e)
return True
def addDataToDB(conn, filterData):
data = store.mapStore('data')
dates = store.mapStore('dates')
exceptions = store.mapStore('exceptions')
if len(exceptions) > 0:
return False
dataValues = data[1:, FIRST:]
datesValues = dates
if filterData is not None:
datesValues = datesValues[filterData]
dataValues = dataValues[:, filterData]
sql = (
'INSERT INTO apple_transport(geo_type, region, transportation_type, alternative_name, date, value) VALUES(%s, %s, %s, %s, %s, %s)'
)
for ix, iy in np.ndindex(dataValues.shape):
try:
date = datesValues[iy].astype(datetime)
values = data[ix + 1, :FIRST]
values = tuple(values.tolist())
item = dataValues[ix, iy].item()
try:
item = float(item)
except:
item = None
values = values + tuple([date, item])
cursor = conn.cursor()
cursor.execute(sql, values)
conn.commit()
cursor.close()
except (Exception, psycopg2.DatabaseError) as error:
print(error)
exceptions = store.mapStore('exceptions')
exceptions.append(error)
def addPercentileMessageToDB():
data = store.mapStore('data')
states_walking = filters.filterStates(data[1:, :])
states_driving = filters.filterStates(data[1:, :], 'driving')
states_transit = filters.filterStates(data[1:, :], 'transit')
underq1, overq1, percentile_walking_25 = filters.filterPercentiles(
states_walking, 25)
undermedian, overmedian, percentile_walking_50 = filters.filterPercentiles(
states_walking, 50)
underq3, overq3, percentile_walking_75 = filters.filterPercentiles(
states_walking, 75)
underq1_driving, overq1_driving, percentile_driving_25 = (filters.
filterPercentiles(states_driving, 25))
undermedian_driving, overmedian_driving, percentile_driving_50 = (filters
.filterPercentiles(states_driving, 50))
underq3_driving, overq3_driving, percentile_driving_75 = (filters.
filterPercentiles(states_driving, 75))
underq1_transit, overq1_transit, percentile_transit_25 = (filters.
filterPercentiles(states_transit, 25))
undermedian_transit, overmedian_transit, percentile_transit_50 = (filters
.filterPercentiles(states_transit, 50))
underq3_transit, overq3_transit, percentile_transit_75 = (filters.
filterPercentiles(states_transit, 75))
    over100_walking = filters.filerOver100(states_walking)
    underq1_states = states_walking[underq1, 1]
    overq3_states = states_walking[overq3, 1]
    over100_states = states_walking[over100_walking, 1]
    over100_driving = filters.filerOver100(states_driving)
    underq1_states_driving = states_driving[underq1_driving, 1]
    overq3_states_driving = states_driving[overq3_driving, 1]
    over100_states_driving = states_driving[over100_driving, 1]
    over100_transit = filters.filerOver100(states_transit)
    underq1_states_transit = states_transit[underq1_transit, 1]
    overq3_states_transit = states_transit[overq3_transit, 1]
    over100_states_transit = states_transit[over100_transit, 1]
    print('Walking under 25 percentile (far from normal) ' +
        percentile_walking_25.astype(np.str))
    print(underq1_states)
    print('Walking over 75 percentile (over normal transportation) ' +
        percentile_walking_75.astype(np.str))
    print(overq3_states)
    print('Walking over 100 in comparison to 13.1.2020')
    print(over100_states)
    print('Median value is ' + percentile_walking_50.astype(np.str))
    print(' ')
    print('Driving under 25 percentile (far from normal) ' +
        percentile_driving_25.astype(np.str))
    print(underq1_states_driving)
    print('Driving over 75 percentile (over normal transportation) ',
        percentile_driving_75.astype(np.str))
    print(overq3_states_driving)
    print('Driving over 100% in comparison to 13.1.2020')
    print(over100_states_driving)
    print('Median value is ' + percentile_driving_50.astype(np.str))
    print(' ')
    print('Transit under 25 percentile (far from normal) ' +
        percentile_transit_25.astype(np.str))
    print(underq1_states_transit)
    print('Transit over 75 percentile (over normal transportation) ',
        percentile_transit_75.astype(np.str))
    print(overq3_states_transit.astype(np.str))
    print('Transit over 100 in comparison to 13.1.2020')
    print(over100_states_transit)
    print('Median value is ' + percentile_transit_50.astype(np.str))
print(' ')
| import numpy as np
from datetime import date, timedelta, datetime
from pytz import timezone
import store
import psycopg2
import requests
import os
import filters
FIRST = 4
def prepareDate():
pc_tz = timezone('US/Pacific')
n = datetime.now(pc_tz)
nd = n.date()
store.updateStore(today=nd)
def getData():
today = store.mapStore('today')
npdata = store.mapStore('data')
filedate = np.datetime64(today - timedelta(days=2))
try:
url = (
'https://covid19-static.cdn-apple.com/covid19-mobility-data/2007HotfixDev49/v2/en-us/applemobilitytrends-{}.csv'
.format(filedate))
download = requests.get(url)
download.encoding = 'utf-8'
temp_file = open('temp/temp.csv', 'w', encoding='utf8')
temp_file.writelines(download.text)
npcsv = np.genfromtxt('temp/temp.csv', delimiter=',', dtype=np.str,
encoding='utf8', invalid_raise=False, missing_values=np.nan,
filling_values=np.nan)
temp_file.close()
store.updateStore(data=npcsv)
except Exception as e:
exceptions = store.mapStore('exceptions')
exceptions.append(e)
        print('Not possible to read csv file.')
print(e)
def getDates():
dates = store.mapStore('dates')
data = store.mapStore('data')
exceptions = store.mapStore('exceptions')
if len(exceptions) > 0:
return False
try:
d0 = date(2020, 1, 13)
d1 = data[0, FIRST:]
i = 0
newdates = []
while i <= d1.shape[0] - 1:
diffday = np.datetime64(d0 + timedelta(days=i))
newdates.append(diffday)
i += 1
newdates = np.concatenate((dates, newdates))
store.updateStore(dates=newdates)
except Exception as e:
exceptions = store.mapStore('exceptions')
exceptions.append(e)
print('Problems with handling data numpy array')
print(e)
return True
def addDataToDB(conn, filterData):
data = store.mapStore('data')
dates = store.mapStore('dates')
exceptions = store.mapStore('exceptions')
if len(exceptions) > 0:
return False
dataValues = data[1:, FIRST:]
datesValues = dates
if filterData is not None:
datesValues = datesValues[filterData]
dataValues = dataValues[:, filterData]
sql = (
'INSERT INTO apple_transport(geo_type, region, transportation_type, alternative_name, date, value) VALUES(%s, %s, %s, %s, %s, %s)'
)
for ix, iy in np.ndindex(dataValues.shape):
try:
date = datesValues[iy].astype(datetime)
values = data[ix + 1, :FIRST]
values = tuple(values.tolist())
item = dataValues[ix, iy].item()
try:
item = float(item)
except:
item = None
values = values + tuple([date, item])
cursor = conn.cursor()
cursor.execute(sql, values)
conn.commit()
cursor.close()
except (Exception, psycopg2.DatabaseError) as error:
print(error)
exceptions = store.mapStore('exceptions')
exceptions.append(error)
def addPercentileMessageToDB():
data = store.mapStore('data')
states_walking = filters.filterStates(data[1:, :])
states_driving = filters.filterStates(data[1:, :], 'driving')
states_transit = filters.filterStates(data[1:, :], 'transit')
underq1, overq1, percentile_walking_25 = filters.filterPercentiles(
states_walking, 25)
undermedian, overmedian, percentile_walking_50 = filters.filterPercentiles(
states_walking, 50)
underq3, overq3, percentile_walking_75 = filters.filterPercentiles(
states_walking, 75)
underq1_driving, overq1_driving, percentile_driving_25 = (filters.
filterPercentiles(states_driving, 25))
undermedian_driving, overmedian_driving, percentile_driving_50 = (filters
.filterPercentiles(states_driving, 50))
underq3_driving, overq3_driving, percentile_driving_75 = (filters.
filterPercentiles(states_driving, 75))
underq1_transit, overq1_transit, percentile_transit_25 = (filters.
filterPercentiles(states_transit, 25))
undermedian_transit, overmedian_transit, percentile_transit_50 = (filters
.filterPercentiles(states_transit, 50))
underq3_transit, overq3_transit, percentile_transit_75 = (filters.
filterPercentiles(states_transit, 75))
    over100_walking = filters.filerOver100(states_walking)
    underq1_states = states_walking[underq1, 1]
    overq3_states = states_walking[overq3, 1]
    over100_states = states_walking[over100_walking, 1]
    over100_driving = filters.filerOver100(states_driving)
    underq1_states_driving = states_driving[underq1_driving, 1]
    overq3_states_driving = states_driving[overq3_driving, 1]
    over100_states_driving = states_driving[over100_driving, 1]
    over100_transit = filters.filerOver100(states_transit)
    underq1_states_transit = states_transit[underq1_transit, 1]
    overq3_states_transit = states_transit[overq3_transit, 1]
    over100_states_transit = states_transit[over100_transit, 1]
    print('Walking under 25 percentile (far from normal) ' +
        percentile_walking_25.astype(np.str))
    print(underq1_states)
    print('Walking over 75 percentile (over normal transportation) ' +
        percentile_walking_75.astype(np.str))
    print(overq3_states)
    print('Walking over 100 in comparison to 13.1.2020')
    print(over100_states)
    print('Median value is ' + percentile_walking_50.astype(np.str))
    print(' ')
    print('Driving under 25 percentile (far from normal) ' +
        percentile_driving_25.astype(np.str))
    print(underq1_states_driving)
    print('Driving over 75 percentile (over normal transportation) ',
        percentile_driving_75.astype(np.str))
    print(overq3_states_driving)
    print('Driving over 100% in comparison to 13.1.2020')
    print(over100_states_driving)
    print('Median value is ' + percentile_driving_50.astype(np.str))
    print(' ')
    print('Transit under 25 percentile (far from normal) ' +
        percentile_transit_25.astype(np.str))
    print(underq1_states_transit)
    print('Transit over 75 percentile (over normal transportation) ',
        percentile_transit_75.astype(np.str))
    print(overq3_states_transit.astype(np.str))
    print('Transit over 100 in comparison to 13.1.2020')
    print(over100_states_transit)
    print('Median value is ' + percentile_transit_50.astype(np.str))
print(' ')
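# ---------------------------------------------------------------------------
# The ``filters`` module imported above is not included in this dump. The
# sketch below is only a guess at what its three helpers might look like,
# added to make the calls above easier to follow; the assumed column layout
# (geo_type in column 0, region in column 1, latest value in the last column)
# may not match the real module.
def _sketch_filterStates(data, transport='walking'):
    # keep rows for sub-regions with the requested transportation type
    mask = (data[:, 0] == 'sub-region') & (data[:, 2] == transport)  # assumed columns
    return data[mask]

def _sketch_filterPercentiles(rows, q):
    # return (indices below, indices above, the percentile value itself)
    values = rows[:, -1].astype(float)  # assumed: last column holds the value
    threshold = np.percentile(values, q)
    return np.where(values < threshold)[0], np.where(values > threshold)[0], threshold

def _sketch_filerOver100(rows):
    # indices of rows above the 100 baseline (activity on 2020-01-13 = 100)
    values = rows[:, -1].astype(float)
    return np.where(values > 100.0)[0]
# ---------------------------------------------------------------------------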
| import numpy as np
from datetime import date, timedelta, datetime
from pytz import timezone
import store
import psycopg2
import requests
import os
import filters
FIRST = 4
def prepareDate():
pc_tz = timezone('US/Pacific')
n = datetime.now(pc_tz)
nd = n.date()
store.updateStore(today=nd)
def getData():
today = store.mapStore("today")
npdata = store.mapStore("data")
filedate = np.datetime64(today - timedelta(days=2))
try:
url = 'https://covid19-static.cdn-apple.com/covid19-mobility-data/2007HotfixDev49/v2/en-us/applemobilitytrends-{}.csv'.format(filedate)
download = requests.get(url)
download.encoding = "utf-8"
temp_file = open("temp/temp.csv", 'w', encoding='utf8')
temp_file.writelines(download.text)
npcsv = np.genfromtxt("temp/temp.csv", delimiter=',', dtype=np.str, encoding='utf8', invalid_raise=False, missing_values = np.nan, filling_values=np.nan)
temp_file.close()
store.updateStore(data=npcsv)
except Exception as e:
exceptions = store.mapStore("exceptions")
exceptions.append(e)
        print("Not possible to read csv file.")
print(e)
def getDates():
dates = store.mapStore("dates")
data = store.mapStore("data")
exceptions = store.mapStore("exceptions")
if(len(exceptions) > 0):
return False
try:
d0 = date(2020, 1, 13)
d1 = data[0,FIRST:]
i = 0
newdates = []
while i <= d1.shape[0] - 1:
diffday = np.datetime64(d0 + timedelta(days=i))
newdates.append(diffday)
i += 1
newdates = np.concatenate((dates, newdates))
store.updateStore(dates=newdates)
except Exception as e:
exceptions = store.mapStore("exceptions")
exceptions.append(e)
print("Problems with handling data numpy array")
print(e)
return True
def addDataToDB(conn, filterData):
data = store.mapStore("data")
dates = store.mapStore("dates")
exceptions = store.mapStore("exceptions")
if(len(exceptions) > 0):
return False
dataValues = data[1:,FIRST:]
datesValues = dates
if(filterData is not None):
datesValues = datesValues[filterData]
dataValues = dataValues[:,filterData]
sql = "INSERT INTO apple_transport(geo_type, region, transportation_type, alternative_name, date, value) VALUES(%s, %s, %s, %s, %s, %s)"
for ix,iy in np.ndindex(dataValues.shape):
try:
date = datesValues[iy].astype(datetime)
values = data[ix+1, :FIRST]
values = tuple(values.tolist())
item = dataValues[ix, iy].item()
try:
item = float(item)
except:
item = None
values = values + tuple([date, item])
cursor = conn.cursor()
cursor.execute(sql, values)
conn.commit()
cursor.close()
except (Exception, psycopg2.DatabaseError) as error:
print(error)
exceptions = store.mapStore("exceptions")
exceptions.append(error)
def addPercentileMessageToDB():
data = store.mapStore("data")
states_walking = filters.filterStates(data[1:, :])
states_driving = filters.filterStates(data[1:, :], "driving")
states_transit = filters.filterStates(data[1:, :], "transit")
underq1, overq1, percentile_walking_25 = filters.filterPercentiles(states_walking, 25)
undermedian, overmedian, percentile_walking_50 = filters.filterPercentiles(states_walking, 50)
underq3, overq3, percentile_walking_75 = filters.filterPercentiles(states_walking, 75)
underq1_driving, overq1_driving, percentile_driving_25 = filters.filterPercentiles(states_driving, 25)
undermedian_driving, overmedian_driving, percentile_driving_50 = filters.filterPercentiles(states_driving, 50)
underq3_driving, overq3_driving, percentile_driving_75 = filters.filterPercentiles(states_driving, 75)
underq1_transit, overq1_transit, percentile_transit_25 = filters.filterPercentiles(states_transit, 25)
undermedian_transit, overmedian_transit, percentile_transit_50 = filters.filterPercentiles(states_transit, 50)
underq3_transit, overq3_transit, percentile_transit_75 = filters.filterPercentiles(states_transit, 75)
    over100_walking = filters.filerOver100(states_walking)
    underq1_states = states_walking[underq1,1]
    overq3_states = states_walking[overq3,1]
    over100_states = states_walking[over100_walking, 1]
    over100_driving = filters.filerOver100(states_driving)
    underq1_states_driving = states_driving[underq1_driving,1]
    overq3_states_driving = states_driving[overq3_driving,1]
    over100_states_driving = states_driving[over100_driving, 1]
    over100_transit = filters.filerOver100(states_transit)
    underq1_states_transit = states_transit[underq1_transit,1]
    overq3_states_transit = states_transit[overq3_transit,1]
    over100_states_transit = states_transit[over100_transit, 1]
    print("Walking under 25 percentile (far from normal) " + percentile_walking_25.astype(np.str))
    print(underq1_states)
    print("Walking over 75 percentile (over normal transportation) " + percentile_walking_75.astype(np.str))
    print(overq3_states)
    print("Walking over 100 in comparison to 13.1.2020")
    print(over100_states)
    print("Median value is " + percentile_walking_50.astype(np.str))
    print(" ")
    print("Driving under 25 percentile (far from normal) " + percentile_driving_25.astype(np.str))
    print(underq1_states_driving)
    print("Driving over 75 percentile (over normal transportation) ", percentile_driving_75.astype(np.str))
    print(overq3_states_driving)
    print("Driving over 100% in comparison to 13.1.2020")
    print(over100_states_driving)
    print("Median value is " + percentile_driving_50.astype(np.str))
    print(" ")
    print("Transit under 25 percentile (far from normal) " + percentile_transit_25.astype(np.str))
    print(underq1_states_transit)
    print("Transit over 75 percentile (over normal transportation) ", percentile_transit_75.astype(np.str))
    print(overq3_states_transit.astype(np.str))
    print("Transit over 100 in comparison to 13.1.2020")
    print(over100_states_transit)
    print("Median value is " + percentile_transit_50.astype(np.str))
print(" ")
| [
3,
4,
6,
7,
8
] |
1,945 | 96210942b01c510300120913bed1bc6d497a39a9 | <mask token>
| <mask token>
np.random.seed(1)
<mask token>
K.set_image_dim_ordering('th')
<mask token>
model.add(ZeroPadding2D((1, 1), input_shape=(3, img_width, img_height)))
model.add(Convolution2D(64, 3, 3, activation='relu', name='conv1_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(64, 3, 3, activation='relu', name='conv1_2'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(128, 3, 3, activation='relu', name='conv2_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(128, 3, 3, activation='relu', name='conv2_2'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_2'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_3'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_2'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_3'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_2'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_3'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
assert os.path.exists(weights_path
), "Model weights not found (see 'weights_path' variable in script)."
<mask token>
for k in range(f.attrs['nb_layers']):
if k >= len(model.layers):
break
g = f['layer_{}'.format(k)]
weights = [g['param_{}'.format(p)] for p in range(g.attrs['nb_params'])]
model.layers[k].set_weights(weights)
f.close()
print('Model loaded.\n')
<mask token>
top_model.add(Flatten(input_shape=model.output_shape[1:]))
top_model.add(Dense(256, activation='relu'))
top_model.add(Dropout(0.5))
top_model.add(Dense(nb_classes, activation='softmax'))
top_model.load_weights(top_model_weights_path)
model.add(top_model)
print('Final Model Assembled.\n')
<mask token>
img.load()
<mask token>
print(data.shape)
<mask token>
print('Prediction begins.\n')
<mask token>
print(output)
| <mask token>
os.environ['THEANO_FLAGS'] = 'device=gpu1, floatX=float32'
<mask token>
np.random.seed(1)
<mask token>
K.set_image_dim_ordering('th')
weights_path = 'E:\\Interesting\\Code Fun Do 2017\\vgg16_weights.h5'
top_model_weights_path = (
'E:\\Interesting\\Code Fun Do 2017\\bottleneck_fc_model.h5')
validation_data_dir = (
'E:\\Interesting\\Code Fun Do 2017\\Trial\\cercospora_leaf_spot_365.jpg')
img_width = 200
img_height = 200
input_shape = 3, img_height, img_width
batch_size = 32
nb_classes = 4
nb_epoch = 3
nb_train_samples = 50
nb_validation_samples = 25
model = Sequential()
model.add(ZeroPadding2D((1, 1), input_shape=(3, img_width, img_height)))
model.add(Convolution2D(64, 3, 3, activation='relu', name='conv1_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(64, 3, 3, activation='relu', name='conv1_2'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(128, 3, 3, activation='relu', name='conv2_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(128, 3, 3, activation='relu', name='conv2_2'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_2'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_3'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_2'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_3'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_2'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_3'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
assert os.path.exists(weights_path
), "Model weights not found (see 'weights_path' variable in script)."
f = h5py.File(weights_path)
for k in range(f.attrs['nb_layers']):
if k >= len(model.layers):
break
g = f['layer_{}'.format(k)]
weights = [g['param_{}'.format(p)] for p in range(g.attrs['nb_params'])]
model.layers[k].set_weights(weights)
f.close()
print('Model loaded.\n')
top_model = Sequential()
top_model.add(Flatten(input_shape=model.output_shape[1:]))
top_model.add(Dense(256, activation='relu'))
top_model.add(Dropout(0.5))
top_model.add(Dense(nb_classes, activation='softmax'))
top_model.load_weights(top_model_weights_path)
model.add(top_model)
print('Final Model Assembled.\n')
img = Image.open(validation_data_dir)
img.load()
data = np.asarray(img, dtype='int32')
print(data.shape)
data = data.transpose(2, 0, 1).reshape(1, 3, 200, 200)
print('Prediction begins.\n')
output = model.predict_classes(data, batch_size=32, verbose=1)
print(output)
| <mask token>
import os
os.environ['THEANO_FLAGS'] = 'device=gpu1, floatX=float32'
import theano
import numpy as np
np.random.seed(1)
import pandas as pd
import h5py
from keras.preprocessing.image import ImageDataGenerator
from keras.models import Sequential
from keras.layers import Convolution2D, MaxPooling2D, ZeroPadding2D
from keras.layers import Activation, Dropout, Flatten, Dense
from keras import backend as K
from PIL import Image
K.set_image_dim_ordering('th')
weights_path = 'E:\\Interesting\\Code Fun Do 2017\\vgg16_weights.h5'
top_model_weights_path = (
'E:\\Interesting\\Code Fun Do 2017\\bottleneck_fc_model.h5')
validation_data_dir = (
'E:\\Interesting\\Code Fun Do 2017\\Trial\\cercospora_leaf_spot_365.jpg')
img_width = 200
img_height = 200
input_shape = 3, img_height, img_width
batch_size = 32
nb_classes = 4
nb_epoch = 3
nb_train_samples = 50
nb_validation_samples = 25
model = Sequential()
model.add(ZeroPadding2D((1, 1), input_shape=(3, img_width, img_height)))
model.add(Convolution2D(64, 3, 3, activation='relu', name='conv1_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(64, 3, 3, activation='relu', name='conv1_2'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(128, 3, 3, activation='relu', name='conv2_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(128, 3, 3, activation='relu', name='conv2_2'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_2'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_3'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_2'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_3'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_2'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_3'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
assert os.path.exists(weights_path
), "Model weights not found (see 'weights_path' variable in script)."
f = h5py.File(weights_path)
for k in range(f.attrs['nb_layers']):
if k >= len(model.layers):
break
g = f['layer_{}'.format(k)]
weights = [g['param_{}'.format(p)] for p in range(g.attrs['nb_params'])]
model.layers[k].set_weights(weights)
f.close()
print('Model loaded.\n')
top_model = Sequential()
top_model.add(Flatten(input_shape=model.output_shape[1:]))
top_model.add(Dense(256, activation='relu'))
top_model.add(Dropout(0.5))
top_model.add(Dense(nb_classes, activation='softmax'))
top_model.load_weights(top_model_weights_path)
model.add(top_model)
print('Final Model Assembled.\n')
img = Image.open(validation_data_dir)
img.load()
data = np.asarray(img, dtype='int32')
print(data.shape)
data = data.transpose(2, 0, 1).reshape(1, 3, 200, 200)
print('Prediction begins.\n')
output = model.predict_classes(data, batch_size=32, verbose=1)
print(output)
| # -*- coding: utf-8 -*-
"""
Created on Sun Mar 5 02:39:55 2017
@author: sparsh
"""
"""
Crop Disease Classification Project for Code Fun Do 2017 - IIT Roorkee
"""
"""
File for predicting a test image.
"""
import os
os.environ['THEANO_FLAGS'] = "device=gpu1, floatX=float32"
import theano
import numpy as np
np.random.seed(1)
import pandas as pd
import h5py
from keras.preprocessing.image import ImageDataGenerator
from keras.models import Sequential
from keras.layers import Convolution2D, MaxPooling2D, ZeroPadding2D
from keras.layers import Activation, Dropout, Flatten, Dense
from keras import backend as K
from PIL import Image
K.set_image_dim_ordering('th')
#Path to model weights file
weights_path = "E:\\Interesting\\Code Fun Do 2017\\vgg16_weights.h5"
top_model_weights_path = "E:\\Interesting\\Code Fun Do 2017\\bottleneck_fc_model.h5"
#Unknown Image Location
validation_data_dir = "E:\\Interesting\\Code Fun Do 2017\\Trial\\cercospora_leaf_spot_365.jpg"
#validation_data_dir = "E:\\Interesting\\Code Fun Do 2017\\Trial"
#input image dimensions
img_width = 200
img_height = 200
input_shape = (3, img_height, img_width)
#Model parameters
batch_size = 32
nb_classes = 4
nb_epoch = 3
nb_train_samples = 50
nb_validation_samples = 25
# build the VGG16 network
model = Sequential()
model.add(ZeroPadding2D((1, 1), input_shape=(3, img_width, img_height)))
model.add(Convolution2D(64, 3, 3, activation='relu', name='conv1_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(64, 3, 3, activation='relu', name='conv1_2'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(128, 3, 3, activation='relu', name='conv2_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(128, 3, 3, activation='relu', name='conv2_2'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_2'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_3'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_2'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_3'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_2'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_3'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
# load the weights of the VGG16 networks
# (trained on ImageNet, won the ILSVRC competition in 2014)
# note: when there is a complete match between your model definition
# and your weight savefile, you can simply call model.load_weights(filename)
assert os.path.exists(weights_path), "Model weights not found (see 'weights_path' variable in script)."
f = h5py.File(weights_path)
for k in range(f.attrs['nb_layers']):
if k >= len(model.layers):
# we don't look at the last (fully-connected) layers in the savefile
break
g = f['layer_{}'.format(k)]
weights = [g['param_{}'.format(p)] for p in range(g.attrs['nb_params'])]
model.layers[k].set_weights(weights)
f.close()
print("Model loaded.\n")
# build a classifier model to put on top of the convolutional model
top_model = Sequential()
top_model.add(Flatten(input_shape=model.output_shape[1:]))
top_model.add(Dense(256, activation='relu'))
top_model.add(Dropout(0.5))
top_model.add(Dense(nb_classes, activation='softmax'))
# note that it is necessary to start with a fully-trained
# classifier, including the top classifier,
# in order to successfully do fine-tuning
top_model.load_weights(top_model_weights_path)
# add the model on top of the convolutional base
model.add(top_model)
print("Final Model Assembled.\n")
#datagen = ImageDataGenerator(rescale=1./255)
#generator = datagen.flow_from_directory(
# validation_data_dir,
# target_size=(img_width, img_height),
# batch_size=32,
# class_mode=None,
# shuffle=False)
#bottleneck_features_validation = model.predict_generator(generator, nb_validation_samples)
#np.save(open('bottleneck_features_validation.npy', 'w'), bottleneck_features_validation)
#print("Testing features stored.\n")
#data = np.load(open('bottleneck_features_validation.npy'))
img = Image.open(validation_data_dir)
img.load()
data = np.asarray(img, dtype="int32")
print(data.shape)
data = data.transpose(2, 0, 1).reshape(1, 3, 200, 200)  # reorder (H, W, C) -> (1, C, H, W) for Theano channel ordering
print("Prediction begins.\n")
output = model.predict_classes(data, batch_size=32, verbose=1)
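# (added note) predict_classes returns an array of class indices; turning an
# index back into a disease name needs the label order used by the training
# generator, which this script does not show. A hypothetical mapping:
#   class_labels = ['bacterial_blight', 'cercospora_leaf_spot', 'healthy', 'rust']
#   print(class_labels[int(output[0])])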
print(output) | [
0,
1,
2,
3,
4
] |
1,946 | e835e75f444e97ca948ce27504cc9149ea0092f6 | <mask token>
| def multiply(num1, num2):
return num1 * num2
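# added usage example:
#   multiply(6, 7)  # -> 42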
| null | null | null | [
0,
1
] |
1,947 | bea7853d1f3eac50825bc6eb10438f3f656d6d04 | <mask token>
| <mask token>
print(i)
<mask token>
print(i)
<mask token>
print(i)
<mask token>
print(i)
<mask token>
print(s)
<mask token>
print(s)
<mask token>
print(s)
<mask token>
print(s)
<mask token>
print(s)
<mask token>
print(s)
<mask token>
print(i)
print('value is', i)
print('value is', i)
| i = 5
print(i)
i = i + 1
print(i)
i = 1.1
print(i)
i = 'change i to a string '
print(i)
s = 'hello'
print(s)
s = 'hello'
print(s)
s = """This is a "multi-line" string.
This is the second line."""
print(s)
s = "'"
print(s)
s = '\\n'
print(s)
s = 'This is an example of the line-continuation character'
print(s)
i = 55
print(i)
print('value is', i)
print('value is', i)
# Filename : var.py
# an integer
i = 5
print(i)
i = i + 1
print(i)
# a float
i = 1.1
print(i)
# Python is dynamically typed: a variable can be rebound to a value of a different type at any time
i = 'change i to a string '
print(i)
s = 'hello'  # single quotes
print(s)
s = "hello"  # double quotes
print(s)
# triple quotes delimit a multi-line string
s = '''This is a "multi-line" string.
This is the second line.'''
print(s)
s = '\''  # the backslash is used for escaping
print(s)
# a string prefixed with r or R is a raw string: backslashes inside it are not treated as escapes
s = r'\n'
print(s)
s = 'This is an example of the \
line-continuation character'
print(s)
# here the backslash is a line-continuation character: a statement too long for one line can be split across several physical lines without changing its meaning
# it is strongly recommended to write only one logical statement per physical line, splitting a logical line across physical lines only when it is too long
# there is also an implicit form of line joining that needs no backslash: it applies when the logical line uses parentheses, square brackets, or curly braces (an example is added below, after the backslash demo)
i = \
55
print(i)
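# an added example of the implicit line joining mentioned above: inside
# parentheses (or brackets/braces) no backslash is needed
total = (1 +
         2 +
         3)
print(total)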
# statements at the same level must have the same indentation; each such group of statements is called a block, and incorrect indentation raises an error
print('value is',i)
print('value is',i)
| null | [
0,
1,
2,
3
] |
1,948 | 093b2afef7cdfb7070eb5e94e84624afe495db66 | <mask token>
def get_pos_name(code, name='parent', english=True, pos_tags=POS_MAP):
"""Gets the part of speech name for *code*.
:param str code: The part of speech code to lookup, e.g. ``'nsf'``.
:param str name: Which part of speech name to include in the output. Must
be one of ``'parent'``, ``'child'``, ``'all'``, or ``'raw'``.
Defaults to ``'parent'``. ``'parent'`` indicates that only the most
generic name should be used, e.g. ``'noun'`` for ``'nsf'``.
``'child'`` indicates that the most specific name should be used, e.g.
``'transcribed toponym'`` for ``'nsf'``. ``'all'`` indicates that all
names should be used, e.g. ``('noun', 'toponym',
'transcribed toponym')`` for ``'nsf'``. ``'raw'`` indicates that the
part of speech code is not transformed at all.
:param bool english: Whether to return an English or Chinese name.
:param dict pos_tags: Custom part of speech tags to use.
:returns: ``str`` if *name* is ``'parent'`` or ``'child'``.
``tuple`` if *name* is ``'all'``.
"""
return _get_pos_name(code, name, english, pos_tags)
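# Usage, restating the docstring's own examples:
#   get_pos_name('nsf')                -> 'noun'
#   get_pos_name('nsf', name='child')  -> 'transcribed toponym'
#   get_pos_name('nsf', name='all')    -> ('noun', 'toponym', 'transcribed toponym')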
| <mask token>
def _get_pos_name(pos_code, names='parent', english=True, pos_map=POS_MAP):
"""Gets the part of speech name for *pos_code*."""
if names not in ('parent', 'child', 'all', 'raw'):
raise ValueError(
"names must be one of 'parent', 'child', 'all', or 'raw'; not '{0}'"
.format(names))
logger.debug("Getting {0} POS name for '{1}' formatted as '{2}'.".
format('English' if english else 'Chinese', pos_code, names))
if names == 'raw':
return pos_code
pos_code = pos_code.lower()
for i in range(1, len(pos_code) + 1):
try:
pos_key = pos_code[0:i]
pos_entry = pos_map[pos_key]
break
except KeyError:
if i == len(pos_code):
logger.warning("part of speech not recognized: '{0}'".
format(pos_code))
return None
pos = pos_entry[1 if english else 0],
if names == 'parent':
logger.debug("Part of speech name found: '{0}'".format(pos[0]))
return pos[0]
if len(pos_entry) == 3 and pos_key != pos_code:
sub_map = pos_entry[2]
logger.debug(
"Found parent part of speech name '{0}'. Descending to look for child name for '{1}'"
.format(pos_entry[1], pos_code))
sub_pos = _get_pos_name(pos_code, names, english, sub_map)
if names == 'all':
pos = pos + sub_pos if sub_pos else pos
else:
pos = sub_pos,
name = pos if names == 'all' else pos[-1]
logger.debug("Part of speech name found: '{0}'".format(name))
return name
def get_pos_name(code, name='parent', english=True, pos_tags=POS_MAP):
"""Gets the part of speech name for *code*.
:param str code: The part of speech code to lookup, e.g. ``'nsf'``.
:param str name: Which part of speech name to include in the output. Must
be one of ``'parent'``, ``'child'``, ``'all'``, or ``'raw'``.
Defaults to ``'parent'``. ``'parent'`` indicates that only the most
generic name should be used, e.g. ``'noun'`` for ``'nsf'``.
``'child'`` indicates that the most specific name should be used, e.g.
``'transcribed toponym'`` for ``'nsf'``. ``'all'`` indicates that all
names should be used, e.g. ``('noun', 'toponym',
'transcribed toponym')`` for ``'nsf'``. ``'raw'`` indicates that the
part of speech code is not transformed at all.
:param bool english: Whether to return an English or Chinese name.
:param dict pos_tags: Custom part of speech tags to use.
:returns: ``str`` if *name* is ``'parent'`` or ``'child'``.
``tuple`` if *name* is ``'all'``.
"""
return _get_pos_name(code, name, english, pos_tags)
| <mask token>
logger = logging.getLogger('pynlpir.pos_map')
POS_MAP = {'n': ('名词', 'noun', {'nr': ('人名', 'personal name', {'nr1': (
'汉语姓氏', 'Chinese surname'), 'nr2': ('汉语名字', 'Chinese given name'),
'nrj': ('日语人名', 'Japanese personal name'), 'nrf': ('音译人名',
'transcribed personal name')}), 'ns': ('地名', 'toponym', {'nsf': ('音译地名',
'transcribed toponym')}), 'nt': ('机构团体名', 'organization/group name'),
'nz': ('其它专名', 'other proper noun'), 'nl': ('名词性惯用语', 'noun phrase'),
'ng': ('名词性语素', 'noun morpheme')}), 't': ('时间词', 'time word', {'tg': (
'时间词性语素', 'time morpheme')}), 's': ('处所词', 'locative word'), 'f': (
'方位词', 'noun of locality'), 'v': ('动词', 'verb', {'vd': ('副动词',
'auxiliary verb'), 'vn': ('名动词', 'noun-verb'), 'vshi': ('动词"是"',
'verb 是'), 'vyou': ('动词"有"', 'verb 有'), 'vf': ('趋向动词',
'directional verb'), 'vx': ('行事动词', 'performative verb'), 'vi': (
'不及物动词', 'intransitive verb'), 'vl': ('动词性惯用语', 'verb phrase'), 'vg': (
'动词性语素', 'verb morpheme')}), 'a': ('形容词', 'adjective', {'ad': ('副形词',
'auxiliary adjective'), 'an': ('名形词', 'noun-adjective'), 'ag': (
'形容词性语素', 'adjective morpheme'), 'al': ('形容词性惯用语', 'adjective phrase')}
), 'b': ('区别词', 'distinguishing word', {'bl': ('区别词性惯用语',
'distinguishing phrase')}), 'z': ('状态词', 'status word'), 'r': ('代词',
'pronoun', {'rr': ('人称代词', 'personal pronoun'), 'rz': ('指示代词',
'demonstrative pronoun', {'rzt': ('时间指示代词',
'temporal demonstrative pronoun'), 'rzs': ('处所指示代词',
'locative demonstrative pronoun'), 'rzv': ('谓词性指示代词',
'predicate demonstrative pronoun')}), 'ry': ('疑问代词',
'interrogative pronoun', {'ryt': ('时间疑问代词',
'temporal interrogative pronoun'), 'rys': ('处所疑问代词',
'locative interrogative pronoun'), 'ryv': ('谓词性疑问代词',
'predicate interrogative pronoun')}), 'rg': ('代词性语素',
'pronoun morpheme')}), 'm': ('数词', 'numeral', {'mq': ('数量词',
'numeral-plus-classifier compound'), 'mg': ('干支', 'zodiac')}), 'q': (
'量词', 'classifier', {'qv': ('动量词', 'verbal classifier'), 'qt': ('时量词',
'temporal classifier')}), 'd': ('副词', 'adverb'), 'p': ('介词',
'preposition', {'pba': ('介词“把”', 'preposition 把'), 'pbei': ('介词“被”',
'preposition 被')}), 'c': ('连词', 'conjunction', {'cc': ('并列连词',
'coordinating conjunction')}), 'u': ('助词', 'particle', {'uzhe': ('着',
'particle 着'), 'ule': ('了/喽', 'particle 了/喽'), 'uguo': ('过',
'particle 过'), 'ude1': ('的/底', 'particle 的/底'), 'ude2': ('地',
'particle 地'), 'ude3': ('得', 'particle 得'), 'usuo': ('所', 'particle 所'),
'udeng': ('等/等等/云云', 'particle 等/等等/云云'), 'uyy': ('一样/一般/似的/般',
'particle 一样/一般/似的/般'), 'udh': ('的话', 'particle 的话'), 'uls': (
'来讲/来说/而言/说来', 'particle 来讲/来说/而言/说来'), 'uzhi': ('之', 'particle 之'),
'ulian': ('连', 'particle 连')}), 'e': ('叹词', 'interjection'), 'y': (
'语气词', 'modal particle'), 'o': ('拟声词', 'onomatopoeia'), 'h': ('前缀',
'prefix'), 'k': ('后缀', 'suffix'), 'x': ('字符串', 'string', {'xe': (
'Email字符串', 'email address'), 'xs': ('微博会话分隔符', 'hashtag'), 'xm': (
'表情符合', 'emoticon'), 'xu': ('网址URL', 'URL'), 'xx': ('非语素字',
'non-morpheme character')}), 'w': ('标点符号', 'punctuation mark', {'wkz':
('左括号', 'left parenthesis/bracket'), 'wky': ('右括号',
'right parenthesis/bracket'), 'wyz': ('左引号', 'left quotation mark'),
'wyy': ('右引号', 'right quotation mark'), 'wj': ('句号', 'period'), 'ww': (
'问号', 'question mark'), 'wt': ('叹号', 'exclamation mark'), 'wd': ('逗号',
'comma'), 'wf': ('分号', 'semicolon'), 'wn': ('顿号', 'enumeration comma'),
'wm': ('冒号', 'colon'), 'ws': ('省略号', 'ellipsis'), 'wp': ('破折号', 'dash'),
'wb': ('百分号千分号', 'percent/per mille sign'), 'wh': ('单位符号',
'unit of measure sign')}), 'g': ('复合语', 'multiword expression'), 'j': (
'略语', 'abbreviation')}
def _get_pos_name(pos_code, names='parent', english=True, pos_map=POS_MAP):
"""Gets the part of speech name for *pos_code*."""
if names not in ('parent', 'child', 'all', 'raw'):
raise ValueError(
"names must be one of 'parent', 'child', 'all', or 'raw'; not '{0}'"
.format(names))
logger.debug("Getting {0} POS name for '{1}' formatted as '{2}'.".
format('English' if english else 'Chinese', pos_code, names))
if names == 'raw':
return pos_code
pos_code = pos_code.lower()
for i in range(1, len(pos_code) + 1):
try:
pos_key = pos_code[0:i]
pos_entry = pos_map[pos_key]
break
except KeyError:
if i == len(pos_code):
logger.warning("part of speech not recognized: '{0}'".
format(pos_code))
return None
pos = pos_entry[1 if english else 0],
if names == 'parent':
logger.debug("Part of speech name found: '{0}'".format(pos[0]))
return pos[0]
if len(pos_entry) == 3 and pos_key != pos_code:
sub_map = pos_entry[2]
logger.debug(
"Found parent part of speech name '{0}'. Descending to look for child name for '{1}'"
.format(pos_entry[1], pos_code))
sub_pos = _get_pos_name(pos_code, names, english, sub_map)
if names == 'all':
pos = pos + sub_pos if sub_pos else pos
else:
pos = sub_pos,
name = pos if names == 'all' else pos[-1]
logger.debug("Part of speech name found: '{0}'".format(name))
return name
def get_pos_name(code, name='parent', english=True, pos_tags=POS_MAP):
"""Gets the part of speech name for *code*.
:param str code: The part of speech code to lookup, e.g. ``'nsf'``.
:param str name: Which part of speech name to include in the output. Must
be one of ``'parent'``, ``'child'``, ``'all'``, or ``'raw'``.
Defaults to ``'parent'``. ``'parent'`` indicates that only the most
generic name should be used, e.g. ``'noun'`` for ``'nsf'``.
``'child'`` indicates that the most specific name should be used, e.g.
``'transcribed toponym'`` for ``'nsf'``. ``'all'`` indicates that all
names should be used, e.g. ``('noun', 'toponym',
'transcribed toponym')`` for ``'nsf'``. ``'raw'`` indicates that the
part of speech code is not transformed at all.
:param bool english: Whether to return an English or Chinese name.
:param dict pos_tags: Custom part of speech tags to use.
:returns: ``str`` if *name* is ``'parent'`` or ``'child'``.
``tuple`` if *name* is ``'all'``.
"""
return _get_pos_name(code, name, english, pos_tags)
| <mask token>
import logging
logger = logging.getLogger('pynlpir.pos_map')
POS_MAP = {'n': ('名词', 'noun', {'nr': ('人名', 'personal name', {'nr1': (
'汉语姓氏', 'Chinese surname'), 'nr2': ('汉语名字', 'Chinese given name'),
'nrj': ('日语人名', 'Japanese personal name'), 'nrf': ('音译人名',
'transcribed personal name')}), 'ns': ('地名', 'toponym', {'nsf': ('音译地名',
'transcribed toponym')}), 'nt': ('机构团体名', 'organization/group name'),
'nz': ('其它专名', 'other proper noun'), 'nl': ('名词性惯用语', 'noun phrase'),
'ng': ('名词性语素', 'noun morpheme')}), 't': ('时间词', 'time word', {'tg': (
'时间词性语素', 'time morpheme')}), 's': ('处所词', 'locative word'), 'f': (
'方位词', 'noun of locality'), 'v': ('动词', 'verb', {'vd': ('副动词',
'auxiliary verb'), 'vn': ('名动词', 'noun-verb'), 'vshi': ('动词"是"',
'verb 是'), 'vyou': ('动词"有"', 'verb 有'), 'vf': ('趋向动词',
'directional verb'), 'vx': ('行事动词', 'performative verb'), 'vi': (
'不及物动词', 'intransitive verb'), 'vl': ('动词性惯用语', 'verb phrase'), 'vg': (
'动词性语素', 'verb morpheme')}), 'a': ('形容词', 'adjective', {'ad': ('副形词',
'auxiliary adjective'), 'an': ('名形词', 'noun-adjective'), 'ag': (
'形容词性语素', 'adjective morpheme'), 'al': ('形容词性惯用语', 'adjective phrase')}
), 'b': ('区别词', 'distinguishing word', {'bl': ('区别词性惯用语',
'distinguishing phrase')}), 'z': ('状态词', 'status word'), 'r': ('代词',
'pronoun', {'rr': ('人称代词', 'personal pronoun'), 'rz': ('指示代词',
'demonstrative pronoun', {'rzt': ('时间指示代词',
'temporal demonstrative pronoun'), 'rzs': ('处所指示代词',
'locative demonstrative pronoun'), 'rzv': ('谓词性指示代词',
'predicate demonstrative pronoun')}), 'ry': ('疑问代词',
'interrogative pronoun', {'ryt': ('时间疑问代词',
'temporal interrogative pronoun'), 'rys': ('处所疑问代词',
'locative interrogative pronoun'), 'ryv': ('谓词性疑问代词',
'predicate interrogative pronoun')}), 'rg': ('代词性语素',
'pronoun morpheme')}), 'm': ('数词', 'numeral', {'mq': ('数量词',
'numeral-plus-classifier compound'), 'mg': ('干支', 'zodiac')}), 'q': (
'量词', 'classifier', {'qv': ('动量词', 'verbal classifier'), 'qt': ('时量词',
'temporal classifier')}), 'd': ('副词', 'adverb'), 'p': ('介词',
'preposition', {'pba': ('介词“把”', 'preposition 把'), 'pbei': ('介词“被”',
'preposition 被')}), 'c': ('连词', 'conjunction', {'cc': ('并列连词',
'coordinating conjunction')}), 'u': ('助词', 'particle', {'uzhe': ('着',
'particle 着'), 'ule': ('了/喽', 'particle 了/喽'), 'uguo': ('过',
'particle 过'), 'ude1': ('的/底', 'particle 的/底'), 'ude2': ('地',
'particle 地'), 'ude3': ('得', 'particle 得'), 'usuo': ('所', 'particle 所'),
'udeng': ('等/等等/云云', 'particle 等/等等/云云'), 'uyy': ('一样/一般/似的/般',
'particle 一样/一般/似的/般'), 'udh': ('的话', 'particle 的话'), 'uls': (
'来讲/来说/而言/说来', 'particle 来讲/来说/而言/说来'), 'uzhi': ('之', 'particle 之'),
'ulian': ('连', 'particle 连')}), 'e': ('叹词', 'interjection'), 'y': (
'语气词', 'modal particle'), 'o': ('拟声词', 'onomatopoeia'), 'h': ('前缀',
'prefix'), 'k': ('后缀', 'suffix'), 'x': ('字符串', 'string', {'xe': (
'Email字符串', 'email address'), 'xs': ('微博会话分隔符', 'hashtag'), 'xm': (
'表情符合', 'emoticon'), 'xu': ('网址URL', 'URL'), 'xx': ('非语素字',
'non-morpheme character')}), 'w': ('标点符号', 'punctuation mark', {'wkz':
('左括号', 'left parenthesis/bracket'), 'wky': ('右括号',
'right parenthesis/bracket'), 'wyz': ('左引号', 'left quotation mark'),
'wyy': ('右引号', 'right quotation mark'), 'wj': ('句号', 'period'), 'ww': (
'问号', 'question mark'), 'wt': ('叹号', 'exclamation mark'), 'wd': ('逗号',
'comma'), 'wf': ('分号', 'semicolon'), 'wn': ('顿号', 'enumeration comma'),
'wm': ('冒号', 'colon'), 'ws': ('省略号', 'ellipsis'), 'wp': ('破折号', 'dash'),
'wb': ('百分号千分号', 'percent/per mille sign'), 'wh': ('单位符号',
'unit of measure sign')}), 'g': ('复合语', 'multiword expression'), 'j': (
'略语', 'abbreviation')}
def _get_pos_name(pos_code, names='parent', english=True, pos_map=POS_MAP):
"""Gets the part of speech name for *pos_code*."""
if names not in ('parent', 'child', 'all', 'raw'):
raise ValueError(
"names must be one of 'parent', 'child', 'all', or 'raw'; not '{0}'"
.format(names))
logger.debug("Getting {0} POS name for '{1}' formatted as '{2}'.".
format('English' if english else 'Chinese', pos_code, names))
if names == 'raw':
return pos_code
pos_code = pos_code.lower()
for i in range(1, len(pos_code) + 1):
try:
pos_key = pos_code[0:i]
pos_entry = pos_map[pos_key]
break
except KeyError:
if i == len(pos_code):
logger.warning("part of speech not recognized: '{0}'".
format(pos_code))
return None
pos = pos_entry[1 if english else 0],
if names == 'parent':
logger.debug("Part of speech name found: '{0}'".format(pos[0]))
return pos[0]
if len(pos_entry) == 3 and pos_key != pos_code:
sub_map = pos_entry[2]
logger.debug(
"Found parent part of speech name '{0}'. Descending to look for child name for '{1}'"
.format(pos_entry[1], pos_code))
sub_pos = _get_pos_name(pos_code, names, english, sub_map)
if names == 'all':
pos = pos + sub_pos if sub_pos else pos
else:
pos = sub_pos,
name = pos if names == 'all' else pos[-1]
logger.debug("Part of speech name found: '{0}'".format(name))
return name
def get_pos_name(code, name='parent', english=True, pos_tags=POS_MAP):
"""Gets the part of speech name for *code*.
:param str code: The part of speech code to lookup, e.g. ``'nsf'``.
:param str name: Which part of speech name to include in the output. Must
be one of ``'parent'``, ``'child'``, ``'all'``, or ``'raw'``.
Defaults to ``'parent'``. ``'parent'`` indicates that only the most
generic name should be used, e.g. ``'noun'`` for ``'nsf'``.
``'child'`` indicates that the most specific name should be used, e.g.
``'transcribed toponym'`` for ``'nsf'``. ``'all'`` indicates that all
names should be used, e.g. ``('noun', 'toponym',
'transcribed toponym')`` for ``'nsf'``. ``'raw'`` indicates that the
part of speech code is not transformed at all.
:param bool english: Whether to return an English or Chinese name.
:param dict pos_tags: Custom part of speech tags to use.
:returns: ``str`` if *name* is ``'parent'`` or ``'child'``.
``tuple`` if *name* is ``'all'``.
"""
return _get_pos_name(code, name, english, pos_tags)
| # -*- coding: utf-8 -*-
"""Part of speech mapping constants and functions for NLPIR/ICTCLAS.
This module is used by :mod:`pynlpir` to format segmented words for output.
"""
import logging
logger = logging.getLogger("pynlpir.pos_map")
#: A dictionary that maps part of speech codes returned by NLPIR to
#: human-readable names (English and Chinese).
POS_MAP = {
"n": (
"名词",
"noun",
{
"nr": (
"人名",
"personal name",
{
"nr1": ("汉语姓氏", "Chinese surname"),
"nr2": ("汉语名字", "Chinese given name"),
"nrj": ("日语人名", "Japanese personal name"),
"nrf": ("音译人名", "transcribed personal name"),
},
),
"ns": (
"地名",
"toponym",
{
"nsf": ("音译地名", "transcribed toponym"),
},
),
"nt": ("机构团体名", "organization/group name"),
"nz": ("其它专名", "other proper noun"),
"nl": ("名词性惯用语", "noun phrase"),
"ng": ("名词性语素", "noun morpheme"),
},
),
"t": (
"时间词",
"time word",
{
"tg": ("时间词性语素", "time morpheme"),
},
),
"s": ("处所词", "locative word"),
"f": ("方位词", "noun of locality"),
"v": (
"动词",
"verb",
{
"vd": ("副动词", "auxiliary verb"),
"vn": ("名动词", "noun-verb"),
"vshi": ('动词"是"', "verb 是"),
"vyou": ('动词"有"', "verb 有"),
"vf": ("趋向动词", "directional verb"),
"vx": ("行事动词", "performative verb"),
"vi": ("不及物动词", "intransitive verb"),
"vl": ("动词性惯用语", "verb phrase"),
"vg": ("动词性语素", "verb morpheme"),
},
),
"a": (
"形容词",
"adjective",
{
"ad": ("副形词", "auxiliary adjective"),
"an": ("名形词", "noun-adjective"),
"ag": ("形容词性语素", "adjective morpheme"),
"al": ("形容词性惯用语", "adjective phrase"),
},
),
"b": (
"区别词",
"distinguishing word",
{
"bl": ("区别词性惯用语", "distinguishing phrase"),
},
),
"z": ("状态词", "status word"),
"r": (
"代词",
"pronoun",
{
"rr": ("人称代词", "personal pronoun"),
"rz": (
"指示代词",
"demonstrative pronoun",
{
"rzt": ("时间指示代词", "temporal demonstrative pronoun"),
"rzs": ("处所指示代词", "locative demonstrative pronoun"),
"rzv": ("谓词性指示代词", "predicate demonstrative pronoun"),
},
),
"ry": (
"疑问代词",
"interrogative pronoun",
{
"ryt": ("时间疑问代词", "temporal interrogative pronoun"),
"rys": ("处所疑问代词", "locative interrogative pronoun"),
"ryv": ("谓词性疑问代词", "predicate interrogative pronoun"),
},
),
"rg": ("代词性语素", "pronoun morpheme"),
},
),
"m": (
"数词",
"numeral",
{
"mq": ("数量词", "numeral-plus-classifier compound"),
"mg": ("干支", "zodiac"),
},
),
"q": (
"量词",
"classifier",
{
"qv": ("动量词", "verbal classifier"),
"qt": ("时量词", "temporal classifier"),
},
),
"d": ("副词", "adverb"),
"p": (
"介词",
"preposition",
{
"pba": ("介词“把”", "preposition 把"),
"pbei": ("介词“被”", "preposition 被"),
},
),
"c": (
"连词",
"conjunction",
{
"cc": ("并列连词", "coordinating conjunction"),
},
),
"u": (
"助词",
"particle",
{
"uzhe": ("着", "particle 着"),
"ule": ("了/喽", "particle 了/喽"),
"uguo": ("过", "particle 过"),
"ude1": ("的/底", "particle 的/底"),
"ude2": ("地", "particle 地"),
"ude3": ("得", "particle 得"),
"usuo": ("所", "particle 所"),
"udeng": ("等/等等/云云", "particle 等/等等/云云"),
"uyy": ("一样/一般/似的/般", "particle 一样/一般/似的/般"),
"udh": ("的话", "particle 的话"),
"uls": ("来讲/来说/而言/说来", "particle 来讲/来说/而言/说来"),
"uzhi": ("之", "particle 之"),
"ulian": ("连", "particle 连"),
},
),
"e": ("叹词", "interjection"),
"y": ("语气词", "modal particle"),
"o": ("拟声词", "onomatopoeia"),
"h": ("前缀", "prefix"),
"k": ("后缀", "suffix"),
"x": (
"字符串",
"string",
{
"xe": ("Email字符串", "email address"),
"xs": ("微博会话分隔符", "hashtag"),
"xm": ("表情符合", "emoticon"),
"xu": ("网址URL", "URL"),
"xx": ("非语素字", "non-morpheme character"),
},
),
"w": (
"标点符号",
"punctuation mark",
{
"wkz": ("左括号", "left parenthesis/bracket"),
"wky": ("右括号", "right parenthesis/bracket"),
"wyz": ("左引号", "left quotation mark"),
"wyy": ("右引号", "right quotation mark"),
"wj": ("句号", "period"),
"ww": ("问号", "question mark"),
"wt": ("叹号", "exclamation mark"),
"wd": ("逗号", "comma"),
"wf": ("分号", "semicolon"),
"wn": ("顿号", "enumeration comma"),
"wm": ("冒号", "colon"),
"ws": ("省略号", "ellipsis"),
"wp": ("破折号", "dash"),
"wb": ("百分号千分号", "percent/per mille sign"),
"wh": ("单位符号", "unit of measure sign"),
},
),
"g": ("复合语", "multiword expression"),
"j": ("略语", "abbreviation"),
}
def _get_pos_name(pos_code, names="parent", english=True, pos_map=POS_MAP):
"""Gets the part of speech name for *pos_code*."""
if names not in ("parent", "child", "all", "raw"):
raise ValueError(
"names must be one of 'parent', 'child', 'all', or "
"'raw'; not '{0}'".format(names)
)
logger.debug(
"Getting {0} POS name for '{1}' formatted as '{2}'.".format(
"English" if english else "Chinese", pos_code, names
)
)
if names == "raw":
return pos_code
pos_code = pos_code.lower() # Issue #10
for i in range(1, len(pos_code) + 1):
try:
pos_key = pos_code[0:i]
pos_entry = pos_map[pos_key]
break
except KeyError:
if i == len(pos_code):
logger.warning("part of speech not recognized: '{0}'".format(pos_code))
return None # Issue #20
pos = (pos_entry[1 if english else 0],)
if names == "parent":
logger.debug("Part of speech name found: '{0}'".format(pos[0]))
return pos[0]
if len(pos_entry) == 3 and pos_key != pos_code:
sub_map = pos_entry[2]
logger.debug(
"Found parent part of speech name '{0}'. Descending to "
"look for child name for '{1}'".format(pos_entry[1], pos_code)
)
sub_pos = _get_pos_name(pos_code, names, english, sub_map)
if names == "all":
# sub_pos can be None sometimes (e.g. for a word '甲')
pos = pos + sub_pos if sub_pos else pos
else:
pos = (sub_pos,)
name = pos if names == "all" else pos[-1]
logger.debug("Part of speech name found: '{0}'".format(name))
return name
def get_pos_name(code, name="parent", english=True, pos_tags=POS_MAP):
"""Gets the part of speech name for *code*.
:param str code: The part of speech code to lookup, e.g. ``'nsf'``.
:param str name: Which part of speech name to include in the output. Must
be one of ``'parent'``, ``'child'``, ``'all'``, or ``'raw'``.
Defaults to ``'parent'``. ``'parent'`` indicates that only the most
generic name should be used, e.g. ``'noun'`` for ``'nsf'``.
``'child'`` indicates that the most specific name should be used, e.g.
``'transcribed toponym'`` for ``'nsf'``. ``'all'`` indicates that all
names should be used, e.g. ``('noun', 'toponym',
'transcribed toponym')`` for ``'nsf'``. ``'raw'`` indicates that the
part of speech code is not transformed at all.
:param bool english: Whether to return an English or Chinese name.
:param dict pos_tags: Custom part of speech tags to use.
:returns: ``str`` if *name* is ``'parent'`` or ``'child'``.
``tuple`` if *name* is ``'all'``.
"""
return _get_pos_name(code, name, english, pos_tags)
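# --- Illustrative usage (not part of pynlpir itself); the expected values
# below are taken from the docstring above rather than from a live run:
# >>> get_pos_name('nsf')                 # most generic name
# 'noun'
# >>> get_pos_name('nsf', name='child')   # most specific name
# 'transcribed toponym'
# >>> get_pos_name('nsf', name='all')     # full chain of names
# ('noun', 'toponym', 'transcribed toponym')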
| [
1,
2,
3,
4,
5
] |
1,949 | 47817d6cf58ac54e501ed24ae3ababc821bdd5c8 | <mask token>
def getTracks(result):
data = json.loads(result.content.decode('utf-8'))
tracks = data['response']['items']
tracks.reverse()
return tracks
def getMp3FromM3u8(url):
if url.find('index.m3u8?') == -1:
return url
parts = url.split('/')
newUrl = parts[0] + '//' + parts[2] + '/' + parts[3] + '/' + parts[5
] + '.mp3'
return newUrl
@bot.message_handler(content_types=['text'])
def get_text_messages(message):
if message.text == '/start':
bot.send_message(message.from_user.id,
            'Moscow Music Bot. Enter the number of tracks')
elif message.text == '/help':
        bot.send_message(message.from_user.id, 'Enter the number of tracks')
else:
try:
count = int(message.text)
tracks = getTracks(sess.get(
'https://api.vk.com/method/audio.get', params=[(
'access_token', token), ('count', count), ('v', '5.95')]))
for track in tracks:
title = track['title']
artist = track['artist']
duration = track['duration']
url = getMp3FromM3u8(track['url'])
file = urllib.request.urlopen(url)
try:
bot.send_audio(message.from_user.id, file, duration=
duration, title=title, performer=artist)
except:
bot.send_message(message.from_user.id,
                        'Error loading {}'.format(title))
except:
            bot.send_message(message.from_user.id, 'Execution error')
<mask token>
| <mask token>
try:
if len(sys.argv) != 4:
raise Exception
botApiKey = sys.argv[1]
login = sys.argv[2]
password = sys.argv[3]
except:
print('Not enough arguments')
print('Example: py filename.py botApiKey login password')
print('')
sys.exit()
try:
bot = telebot.TeleBot(botApiKey)
except:
print('Bot Error: Check botApiKey')
print('')
sys.exit()
try:
tokenObj = get_vk_official_token(login, password)
except:
print('Login Error: Check login and password')
print('')
sys.exit()
<mask token>
sess.headers.update({'User-Agent': user_agent})
def getTracks(result):
data = json.loads(result.content.decode('utf-8'))
tracks = data['response']['items']
tracks.reverse()
return tracks
def getMp3FromM3u8(url):
if url.find('index.m3u8?') == -1:
return url
parts = url.split('/')
newUrl = parts[0] + '//' + parts[2] + '/' + parts[3] + '/' + parts[5
] + '.mp3'
return newUrl
@bot.message_handler(content_types=['text'])
def get_text_messages(message):
if message.text == '/start':
bot.send_message(message.from_user.id,
            'Moscow Music Bot. Enter the number of tracks')
elif message.text == '/help':
        bot.send_message(message.from_user.id, 'Enter the number of tracks')
else:
try:
count = int(message.text)
tracks = getTracks(sess.get(
'https://api.vk.com/method/audio.get', params=[(
'access_token', token), ('count', count), ('v', '5.95')]))
for track in tracks:
title = track['title']
artist = track['artist']
duration = track['duration']
url = getMp3FromM3u8(track['url'])
file = urllib.request.urlopen(url)
try:
bot.send_audio(message.from_user.id, file, duration=
duration, title=title, performer=artist)
except:
bot.send_message(message.from_user.id,
                        'Error loading {}'.format(title))
except:
            bot.send_message(message.from_user.id, 'Execution error')
bot.infinity_polling()
| <mask token>
try:
if len(sys.argv) != 4:
raise Exception
botApiKey = sys.argv[1]
login = sys.argv[2]
password = sys.argv[3]
except:
print('Not enough arguments')
print('Example: py filename.py botApiKey login password')
print('')
sys.exit()
try:
bot = telebot.TeleBot(botApiKey)
except:
print('Bot Error: Check botApiKey')
print('')
sys.exit()
try:
tokenObj = get_vk_official_token(login, password)
except:
print('Login Error: Check login and password')
print('')
sys.exit()
token = tokenObj['token']
user_agent = tokenObj['user_agent']
sess = requests.session()
sess.headers.update({'User-Agent': user_agent})
def getTracks(result):
data = json.loads(result.content.decode('utf-8'))
tracks = data['response']['items']
tracks.reverse()
return tracks
def getMp3FromM3u8(url):
if url.find('index.m3u8?') == -1:
return url
parts = url.split('/')
newUrl = parts[0] + '//' + parts[2] + '/' + parts[3] + '/' + parts[5
] + '.mp3'
return newUrl
@bot.message_handler(content_types=['text'])
def get_text_messages(message):
if message.text == '/start':
bot.send_message(message.from_user.id,
            'Moscow Music Bot. Enter the number of tracks')
elif message.text == '/help':
        bot.send_message(message.from_user.id, 'Enter the number of tracks')
else:
try:
count = int(message.text)
tracks = getTracks(sess.get(
'https://api.vk.com/method/audio.get', params=[(
'access_token', token), ('count', count), ('v', '5.95')]))
for track in tracks:
title = track['title']
artist = track['artist']
duration = track['duration']
url = getMp3FromM3u8(track['url'])
file = urllib.request.urlopen(url)
try:
bot.send_audio(message.from_user.id, file, duration=
duration, title=title, performer=artist)
except:
bot.send_message(message.from_user.id,
                        'Error loading {}'.format(title))
except:
            bot.send_message(message.from_user.id, 'Execution error')
bot.infinity_polling()
| from vkaudiotoken import get_vk_official_token
import requests
import json
import telebot
import urllib
import sys
try:
if len(sys.argv) != 4:
raise Exception
botApiKey = sys.argv[1]
login = sys.argv[2]
password = sys.argv[3]
except:
print('Not enough arguments')
print('Example: py filename.py botApiKey login password')
print('')
sys.exit()
try:
bot = telebot.TeleBot(botApiKey)
except:
print('Bot Error: Check botApiKey')
print('')
sys.exit()
try:
tokenObj = get_vk_official_token(login, password)
except:
print('Login Error: Check login and password')
print('')
sys.exit()
token = tokenObj['token']
user_agent = tokenObj['user_agent']
sess = requests.session()
sess.headers.update({'User-Agent': user_agent})
def getTracks(result):
data = json.loads(result.content.decode('utf-8'))
tracks = data['response']['items']
tracks.reverse()
return tracks
def getMp3FromM3u8(url):
if url.find('index.m3u8?') == -1:
return url
parts = url.split('/')
newUrl = parts[0] + '//' + parts[2] + '/' + parts[3] + '/' + parts[5
] + '.mp3'
return newUrl
@bot.message_handler(content_types=['text'])
def get_text_messages(message):
if message.text == '/start':
bot.send_message(message.from_user.id,
            'Moscow Music Bot. Enter the number of tracks')
elif message.text == '/help':
        bot.send_message(message.from_user.id, 'Enter the number of tracks')
else:
try:
count = int(message.text)
tracks = getTracks(sess.get(
'https://api.vk.com/method/audio.get', params=[(
'access_token', token), ('count', count), ('v', '5.95')]))
for track in tracks:
title = track['title']
artist = track['artist']
duration = track['duration']
url = getMp3FromM3u8(track['url'])
file = urllib.request.urlopen(url)
try:
bot.send_audio(message.from_user.id, file, duration=
duration, title=title, performer=artist)
except:
bot.send_message(message.from_user.id,
                        'Error loading {}'.format(title))
except:
            bot.send_message(message.from_user.id, 'Execution error')
bot.infinity_polling()
| from vkaudiotoken import get_vk_official_token
import requests
import json
import telebot
import urllib
import sys
#check start args
try:
if len(sys.argv) != 4:
raise Exception
botApiKey = sys.argv[1]
login = sys.argv[2]
password = sys.argv[3]
except:
print('Not enough arguments')
print('Example: py filename.py botApiKey login password')
print('')
sys.exit()
#check apikey
try:
bot = telebot.TeleBot(botApiKey)
except:
print('Bot Error: Check botApiKey')
print('')
sys.exit()
#check vk auth
try:
tokenObj = get_vk_official_token(login, password)
except:
print('Login Error: Check login and password')
print('')
sys.exit()
#create vk session
token = tokenObj['token']
user_agent = tokenObj['user_agent']
sess = requests.session()
sess.headers.update({'User-Agent': user_agent})
#trackList transform
def getTracks(result):
data = json.loads(result.content.decode('utf-8'))
tracks = data['response']['items']
tracks.reverse()
return tracks
#m3u8 url convert to mp3 url
def getMp3FromM3u8(url):
if url.find("index.m3u8?") == -1:
return url
parts = url.split('/')
newUrl = parts[0] + '//' + parts[2] + '/' + parts[3] + '/' + parts[5] + '.mp3'
return newUrl
#telegram bot
@bot.message_handler(content_types=['text'])
def get_text_messages(message):
if message.text == "/start":
bot.send_message(message.from_user.id, "Moscow Music Bot. Введите число треков")
elif message.text == "/help":
bot.send_message(message.from_user.id, "Введите число треков")
else:
try:
count = int(message.text)
tracks = getTracks(sess.get(
"https://api.vk.com/method/audio.get",
params=[('access_token', token),
('count', count),
('v', '5.95')]
))
for track in tracks:
title = track['title']
artist = track['artist']
duration = track['duration']
url = getMp3FromM3u8(track['url'])
file = urllib.request.urlopen(url)
try:
bot.send_audio(message.from_user.id, file, duration=duration, title=title, performer=artist)
except:
bot.send_message(message.from_user.id, "Ошибка загрузки {}".format(title))
except:
bot.send_message(message.from_user.id, "Ошибка исполнения")
bot.infinity_polling()
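# --- Illustrative check (not part of the original bot): getMp3FromM3u8 assumes
# HLS URLs whose sixth path segment names the track; the URL below is a
# made-up example, not a real endpoint.
# >>> getMp3FromM3u8('https://audio.example.com/p1/abc/def/index.m3u8?key=1')
# 'https://audio.example.com/p1/def.mp3'
# >>> getMp3FromM3u8('https://audio.example.com/track.mp3')  # no m3u8 marker, passed through
# 'https://audio.example.com/track.mp3'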
| [
3,
4,
5,
6,
7
] |
1,950 | 60ca8b1d7307a9d8183e3617f238efcfb9d707dd | <mask token>
| <mask token>
def create_app():
app = Flask(__name__)
@app.route('/')
def hello_world():
return 'Hello, World!'
@app.route('/process_game', methods=['POST'])
def process_game():
move_sequence = json.loads(request.data)['moves']
return jsonify(run_game(move_sequence))
return app
<mask token>
| <mask token>
def create_app():
app = Flask(__name__)
@app.route('/')
def hello_world():
return 'Hello, World!'
@app.route('/process_game', methods=['POST'])
def process_game():
move_sequence = json.loads(request.data)['moves']
return jsonify(run_game(move_sequence))
return app
if __name__ == '__main__':
app = create_app()
app.run(port=5000)
| import json
from flask import Flask, request, jsonify
from lib.chess_utils import run_game
def create_app():
app = Flask(__name__)
@app.route('/')
def hello_world():
return 'Hello, World!'
@app.route('/process_game', methods=['POST'])
def process_game():
move_sequence = json.loads(request.data)['moves']
return jsonify(run_game(move_sequence))
return app
if __name__ == '__main__':
app = create_app()
app.run(port=5000)
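# --- Hypothetical client sketch (not part of the app): /process_game expects a
# JSON body with a 'moves' list; the exact move encoding is whatever
# lib.chess_utils.run_game accepts (SAN strings are assumed here).
#
#   import requests
#   resp = requests.post('http://localhost:5000/process_game',
#                        json={'moves': ['e4', 'e5', 'Nf3']})
#   print(resp.json())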
| null | [
0,
1,
2,
3
] |
1,951 | 929e6deeb017fd338c63439f689d05331b016d0f | class CUtil:
@staticmethod
def generate_board(initial_board, grid_size):
board_dictionary = dict()
iterator = 0
board_identifiers = CUtil.__generate_board_identifiers(grid_size)
for row in initial_board:
for data in row:
identifier = board_identifiers[iterator]
board_dictionary[identifier] = str(data)
if data == 0:
board_dictionary[identifier] = '123456789'
iterator += 1
return board_dictionary
@staticmethod
def generate_constraint_dictionary(grid_size):
identifiers = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
board_size = grid_size * grid_size
rows_selected = columns_selected = identifiers[:board_size]
board_identifiers = CUtil.__generate_board_identifiers(grid_size)
generated_grids = CUtil.__generate_grids(rows_selected, grid_size)
constraints = dict()
for board_identifier in board_identifiers:
rows = CUtil.__others_in_row(board_identifier, rows_selected)
columns = CUtil.__others_in_columns(board_identifier,
columns_selected)
grids = CUtil.__others_in_grid(board_identifier, generated_grids)
constraints[board_identifier] = set(rows + columns + grids)
return constraints
<mask token>
<mask token>
@staticmethod
def __others_in_columns(board_identifier, identifiers):
column_identifier = board_identifier[1]
others = []
for identifier in identifiers:
new_element = identifier + column_identifier
if new_element != board_identifier:
others.append(new_element)
return others
@staticmethod
def __others_in_grid(board_identifier, grids):
selected_grid = []
for index, grid in enumerate(grids):
for element in grid:
if element == board_identifier:
selected_grid = list(grid)
break
selected_grid.remove(board_identifier)
return selected_grid
@staticmethod
def __generate_grids(identifiers, grid_size):
split_identifiers = []
for i in range(grid_size):
start = i * grid_size
end = grid_size * (i + 1)
selected = identifiers[start:end]
split_identifiers.append(list(selected))
grids = []
for row in split_identifiers:
for column in split_identifiers:
inner_grid = []
for identifier_row in row:
for identifier_column in column:
inner_grid.append(identifier_row + identifier_column)
grids.append(inner_grid)
return grids
<mask token>
<mask token>
| class CUtil:
@staticmethod
def generate_board(initial_board, grid_size):
board_dictionary = dict()
iterator = 0
board_identifiers = CUtil.__generate_board_identifiers(grid_size)
for row in initial_board:
for data in row:
identifier = board_identifiers[iterator]
board_dictionary[identifier] = str(data)
if data == 0:
board_dictionary[identifier] = '123456789'
iterator += 1
return board_dictionary
@staticmethod
def generate_constraint_dictionary(grid_size):
identifiers = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
board_size = grid_size * grid_size
rows_selected = columns_selected = identifiers[:board_size]
board_identifiers = CUtil.__generate_board_identifiers(grid_size)
generated_grids = CUtil.__generate_grids(rows_selected, grid_size)
constraints = dict()
for board_identifier in board_identifiers:
rows = CUtil.__others_in_row(board_identifier, rows_selected)
columns = CUtil.__others_in_columns(board_identifier,
columns_selected)
grids = CUtil.__others_in_grid(board_identifier, generated_grids)
constraints[board_identifier] = set(rows + columns + grids)
return constraints
<mask token>
<mask token>
@staticmethod
def __others_in_columns(board_identifier, identifiers):
column_identifier = board_identifier[1]
others = []
for identifier in identifiers:
new_element = identifier + column_identifier
if new_element != board_identifier:
others.append(new_element)
return others
@staticmethod
def __others_in_grid(board_identifier, grids):
selected_grid = []
for index, grid in enumerate(grids):
for element in grid:
if element == board_identifier:
selected_grid = list(grid)
break
selected_grid.remove(board_identifier)
return selected_grid
@staticmethod
def __generate_grids(identifiers, grid_size):
split_identifiers = []
for i in range(grid_size):
start = i * grid_size
end = grid_size * (i + 1)
selected = identifiers[start:end]
split_identifiers.append(list(selected))
grids = []
for row in split_identifiers:
for column in split_identifiers:
inner_grid = []
for identifier_row in row:
for identifier_column in column:
inner_grid.append(identifier_row + identifier_column)
grids.append(inner_grid)
return grids
<mask token>
@staticmethod
def __testing9x9():
string = 'ABCDEFGHI'
output_string = ''
for letter1 in string:
for letter2 in string:
output_string += letter1 + letter2 + ' '
output_string += '\n'
print(output_string)
| class CUtil:
@staticmethod
def generate_board(initial_board, grid_size):
board_dictionary = dict()
iterator = 0
board_identifiers = CUtil.__generate_board_identifiers(grid_size)
for row in initial_board:
for data in row:
identifier = board_identifiers[iterator]
board_dictionary[identifier] = str(data)
if data == 0:
board_dictionary[identifier] = '123456789'
iterator += 1
return board_dictionary
@staticmethod
def generate_constraint_dictionary(grid_size):
identifiers = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
board_size = grid_size * grid_size
rows_selected = columns_selected = identifiers[:board_size]
board_identifiers = CUtil.__generate_board_identifiers(grid_size)
generated_grids = CUtil.__generate_grids(rows_selected, grid_size)
constraints = dict()
for board_identifier in board_identifiers:
rows = CUtil.__others_in_row(board_identifier, rows_selected)
columns = CUtil.__others_in_columns(board_identifier,
columns_selected)
grids = CUtil.__others_in_grid(board_identifier, generated_grids)
constraints[board_identifier] = set(rows + columns + grids)
return constraints
<mask token>
@staticmethod
def __others_in_row(board_identifier, identifiers):
row_identifier = board_identifier[0]
others = []
for identifier in identifiers:
new_element = row_identifier + identifier
if new_element != board_identifier:
others.append(new_element)
return others
@staticmethod
def __others_in_columns(board_identifier, identifiers):
column_identifier = board_identifier[1]
others = []
for identifier in identifiers:
new_element = identifier + column_identifier
if new_element != board_identifier:
others.append(new_element)
return others
@staticmethod
def __others_in_grid(board_identifier, grids):
selected_grid = []
for index, grid in enumerate(grids):
for element in grid:
if element == board_identifier:
selected_grid = list(grid)
break
selected_grid.remove(board_identifier)
return selected_grid
@staticmethod
def __generate_grids(identifiers, grid_size):
split_identifiers = []
for i in range(grid_size):
start = i * grid_size
end = grid_size * (i + 1)
selected = identifiers[start:end]
split_identifiers.append(list(selected))
grids = []
for row in split_identifiers:
for column in split_identifiers:
inner_grid = []
for identifier_row in row:
for identifier_column in column:
inner_grid.append(identifier_row + identifier_column)
grids.append(inner_grid)
return grids
<mask token>
@staticmethod
def __testing9x9():
string = 'ABCDEFGHI'
output_string = ''
for letter1 in string:
for letter2 in string:
output_string += letter1 + letter2 + ' '
output_string += '\n'
print(output_string)
| class CUtil:
@staticmethod
def generate_board(initial_board, grid_size):
board_dictionary = dict()
iterator = 0
board_identifiers = CUtil.__generate_board_identifiers(grid_size)
for row in initial_board:
for data in row:
identifier = board_identifiers[iterator]
board_dictionary[identifier] = str(data)
if data == 0:
board_dictionary[identifier] = '123456789'
iterator += 1
return board_dictionary
@staticmethod
def generate_constraint_dictionary(grid_size):
identifiers = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
board_size = grid_size * grid_size
rows_selected = columns_selected = identifiers[:board_size]
board_identifiers = CUtil.__generate_board_identifiers(grid_size)
generated_grids = CUtil.__generate_grids(rows_selected, grid_size)
constraints = dict()
for board_identifier in board_identifiers:
rows = CUtil.__others_in_row(board_identifier, rows_selected)
columns = CUtil.__others_in_columns(board_identifier,
columns_selected)
grids = CUtil.__others_in_grid(board_identifier, generated_grids)
constraints[board_identifier] = set(rows + columns + grids)
return constraints
@staticmethod
def constraints_as_tuple(constraints):
constraints_tuples = []
for key, values in constraints.items():
for value in values:
constraints_tuples.append((key, value))
return constraints_tuples
@staticmethod
def __others_in_row(board_identifier, identifiers):
row_identifier = board_identifier[0]
others = []
for identifier in identifiers:
new_element = row_identifier + identifier
if new_element != board_identifier:
others.append(new_element)
return others
@staticmethod
def __others_in_columns(board_identifier, identifiers):
column_identifier = board_identifier[1]
others = []
for identifier in identifiers:
new_element = identifier + column_identifier
if new_element != board_identifier:
others.append(new_element)
return others
@staticmethod
def __others_in_grid(board_identifier, grids):
selected_grid = []
for index, grid in enumerate(grids):
for element in grid:
if element == board_identifier:
selected_grid = list(grid)
break
selected_grid.remove(board_identifier)
return selected_grid
@staticmethod
def __generate_grids(identifiers, grid_size):
split_identifiers = []
for i in range(grid_size):
start = i * grid_size
end = grid_size * (i + 1)
selected = identifiers[start:end]
split_identifiers.append(list(selected))
grids = []
for row in split_identifiers:
for column in split_identifiers:
inner_grid = []
for identifier_row in row:
for identifier_column in column:
inner_grid.append(identifier_row + identifier_column)
grids.append(inner_grid)
return grids
@staticmethod
def __generate_board_identifiers(grid_size):
identifiers = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
board_size = grid_size * grid_size
rows_selected = columns_selected = identifiers[:board_size]
board = []
for row_identifier in rows_selected:
for column_identifier in columns_selected:
board.append(row_identifier + column_identifier)
return board
@staticmethod
def __testing9x9():
string = 'ABCDEFGHI'
output_string = ''
for letter1 in string:
for letter2 in string:
output_string += letter1 + letter2 + ' '
output_string += '\n'
print(output_string)
| class CUtil:
    # Returns a dictionary containing the cell UID as the key and the data for the cell as the value
# Ex: 'AA': 2, 'AB': 4 ....
@staticmethod
def generate_board(initial_board, grid_size):
board_dictionary = dict()
iterator = 0
board_identifiers = CUtil.__generate_board_identifiers(grid_size)
for row in initial_board:
for data in row:
identifier = board_identifiers[iterator]
board_dictionary[identifier] = str(data)
if data == 0:
board_dictionary[identifier] = "123456789"
iterator += 1
return board_dictionary
# returns a dictionary containing possible constraints for each cell
# Ex: 'AA': 'AB', 'AC' ....
@staticmethod
def generate_constraint_dictionary(grid_size):
identifiers = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
board_size = grid_size * grid_size
rows_selected = columns_selected = identifiers[:board_size]
board_identifiers = CUtil.__generate_board_identifiers(grid_size)
generated_grids = CUtil.__generate_grids(rows_selected, grid_size)
constraints = dict()
for board_identifier in board_identifiers:
rows = CUtil.__others_in_row(board_identifier, rows_selected)
columns = CUtil.__others_in_columns(board_identifier, columns_selected)
grids = CUtil.__others_in_grid(board_identifier, generated_grids)
constraints[board_identifier] = set(rows + columns + grids)
return constraints
# returns a tuple containing possible constraints for each cell
    # Ex: ('AA', 'AB'), ('AA', 'AC') ....
@staticmethod
def constraints_as_tuple(constraints):
constraints_tuples = []
for key, values in constraints.items():
for value in values:
constraints_tuples.append((key, value))
return constraints_tuples
@staticmethod
def __others_in_row(board_identifier, identifiers):
# if 'AB' then get just 'A', because that's the row
row_identifier = board_identifier[0]
others = []
for identifier in identifiers:
new_element = row_identifier + identifier
if new_element != board_identifier:
others.append(new_element)
return others
@staticmethod
def __others_in_columns(board_identifier, identifiers):
        # if 'AB' then get just 'B', because that's the column
column_identifier = board_identifier[1]
others = []
for identifier in identifiers:
new_element = identifier + column_identifier
if new_element != board_identifier:
others.append(new_element)
return others
@staticmethod
def __others_in_grid(board_identifier, grids):
        # locate the grid that contains this cell, then drop the cell itself from it
selected_grid = []
for index, grid in enumerate(grids):
for element in grid:
if element == board_identifier:
selected_grid = list(grid)
break
selected_grid.remove(board_identifier)
return selected_grid
@staticmethod
def __generate_grids(identifiers, grid_size):
split_identifiers = []
for i in range(grid_size):
start = i * grid_size
end = grid_size * (i + 1)
selected = identifiers[start:end]
split_identifiers.append(list(selected))
grids = []
for row in split_identifiers:
# ["A", "B", "C"]
for column in split_identifiers:
# ["A", "B", "C"]
inner_grid = []
for identifier_row in row:
for identifier_column in column:
inner_grid.append(identifier_row + identifier_column)
grids.append(inner_grid)
return grids
@staticmethod
def __generate_board_identifiers(grid_size):
identifiers = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
board_size = grid_size * grid_size
rows_selected = columns_selected = identifiers[:board_size]
board = []
for row_identifier in rows_selected:
for column_identifier in columns_selected:
board.append(row_identifier + column_identifier)
return board
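    # --- Usage sketch (not part of the original class): grid_size=3 gives a
    # standard 9x9 board; the all-zero puzzle below is a hypothetical input.
    #
    #   puzzle = [[0] * 9 for _ in range(9)]
    #   board = CUtil.generate_board(puzzle, 3)          # {'AA': '123456789', ...}
    #   constraints = CUtil.generate_constraint_dictionary(3)
    #   len(constraints['AA'])                           # 20 = 8 row + 8 column + 4 grid peers
    #   arcs = CUtil.constraints_as_tuple(constraints)   # [('AA', 'AB'), ...]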
@staticmethod
def __testing9x9():
string = "ABCDEFGHI"
output_string = ""
for letter1 in string:
for letter2 in string:
output_string += letter1 + letter2 + " "
output_string += "\n"
print(output_string) | [
6,
7,
8,
10,
11
] |
1,952 | d1f0baa1ff87ece50aaded5e60908269e81b6734 | <mask token>
class Tela:
<mask token>
<mask token>
def setEstagio(self, temp):
if temp in self.telas:
self.estagio = temp
else:
            print('Screen does not exist, typo in the code')
<mask token>
def atualizarSprites(self):
if self.j.getVidas() == 2:
self.sprites.remove(self.v2)
if self.j.getVidas() == 1:
self.sprites.remove(self.v1)
if self.j.getVidas() == 0:
self.sprites.remove(self.v0)
| <mask token>
class Tela:
def __init__(self, j, t0):
self.telas = ['jogo', 'game over']
self.estagio = 'jogo'
self.j = j
self.v0 = Sprite(40, 40, 30, 30, t0)
self.v1 = Sprite(40, 80, 30, 30, t0)
self.v2 = Sprite(40, 120, 30, 30, t0)
self.sprites = [self.v0, self.v1, self.v2]
def getEstagio(self):
return self.estagio
def setEstagio(self, temp):
if temp in self.telas:
self.estagio = temp
else:
            print('Screen does not exist, typo in the code')
<mask token>
def atualizarSprites(self):
if self.j.getVidas() == 2:
self.sprites.remove(self.v2)
if self.j.getVidas() == 1:
self.sprites.remove(self.v1)
if self.j.getVidas() == 0:
self.sprites.remove(self.v0)
| <mask token>
class Tela:
def __init__(self, j, t0):
self.telas = ['jogo', 'game over']
self.estagio = 'jogo'
self.j = j
self.v0 = Sprite(40, 40, 30, 30, t0)
self.v1 = Sprite(40, 80, 30, 30, t0)
self.v2 = Sprite(40, 120, 30, 30, t0)
self.sprites = [self.v0, self.v1, self.v2]
def getEstagio(self):
return self.estagio
def setEstagio(self, temp):
if temp in self.telas:
self.estagio = temp
else:
            print('Screen does not exist, typo in the code')
def getSprites(self):
return self.sprites
def atualizarSprites(self):
if self.j.getVidas() == 2:
self.sprites.remove(self.v2)
if self.j.getVidas() == 1:
self.sprites.remove(self.v1)
if self.j.getVidas() == 0:
self.sprites.remove(self.v0)
| from SpritesClass import Sprite
from JogadorClass import Jogador
from OpenGL.GL import *
from OpenGL.GLUT import *
from OpenGL.GLU import *
class Tela:
def __init__(self, j, t0):
self.telas = ['jogo', 'game over']
self.estagio = 'jogo'
self.j = j
self.v0 = Sprite(40, 40, 30, 30, t0)
self.v1 = Sprite(40, 80, 30, 30, t0)
self.v2 = Sprite(40, 120, 30, 30, t0)
self.sprites = [self.v0, self.v1, self.v2]
def getEstagio(self):
return self.estagio
def setEstagio(self, temp):
if temp in self.telas:
self.estagio = temp
else:
            print('Screen does not exist, typo in the code')
def getSprites(self):
return self.sprites
def atualizarSprites(self):
if self.j.getVidas() == 2:
self.sprites.remove(self.v2)
if self.j.getVidas() == 1:
self.sprites.remove(self.v1)
if self.j.getVidas() == 0:
self.sprites.remove(self.v0)
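# --- Minimal sketch (not part of the original file): how the class above is
# meant to be driven; Jogador's constructor arguments and the texture id t0
# are assumptions about the rest of the game's setup code.
#
#   j = Jogador(...)               # hypothetical constructor arguments
#   tela = Tela(j, t0)
#   tela.setEstagio('game over')   # accepted: the name is listed in self.telas
#   tela.atualizarSprites()        # removes one heart sprite per life lost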
| from SpritesClass import Sprite
from JogadorClass import Jogador
from OpenGL.GL import *
from OpenGL.GLUT import *
from OpenGL.GLU import *
class Tela:
def __init__(self,j,t0):
self.telas = ["jogo","game over"] #telas existentes
self.estagio = "jogo"
self.j = j
#sprites
self.v0 = Sprite(40,40,30,30,t0)
self.v1 = Sprite(40,80,30,30,t0)
self.v2 = Sprite(40,120,30,30,t0)
self.sprites = [self.v0,self.v1,self.v2]
def getEstagio(self):
return self.estagio
def setEstagio(self,temp):
if temp in self.telas:
self.estagio=temp
else:
print("Tela não existe, erro de digitação no código")
def getSprites(self):
return self.sprites
def atualizarSprites(self):
if self.j.getVidas() == 2:
self.sprites.remove(self.v2)
if self.j.getVidas() == 1:
self.sprites.remove(self.v1)
if self.j.getVidas() == 0:
self.sprites.remove(self.v0) | [
3,
5,
6,
7,
8
] |
1,953 | 8e22db940124f92d3048055cf72dcaa79564cdc6 | <mask token>
def test_build_identifier():
assert 'role:server_base' == Node.build_identifier('server_base', 'role')
def test_identifier():
node = Node('server_base', 'role', 'irrelevant')
assert 'role:server_base' == node.identifier()
def test_add_successor():
parent = Node('appserver', 'playbook', 'appserver.yml')
child = Node('server_base', 'role', 'roles/server_base')
parent.add_successor(child)
assert child in parent.successors
assert parent in child.predecessors
<mask token>
@pytest.mark.parametrize('this, other, equal', [(('myname', 'mytype',
'mypath'), ('myname', 'mytype', 'mypath'), True), (('myname', 'mytype',
'mypath'), ('othername', 'mytype', 'mypath'), False), (('myname',
'mytype', 'mypath'), ('myname', 'othertype', 'mypath'), False), ((
'myname', 'mytype', 'mypath'), ('myname', 'othertype', 'otherpath'),
False)])
def test_eq(this, other, equal):
this_node = Node(*this)
other_node = Node(*other)
assert equal and this_node == other_node or not equal and this_node != other_node
@pytest.mark.parametrize('other', [None, [], ('myname', 'mytype', 'mypath')])
def test_eq_unequal_types(other):
this = Node('myname', 'mytype', 'mypath')
assert this != other
| <mask token>
def test_build_identifier():
assert 'role:server_base' == Node.build_identifier('server_base', 'role')
def test_identifier():
node = Node('server_base', 'role', 'irrelevant')
assert 'role:server_base' == node.identifier()
def test_add_successor():
parent = Node('appserver', 'playbook', 'appserver.yml')
child = Node('server_base', 'role', 'roles/server_base')
parent.add_successor(child)
assert child in parent.successors
assert parent in child.predecessors
<mask token>
def test_str():
name = 'myname'
typestring = 'mytype'
path = 'mypath'
node = Node(name, typestring, path)
assert str((typestring, name, path)) == str(node)
@pytest.mark.parametrize('this, other, equal', [(('myname', 'mytype',
'mypath'), ('myname', 'mytype', 'mypath'), True), (('myname', 'mytype',
'mypath'), ('othername', 'mytype', 'mypath'), False), (('myname',
'mytype', 'mypath'), ('myname', 'othertype', 'mypath'), False), ((
'myname', 'mytype', 'mypath'), ('myname', 'othertype', 'otherpath'),
False)])
def test_eq(this, other, equal):
this_node = Node(*this)
other_node = Node(*other)
assert equal and this_node == other_node or not equal and this_node != other_node
@pytest.mark.parametrize('other', [None, [], ('myname', 'mytype', 'mypath')])
def test_eq_unequal_types(other):
this = Node('myname', 'mytype', 'mypath')
assert this != other
| <mask token>
def test_build_identifier():
assert 'role:server_base' == Node.build_identifier('server_base', 'role')
def test_identifier():
node = Node('server_base', 'role', 'irrelevant')
assert 'role:server_base' == node.identifier()
def test_add_successor():
parent = Node('appserver', 'playbook', 'appserver.yml')
child = Node('server_base', 'role', 'roles/server_base')
parent.add_successor(child)
assert child in parent.successors
assert parent in child.predecessors
def test_add_predecessor():
parent = Node('appserver', 'playbook', 'appserver.yml')
child = Node('server_base', 'role', 'roles/server_base')
child.add_predecessor(parent)
assert child in parent.successors
assert parent in child.predecessors
def test_str():
name = 'myname'
typestring = 'mytype'
path = 'mypath'
node = Node(name, typestring, path)
assert str((typestring, name, path)) == str(node)
@pytest.mark.parametrize('this, other, equal', [(('myname', 'mytype',
'mypath'), ('myname', 'mytype', 'mypath'), True), (('myname', 'mytype',
'mypath'), ('othername', 'mytype', 'mypath'), False), (('myname',
'mytype', 'mypath'), ('myname', 'othertype', 'mypath'), False), ((
'myname', 'mytype', 'mypath'), ('myname', 'othertype', 'otherpath'),
False)])
def test_eq(this, other, equal):
this_node = Node(*this)
other_node = Node(*other)
assert equal and this_node == other_node or not equal and this_node != other_node
@pytest.mark.parametrize('other', [None, [], ('myname', 'mytype', 'mypath')])
def test_eq_unequal_types(other):
this = Node('myname', 'mytype', 'mypath')
assert this != other
| import pytest
from ansiblediscover.graph.node import Node
def test_build_identifier():
assert 'role:server_base' == Node.build_identifier('server_base', 'role')
def test_identifier():
node = Node('server_base', 'role', 'irrelevant')
assert 'role:server_base' == node.identifier()
def test_add_successor():
parent = Node('appserver', 'playbook', 'appserver.yml')
child = Node('server_base', 'role', 'roles/server_base')
parent.add_successor(child)
assert child in parent.successors
assert parent in child.predecessors
def test_add_predecessor():
parent = Node('appserver', 'playbook', 'appserver.yml')
child = Node('server_base', 'role', 'roles/server_base')
child.add_predecessor(parent)
assert child in parent.successors
assert parent in child.predecessors
def test_str():
name = 'myname'
typestring = 'mytype'
path = 'mypath'
node = Node(name, typestring, path)
assert str((typestring, name, path)) == str(node)
@pytest.mark.parametrize('this, other, equal', [(('myname', 'mytype',
'mypath'), ('myname', 'mytype', 'mypath'), True), (('myname', 'mytype',
'mypath'), ('othername', 'mytype', 'mypath'), False), (('myname',
'mytype', 'mypath'), ('myname', 'othertype', 'mypath'), False), ((
'myname', 'mytype', 'mypath'), ('myname', 'othertype', 'otherpath'),
False)])
def test_eq(this, other, equal):
this_node = Node(*this)
other_node = Node(*other)
assert equal and this_node == other_node or not equal and this_node != other_node
@pytest.mark.parametrize('other', [None, [], ('myname', 'mytype', 'mypath')])
def test_eq_unequal_types(other):
this = Node('myname', 'mytype', 'mypath')
assert this != other
| import pytest
from ansiblediscover.graph.node import Node
def test_build_identifier():
assert 'role:server_base' == Node.build_identifier('server_base', 'role')
def test_identifier():
node = Node('server_base', 'role', 'irrelevant')
assert 'role:server_base' == node.identifier()
def test_add_successor():
parent = Node('appserver', 'playbook', 'appserver.yml')
child = Node('server_base', 'role', 'roles/server_base')
parent.add_successor(child)
assert child in parent.successors
assert parent in child.predecessors
def test_add_predecessor():
parent = Node('appserver', 'playbook', 'appserver.yml')
child = Node('server_base', 'role', 'roles/server_base')
child.add_predecessor(parent)
assert child in parent.successors
assert parent in child.predecessors
def test_str():
name = 'myname'
typestring = 'mytype'
path = 'mypath'
node = Node(name, typestring, path)
assert str((typestring, name, path)) == str(node)
@pytest.mark.parametrize('this, other, equal', [
(('myname', 'mytype', 'mypath'), ('myname', 'mytype', 'mypath'), True),
(('myname', 'mytype', 'mypath'), ('othername', 'mytype', 'mypath'), False),
(('myname', 'mytype', 'mypath'), ('myname', 'othertype', 'mypath'), False),
(('myname', 'mytype', 'mypath'), ('myname', 'othertype', 'otherpath'), False),
])
def test_eq(this, other, equal):
this_node = Node(*this)
other_node = Node(*other)
assert (equal and (this_node == other_node)) or (not equal and (this_node != other_node))
@pytest.mark.parametrize('other', [
None,
[],
('myname', 'mytype', 'mypath'),
])
def test_eq_unequal_types(other):
this = Node('myname', 'mytype', 'mypath')
assert this != other
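# --- Hypothetical reference implementation (the real Node lives in
# ansiblediscover.graph.node); this sketch only reconstructs the interface the
# tests above exercise:
#
#   class Node:
#       def __init__(self, name, typestring, path):
#           self.name, self.typestring, self.path = name, typestring, path
#           self.successors, self.predecessors = [], []
#
#       @staticmethod
#       def build_identifier(name, typestring):
#           return '{}:{}'.format(typestring, name)
#
#       def identifier(self):
#           return Node.build_identifier(self.name, self.typestring)
#
#       def add_successor(self, child):
#           self.successors.append(child)
#           child.predecessors.append(self)
#
#       def add_predecessor(self, parent):
#           parent.add_successor(self)
#
#       def __eq__(self, other):
#           return (isinstance(other, Node) and
#                   (self.name, self.typestring, self.path) ==
#                   (other.name, other.typestring, other.path))
#
#       def __str__(self):
#           return str((self.typestring, self.name, self.path))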
| [
5,
6,
7,
8,
9
] |
1,954 | ed1df078ad2e8d770f3d8c41493b5537ed106e3a | ##############################
# SConscript for OgreOpcode #
##############################
#SCons scripts maintained by:
# Van Aarde "nanocell" Krynauw.
#TODO:
# - Add commandline options to specify include dirs, defines, compiler defs, libraries, etc.
# - Add Sconscripts for the samples.
# - Add a binary SConstruct file.
#####################
# Options #
#####################
OPT_INCLUDES = '''#include
/usr/local/include/OGRE
'''
OPT_FLAGS = ' '
OPT_LIBNAME = 'OgreOpcode'
OPT_SUBDIRS = 'src'
OPT_CC = 'g++'
OPT_FLAGS = '-Wall -ansi'
OPT_LIBTARGET = '#/libs'
####################
import sys, os
################################
# Some helper functions #
################################
def BuildSharedLib(environment, name, objs):
output = env['LIBTARGET'] + '/' + name
environment.SharedLibrary(output, objs)
def CreateAppFromObjs(environment, objs):
output = '#' + OPT_APPNAME
environment.Program( output, objs, duplicate=0)
#################################
platform_build = 'build' + os.path.sep + sys.platform
#Set up a building environment
env = Environment()
env.BuildSharedLib = BuildSharedLib
env.CreateAppFromObjs = CreateAppFromObjs
#Check if OgreOpcode objects should be built as static or shared
env.BuildObject = env.SharedObject
#Check if OgreOpcode is being compiled on linux. If so,
#then define LINUX_FLOAT
if sys.platform == "linux2":
OPT_FLAGS = OPT_FLAGS + ' -DLINUX_FLOAT'
env.Replace(CC = OPT_CC)
env.Replace(CXXFLAGS = Split(OPT_FLAGS))
#Current path. Used for relative hierarchical building
env['CURRENT_PATH'] = '#'
#The build directory for the current
#platform. TODO: Add command line
#support for cross compiling.
env['PLATFORM_BUILD'] = platform_build
#Target directory where the libraries
#will be built.
env['LIBTARGET'] = OPT_LIBTARGET
env.Append(CPPPATH = Split(OPT_INCLUDES))
#env.Append(LIBPATH = Split(LIBSDIR))
Export('env')
print "Building to: " + env['PLATFORM_BUILD']
#TODO: Check for dependencies...?
objs = []
#Map the 'build' subdirectory to be compiled from 'src'
#Run all the SConscripts in sub directories.
for subdir in Split(OPT_SUBDIRS):
#Map a given subdirectory into the build directory...Let's see how this goes.
env.BuildDir( env['PLATFORM_BUILD'] + "/" + subdir, subdir, duplicate=0)
o = env.SConscript( env['PLATFORM_BUILD'] + "/" + subdir + '/SConscript')
#o = senv.SConscript( senv['PLATFORM_BUILD'] + '/' + subdir + '/SConscript')
objs.append(o)
#All the objects that were returned should be compiled
#into the final OgreOpcode library
#o = env.SConscript( env['PLATFORM_BUILD'] + os.path.sep + 'SConscript')
env.SharedLibrary(OPT_LIBTARGET + "/" + OPT_LIBNAME, o);
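#A hypothetical src/SConscript illustrating the contract the loop above assumes:
#each sub-script imports the shared environment, builds its objects and hands
#them back via Return (the source file name below is made up).
#
#	Import('env')
#	objs = [env.BuildObject(src) for src in Split('OgreOpcode.cpp')]
#	Return('objs')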
| null | null | null | null | [
0
] |
1,955 | 547926904f9a4b88a988e3b59c49b94fe0e30de4 | <mask token>
class Merkle:
<mask token>
<mask token>
def commit_(leafs):
assert len(leafs) & len(leafs
            ) - 1 == 0, 'List must be of a power-of-two length'
if len(leafs) == 1:
return leafs[0]
return Merkle.H(Merkle.commit_(leafs[:len(leafs) // 2]) + Merkle.
commit_(leafs[len(leafs) // 2:])).digest()
def open_(index, leafs):
assert len(leafs) & len(leafs
            ) - 1 == 0, 'List must be of a power-of-two length'
assert 0 <= index and index < len(leafs)
if len(leafs) == 2:
return [leafs[1 - index]]
elif index < len(leafs) / 2:
return Merkle.open_(index, leafs[:len(leafs) // 2]) + [Merkle.
commit_(leafs[len(leafs) // 2:])]
else:
return Merkle.open_(index - len(leafs) // 2, leafs[len(leafs) //
2:]) + [Merkle.commit_(leafs[:len(leafs) // 2])]
def verify_(root, index, path, leaf):
assert 0 <= index and index < 1 << len(path
), 'cannot verify invalid index'
if len(path) == 1:
if index == 0:
return root == Merkle.H(leaf + path[0]).digest()
else:
return root == Merkle.H(path[0] + leaf).digest()
elif index % 2 == 0:
return Merkle.verify_(root, index >> 1, path[1:], Merkle.H(leaf +
path[0]).digest())
else:
return Merkle.verify_(root, index >> 1, path[1:], Merkle.H(path
[0] + leaf).digest())
def commit(leafs: List[Any]):
return Merkle.commit_([Merkle.H(bytes(leaf)).digest() for leaf in
leafs])
<mask token>
def verify(root: bytes, index: int, path: List[List[Any]], leaf: List[Any]
):
return Merkle.verify_(root, index, path, Merkle.H(bytes(leaf)).digest()
)
| <mask token>
class Merkle:
<mask token>
<mask token>
def commit_(leafs):
assert len(leafs) & len(leafs
            ) - 1 == 0, 'List must be of a power-of-two length'
if len(leafs) == 1:
return leafs[0]
return Merkle.H(Merkle.commit_(leafs[:len(leafs) // 2]) + Merkle.
commit_(leafs[len(leafs) // 2:])).digest()
def open_(index, leafs):
assert len(leafs) & len(leafs
            ) - 1 == 0, 'List must be of a power-of-two length'
assert 0 <= index and index < len(leafs)
if len(leafs) == 2:
return [leafs[1 - index]]
elif index < len(leafs) / 2:
return Merkle.open_(index, leafs[:len(leafs) // 2]) + [Merkle.
commit_(leafs[len(leafs) // 2:])]
else:
return Merkle.open_(index - len(leafs) // 2, leafs[len(leafs) //
2:]) + [Merkle.commit_(leafs[:len(leafs) // 2])]
def verify_(root, index, path, leaf):
assert 0 <= index and index < 1 << len(path
), 'cannot verify invalid index'
if len(path) == 1:
if index == 0:
return root == Merkle.H(leaf + path[0]).digest()
else:
return root == Merkle.H(path[0] + leaf).digest()
elif index % 2 == 0:
return Merkle.verify_(root, index >> 1, path[1:], Merkle.H(leaf +
path[0]).digest())
else:
return Merkle.verify_(root, index >> 1, path[1:], Merkle.H(path
[0] + leaf).digest())
def commit(leafs: List[Any]):
return Merkle.commit_([Merkle.H(bytes(leaf)).digest() for leaf in
leafs])
def open(index: int, leafs: List[Any]):
return Merkle.open_(index, [Merkle.H(bytes(leaf)).digest() for leaf in
leafs])
def verify(root: bytes, index: int, path: List[List[Any]], leaf: List[Any]
):
return Merkle.verify_(root, index, path, Merkle.H(bytes(leaf)).digest()
)
| <mask token>
class Merkle:
"""
We consider the merkle tree as a commitment protocol implementing
the interface:
* commit_() : commits to a list by computing the merkle tree.
    * open_() : opens the commitment by computing the authentication path.
    * verify_() : verifies that a value is committed by checking that it's a leaf.
"""
H = blake2b
def commit_(leafs):
assert len(leafs) & len(leafs
            ) - 1 == 0, 'List must be of a power-of-two length'
if len(leafs) == 1:
return leafs[0]
return Merkle.H(Merkle.commit_(leafs[:len(leafs) // 2]) + Merkle.
commit_(leafs[len(leafs) // 2:])).digest()
def open_(index, leafs):
assert len(leafs) & len(leafs
            ) - 1 == 0, 'List must be of a power-of-two length'
assert 0 <= index and index < len(leafs)
if len(leafs) == 2:
return [leafs[1 - index]]
elif index < len(leafs) / 2:
return Merkle.open_(index, leafs[:len(leafs) // 2]) + [Merkle.
commit_(leafs[len(leafs) // 2:])]
else:
return Merkle.open_(index - len(leafs) // 2, leafs[len(leafs) //
2:]) + [Merkle.commit_(leafs[:len(leafs) // 2])]
def verify_(root, index, path, leaf):
assert 0 <= index and index < 1 << len(path
), 'cannot verify invalid index'
if len(path) == 1:
if index == 0:
return root == Merkle.H(leaf + path[0]).digest()
else:
return root == Merkle.H(path[0] + leaf).digest()
elif index % 2 == 0:
return Merkle.verify_(root, index >> 1, path[1:], Merkle.H(leaf +
path[0]).digest())
else:
return Merkle.verify_(root, index >> 1, path[1:], Merkle.H(path
[0] + leaf).digest())
def commit(leafs: List[Any]):
return Merkle.commit_([Merkle.H(bytes(leaf)).digest() for leaf in
leafs])
def open(index: int, leafs: List[Any]):
return Merkle.open_(index, [Merkle.H(bytes(leaf)).digest() for leaf in
leafs])
def verify(root: bytes, index: int, path: List[List[Any]], leaf: List[Any]
):
return Merkle.verify_(root, index, path, Merkle.H(bytes(leaf)).digest()
)
| <mask token>
from typing import List, Any
from hashlib import blake2b
class Merkle:
"""
We consider the merkle tree as a commitment protocol implementing
the interface:
* commit_() : commits to a list by computing the merkle tree.
    * open_() : opens the commitment by computing the authentication path.
    * verify_() : verifies that a value is committed by checking that it's a leaf.
"""
H = blake2b
def commit_(leafs):
assert len(leafs) & len(leafs
            ) - 1 == 0, 'List must be of a power-of-two length'
if len(leafs) == 1:
return leafs[0]
return Merkle.H(Merkle.commit_(leafs[:len(leafs) // 2]) + Merkle.
commit_(leafs[len(leafs) // 2:])).digest()
def open_(index, leafs):
assert len(leafs) & len(leafs
            ) - 1 == 0, 'List must be of a power-of-two length'
assert 0 <= index and index < len(leafs)
if len(leafs) == 2:
return [leafs[1 - index]]
elif index < len(leafs) / 2:
return Merkle.open_(index, leafs[:len(leafs) // 2]) + [Merkle.
commit_(leafs[len(leafs) // 2:])]
else:
return Merkle.open_(index - len(leafs) // 2, leafs[len(leafs) //
2:]) + [Merkle.commit_(leafs[:len(leafs) // 2])]
def verify_(root, index, path, leaf):
assert 0 <= index and index < 1 << len(path
), 'cannot verify invalid index'
if len(path) == 1:
if index == 0:
return root == Merkle.H(leaf + path[0]).digest()
else:
return root == Merkle.H(path[0] + leaf).digest()
elif index % 2 == 0:
return Merkle.verify_(root, index >> 1, path[1:], Merkle.H(leaf +
path[0]).digest())
else:
return Merkle.verify_(root, index >> 1, path[1:], Merkle.H(path
[0] + leaf).digest())
def commit(leafs: List[Any]):
return Merkle.commit_([Merkle.H(bytes(leaf)).digest() for leaf in
leafs])
def open(index: int, leafs: List[Any]):
return Merkle.open_(index, [Merkle.H(bytes(leaf)).digest() for leaf in
leafs])
def verify(root: bytes, index: int, path: List[List[Any]], leaf: List[Any]
):
return Merkle.verify_(root, index, path, Merkle.H(bytes(leaf)).digest()
)
| """
Merkle: Implementation of Merkle Trees over Blake2
"""
from typing import List, Any
from hashlib import blake2b
class Merkle:
"""
We consider the merkle tree as a commitment protocol implementing
the interface:
* commit_() : commits to a list by computing the merkle tree.
    * open_() : opens the commitment by computing the authentication path.
    * verify_() : verifies that a value is committed by checking that it's a leaf.
"""
H = blake2b
def commit_(leafs):
        assert len(leafs) & (len(leafs) - 1) == 0, "List must be of a power-of-two length"
if len(leafs) == 1:
return leafs[0]
return Merkle.H(
Merkle.commit_(leafs[: (len(leafs) // 2)])
+ Merkle.commit_(leafs[(len(leafs) // 2) :])
).digest()
def open_(index, leafs):
        assert len(leafs) & (len(leafs) - 1) == 0, "List must be of a power-of-two length"
assert 0 <= index and index < len(leafs)
if len(leafs) == 2:
return [leafs[1 - index]]
elif index < (len(leafs) / 2):
return Merkle.open_(index, leafs[: (len(leafs) // 2)]) + [
Merkle.commit_(leafs[(len(leafs) // 2) :])
]
else:
return Merkle.open_(index - len(leafs) // 2, leafs[len(leafs) // 2 :]) + [
Merkle.commit_(leafs[: len(leafs) // 2])
]
def verify_(root, index, path, leaf):
assert 0 <= index and index < (1 << len(path)), "cannot verify invalid index"
if len(path) == 1:
if index == 0:
return root == Merkle.H(leaf + path[0]).digest()
else:
return root == Merkle.H(path[0] + leaf).digest()
else:
if index % 2 == 0:
return Merkle.verify_(
root, index >> 1, path[1:], Merkle.H(leaf + path[0]).digest()
)
else:
return Merkle.verify_(
root, index >> 1, path[1:], Merkle.H(path[0] + leaf).digest()
)
# The following functions expose the API and compute hashes of leafs before
# calling the underlying code.
def commit(leafs: List[Any]):
return Merkle.commit_([Merkle.H(bytes(leaf)).digest() for leaf in leafs])
def open(index: int, leafs: List[Any]):
return Merkle.open_(index, [Merkle.H(bytes(leaf)).digest() for leaf in leafs])
def verify(root: bytes, index: int, path: List[List[Any]], leaf: List[Any]):
return Merkle.verify_(root, index, path, Merkle.H(bytes(leaf)).digest())
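# --- Usage sketch (not part of the original module): leafs only need to be
# bytes()-convertible; the four two-byte leafs below are arbitrary test data.
if __name__ == "__main__":
    leafs = [[1, 2], [3, 4], [5, 6], [7, 8]]
    root = Merkle.commit(leafs)
    path = Merkle.open(1, leafs)                     # authentication path for leaf 1
    assert Merkle.verify(root, 1, path, leafs[1])    # honest opening verifies
    assert not Merkle.verify(root, 1, path, [9, 9])  # tampered leaf is rejected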
| [
6,
7,
9,
10,
11
] |
1,956 | 30251b7c2ce30b7fa899a5885707c078788d0106 | import os
import sys
import json
from subprocess import Popen, PIPE, STDOUT
from twisted.internet.task import deferLater
from twisted.internet import reactor
from autobahn.twisted.websocket import WebSocketServerFactory, WebSocketServerProtocol, listenWS
from utils import rsync
# TODO: Add Twisted logger
# TODO: Create plugin for fileserver (using twistd)
# TODO: Thinking about using SSL over my WebSockets message-based protocol (OR using AES algorithm?)
CONFIG_IP = 'localhost'
CONFIG_PORT = 8888
CONFIG_TEMPLATE = ''
CONFIG_DATA = {}
BATCH_SIZE = 1 * 2 ** 20
def sendPrefences(port):
p = Popen(["python", "./utils/preferences_sender.py", str(CONFIG_TEMPLATE), str(port)], stdout=PIPE, stdin=PIPE, stderr=STDOUT)
result = p.communicate()[0]
class MessageBasedServerProtocol(WebSocketServerProtocol):
"""
Message-based WebSockets server
    The template packs several fixed-width fields into one string:
[USER_ID:OPERATION_NAME:FILE_ID:FILE_ENC_PASSWORD] - 15 symbols for USER_ID,
10 symbols for OPERATION_NAME,
25 symbols for FILE_ID
32 symbols for FILE_ENC_PASSWORD
other - some data
"""
def __init__(self):
path = CONFIG_DATA['path']
base_dir = CONFIG_DATA['base_dir']
        # prepare to work with files...
if os.path.exists(path) and os.path.isdir(path):
os.chdir(path)
if not os.path.exists(base_dir) or not os.path.isdir(base_dir):
os.mkdir(base_dir)
os.chdir(base_dir)
else:
os.mkdir(path)
os.chdir(path)
os.mkdir(base_dir)
os.chdir(base_dir)
# init some things
self.fullpath = path + '/' + base_dir
self.status = 'ONLINE'
self.commands_handlers = self.__initHandlersUser()
self.file_1 = self.file_2 = self.delta_sync = None
self.file_enc_psw = None
def __initHandlersUser(self):
"""
Initialize handlers for every command
"""
handlers = {}
handlers['WRITE_FILE'] = self.write_file
handlers['READU_FILE'] = self.read_file
handlers['DELET_FILE'] = self.delete_file
handlers['STATUS_SRV'] = self.status_server
handlers['RSYNC_FILE'] = self.rsync_file
handlers['WSYNC_FILE'] = self.wsync_file
return handlers
def __checkUserCatalog(self, user_id):
# prepare to working with files...
os.chdir(self.fullpath)
if not os.path.exists(user_id) or not os.path.isdir(user_id):
os.mkdir(user_id)
os.chdir(user_id)
else:
os.chdir(self.fullpath + '/' + user_id)
def __get_standart_states(self):
return "C", 'Succesfull!'
def write_file(self, user_id, file_id, data):
print "[USER] User with %s was write a file..." % (self.transport.getPeer())
status, commentary = self.__get_standart_states()
self.__checkUserCatalog(user_id)
self.status = 'BUSY'
operation = "WRT"
f = None
try:
    f = open(file_id, "wb")
    f.write(data)
except IOError, argument:
    status = "E"
    commentary = argument
except Exception, argument:
    status = "E"
    commentary = argument
    raise Exception(argument)
finally:
    # guard: `f` stays unbound if open() itself raised
    if f is not None:
        f.close()
self.status = 'ONLINE'
return operation, status, commentary
def read_file(self, user_id, file_id, data):
print "[USER] User with %s was read a file..." % (self.transport.getPeer())
status, commentary = self.__get_standart_states()
self.__checkUserCatalog(user_id)
self.status = 'BUSY'
operation = "REA"
f = None
try:
    f = open(file_id, "rb")
    commentary = f.read()
except IOError, argument:
    status = "E"
    commentary = argument
except Exception, argument:
    status = "E"
    commentary = argument
    raise Exception(argument)
finally:
    # guard: `f` stays unbound if open() itself raised
    if f is not None:
        f.close()
self.status = 'ONLINE'
return operation, status, commentary
def delete_file(self, user_id, file_id, data):
print "[USER] User with %s was delete a file..." % (self.transport.getPeer())
status, commentary = self.__get_standart_states()
self.__checkUserCatalog(user_id)
self.status = 'BUSY'
operation = "DEL"
try:
os.remove(file_id)
except IOError, argument:
status = "E"
commentary = argument
except Exception, argument:
status = "E"
commentary = argument
raise Exception(argument)
self.status = 'ONLINE'
return operation, status, commentary
def rsync_file(self, user_id, file_id, data):
print "[USER] User with %s sync files..." % (self.transport.getPeer())
status, commentary = self.__get_standart_states()
self.__checkUserCatalog(user_id)
self.status = 'BUSY'
operation = "RSY"
try:
f = open(file_id, "rb")
commentary = f.read()
except IOError, argument:
status = "E"
commentary = argument
except Exception, argument:
status = "E"
commentary = argument
raise Exception(argument)
self.status = 'ONLINE'
return operation, status, commentary
def wsync_file(self, user_id, file_id, data):
print "[USER] User with %s sync files..." % (self.transport.getPeer())
status, commentary = self.__get_standart_states()
self.__checkUserCatalog(user_id)
self.status = 'BUSY'
operation = "WRT"
try:
unpatched = open(file_id, "rb")
hashes = rsync.blockchecksums(unpatched)
new_file = file_id + '.new'
swap_path = file_id + '~'
with open(swap_path, "wb") as out_file:
out_file.write(data)
patchedfile = open(swap_path, "rb")
delta = rsync.rsyncdelta(patchedfile, hashes)
unpatched.seek(0)
save_to = open(new_file, "wb")
rsync.patchstream(unpatched, save_to, delta)
save_to.close()
patchedfile.close()
unpatched.close()
if os.path.exists(file_id):
os.remove(file_id)
os.rename(new_file, file_id)
if os.path.exists(swap_path):
os.remove(swap_path)
except IOError, argument:
status = "E"
commentary = argument
except Exception, argument:
status = "E"
commentary = argument
raise Exception(argument)
finally:
print 'WSYNC handler finished.'
self.status = 'ONLINE'
return operation, status, commentary
def status_server(self, user_id, file_id, data):
print "[SERV] Server with %s getting fileserver status..." % (self.transport.getPeer())
status = "C"
operation = "STS"
commentary = self.status
return operation, status, commentary
def onOpen(self):
print "[USER] User with %s connected" % (self.transport.getPeer())
def connectionLost(self, reason):
print '[USER] Lost connection from %s' % (self.transport.getPeer())
def onMessage(self, payload, isBinary):
"""
Processing request from user and send response
"""
user_id, cmd, file_id, self.file_enc_psw = payload[:87].replace('[', '').replace(']', '').split(':')
self.file_enc_psw = self.file_enc_psw.replace('~', '')
data = payload[87:]
operation, status, commentary = "UNK", "C", "Successfull!"
if cmd in ('WRITE_FILE', 'READU_FILE', 'DELET_FILE', 'STATUS_SRV', 'RSYNC_FILE', 'WSYNC_FILE'):
operation, status, commentary = self.commands_handlers[cmd](user_id, file_id, data)
self.file_enc_psw = None
self.sendMessage('[%s][%s]%s' % (operation, status, commentary), isBinary=True, sync=True)
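# Sketch: how a client might assemble a payload for onMessage() above.
# `build_payload` is a hypothetical helper (not part of this module); the
# field widths follow the class docstring: 1+15+1+10+1+25+1+32+1 = 87 bytes.
# The '~' padding on the password is what onMessage() strips back out;
# user_id and file_id are assumed to already be at their exact widths.
def build_payload(user_id, operation, file_id, password, data):
    header = '[%s:%s:%s:%s]' % (user_id, operation, file_id,
                                password.ljust(32, '~'))
    assert len(header) == 87, 'fields must match the fixed widths'
    return header + data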
if __name__ == '__main__':
if len(sys.argv) < 3:
print "using python fileserver_client.py [PATH_TO_config.json_FILE] [PORT]"
else:
try:
# read config file
CONFIG_TEMPLATE = sys.argv[1]
with open(CONFIG_TEMPLATE, "r") as f:
CONFIG_DATA = json.load(f)
# checking IP and PORT
CONFIG_PORT = int(sys.argv[2])
except ValueError:
print 'PLEASE, enter correct information about server...'
sys.exit(1)
except Exception, e:
print e
sys.exit(1)
if CONFIG_IP == 'localhost':
CONFIG_IP = '127.0.0.1'
server_addr = "ws://%s:%d" % (CONFIG_IP, CONFIG_PORT)
# create server
factory = WebSocketServerFactory(server_addr)
factory.protocol = MessageBasedServerProtocol
listenWS(factory)
# create a special Deferred which sends our server preferences (ip and port) to the main server
if bool(CONFIG_DATA["debug"]) is False:
d = deferLater(reactor, 0, sendPrefences, CONFIG_PORT)
reactor.run()
| null | null | null | null | [
0
] |
1,957 | 0a1d102075cebee13e25f3eb703811d1e22f53c2 | <mask token>
class UserTest(DemoTestCase):
<mask token>
def test_login_bad_password(self):
r = self.post('/api/connect', {'user': 'admin', 'password':
'badpassword'})
self.assertEqual(401, r.status_code)
def test_login_good_password(self):
r = self.post('/api/connect', {'user': 'admin', 'password': 'admin'})
self.assertEqual(200, r.status_code)
<mask token>
| <mask token>
class UserTest(DemoTestCase):
def test_access_secure_area(self):
r = self.get('/api/user')
self.assertEqual(401, r.status_code)
def test_login_bad_password(self):
r = self.post('/api/connect', {'user': 'admin', 'password':
'badpassword'})
self.assertEqual(401, r.status_code)
def test_login_good_password(self):
r = self.post('/api/connect', {'user': 'admin', 'password': 'admin'})
self.assertEqual(200, r.status_code)
<mask token>
| <mask token>
class UserTest(DemoTestCase):
def test_access_secure_area(self):
r = self.get('/api/user')
self.assertEqual(401, r.status_code)
def test_login_bad_password(self):
r = self.post('/api/connect', {'user': 'admin', 'password':
'badpassword'})
self.assertEqual(401, r.status_code)
def test_login_good_password(self):
r = self.post('/api/connect', {'user': 'admin', 'password': 'admin'})
self.assertEqual(200, r.status_code)
def test_get_info(self):
self.login('admin', 'admin')
r = self.get('/api/user')
rep = self.rep_to_dict(r.text)
self.assertEqual(200, r.status_code)
self.assertEqual('admin', rep['login'])
| from test.demo_test_case import DemoTestCase
class UserTest(DemoTestCase):
def test_access_secure_area(self):
r = self.get('/api/user')
self.assertEqual(401, r.status_code)
def test_login_bad_password(self):
r = self.post('/api/connect', {'user': 'admin', 'password':
'badpassword'})
self.assertEqual(401, r.status_code)
def test_login_good_password(self):
r = self.post('/api/connect', {'user': 'admin', 'password': 'admin'})
self.assertEqual(200, r.status_code)
def test_get_info(self):
self.login('admin', 'admin')
r = self.get('/api/user')
rep = self.rep_to_dict(r.text)
self.assertEqual(200, r.status_code)
self.assertEqual('admin', rep['login'])
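# Hypothetical sketch of the DemoTestCase helpers the tests above rely on
# (the real class lives in test.demo_test_case and is not shown here);
# BASE_URL, the session handling and the JSON response shape are all
# assumptions for illustration.
import json
import unittest

import requests


class DemoTestCaseSketch(unittest.TestCase):
    BASE_URL = 'http://localhost:5000'

    def setUp(self):
        self.session = requests.Session()  # keeps the auth cookie between calls

    def get(self, path):
        return self.session.get(self.BASE_URL + path)

    def post(self, path, data):
        return self.session.post(self.BASE_URL + path, json=data)

    def login(self, user, password):
        return self.post('/api/connect', {'user': user, 'password': password})

    def rep_to_dict(self, text):
        return json.loads(text)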
| null | [
3,
4,
5,
6
] |
1,958 | 1fbe9078748b00efad0211b29ad572df97cda921 | <mask token>
def dpd1_process(lst):
"""已废弃的方法"""
if not lst:
return
for key, l in lst.items():
rule = getattr(BeforeInBomber, key).value
query = AutoIVRActions.select(fn.DISTINCT(AutoIVRActions.loanid)
).where(AutoIVRActions.loanid.in_(l), AutoIVRActions.group.in_(
rule.get('group')), AutoIVRActions.callstate.in_(IVRCallStatus.
call_success()))
success_set = {i.loanid for i in query}
failed_list = list(set(l) - success_set)
post_params = {'$and': rule.get('$and'), 'app_list': failed_list}
resp = Hyperloop().post('/bomber/score/verify', json=post_params)
if not resp.ok:
logging.error('hyperloop score verification failed: %s, %s',
str(resp.status_code), str(resp.text))
logging.error('hyperloop score verification failed: %s', str(
post_params))
continue
logging.debug('hyperloop score verification success: %s', resp.content)
resp_json = resp.json()
app_list = resp_json['data']
if not app_list:
continue
for item in app_list:
if random.randint(0, 5) == 1:
send_to_default_q(MessageAction.APPLICATION_BOMBER, {'id':
int(item)})
@action(MessageAction.GET_IVR)
def get_ivr(payload, msg_id):
logging.warning('start get_ivr')
sys_config = SystemConfig.select().where(SystemConfig.key ==
'DPD1-3_INTO_IVR').first()
now = date.today()
if sys_config and sys_config.value:
start = now - timedelta(days=3)
else:
start = now
end = now + timedelta(days=4)
item = IVRActionLog.filter(IVRActionLog.proc_date == now).first()
if not item:
AutoIVR.delete().execute()
current_page = 0
elif item.current_page >= item.total_page:
return
else:
current_page = item.current_page
auto_ivr = {'DanaCepat01': 1, 'DanaCepat00': 2, 'DanaCepat0PDP1': 3,
'PinjamUang01': 4, 'PinjamUang00': 5, 'PinjamUang0PDP1': 6,
'KtaKilat01': 7, 'KtaKilat00': 8, 'KtaKilat0PDP1': 9, 'DanaCepat11':
10, 'DanaCepat10': 11, 'DanaCepat1PDP1': 12, 'PinjamUang11': 13,
'PinjamUang10': 14, 'PinjamUang1PDP1': 15, 'KtaKilat11': 16,
'KtaKilat10': 17, 'KtaKilat1PDP1': 18, 'DanaCepat0PDP2': 19,
'DanaCepat0PDP3': 20, 'DanaCepat03': 21, 'PinjamUang0PDP2': 22,
'PinjamUang0PDP3': 23, 'PinjamUang03': 24, 'KtaKilat0PDP2': 25,
'KtaKilat0PDP3': 26, 'KtaKilat03': 27, 'DanaCepat1PDP2': 28,
'DanaCepat1PDP3': 29, 'PinjamUang1PDP2': 30, 'PinjamUang1PDP3': 31,
'KtaKilat1PDP2': 32, 'KtaKilat1PDP3': 33, 'DanaCepat13': 36,
'PinjamUang13': 37, 'KtaKilat13': 38, 'DanaCepat12': 39,
'PinjamUang12': 40, 'KtaKilat12': 41, 'DanaCepat02': 42,
'PinjamUang02': 43, 'KtaKilat02': 44, 'IKIDana01': 100, 'IKIDana00':
101, 'IKIDana0PDP1': 102, 'IKIDana11': 103, 'IKIDana10': 104,
'IKIDana1PDP1': 105, 'IKIDana0PDP2': 106, 'IKIDana0PDP3': 107,
'IKIDana03': 108, 'IKIDana1PDP2': 109, 'IKIDana1PDP3': 110,
'IKIDana13': 111, 'IKIDana12': 112, 'IKIDana02': 113}
current_page += 1
with db.atomic() as transaction:
while True:
bill_service = BillService()
ivr_action = bill_service.ivr_pages(page=current_page,
page_size=500, start_time=utc_datetime(str(start)),
end_time=utc_datetime(str(end)))
result = ivr_action['result']
page_size = int(ivr_action.get('page_size', 0))
total_page = int(ivr_action.get('total_page', 0))
insert_args = []
for a in result:
due_at = no_utc_datetime(a['due_at'])
days = (due_at.date() - now).days
if days == 2:
continue
if days > 0:
time = str(days)
else:
time = str(days).replace('-', 'PDP')
key = a['app_name'] + str(a['su']) + time
group = auto_ivr.get(key)
user_id = a['user_id']
try:
user_resp = AccountService().get_user(path_params={
'user_id': user_id})
if str(user_resp['mobile_no']) == str(a['user_mobile_no']):
numbers = a['user_mobile_no']
else:
numbers = a['user_mobile_no'] + ',' + user_resp.get(
'mobile_no')
except Exception:
logging.error('Request Account Service Error.')
numbers = a['user_mobile_no']
insert_args.append({'application_id': a['id'], 'numbers':
numbers, 'group': group, 'user_id': user_id})
AutoIVR.insert_many(insert_args).execute()
if current_page == 1:
IVRActionLog.create(total_page=total_page, proc_date=now,
page_size=page_size, current_page=current_page)
item = IVRActionLog.get(IVRActionLog.proc_date == now)
else:
item.current_page = current_page
item.page_size = page_size
item.total_page = total_page
item.save()
transaction.commit()
current_page += 1
if current_page > int(total_page):
break
if sys_config and sys_config.value:
try:
classfiy_dpd_ptp_apps()
except Exception as e:
logging.error('dpd1-3_test_error:%s' % str(e))
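# Worked example of the group-key scheme above (values for illustration):
#   app_name='DanaCepat', su=0, due in 1 day   -> key 'DanaCepat01'   -> group 1
#   app_name='KtaKilat',  su=1, 3 days overdue -> key 'KtaKilat1PDP3' -> group 33
# i.e. key = app_name + str(su) + (str(days) if days > 0
#                                  else str(days).replace('-', 'PDP'))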
def ivr_t2_test():
t2_groups = [39, 40, 41, 42, 43, 44]
ivr_test_proportion = 0.2
sys_config = SystemConfig.select().where(SystemConfig.key ==
'IVR_TEST_PROPORTION').first()
if sys_config and sys_config.value:
ivr_test_proportion = float(sys_config.value)
t2_ivrs = AutoIVR.select().where(AutoIVR.group << t2_groups, AutoIVR.
status == AutoIVRStatus.AVAILABLE.value)
t2_dict = defaultdict(list)
for ivr in t2_ivrs:
t2_dict[ivr.group].append(ivr.id)
test_ivr_ids = []
for group, ivr_ids in t2_dict.items():
number = ceil(len(ivr_ids) * ivr_test_proportion)
test_ivr_ids += ivr_ids[:number]
if not test_ivr_ids:
return
q = AutoIVR.update(status=AutoIVRStatus.SUCCESS.value).where(AutoIVR.
group << t2_groups, AutoIVR.id.not_in(test_ivr_ids)).execute()
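# Example of the sampling above: a t2 group holding 7 available ivrs with
# ivr_test_proportion = 0.2 keeps ceil(7 * 0.2) = 2 ids in the test set;
# every other t2 ivr is flagged SUCCESS so the auto call skips it.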
<mask token>
@action(MessageAction.APP_MERGE)
@deprecated(version='1.0', reason='This function will be removed soon')
def app_merge(payload, msg_id):
sql = """
select *
from (
select a.id as id
from dashboard.application as a
inner join repayment.bill2 as b on b.external_id = a.id
where not exists (
select 1
from battlefront.user_login_log as u
where u.created_at > '2018-08-16'
and u.user_id = a.user_id
)
and a.app = 'DanaCepat'
and a.is_first_loan = 1
and a.apply_at < '2018-08-23 20:50:00'
and b.overdue_days between 1 and 3
and b.status != 2) result
where not exists (
select 1
from bomber.application as a
where a.cycle = 1
and a.status = 4
and a.id = result.id
)
"""
cursor = readonly_db.get_cursor()
cursor.execute(sql)
new_data = cursor.fetchall()
cursor.close()
if new_data:
bomber = [103, 104]
for d in new_data:
app_id = {'id': d[0]}
application_overdue(app_id, None)
Application.update(status=ApplicationStatus.AB_TEST.value,
latest_bomber=random.choice(bomber), ptp_bomber=None).where(
Application.id == d[0]).execute()
logging.warning('add new app success')
ptp = date.today() - timedelta(days=1)
del_sql = (
"""
select a.id
from bomber.application as a
where exists(
select 1
from battlefront.user_login_log as u
where u.created_at > '2018-08-16'
and u.user_id = a.user_id
)
and a.cycle = 1
and a.status = 4
and (a.promised_date is null or a.promised_date < "%s")
"""
% ptp)
cursor = readonly_db.get_cursor()
cursor.execute(del_sql)
del_date = cursor.fetchall()
cursor.close()
if not del_date:
return
ids = list()
for d in del_date:
ids.append(d[0])
Application.update(status=ApplicationStatus.UNCLAIMED.value,
latest_bomber=None).where(Application.id << ids).execute()
@action(MessageAction.APPLICATION_BOMBER)
def application_overdue(payload, msg_id):
application_id = payload['id']
sub_bill_id = payload['bill_sub_id']
local_app = Application.select().where(Application.external_id ==
application_id).order_by(Application.finished_at).first()
if local_app and local_app.type != ApplicationType.CASH_LOAN_STAGING.value:
logging.info('application %s overdue, already exists', application_id)
add_contact(local_app)
return
if local_app and local_app.type == ApplicationType.CASH_LOAN_STAGING.value:
overdue_bill = OverdueBillR.select().where(OverdueBillR.sub_bill_id ==
sub_bill_id, OverdueBillR.external_id == application_id)
if overdue_bill.exists():
logging.info(
'application %s,sub_bill_id %s overdue, already exists' % (
application_id, sub_bill_id))
return
try:
sub_bill = BillService().sub_bill_list(bill_sub_ids=[sub_bill_id])
sub_bill = sub_bill[0]
except Exception:
logging.error(
'application %s overdue, get sub_bill info failed:Request To repayment Error'
, application_id)
return
if sub_bill['status'] == 2:
logging.error('application %s overdue, but bills already cleared',
application_id)
return
overdue_days = sub_bill.get('overdue_days', 0)
if overdue_days == 0:
logging.info('application {} no overdue'.format(str(application_id)))
return
gold_eye = GoldenEye().get('/applications/%s' % application_id)
if not gold_eye.ok:
logging.error('get application %s failed: Request to GoldenEye.',
application_id)
return
gold_app = gold_eye.json().get('data')
user_id = gold_app['user_id']
apply_history = Dashboard().get('/users/%s/apply-history' % user_id)
if not apply_history.ok:
logging.error(
'get user %s apply history failed: Request to Dashboard Failed.',
user_id)
return
history = apply_history.json().get('data')
loan_success_times = len([1 for i in history if i['status'] in [80,
    90, 100, 70] and i['id'] != gold_app['id']])
id = application_id
type = ApplicationType.CASH_LOAN.value
bill_id = sub_bill.get('bill_id')
amount = sub_bill.get('amount')
amount_net = sub_bill.get('amount_net')
interest_rate = sub_bill.get('interest_rate')
overdue_days = sub_bill.get('overdue_days')
origin_due_at = sub_bill.get('origin_due_at')
sub_overdue_bill = {'collection_id': id, 'bill_id': bill_id,
'sub_bill_id': sub_bill_id, 'periods': sub_bill.get('periods'),
'overdue_days': overdue_days, 'origin_due_at': origin_due_at,
'amount': amount, 'amount_net': amount_net, 'interest_rate':
interest_rate, 'external_id': application_id}
if sub_bill['category'] == ApplicationType.CASH_LOAN_STAGING.value:
if local_app and local_app.status != ApplicationStatus.REPAID.value:
sub_overdue_bill['collection_id'] = local_app.id
local_app.amount += amount
local_app.amount_net += amount_net
local_app.save()
new_overdue = OverdueBill.create(**sub_overdue_bill)
logging.info('application %s,sub_bill_id:%s overdue created' %
(application_id, sub_bill_id))
return
else:
id = idg()
type = ApplicationType.CASH_LOAN_STAGING.value
sub_overdue_bill['collection_id'] = id
ptp_info = BombingHistory.filter(BombingHistory.application == id).first()
promised_amount = ptp_info and ptp_info.promised_amount
promised_date = ptp_info and ptp_info.promised_date
application = Application.create(id=id, user_id=gold_app['user_id'],
user_mobile_no=gold_app['user_mobile_no'], user_name=gold_app[
'id_name'], app=gold_app['app'], device_no=gold_app['device_no'],
contact=json.dumps(gold_app.get('contact')), apply_at=gold_app.get(
'apply_date'), id_ektp=gold_app.get('id_ektp'), birth_date=
birth_dt_ktp(gold_app.get('id_ektp')), gender=gender_ktpnum(
gold_app.get('id_ektp')), profile_province=(gold_app.get(
'profile_province') or {}).get('name'), profile_city=(gold_app.get(
'profile_city') or {}).get('name'), profile_district=(gold_app.get(
'profile_district') or {}).get('name'), profile_residence_time=
gold_app.get('profile_residence_time'), profile_residence_type=
gold_app.get('profile_residence_type'), profile_address=gold_app.
get('profile_address'), profile_education=gold_app.get(
'profile_education'), profile_college=(gold_app.get(
'profile_college') or {}).get('name'), job_name=gold_app.get(
'job_name'), job_tel=gold_app.get('job_tel'), job_bpjs=gold_app.get
('job_bpjs'), job_user_email=gold_app.get('job_user_email'),
job_type=gold_app.get('job_type'), job_industry=gold_app.get(
'job_industry'), job_department=gold_app.get('job_department'),
job_province=(gold_app.get('job_province') or {}).get('name'),
job_city=(gold_app.get('job_city') or {}).get('name'), job_district
=(gold_app.get('job_district') or {}).get('name'), job_address=
gold_app.get('job_address'), amount=amount, amount_net=amount_net,
interest_rate=interest_rate, term=gold_app.get('term'),
origin_due_at=origin_due_at, overdue_days=overdue_days, repay_at=
sub_bill.get('repay_at'), loan_success_times=loan_success_times,
arrived_at=datetime.now(), follow_up_date=datetime.now(),
promised_amount=promised_amount, promised_date=promised_date,
external_id=application_id, type=type, bill_id=bill_id, dpd1_entry=
datetime.now())
new_overdue = OverdueBill.create(**sub_overdue_bill)
logging.info('overdue application %s created', application_id)
Escalation.create(application=id, type=EscalationType.AUTOMATIC.value,
status=ApprovalStatus.APPROVED.value, current_cycle=0, escalate_to=1)
add_contact(application)
<mask token>
def get_contact_from_mongo(number):
if not number:
return []
query = TotalContact.objects(src_number=number, source__in=TotalContact
.available()).order_by('source')
lst = []
for c in query:
relation = TotalContact.relationship(c.source)
if relation == -1:
continue
source = TotalContact.str_source(c.source)
if not source:
continue
lst.append({'related_number': c.dest_number, 'source': source,
'is_calc': c.is_calc, 'total_count': c.total_count,
'total_duration': c.total_duration, 'relation': relation,
'name': c.dest_name})
return lst
<mask token>
def check_key_not_none(payload, keys):
for key in keys:
if payload.get(key) is None:
logging.error('Missing args {}'.format(str(key)))
return False
return True
<mask token>
@action(MessageAction.BILL_RELIEF)
def bill_relief(payload, msg_id):
"""已废弃"""
bill = payload['head_bill']
repay_at = str_no_utc_datetime(bill['latest_repay_at'])
updated_row = Application.update(repay_at=repay_at).where(Application.
id == bill['external_id']).execute()
logging.info('application %s bill relief done', bill['external_id'])
return updated_row
<mask token>
@action(MessageAction.BOMBER_CALC_OVERDUE_DAYS_OVER)
def calc_overdue_days_over(payload, msg_id):
"""
Called by BOMBER_CALC_SUMMARY
:param payload:
:param msg_id:
:return:
"""
now = fn.NOW()
origin_diff_days = fn.DATEDIFF(now, Application.origin_due_at)
overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))
query = Application.update(overdue_days=overdue_days).where(Application
.status << [ApplicationStatus.PROCESSING.value, ApplicationStatus.
UNCLAIMED.value, ApplicationStatus.AB_TEST.value], Application.
overdue_days > 95, Application.type == ApplicationType.CASH_LOAN.value)
updated_rows_count = query.execute()
logging.info('calc overdue days done, updated count: %s',
updated_rows_count)
try:
calc_overdue_days_over_instalment()
except Exception as e:
logging.error('calc_overdue_days_over_instalment_error: %s' % str(e))
apps = Application.filter(Application.status << [ApplicationStatus.
UNCLAIMED.value, ApplicationStatus.PROCESSING.value,
ApplicationStatus.AB_TEST.value], Application.overdue_days > 95,
Application.promised_date.is_null(True) | (fn.DATE(Application.
promised_date) < datetime.today().date()))
ids = [i.id for i in apps]
for idx in range(0, len(ids), 100):
send_to_default_q(MessageAction.BOMBER_AUTOMATIC_ESCALATION, {
'application_list': ids[idx:idx + 100]})
send_to_default_q(MessageAction.UPDATE_OLD_LOAN_APPLICATION, {})
<mask token>
def calc_overdue_days_instalment():
now = fn.NOW()
origin_diff_days = fn.DATEDIFF(now, OverdueBill.origin_due_at)
overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))
sub_bill_status_list = [ApplicationStatus.PROCESSING.value,
ApplicationStatus.UNCLAIMED.value, ApplicationStatus.AB_TEST.value]
today_now_time = datetime.now()
month_first_day = today_now_time.replace(day=1, hour=1, minute=30,
second=0, microsecond=0)
for status in sub_bill_status_list:
query = OverdueBill.update(overdue_days=overdue_days).where(
OverdueBill.status == status, OverdueBill.overdue_days <= 95)
updated_rows_count = query.execute()
logging.info('calc_overdue_days_instalment done,count:%s,status:%s' %
(updated_rows_count, status))
overdue_bills = OverdueBill.select(OverdueBill.status, OverdueBill.
created_at, OverdueBill.collection_id, OverdueBill.overdue_days
).join(Application, JOIN_LEFT_OUTER, on=OverdueBill.
collection_id == Application.id).where(Application.status ==
status, Application.type == ApplicationType.CASH_LOAN_STAGING.value
)
app_update = {}
for ob in overdue_bills:
if (ob.status == ApplicationStatus.REPAID.value and ob.
created_at < month_first_day):
continue
if ob.collection_id not in app_update:
app_update[ob.collection_id] = ob.overdue_days
else:
ob_days = max(app_update[ob.collection_id], ob.overdue_days)
app_update[ob.collection_id] = ob_days
for aid, a_days in app_update.items():
q = Application.update(overdue_days=a_days).where(Application.
id == aid).execute()
logging.info('update instalment application done')
@action(MessageAction.BOMBER_AUTOMATIC_ESCALATION)
def automatic_escalation(payload, msg_id):
app_ids = payload.get('application_list', [])
if not app_ids:
return
apps = Application.select().where(Application.id.in_(app_ids),
Application.status != ApplicationStatus.REPAID.value)
for a in apps:
new_cycle = application_entry_different_calculations(a)
if a.overdue_days < 90:
logging.info(
'automatic_escalation_bomber_app_id:{},new_cycle:{},cycle:{},overdue_days:{}'
.format(a.id, new_cycle, a.cycle, a.overdue_days))
if new_cycle > a.cycle:
with db.atomic():
if a.latest_bomber_id or a.cycle in (Cycle.C1A.value, Cycle
.C1B.value):
bomber_id = (a.latest_bomber_id if a.latest_bomber_id else
a.cycle)
DispatchAppHistory.update(out_at=datetime.now(),
out_overdue_days=a.overdue_days).where(
DispatchAppHistory.application == a.id,
DispatchAppHistory.bomber_id == bomber_id).execute()
Escalation.create(application=a.id, type=EscalationType.
AUTOMATIC.value, status=ApprovalStatus.APPROVED.value,
current_cycle=a.cycle, escalate_to=new_cycle,
current_bomber_id=a.latest_bomber)
dis_app_update = DispatchApp.update(status=DisAppStatus.
ABNORMAL.value).where(DispatchApp.application == a.id)
dis_app_update.execute()
a.cycle = new_cycle
a.last_bomber = a.latest_bomber
a.status = ApplicationStatus.UNCLAIMED.value
a.latest_bomber = None
a.ptp_bomber = None
a.latest_call = None
a.called_times = 0
if new_cycle == Cycle.C1B.value:
a.C1B_entry = datetime.now()
elif new_cycle == Cycle.C2.value:
a.C2_entry = datetime.now()
elif new_cycle == Cycle.C3.value:
a.C3_entry = datetime.now()
a.save()
logging.info('automatic escalation done')
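# Hypothetical sketch of application_entry_different_calculations (its
# definition is masked in this record). The day thresholds mirror the
# `cycle` dict used in bomber_dispatch_app below; treat the exact bounds
# as assumptions, not the production rule.
def _entry_cycle_sketch(application):
    bounds = {Cycle.C1A.value: (1, 10), Cycle.C1B.value: (11, 30),
              Cycle.C2.value: (31, 60), Cycle.C3.value: (61, 90)}
    for new_cycle, (low, high) in bounds.items():
        if low <= application.overdue_days <= high:
            return new_cycle
    return Cycle.C3.value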
<mask token>
@action(MessageAction.BOMBER_CALC_SUMMARY)
def cron_summary(payload, msg_id):
"""已废弃"""
employees = Bomber.select(Bomber, Role).join(Role)
summary = {i.id: {'cycle': i.role.cycle, 'claimed': 0, 'completed': 0,
'cleared': 0, 'escalated': 0, 'transferred': 0, 'promised': 0,
'amount_recovered': Decimal(0), 'calls_made': 0, 'calls_connected':
0, 'sms_sent': 0} for i in employees}
now_date = date.today()
cal_date = now_date - timedelta(days=1)
claimed = Application.select(Application.latest_bomber, fn.COUNT(
Application.id).alias('claimed')).where(fn.DATE(Application.
claimed_at) == cal_date, Application.status << [ApplicationStatus.
PROCESSING.value, ApplicationStatus.REPAID.value], Application.
latest_bomber.is_null(False)).group_by(Application.latest_bomber)
cleared = Application.select(Application.latest_bomber, fn.COUNT(
Application.id).alias('cleared')).where(fn.DATE(Application.
finished_at) == cal_date, Application.status == ApplicationStatus.
REPAID.value, Application.latest_bomber.is_null(False)).group_by(
Application.latest_bomber)
completed = Application.select(Application.latest_bomber, fn.COUNT(
Application.id).alias('completed')).where(Application.
latest_bombing_time.is_null(False), fn.DATE(Application.
latest_bombing_time) == cal_date, Application.latest_bomber.is_null
(False)).group_by(Application.latest_bomber)
escalated = Escalation.select(Escalation.current_bomber, fn.COUNT(
Escalation.id).alias('escalated')).where(fn.DATE(Escalation.
created_at) == cal_date, Escalation.type == EscalationType.
AUTOMATIC.value, Escalation.current_bomber.is_null(False),
Escalation.status == ApprovalStatus.APPROVED.value).group_by(Escalation
.current_bomber)
transferred = Transfer.select(Transfer.operator, fn.COUNT(Transfer.id).
alias('transferred')).where(fn.DATE(Transfer.reviewed_at) ==
cal_date, Transfer.status == ApprovalStatus.APPROVED.value).group_by(
Transfer.operator)
promised = BombingHistory.select(BombingHistory.bomber, fn.COUNT(
BombingHistory.id).alias('promised')).where(fn.DATE(BombingHistory.
created_at) == cal_date, BombingHistory.result == BombingResult.
HAS_PROGRESS.value).group_by(BombingHistory.bomber)
amount_recovered = RepaymentLog.select(RepaymentLog.current_bomber, fn.
SUM(RepaymentLog.principal_part).alias('principal_part'), fn.SUM(
RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(
RepaymentLog.repay_at) == cal_date, RepaymentLog.is_bombed == True,
RepaymentLog.current_bomber.is_null(False)).group_by(RepaymentLog.
current_bomber)
calls_made = CallLog.select(CallLog.user_id, fn.COUNT(CallLog.record_id
).alias('calls_made')).where(fn.DATE(CallLog.time_start) ==
cal_date, CallLog.system_type == '1').group_by(CallLog.user_id)
calls_connected = CallLog.select(CallLog.user_id, fn.COUNT(CallLog.
record_id).alias('calls_connected')).where(fn.DATE(CallLog.
time_start) == cal_date, CallLog.duration > 10, CallLog.system_type ==
'1').group_by(CallLog.user_id)
sms_sent = ConnectHistory.select(ConnectHistory.operator, fn.COUNT(
ConnectHistory.id).alias('sms_sent')).where(ConnectHistory.type.in_
(ConnectType.sms()), ConnectHistory.created_at >= cal_date,
ConnectHistory.created_at < now_date).group_by(ConnectHistory.operator)
for i in claimed:
summary[i.latest_bomber_id]['claimed'] += i.claimed
for i in completed:
summary[i.latest_bomber_id]['completed'] += i.completed
for i in cleared:
summary[i.latest_bomber_id]['cleared'] += i.cleared
for i in escalated:
summary[i.current_bomber_id]['escalated'] += i.escalated
for i in transferred:
summary[i.operator_id]['transferred'] += i.transferred
for i in promised:
summary[i.bomber_id]['promised'] += i.promised
for i in amount_recovered:
amount_recovered = i.principal_part + i.late_fee_part
summary[i.current_bomber_id]['amount_recovered'] += amount_recovered
for i in calls_made:
summary[int(i.user_id)]['calls_made'] += i.calls_made
for i in calls_connected:
summary[int(i.user_id)]['calls_connected'] += i.calls_connected
for i in sms_sent:
summary[i.operator_id]['sms_sent'] += i.sms_sent
insert_args = []
for bomber_id, data in summary.items():
insert_args.append({'bomber': bomber_id, 'cycle': data['cycle'],
'claimed': data['claimed'], 'completed': data['completed'],
'cleared': data['cleared'], 'escalated': data['escalated'],
'transferred': data['transferred'], 'promised': data['promised'
], 'amount_recovered': data['amount_recovered'], 'calls_made':
data['calls_made'], 'calls_connected': data['calls_connected'],
'sms_sent': data['sms_sent'], 'date': cal_date})
if insert_args:
Summary.insert_many(insert_args).execute()
cycle_args = []
escalated_in = Escalation.select(Escalation.escalate_to, fn.COUNT(
Escalation.id).alias('escalated_in')).where(Escalation.status ==
ApprovalStatus.APPROVED.value, fn.DATE(Escalation.created_at) ==
cal_date).group_by(Escalation.escalate_to)
for i in escalated_in:
cycle_args.append({'cycle': i.escalate_to, 'escalated_in': i.
escalated_in, 'date': cal_date})
amount_recovered_total = RepaymentLog.select(RepaymentLog.cycle, fn.SUM
(RepaymentLog.principal_part).alias('principal_part'), fn.SUM(
RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(
RepaymentLog.repay_at) == cal_date).group_by(RepaymentLog.cycle)
for i in amount_recovered_total:
amount_recovered_total = i.principal_part + i.late_fee_part
cycle_args.append({'cycle': i.cycle, 'amount_recovered_total':
amount_recovered_total, 'date': cal_date})
if cycle_args:
Summary.insert_many(cycle_args).execute()
logging.info('cal summary done')
send_to_default_q(MessageAction.BOMBER_CALC_OVERDUE_DAYS, {})
@action(MessageAction.BOMBER_CALC_SUMMARY2)
def cron_summary2(payload, msg_id):
"""已废弃,定时任务还在执行,具体情况待确定"""
cal_date = date.today() - timedelta(days=1)
employees = Bomber.select(Bomber, Role).join(Role)
auto_call_actions = AutoCallActions.select(AutoCallActions.bomber,
AutoCallActions.result, fn.COUNT(AutoCallActions.id).alias('count')
).where(fn.DATE(AutoCallActions.created_at) == cal_date)
amount_recovered = RepaymentLog.select(RepaymentLog.current_bomber, fn.
SUM(RepaymentLog.principal_part).alias('principal_part'), fn.SUM(
RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(
RepaymentLog.repay_at) == cal_date, RepaymentLog.current_bomber.
is_null(False), RepaymentLog.is_bombed == True)
cleared = Application.select(Application.latest_bomber, fn.COUNT(
Application.id).alias('cleared')).where(fn.DATE(Application.
finished_at) == cal_date, Application.status == ApplicationStatus.
REPAID.value, Application.latest_bomber.is_null(False))
auto_call_actions = auto_call_actions.group_by(AutoCallActions.bomber,
AutoCallActions.result)
amount_recovered = amount_recovered.group_by(RepaymentLog.current_bomber)
cleared = cleared.group_by(Application.latest_bomber)
summary = {e.id: {'cycle': e.role.cycle, 'answered_calls': 0, 'ptp': 0,
'follow_up': 0, 'not_useful': 0, 'cleared': 0, 'amount_recovered':
0} for e in employees}
for a in auto_call_actions:
summary[a.bomber_id]['answered_calls'] += a.count
if a.result == AutoCallResult.PTP.value:
summary[a.bomber_id]['ptp'] += a.count
if a.result == AutoCallResult.FOLLOW_UP.value:
summary[a.bomber_id]['follow_up'] += a.count
if a.result == AutoCallResult.NOT_USEFUL.value:
summary[a.bomber_id]['not_useful'] += a.count
for i in amount_recovered:
amount_recovered = i.principal_part + i.late_fee_part
summary[i.current_bomber_id]['amount_recovered'] += amount_recovered
for i in cleared:
summary[i.latest_bomber_id]['cleared'] += i.cleared
insert_args = []
for bomber_id, data in summary.items():
insert_args.append({'bomber': bomber_id, 'cycle': data['cycle'],
'answered_calls': data['answered_calls'], 'ptp': data['ptp'],
'follow_up': data['follow_up'], 'not_useful': data['not_useful'
], 'cleared': data['cleared'], 'amount_recovered': str(data[
'amount_recovered']), 'date': cal_date})
if insert_args:
Summary2.insert_many(insert_args).execute()
<mask token>
@action(MessageAction.BOMBER_DISCOUNT_APPROVED)
def bomber_discount_approved(payload, msg_id):
app_id = payload['id']
msg_type = payload['msg_type']
discount_to = payload['discount_to']
effective_to = payload['effective_to']
application = Application.filter(Application.id == app_id).first()
if not application:
logging.error(
'discount approved msg send failed application %s not found',
app_id)
return
template = Template.select(Template.text, Template.app).where(Template.
type == ConnectType.AUTO_SMS.value, Template.id << Template.
get_auto_sms_tpl(msg_type), Template.app == application.app).first()
if not template:
logging.error('discount approved msg send failed template %s not found'
, msg_type)
return
promised_date = None
if application.promised_date:
promised_date = application.promised_date.strftime('%d-%m-%Y')
tpl_data = {'user_name': application.user_name, 'due_days': application
.overdue_days, 'app_name': application.app, 'phone': application.
user_mobile_no, 'cs_number': cs_number_conf.get(application.app,
'02150202889'), 'promised_date': promised_date, 'discount_to':
discount_to, 'effective_to': effective_to}
content = template.text.format(**tpl_data)
data_list = [{'receiver': '62' + application.user_mobile_no, 'content':
content, 'title': ''}]
send_sms(data_list, msg_type, application.app)
<mask token>
@action(MessageAction.BOMBER_AUTO_CALL_LIST)
def bomber_auto_call_list(payload, msg_id):
with db.atomic():
bomber_dispatch_app()
dispatch_instalment_app()
dis_apps = DispatchApp.select(DispatchApp.application).where(
DispatchApp.status == DisAppStatus.NORMAL.value)
c1_apps = Application.select(Application.id, Application.cycle,
Application.follow_up_date, Application.called_times).where(Application
.status.not_in([ApplicationStatus.REPAID.value, ApplicationStatus.
AB_TEST.value]), Application.cycle == Cycle.C1A.value, Application.
is_rejected == False, Application.promised_date.is_null(True) | (fn
.DATE(Application.promised_date) < datetime.today().date())).order_by(
Application.overdue_days, Application.apply_at)
dis_apps_ids = [da.application_id for da in dis_apps]
insert_args = []
for a in c1_apps:
if a.id in dis_apps_ids:
continue
insert_args.append({'application': a.id, 'cycle': a.cycle,
'follow_up_date': a.follow_up_date, 'called_times': 1 if a.
called_times else 0, 'description': 'init'})
if not insert_args:
logging.error('no applications need auto call')
with db.atomic():
AutoCallList.delete().execute()
for idx in range(0, len(insert_args), 100):
AutoCallList.insert_many(insert_args[idx:idx + 100]).execute()
for idx in range(0, len(insert_args), 100):
application_list = [i['application'] for i in insert_args[idx:idx +
100]]
send_to_default_q(MessageAction.BOMBER_AUTO_CALL_CONTACT, {
'application_list': application_list})
logging.info('bomber generate auto call list finished')
send_to_default_q(MessageAction.UPDATE_BOMBER_FOR_SPECIAL, {})
class ChangeBomberTool(object):
@staticmethod
def in_record(bomber_id, ids, bd):
subquery = Application.select(Application.amount, fn.NOW().alias(
'created_at'), fn.NOW().alias('updated_at'), Application.id.
alias('application_id'), R(str(bomber_id)).alias('bomber_id'),
fn.NOW().alias('entry_at'), R('null').alias('partner_id'), SQL(
'DATE_ADD(CURDATE(),INTERVAL 14 DAY)').alias(
'expected_out_time'), Application.overdue_days.alias(
'entry_overdue_days')).where(Application.status !=
ApplicationStatus.REPAID.value, Application.id << ids)
Application.update(latest_bomber=bomber_id).where(Application.id.
in_(ids)).execute()
application_list = list(subquery)
for idx in range(0, len(application_list), 1000):
applications = application_list[idx:idx + 1000]
insert_args = list(map(partial(lambda_result, dct=bd),
applications))
DispatchAppHistory.insert_many(insert_args).execute()
@staticmethod
def out_record(a, bd):
_id = str(a.id)
DispatchAppHistory.update(out_at=datetime.now(), out_overdue_days=a
.overdue_days, out_principal_pending=a.amount - Decimal(bd[_id]
.get('principal_paid')), out_late_fee_pending=bd[_id].get(
'late_fee') - bd[_id].get('late_fee_paid')).where(
DispatchAppHistory.application == a.id, DispatchAppHistory.
bomber_id == a.latest_bomber_id).execute()
a.last_bomber = a.latest_bomber
a.latest_bomber = None
a.ptp_bomber = None
a.latest_call = None
a.called_times = 0
a.save()
@staticmethod
def classify(l, b):
if len(l) == 1:
return l[0]
_l = filter(lambda x: x['bomber'] != b, l)
return min(_l, key=lambda x: len(x['ids']))
<mask token>
def bomber_dispatch_app():
try:
c1a_dispatch_app()
except Exception as e:
logging.error('c1a_dispatch_app error:%s' % str(e))
cycle = {1: 10, 2: 30, 3: 60, 4: 90}
apps = Application.select().where(fn.DATE(Application.C2_entry) == date
.today(), Application.type == ApplicationType.CASH_LOAN.value)
partners = Partner.select().where(Partner.status == PartnerStatus.
NORMAL.value, Partner.cycle == Cycle.C2.value)
apps_ids = [a.id for a in apps]
dispatch_inserts = []
start_index = 0
apps_length = len(apps_ids)
logging.warning('apps length %s' % str(apps_length))
for p in partners:
bombers = Bomber.select().where(Bomber.partner == p.id, Bomber.
status != BomberStatus.OUTER_LEADER.value, Bomber.is_del == 0)
gen = CycleIter([b.id for b in bombers])
existing_list = []
end_index = start_index + int(apps_length * p.app_percentage)
logging.info('partner length %s' % str(end_index))
if not apps_ids[start_index:end_index]:
continue
bills = BillService().bill_list(application_ids=apps_ids[
start_index:end_index])
bill_dict = {bill['application_id']: bill for bill in bills}
for a_id in apps_ids[start_index:end_index]:
bomber = average_gen(gen, existing_list)
q = DispatchApp.delete().where(DispatchApp.application == a_id
).execute()
dispatch_inserts.append({'application': a_id, 'bomber': bomber,
'partner': p.id})
application = Application.select().where(Application.id == a_id
).first()
application.latest_bomber = bomber
application.status = ApplicationStatus.AB_TEST.value
application.ptp_bomber = None
application.save()
day_next_cycle = cycle.get(application.cycle
) - application.overdue_days
DispatchAppHistory.create(application=a_id, partner_id=p.id,
bomber_id=bomber, entry_at=datetime.now(),
entry_overdue_days=application.overdue_days,
entry_principal_pending=application.amount - Decimal(
bill_dict[a_id].get('principal_paid')),
entry_late_fee_pending=Decimal(bill_dict[a_id].get(
'late_fee')) - Decimal(bill_dict[a_id].get('late_fee_paid')
), expected_out_time=date.today() + timedelta(days=
day_next_cycle))
start_index = end_index
with db.atomic():
for idx in range(0, len(dispatch_inserts), 100):
DispatchApp.insert_many(dispatch_inserts[idx:idx + 100]).execute()
config = SystemConfig.prefetch(SCI.AB_TEST_C2)
c2_bomber = config.get(SCI.AB_TEST_C2, SCI.AB_TEST_C2.default_value)
c2_bomber = get_cash_bomber(c2_bomber, Cycle.C2.value)
c2 = apps_ids[start_index:]
if c2:
bills = BillService().bill_list(application_ids=c2)
else:
bills = []
bill_dict = {bill['application_id']: bill for bill in bills}
logging.info('c2 AB_test length: %s' % str(c2))
gen = CycleIter(c2_bomber)
existing_list = []
for c in c2:
bomber = average_gen(gen, existing_list)
application = Application.filter(Application.id == c).first()
application.status = ApplicationStatus.AB_TEST.value
application.latest_bomber = bomber
application.ptp_bomber = None
application.save()
day_next_cycle = 46 - application.overdue_days
DispatchAppHistory.create(application=c, bomber_id=bomber, entry_at
=datetime.now(), entry_overdue_days=application.overdue_days,
entry_principal_pending=application.amount - bill_dict[c].get(
'principal_paid', 0), entry_late_fee_pending=bill_dict[c].get(
'late_fee', 0) - bill_dict[c].get('late_fee_paid', 0),
expected_out_time=date.today() + timedelta(days=day_next_cycle))
ab_test_other()
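# Hypothetical sketches of CycleIter and average_gen as used above (their
# definitions are masked in this record): CycleIter round-robins over the
# bomber ids and average_gen draws the next id while recording it.
import itertools


def _cycle_iter_sketch(ids):
    return itertools.cycle(ids)


def _average_gen_sketch(gen, existing_list):
    bomber_id = next(gen)
    existing_list.append(bomber_id)
    return bomber_id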
def c1a_dispatch_app():
today = datetime.today().date()
tomorrow = today + timedelta(days=1)
c1a_apps = Application.select().where(Application.status << [
ApplicationStatus.UNCLAIMED.value, ApplicationStatus.PROCESSING.
value], Application.dpd1_entry >= today, Application.dpd1_entry <
tomorrow, Application.type == ApplicationType.CASH_LOAN.value)
all_aids = [a.id for a in c1a_apps]
partners = Partner.select().where(Partner.status == PartnerStatus.
NORMAL.value, Partner.cycle == Cycle.C1A.value)
end = 0
for p in partners:
bombers = Bomber.select().where(Bomber.partner == p.id, Bomber.
is_del == 0)
start = end
end += int(len(all_aids) * p.app_percentage)
aids = all_aids[start:end]
bids = [b.id for b in bombers]
if not bids or not aids:
continue
average_number = get_average_number(len(aids), len(bids))
p_end = 0
for i, bid in enumerate(bids):
p_start = p_end
p_end += average_number[i]
b_aids = aids[p_start:p_end]
with db.atomic():
q = Application.update(latest_bomber=bid, status=
ApplicationStatus.AB_TEST.value).where(Application.id <<
b_aids).execute()
params = {'cycle': Cycle.C1A.value, 'dest_partner_id': p.id,
'application_ids': b_aids, 'dest_bomber_id': bid}
new_in_record(**params)
try:
dispatch_inserts = []
for aid in b_aids:
dispatch_inserts.append({'application': aid, 'bomber':
bid, 'partner': p.id, 'status': DisAppStatus.NORMAL
.value})
if dispatch_inserts:
q = DispatchApp.insert_many(dispatch_inserts).execute()
except Exception as e:
logging.error('c1a dispatch: write to dispatch_app error:%s' % str(e))
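# Hypothetical sketch of get_average_number (definition masked): split
# `total` applications into `count` near-equal shares, larger shares
# first, so that sum(shares) == total.
def _get_average_number_sketch(total, count):
    base, extra = divmod(total, count)
    return [base + 1 if i < extra else base for i in range(count)]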
<mask token>
def get_cash_bomber(bids, cycle):
cash_bombers = Bomber.select().where(Bomber.id << bids, Bomber.is_del ==
0, Bomber.instalment != cycle)
cash_bids = [b.id for b in cash_bombers]
return cash_bids
<mask token>
def out_and_in_record_instalment(**kwargs):
if not kwargs.get('application_ids'):
return
out_q = DispatchAppHistory.update(out_at=fn.NOW()).where(
DispatchAppHistory.application << kwargs['application_ids'],
DispatchAppHistory.out_at.is_null(True)).execute()
cycle_period = {1: '10', 2: '30', 3: '60', 4: '90'}
period = cycle_period.get(kwargs['cycle'], '90 + t1.overdue_days')
kwargs['dest_partner_id'] = kwargs.get('dest_partner_id') or 'null'
subquery = Application.select(Application.amount, fn.NOW().alias(
'created_at'), fn.NOW().alias('updated_at'), Application.id.alias(
'application_id'), R(str(kwargs['dest_bomber_id'])).alias(
'bomber_id'), fn.NOW().alias('entry_at'), Application.overdue_days.
alias('entry_overdue_days'), R(str(kwargs['dest_partner_id'])).
alias('partner_id'), SQL(
'DATE_ADD(CURDATE(),INTERVAL (%s - t1.overdue_days) DAY)' % period)
.alias('expected_out_time')).where(Application.status !=
ApplicationStatus.REPAID.value, Application.id << kwargs[
'application_ids'])
application_list = list(subquery)
for idx in range(0, len(application_list), 50):
applications = application_list[idx:idx + 50]
app_ids = [i.application_id for i in applications]
overdue_bills = OverdueBill.select().where(OverdueBill.
collection_id << app_ids)
sub_bill_ids = [ob.sub_bill_id for ob in overdue_bills]
bill_list = BillService().sub_bill_list(bill_sub_ids=sub_bill_ids)
insert_args = lambad_instalment_result(bill_list, applications)
if not insert_args:
continue
DispatchAppHistory.insert_many(insert_args).execute()
<mask token>
def c1b_dispatch_in_record(**kwargs):
app_ids = kwargs.get('apps')
partner_id = kwargs.get('partner_id', 'null')
bill_dict = kwargs.get('bill_dict')
period = kwargs.get('period')
bomber_id = kwargs.get('bomber_id')
if not all([app_ids, partner_id, bill_dict, period]):
return False
bill_dict = {str(k): v for k, v in bill_dict.items()}
subquery = Application.select(Application.amount, fn.NOW().alias(
'created_at'), fn.NOW().alias('updated_at'), Application.id.alias(
'application_id'), R(str(bomber_id)).alias('bomber_id'), fn.NOW().
alias('entry_at'), Application.overdue_days.alias(
'entry_overdue_days'), R(str(partner_id)).alias('partner_id'), SQL(
'DATE_ADD(CURDATE(),INTERVAL (%s - t1.overdue_days) DAY)' % period)
.alias('expected_out_time')).where(Application.id << app_ids)
application_list = list(subquery)
for idx in range(0, len(application_list), 1000):
applications = application_list[idx:idx + 1000]
insert_args = list(map(partial(lambda_result, dct=bill_dict),
applications))
DispatchAppHistory.insert_many(insert_args).execute()
<mask token>
def check_call_history(application):
app_create_at = application.created_at + timedelta(days=4)
if datetime.today().date() > app_create_at.date():
call_actions = CallActions.select().where(CallActions.type == 0,
CallActions.application == application.id, CallActions.
created_at > datetime.now() - timedelta(days=5))
for call in call_actions:
if call.phone_status == PhoneStatus.CONNECTED.value:
return True
return False
return True
@action(MessageAction.BOMBER_SCAVENGER)
def scavenger(payload, msg_id):
scavenger_time = -60
scavenger = SystemConfig.select().where(SystemConfig.key ==
'SCAVENGER_TIME').first()
if scavenger and scavenger.value.isdigit():
scavenger_time = -int(scavenger.value)
update_auto_call_list = AutoCallList.update(status=AutoListStatus.
PENDING.value, description='scavenger').where(AutoCallList.status ==
AutoListStatus.PROCESSING.value, AutoCallList.updated_at < datetime
.now() + timedelta(minutes=scavenger_time))
count = update_auto_call_list.execute()
logging.info('scavenger processed %s application', count)
mail_box_scavenger_time = -30
mail_box_scavenger = SystemConfig.select().where(SystemConfig.key ==
'MAIL_BOX_SCAVENGER_TIME').first()
if mail_box_scavenger and mail_box_scavenger.value.isdigit():
mail_box_scavenger_time = -int(mail_box_scavenger.value)
update_mail_box_call_list = AutoCallList.update(status=AutoListStatus.
PENDING.value).where(AutoCallList.status == AutoListStatus.MAILBOX.
value, AutoCallList.updated_at < datetime.now() + timedelta(minutes
=mail_box_scavenger_time))
mail_box_count = update_mail_box_call_list.execute()
logging.info('scavenger update mail box %s', mail_box_count)
update_auto_ivr = AutoIVR.update(status=AutoIVRStatus.AVAILABLE.value
).where(AutoIVR.status == AutoIVRStatus.PROCESSING.value, AutoIVR.
updated_at < datetime.now() + timedelta(minutes=-30))
ivr_result = update_auto_ivr.execute()
logging.info('scavenger update %s ivr' % ivr_result)
<mask token>
@action(MessageAction.REPORT_BOMBER_COLLECTION)
def report_bomber_collection(payload, msg_id):
start_date = ReportCollection.select(fn.MAX(ReportCollection.apply_date)
).scalar()
now = datetime.now()
if start_date and str(start_date) == str(now)[:10]:
return
end_date = str(now + timedelta(days=1))[:10]
start_date = str(now)[:10]
dct = dict(zip(CycleList.sql_values(), CycleList.table_values()))
all_overdue_loan_sql1 = """
SELECT ba.cycle, COUNT(ba.id)
FROM bomber.auto_call_list ba
GROUP BY 1;
"""
s_data1 = readonly_db.execute_sql(all_overdue_loan_sql1).fetchall()
d1 = OperatedDict(s_data1)
all_overdue_loan_sql2 = """
SELECT ba.cycle, COUNT(ba.id)
FROM bomber.auto_call_list ba
WHERE DATE(ba.follow_up_date) > CURDATE()
AND ba.called_counts = 0
GROUP BY 1;
"""
s_data2 = readonly_db.execute_sql(all_overdue_loan_sql2).fetchall()
d2 = OperatedDict(s_data2)
overdue_loans_entered_into_predict_call_system_sql = """
SELECT ba.cycle, COUNT(ba.id)
FROM bomber.auto_call_list ba
WHERE ba.called_counts >= 1
GROUP BY 1;
"""
s_data3 = readonly_db.execute_sql(
overdue_loans_entered_into_predict_call_system_sql).fetchall()
d3 = OperatedDict(s_data3)
loans_completed_sql = """
SELECT ba.cycle, COUNT(DISTINCT ba.application_id)
FROM bomber.auto_call_actions ba
WHERE DATE(ba.created_at) = CURDATE()
GROUP BY 1;
"""
s_data4 = readonly_db.execute_sql(loans_completed_sql).fetchall()
d4 = OperatedDict(s_data4)
connected_calls_automatic_sql = """
SELECT ba.cycle, COUNT(ba.application_id)
FROM bomber.auto_call_actions ba
WHERE DATE(ba.created_at) = CURDATE()
GROUP BY 1;
"""
s_data5 = readonly_db.execute_sql(connected_calls_automatic_sql).fetchall()
d5 = OperatedDict(s_data5)
connected_calls_manual_sql = """
SELECT bb.cycle, COUNT(bb.id)
FROM bomber.bombing_history bb
WHERE DATE(bb.created_at) = curdate()
AND (bb.bomber_id < 150 OR bb.bomber_id > 200)
GROUP BY bb.cycle;
"""
s_data6 = readonly_db.execute_sql(connected_calls_manual_sql).fetchall()
d6 = OperatedDict(s_data6)
logging.info('Directly get data from database successfully.')
c1 = d1 - d2
c2 = d3
c3 = c2 / c1
c4 = d4
c5 = c4 / c2
c6 = d5
c7 = c6 / c4
c8 = d6
c9 = OperatedDict(get_agent())
c10 = (c6 + c8) / c9
try:
c11 = average_call_duration_team(start_date, end_date)
except AttributeError:
c11 = {}
lst = []
for i in range(1, 5):
lst.append({'apply_date': start_date, 'cycle': dct[i],
'all_overdue_loan': c1.get(i, 0),
'overdue_loans_entered_into_predict_call_system': c2.get(i, 0),
'of_overdue_loans_entered_into_predict_call_system': round(c3.
get(i, 0) * 100, 1), 'loans_completed': c4.get(i, 0),
'of_completed_loans_in_predict_call_system': round(c5.get(i, 0) *
100, 1), 'connected_calls_automatic': c6.get(i, 0),
'connected_calls_automatic_completed_loans': round(c7.get(i, 0),
1), 'connected_calls_manual': c8.get(i, 0), 'agent': c9.get(i,
0), 'average_calls_agent': round(c10.get(i, 0), 1),
'average_call_duration_team': round(c11.get(i, 0), 1)})
ReportCollection.insert_many(lst).execute()
logging.info('report_bomber_collection:Done!')
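# OperatedDict (imported elsewhere) is assumed to be a dict supporting
# element-wise arithmetic keyed by cycle; a rough sketch of the semantics
# that the +, -, / expressions above rely on:
class _OperatedDictSketch(dict):
    def _merge(self, other, op):
        keys = set(self) | set(other)
        return _OperatedDictSketch(
            {k: op(self.get(k, 0), other.get(k, 0)) for k in keys})

    def __add__(self, other):
        return self._merge(other, lambda a, b: a + b)

    def __sub__(self, other):
        return self._merge(other, lambda a, b: a - b)

    def __truediv__(self, other):
        return self._merge(other, lambda a, b: a / b if b else 0)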
<mask token>
def new_out_record(**kwargs):
if not kwargs['application_ids']:
return
DispatchAppHistory.update(out_at=fn.NOW()).where(DispatchAppHistory.
bomber_id == kwargs['src_bomber_id'], DispatchAppHistory.
application << kwargs['application_ids'], DispatchAppHistory.out_at
.is_null(True)).execute()
if kwargs.get('month_dispatch'):
return
try:
Application.update(ptp_bomber=None).where(Application.id << kwargs[
'application_ids']).execute()
except Exception as e:
logging.error('new_out_record error:aids:%s,error:%s' % (kwargs[
'application_ids'], str(e)))
<mask token>
def end_old_application(old_app, paid=False):
if paid:
if old_app.status == OldLoanStatus.WAITING.value:
old_app.status = OldLoanStatus.PAID.value
old_app.save()
return
if old_app.status == OldLoanStatus.PROCESSING.value:
old_app.status = OldLoanStatus.PAID.value
old_app.save()
return old_app.application_id
end_date = old_app.end_date
now = datetime.now()
if now >= max(end_date, old_app.promised_date or now):
old_app.status = OldLoanStatus.FINISHED.value
old_app.save()
return old_app.application_id
<mask token>
def get_before_bomber(date_time):
begin_time = str(date_time - timedelta(days=7))
end_time = str(date_time)
old_sql = (
"""
select
sum(principal_pending+late_fee_pending+interest_pending) as amount
from
bill_java.overdue bb
where
created_at>'%s'
and created_at<date_add('%s',interval 1 day)
and overdue_days in (2,3)
"""
% (begin_time, begin_time))
old_data = run_one_sql(old_sql)
new_sql = (
"""
select
sum(principal_pending+late_fee_pending+interest_pending) as amount
from
bill_java.overdue bb
where
created_at> '%s'
and created_at<'%s'
and overdue_days=1;
"""
% (begin_time, end_time))
new_data = run_one_sql(new_sql)
dpd4_sql = (
"""
select
sum(principal_pending+late_fee_pending+interest_pending) as amount
from
bill_java.overdue bb
where
created_at>date_add('%s',interval 1 day)
and created_at< date_add('%s',interval 1 day)
and overdue_days=4;
"""
% (begin_time, end_time))
dpd4_data = run_one_sql(dpd4_sql)
dpd2_sql = (
"""
select
sum(principal_pending+late_fee_pending+interest_pending) as amount
from
bill_java.overdue bb
where
created_at>'%s'
and created_at< date_add('%s',interval 1 day)
and overdue_days in (2,3)
"""
% (end_time, end_time))
dpd2_data = run_one_sql(dpd2_sql)
all_money = old_data + new_data
repayment = all_money - dpd4_data - dpd2_data
pro = 0
if all_money:
pro = repayment / all_money * 100
RepaymentReport.create(time=begin_time, cycle=0, all_money=all_money,
proportion=pro, repayment=repayment)
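# Hypothetical sketches of the SQL helpers used by the report functions in
# this section (their definitions are masked): run_one_sql is assumed to
# return a single scalar (Decimal(0) when the aggregate is NULL) and
# run_all_sql to return all rows.
def _run_one_sql_sketch(sql):
    row = readonly_db.execute_sql(sql).fetchone()
    return row[0] if row and row[0] is not None else Decimal(0)


def _run_all_sql_sketch(sql):
    return readonly_db.execute_sql(sql).fetchall()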
<mask token>
def get_c1a_into_rate(date_time):
begin_time = date_time - timedelta(days=1)
end_time = date_time
for is_first_loan in FIRSTLOAN.values():
begin_date = begin_time
end_date = end_time
all_money_sql = (
"""
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id as application_id,ba.C1A_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.C1A_entry >= '%s'
and ba.C1A_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
inner join bill_java.overdue o
on a.application_id=o.application_id
and date(a.cdt)=date(o.created_at)
"""
% (begin_date, end_date, is_first_loan))
all_money = run_one_sql(all_money_sql)
begin_date = date_time - timedelta(days=19)
repayment_sql = (
"""
select
sum(b.principal_part+b.late_fee_part) as paid_amount,
cdt
from
(select
br.principal_part, br.late_fee_part,
date(cdt) as cdt, br.repay_at, br.application_id
from (
select ba.id, ba.C1A_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.C1A_entry >= '%s'
and ba.C1A_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 1 and date(br.repay_at) >= date(a.cdt)
group by 4, 5) b
group by 2
"""
% (begin_date, end_date, is_first_loan))
repayment = run_all_sql(repayment_sql)
if not repayment:
return
RepaymentReportInto.create(time=end_date - timedelta(days=1), cycle
=Cycle.C1A.value, all_money=round(all_money, 3), proportion='0',
repayment=0, is_first_loan=is_first_loan, contain_out=
ContainOut.CONTAIN.value)
for d in repayment:
repay = d[0] / 1000000
report = RepaymentReportInto.filter(RepaymentReportInto.time ==
d[1], RepaymentReportInto.cycle == Cycle.C1A.value,
RepaymentReportInto.is_first_loan == is_first_loan).first()
if report:
report.repayment = round(repay, 3)
pro = repay / report.all_money * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
<mask token>
def get_c2_into_rate(date_time):
begin_time = date_time - timedelta(days=1)
end_time = date_time
for is_first_loan in FIRSTLOAN.values():
begin_date = begin_time
end_date = end_time
all_money_sql = (
"""
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id,c2_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c2_entry >= '%s'
and ba.c2_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
inner join bill_java.overdue o
on a.id=o.application_id
and date(a.cdt)=date(o.created_at)
"""
% (begin_date, end_date, is_first_loan))
all_money = run_one_sql(all_money_sql)
not_contain_sql = (
"""
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id,c2_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c2_entry >= '%s'
and ba.c2_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
and not exists(select 1 from bomber.dispatch_app_history bd
where bd.application_id=ba.id
and bd.partner_id=1)
) a
inner join bill_java.overdue o on a.id=o.application_id
and date(a.cdt)=date(o.created_at)
"""
% (begin_date, end_date, is_first_loan))
not_contain_money = run_one_sql(not_contain_sql)
begin_date = date_time - timedelta(days=37)
repayment_sql = (
"""
select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et
from
(select br.principal_part,br.late_fee_part,
date(a.c2_entry) as et, br.application_id, br.repay_at
from (
select ba.id, ba.c2_entry
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c2_entry >= '%s'
and ba.c2_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 3
group by 4, 5) b
group by 2
"""
% (begin_date, end_date, is_first_loan))
repayment = run_all_sql(repayment_sql)
not_contain_repay_sql = (
"""
select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et
from
(select br.principal_part,br.late_fee_part,
date(a.c2_entry) as et, br.application_id, br.repay_at
from (
select ba.id, ba.c2_entry
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c2_entry >= '%s'
and ba.c2_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
and not exists(select 1 from bomber.dispatch_app_history bd
where bd.application_id=ba.id
and bd.partner_id=1)
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 3
group by 4, 5) b
group by 2
"""
% (begin_date, end_date, is_first_loan))
not_contain_repay = run_all_sql(not_contain_repay_sql)
        # Both the pending-amount and the repayment queries must return
        # data before the report rows can be written.
        if not (not_contain_money and repayment):
            return
for i in ContainOut.values():
if i == ContainOut.NOT_CONTAIN.value:
RepaymentReportInto.create(time=end_date - timedelta(days=1
), cycle=Cycle.C2.value, all_money=round(
not_contain_money, 3), proportion='0', repayment=0,
is_first_loan=is_first_loan, contain_out=ContainOut.
NOT_CONTAIN.value)
for repay in not_contain_repay:
repay_money = Decimal(0)
if repay[0]:
repay_money = repay[0]
repay_money = repay_money / 1000000
report = RepaymentReportInto.filter(RepaymentReportInto
.time == repay[1], RepaymentReportInto.
is_first_loan == is_first_loan, RepaymentReportInto
.contain_out == i, RepaymentReportInto.cycle ==
Cycle.C2.value).first()
if report and report.all_money:
report.repayment = round(repay_money, 3)
pro = repay_money / report.all_money * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
elif i == ContainOut.CONTAIN.value:
RepaymentReportInto.create(time=end_date - timedelta(days=1
), cycle=Cycle.C2.value, all_money=round(all_money, 3),
proportion='0', repayment=0, is_first_loan=
is_first_loan, contain_out=ContainOut.CONTAIN.value)
for repay in repayment:
repay_money = 0
if repay[0]:
repay_money = repay[0] / 1000000
report = RepaymentReportInto.filter(RepaymentReportInto
.time == repay[1], RepaymentReportInto.
is_first_loan == is_first_loan, RepaymentReportInto
.contain_out == i, RepaymentReportInto.cycle ==
Cycle.C2.value).first()
if report and report.all_money:
report.repayment = round(repay_money, 3)
pro = repay_money / report.all_money * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
def get_c3_into_rate(date_time):
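    """Build the daily C3-entry repayment report (pending amount at entry
    vs. cycle-4 repayments) per first-loan flag."""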
begin_time = date_time - timedelta(days=1)
end_time = date_time
for is_first_loan in FIRSTLOAN.values():
begin_date = begin_time
end_date = end_time
all_money_sql = (
"""
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id, ba.c3_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c3_entry >= '%s'
and ba.c3_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
inner join bill_java.overdue o on a.id=o.application_id
and date(a.cdt)=date(o.created_at)
"""
% (begin_date, end_date, is_first_loan))
all_money = run_one_sql(all_money_sql)
begin_date = date_time - timedelta(days=30)
repayment_sql = (
"""
select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et
from
(select br.principal_part,br.late_fee_part,
date(a.c3_entry) as et, br.application_id, br.repay_at
from (
select ba.id, ba.c3_entry
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c3_entry >= '%s'
and ba.c3_entry < '%s'
and ba.type = 0
and da.is_first_loan = '%s'
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 4
group by 4, 5) b
group by 2
"""
% (begin_date, end_date, is_first_loan))
repayment = run_all_sql(repayment_sql)
RepaymentReportInto.create(time=end_date - timedelta(days=1), cycle
=Cycle.C3.value, all_money=round(all_money, 3), proportion='0',
repayment=0, is_first_loan=is_first_loan, contain_out=
ContainOut.CONTAIN.value)
if not repayment:
return
for repay in repayment:
repay_money = Decimal(0)
if repay[0]:
repay_money = repay[0]
repay_money = repay_money / 1000000
report = RepaymentReportInto.filter(RepaymentReportInto.time ==
repay[1], RepaymentReportInto.cycle == Cycle.C3.value,
RepaymentReportInto.is_first_loan == is_first_loan).first()
if report:
report.repayment = repay_money
pro = 0
if report.all_money and int(report.all_money):
pro = repay_money / report.all_money * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
<mask token>
def get_static_bomber(begin_date):
active_date = begin_date - timedelta(days=8)
bombers = BomberR.select(BomberR.id, BomberR.role.alias('role'),
BomberR.last_active_at.alias('active')).where(BomberR.
last_active_at > active_date, BomberR.role << [1, 2, 4, 5, 6, 8, 9])
summary = []
for bomber in bombers:
summary.append({'time': begin_date, 'bomber_id': bomber.id, 'cycle':
bomber.role.cycle, 'work_ind': 0})
SummaryBomber.insert_many(summary).execute()
<mask token>
@time_logger
def get_call_and_made(end_date, begin_date, real_time_query=False):
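    """Per-bomber call statistics for the period: calls made/connected and
    distinct cases touched; written to SummaryBomber unless
    real_time_query is True."""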
call_sql = (
"""
select
bomber_id,
count(case when relationship is not null then application_id end)
as 'call_cnt',
count(distinct case when relationship is not null then
application_id end) as 'call_case',
count(case when phone_status=4 then application_id end) as 'connect',
count(distinct case when phone_status=4 then application_id end)
as 'connect_case'
from (
select bomber_id,application_id,phone_status, cycle, relationship
from bomber.call_actions ba
where created_at>'%s' and created_at<'%s'
and type in (0, 1)
) a
group by 1
"""
% (begin_date, end_date))
calls = run_all_sql(call_sql)
if real_time_query:
return calls
for call in calls:
bomber, call_cnt, case_made, connect_cnt, case_connect = call
SummaryBomber.update(case_made_cnt=case_made, call_cnt=call_cnt,
call_connect_cnt=connect_cnt, case_connect_cnt=case_connect).where(
SummaryBomber.bomber_id == bomber, SummaryBomber.time == begin_date
).execute()
return calls
@time_logger
def get_claimed_cnt(end_date, begin_date, real_time_query=False):
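    """Count applications each bomber was still holding at begin_date
    (entered within the last 30 days, no partner) and store the result
    as claimed_cnt on SummaryBomber."""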
table_date = begin_date - timedelta(days=30)
claimed_sql = (
"""
SELECT
COUNT( `t1`.`application_id` ) AS cnt,
`t1`.`bomber_id` AS bomber_id
FROM
`dispatch_app_history` AS t1
WHERE
( `t1`.`out_at` > '%s' OR `t1`.`out_at` IS null )
AND ( `t1`.`bomber_id` != 1000 )
AND ( `t1`.`partner_id` IS null )
AND ( `t1`.`entry_at` > '%s' )
AND ( `t1`.`entry_at` < '%s' )
GROUP BY
`t1`.`bomber_id`
"""
% (begin_date, table_date, end_date))
claimeds = run_all_sql(claimed_sql)
if real_time_query:
return claimeds
for claimed in claimeds:
cnt, bomber_id = claimed
SummaryBomber.update(claimed_cnt=cnt).where(SummaryBomber.time ==
begin_date, SummaryBomber.bomber_id == bomber_id).execute()
return claimeds
def get_sms_data(end_data, begin_data):
all_sms = ConnectHistoryR.select(ConnectHistoryR.operator.alias(
'bomber_id'), fn.COUNT(ConnectHistoryR.application).alias('sms_send')
).where(ConnectHistoryR.created_at > begin_data, ConnectHistoryR.
created_at < end_data, ConnectHistoryR.type.in_(ConnectType.sms())
).group_by(ConnectHistoryR.operator)
for sms in all_sms:
SummaryBomber.update(sms_cnt=sms.sms_send).where(SummaryBomber.time ==
begin_data, SummaryBomber.bomber_id == sms.bomber_id).execute()
return all_sms
<mask token>
@action(MessageAction.SUMMARY_NEW)
def summary_new(payload, msg_id):
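    """Daily job: collect yesterday's per-bomber activity (claims, calls,
    SMS, PTP, recovered amount). Aborts if it already ran five times
    today according to WorkerLog."""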
end_date = date.today()
begin_date = end_date - timedelta(days=1)
worker_log = WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs')
).where(WorkerLog.created_at >= end_date, WorkerLog.action ==
'SUMMARY_NEW').first()
if worker_log and worker_log.logs >= 5:
return
get_active_bomber(begin_date)
get_call_and_made(end_date, begin_date)
get_claimed_cnt(end_date, begin_date)
get_sms_data(end_date, begin_date)
get_ptp_data(end_date, begin_date)
get_recover_amount(end_date, begin_date)
get_unfollowed(begin_date)
get_unfollowed_call(begin_date)
def get_new_case_amount(begin_date, end_date):
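    """Sum pending amount and count of cases newly dispatched to each
    bomber (no partner) and store them on SummaryBomber."""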
all_case = DispatchAppHistoryR.select(fn.SUM(DispatchAppHistoryR.
entry_late_fee_pending + DispatchAppHistoryR.
entry_principal_pending).alias('pending'), DispatchAppHistoryR.
bomber_id, fn.COUNT(DispatchAppHistoryR.application).alias('cnt')
).where(DispatchAppHistoryR.entry_at > begin_date,
DispatchAppHistoryR.entry_at < end_date, DispatchAppHistoryR.
partner_id.is_null(True)).group_by(DispatchAppHistoryR.bomber_id)
for case in all_case:
SummaryBomber.update(new_case_amount_sum=case.pending, new_case_cnt
=case.cnt).where(SummaryBomber.bomber_id == case.bomber_id,
SummaryBomber.time == begin_date).execute()
return all_case
<mask token>
@time_logger
def get_new_case_call(begin_date, end_date, real_query_time=False):
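    """Count, per bomber, the new cases that were called on the same day
    they were dispatched."""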
sql = (
"""
SELECT
bd.bomber_id,
count( DISTINCT bd.application_id )
FROM
bomber.dispatch_app_history bd
INNER JOIN bomber.call_actions bc
ON bd.application_id = bc.application_id
AND bd.bomber_id = bc.bomber_id
AND date( bd.entry_at ) = date( bc.created_at )
WHERE
entry_at > '%s'
AND entry_at < '%s'
AND partner_id IS NULL
GROUP BY 1
"""
% (begin_date, end_date))
new_case_calls = run_all_sql(sql)
if real_query_time:
return new_case_calls
for call in new_case_calls:
SummaryBomber.update(new_case_call_cnt=call[1]).where(SummaryBomber
.bomber_id == call[0], SummaryBomber.time == begin_date).execute()
return new_case_calls
@time_logger
def get_calltime_avg(begin_date, end_date, real_query_time=False):
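    """Aggregate answered call durations (auto-dial matched by username,
    manual matched by extension with a recording) per bomber and store
    sum/count/average on SummaryBomber."""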
autos_sql = (
"""
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS auto_talkduraction,
count( 1 ) AS auto_jt_cnt
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.username = bb.username
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND an.username != ' '
AND an.STATUS = 'ANSWERED'
AND bb.id IS NOT NULL
GROUP BY 1
"""
% (begin_date, end_date))
autos = run_all_sql(autos_sql)
manual_sql = (
"""
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS manual_talkduraction,
count( 1 ) AS manual_jt_cnt
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) )
AND an.STATUS = 'ANSWERED'
AND bb.id IS NOT NULL
AND an.recording is not null
GROUP BY 1
"""
% (begin_date, end_date, '5%', '3%'))
manuals = run_all_sql(manual_sql)
datas = autos + manuals
result = {}
for data in datas:
if data[0] in result.keys():
result[data[0]][0] += data[1]
result[data[0]][1] += data[2]
continue
result[data[0]] = [data[1], data[2]]
if real_query_time:
return result
for key, value in result.items():
SummaryBomber.update(calltime_case_sum=value[0], calltime_case_cnt=
value[1], calltime_case_avg=value[0] / value[1] if value[1] else 0
).where(SummaryBomber.time == begin_date, SummaryBomber.
bomber_id == key).execute()
return result
def get_no_calltime_avg(begin_date, end_date):
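    """Aggregate durations of manual calls that were not answered or have
    no recording; store sum/count/average per bomber."""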
manual_sql = (
"""
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS manual_talkduraction,
count( 1 ) AS manual_jt_cnt
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) )
AND (an.status!='ANSWERED' or an.recording is null)
AND bb.id IS NOT NULL
GROUP BY 1
"""
% (begin_date, end_date, '5%', '3%'))
manuals = run_all_sql(manual_sql)
for data in manuals:
SummaryBomber.update(calltime_no_case_sum=data[1],
calltime_no_case_cnt=data[2], calltime_no_case_avg=data[1] /
data[2] if data[2] else 0).where(SummaryBomber.time ==
begin_date, SummaryBomber.bomber_id == data[0]).execute()
@time_logger
def get_calltime_sum(begin_date, end_date, real_query_time=False):
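    """Total call duration (auto-dial plus manual, any status) per bomber
    for the period."""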
autos_sql = (
"""
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS auto_talkduraction
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.username = bb.username
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND an.username != ' '
AND bb.id IS NOT NULL
GROUP BY 1
"""
% (begin_date, end_date))
autos = run_all_sql(autos_sql)
manual_sql = (
"""
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS manual_talkduraction
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) )
AND bb.id IS NOT NULL
GROUP BY 1
"""
% (begin_date, end_date, '5%', '3%'))
manuals = run_all_sql(manual_sql)
datas = autos + manuals
result = {}
for data in datas:
if data[0] in result.keys():
result[data[0]] += data[1]
continue
result[data[0]] = data[1]
if real_query_time:
return result
for key, value in result.items():
SummaryBomber.update(calltime_sum=value).where(SummaryBomber.time ==
begin_date, SummaryBomber.bomber_id == key).execute()
return result
<mask token>
@action(MessageAction.UPDATE_SUMMARY_NEW)
def update_summary_new(payload, msg_id):
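    """Daily job: backfill yesterday's SummaryBomber rows with new-case
    amounts, KP/PTP statistics and call-time metrics. Aborts if it
    already ran five times today."""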
end_date = date.today()
begin_date = end_date - timedelta(days=1)
worker_log = WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs')
).where(WorkerLog.created_at >= end_date, WorkerLog.action ==
'UPDATE_SUMMARY_NEW').first()
if worker_log and worker_log.logs >= 5:
return
get_new_case_amount(begin_date, end_date)
get_kp_cleared(begin_date, end_date)
get_kp_today(begin_date, end_date)
get_ptp_cnt(begin_date, end_date)
get_ptp_call_cnt(begin_date, end_date)
get_new_case_cleared(begin_date, end_date)
get_new_case_call(begin_date, end_date)
get_calltime_avg(begin_date, end_date)
get_no_calltime_avg(begin_date, end_date)
get_calltime_sum(begin_date, end_date)
def get_cycle_claimed(begin_date, end_date):
sql = (
"""
select cycle,count(1)
from bomber.application where cycle in (1,2,3,4)
and (finished_at is null or (finished_at>'%s'))
and created_at>'2018-09-01'
group by 1
"""
% begin_date)
result = run_all_sql(sql)
return result
@time_logger
def cycle_new_case(begin_date, end_date, real_time_query=False):
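    """Count cases entering each cycle (by entry date) and their pending
    amount; stored on the cycle-level SummaryBomber rows, which use the
    cycle number as bomber_id."""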
sql = (
"""
SELECT
1 AS cycle,
count( ba.id ),
sum( bo.principal_pending + late_fee_pending +
interest_pending ) AS pending
FROM
bomber.application ba
INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id
AND date( ba.created_at ) = bo.which_day_overdue
WHERE
ba.created_at > '%s'
AND ba.created_at < '%s'
UNION
SELECT
2 AS cycle,
count( 1 ),
sum( bo.principal_pending + late_fee_pending +
interest_pending ) AS pending
FROM
bomber.application ba
INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id
AND date( ba.c1b_entry ) = bo.which_day_overdue
WHERE
c1b_entry > '%s'
AND c1b_entry < '%s'
UNION
SELECT
3 AS cycle,
count( 1 ),
sum( bo.principal_pending + late_fee_pending +
interest_pending ) AS pending
FROM
bomber.application ba
INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id
AND date( ba.c2_entry ) = bo.which_day_overdue
WHERE
c2_entry > '%s'
AND c2_entry < '%s'
UNION
SELECT
4 AS cycle,
count( 1 ),
sum( bo.principal_pending + late_fee_pending +
interest_pending ) AS pending
FROM
bomber.application ba
INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id
AND date( ba.c3_entry ) = bo.which_day_overdue
WHERE
c3_entry > '%s'
AND c3_entry < '%s'
"""
% (begin_date, end_date, begin_date, end_date, begin_date,
end_date, begin_date, end_date))
all_datas = run_all_sql(sql)
if real_time_query:
return all_datas
for data in all_datas:
SummaryBomber.update(new_case_amount_sum=data[2], new_case_cnt=data[1]
).where(SummaryBomber.time == begin_date, SummaryBomber.
bomber_id == data[0], SummaryBomber.cycle == data[0]).execute()
return all_datas
@time_logger
def get_cycle_new_case_call(begin_date, end_date, real_time_query=False):
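    """Per cycle, count new cases that received a call on their entry
    day; stored on the cycle-level SummaryBomber rows."""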
sql = (
"""
SELECT
1 AS cycle,
count( DISTINCT ba.id )
FROM
bomber.application ba
INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
AND date( ba.created_at ) = date( bc.created_at )
WHERE
ba.created_at > '%s'
AND ba.created_at < '%s'
UNION
SELECT
2 AS cycle,
count( DISTINCT ba.id )
FROM
bomber.application ba
INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
AND date( ba.c1b_entry ) = date( bc.created_at )
WHERE
ba.c1b_entry > '%s'
AND ba.c1b_entry < '%s'
UNION
SELECT
3 AS cycle,
count( DISTINCT ba.id )
FROM
bomber.application ba
INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
AND date( ba.c2_entry ) = date( bc.created_at )
WHERE
ba.c2_entry > '%s'
AND ba.c2_entry < '%s'
UNION
SELECT
4 AS cycle,
count( DISTINCT ba.id )
FROM
bomber.application ba
INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
AND date( ba.c3_entry ) = date( bc.created_at )
WHERE
ba.c3_entry > '%s'
AND ba.c3_entry < '%s'
"""
% (begin_date, end_date, begin_date, end_date, begin_date,
end_date, begin_date, end_date))
cycle_datas = run_all_sql(sql)
if real_time_query:
return cycle_datas
for data in cycle_datas:
SummaryBomber.update(new_case_call_cnt=data[1]).where(SummaryBomber
.time == begin_date, SummaryBomber.cycle == data[0],
SummaryBomber.bomber_id == data[0]).execute()
return cycle_datas
def get_cycle_new_case_cleared(begin_date, end_date):
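    """Per cycle (C1A and C1B only), sum repayments received on the entry
    day of new cases and store them as new_case_cleared_sum."""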
sql = (
"""
SELECT
'1' AS cycle, count( DISTINCT id ),
sum( paid_amount ) AS pending
FROM
(SELECT ba.id, br.repay_at,
br.late_fee_part + br.principal_part AS paid_amount
FROM
bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.created_at ) = date( br.repay_at )
WHERE ba.created_at > '%s'
AND ba.created_at < '%s'
GROUP BY 1, 2 ) a
UNION
SELECT
'2' AS cycle, count( DISTINCT id ),
sum( paid_amount ) AS pending
FROM
(SELECT ba.id, br.repay_at,
br.late_fee_part + br.principal_part AS paid_amount
FROM
bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.c1b_entry ) = date( br.repay_at )
WHERE ba.c1b_entry > '%s'
AND ba.c1b_entry < '%s'
GROUP BY 1, 2) a
"""
% (begin_date, end_date, begin_date, end_date))
cycle_cleared = run_all_sql(sql)
for i in cycle_cleared:
SummaryBomber.update(new_case_cleared_sum=i[2]).where(SummaryBomber
.cycle == i[0], SummaryBomber.bomber_id == i[0], SummaryBomber.
time == begin_date).execute()
<mask token>
def get_change_bomber():
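    """Read today's BomberLog add/remove entries for C1B/C2/C3 roles and
    return, per cycle and group, the bombers removed (del_ids) and the
    currently active ones (new_ids) whose apps need re-dispatching."""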
    cycle_role_map = {5: Cycle.C1B.value, 6: Cycle.C2.value,
        8: Cycle.C3.value}
result = {}
bomber_logs = BomberLog.select(BomberLog.bomber_id, BomberLog.role_id,
BomberLog.operation, Bomber.group_id).join(Bomber, JOIN_INNER, on=
BomberLog.bomber_id == Bomber.id).where(fn.DATE(BomberLog.
created_at) == date.today(), BomberLog.role_id << list(
cycle_role_map.keys()), BomberLog.operation << (0, 1), Bomber.
instalment == 0).dicts()
for b_log in bomber_logs:
cycle = cycle_role_map.get(b_log['role_id'])
group_id = b_log['group_id']
if cycle in result:
if group_id not in result[cycle]:
result[cycle][group_id] = {'cycle': cycle, 'del_ids': [],
'new_ids': []}
else:
result[cycle] = {group_id: {'cycle': cycle, 'del_ids': [],
'new_ids': []}}
if b_log['operation'] == 0:
result[cycle][group_id]['del_ids'].append(b_log['bomber_id'])
if result:
bombers = Bomber.select().where(Bomber.role.in_(list(cycle_role_map
.keys())), Bomber.is_del == 0, Bomber.instalment == 0)
for b in bombers:
cycle_result = result.get(cycle_role_map[b.role_id], {})
role_result = cycle_result.get(b.group_id)
if not role_result:
continue
role_result['new_ids'].append(b.id)
        result_list = []
        for cycle, group_dict in result.items():
            result_list.extend(list(group_dict.values()))
        return result_list
return []
<mask token>
def get_surplus_application(new_ids, del_ids, average_nums, classified_apps):
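    """Collect the apps freed by removed bombers plus the overflow of
    overloaded new bombers; return those surplus apps and the per-bomber
    classification sorted by how many apps each still needs."""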
surplus_apps = []
for del_id in del_ids:
del_res = classified_apps.get(del_id, {})
p_list = del_res.get('p_list', [])
np_list = del_res.get('np_list', [])
del_res['need_num'] = -(len(p_list) + len(np_list))
del_res['to_list'] = np_list + p_list
surplus_apps.extend(p_list)
surplus_apps.extend(np_list)
for index, bid in enumerate(new_ids):
average = average_nums[index]
bomber_app = classified_apps.get(bid)
if not bomber_app:
bomber = Bomber.select(Bomber.partner_id).where(Bomber.id == bid
).first()
bomber_app = {'bid': bid, 'p_list': [], 'p_num': 0, 'np_list':
[], 'np_num': 0, 'need_num': average, 'partner_id': bomber.
partner_id if bomber else ''}
classified_apps[bid] = bomber_app
else:
p_num = len(bomber_app['p_list'])
np_num = len(bomber_app['np_list'])
if p_num > average:
bomber_app['need_num'] = -np_num
else:
bomber_app['need_num'] = average - (p_num + np_num)
bomber_app['p_num'] = p_num
bomber_app['np_num'] = np_num
if bomber_app['need_num'] < 0:
random.shuffle(bomber_app['np_list'])
res_over = bomber_app['np_list'][:-bomber_app['need_num']]
bomber_app['to_list'] = res_over
surplus_apps.extend(res_over)
classified_apps_list = sorted(classified_apps.values(), key=lambda x: x
['need_num'], reverse=True)
return surplus_apps, classified_apps_list
<mask token>
@action(MessageAction.BOMBER_CHANGE_DISPATCH_APPS)
def bomber_dispatch_applications(payload, msg_id):
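    """Re-dispatch applications after bomber changes, separately for
    cash-loan and instalment bombers."""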
change_bombers = get_change_bomber()
instalment_change_bombers = get_instalment_change_bomber()
params = {ApplicationType.CASH_LOAN.value: change_bombers,
ApplicationType.CASH_LOAN_STAGING.value: instalment_change_bombers}
for type, bombers in params.items():
change_bomber_dispatch_apps(change_bombers=bombers, type=type)
<mask token>
def get_summary_daily_time():
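    """Return (begin_time, end_time, summary_date) for the reporting slot
    that just closed: before 12:40 it is yesterday 17:20-24:00, between
    12:40 and 17:20 it is 00:00-12:40, afterwards 12:40-17:20."""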
mid_time_t1 = datetime.strptime('12:40:00', '%H:%M:%S')
mid_time_t2 = datetime.strptime('17:20:00', '%H:%M:%S')
now_date = datetime.now()
now_date_time = now_date.time()
today_str = str(now_date.date())
if now_date_time < mid_time_t1.time():
yes_date = now_date - timedelta(days=1)
yes_date_str = str(yes_date.date())
begin_str = yes_date_str + ' 17:20:00'
end_str = today_str + ' 00:00:00'
elif mid_time_t1.time() <= now_date_time < mid_time_t2.time():
begin_str = today_str + ' 00:00:00'
end_str = today_str + ' 12:40:00'
else:
begin_str = today_str + ' 12:40:00'
end_str = today_str + ' 17:20:00'
begin_time = datetime.strptime(begin_str, '%Y-%m-%d %H:%M:%S')
end_time = datetime.strptime(end_str, '%Y-%m-%d %H:%M:%S')
summary_datetime = now_date - timedelta(minutes=30)
summary_date = summary_datetime.date()
return begin_time, end_time, summary_date
@action(MessageAction.SUMMARY_DAILY)
def summary_daily_data(payload, msg_id):
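    """Intra-day job: per bomber, count calls and PTPs from CallActions
    and sum repayments for the current slot, then bulk-insert
    SummaryDaily rows."""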
begin_time, end_time, summary_date = get_summary_daily_time()
call_actions = CallActionsR.select(CallActionsR.id, CallActionsR.
bomber_id, CallActionsR.application_id, CallActionsR.promised_date,
CallActionsR.cycle, CallActionsR.name, CallActionsR.number).where(
CallActionsR.created_at >= begin_time, CallActionsR.created_at <
end_time, CallActionsR.type << (0, 1))
summary_dailys = {}
for call in call_actions:
if call.bomber_id not in summary_dailys:
summary_dailys[call.bomber_id] = {'ptp_cnt': 0, 'call_cnt': 0,
'cycle': call.cycle, 'repayment': 0, 'bomber_id': call.
bomber_id, 'summary_date': str(summary_date)}
if call.name and call.number:
summary_dailys[call.bomber_id]['call_cnt'] += 1
if call.promised_date:
summary_dailys[call.bomber_id]['ptp_cnt'] += 1
C1_sql = (
"""
SELECT a.current_bomber_id,
sum(principal_part+late_fee_part) as pay_amount,a.cycle
from
(select a.cycle,a.current_bomber_id,b.username,a.principal_part,
a.late_fee_part,a.application_id,a.repay_at
FROM bomber.repayment_log a ,bomber.bomber b
WHERE a.repay_at >= '%s' AND a.repay_at <'%s'
AND a.current_bomber_id !=''
AND a.current_bomber_id = b.id
and b.role_id in (1,2,4,5)
and principal_part+late_fee_part>0
group by 6,7) a
GROUP BY a.cycle,a.current_bomber_id
"""
% (begin_time, end_time))
C1_repayment = run_all_sql(C1_sql)
other_sql = (
"""
select current_bomber_id,sum(pay_amount) as pay_amount,cycle
from (
select application_id,current_bomber_id,pay_amount,repay_at,cycle
from (
select br.application_id,br.current_bomber_id,
br.principal_part+br.late_fee_part as pay_amount,br.repay_at,
br.cycle
from bomber.repayment_log br
left join bomber.bomber bb on br.current_bomber_id=bb.id
where exists (select 1 from bomber.bombing_history bb
where br.current_bomber_id=bb.bomber_id
and br.application_id=bb.application_id
and bb.created_at<br.repay_at
and (bb.promised_date is not null
or bb.promised_amount is not null))
and br.repay_at >= '%s'
and br.repay_at < '%s'
and bb.role_id in (3,6,7,8,9)
and br.principal_part+br.late_fee_part > 0
group by 1,4
) a
group by 1,4) b
group by 1
"""
% (begin_time, end_time))
other_repayment = run_all_sql(other_sql)
all_repayment = C1_repayment + other_repayment
for res in all_repayment:
bomber_id, pay_amount, cycle = res
if bomber_id in summary_dailys:
summary_dailys[bomber_id]['repayment'] += pay_amount
else:
summary_dailys[bomber_id] = {'ptp_cnt': 0, 'call_cnt': 0,
'cycle': cycle, 'repayment': pay_amount, 'bomber_id':
bomber_id, 'summary_date': str(summary_date)}
insert_values = list(summary_dailys.values())
if insert_values:
SummaryDaily.insert_many(insert_values).execute()
<mask token>
def month_dispatch_app_out_partner(cycle, apps, app_logs, np_apps):
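    """Monthly dispatch: give each external partner of the cycle a slice
    of the non-PTP apps proportional to its app_percentage; returns the
    apps left over for in-house dispatch."""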
apps = list(apps)
np_apps = list(np_apps)
random.shuffle(np_apps)
apps_len = len(apps)
np_apps_len = len(np_apps)
end = 0
all_app_precentage = 0
partners = Partner.select().where(Partner.cycle == cycle, Partner.
status == PartnerStatus.NORMAL.value)
for p in partners:
all_app_precentage += p.app_percentage
for partner in partners:
bombers = Bomber.select().where(Bomber.partner == partner.id,
Bomber.is_del == 0, Bomber.status != BomberStatus.OUTER_LEADER.
value)
bids = {b.id: b for b in bombers}
if len(bids) == 0:
logging.info('cycle:%s,partner:%s,no bomber' % (cycle, partner.id))
continue
start = end
if np_apps_len >= int(apps_len * all_app_precentage):
end = start + int(apps_len * partner.app_percentage)
else:
end = start + int(np_apps_len * partner.app_percentage /
all_app_precentage)
partner_app = np_apps[start:end]
dispatch_apps_to_bomber(cycle, partner_app, bids, app_logs)
np_apps = np_apps[end:]
return np_apps
def month_dispatch_app_inner(cycle, np_apps, app_logs, p_apps):
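    """Monthly dispatch of the remaining apps to in-house AB-test bombers
    (per-cycle bomber list from SystemConfig); C1A non-PTP apps are
    instead released back to PROCESSING."""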
    sys_cycle = {1: 'AB_TEST_C1A', 2: 'AB_TEST_C1B', 3: 'AB_TEST_C2',
        4: 'AB_TEST_C3'}
sys_config = SystemConfig.get(SystemConfig.key == sys_cycle[cycle])
sys_values = json.loads(sys_config.value)
bombers = Bomber.select().where(Bomber.id << sys_values, Bomber.is_del == 0
)
if cycle in (Cycle.C1B.value, Cycle.C2.value, Cycle.C3.value):
bombers = bombers.where(Bomber.instalment == 0)
bids = {b.id: b for b in bombers}
if cycle == Cycle.C1A.value:
np_ids = [a['id'] for a in np_apps]
np = Application.update(status=ApplicationStatus.PROCESSING.value,
ptp_bomber=None, latest_bomber=None).where(Application.id << np_ids
).execute()
bomber_app_logs = app_logs.get(cycle, {})
out_param = {'application_ids': bomber_app_logs.get('to_ids', []),
'month_dispatch': 1, 'src_bomber_id': cycle}
new_out_record(**out_param)
in_param = {'cycle': cycle, 'application_ids': np_ids,
'dest_bomber_id': cycle}
new_in_record(**in_param)
bomber_app_logs['need_num'] = len(np_apps)
bomber_app_logs['form_ids'] = np_ids
bomber_app_logs['status'] = 1
else:
dispatch_apps_to_bomber(cycle, np_apps, bids, app_logs, False)
dispatch_apps_to_bomber(cycle, p_apps, bids, app_logs, False)
def dispatch_apps_to_bomber(cycle, apps, bids, app_logs, out_partner=True,
type=ApplicationType.CASH_LOAN.value):
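    """Shuffle apps and split them evenly across the given bombers,
    preserving PTP ownership, and write the dispatch in/out records
    (plus DispatchApp rows when dispatching to partners)."""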
apps = list(apps)
random.shuffle(apps)
bids_list = list(bids.keys())
if len(bids_list) <= 0:
logging.info('get_dispatch_app_to_bomber no bids')
return
average_num = get_average_number(len(apps), len(bids_list))
bomber_end = 0
with db.atomic():
for index, bid in enumerate(bids_list):
current_bomber = bids.get(bid)
bomber_app_logs = app_logs.get(bid, {})
bomber_start = bomber_end
bomber_end = bomber_start + average_num[index]
bomber_apps = apps[bomber_start:bomber_end]
from_p, from_np, from_ids, status = [], [], [], 0
for ba in bomber_apps:
promised_date = ba.get('promised_date')
from_ids.append(ba['id'])
if promised_date and promised_date.date() >= date.today():
from_p.append(ba['id'])
else:
from_np.append(ba['id'])
app_status = ApplicationStatus.AB_TEST.value
if (cycle == Cycle.C1A.value and not out_partner and type ==
ApplicationType.CASH_LOAN.value):
app_status = ApplicationStatus.PROCESSING.value
if from_p:
p = Application.update(ptp_bomber=bid, latest_bomber=bid,
status=app_status).where(Application.id << from_p).execute(
)
p_ids = bomber_app_logs.get('p_ids', []) + from_p
bomber_app_logs['p_ids'] = p_ids
if from_np:
np = Application.update(latest_bomber=bid, ptp_bomber=None,
status=ApplicationStatus.AB_TEST.value).where(
Application.id << from_np).execute()
np_ids = bomber_app_logs.get('np_ids', []) + from_np
bomber_app_logs['np_ids'] = np_ids
in_param = {'cycle': cycle, 'dest_partner_id': current_bomber.
partner_id, 'application_ids': from_ids, 'dest_bomber_id': bid}
if type == ApplicationType.CASH_LOAN.value:
out_param = {'src_bomber_id': bid, 'application_ids':
bomber_app_logs.get('to_ids', []), 'month_dispatch': 1}
new_out_record(**out_param)
new_in_record(**in_param)
else:
out_and_in_record_instalment(**in_param)
bomber_app_logs['status'] = 1
need_num = bomber_app_logs.get('need_num', 0) + average_num[index]
bomber_app_logs['need_num'] = need_num
all_form_ids = bomber_app_logs.get('form_ids', []) + from_ids
bomber_app_logs['form_ids'] = all_form_ids
if not out_partner:
continue
try:
DispatchApp.delete().where(DispatchApp.application.in_(
from_ids)).execute()
dispatch_ins = [{'application': id, 'partner':
current_bomber.partner_id, 'bomber': bid, 'status':
DisAppStatus.NORMAL.value} for id in from_ids]
DispatchApp.insert_many(dispatch_ins).execute()
except Exception as e:
logging.info(
'month_disapp_error error:%s,bid:%s,from_ids:%s' % (str
(e), bid, from_ids))
<mask token>
def calc_entry_time(overdue_days):
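    """Map overdue_days to the cycle-entry timestamp fields: the bucket
    containing overdue_days gets now(), all others None."""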
app_entry_time = {}
overdue_entry = {'dpd1_entry': [1, 3], 'C1A_entry': [4, 10],
'C1B_entry': [11, 30], 'C2_entry': [31, 60], 'C3_entry': [61, 90]}
for key, value in overdue_entry.items():
if value[0] <= overdue_days <= value[1]:
app_entry_time[key] = datetime.now()
else:
app_entry_time[key] = None
return app_entry_time
<mask token>
@action(MessageAction.SUMMARY_BOMBER_OVERDUE)
def summary_bomber_overdue_everyday(payload, msg_id):
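    """Daily snapshot: copy every active application (one row per overdue
    bill) into BomberOverdue, dropping expired PTP and follow-up dates."""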
cycle_list = Cycle.values()
which_day = date.today()
for cycle in cycle_list:
apps = ApplicationR.select(ApplicationR.id, ApplicationR.cycle,
ApplicationR.ptp_bomber, ApplicationR.overdue_days,
ApplicationR.promised_date, ApplicationR.follow_up_date,
ApplicationR.external_id, OverdueBillR.status, OverdueBillR.
periods, OverdueBillR.sub_bill_id).join(OverdueBillR,
JOIN_LEFT_OUTER, on=ApplicationR.id == OverdueBillR.collection_id
).where(ApplicationR.status != ApplicationStatus.REPAID.value,
ApplicationR.no_active == 0, ApplicationR.cycle == cycle).dicts()
bomber_overdue_list = []
for app in apps:
status = app.get('status')
if status == ApplicationStatus.REPAID.value:
continue
ptp_bomber = app.get('ptp_bomber')
promised_date = app.get('promised_date')
follow_up_date = app.get('follow_up_date')
if not promised_date or promised_date.date() < date.today():
ptp_bomber = promised_date = None
if not follow_up_date or follow_up_date.date() < date.today():
follow_up_date = None
overdue_dict = {'collection_id': app.get('id'), 'external_id':
app.get('external_id'), 'sub_bill_id': app.get(
'sub_bill_id'), 'periods': app.get('periods'), 'cycle': app
.get('cycle') if app.get('cycle') else cycle, 'ptp_bomber':
ptp_bomber, 'promised_date': promised_date,
'follow_up_date': follow_up_date, 'which_day': which_day,
'overdue_days': app.get('overdue_days')}
bomber_overdue_list.append(overdue_dict)
try:
if bomber_overdue_list:
with db.atomic():
for index in range(0, len(bomber_overdue_list), 1000):
insert_list = bomber_overdue_list[index:index + 1000]
BomberOverdue.insert_many(insert_list).execute()
except Exception as e:
logging.info(
'summary_bomber_overdue_error,cycle:%s,which_day:%s,error:%s' %
(cycle, str(which_day), str(e)))
<mask token>
@action(MessageAction.BOMBER_TODAY_PTP_FOLLOW_SWITCH_OFF)
def today_ptp_auto_call_switch(payload, msg_id):
today = datetime.today().date()
next_day = today + timedelta(days=1)
apps = ApplicationR.select(ApplicationR.latest_bomber).where(
ApplicationR.promised_date < next_day, ApplicationR.promised_date >=
today, ApplicationR.promised_date.is_null(False), ApplicationR.
status != ApplicationStatus.REPAID.value, ApplicationR.cycle <
Cycle.C2.value, ApplicationR.latest_bomber.is_null(False)).group_by(
ApplicationR.latest_bomber)
bids = [a.latest_bomber_id for a in apps]
if not bids:
return
q = BomberPtp.update(today_switch=BomberCallSwitch.OFF.value).where(
BomberPtp.auto_ext.is_null(False), BomberPtp.bomber_id << bids
).execute()
@action(MessageAction.BOMBER_TODAY_PTP_FOLLOW_SWITCH_ON)
def update_today_switch_every_day(payload, msg_id):
q = BomberPtp.update(today_switch=BomberCallSwitch.ON.value).where(
BomberPtp.auto_ext.is_null(False)).execute()
<mask token>
| <mask token>
@action(MessageAction.BOMBER_HEALTH_CHECK)
def health_check(payload, msg_id):
pass
<mask token>
def dpd1_process(lst):
"""已废弃的方法"""
if not lst:
return
for key, l in lst.items():
rule = getattr(BeforeInBomber, key).value
query = AutoIVRActions.select(fn.DISTINCT(AutoIVRActions.loanid)
).where(AutoIVRActions.loanid.in_(l), AutoIVRActions.group.in_(
rule.get('group')), AutoIVRActions.callstate.in_(IVRCallStatus.
call_success()))
success_set = {i.loanid for i in query}
failed_list = list(set(l) - success_set)
post_params = {'$and': rule.get('$and'), 'app_list': failed_list}
resp = Hyperloop().post('/bomber/score/verify', json=post_params)
if not resp.ok:
logging.error('hyperloop score verification failed: %s, %s',
str(resp.status_code), str(resp.text))
logging.error('hyperloop score verification failed: %s', str(
post_params))
continue
logging.debug('hyperloop score verification success: %s', resp.content)
resp_json = resp.json()
app_list = resp_json['data']
if not app_list:
continue
for item in app_list:
if random.randint(0, 5) == 1:
send_to_default_q(MessageAction.APPLICATION_BOMBER, {'id':
int(item)})
@action(MessageAction.GET_IVR)
def get_ivr(payload, msg_id):
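    """Page bills due in the configured window from the bill service and
    insert AutoIVR records, choosing the IVR group from app name,
    loan-success flag and days to due; progress is tracked in
    IVRActionLog so the job can resume."""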
logging.warning('start get_ivr')
sys_config = SystemConfig.select().where(SystemConfig.key ==
'DPD1-3_INTO_IVR').first()
now = date.today()
if sys_config and sys_config.value:
start = now - timedelta(days=3)
else:
start = now
end = now + timedelta(days=4)
item = IVRActionLog.filter(IVRActionLog.proc_date == now).first()
if not item:
AutoIVR.delete().execute()
current_page = 0
elif item.current_page >= item.total_page:
return
else:
current_page = item.current_page
auto_ivr = {'DanaCepat01': 1, 'DanaCepat00': 2, 'DanaCepat0PDP1': 3,
'PinjamUang01': 4, 'PinjamUang00': 5, 'PinjamUang0PDP1': 6,
'KtaKilat01': 7, 'KtaKilat00': 8, 'KtaKilat0PDP1': 9, 'DanaCepat11':
10, 'DanaCepat10': 11, 'DanaCepat1PDP1': 12, 'PinjamUang11': 13,
'PinjamUang10': 14, 'PinjamUang1PDP1': 15, 'KtaKilat11': 16,
'KtaKilat10': 17, 'KtaKilat1PDP1': 18, 'DanaCepat0PDP2': 19,
'DanaCepat0PDP3': 20, 'DanaCepat03': 21, 'PinjamUang0PDP2': 22,
'PinjamUang0PDP3': 23, 'PinjamUang03': 24, 'KtaKilat0PDP2': 25,
'KtaKilat0PDP3': 26, 'KtaKilat03': 27, 'DanaCepat1PDP2': 28,
'DanaCepat1PDP3': 29, 'PinjamUang1PDP2': 30, 'PinjamUang1PDP3': 31,
'KtaKilat1PDP2': 32, 'KtaKilat1PDP3': 33, 'DanaCepat13': 36,
'PinjamUang13': 37, 'KtaKilat13': 38, 'DanaCepat12': 39,
'PinjamUang12': 40, 'KtaKilat12': 41, 'DanaCepat02': 42,
'PinjamUang02': 43, 'KtaKilat02': 44, 'IKIDana01': 100, 'IKIDana00':
101, 'IKIDana0PDP1': 102, 'IKIDana11': 103, 'IKIDana10': 104,
'IKIDana1PDP1': 105, 'IKIDana0PDP2': 106, 'IKIDana0PDP3': 107,
'IKIDana03': 108, 'IKIDana1PDP2': 109, 'IKIDana1PDP3': 110,
'IKIDana13': 111, 'IKIDana12': 112, 'IKIDana02': 113}
current_page += 1
with db.atomic() as transaction:
while True:
bill_service = BillService()
ivr_action = bill_service.ivr_pages(page=current_page,
page_size=500, start_time=utc_datetime(str(start)),
end_time=utc_datetime(str(end)))
result = ivr_action['result']
page_size = int(ivr_action.get('page_size', 0))
total_page = int(ivr_action.get('total_page', 0))
insert_args = []
for a in result:
due_at = no_utc_datetime(a['due_at'])
days = (due_at.date() - now).days
if days == 2:
continue
if days > 0:
time = str(days)
else:
time = str(days).replace('-', 'PDP')
key = a['app_name'] + str(a['su']) + time
group = auto_ivr.get(key)
user_id = a['user_id']
try:
user_resp = AccountService().get_user(path_params={
'user_id': user_id})
if str(user_resp['mobile_no']) == str(a['user_mobile_no']):
numbers = a['user_mobile_no']
else:
numbers = a['user_mobile_no'] + ',' + user_resp.get(
'mobile_no')
except:
logging.error('Request Account Service Error.')
numbers = a['user_mobile_no']
insert_args.append({'application_id': a['id'], 'numbers':
numbers, 'group': group, 'user_id': user_id})
AutoIVR.insert_many(insert_args).execute()
if current_page == 1:
IVRActionLog.create(total_page=total_page, proc_date=now,
page_size=page_size, current_page=current_page)
item = IVRActionLog.get(IVRActionLog.proc_date == now)
else:
item.current_page = current_page
item.page_size = page_size
item.total_page = total_page
item.save()
transaction.commit()
current_page += 1
if current_page > int(total_page):
break
if sys_config and sys_config.value:
try:
classfiy_dpd_ptp_apps()
except Exception as e:
logging.error('dpd1-3_test_error:%s' % str(e))
def ivr_t2_test():
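    """Keep only a configured proportion of the T2 IVR groups for
    testing; mark the rest as SUCCESS so they are not dialed."""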
t2_groups = [39, 40, 41, 42, 43, 44]
ivr_test_proportion = 0.2
sys_config = SystemConfig.select().where(SystemConfig.key ==
'IVR_TEST_PROPORTION').first()
if sys_config and sys_config.value:
ivr_test_proportion = float(sys_config.value)
t2_ivrs = AutoIVR.select().where(AutoIVR.group << t2_groups, AutoIVR.
status == AutoIVRStatus.AVAILABLE.value)
t2_dict = defaultdict(list)
for ivr in t2_ivrs:
t2_dict[ivr.group].append(ivr.id)
test_ivr_ids = []
for group, ivr_ids in t2_dict.items():
number = ceil(len(ivr_ids) * ivr_test_proportion)
test_ivr_ids += ivr_ids[:number]
if not test_ivr_ids:
return
q = AutoIVR.update(status=AutoIVRStatus.SUCCESS.value).where(AutoIVR.
group << t2_groups, AutoIVR.id.not_in(test_ivr_ids)).execute()
<mask token>
@action(MessageAction.APP_MERGE)
@deprecated(version='1.0', reason='This function will be removed soon')
def app_merge(payload, msg_id):
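    """Deprecated one-off job: pull qualifying DPD1-3 first-loan apps
    into bomber and release apps whose users have logged in again."""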
sql = """
select *
from (
select a.id as id
from dashboard.application as a
inner join repayment.bill2 as b on b.external_id = a.id
where not exists (
select 1
from battlefront.user_login_log as u
where u.created_at > '2018-08-16'
and u.user_id = a.user_id
)
and a.app = 'DanaCepat'
and a.is_first_loan = 1
and a.apply_at < '2018-08-23 20:50:00'
and b.overdue_days between 1 and 3
and b.status != 2) result
where not exists (
select 1
from bomber.application as a
where a.cycle = 1
and a.status = 4
and a.id = result.id
)
"""
cursor = readonly_db.get_cursor()
cursor.execute(sql)
new_data = cursor.fetchall()
cursor.close()
if new_data:
bomber = [103, 104]
for d in new_data:
app_id = {'id': d[0]}
application_overdue(app_id, None)
Application.update(status=ApplicationStatus.AB_TEST.value,
latest_bomber=random.choice(bomber), ptp_bomber=None).where(
Application.id == d[0]).execute()
logging.warning('add new app success')
ptp = date.today() - timedelta(days=1)
del_sql = (
"""
select a.id
from bomber.application as a
where exists(
select 1
from battlefront.user_login_log as u
where u.created_at > '2018-08-16'
and u.user_id = a.user_id
)
and a.cycle = 1
and a.status = 4
and (a.promised_date is null or a.promised_date < "%s")
"""
% ptp)
cursor = readonly_db.get_cursor()
cursor.execute(del_sql)
del_date = cursor.fetchall()
cursor.close()
    if not del_date:
        return
ids = list()
for d in del_date:
ids.append(d[0])
Application.update(status=ApplicationStatus.UNCLAIMED.value,
latest_bomber=None).where(Application.id << ids).execute()
@action(MessageAction.APPLICATION_BOMBER)
def application_overdue(payload, msg_id):
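    """Handle a newly overdue (sub-)bill: dedupe against existing
    records, fetch application details from GoldenEye, create the local
    Application/OverdueBill rows and import the user's contacts."""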
application_id = payload['id']
sub_bill_id = payload['bill_sub_id']
local_app = Application.select().where(Application.external_id ==
application_id).order_by(Application.finished_at).first()
if local_app and local_app.type != ApplicationType.CASH_LOAN_STAGING.value:
logging.info('application %s overdue, already exists', application_id)
add_contact(local_app)
return
if local_app and local_app.type == ApplicationType.CASH_LOAN_STAGING.value:
overdue_bill = OverdueBillR.select().where(OverdueBillR.sub_bill_id ==
sub_bill_id, OverdueBillR.external_id == application_id)
if overdue_bill.exists():
logging.info(
'application %s,sub_bill_id %s overdue, already exists' % (
application_id, sub_bill_id))
return
try:
sub_bill = BillService().sub_bill_list(bill_sub_ids=[sub_bill_id])
sub_bill = sub_bill[0]
except Exception:
logging.error(
'application %s overdue, get sub_bill info failed:Request To repayment Error'
, application_id)
return
if sub_bill['status'] == 2:
logging.error('application %s overdue, but bills already cleared',
application_id)
return
overdue_days = sub_bill.get('overdue_days', 0)
if overdue_days == 0:
logging.info('application {} no overdue'.format(str(application_id)))
return
gold_eye = GoldenEye().get('/applications/%s' % application_id)
if not gold_eye.ok:
logging.error('get application %s failed: Request to GoldenEye.',
application_id)
return
gold_app = gold_eye.json().get('data')
user_id = gold_app['user_id']
apply_history = Dashboard().get('/users/%s/apply-history' % user_id)
if not apply_history.ok:
logging.error(
'get user %s apply history failed: Request to Dashboard Failed.',
user_id)
return
history = apply_history.json().get('data')
    loan_success_times = len([1 for i in history if i['status'] in [80,
        90, 100, 70] and i['id'] != gold_app['id']])
id = application_id
type = ApplicationType.CASH_LOAN.value
bill_id = sub_bill.get('bill_id')
amount = sub_bill.get('amount')
amount_net = sub_bill.get('amount_net')
interest_rate = sub_bill.get('interest_rate')
overdue_days = sub_bill.get('overdue_days')
origin_due_at = sub_bill.get('origin_due_at')
sub_overdue_bill = {'collection_id': id, 'bill_id': bill_id,
'sub_bill_id': sub_bill_id, 'periods': sub_bill.get('periods'),
'overdue_days': overdue_days, 'origin_due_at': origin_due_at,
'amount': amount, 'amount_net': amount_net, 'interest_rate':
interest_rate, 'external_id': application_id}
if sub_bill['category'] == ApplicationType.CASH_LOAN_STAGING.value:
if local_app and local_app.status != ApplicationStatus.REPAID.value:
sub_overdue_bill['collection_id'] = local_app.id
local_app.amount += amount
local_app.amount_net += amount_net
local_app.save()
new_overdue = OverdueBill.create(**sub_overdue_bill)
logging.info('application %s,sub_bill_id:%s overdue created' %
(application_id, sub_bill_id))
return
else:
id = idg()
type = ApplicationType.CASH_LOAN_STAGING.value
sub_overdue_bill['collection_id'] = id
ptp_info = BombingHistory.filter(BombingHistory.application == id).first()
promised_amount = ptp_info and ptp_info.promised_amount
promised_date = ptp_info and ptp_info.promised_date
application = Application.create(id=id, user_id=gold_app['user_id'],
user_mobile_no=gold_app['user_mobile_no'], user_name=gold_app[
'id_name'], app=gold_app['app'], device_no=gold_app['device_no'],
contact=json.dumps(gold_app.get('contact')), apply_at=gold_app.get(
'apply_date'), id_ektp=gold_app.get('id_ektp'), birth_date=
birth_dt_ktp(gold_app.get('id_ektp')), gender=gender_ktpnum(
gold_app.get('id_ektp')), profile_province=(gold_app.get(
'profile_province') or {}).get('name'), profile_city=(gold_app.get(
'profile_city') or {}).get('name'), profile_district=(gold_app.get(
'profile_district') or {}).get('name'), profile_residence_time=
gold_app.get('profile_residence_time'), profile_residence_type=
gold_app.get('profile_residence_type'), profile_address=gold_app.
get('profile_address'), profile_education=gold_app.get(
'profile_education'), profile_college=(gold_app.get(
'profile_college') or {}).get('name'), job_name=gold_app.get(
'job_name'), job_tel=gold_app.get('job_tel'), job_bpjs=gold_app.get
('job_bpjs'), job_user_email=gold_app.get('job_user_email'),
job_type=gold_app.get('job_type'), job_industry=gold_app.get(
'job_industry'), job_department=gold_app.get('job_department'),
job_province=(gold_app.get('job_province') or {}).get('name'),
job_city=(gold_app.get('job_city') or {}).get('name'), job_district
=(gold_app.get('job_district') or {}).get('name'), job_address=
gold_app.get('job_address'), amount=amount, amount_net=amount_net,
interest_rate=interest_rate, term=gold_app.get('term'),
origin_due_at=origin_due_at, overdue_days=overdue_days, repay_at=
sub_bill.get('repay_at'), loan_success_times=loan_success_times,
arrived_at=datetime.now(), follow_up_date=datetime.now(),
promised_amount=promised_amount, promised_date=promised_date,
external_id=application_id, type=type, bill_id=bill_id, dpd1_entry=
datetime.now())
new_overdue = OverdueBill.create(**sub_overdue_bill)
logging.info('overdue application %s created', application_id)
Escalation.create(application=id, type=EscalationType.AUTOMATIC.value,
status=ApprovalStatus.APPROVED.value, current_cycle=0, escalate_to=1)
add_contact(application)
def add_contact(application):
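    """Import every phone number known for the applicant (apply info,
    extra phones, EC, SMS/call stats, KTP, company, other logins) into
    Contact, de-duplicating against existing numbers and reporting the
    changes to the contact monitor queue."""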
logging.info('start add contact for application: %s', application.id)
contacts = Contact.filter(Contact.user_id == application.user_id)
existing_numbers = {contact.number for contact in contacts}
insert_contacts = list()
mon_insert_contact = {}
user_mobile_no = number_strip(application.user_mobile_no)
if user_mobile_no and user_mobile_no not in existing_numbers:
insert_contacts.append({'user_id': application.user_id, 'name':
application.user_name, 'number': user_mobile_no, 'relationship':
Relationship.APPLICANT.value, 'source': 'apply info',
'real_relationship': Relationship.APPLICANT.value})
existing_numbers.add(number_strip(application.user_mobile_no))
extra_phone = GoldenEye().get('/users/%s/extra-phone' % application.user_id
)
if not extra_phone.ok:
extra_phone = []
logging.error('get user %s extra contacts failed', application.user_id)
else:
extra_phone = extra_phone.json()['data']
if extra_phone:
for i in extra_phone:
number = number_strip(i['number'])[:64]
if not number:
continue
if number in existing_numbers:
continue
insert_contacts.append({'user_id': application.user_id, 'name':
application.user_name, 'number': number, 'relationship':
Relationship.APPLICANT.value, 'source': 'extra phone',
'real_relationship': Relationship.APPLICANT.value})
key = user_mobile_no, number, ContactType.A_EXTRA_PHONE.value
mon_insert_contact[key] = 1, 0, application.user_name
existing_numbers.add(number)
ec_contact = []
contact = json.loads(application.contact or '[]')
for i in contact:
if number_strip(i['mobile_no']
) not in existing_numbers and number_strip(i['mobile_no']):
ec_contact.append({'user_id': application.user_id, 'name': i[
'name'], 'number': number_strip(i['mobile_no']),
'relationship': Relationship.FAMILY.value, 'sub_relation':
SubRelation.EC.value, 'source': FamilyContactType.EC.value,
'real_relationship': Relationship.FAMILY.value})
key = user_mobile_no, number_strip(i['mobile_no']
), ContactType.F_EC.value
mon_insert_contact[key] = 1, 0, i['name']
existing_numbers.add(number_strip(i['mobile_no']))
if i['type'] != 1:
continue
if number_strip(i['tel_no']) not in existing_numbers and number_strip(i
['tel_no']):
ec_contact.append({'user_id': application.user_id, 'name': i[
'name'], 'number': number_strip(i['tel_no']),
'relationship': Relationship.FAMILY.value, 'sub_relation':
SubRelation.EC.value, 'source': FamilyContactType.EC.value,
'real_relationship': Relationship.FAMILY.value})
key = user_mobile_no, number_strip(i['tel_no']
), ContactType.F_EC.value
mon_insert_contact[key] = 1, 0, i['name']
existing_numbers.add(number_strip(i['tel_no']))
if ec_contact:
Contact.insert_many(ec_contact).execute()
if all((application.job_tel, number_strip(application.job_tel),
number_strip(application.job_tel) not in existing_numbers)):
insert_contacts.append({'user_id': application.user_id, 'name':
None, 'number': number_strip(application.job_tel),
'relationship': Relationship.COMPANY.value, 'source':
'basic info job_tel', 'real_relationship': Relationship.COMPANY
.value})
key = user_mobile_no, number_strip(application.job_tel
), ContactType.C_BASIC_INFO_JOB_TEL.value
mon_insert_contact[key] = 1, 0, None
existing_numbers.add(number_strip(application.job_tel))
sms_contacts = GoldenEye().get('/applications/%s/sms-contacts' %
application.external_id)
if not sms_contacts.ok:
sms_contacts = []
logging.info('get user %s sms contacts failed', application.external_id
)
else:
sms_contacts = sms_contacts.json()['data']
if sms_contacts:
for i in sms_contacts:
number = number_strip(i['number'])[:64]
if not number:
continue
if number in existing_numbers:
continue
insert_contacts.append({'user_id': application.user_id, 'name':
i['name'][:128], 'number': number, 'relationship':
Relationship.SUGGESTED.value, 'source': 'sms contacts',
'real_relationship': Relationship.SUGGESTED.value})
key = user_mobile_no, number, ContactType.S_SMS_CONTACTS.value
mon_insert_contact[key] = 1, 0, i['name'][:128]
existing_numbers.add(number)
if insert_contacts:
Contact.insert_many(insert_contacts).execute()
cf = GoldenEye().get('/applications/%s/call/frequency' % application.
external_id)
if not cf.ok:
call_frequency = []
logging.error('get application %s call frequency error',
application.external_id)
else:
call_frequency = cf.json()['data']
insert_contacts = []
fm = GoldenEye().get('/applications/%s/contact/family-member' %
application.external_id)
if not fm.ok:
family = []
logging.error('get application %s family-member info error',
application.external_id)
else:
family = fm.json()['data']
if family:
for i in family:
if not i.get('number'):
logging.info('family member %s' % str(i))
continue
number = number_strip(i['number'])[:64]
if not number:
continue
if number in existing_numbers:
continue
logging.info('family members: %s' % str(i))
insert_contacts.append({'user_id': application.user_id, 'name':
i['name'][:128], 'number': number, 'relationship':
Relationship.FAMILY.value, 'source': FamilyContactType.
CALLEC.value, 'total_count': i.get('total_count', 1),
'total_duration': i.get('total_duration', 0),
'real_relationship': Relationship.FAMILY.value})
key = user_mobile_no, number, ContactType.F_CALL_EC.value
mon_insert_contact[key] = i.get('total_count', 1), i.get(
'total_duration', 0), i['name'][:128]
existing_numbers.add(number)
mon_update_contact = {}
if call_frequency:
with db.atomic():
count = 1
for i in call_frequency:
number = number_strip(i['number'])[:64]
if not number:
continue
if number in existing_numbers:
                    Contact.update(total_count=i['total_count'],
                        total_duration=i['total_duration']).where(Contact.
                        number == number, Contact.user_id == application.
                        user_id).execute()
key = user_mobile_no, number
mon_update_contact[key] = i['total_count'], i[
'total_duration']
continue
if count < 6:
insert_contacts.append({'user_id': application.user_id,
'name': i['name'][:128], 'number': number,
'relationship': Relationship.FAMILY.value,
'total_count': i['total_count'], 'total_duration':
i['total_duration'], 'source': FamilyContactType.
CALLTOP5.value, 'real_relationship': Relationship.
FAMILY.value})
count += 1
key = user_mobile_no, number, ContactType.F_CALL_TOP5.value
mon_insert_contact[key] = i['total_count'], i[
'total_duration'], i['name'][:128]
else:
insert_contacts.append({'user_id': application.user_id,
'name': i['name'][:128], 'number': number,
'relationship': Relationship.SUGGESTED.value,
'total_count': i['total_count'], 'total_duration':
i['total_duration'], 'source': 'call frequency',
'real_relationship': Relationship.SUGGESTED.value})
key = (user_mobile_no, number, ContactType.
S_CALL_FREQUENCY.value)
mon_insert_contact[key] = i['total_count'], i[
'total_duration'], i['name'][:128]
existing_numbers.add(number)
if insert_contacts:
Contact.insert_many(insert_contacts).execute()
next_apply_list = AccountService().add_contact(application.user_id)
for next_apply in next_apply_list:
number = number_strip(str(next_apply))[:64]
if number and number not in existing_numbers:
Contact.create(user_id=application.user_id, name=application.
user_name, number=number, relationship=Relationship.
SUGGESTED.value, source='online profile phone',
real_relationship=Relationship.SUGGESTED.value)
key = (user_mobile_no, number, ContactType.
S_ONLINE_PROFILE_PHONE.value)
mon_insert_contact[key] = 1, 0, application.user_name
existing_numbers.add(number)
next_applicant = GoldenEye().get('/bomber/%s/dual_contact' %
application.user_id)
if not next_applicant.ok:
next_applicant = []
logging.error('get user %s dual_contact contacts failed' %
application.user_id)
else:
next_applicant = next_applicant.json()['data']
if next_applicant:
for i in next_applicant:
number = number_strip(str(i))[:64]
if number and number not in existing_numbers:
Contact.create(user_id=application.user_id, name=
application.user_name, number=number, relationship=
Relationship.APPLICANT.value, source='apply info',
real_relationship=Relationship.APPLICANT.value)
key = user_mobile_no, number, ContactType.A_APPLY_INFO.value
mon_insert_contact[key] = 1, 0, application.user_name
existing_numbers.add(number)
logging.info('get user %s dual_contact contacts success' %
application.user_id)
numbers = []
try:
numbers = AccountService().ktp_number(path_params={'user_id':
application.user_id})
except Exception as e:
logging.info('request ktp numbers failed %s' % str(e))
for n in numbers:
number = number_strip(str(n))[:64]
if number and number not in existing_numbers:
Contact.create(user_id=application.user_id, name=application.
user_name, number=number, relationship=Relationship.
APPLICANT.value, source='ktp number', real_relationship=
Relationship.APPLICANT.value)
key = user_mobile_no, number, ContactType.A_KTP_NUMBER.value
mon_insert_contact[key] = 1, 0, application.user_name
existing_numbers.add(number)
logging.info('get user %s dual_contact contacts success' %
application.user_id)
try:
ecs = GoldenEye().get('/applications/%s/contact/ec' % application.
external_id)
    except Exception as e:
        logging.info('request ec-member error: %s' % str(e))
        ecs = None
    try:
        if not ecs or not ecs.ok:
ec = []
logging.info('get application %s ec-member info error',
application.external_id)
else:
ec = ecs.json()['data']
if ec:
for e in ec:
number = number_strip(e['numbers'])[:64]
if not number:
continue
if number not in existing_numbers:
Contact.create(user_id=application.user_id, name=e[
'name'][:128], number=number, relationship=
Relationship.FAMILY.value, source=FamilyContactType
.CONTACTEC.value, real_relationship=Relationship.
FAMILY.value)
key = (user_mobile_no, number, ContactType.F_CONTACT_EC
.value)
mon_insert_contact[key] = 1, 0, e['name'][:128]
existing_numbers.add(number)
except Exception as e:
logging.info('add ec_member error:%s' % str(e))
try:
mn = GoldenEye().get('/applications/%s/contact/my_number' %
application.external_id)
    except Exception as e:
        logging.info('request my_number error: %s' % str(e))
        mn = None
    try:
        if not mn or not mn.ok:
my = []
logging.info('get application %s my_number info error',
application.external_id)
else:
my = mn.json()['data']
if my:
for m in my:
number = number_strip(m)[:64]
if not number:
continue
if number not in existing_numbers:
Contact.create(user_id=application.user_id, name=my[m][
:128], number=number, relationship=Relationship.
SUGGESTED.value, source='my number',
real_relationship=Relationship.SUGGESTED.value)
key = user_mobile_no, number, ContactType.S_MY_NUMBER.value
mon_insert_contact[key] = 1, 0, my[m][:128]
existing_numbers.add(number)
except Exception as e:
logging.info('add my_member error:%s' % str(e))
try:
cn = GoldenEye().get('/applications/%s/contact/company-number' %
application.external_id)
    except Exception as e:
        logging.info('request company-number error: %s' % str(e))
        cn = None
    try:
        if not cn or not cn.ok:
cn = []
logging.info('get application %s company_number info error',
application.external_id)
else:
cn = cn.json()['data']
if cn:
for c in cn:
number = c
if not number:
continue
if number not in existing_numbers:
Contact.create(user_id=application.user_id, name=cn[c][
:128], number=number, relationship=Relationship.
COMPANY.value, source='company', real_relationship=
Relationship.COMPANY.value)
key = user_mobile_no, number, ContactType.C_COMPANY.value
mon_insert_contact[key] = 1, 0, cn[c][:128]
existing_numbers.add(number)
except Exception as e:
logging.info('add company_member error:%s' % str(e))
try:
ol = AccountService().other_login_contact(userId=application.user_id)
except Exception as e:
logging.error('request other_login error: %s' % e)
ol = {}
try:
for o in ol:
number = number_strip(o)
if not number:
continue
if number not in existing_numbers:
Contact.create(user_id=application.user_id, name=ol[o][:128
], number=number, relationship=Relationship.SUGGESTED.
value, source='other_login', real_relationship=
Relationship.SUGGESTED.value)
key = user_mobile_no, number, ContactType.S_OTHER_LOGIN.value
mon_insert_contact[key] = 1, 0, ol[o][:128]
except Exception as e:
logging.error('add other_login number error:%s' % e)
logging.info('add contact for application %s finished', application.id)
if mon_insert_contact or mon_update_contact:
send_to_default_q(MessageAction.IMPORT_CONTACT_TO_MON, {
'user_mobile_no': user_mobile_no, 'insert_contact': str(
mon_insert_contact), 'update_contact': str(mon_update_contact),
'user_id': application.user_id, 'name': application.user_name})
<mask token>
def get_contact_from_mongo(number):
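    """Return related numbers for `number` from the MongoDB TotalContact
    collection, skipping unusable sources."""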
if not number:
return []
query = TotalContact.objects(src_number=number, source__in=TotalContact
.available()).order_by('source')
lst = []
for c in query:
relation = TotalContact.relationship(c.source)
if relation == -1:
continue
source = TotalContact.str_source(c.source)
if not source:
continue
lst.append({'related_number': c.dest_number, 'source': source,
'is_calc': c.is_calc, 'total_count': c.total_count,
'total_duration': c.total_duration, 'relation': relation,
'name': c.dest_name})
return lst
<mask token>
def check_key_not_none(payload, keys):
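    """Return True when every key in ``keys`` has a non-None value in
    ``payload``; log and return False on the first missing one."""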
for key in keys:
if payload.get(key) is None:
logging.error('Missing args {}'.format(str(key)))
return False
return True
@action(MessageAction.BILL_PAID)
def bill_paid(payload, msg_id):
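    """Handle a repayment callback for an application.

    Records a RepaymentLog row against the current bomber, promotes the
    call priority of the last connected number, and drops an Inbox notice
    for the bomber handling the application.
    """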
validate = check_key_not_none(payload, ['external_id', 'late_fee_part',
'principal_part', 'paid_at', 'bill_sub_id', 'partner_bill_id'])
if not validate:
logging.error('payload key not fully pass in.')
return
external_id = payload['external_id']
late_fee_part = Decimal(payload['late_fee_part'])
principal_part = Decimal(payload['principal_part'])
paid_at = payload['paid_at']
partner_bill_id = payload['partner_bill_id']
logging.debug(
'application %s paid principal part %s, paid late fee part %s',
external_id, principal_part, late_fee_part)
application = Application.filter(Application.external_id == external_id
).order_by(-Application.created_at).first()
if not application:
        logging.info('application %s paid, application not found', external_id)
return
sub_bill_id = payload['bill_sub_id']
overdue_bill = OverdueBillR.select().where(OverdueBillR.collection_id ==
application.id, OverdueBillR.sub_bill_id == sub_bill_id).first()
if (application.type == ApplicationType.CASH_LOAN_STAGING.value and not
overdue_bill):
logging.info('bill sub not in bomber %s', sub_bill_id)
return
with db.atomic():
repay_at = str_no_utc_datetime(payload['latest_repay_at'])
Application.update(repay_at=repay_at).where(Application.id ==
application.id).execute()
RepaymentLog.create(application=application.id, is_bombed=True,
current_bomber=application.latest_bomber_id, cycle=application.
cycle, principal_part=principal_part, late_fee_part=
late_fee_part, repay_at=paid_at, ptp_bomber=application.
ptp_bomber, latest_call=application.latest_call, periods=
overdue_bill.periods if overdue_bill else None, overdue_bill_id
=overdue_bill.id if overdue_bill else None, partner_bill_id=
partner_bill_id)
phone_status = PhoneStatus.CONNECTED.value
real_relationship = RealRelationship.user_values()
commit = CallActionCommit.NO.value
number = CallActions.select(CallActions.number).where(CallActions.
phone_status == phone_status, CallActions.real_relationship <<
real_relationship, CallActions.commit == commit, CallActions.
application == application.id).order_by(-CallActions.created_at
).first()
if number:
Contact.update(call_priority=PriorityStatus.REPAY.value).where(
Contact.user_id == application.user_id, Contact.
call_priority == PriorityStatus.LAST.value).execute()
Contact.update(call_priority=PriorityStatus.LAST.value).where(
Contact.user_id == application.user_id, Contact.number ==
number.number).execute()
if not application.latest_bomber_id:
return
Inbox.create(title='application %s,sub_bill_id %s repaid' % (
application.external_id, sub_bill_id), content=
'application %s,sub_bill_id %s repaid' % (application.
external_id, sub_bill_id), receiver=application.
latest_bomber_id or application.last_bomber_id, category=
InboxCategory.REPAID.value)
@action(MessageAction.BILL_RELIEF)
def bill_relief(payload, msg_id):
"""已废弃"""
bill = payload['head_bill']
repay_at = str_no_utc_datetime(bill['latest_repay_at'])
updated_row = Application.update(repay_at=repay_at).where(Application.
id == bill['external_id']).execute()
logging.info('application %s bill relief done', bill['external_id'])
return updated_row
<mask token>
@action(MessageAction.BOMBER_CALC_OVERDUE_DAYS_OVER)
def calc_overdue_days_over(payload, msg_id):
"""
Call by BOMBER_CALC_SUMMARY
:param payload:
:param msg_id:
:return:
"""
now = fn.NOW()
origin_diff_days = fn.DATEDIFF(now, Application.origin_due_at)
overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))
query = Application.update(overdue_days=overdue_days).where(Application
.status << [ApplicationStatus.PROCESSING.value, ApplicationStatus.
UNCLAIMED.value, ApplicationStatus.AB_TEST.value], Application.
overdue_days > 95, Application.type == ApplicationType.CASH_LOAN.value)
updated_rows_count = query.execute()
logging.info('calc overdue days done, updated count: %s',
updated_rows_count)
try:
calc_overdue_days_over_instalment()
except Exception as e:
logging.error('calc_overdue_days_over_instalment_error: %s' % str(e))
apps = Application.filter(Application.status << [ApplicationStatus.
UNCLAIMED.value, ApplicationStatus.PROCESSING.value,
ApplicationStatus.AB_TEST.value], Application.overdue_days > 95,
Application.promised_date.is_null(True) | (fn.DATE(Application.
promised_date) < datetime.today().date()))
ids = [i.id for i in apps]
for idx in range(0, len(ids), 100):
send_to_default_q(MessageAction.BOMBER_AUTOMATIC_ESCALATION, {
'application_list': ids[idx:idx + 100]})
send_to_default_q(MessageAction.UPDATE_OLD_LOAN_APPLICATION, {})
def calc_overdue_days_over_instalment():
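    """Refresh overdue_days for instalment sub-bills already past 95 days
    and write each staging application's maximum back onto Application."""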
now = fn.NOW()
origin_diff_days = fn.DATEDIFF(now, OverdueBill.origin_due_at)
overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))
sub_bill_status_list = [ApplicationStatus.PROCESSING.value,
ApplicationStatus.UNCLAIMED.value, ApplicationStatus.AB_TEST.value]
for status in sub_bill_status_list:
query = OverdueBill.update(overdue_days=overdue_days).where(
OverdueBill.status == status, OverdueBill.overdue_days > 95)
updated_rows_count = query.execute()
logging.info(
'calc_overdue_days_over_instalment done,count:%s,status:%s' % (
updated_rows_count, status))
overdue_bills = OverdueBill.select(OverdueBill.collection_id,
OverdueBill.overdue_days).join(Application, JOIN_LEFT_OUTER, on
=OverdueBill.collection_id == Application.id).where(Application
.status == status, Application.type == ApplicationType.
CASH_LOAN_STAGING.value)
app_update = {}
for ob in overdue_bills:
if ob.collection_id not in app_update:
app_update[ob.collection_id] = ob.overdue_days
else:
ob_days = max(app_update[ob.collection_id], ob.overdue_days)
app_update[ob.collection_id] = ob_days
for aid, a_days in app_update.items():
q = Application.update(overdue_days=a_days).where(Application.
id == aid).execute()
logging.info('update instalment application done')
<mask token>
def calc_overdue_days_instalment():
now = fn.NOW()
origin_diff_days = fn.DATEDIFF(now, OverdueBill.origin_due_at)
overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))
sub_bill_status_list = [ApplicationStatus.PROCESSING.value,
ApplicationStatus.UNCLAIMED.value, ApplicationStatus.AB_TEST.value]
today_now_time = datetime.now()
month_first_day = today_now_time.replace(day=1, hour=1, minute=30,
second=0, microsecond=0)
for status in sub_bill_status_list:
query = OverdueBill.update(overdue_days=overdue_days).where(
OverdueBill.status == status, OverdueBill.overdue_days <= 95)
updated_rows_count = query.execute()
logging.info('calc_overdue_days_instalment done,count:%s,status:%s' %
(updated_rows_count, status))
overdue_bills = OverdueBill.select(OverdueBill.status, OverdueBill.
created_at, OverdueBill.collection_id, OverdueBill.overdue_days
).join(Application, JOIN_LEFT_OUTER, on=OverdueBill.
collection_id == Application.id).where(Application.status ==
status, Application.type == ApplicationType.CASH_LOAN_STAGING.value
)
app_update = {}
for ob in overdue_bills:
if (ob.status == ApplicationStatus.REPAID.value and ob.
created_at < month_first_day):
continue
if ob.collection_id not in app_update:
app_update[ob.collection_id] = ob.overdue_days
else:
ob_days = max(app_update[ob.collection_id], ob.overdue_days)
app_update[ob.collection_id] = ob_days
for aid, a_days in app_update.items():
q = Application.update(overdue_days=a_days).where(Application.
id == aid).execute()
logging.info('update instalment application done')
@action(MessageAction.BOMBER_AUTOMATIC_ESCALATION)
def automatic_escalation(payload, msg_id):
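    """Escalate applications whose overdue days have moved into a later
    cycle bucket: close the old dispatch record, create an Escalation row
    and reset the application's bomber-related fields."""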
app_ids = payload.get('application_list', [])
if not app_ids:
return
apps = Application.select().where(Application.id.in_(app_ids),
Application.status != ApplicationStatus.REPAID.value)
for a in apps:
new_cycle = application_entry_different_calculations(a)
if a.overdue_days < 90:
logging.info(
'automatic_escalation_bomber_app_id:{},new_cycle:{},cycle:{},overdue_days:{}'
.format(a.id, new_cycle, a.cycle, a.overdue_days))
if new_cycle > a.cycle:
with db.atomic():
if a.latest_bomber_id or a.cycle in (Cycle.C1A.value, Cycle
.C1B.value):
bomber_id = (a.latest_bomber_id if a.latest_bomber_id else
a.cycle)
DispatchAppHistory.update(out_at=datetime.now(),
out_overdue_days=a.overdue_days).where(
DispatchAppHistory.application == a.id,
DispatchAppHistory.bomber_id == bomber_id).execute()
Escalation.create(application=a.id, type=EscalationType.
AUTOMATIC.value, status=ApprovalStatus.APPROVED.value,
current_cycle=a.cycle, escalate_to=new_cycle,
current_bomber_id=a.latest_bomber)
dis_app_update = DispatchApp.update(status=DisAppStatus.
ABNORMAL.value).where(DispatchApp.application == a.id)
dis_app_update.execute()
a.cycle = new_cycle
a.last_bomber = a.latest_bomber
a.status = ApplicationStatus.UNCLAIMED.value
a.latest_bomber = None
a.ptp_bomber = None
a.latest_call = None
a.called_times = 0
if new_cycle == Cycle.C1B.value:
a.C1B_entry = datetime.now()
elif new_cycle == Cycle.C2.value:
a.C2_entry = datetime.now()
elif new_cycle == Cycle.C3.value:
a.C3_entry = datetime.now()
a.save()
logging.info('automatic escalation done')
def application_entry_different_calculations(app):
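    """Map an application's overdue_days to its target cycle bucket,
    falling back to the current cycle when no bucket matches."""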
    conf = {1: [1, 10], 2: [11, 30], 3: [31, 60], 4: [61, 90],
        5: [91, 999999]}
for new_cycle, scopes in conf.items():
if scopes[0] <= app.overdue_days <= scopes[1]:
return new_cycle
return app.cycle
@action(MessageAction.BOMBER_CALC_SUMMARY)
def cron_summary(payload, msg_id):
"""已废弃"""
employees = Bomber.select(Bomber, Role).join(Role)
summary = {i.id: {'cycle': i.role.cycle, 'claimed': 0, 'completed': 0,
'cleared': 0, 'escalated': 0, 'transferred': 0, 'promised': 0,
'amount_recovered': Decimal(0), 'calls_made': 0, 'calls_connected':
0, 'sms_sent': 0} for i in employees}
now_date = date.today()
cal_date = now_date - timedelta(days=1)
claimed = Application.select(Application.latest_bomber, fn.COUNT(
Application.id).alias('claimed')).where(fn.DATE(Application.
claimed_at) == cal_date, Application.status << [ApplicationStatus.
PROCESSING.value, ApplicationStatus.REPAID.value], Application.
latest_bomber.is_null(False)).group_by(Application.latest_bomber)
cleared = Application.select(Application.latest_bomber, fn.COUNT(
Application.id).alias('cleared')).where(fn.DATE(Application.
finished_at) == cal_date, Application.status == ApplicationStatus.
REPAID.value, Application.latest_bomber.is_null(False)).group_by(
Application.latest_bomber)
completed = Application.select(Application.latest_bomber, fn.COUNT(
Application.id).alias('completed')).where(Application.
latest_bombing_time.is_null(False), fn.DATE(Application.
latest_bombing_time) == cal_date, Application.latest_bomber.is_null
(False)).group_by(Application.latest_bomber)
escalated = Escalation.select(Escalation.current_bomber, fn.COUNT(
Escalation.id).alias('escalated')).where(fn.DATE(Escalation.
created_at) == cal_date, Escalation.type == EscalationType.
AUTOMATIC.value, Escalation.current_bomber.is_null(False),
Escalation.status == ApprovalStatus.APPROVED.value).group_by(Escalation
.current_bomber)
transferred = Transfer.select(Transfer.operator, fn.COUNT(Transfer.id).
alias('transferred')).where(fn.DATE(Transfer.reviewed_at) ==
cal_date, Transfer.status == ApprovalStatus.APPROVED.value).group_by(
Transfer.operator)
promised = BombingHistory.select(BombingHistory.bomber, fn.COUNT(
BombingHistory.id).alias('promised')).where(fn.DATE(BombingHistory.
created_at) == cal_date, BombingHistory.result == BombingResult.
HAS_PROGRESS.value).group_by(BombingHistory.bomber)
amount_recovered = RepaymentLog.select(RepaymentLog.current_bomber, fn.
SUM(RepaymentLog.principal_part).alias('principal_part'), fn.SUM(
RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(
RepaymentLog.repay_at) == cal_date, RepaymentLog.is_bombed == True,
RepaymentLog.current_bomber.is_null(False)).group_by(RepaymentLog.
current_bomber)
calls_made = CallLog.select(CallLog.user_id, fn.COUNT(CallLog.record_id
).alias('calls_made')).where(fn.DATE(CallLog.time_start) ==
cal_date, CallLog.system_type == '1').group_by(CallLog.user_id)
calls_connected = CallLog.select(CallLog.user_id, fn.COUNT(CallLog.
record_id).alias('calls_connected')).where(fn.DATE(CallLog.
time_start) == cal_date, CallLog.duration > 10, CallLog.system_type ==
'1').group_by(CallLog.user_id)
sms_sent = ConnectHistory.select(ConnectHistory.operator, fn.COUNT(
ConnectHistory.id).alias('sms_sent')).where(ConnectHistory.type.in_
(ConnectType.sms()), ConnectHistory.created_at >= cal_date,
ConnectHistory.created_at < now_date).group_by(ConnectHistory.operator)
for i in claimed:
summary[i.latest_bomber_id]['claimed'] += i.claimed
for i in completed:
summary[i.latest_bomber_id]['completed'] += i.completed
for i in cleared:
summary[i.latest_bomber_id]['cleared'] += i.cleared
for i in escalated:
summary[i.current_bomber_id]['escalated'] += i.escalated
for i in transferred:
summary[i.operator_id]['transferred'] += i.transferred
for i in promised:
summary[i.bomber_id]['promised'] += i.promised
for i in amount_recovered:
amount_recovered = i.principal_part + i.late_fee_part
summary[i.current_bomber_id]['amount_recovered'] += amount_recovered
for i in calls_made:
summary[int(i.user_id)]['calls_made'] += i.calls_made
for i in calls_connected:
summary[int(i.user_id)]['calls_connected'] += i.calls_connected
for i in sms_sent:
summary[i.operator_id]['sms_sent'] += i.sms_sent
insert_args = []
for bomber_id, data in summary.items():
insert_args.append({'bomber': bomber_id, 'cycle': data['cycle'],
'claimed': data['claimed'], 'completed': data['completed'],
'cleared': data['cleared'], 'escalated': data['escalated'],
'transferred': data['transferred'], 'promised': data['promised'
], 'amount_recovered': data['amount_recovered'], 'calls_made':
data['calls_made'], 'calls_connected': data['calls_connected'],
'sms_sent': data['sms_sent'], 'date': cal_date})
if insert_args:
Summary.insert_many(insert_args).execute()
cycle_args = []
escalated_in = Escalation.select(Escalation.escalate_to, fn.COUNT(
Escalation.id).alias('escalated_in')).where(Escalation.status ==
ApprovalStatus.APPROVED.value, fn.DATE(Escalation.created_at) ==
cal_date).group_by(Escalation.escalate_to)
for i in escalated_in:
cycle_args.append({'cycle': i.escalate_to, 'escalated_in': i.
escalated_in, 'date': cal_date})
amount_recovered_total = RepaymentLog.select(RepaymentLog.cycle, fn.SUM
(RepaymentLog.principal_part).alias('principal_part'), fn.SUM(
RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(
RepaymentLog.repay_at) == cal_date).group_by(RepaymentLog.cycle)
for i in amount_recovered_total:
amount_recovered_total = i.principal_part + i.late_fee_part
cycle_args.append({'cycle': i.cycle, 'amount_recovered_total':
amount_recovered_total, 'date': cal_date})
if cycle_args:
Summary.insert_many(cycle_args).execute()
logging.info('cal summary done')
send_to_default_q(MessageAction.BOMBER_CALC_OVERDUE_DAYS, {})
@action(MessageAction.BOMBER_CALC_SUMMARY2)
def cron_summary2(payload, msg_id):
"""已废弃,定时任务还在执行,具体情况待确定"""
cal_date = date.today() - timedelta(days=1)
employees = Bomber.select(Bomber, Role).join(Role)
auto_call_actions = AutoCallActions.select(AutoCallActions.bomber,
AutoCallActions.result, fn.COUNT(AutoCallActions.id).alias('count')
).where(fn.DATE(AutoCallActions.created_at) == cal_date)
amount_recovered = RepaymentLog.select(RepaymentLog.current_bomber, fn.
SUM(RepaymentLog.principal_part).alias('principal_part'), fn.SUM(
RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(
RepaymentLog.repay_at) == cal_date, RepaymentLog.current_bomber.
is_null(False), RepaymentLog.is_bombed == True)
cleared = Application.select(Application.latest_bomber, fn.COUNT(
Application.id).alias('cleared')).where(fn.DATE(Application.
finished_at) == cal_date, Application.status == ApplicationStatus.
REPAID.value, Application.latest_bomber.is_null(False))
auto_call_actions = auto_call_actions.group_by(AutoCallActions.bomber,
AutoCallActions.result)
amount_recovered = amount_recovered.group_by(RepaymentLog.current_bomber)
cleared = cleared.group_by(Application.latest_bomber)
summary = {e.id: {'cycle': e.role.cycle, 'answered_calls': 0, 'ptp': 0,
'follow_up': 0, 'not_useful': 0, 'cleared': 0, 'amount_recovered':
0} for e in employees}
for a in auto_call_actions:
summary[a.bomber_id]['answered_calls'] += a.count
if a.result == AutoCallResult.PTP.value:
summary[a.bomber_id]['ptp'] += a.count
if a.result == AutoCallResult.FOLLOW_UP.value:
summary[a.bomber_id]['follow_up'] += a.count
if a.result == AutoCallResult.NOT_USEFUL.value:
summary[a.bomber_id]['not_useful'] += a.count
for i in amount_recovered:
amount_recovered = i.principal_part + i.late_fee_part
summary[i.current_bomber_id]['amount_recovered'] += amount_recovered
for i in cleared:
summary[i.latest_bomber_id]['cleared'] += i.cleared
insert_args = []
for bomber_id, data in summary.items():
insert_args.append({'bomber': bomber_id, 'cycle': data['cycle'],
'answered_calls': data['answered_calls'], 'ptp': data['ptp'],
'follow_up': data['follow_up'], 'not_useful': data['not_useful'
], 'cleared': data['cleared'], 'amount_recovered': str(data[
'amount_recovered']), 'date': cal_date})
if insert_args:
Summary2.insert_many(insert_args).execute()
<mask token>
@action(MessageAction.BOMBER_DISCOUNT_APPROVED)
def bomber_discount_approved(payload, msg_id):
app_id = payload['id']
msg_type = payload['msg_type']
discount_to = payload['discount_to']
effective_to = payload['effective_to']
application = Application.filter(Application.id == app_id).first()
if not application:
logging.error(
'discount approved msg send failed application %s not found',
app_id)
return
template = Template.select(Template.text, Template.app).where(Template.
type == ConnectType.AUTO_SMS.value, Template.id << Template.
get_auto_sms_tpl(msg_type), Template.app == application.app).first()
if not template:
logging.error('discount approved msg send failed template %s not found'
, msg_type)
return
promised_date = None
if application.promised_date:
promised_date = application.promised_date.strftime('%d-%m-%Y')
tpl_data = {'user_name': application.user_name, 'due_days': application
.overdue_days, 'app_name': application.app, 'phone': application.
user_mobile_no, 'cs_number': cs_number_conf.get(application.app,
'02150202889'), 'promised_date': promised_date, 'discount_to':
discount_to, 'effective_to': effective_to}
content = template.text.format(**tpl_data)
data_list = [{'receiver': '62' + application.user_mobile_no, 'content':
content, 'title': ''}]
send_sms(data_list, msg_type, application.app)
<mask token>
@action(MessageAction.BOMBER_AUTO_CALL_LIST)
def bomber_auto_call_list(payload, msg_id):
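    """Rebuild the auto-call list: run the dispatch jobs, queue every
    eligible C1A application (not already dispatched, no open promise),
    then fan the list out to BOMBER_AUTO_CALL_CONTACT in batches of 100."""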
with db.atomic():
bomber_dispatch_app()
dispatch_instalment_app()
dis_apps = DispatchApp.select(DispatchApp.application).where(
DispatchApp.status == DisAppStatus.NORMAL.value)
c1_apps = Application.select(Application.id, Application.cycle,
Application.follow_up_date, Application.called_times).where(Application
.status.not_in([ApplicationStatus.REPAID.value, ApplicationStatus.
AB_TEST.value]), Application.cycle == Cycle.C1A.value, Application.
is_rejected == False, Application.promised_date.is_null(True) | (fn
.DATE(Application.promised_date) < datetime.today().date())).order_by(
Application.overdue_days, Application.apply_at)
dis_apps_ids = [da.application_id for da in dis_apps]
insert_args = []
for a in c1_apps:
if a.id in dis_apps_ids:
continue
insert_args.append({'application': a.id, 'cycle': a.cycle,
'follow_up_date': a.follow_up_date, 'called_times': 1 if a.
called_times else 0, 'description': 'init'})
if not insert_args:
        logging.error('no application needs auto call')
with db.atomic():
AutoCallList.delete().execute()
for idx in range(0, len(insert_args), 100):
AutoCallList.insert_many(insert_args[idx:idx + 100]).execute()
for idx in range(0, len(insert_args), 100):
application_list = [i['application'] for i in insert_args[idx:idx +
100]]
send_to_default_q(MessageAction.BOMBER_AUTO_CALL_CONTACT, {
'application_list': application_list})
logging.info('bomber generate auto call list finished')
send_to_default_q(MessageAction.UPDATE_BOMBER_FOR_SPECIAL, {})
class ChangeBomberTool(object):
@staticmethod
def in_record(bomber_id, ids, bd):
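        """Point ``ids`` at ``bomber_id`` and bulk-insert the matching
        DispatchAppHistory entry rows (expected out in 14 days), filling
        bill figures from the dict ``bd``."""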
subquery = Application.select(Application.amount, fn.NOW().alias(
'created_at'), fn.NOW().alias('updated_at'), Application.id.
alias('application_id'), R(str(bomber_id)).alias('bomber_id'),
fn.NOW().alias('entry_at'), R('null').alias('partner_id'), SQL(
'DATE_ADD(CURDATE(),INTERVAL 14 DAY)').alias(
'expected_out_time'), Application.overdue_days.alias(
'entry_overdue_days')).where(Application.status !=
ApplicationStatus.REPAID.value, Application.id << ids)
Application.update(latest_bomber=bomber_id).where(Application.id.
in_(ids)).execute()
application_list = list(subquery)
for idx in range(0, len(application_list), 1000):
applications = application_list[idx:idx + 1000]
insert_args = list(map(partial(lambda_result, dct=bd),
applications))
DispatchAppHistory.insert_many(insert_args).execute()
@staticmethod
def out_record(a, bd):
_id = str(a.id)
DispatchAppHistory.update(out_at=datetime.now(), out_overdue_days=a
.overdue_days, out_principal_pending=a.amount - Decimal(bd[_id]
.get('principal_paid')), out_late_fee_pending=bd[_id].get(
'late_fee') - bd[_id].get('late_fee_paid')).where(
DispatchAppHistory.application == a.id, DispatchAppHistory.
bomber_id == a.latest_bomber_id).execute()
a.last_bomber = a.latest_bomber
a.latest_bomber = None
a.ptp_bomber = None
a.latest_call = None
a.called_times = 0
a.save()
@staticmethod
def classify(l, b):
if len(l) == 1:
return l[0]
_l = filter(lambda x: x['bomber'] != b, l)
return min(_l, key=lambda x: len(x['ids']))
<mask token>
def bomber_dispatch_app():
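    """Dispatch today's C2-entry cash-loan applications to partner bombers
    by each partner's app_percentage, then hand the remainder to the
    AB-test bombers configured in SystemConfig."""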
try:
c1a_dispatch_app()
except Exception as e:
logging.error('c1a_dispatch_app error:%s' % str(e))
    cycle = {1: 10, 2: 30, 3: 60, 4: 90}
apps = Application.select().where(fn.DATE(Application.C2_entry) == date
.today(), Application.type == ApplicationType.CASH_LOAN.value)
partners = Partner.select().where(Partner.status == PartnerStatus.
NORMAL.value, Partner.cycle == Cycle.C2.value)
apps_ids = [a.id for a in apps]
dispatch_inserts = []
start_index = 0
apps_length = len(apps_ids)
logging.warning('apps length %s' % str(apps_length))
for p in partners:
bombers = Bomber.select().where(Bomber.partner == p.id, Bomber.
status != BomberStatus.OUTER_LEADER.value, Bomber.is_del == 0)
gen = CycleIter([b.id for b in bombers])
existing_list = []
end_index = start_index + int(apps_length * p.app_percentage)
logging.info('partner length %s' % str(end_index))
if not apps_ids[start_index:end_index]:
continue
bills = BillService().bill_list(application_ids=apps_ids[
start_index:end_index])
bill_dict = {bill['application_id']: bill for bill in bills}
for a_id in apps_ids[start_index:end_index]:
bomber = average_gen(gen, existing_list)
q = DispatchApp.delete().where(DispatchApp.application == a_id
).execute()
dispatch_inserts.append({'application': a_id, 'bomber': bomber,
'partner': p.id})
application = Application.select().where(Application.id == a_id
).first()
application.latest_bomber = bomber
application.status = ApplicationStatus.AB_TEST.value
application.ptp_bomber = None
application.save()
day_next_cycle = cycle.get(application.cycle
) - application.overdue_days
DispatchAppHistory.create(application=a_id, partner_id=p.id,
bomber_id=bomber, entry_at=datetime.now(),
entry_overdue_days=application.overdue_days,
entry_principal_pending=application.amount - Decimal(
bill_dict[a_id].get('principal_paid')),
entry_late_fee_pending=Decimal(bill_dict[a_id].get(
'late_fee')) - Decimal(bill_dict[a_id].get('late_fee_paid')
), expected_out_time=date.today() + timedelta(days=
day_next_cycle))
start_index = end_index
with db.atomic():
for idx in range(0, len(dispatch_inserts), 100):
DispatchApp.insert_many(dispatch_inserts[idx:idx + 100]).execute()
config = SystemConfig.prefetch(SCI.AB_TEST_C2)
c2_bomber = config.get(SCI.AB_TEST_C2, SCI.AB_TEST_C2.default_value)
c2_bomber = get_cash_bomber(c2_bomber, Cycle.C2.value)
c2 = apps_ids[start_index:]
if c2:
bills = BillService().bill_list(application_ids=c2)
else:
bills = []
bill_dict = {bill['application_id']: bill for bill in bills}
    logging.info('c2 AB_test length: %s' % str(len(c2)))
gen = CycleIter(c2_bomber)
existing_list = []
for c in c2:
bomber = average_gen(gen, existing_list)
application = Application.filter(Application.id == c).first()
application.status = ApplicationStatus.AB_TEST.value
application.latest_bomber = bomber
application.ptp_bomber = None
application.save()
day_next_cycle = 46 - application.overdue_days
DispatchAppHistory.create(application=c, bomber_id=bomber, entry_at
=datetime.now(), entry_overdue_days=application.overdue_days,
entry_principal_pending=application.amount - bill_dict[c].get(
'principal_paid', 0), entry_late_fee_pending=bill_dict[c].get(
'late_fee', 0) - bill_dict[c].get('late_fee_paid', 0),
expected_out_time=date.today() + timedelta(days=day_next_cycle))
ab_test_other()
def c1a_dispatch_app():
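    """Split today's DPD1 cash-loan applications across C1A partners by
    app_percentage and evenly across each partner's bombers, recording
    the dispatch in DispatchApp and DispatchAppHistory."""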
today = datetime.today().date()
tomorrow = today + timedelta(days=1)
c1a_apps = Application.select().where(Application.status << [
ApplicationStatus.UNCLAIMED.value, ApplicationStatus.PROCESSING.
value], Application.dpd1_entry >= today, Application.dpd1_entry <
tomorrow, Application.type == ApplicationType.CASH_LOAN.value)
all_aids = [a.id for a in c1a_apps]
partners = Partner.select().where(Partner.status == PartnerStatus.
NORMAL.value, Partner.cycle == Cycle.C1A.value)
end = 0
for p in partners:
bombers = Bomber.select().where(Bomber.partner == p.id, Bomber.
is_del == 0)
start = end
end += int(len(all_aids) * p.app_percentage)
aids = all_aids[start:end]
bids = [b.id for b in bombers]
if not bids or not aids:
continue
average_number = get_average_number(len(aids), len(bids))
p_end = 0
for i, bid in enumerate(bids):
p_start = p_end
p_end += average_number[i]
b_aids = aids[p_start:p_end]
with db.atomic():
q = Application.update(latest_bomber=bid, status=
ApplicationStatus.AB_TEST.value).where(Application.id <<
b_aids).execute()
params = {'cycle': Cycle.C1A.value, 'dest_partner_id': p.id,
'application_ids': b_aids, 'dest_bomber_id': bid}
new_in_record(**params)
try:
dispatch_inserts = []
for aid in b_aids:
dispatch_inserts.append({'application': aid, 'bomber':
bid, 'partner': p.id, 'status': DisAppStatus.NORMAL
.value})
if dispatch_inserts:
q = DispatchApp.insert_many(dispatch_inserts).execute()
except Exception as e:
                logging.error('c1a dispatch insert into dispatch_app error:%s' % str(e))
<mask token>
def get_cash_bomber(bids, cycle):
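    """Filter ``bids`` down to active bombers whose instalment flag does
    not equal ``cycle``, i.e. the cash-loan bombers for that cycle."""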
cash_bombers = Bomber.select().where(Bomber.id << bids, Bomber.is_del ==
0, Bomber.instalment != cycle)
cash_bids = [b.id for b in cash_bombers]
return cash_bids
<mask token>
def out_and_in_record_instalment(**kwargs):
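    """Close any open DispatchAppHistory rows for the given instalment
    applications, then insert fresh entry rows for the new bomber with an
    expected out time derived from the cycle's period."""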
if not kwargs.get('application_ids'):
return
out_q = DispatchAppHistory.update(out_at=fn.NOW()).where(
DispatchAppHistory.application << kwargs['application_ids'],
DispatchAppHistory.out_at.is_null(True)).execute()
    cycle_period = {1: '10', 2: '30', 3: '60', 4: '90'}
period = cycle_period.get(kwargs['cycle'], '90 + t1.overdue_days')
kwargs['dest_partner_id'] = kwargs.get('dest_partner_id') or 'null'
subquery = Application.select(Application.amount, fn.NOW().alias(
'created_at'), fn.NOW().alias('updated_at'), Application.id.alias(
'application_id'), R(str(kwargs['dest_bomber_id'])).alias(
'bomber_id'), fn.NOW().alias('entry_at'), Application.overdue_days.
alias('entry_overdue_days'), R(str(kwargs['dest_partner_id'])).
alias('partner_id'), SQL(
'DATE_ADD(CURDATE(),INTERVAL (%s - t1.overdue_days) DAY)' % period)
.alias('expected_out_time')).where(Application.status !=
ApplicationStatus.REPAID.value, Application.id << kwargs[
'application_ids'])
application_list = list(subquery)
for idx in range(0, len(application_list), 50):
applications = application_list[idx:idx + 50]
app_ids = [i.application_id for i in applications]
overdue_bills = OverdueBill.select().where(OverdueBill.
collection_id << app_ids)
sub_bill_ids = [ob.sub_bill_id for ob in overdue_bills]
bill_list = BillService().sub_bill_list(bill_sub_ids=sub_bill_ids)
insert_args = lambad_instalment_result(bill_list, applications)
if not insert_args:
continue
DispatchAppHistory.insert_many(insert_args).execute()
<mask token>
def c1b_dispatch_in_record(**kwargs):
app_ids = kwargs.get('apps')
partner_id = kwargs.get('partner_id', 'null')
bill_dict = kwargs.get('bill_dict')
period = kwargs.get('period')
bomber_id = kwargs.get('bomber_id')
if not all([app_ids, partner_id, bill_dict, period]):
return False
bill_dict = {str(k): v for k, v in bill_dict.items()}
subquery = Application.select(Application.amount, fn.NOW().alias(
'created_at'), fn.NOW().alias('updated_at'), Application.id.alias(
'application_id'), R(str(bomber_id)).alias('bomber_id'), fn.NOW().
alias('entry_at'), Application.overdue_days.alias(
'entry_overdue_days'), R(str(partner_id)).alias('partner_id'), SQL(
'DATE_ADD(CURDATE(),INTERVAL (%s - t1.overdue_days) DAY)' % period)
.alias('expected_out_time')).where(Application.id << app_ids)
application_list = list(subquery)
for idx in range(0, len(application_list), 1000):
applications = application_list[idx:idx + 1000]
insert_args = list(map(partial(lambda_result, dct=bill_dict),
applications))
DispatchAppHistory.insert_many(insert_args).execute()
<mask token>
def check_call_history(application):
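    """Return True while an application is less than ~4 days old, or when
    it had a connected call within the last 5 days; False otherwise."""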
app_create_at = application.created_at + timedelta(days=4)
if datetime.today().date() > app_create_at.date():
call_actions = CallActions.select().where(CallActions.type == 0,
CallActions.application == application.id, CallActions.
created_at > datetime.now() - timedelta(days=5))
for call in call_actions:
if call.phone_status == PhoneStatus.CONNECTED.value:
return True
return False
return True
@action(MessageAction.BOMBER_SCAVENGER)
def scavenger(payload, msg_id):
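    """Recover stuck work items: requeue auto-call entries left in
    PROCESSING/MAILBOX beyond the configured timeouts and release AutoIVR
    records stuck in PROCESSING for more than 30 minutes."""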
scavenger_time = -60
scavenger = SystemConfig.select().where(SystemConfig.key ==
'SCAVENGER_TIME').first()
if scavenger and scavenger.value.isdigit():
scavenger_time = -int(scavenger.value)
update_auto_call_list = AutoCallList.update(status=AutoListStatus.
PENDING.value, description='scavenger').where(AutoCallList.status ==
AutoListStatus.PROCESSING.value, AutoCallList.updated_at < datetime
.now() + timedelta(minutes=scavenger_time))
count = update_auto_call_list.execute()
logging.info('scavenger processed %s application', count)
mail_box_scavenger_time = -30
mail_box_scavenger = SystemConfig.select().where(SystemConfig.key ==
'MAIL_BOX_SCAVENGER_TIME').first()
if mail_box_scavenger and mail_box_scavenger.value.isdigit():
mail_box_scavenger_time = -int(mail_box_scavenger.value)
update_mail_box_call_list = AutoCallList.update(status=AutoListStatus.
PENDING.value).where(AutoCallList.status == AutoListStatus.MAILBOX.
value, AutoCallList.updated_at < datetime.now() + timedelta(minutes
=mail_box_scavenger_time))
mail_box_count = update_mail_box_call_list.execute()
logging.info('scavenger update mail box %s', mail_box_count)
update_auto_ivr = AutoIVR.update(status=AutoIVRStatus.AVAILABLE.value
).where(AutoIVR.status == AutoIVRStatus.PROCESSING.value, AutoIVR.
updated_at < datetime.now() + timedelta(minutes=-30))
ivr_result = update_auto_ivr.execute()
logging.info('scavenger update %s ivr' % ivr_result)
<mask token>
@action(MessageAction.REPORT_BOMBER_COLLECTION)
def report_bomber_collection(payload, msg_id):
start_date = ReportCollection.select(fn.MAX(ReportCollection.apply_date)
).scalar()
now = datetime.now()
if start_date and str(start_date) == str(now)[:10]:
return
end_date = str(now + timedelta(days=1))[:10]
start_date = str(now)[:10]
dct = dict(zip(CycleList.sql_values(), CycleList.table_values()))
all_overdue_loan_sql1 = """
SELECT ba.cycle, COUNT(ba.id)
FROM bomber.auto_call_list ba
GROUP BY 1;
"""
s_data1 = readonly_db.execute_sql(all_overdue_loan_sql1).fetchall()
d1 = OperatedDict(s_data1)
all_overdue_loan_sql2 = """
SELECT ba.cycle, COUNT(ba.id)
FROM bomber.auto_call_list ba
WHERE DATE(ba.follow_up_date) > CURDATE()
AND ba.called_counts = 0
GROUP BY 1;
"""
s_data2 = readonly_db.execute_sql(all_overdue_loan_sql2).fetchall()
d2 = OperatedDict(s_data2)
overdue_loans_entered_into_predict_call_system_sql = """
SELECT ba.cycle, COUNT(ba.id)
FROM bomber.auto_call_list ba
WHERE ba.called_counts >= 1
GROUP BY 1;
"""
s_data3 = readonly_db.execute_sql(
overdue_loans_entered_into_predict_call_system_sql).fetchall()
d3 = OperatedDict(s_data3)
loans_completed_sql = """
SELECT ba.cycle, COUNT(DISTINCT ba.application_id)
FROM bomber.auto_call_actions ba
WHERE DATE(ba.created_at) = CURDATE()
GROUP BY 1;
"""
s_data4 = readonly_db.execute_sql(loans_completed_sql).fetchall()
d4 = OperatedDict(s_data4)
connected_calls_automatic_sql = """
SELECT ba.cycle, COUNT(ba.application_id)
FROM bomber.auto_call_actions ba
WHERE DATE(ba.created_at) = CURDATE()
GROUP BY 1;
"""
s_data5 = readonly_db.execute_sql(connected_calls_automatic_sql).fetchall()
d5 = OperatedDict(s_data5)
connected_calls_manual_sql = """
SELECT bb.cycle, COUNT(bb.id)
FROM bomber.bombing_history bb
WHERE DATE(bb.created_at) = curdate()
AND (bb.bomber_id < 150 OR bb.bomber_id > 200)
GROUP BY bb.cycle;
"""
s_data6 = readonly_db.execute_sql(connected_calls_manual_sql).fetchall()
d6 = OperatedDict(s_data6)
logging.info('Directly get data from database successfully.')
c1 = d1 - d2
c2 = d3
c3 = c2 / c1
c4 = d4
c5 = c4 / c2
c6 = d5
c7 = c6 / c4
c8 = d6
c9 = OperatedDict(get_agent())
c10 = (c6 + c8) / c9
try:
c11 = average_call_duration_team(start_date, end_date)
except AttributeError:
c11 = {}
lst = []
for i in range(1, 5):
lst.append({'apply_date': start_date, 'cycle': dct[i],
'all_overdue_loan': c1.get(i, 0),
'overdue_loans_entered_into_predict_call_system': c2.get(i, 0),
'of_overdue_loans_entered_into_predict_call_system': round(c3.
get(i, 0) * 100, 1), 'loans_completed': c4.get(i, 0),
'of_completed_loans_in_predict_call_system': round(c5.get(i, 0) *
100, 1), 'connected_calls_automatic': c6.get(i, 0),
'connected_calls_automatic_completed_loans': round(c7.get(i, 0),
1), 'connected_calls_manual': c8.get(i, 0), 'agent': c9.get(i,
0), 'average_calls_agent': round(c10.get(i, 0), 1),
'average_call_duration_team': round(c11.get(i, 0), 1)})
ReportCollection.insert_many(lst).execute()
logging.info('report_bomber_collection:Done!')
<mask token>
def new_out_record(**kwargs):
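    """Mark the given applications as dispatched out of ``src_bomber_id``
    and, unless this is a monthly dispatch, clear their ptp_bomber."""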
if not kwargs['application_ids']:
return
DispatchAppHistory.update(out_at=fn.NOW()).where(DispatchAppHistory.
bomber_id == kwargs['src_bomber_id'], DispatchAppHistory.
application << kwargs['application_ids'], DispatchAppHistory.out_at
.is_null(True)).execute()
if kwargs.get('month_dispatch'):
return
try:
Application.update(ptp_bomber=None).where(Application.id << kwargs[
'application_ids']).execute()
except Exception as e:
logging.error('new_out_record error:aids:%s,error:%s' % (kwargs[
'application_ids'], str(e)))
<mask token>
def end_old_application(old_app, paid=False):
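    """Transition an old-loan record out of collection: PAID once settled,
    FINISHED once both its end date and any promise date have passed.
    Returns the application id when the bomber should release the case."""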
if paid:
if old_app.status == OldLoanStatus.WAITING.value:
old_app.status = OldLoanStatus.PAID.value
old_app.save()
return
if old_app.status == OldLoanStatus.PROCESSING.value:
old_app.status = OldLoanStatus.PAID.value
old_app.save()
return old_app.application_id
end_date = old_app.end_date
now = datetime.now()
if now >= max(end_date, old_app.promised_date or now):
old_app.status = OldLoanStatus.FINISHED.value
old_app.save()
return old_app.application_id
<mask token>
def start_old_application(old_app, cancel=False):
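    """Put an old loan (overdue > 90 days, no open promise) into collection
    under the special old-app bomber, or restore its previous state when
    ``cancel`` is set for a PAID record."""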
application_id = old_app.application_id
if cancel and old_app.status == OldLoanStatus.PAID.value:
now = datetime.now()
if old_app.start_date is None:
old_app.status = OldLoanStatus.WAITING.value
elif now >= max(old_app.end_date, old_app.promised_date or now):
old_app.status = OldLoanStatus.FINISHED.value
DispatchAppHistory.update(out_at=max(old_app.end_date, old_app.
promised_date or now)).where(DispatchAppHistory.bomber_id ==
old_app.bomber_id, DispatchAppHistory.application ==
application_id).execute()
else:
old_app.status = OldLoanStatus.PROCESSING.value
DispatchAppHistory.update(out_at=None).where(DispatchAppHistory
.bomber_id == old_app.bomber_id, DispatchAppHistory.
application == application_id).execute()
old_app.save()
return
application = Application.get_or_none(Application.id == application_id,
Application.status != ApplicationStatus.REPAID.value, Application.
overdue_days > 90, Application.promised_date.is_null(True) | (fn.
DATE(Application.promised_date) < datetime.today().date()))
if not application:
logging.error('Can not set old application %s to start collecting',
application_id)
return
if old_app.status in OldLoanStatus.no_available():
logging.info('%s has finished or paid', old_app.application_id)
return
config = SystemConfig.prefetch(SCI.OLD_APP_PERIOD)
sp = config.get(SCI.OLD_APP_PERIOD, SCI.OLD_APP_PERIOD.default_value)
old_app_bomber = SpecialBomber.OLD_APP_BOMBER.value
old_app.status = OldLoanStatus.PROCESSING.value
old_app.bomber_id = old_app_bomber
old_app.start_date = datetime.now()
if not old_app.end_date:
old_app.end_date = datetime.now() + timedelta(days=sp)
old_app.save()
in_record(dist_partner_id=None, dist_bomber_id=old_app_bomber,
application_ids=[old_app.application_id], expected_out_time=str(
old_app.end_date))
<mask token>
def run_member_sql(sql):
result = [0, 0]
try:
cursor = readonly_db.get_cursor()
cursor.execute(sql)
sql_result = cursor.fetchone()
if sql_result:
result = sql_result
except Exception as e:
logging.info('run sql error: %s' % str(sql))
return result
<mask token>
def get_before_bomber(date_time):
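    """Rough DPD1-3 repayment rate for the week before ``date_time``: the
    amount entering early overdue minus what is still pending afterwards,
    stored as a cycle-0 RepaymentReport row."""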
begin_time = str(date_time - timedelta(days=7))
end_time = str(date_time)
old_sql = (
"""
select
sum(principal_pending+late_fee_pending+interest_pending) as amount
from
bill_java.overdue bb
where
created_at>'%s'
and created_at<date_add('%s',interval 1 day)
and overdue_days in (2,3)
"""
% (begin_time, begin_time))
old_data = run_one_sql(old_sql)
new_sql = (
"""
select
sum(principal_pending+late_fee_pending+interest_pending) as amount
from
bill_java.overdue bb
where
created_at> '%s'
and created_at<'%s'
and overdue_days=1;
"""
% (begin_time, end_time))
new_data = run_one_sql(new_sql)
dpd4_sql = (
"""
select
sum(principal_pending+late_fee_pending+interest_pending) as amount
from
bill_java.overdue bb
where
created_at>date_add('%s',interval 1 day)
and created_at< date_add('%s',interval 1 day)
and overdue_days=4;
"""
% (begin_time, end_time))
dpd4_data = run_one_sql(dpd4_sql)
dpd2_sql = (
"""
select
sum(principal_pending+late_fee_pending+interest_pending) as amount
from
bill_java.overdue bb
where
created_at>'%s'
and created_at< date_add('%s',interval 1 day)
and overdue_days in (2,3)
"""
% (end_time, end_time))
dpd2_data = run_one_sql(dpd2_sql)
all_money = old_data + new_data
repayment = all_money - dpd4_data - dpd2_data
pro = 0
if all_money:
pro = repayment / all_money * 100
RepaymentReport.create(time=begin_time, cycle=0, all_money=all_money,
proportion=pro, repayment=repayment)
<mask token>
def get_c1a_into_rate(date_time):
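    """Daily C1A entry report, split by first-loan flag: pending amount at
    entry plus the cycle-1 repayments collected afterwards, written to
    RepaymentReportInto."""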
begin_time = date_time - timedelta(days=1)
end_time = date_time
for is_first_loan in FIRSTLOAN.values():
begin_date = begin_time
end_date = end_time
all_money_sql = (
"""
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id as application_id,ba.C1A_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.C1A_entry >= '%s'
and ba.C1A_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
inner join bill_java.overdue o
on a.application_id=o.application_id
and date(a.cdt)=date(o.created_at)
"""
% (begin_date, end_date, is_first_loan))
all_money = run_one_sql(all_money_sql)
begin_date = date_time - timedelta(days=19)
repayment_sql = (
"""
select
sum(b.principal_part+b.late_fee_part) as paid_amount,
cdt
from
(select
br.principal_part, br.late_fee_part,
date(cdt) as cdt, br.repay_at, br.application_id
from (
select ba.id, ba.C1A_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.C1A_entry >= '%s'
and ba.C1A_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 1 and date(br.repay_at) >= date(a.cdt)
group by 4, 5) b
group by 2
"""
% (begin_date, end_date, is_first_loan))
repayment = run_all_sql(repayment_sql)
if not repayment:
return
RepaymentReportInto.create(time=end_date - timedelta(days=1), cycle
=Cycle.C1A.value, all_money=round(all_money, 3), proportion='0',
repayment=0, is_first_loan=is_first_loan, contain_out=
ContainOut.CONTAIN.value)
for d in repayment:
repay = d[0] / 1000000
report = RepaymentReportInto.filter(RepaymentReportInto.time ==
d[1], RepaymentReportInto.cycle == Cycle.C1A.value,
RepaymentReportInto.is_first_loan == is_first_loan).first()
if report:
report.repayment = round(repay, 3)
pro = repay / report.all_money * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
<mask token>
def get_c2_into_rate(date_time):
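    """Daily C2 entry report, split by first-loan flag and by whether the
    case was ever dispatched to partner 1, tracking pending amount at
    entry and the cycle-3 repayments collected afterwards."""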
begin_time = date_time - timedelta(days=1)
end_time = date_time
for is_first_loan in FIRSTLOAN.values():
begin_date = begin_time
end_date = end_time
all_money_sql = (
"""
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id,c2_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c2_entry >= '%s'
and ba.c2_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
inner join bill_java.overdue o
on a.id=o.application_id
and date(a.cdt)=date(o.created_at)
"""
% (begin_date, end_date, is_first_loan))
all_money = run_one_sql(all_money_sql)
not_contain_sql = (
"""
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id,c2_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c2_entry >= '%s'
and ba.c2_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
and not exists(select 1 from bomber.dispatch_app_history bd
where bd.application_id=ba.id
and bd.partner_id=1)
) a
inner join bill_java.overdue o on a.id=o.application_id
and date(a.cdt)=date(o.created_at)
"""
% (begin_date, end_date, is_first_loan))
not_contain_money = run_one_sql(not_contain_sql)
begin_date = date_time - timedelta(days=37)
repayment_sql = (
"""
select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et
from
(select br.principal_part,br.late_fee_part,
date(a.c2_entry) as et, br.application_id, br.repay_at
from (
select ba.id, ba.c2_entry
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c2_entry >= '%s'
and ba.c2_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 3
group by 4, 5) b
group by 2
"""
% (begin_date, end_date, is_first_loan))
repayment = run_all_sql(repayment_sql)
not_contain_repay_sql = (
"""
select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et
from
(select br.principal_part,br.late_fee_part,
date(a.c2_entry) as et, br.application_id, br.repay_at
from (
select ba.id, ba.c2_entry
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c2_entry >= '%s'
and ba.c2_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
and not exists(select 1 from bomber.dispatch_app_history bd
where bd.application_id=ba.id
and bd.partner_id=1)
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 3
group by 4, 5) b
group by 2
"""
% (begin_date, end_date, is_first_loan))
not_contain_repay = run_all_sql(not_contain_repay_sql)
if not not_contain_money and repayment:
return
for i in ContainOut.values():
if i == ContainOut.NOT_CONTAIN.value:
RepaymentReportInto.create(time=end_date - timedelta(days=1
), cycle=Cycle.C2.value, all_money=round(
not_contain_money, 3), proportion='0', repayment=0,
is_first_loan=is_first_loan, contain_out=ContainOut.
NOT_CONTAIN.value)
for repay in not_contain_repay:
repay_money = Decimal(0)
if repay[0]:
repay_money = repay[0]
repay_money = repay_money / 1000000
report = RepaymentReportInto.filter(RepaymentReportInto
.time == repay[1], RepaymentReportInto.
is_first_loan == is_first_loan, RepaymentReportInto
.contain_out == i, RepaymentReportInto.cycle ==
Cycle.C2.value).first()
if report and report.all_money:
report.repayment = round(repay_money, 3)
pro = repay_money / report.all_money * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
elif i == ContainOut.CONTAIN.value:
RepaymentReportInto.create(time=end_date - timedelta(days=1
), cycle=Cycle.C2.value, all_money=round(all_money, 3),
proportion='0', repayment=0, is_first_loan=
is_first_loan, contain_out=ContainOut.CONTAIN.value)
for repay in repayment:
repay_money = 0
if repay[0]:
repay_money = repay[0] / 1000000
report = RepaymentReportInto.filter(RepaymentReportInto
.time == repay[1], RepaymentReportInto.
is_first_loan == is_first_loan, RepaymentReportInto
.contain_out == i, RepaymentReportInto.cycle ==
Cycle.C2.value).first()
if report and report.all_money:
report.repayment = round(repay_money, 3)
pro = repay_money / report.all_money * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
def get_c3_into_rate(date_time):
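    """Daily C3 entry report by first-loan flag: pending amount at
    c3_entry and the cycle-4 repayments collected afterwards, with the
    repaid proportion."""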
begin_time = date_time - timedelta(days=1)
end_time = date_time
for is_first_loan in FIRSTLOAN.values():
begin_date = begin_time
end_date = end_time
all_money_sql = (
"""
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id, ba.c3_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c3_entry >= '%s'
and ba.c3_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
inner join bill_java.overdue o on a.id=o.application_id
and date(a.cdt)=date(o.created_at)
"""
% (begin_date, end_date, is_first_loan))
all_money = run_one_sql(all_money_sql)
begin_date = date_time - timedelta(days=30)
repayment_sql = (
"""
select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et
from
(select br.principal_part,br.late_fee_part,
date(a.c3_entry) as et, br.application_id, br.repay_at
from (
select ba.id, ba.c3_entry
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c3_entry >= '%s'
and ba.c3_entry < '%s'
and ba.type = 0
and da.is_first_loan = '%s'
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 4
group by 4, 5) b
group by 2
"""
% (begin_date, end_date, is_first_loan))
repayment = run_all_sql(repayment_sql)
RepaymentReportInto.create(time=end_date - timedelta(days=1), cycle
=Cycle.C3.value, all_money=round(all_money, 3), proportion='0',
repayment=0, is_first_loan=is_first_loan, contain_out=
ContainOut.CONTAIN.value)
if not repayment:
return
for repay in repayment:
repay_money = Decimal(0)
if repay[0]:
repay_money = repay[0]
repay_money = repay_money / 1000000
report = RepaymentReportInto.filter(RepaymentReportInto.time ==
repay[1], RepaymentReportInto.cycle == Cycle.C3.value,
RepaymentReportInto.is_first_loan == is_first_loan).first()
if report:
report.repayment = repay_money
pro = 0
if report.all_money and int(report.all_money):
pro = repay_money / report.all_money * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
<mask token>
def get_static_bomber(begin_date):
active_date = begin_date - timedelta(days=8)
bombers = BomberR.select(BomberR.id, BomberR.role.alias('role'),
BomberR.last_active_at.alias('active')).where(BomberR.
last_active_at > active_date, BomberR.role << [1, 2, 4, 5, 6, 8, 9])
summary = []
for bomber in bombers:
summary.append({'time': begin_date, 'bomber_id': bomber.id, 'cycle':
bomber.role.cycle, 'work_ind': 0})
SummaryBomber.insert_many(summary).execute()
<mask token>
@time_logger
def get_call_and_made(end_date, begin_date, real_time_query=False):
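    """Aggregate per-bomber call counts for the interval (calls and cases
    made/connected) and write them into SummaryBomber unless
    ``real_time_query`` asks for the raw rows."""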
call_sql = (
"""
select
bomber_id,
count(case when relationship is not null then application_id end)
as 'call_cnt',
count(distinct case when relationship is not null then
application_id end) as 'call_case',
count(case when phone_status=4 then application_id end) as 'connect',
count(distinct case when phone_status=4 then application_id end)
as 'connect_case'
from (
select bomber_id,application_id,phone_status, cycle, relationship
from bomber.call_actions ba
where created_at>'%s' and created_at<'%s'
and type in (0, 1)
) a
group by 1
"""
% (begin_date, end_date))
calls = run_all_sql(call_sql)
if real_time_query:
return calls
for call in calls:
bomber, call_cnt, case_made, connect_cnt, case_connect = call
SummaryBomber.update(case_made_cnt=case_made, call_cnt=call_cnt,
call_connect_cnt=connect_cnt, case_connect_cnt=case_connect).where(
SummaryBomber.bomber_id == bomber, SummaryBomber.time == begin_date
).execute()
return calls
@time_logger
def get_claimed_cnt(end_date, begin_date, real_time_query=False):
table_date = begin_date - timedelta(days=30)
claimed_sql = (
"""
SELECT
COUNT( `t1`.`application_id` ) AS cnt,
`t1`.`bomber_id` AS bomber_id
FROM
`dispatch_app_history` AS t1
WHERE
( `t1`.`out_at` > '%s' OR `t1`.`out_at` IS null )
AND ( `t1`.`bomber_id` != 1000 )
AND ( `t1`.`partner_id` IS null )
AND ( `t1`.`entry_at` > '%s' )
AND ( `t1`.`entry_at` < '%s' )
GROUP BY
`t1`.`bomber_id`
"""
% (begin_date, table_date, end_date))
claimeds = run_all_sql(claimed_sql)
if real_time_query:
return claimeds
for claimed in claimeds:
cnt, bomber_id = claimed
SummaryBomber.update(claimed_cnt=cnt).where(SummaryBomber.time ==
begin_date, SummaryBomber.bomber_id == bomber_id).execute()
return claimeds
def get_sms_data(end_data, begin_data):
all_sms = ConnectHistoryR.select(ConnectHistoryR.operator.alias(
'bomber_id'), fn.COUNT(ConnectHistoryR.application).alias('sms_send')
).where(ConnectHistoryR.created_at > begin_data, ConnectHistoryR.
created_at < end_data, ConnectHistoryR.type.in_(ConnectType.sms())
).group_by(ConnectHistoryR.operator)
for sms in all_sms:
SummaryBomber.update(sms_cnt=sms.sms_send).where(SummaryBomber.time ==
begin_data, SummaryBomber.bomber_id == sms.bomber_id).execute()
return all_sms
<mask token>
@action(MessageAction.SUMMARY_NEW)
def summary_new(payload, msg_id):
end_date = date.today()
begin_date = end_date - timedelta(days=1)
worker_log = WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs')
).where(WorkerLog.created_at >= end_date, WorkerLog.action ==
'SUMMARY_NEW').first()
if worker_log and worker_log.logs >= 5:
return
get_active_bomber(begin_date)
get_call_and_made(end_date, begin_date)
get_claimed_cnt(end_date, begin_date)
get_sms_data(end_date, begin_date)
get_ptp_data(end_date, begin_date)
get_recover_amount(end_date, begin_date)
get_unfollowed(begin_date)
get_unfollowed_call(begin_date)
def get_new_case_amount(begin_date, end_date):
all_case = DispatchAppHistoryR.select(fn.SUM(DispatchAppHistoryR.
entry_late_fee_pending + DispatchAppHistoryR.
entry_principal_pending).alias('pending'), DispatchAppHistoryR.
bomber_id, fn.COUNT(DispatchAppHistoryR.application).alias('cnt')
).where(DispatchAppHistoryR.entry_at > begin_date,
DispatchAppHistoryR.entry_at < end_date, DispatchAppHistoryR.
partner_id.is_null(True)).group_by(DispatchAppHistoryR.bomber_id)
for case in all_case:
SummaryBomber.update(new_case_amount_sum=case.pending, new_case_cnt
=case.cnt).where(SummaryBomber.bomber_id == case.bomber_id,
SummaryBomber.time == begin_date).execute()
return all_case
<mask token>
def get_new_case_cleared(begin_date, end_date):
sql = (
"""
SELECT
ptp_bomber AS bomber_id,
sum( paid_amount ) AS pending
FROM
(SELECT
br.late_fee_part + br.principal_part AS paid_amount,
br.ptp_bomber
FROM bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.c1b_entry ) = date( br.repay_at )
AND br.ptp_bomber is not null
WHERE ba.c1b_entry > '%s'
AND ba.c1b_entry < '%s'
) a
GROUP BY 1
UNION
SELECT
ptp_bomber AS bomber_id,
sum( paid_amount ) AS pending
FROM
(SELECT
br.late_fee_part + br.principal_part AS paid_amount,
br.ptp_bomber
FROM bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.c2_entry ) = date( br.repay_at )
AND br.ptp_bomber is not null
WHERE ba.c2_entry > '%s'
AND ba.c2_entry < '%s'
) a
GROUP BY 1
UNION
SELECT
ptp_bomber AS bomber_id,
sum( paid_amount ) AS pending
FROM
(SELECT
br.late_fee_part + br.principal_part AS paid_amount,
br.ptp_bomber
FROM
bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.c3_entry ) = date( br.repay_at )
AND br.ptp_bomber is not null
WHERE ba.c3_entry > '%s'
AND ba.c3_entry < '%s'
) a
GROUP BY 1
"""
% (begin_date, end_date, begin_date, end_date, begin_date, end_date))
case_cleared_sums = run_all_sql(sql)
for clear in case_cleared_sums:
SummaryBomber.update(new_case_cleared_sum=clear[1]).where(
SummaryBomber.time == begin_date, SummaryBomber.bomber_id ==
clear[0]).execute()
@time_logger
def get_new_case_call(begin_date, end_date, real_query_time=False):
sql = (
"""
SELECT
bd.bomber_id,
count( DISTINCT bd.application_id )
FROM
bomber.dispatch_app_history bd
INNER JOIN bomber.call_actions bc
ON bd.application_id = bc.application_id
AND bd.bomber_id = bc.bomber_id
AND date( bd.entry_at ) = date( bc.created_at )
WHERE
entry_at > '%s'
AND entry_at < '%s'
AND partner_id IS NULL
GROUP BY 1
"""
% (begin_date, end_date))
new_case_calls = run_all_sql(sql)
if real_query_time:
return new_case_calls
for call in new_case_calls:
SummaryBomber.update(new_case_call_cnt=call[1]).where(SummaryBomber
.bomber_id == call[0], SummaryBomber.time == begin_date).execute()
return new_case_calls
@time_logger
def get_calltime_avg(begin_date, end_date, real_query_time=False):
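    """Sum answered talk time and call counts per bomber from both the
    auto-dial and manual CDRs, then store the per-case averages in
    SummaryBomber (or return them raw when ``real_query_time``)."""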
autos_sql = (
"""
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS auto_talkduraction,
count( 1 ) AS auto_jt_cnt
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.username = bb.username
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND an.username != ' '
AND an.STATUS = 'ANSWERED'
AND bb.id IS NOT NULL
GROUP BY 1
"""
% (begin_date, end_date))
autos = run_all_sql(autos_sql)
manual_sql = (
"""
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS manual_talkduraction,
count( 1 ) AS manual_jt_cnt
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) )
AND an.STATUS = 'ANSWERED'
AND bb.id IS NOT NULL
AND an.recording is not null
GROUP BY 1
"""
% (begin_date, end_date, '5%', '3%'))
manuals = run_all_sql(manual_sql)
datas = autos + manuals
result = {}
for data in datas:
if data[0] in result.keys():
result[data[0]][0] += data[1]
result[data[0]][1] += data[2]
continue
result[data[0]] = [data[1], data[2]]
if real_query_time:
return result
for key, value in result.items():
SummaryBomber.update(calltime_case_sum=value[0], calltime_case_cnt=
value[1], calltime_case_avg=value[0] / value[1] if value[1] else 0
).where(SummaryBomber.time == begin_date, SummaryBomber.
bomber_id == key).execute()
return result
def get_no_calltime_avg(begin_date, end_date):
manual_sql = (
"""
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS manual_talkduraction,
count( 1 ) AS manual_jt_cnt
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) )
AND (an.status!='ANSWERED' or an.recording is null)
AND bb.id IS NOT NULL
GROUP BY 1
"""
% (begin_date, end_date, '5%', '3%'))
manuals = run_all_sql(manual_sql)
for data in manuals:
SummaryBomber.update(calltime_no_case_sum=data[1],
calltime_no_case_cnt=data[2], calltime_no_case_avg=data[1] /
data[2] if data[2] else 0).where(SummaryBomber.time ==
begin_date, SummaryBomber.bomber_id == data[0]).execute()
@time_logger
def get_calltime_sum(begin_date, end_date, real_query_time=False):
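    """Total talk time per bomber across auto and manual CDRs, regardless
    of call status, and write it to SummaryBomber.calltime_sum."""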
autos_sql = (
"""
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS auto_talkduraction
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.username = bb.username
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND an.username != ' '
AND bb.id IS NOT NULL
GROUP BY 1
"""
% (begin_date, end_date))
autos = run_all_sql(autos_sql)
manual_sql = (
"""
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS manual_talkduraction
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) )
AND bb.id IS NOT NULL
GROUP BY 1
"""
% (begin_date, end_date, '5%', '3%'))
manuals = run_all_sql(manual_sql)
datas = autos + manuals
result = {}
for data in datas:
if data[0] in result.keys():
result[data[0]] += data[1]
continue
result[data[0]] = data[1]
if real_query_time:
return result
for key, value in result.items():
SummaryBomber.update(calltime_sum=value).where(SummaryBomber.time ==
begin_date, SummaryBomber.bomber_id == key).execute()
return result
<mask token>
@action(MessageAction.UPDATE_SUMMARY_NEW)
def update_summary_new(payload, msg_id):
end_date = date.today()
begin_date = end_date - timedelta(days=1)
worker_log = WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs')
).where(WorkerLog.created_at >= end_date, WorkerLog.action ==
'UPDATE_SUMMARY_NEW').first()
if worker_log and worker_log.logs >= 5:
return
get_new_case_amount(begin_date, end_date)
get_kp_cleared(begin_date, end_date)
get_kp_today(begin_date, end_date)
get_ptp_cnt(begin_date, end_date)
get_ptp_call_cnt(begin_date, end_date)
get_new_case_cleared(begin_date, end_date)
get_new_case_call(begin_date, end_date)
get_calltime_avg(begin_date, end_date)
get_no_calltime_avg(begin_date, end_date)
get_calltime_sum(begin_date, end_date)
def get_cycle_claimed(begin_date, end_date):
sql = (
"""
select cycle,count(1)
from bomber.application where cycle in (1,2,3,4)
and (finished_at is null or (finished_at>'%s'))
and created_at>'2018-09-01'
group by 1
"""
% begin_date)
result = run_all_sql(sql)
return result
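# Returns rows of (cycle, application_count) for cycles 1-4, counting
# applications that are still unfinished or finished after `begin_date`.
# Note: `end_date` is accepted but never referenced in the SQL above.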
@time_logger
def cycle_new_case(begin_date, end_date, real_time_query=False):
sql = (
"""
SELECT
1 AS cycle,
count( ba.id ),
sum( bo.principal_pending + late_fee_pending +
interest_pending ) AS pending
FROM
bomber.application ba
INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id
AND date( ba.created_at ) = bo.which_day_overdue
WHERE
ba.created_at > '%s'
AND ba.created_at < '%s'
UNION
SELECT
2 AS cycle,
count( 1 ),
sum( bo.principal_pending + late_fee_pending +
interest_pending ) AS pending
FROM
bomber.application ba
INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id
AND date( ba.c1b_entry ) = bo.which_day_overdue
WHERE
c1b_entry > '%s'
AND c1b_entry < '%s'
UNION
SELECT
3 AS cycle,
count( 1 ),
sum( bo.principal_pending + late_fee_pending +
interest_pending ) AS pending
FROM
bomber.application ba
INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id
AND date( ba.c2_entry ) = bo.which_day_overdue
WHERE
c2_entry > '%s'
AND c2_entry < '%s'
UNION
SELECT
4 AS cycle,
count( 1 ),
sum( bo.principal_pending + late_fee_pending +
interest_pending ) AS pending
FROM
bomber.application ba
INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id
AND date( ba.c3_entry ) = bo.which_day_overdue
WHERE
c3_entry > '%s'
AND c3_entry < '%s'
"""
% (begin_date, end_date, begin_date, end_date, begin_date,
end_date, begin_date, end_date))
all_datas = run_all_sql(sql)
if real_time_query:
return all_datas
for data in all_datas:
SummaryBomber.update(new_case_amount_sum=data[2], new_case_cnt=data[1]
).where(SummaryBomber.time == begin_date, SummaryBomber.
bomber_id == data[0], SummaryBomber.cycle == data[0]).execute()
return all_datas
@time_logger
def get_cycle_new_case_call(begin_date, end_date, real_time_query=False):
sql = (
"""
SELECT
1 AS cycle,
count( DISTINCT ba.id )
FROM
bomber.application ba
INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
AND date( ba.created_at ) = date( bc.created_at )
WHERE
ba.created_at > '%s'
AND ba.created_at < '%s'
UNION
SELECT
2 AS cycle,
count( DISTINCT ba.id )
FROM
bomber.application ba
INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
AND date( ba.c1b_entry ) = date( bc.created_at )
WHERE
ba.c1b_entry > '%s'
AND ba.c1b_entry < '%s'
UNION
SELECT
3 AS cycle,
count( DISTINCT ba.id )
FROM
bomber.application ba
INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
AND date( ba.c2_entry ) = date( bc.created_at )
WHERE
ba.c2_entry > '%s'
AND ba.c2_entry < '%s'
UNION
SELECT
4 AS cycle,
count( DISTINCT ba.id )
FROM
bomber.application ba
INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
AND date( ba.c3_entry ) = date( bc.created_at )
WHERE
ba.c3_entry > '%s'
AND ba.c3_entry < '%s'
"""
% (begin_date, end_date, begin_date, end_date, begin_date,
end_date, begin_date, end_date))
cycle_datas = run_all_sql(sql)
if real_time_query:
return cycle_datas
for data in cycle_datas:
SummaryBomber.update(new_case_call_cnt=data[1]).where(SummaryBomber
.time == begin_date, SummaryBomber.cycle == data[0],
SummaryBomber.bomber_id == data[0]).execute()
return cycle_datas
def get_cycle_new_case_cleared(begin_date, end_date):
sql = (
"""
SELECT
'1' AS cycle, count( DISTINCT id ),
sum( paid_amount ) AS pending
FROM
(SELECT ba.id, br.repay_at,
br.late_fee_part + br.principal_part AS paid_amount
FROM
bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.created_at ) = date( br.repay_at )
WHERE ba.created_at > '%s'
AND ba.created_at < '%s'
GROUP BY 1, 2 ) a
UNION
SELECT
'2' AS cycle, count( DISTINCT id ),
sum( paid_amount ) AS pending
FROM
(SELECT ba.id, br.repay_at,
br.late_fee_part + br.principal_part AS paid_amount
FROM
bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.c1b_entry ) = date( br.repay_at )
WHERE ba.c1b_entry > '%s'
AND ba.c1b_entry < '%s'
GROUP BY 1, 2) a
"""
% (begin_date, end_date, begin_date, end_date))
cycle_cleared = run_all_sql(sql)
for i in cycle_cleared:
SummaryBomber.update(new_case_cleared_sum=i[2]).where(SummaryBomber
.cycle == i[0], SummaryBomber.bomber_id == i[0], SummaryBomber.
time == begin_date).execute()
<mask token>
def get_change_bomber():
    cycle_role_map = {5: Cycle.C1B.value, 6: Cycle.C2.value,
        8: Cycle.C3.value}
result = {}
bomber_logs = BomberLog.select(BomberLog.bomber_id, BomberLog.role_id,
BomberLog.operation, Bomber.group_id).join(Bomber, JOIN_INNER, on=
BomberLog.bomber_id == Bomber.id).where(fn.DATE(BomberLog.
created_at) == date.today(), BomberLog.role_id << list(
cycle_role_map.keys()), BomberLog.operation << (0, 1), Bomber.
instalment == 0).dicts()
for b_log in bomber_logs:
cycle = cycle_role_map.get(b_log['role_id'])
group_id = b_log['group_id']
if cycle in result:
if group_id not in result[cycle]:
result[cycle][group_id] = {'cycle': cycle, 'del_ids': [],
'new_ids': []}
else:
result[cycle] = {group_id: {'cycle': cycle, 'del_ids': [],
'new_ids': []}}
if b_log['operation'] == 0:
result[cycle][group_id]['del_ids'].append(b_log['bomber_id'])
if result:
bombers = Bomber.select().where(Bomber.role.in_(list(cycle_role_map
.keys())), Bomber.is_del == 0, Bomber.instalment == 0)
for b in bombers:
cycle_result = result.get(cycle_role_map[b.role_id], {})
role_result = cycle_result.get(b.group_id)
if not role_result:
continue
role_result['new_ids'].append(b.id)
        result_list = []
        for cycle, group_dict in result.items():
            result_list.extend(list(group_dict.values()))
        return result_list
return []
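# Shape of the return value above (ids hypothetical): one dict per
# (cycle, group) that logged a bomber change today, e.g.
#   [{'cycle': 2, 'del_ids': [11], 'new_ids': [12, 13]}, ...]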
<mask token>
def get_surplus_application(new_ids, del_ids, average_nums, classified_apps):
surplus_apps = []
for del_id in del_ids:
del_res = classified_apps.get(del_id, {})
p_list = del_res.get('p_list', [])
np_list = del_res.get('np_list', [])
del_res['need_num'] = -(len(p_list) + len(np_list))
del_res['to_list'] = np_list + p_list
surplus_apps.extend(p_list)
surplus_apps.extend(np_list)
for index, bid in enumerate(new_ids):
average = average_nums[index]
bomber_app = classified_apps.get(bid)
if not bomber_app:
bomber = Bomber.select(Bomber.partner_id).where(Bomber.id == bid
).first()
bomber_app = {'bid': bid, 'p_list': [], 'p_num': 0, 'np_list':
[], 'np_num': 0, 'need_num': average, 'partner_id': bomber.
partner_id if bomber else ''}
classified_apps[bid] = bomber_app
else:
p_num = len(bomber_app['p_list'])
np_num = len(bomber_app['np_list'])
if p_num > average:
bomber_app['need_num'] = -np_num
else:
bomber_app['need_num'] = average - (p_num + np_num)
bomber_app['p_num'] = p_num
bomber_app['np_num'] = np_num
if bomber_app['need_num'] < 0:
random.shuffle(bomber_app['np_list'])
res_over = bomber_app['np_list'][:-bomber_app['need_num']]
bomber_app['to_list'] = res_over
surplus_apps.extend(res_over)
classified_apps_list = sorted(classified_apps.values(), key=lambda x: x
['need_num'], reverse=True)
return surplus_apps, classified_apps_list
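# Returns the pooled surplus applications plus the per-bomber buckets sorted
# by 'need_num' descending, so bombers with the largest shortfall of cases
# are served first when the surplus pool is re-dispatched.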
<mask token>
@action(MessageAction.BOMBER_CHANGE_DISPATCH_APPS)
def bomber_dispatch_applications(payload, msg_id):
change_bombers = get_change_bomber()
instalment_change_bombers = get_instalment_change_bomber()
params = {ApplicationType.CASH_LOAN.value: change_bombers,
ApplicationType.CASH_LOAN_STAGING.value: instalment_change_bombers}
for type, bombers in params.items():
change_bomber_dispatch_apps(change_bombers=bombers, type=type)
<mask token>
def get_summary_daily_time():
mid_time_t1 = datetime.strptime('12:40:00', '%H:%M:%S')
mid_time_t2 = datetime.strptime('17:20:00', '%H:%M:%S')
now_date = datetime.now()
now_date_time = now_date.time()
today_str = str(now_date.date())
if now_date_time < mid_time_t1.time():
yes_date = now_date - timedelta(days=1)
yes_date_str = str(yes_date.date())
begin_str = yes_date_str + ' 17:20:00'
end_str = today_str + ' 00:00:00'
elif mid_time_t1.time() <= now_date_time < mid_time_t2.time():
begin_str = today_str + ' 00:00:00'
end_str = today_str + ' 12:40:00'
else:
begin_str = today_str + ' 12:40:00'
end_str = today_str + ' 17:20:00'
begin_time = datetime.strptime(begin_str, '%Y-%m-%d %H:%M:%S')
end_time = datetime.strptime(end_str, '%Y-%m-%d %H:%M:%S')
summary_datetime = now_date - timedelta(minutes=30)
summary_date = summary_datetime.date()
return begin_time, end_time, summary_date
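# The three windows produced above, assuming "now" falls on 2019-01-02:
#   before 12:40        -> 2019-01-01 17:20:00 .. 2019-01-02 00:00:00
#   12:40 up to 17:20   -> 2019-01-02 00:00:00 .. 2019-01-02 12:40:00
#   17:20 and later     -> 2019-01-02 12:40:00 .. 2019-01-02 17:20:00
# summary_date is the date 30 minutes before "now".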
@action(MessageAction.SUMMARY_DAILY)
def summary_daily_data(payload, msg_id):
begin_time, end_time, summary_date = get_summary_daily_time()
call_actions = CallActionsR.select(CallActionsR.id, CallActionsR.
bomber_id, CallActionsR.application_id, CallActionsR.promised_date,
CallActionsR.cycle, CallActionsR.name, CallActionsR.number).where(
CallActionsR.created_at >= begin_time, CallActionsR.created_at <
end_time, CallActionsR.type << (0, 1))
summary_dailys = {}
for call in call_actions:
if call.bomber_id not in summary_dailys:
summary_dailys[call.bomber_id] = {'ptp_cnt': 0, 'call_cnt': 0,
'cycle': call.cycle, 'repayment': 0, 'bomber_id': call.
bomber_id, 'summary_date': str(summary_date)}
if call.name and call.number:
summary_dailys[call.bomber_id]['call_cnt'] += 1
if call.promised_date:
summary_dailys[call.bomber_id]['ptp_cnt'] += 1
C1_sql = (
"""
SELECT a.current_bomber_id,
sum(principal_part+late_fee_part) as pay_amount,a.cycle
from
(select a.cycle,a.current_bomber_id,b.username,a.principal_part,
a.late_fee_part,a.application_id,a.repay_at
FROM bomber.repayment_log a ,bomber.bomber b
WHERE a.repay_at >= '%s' AND a.repay_at <'%s'
AND a.current_bomber_id !=''
AND a.current_bomber_id = b.id
and b.role_id in (1,2,4,5)
and principal_part+late_fee_part>0
group by 6,7) a
GROUP BY a.cycle,a.current_bomber_id
"""
% (begin_time, end_time))
C1_repayment = run_all_sql(C1_sql)
other_sql = (
"""
select current_bomber_id,sum(pay_amount) as pay_amount,cycle
from (
select application_id,current_bomber_id,pay_amount,repay_at,cycle
from (
select br.application_id,br.current_bomber_id,
br.principal_part+br.late_fee_part as pay_amount,br.repay_at,
br.cycle
from bomber.repayment_log br
left join bomber.bomber bb on br.current_bomber_id=bb.id
where exists (select 1 from bomber.bombing_history bb
where br.current_bomber_id=bb.bomber_id
and br.application_id=bb.application_id
and bb.created_at<br.repay_at
and (bb.promised_date is not null
or bb.promised_amount is not null))
and br.repay_at >= '%s'
and br.repay_at < '%s'
and bb.role_id in (3,6,7,8,9)
and br.principal_part+br.late_fee_part > 0
group by 1,4
) a
group by 1,4) b
group by 1
"""
% (begin_time, end_time))
other_repayment = run_all_sql(other_sql)
all_repayment = C1_repayment + other_repayment
for res in all_repayment:
bomber_id, pay_amount, cycle = res
if bomber_id in summary_dailys:
summary_dailys[bomber_id]['repayment'] += pay_amount
else:
summary_dailys[bomber_id] = {'ptp_cnt': 0, 'call_cnt': 0,
'cycle': cycle, 'repayment': pay_amount, 'bomber_id':
bomber_id, 'summary_date': str(summary_date)}
insert_values = list(summary_dailys.values())
if insert_values:
SummaryDaily.insert_many(insert_values).execute()
<mask token>
def get_app_logs(apps):
app_logs = {}
all_np_apps = []
all_p_apps = []
for a in apps:
latest_bomber = a['latest_bomber']
latest_bomber = a['cycle'] if not latest_bomber else latest_bomber
if latest_bomber in app_logs:
app_logs[latest_bomber]['to_ids'].append(a['id'])
else:
app_logs[latest_bomber] = {'bomber_id': latest_bomber, 'to_ids':
[a['id']], 'np_ids': [], 'p_ids': []}
if a['promised_date'] and a['promised_date'].date() >= datetime.now(
).date():
app_logs[latest_bomber]['p_ids'].append(a['id'])
all_p_apps.append(a)
else:
app_logs[latest_bomber]['np_ids'].append(a['id'])
all_np_apps.append(a)
return app_logs, all_np_apps, all_p_apps
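# app_logs groups applications by their latest bomber (falling back to the
# cycle number) and splits each bucket by whether a promise-to-pay is still
# live:
#   {bomber_id: {'bomber_id': ..., 'to_ids': [...], 'p_ids': [...],
#                'np_ids': [...]}}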
def month_dispatch_app_out_partner(cycle, apps, app_logs, np_apps):
apps = list(apps)
np_apps = list(np_apps)
random.shuffle(np_apps)
apps_len = len(apps)
np_apps_len = len(np_apps)
end = 0
    all_app_percentage = 0
partners = Partner.select().where(Partner.cycle == cycle, Partner.
status == PartnerStatus.NORMAL.value)
for p in partners:
        all_app_percentage += p.app_percentage
for partner in partners:
bombers = Bomber.select().where(Bomber.partner == partner.id,
Bomber.is_del == 0, Bomber.status != BomberStatus.OUTER_LEADER.
value)
bids = {b.id: b for b in bombers}
if len(bids) == 0:
logging.info('cycle:%s,partner:%s,no bomber' % (cycle, partner.id))
continue
start = end
        if np_apps_len >= int(apps_len * all_app_percentage):
end = start + int(apps_len * partner.app_percentage)
else:
end = start + int(np_apps_len * partner.app_percentage /
                all_app_percentage)
partner_app = np_apps[start:end]
dispatch_apps_to_bomber(cycle, partner_app, bids, app_logs)
np_apps = np_apps[end:]
return np_apps
def month_dispatch_app_inner(cycle, np_apps, app_logs, p_apps):
    sys_cycle = {1: 'AB_TEST_C1A', 2: 'AB_TEST_C1B', 3: 'AB_TEST_C2',
        4: 'AB_TEST_C3'}
sys_config = SystemConfig.get(SystemConfig.key == sys_cycle[cycle])
sys_values = json.loads(sys_config.value)
bombers = Bomber.select().where(Bomber.id << sys_values, Bomber.is_del == 0
)
if cycle in (Cycle.C1B.value, Cycle.C2.value, Cycle.C3.value):
bombers = bombers.where(Bomber.instalment == 0)
bids = {b.id: b for b in bombers}
if cycle == Cycle.C1A.value:
np_ids = [a['id'] for a in np_apps]
np = Application.update(status=ApplicationStatus.PROCESSING.value,
ptp_bomber=None, latest_bomber=None).where(Application.id << np_ids
).execute()
bomber_app_logs = app_logs.get(cycle, {})
out_param = {'application_ids': bomber_app_logs.get('to_ids', []),
'month_dispatch': 1, 'src_bomber_id': cycle}
new_out_record(**out_param)
in_param = {'cycle': cycle, 'application_ids': np_ids,
'dest_bomber_id': cycle}
new_in_record(**in_param)
bomber_app_logs['need_num'] = len(np_apps)
bomber_app_logs['form_ids'] = np_ids
bomber_app_logs['status'] = 1
else:
dispatch_apps_to_bomber(cycle, np_apps, bids, app_logs, False)
dispatch_apps_to_bomber(cycle, p_apps, bids, app_logs, False)
def dispatch_apps_to_bomber(cycle, apps, bids, app_logs, out_partner=True,
type=ApplicationType.CASH_LOAN.value):
apps = list(apps)
random.shuffle(apps)
bids_list = list(bids.keys())
if len(bids_list) <= 0:
logging.info('get_dispatch_app_to_bomber no bids')
return
average_num = get_average_number(len(apps), len(bids_list))
bomber_end = 0
with db.atomic():
for index, bid in enumerate(bids_list):
current_bomber = bids.get(bid)
bomber_app_logs = app_logs.get(bid, {})
bomber_start = bomber_end
bomber_end = bomber_start + average_num[index]
bomber_apps = apps[bomber_start:bomber_end]
from_p, from_np, from_ids, status = [], [], [], 0
for ba in bomber_apps:
promised_date = ba.get('promised_date')
from_ids.append(ba['id'])
if promised_date and promised_date.date() >= date.today():
from_p.append(ba['id'])
else:
from_np.append(ba['id'])
app_status = ApplicationStatus.AB_TEST.value
if (cycle == Cycle.C1A.value and not out_partner and type ==
ApplicationType.CASH_LOAN.value):
app_status = ApplicationStatus.PROCESSING.value
if from_p:
p = Application.update(ptp_bomber=bid, latest_bomber=bid,
status=app_status).where(Application.id << from_p).execute(
)
p_ids = bomber_app_logs.get('p_ids', []) + from_p
bomber_app_logs['p_ids'] = p_ids
if from_np:
np = Application.update(latest_bomber=bid, ptp_bomber=None,
status=ApplicationStatus.AB_TEST.value).where(
Application.id << from_np).execute()
np_ids = bomber_app_logs.get('np_ids', []) + from_np
bomber_app_logs['np_ids'] = np_ids
in_param = {'cycle': cycle, 'dest_partner_id': current_bomber.
partner_id, 'application_ids': from_ids, 'dest_bomber_id': bid}
if type == ApplicationType.CASH_LOAN.value:
out_param = {'src_bomber_id': bid, 'application_ids':
bomber_app_logs.get('to_ids', []), 'month_dispatch': 1}
new_out_record(**out_param)
new_in_record(**in_param)
else:
out_and_in_record_instalment(**in_param)
bomber_app_logs['status'] = 1
need_num = bomber_app_logs.get('need_num', 0) + average_num[index]
bomber_app_logs['need_num'] = need_num
all_form_ids = bomber_app_logs.get('form_ids', []) + from_ids
bomber_app_logs['form_ids'] = all_form_ids
if not out_partner:
continue
try:
DispatchApp.delete().where(DispatchApp.application.in_(
from_ids)).execute()
dispatch_ins = [{'application': id, 'partner':
current_bomber.partner_id, 'bomber': bid, 'status':
DisAppStatus.NORMAL.value} for id in from_ids]
DispatchApp.insert_many(dispatch_ins).execute()
except Exception as e:
logging.info(
'month_disapp_error error:%s,bid:%s,from_ids:%s' % (str
(e), bid, from_ids))
<mask token>
def calc_entry_time(overdue_days):
app_entry_time = {}
overdue_entry = {'dpd1_entry': [1, 3], 'C1A_entry': [4, 10],
'C1B_entry': [11, 30], 'C2_entry': [31, 60], 'C3_entry': [61, 90]}
for key, value in overdue_entry.items():
if value[0] <= overdue_days <= value[1]:
app_entry_time[key] = datetime.now()
else:
app_entry_time[key] = None
return app_entry_time
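# Examples derived from the ranges above:
#   calc_entry_time(5)  -> only 'C1A_entry' is set to now(); the rest are None
#   calc_entry_time(95) -> every *_entry key is None (outside all ranges)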
<mask token>
@action(MessageAction.SUMMARY_BOMBER_OVERDUE)
def summary_bomber_overdue_everyday(payload, msg_id):
cycle_list = Cycle.values()
which_day = date.today()
for cycle in cycle_list:
apps = ApplicationR.select(ApplicationR.id, ApplicationR.cycle,
ApplicationR.ptp_bomber, ApplicationR.overdue_days,
ApplicationR.promised_date, ApplicationR.follow_up_date,
ApplicationR.external_id, OverdueBillR.status, OverdueBillR.
periods, OverdueBillR.sub_bill_id).join(OverdueBillR,
JOIN_LEFT_OUTER, on=ApplicationR.id == OverdueBillR.collection_id
).where(ApplicationR.status != ApplicationStatus.REPAID.value,
ApplicationR.no_active == 0, ApplicationR.cycle == cycle).dicts()
bomber_overdue_list = []
for app in apps:
status = app.get('status')
if status == ApplicationStatus.REPAID.value:
continue
ptp_bomber = app.get('ptp_bomber')
promised_date = app.get('promised_date')
follow_up_date = app.get('follow_up_date')
if not promised_date or promised_date.date() < date.today():
ptp_bomber = promised_date = None
if not follow_up_date or follow_up_date.date() < date.today():
follow_up_date = None
overdue_dict = {'collection_id': app.get('id'), 'external_id':
app.get('external_id'), 'sub_bill_id': app.get(
'sub_bill_id'), 'periods': app.get('periods'), 'cycle': app
.get('cycle') if app.get('cycle') else cycle, 'ptp_bomber':
ptp_bomber, 'promised_date': promised_date,
'follow_up_date': follow_up_date, 'which_day': which_day,
'overdue_days': app.get('overdue_days')}
bomber_overdue_list.append(overdue_dict)
try:
if bomber_overdue_list:
with db.atomic():
for index in range(0, len(bomber_overdue_list), 1000):
insert_list = bomber_overdue_list[index:index + 1000]
BomberOverdue.insert_many(insert_list).execute()
except Exception as e:
logging.info(
'summary_bomber_overdue_error,cycle:%s,which_day:%s,error:%s' %
(cycle, str(which_day), str(e)))
<mask token>
@action(MessageAction.BOMBER_TODAY_PTP_FOLLOW_SWITCH_OFF)
def today_ptp_auto_call_switch(payload, msg_id):
today = datetime.today().date()
next_day = today + timedelta(days=1)
apps = ApplicationR.select(ApplicationR.latest_bomber).where(
ApplicationR.promised_date < next_day, ApplicationR.promised_date >=
today, ApplicationR.promised_date.is_null(False), ApplicationR.
status != ApplicationStatus.REPAID.value, ApplicationR.cycle <
Cycle.C2.value, ApplicationR.latest_bomber.is_null(False)).group_by(
ApplicationR.latest_bomber)
bids = [a.latest_bomber_id for a in apps]
if not bids:
return
q = BomberPtp.update(today_switch=BomberCallSwitch.OFF.value).where(
BomberPtp.auto_ext.is_null(False), BomberPtp.bomber_id << bids
).execute()
@action(MessageAction.BOMBER_TODAY_PTP_FOLLOW_SWITCH_ON)
def update_today_switch_every_day(payload, msg_id):
q = BomberPtp.update(today_switch=BomberCallSwitch.ON.value).where(
BomberPtp.auto_ext.is_null(False)).execute()
<mask token>
<mask token>
@action(MessageAction.BOMBER_HEALTH_CHECK)
def health_check(payload, msg_id):
pass
<mask token>
def dpd1_process(lst):
"""已废弃的方法"""
if not lst:
return
for key, l in lst.items():
rule = getattr(BeforeInBomber, key).value
query = AutoIVRActions.select(fn.DISTINCT(AutoIVRActions.loanid)
).where(AutoIVRActions.loanid.in_(l), AutoIVRActions.group.in_(
rule.get('group')), AutoIVRActions.callstate.in_(IVRCallStatus.
call_success()))
success_set = {i.loanid for i in query}
failed_list = list(set(l) - success_set)
post_params = {'$and': rule.get('$and'), 'app_list': failed_list}
resp = Hyperloop().post('/bomber/score/verify', json=post_params)
if not resp.ok:
logging.error('hyperloop score verification failed: %s, %s',
str(resp.status_code), str(resp.text))
logging.error('hyperloop score verification failed: %s', str(
post_params))
continue
logging.debug('hyperloop score verification success: %s', resp.content)
resp_json = resp.json()
app_list = resp_json['data']
if not app_list:
continue
for item in app_list:
if random.randint(0, 5) == 1:
send_to_default_q(MessageAction.APPLICATION_BOMBER, {'id':
int(item)})
@action(MessageAction.GET_IVR)
def get_ivr(payload, msg_id):
logging.warning('start get_ivr')
sys_config = SystemConfig.select().where(SystemConfig.key ==
'DPD1-3_INTO_IVR').first()
now = date.today()
if sys_config and sys_config.value:
start = now - timedelta(days=3)
else:
start = now
end = now + timedelta(days=4)
item = IVRActionLog.filter(IVRActionLog.proc_date == now).first()
if not item:
AutoIVR.delete().execute()
current_page = 0
elif item.current_page >= item.total_page:
return
else:
current_page = item.current_page
auto_ivr = {'DanaCepat01': 1, 'DanaCepat00': 2, 'DanaCepat0PDP1': 3,
'PinjamUang01': 4, 'PinjamUang00': 5, 'PinjamUang0PDP1': 6,
'KtaKilat01': 7, 'KtaKilat00': 8, 'KtaKilat0PDP1': 9, 'DanaCepat11':
10, 'DanaCepat10': 11, 'DanaCepat1PDP1': 12, 'PinjamUang11': 13,
'PinjamUang10': 14, 'PinjamUang1PDP1': 15, 'KtaKilat11': 16,
'KtaKilat10': 17, 'KtaKilat1PDP1': 18, 'DanaCepat0PDP2': 19,
'DanaCepat0PDP3': 20, 'DanaCepat03': 21, 'PinjamUang0PDP2': 22,
'PinjamUang0PDP3': 23, 'PinjamUang03': 24, 'KtaKilat0PDP2': 25,
'KtaKilat0PDP3': 26, 'KtaKilat03': 27, 'DanaCepat1PDP2': 28,
'DanaCepat1PDP3': 29, 'PinjamUang1PDP2': 30, 'PinjamUang1PDP3': 31,
'KtaKilat1PDP2': 32, 'KtaKilat1PDP3': 33, 'DanaCepat13': 36,
'PinjamUang13': 37, 'KtaKilat13': 38, 'DanaCepat12': 39,
'PinjamUang12': 40, 'KtaKilat12': 41, 'DanaCepat02': 42,
'PinjamUang02': 43, 'KtaKilat02': 44, 'IKIDana01': 100, 'IKIDana00':
101, 'IKIDana0PDP1': 102, 'IKIDana11': 103, 'IKIDana10': 104,
'IKIDana1PDP1': 105, 'IKIDana0PDP2': 106, 'IKIDana0PDP3': 107,
'IKIDana03': 108, 'IKIDana1PDP2': 109, 'IKIDana1PDP3': 110,
'IKIDana13': 111, 'IKIDana12': 112, 'IKIDana02': 113}
current_page += 1
with db.atomic() as transaction:
while True:
bill_service = BillService()
ivr_action = bill_service.ivr_pages(page=current_page,
page_size=500, start_time=utc_datetime(str(start)),
end_time=utc_datetime(str(end)))
result = ivr_action['result']
page_size = int(ivr_action.get('page_size', 0))
total_page = int(ivr_action.get('total_page', 0))
insert_args = []
for a in result:
due_at = no_utc_datetime(a['due_at'])
days = (due_at.date() - now).days
if days == 2:
continue
if days > 0:
time = str(days)
else:
time = str(days).replace('-', 'PDP')
key = a['app_name'] + str(a['su']) + time
group = auto_ivr.get(key)
user_id = a['user_id']
try:
user_resp = AccountService().get_user(path_params={
'user_id': user_id})
if str(user_resp['mobile_no']) == str(a['user_mobile_no']):
numbers = a['user_mobile_no']
else:
numbers = a['user_mobile_no'] + ',' + user_resp.get(
'mobile_no')
                except Exception:
logging.error('Request Account Service Error.')
numbers = a['user_mobile_no']
insert_args.append({'application_id': a['id'], 'numbers':
numbers, 'group': group, 'user_id': user_id})
AutoIVR.insert_many(insert_args).execute()
if current_page == 1:
IVRActionLog.create(total_page=total_page, proc_date=now,
page_size=page_size, current_page=current_page)
item = IVRActionLog.get(IVRActionLog.proc_date == now)
else:
item.current_page = current_page
item.page_size = page_size
item.total_page = total_page
item.save()
transaction.commit()
current_page += 1
if current_page > int(total_page):
break
if sys_config and sys_config.value:
try:
classfiy_dpd_ptp_apps()
except Exception as e:
logging.error('dpd1-3_test_error:%s' % str(e))
def ivr_t2_test():
t2_groups = [39, 40, 41, 42, 43, 44]
ivr_test_proportion = 0.2
sys_config = SystemConfig.select().where(SystemConfig.key ==
'IVR_TEST_PROPORTION').first()
if sys_config and sys_config.value:
ivr_test_proportion = float(sys_config.value)
t2_ivrs = AutoIVR.select().where(AutoIVR.group << t2_groups, AutoIVR.
status == AutoIVRStatus.AVAILABLE.value)
t2_dict = defaultdict(list)
for ivr in t2_ivrs:
t2_dict[ivr.group].append(ivr.id)
test_ivr_ids = []
for group, ivr_ids in t2_dict.items():
number = ceil(len(ivr_ids) * ivr_test_proportion)
test_ivr_ids += ivr_ids[:number]
if not test_ivr_ids:
return
q = AutoIVR.update(status=AutoIVRStatus.SUCCESS.value).where(AutoIVR.
group << t2_groups, AutoIVR.id.not_in(test_ivr_ids)).execute()
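# Sampling sketch for ivr_t2_test (hypothetical ids, default proportion 0.2):
# a group with ids [1, 2, 3, 4, 5] keeps ceil(5 * 0.2) == 1 id available for
# the test; every other t2 record is flipped to SUCCESS so it is skipped.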
<mask token>
@action(MessageAction.APP_MERGE)
@deprecated(version='1.0', reason='This function will be removed soon')
def app_merge(payload, msg_id):
sql = """
select *
from (
select a.id as id
from dashboard.application as a
inner join repayment.bill2 as b on b.external_id = a.id
where not exists (
select 1
from battlefront.user_login_log as u
where u.created_at > '2018-08-16'
and u.user_id = a.user_id
)
and a.app = 'DanaCepat'
and a.is_first_loan = 1
and a.apply_at < '2018-08-23 20:50:00'
and b.overdue_days between 1 and 3
and b.status != 2) result
where not exists (
select 1
from bomber.application as a
where a.cycle = 1
and a.status = 4
and a.id = result.id
)
"""
cursor = readonly_db.get_cursor()
cursor.execute(sql)
new_data = cursor.fetchall()
cursor.close()
if new_data:
bomber = [103, 104]
for d in new_data:
app_id = {'id': d[0]}
application_overdue(app_id, None)
Application.update(status=ApplicationStatus.AB_TEST.value,
latest_bomber=random.choice(bomber), ptp_bomber=None).where(
Application.id == d[0]).execute()
logging.warning('add new app success')
ptp = date.today() - timedelta(days=1)
del_sql = (
"""
select a.id
from bomber.application as a
where exists(
select 1
from battlefront.user_login_log as u
where u.created_at > '2018-08-16'
and u.user_id = a.user_id
)
and a.cycle = 1
and a.status = 4
and (a.promised_date is null or a.promised_date < "%s")
"""
% ptp)
cursor = readonly_db.get_cursor()
cursor.execute(del_sql)
del_date = cursor.fetchall()
cursor.close()
    if not del_date:
        return
ids = list()
for d in del_date:
ids.append(d[0])
Application.update(status=ApplicationStatus.UNCLAIMED.value,
latest_bomber=None).where(Application.id << ids).execute()
@action(MessageAction.APPLICATION_BOMBER)
def application_overdue(payload, msg_id):
application_id = payload['id']
sub_bill_id = payload['bill_sub_id']
local_app = Application.select().where(Application.external_id ==
application_id).order_by(Application.finished_at).first()
if local_app and local_app.type != ApplicationType.CASH_LOAN_STAGING.value:
logging.info('application %s overdue, already exists', application_id)
add_contact(local_app)
return
if local_app and local_app.type == ApplicationType.CASH_LOAN_STAGING.value:
overdue_bill = OverdueBillR.select().where(OverdueBillR.sub_bill_id ==
sub_bill_id, OverdueBillR.external_id == application_id)
if overdue_bill.exists():
logging.info(
'application %s,sub_bill_id %s overdue, already exists' % (
application_id, sub_bill_id))
return
try:
sub_bill = BillService().sub_bill_list(bill_sub_ids=[sub_bill_id])
sub_bill = sub_bill[0]
except Exception:
logging.error(
'application %s overdue, get sub_bill info failed:Request To repayment Error'
, application_id)
return
if sub_bill['status'] == 2:
logging.error('application %s overdue, but bills already cleared',
application_id)
return
overdue_days = sub_bill.get('overdue_days', 0)
if overdue_days == 0:
logging.info('application {} no overdue'.format(str(application_id)))
return
gold_eye = GoldenEye().get('/applications/%s' % application_id)
if not gold_eye.ok:
logging.error('get application %s failed: Request to GoldenEye.',
application_id)
return
gold_app = gold_eye.json().get('data')
user_id = gold_app['user_id']
apply_history = Dashboard().get('/users/%s/apply-history' % user_id)
if not apply_history.ok:
logging.error(
'get user %s apply history failed: Request to Dashboard Failed.',
user_id)
return
history = apply_history.json().get('data')
    loan_success_times = len([1 for i in history if i['status'] in [80, 90,
        100, 70] and i['id'] != gold_app['id']])
id = application_id
type = ApplicationType.CASH_LOAN.value
bill_id = sub_bill.get('bill_id')
amount = sub_bill.get('amount')
amount_net = sub_bill.get('amount_net')
interest_rate = sub_bill.get('interest_rate')
overdue_days = sub_bill.get('overdue_days')
origin_due_at = sub_bill.get('origin_due_at')
sub_overdue_bill = {'collection_id': id, 'bill_id': bill_id,
'sub_bill_id': sub_bill_id, 'periods': sub_bill.get('periods'),
'overdue_days': overdue_days, 'origin_due_at': origin_due_at,
'amount': amount, 'amount_net': amount_net, 'interest_rate':
interest_rate, 'external_id': application_id}
if sub_bill['category'] == ApplicationType.CASH_LOAN_STAGING.value:
if local_app and local_app.status != ApplicationStatus.REPAID.value:
sub_overdue_bill['collection_id'] = local_app.id
local_app.amount += amount
local_app.amount_net += amount_net
local_app.save()
new_overdue = OverdueBill.create(**sub_overdue_bill)
logging.info('application %s,sub_bill_id:%s overdue created' %
(application_id, sub_bill_id))
return
else:
id = idg()
type = ApplicationType.CASH_LOAN_STAGING.value
sub_overdue_bill['collection_id'] = id
ptp_info = BombingHistory.filter(BombingHistory.application == id).first()
promised_amount = ptp_info and ptp_info.promised_amount
promised_date = ptp_info and ptp_info.promised_date
application = Application.create(id=id, user_id=gold_app['user_id'],
user_mobile_no=gold_app['user_mobile_no'], user_name=gold_app[
'id_name'], app=gold_app['app'], device_no=gold_app['device_no'],
contact=json.dumps(gold_app.get('contact')), apply_at=gold_app.get(
'apply_date'), id_ektp=gold_app.get('id_ektp'), birth_date=
birth_dt_ktp(gold_app.get('id_ektp')), gender=gender_ktpnum(
gold_app.get('id_ektp')), profile_province=(gold_app.get(
'profile_province') or {}).get('name'), profile_city=(gold_app.get(
'profile_city') or {}).get('name'), profile_district=(gold_app.get(
'profile_district') or {}).get('name'), profile_residence_time=
gold_app.get('profile_residence_time'), profile_residence_type=
gold_app.get('profile_residence_type'), profile_address=gold_app.
get('profile_address'), profile_education=gold_app.get(
'profile_education'), profile_college=(gold_app.get(
'profile_college') or {}).get('name'), job_name=gold_app.get(
'job_name'), job_tel=gold_app.get('job_tel'), job_bpjs=gold_app.get
('job_bpjs'), job_user_email=gold_app.get('job_user_email'),
job_type=gold_app.get('job_type'), job_industry=gold_app.get(
'job_industry'), job_department=gold_app.get('job_department'),
job_province=(gold_app.get('job_province') or {}).get('name'),
job_city=(gold_app.get('job_city') or {}).get('name'), job_district
=(gold_app.get('job_district') or {}).get('name'), job_address=
gold_app.get('job_address'), amount=amount, amount_net=amount_net,
interest_rate=interest_rate, term=gold_app.get('term'),
origin_due_at=origin_due_at, overdue_days=overdue_days, repay_at=
sub_bill.get('repay_at'), loan_success_times=loan_success_times,
arrived_at=datetime.now(), follow_up_date=datetime.now(),
promised_amount=promised_amount, promised_date=promised_date,
external_id=application_id, type=type, bill_id=bill_id, dpd1_entry=
datetime.now())
new_overdue = OverdueBill.create(**sub_overdue_bill)
logging.info('overdue application %s created', application_id)
Escalation.create(application=id, type=EscalationType.AUTOMATIC.value,
status=ApprovalStatus.APPROVED.value, current_cycle=0, escalate_to=1)
add_contact(application)
def add_contact(application):
logging.info('start add contact for application: %s', application.id)
contacts = Contact.filter(Contact.user_id == application.user_id)
existing_numbers = {contact.number for contact in contacts}
insert_contacts = list()
mon_insert_contact = {}
user_mobile_no = number_strip(application.user_mobile_no)
if user_mobile_no and user_mobile_no not in existing_numbers:
insert_contacts.append({'user_id': application.user_id, 'name':
application.user_name, 'number': user_mobile_no, 'relationship':
Relationship.APPLICANT.value, 'source': 'apply info',
'real_relationship': Relationship.APPLICANT.value})
existing_numbers.add(number_strip(application.user_mobile_no))
extra_phone = GoldenEye().get('/users/%s/extra-phone' % application.user_id
)
if not extra_phone.ok:
extra_phone = []
logging.error('get user %s extra contacts failed', application.user_id)
else:
extra_phone = extra_phone.json()['data']
if extra_phone:
for i in extra_phone:
number = number_strip(i['number'])[:64]
if not number:
continue
if number in existing_numbers:
continue
insert_contacts.append({'user_id': application.user_id, 'name':
application.user_name, 'number': number, 'relationship':
Relationship.APPLICANT.value, 'source': 'extra phone',
'real_relationship': Relationship.APPLICANT.value})
key = user_mobile_no, number, ContactType.A_EXTRA_PHONE.value
mon_insert_contact[key] = 1, 0, application.user_name
existing_numbers.add(number)
ec_contact = []
contact = json.loads(application.contact or '[]')
for i in contact:
if number_strip(i['mobile_no']
) not in existing_numbers and number_strip(i['mobile_no']):
ec_contact.append({'user_id': application.user_id, 'name': i[
'name'], 'number': number_strip(i['mobile_no']),
'relationship': Relationship.FAMILY.value, 'sub_relation':
SubRelation.EC.value, 'source': FamilyContactType.EC.value,
'real_relationship': Relationship.FAMILY.value})
key = user_mobile_no, number_strip(i['mobile_no']
), ContactType.F_EC.value
mon_insert_contact[key] = 1, 0, i['name']
existing_numbers.add(number_strip(i['mobile_no']))
if i['type'] != 1:
continue
if number_strip(i['tel_no']) not in existing_numbers and number_strip(i
['tel_no']):
ec_contact.append({'user_id': application.user_id, 'name': i[
'name'], 'number': number_strip(i['tel_no']),
'relationship': Relationship.FAMILY.value, 'sub_relation':
SubRelation.EC.value, 'source': FamilyContactType.EC.value,
'real_relationship': Relationship.FAMILY.value})
key = user_mobile_no, number_strip(i['tel_no']
), ContactType.F_EC.value
mon_insert_contact[key] = 1, 0, i['name']
existing_numbers.add(number_strip(i['tel_no']))
if ec_contact:
Contact.insert_many(ec_contact).execute()
if all((application.job_tel, number_strip(application.job_tel),
number_strip(application.job_tel) not in existing_numbers)):
insert_contacts.append({'user_id': application.user_id, 'name':
None, 'number': number_strip(application.job_tel),
'relationship': Relationship.COMPANY.value, 'source':
'basic info job_tel', 'real_relationship': Relationship.COMPANY
.value})
key = user_mobile_no, number_strip(application.job_tel
), ContactType.C_BASIC_INFO_JOB_TEL.value
mon_insert_contact[key] = 1, 0, None
existing_numbers.add(number_strip(application.job_tel))
sms_contacts = GoldenEye().get('/applications/%s/sms-contacts' %
application.external_id)
if not sms_contacts.ok:
sms_contacts = []
logging.info('get user %s sms contacts failed', application.external_id
)
else:
sms_contacts = sms_contacts.json()['data']
if sms_contacts:
for i in sms_contacts:
number = number_strip(i['number'])[:64]
if not number:
continue
if number in existing_numbers:
continue
insert_contacts.append({'user_id': application.user_id, 'name':
i['name'][:128], 'number': number, 'relationship':
Relationship.SUGGESTED.value, 'source': 'sms contacts',
'real_relationship': Relationship.SUGGESTED.value})
key = user_mobile_no, number, ContactType.S_SMS_CONTACTS.value
mon_insert_contact[key] = 1, 0, i['name'][:128]
existing_numbers.add(number)
if insert_contacts:
Contact.insert_many(insert_contacts).execute()
cf = GoldenEye().get('/applications/%s/call/frequency' % application.
external_id)
if not cf.ok:
call_frequency = []
logging.error('get application %s call frequency error',
application.external_id)
else:
call_frequency = cf.json()['data']
insert_contacts = []
fm = GoldenEye().get('/applications/%s/contact/family-member' %
application.external_id)
if not fm.ok:
family = []
logging.error('get application %s family-member info error',
application.external_id)
else:
family = fm.json()['data']
if family:
for i in family:
if not i.get('number'):
logging.info('family member %s' % str(i))
continue
number = number_strip(i['number'])[:64]
if not number:
continue
if number in existing_numbers:
continue
logging.info('family members: %s' % str(i))
insert_contacts.append({'user_id': application.user_id, 'name':
i['name'][:128], 'number': number, 'relationship':
Relationship.FAMILY.value, 'source': FamilyContactType.
CALLEC.value, 'total_count': i.get('total_count', 1),
'total_duration': i.get('total_duration', 0),
'real_relationship': Relationship.FAMILY.value})
key = user_mobile_no, number, ContactType.F_CALL_EC.value
mon_insert_contact[key] = i.get('total_count', 1), i.get(
'total_duration', 0), i['name'][:128]
existing_numbers.add(number)
mon_update_contact = {}
if call_frequency:
with db.atomic():
count = 1
for i in call_frequency:
number = number_strip(i['number'])[:64]
if not number:
continue
if number in existing_numbers:
                    Contact.update(total_count=i['total_count'],
                        total_duration=i['total_duration']).where(Contact.
                        number == number, Contact.user_id == application.
                        user_id).execute()
key = user_mobile_no, number
mon_update_contact[key] = i['total_count'], i[
'total_duration']
continue
if count < 6:
insert_contacts.append({'user_id': application.user_id,
'name': i['name'][:128], 'number': number,
'relationship': Relationship.FAMILY.value,
'total_count': i['total_count'], 'total_duration':
i['total_duration'], 'source': FamilyContactType.
CALLTOP5.value, 'real_relationship': Relationship.
FAMILY.value})
count += 1
key = user_mobile_no, number, ContactType.F_CALL_TOP5.value
mon_insert_contact[key] = i['total_count'], i[
'total_duration'], i['name'][:128]
else:
insert_contacts.append({'user_id': application.user_id,
'name': i['name'][:128], 'number': number,
'relationship': Relationship.SUGGESTED.value,
'total_count': i['total_count'], 'total_duration':
i['total_duration'], 'source': 'call frequency',
'real_relationship': Relationship.SUGGESTED.value})
key = (user_mobile_no, number, ContactType.
S_CALL_FREQUENCY.value)
mon_insert_contact[key] = i['total_count'], i[
'total_duration'], i['name'][:128]
existing_numbers.add(number)
if insert_contacts:
Contact.insert_many(insert_contacts).execute()
next_apply_list = AccountService().add_contact(application.user_id)
for next_apply in next_apply_list:
number = number_strip(str(next_apply))[:64]
if number and number not in existing_numbers:
Contact.create(user_id=application.user_id, name=application.
user_name, number=number, relationship=Relationship.
SUGGESTED.value, source='online profile phone',
real_relationship=Relationship.SUGGESTED.value)
key = (user_mobile_no, number, ContactType.
S_ONLINE_PROFILE_PHONE.value)
mon_insert_contact[key] = 1, 0, application.user_name
existing_numbers.add(number)
next_applicant = GoldenEye().get('/bomber/%s/dual_contact' %
application.user_id)
if not next_applicant.ok:
next_applicant = []
logging.error('get user %s dual_contact contacts failed' %
application.user_id)
else:
next_applicant = next_applicant.json()['data']
if next_applicant:
for i in next_applicant:
number = number_strip(str(i))[:64]
if number and number not in existing_numbers:
Contact.create(user_id=application.user_id, name=
application.user_name, number=number, relationship=
Relationship.APPLICANT.value, source='apply info',
real_relationship=Relationship.APPLICANT.value)
key = user_mobile_no, number, ContactType.A_APPLY_INFO.value
mon_insert_contact[key] = 1, 0, application.user_name
existing_numbers.add(number)
logging.info('get user %s dual_contact contacts success' %
application.user_id)
numbers = []
try:
numbers = AccountService().ktp_number(path_params={'user_id':
application.user_id})
except Exception as e:
logging.info('request ktp numbers failed %s' % str(e))
for n in numbers:
number = number_strip(str(n))[:64]
if number and number not in existing_numbers:
Contact.create(user_id=application.user_id, name=application.
user_name, number=number, relationship=Relationship.
APPLICANT.value, source='ktp number', real_relationship=
Relationship.APPLICANT.value)
key = user_mobile_no, number, ContactType.A_KTP_NUMBER.value
mon_insert_contact[key] = 1, 0, application.user_name
existing_numbers.add(number)
logging.info('get user %s dual_contact contacts success' %
application.user_id)
try:
ecs = GoldenEye().get('/applications/%s/contact/ec' % application.
external_id)
except Exception as e:
logging.info('request ec-member error: %s' % str(e))
try:
if not ecs.ok:
ec = []
logging.info('get application %s ec-member info error',
application.external_id)
else:
ec = ecs.json()['data']
if ec:
for e in ec:
number = number_strip(e['numbers'])[:64]
if not number:
continue
if number not in existing_numbers:
Contact.create(user_id=application.user_id, name=e[
'name'][:128], number=number, relationship=
Relationship.FAMILY.value, source=FamilyContactType
.CONTACTEC.value, real_relationship=Relationship.
FAMILY.value)
key = (user_mobile_no, number, ContactType.F_CONTACT_EC
.value)
mon_insert_contact[key] = 1, 0, e['name'][:128]
existing_numbers.add(number)
except Exception as e:
logging.info('add ec_member error:%s' % str(e))
try:
mn = GoldenEye().get('/applications/%s/contact/my_number' %
application.external_id)
except Exception as e:
logging.info('request my_number error: %s' % str(e))
try:
if not mn.ok:
my = []
logging.info('get application %s my_number info error',
application.external_id)
else:
my = mn.json()['data']
if my:
for m in my:
number = number_strip(m)[:64]
if not number:
continue
if number not in existing_numbers:
Contact.create(user_id=application.user_id, name=my[m][
:128], number=number, relationship=Relationship.
SUGGESTED.value, source='my number',
real_relationship=Relationship.SUGGESTED.value)
key = user_mobile_no, number, ContactType.S_MY_NUMBER.value
mon_insert_contact[key] = 1, 0, my[m][:128]
existing_numbers.add(number)
except Exception as e:
logging.info('add my_member error:%s' % str(e))
try:
cn = GoldenEye().get('/applications/%s/contact/company-number' %
application.external_id)
except Exception as e:
logging.info('request company-number error: %s' % str(e))
try:
if not cn.ok:
cn = []
logging.info('get application %s company_number info error',
application.external_id)
else:
cn = cn.json()['data']
if cn:
for c in cn:
number = c
if not number:
continue
if number not in existing_numbers:
Contact.create(user_id=application.user_id, name=cn[c][
:128], number=number, relationship=Relationship.
COMPANY.value, source='company', real_relationship=
Relationship.COMPANY.value)
key = user_mobile_no, number, ContactType.C_COMPANY.value
mon_insert_contact[key] = 1, 0, cn[c][:128]
existing_numbers.add(number)
except Exception as e:
logging.info('add company_member error:%s' % str(e))
try:
ol = AccountService().other_login_contact(userId=application.user_id)
except Exception as e:
logging.error('request other_login error: %s' % e)
ol = {}
try:
for o in ol:
number = number_strip(o)
if not number:
continue
if number not in existing_numbers:
Contact.create(user_id=application.user_id, name=ol[o][:128
], number=number, relationship=Relationship.SUGGESTED.
value, source='other_login', real_relationship=
Relationship.SUGGESTED.value)
key = user_mobile_no, number, ContactType.S_OTHER_LOGIN.value
mon_insert_contact[key] = 1, 0, ol[o][:128]
except Exception as e:
logging.error('add other_login number error:%s' % e)
logging.info('add contact for application %s finished', application.id)
if mon_insert_contact or mon_update_contact:
send_to_default_q(MessageAction.IMPORT_CONTACT_TO_MON, {
'user_mobile_no': user_mobile_no, 'insert_contact': str(
mon_insert_contact), 'update_contact': str(mon_update_contact),
'user_id': application.user_id, 'name': application.user_name})
<mask token>
def get_contact_from_mongo(number):
if not number:
return []
query = TotalContact.objects(src_number=number, source__in=TotalContact
.available()).order_by('source')
lst = []
for c in query:
relation = TotalContact.relationship(c.source)
if relation == -1:
continue
source = TotalContact.str_source(c.source)
if not source:
continue
lst.append({'related_number': c.dest_number, 'source': source,
'is_calc': c.is_calc, 'total_count': c.total_count,
'total_duration': c.total_duration, 'relation': relation,
'name': c.dest_name})
return lst
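# Each returned item follows the dict literal above; values below are
# hypothetical:
#   {'related_number': '0812xxx', 'source': 'sms', 'is_calc': 1,
#    'total_count': 3, 'total_duration': 40, 'relation': 2, 'name': 'Budi'}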
<mask token>
def check_key_not_none(payload, keys):
for key in keys:
if payload.get(key) is None:
logging.error('Missing args {}'.format(str(key)))
return False
return True
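# e.g. check_key_not_none({'a': 1, 'b': None}, ['a'])       -> True
#      check_key_not_none({'a': 1, 'b': None}, ['a', 'b'])  -> False (logs 'b')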
@action(MessageAction.BILL_PAID)
def bill_paid(payload, msg_id):
validate = check_key_not_none(payload, ['external_id', 'late_fee_part',
'principal_part', 'paid_at', 'bill_sub_id', 'partner_bill_id'])
if not validate:
logging.error('payload key not fully pass in.')
return
external_id = payload['external_id']
late_fee_part = Decimal(payload['late_fee_part'])
principal_part = Decimal(payload['principal_part'])
paid_at = payload['paid_at']
partner_bill_id = payload['partner_bill_id']
logging.debug(
'application %s paid principal part %s, paid late fee part %s',
external_id, principal_part, late_fee_part)
application = Application.filter(Application.external_id == external_id
).order_by(-Application.created_at).first()
if not application:
logging.info('application %s paid, not found application', external_id)
return
sub_bill_id = payload['bill_sub_id']
overdue_bill = OverdueBillR.select().where(OverdueBillR.collection_id ==
application.id, OverdueBillR.sub_bill_id == sub_bill_id).first()
if (application.type == ApplicationType.CASH_LOAN_STAGING.value and not
overdue_bill):
logging.info('bill sub not in bomber %s', sub_bill_id)
return
with db.atomic():
repay_at = str_no_utc_datetime(payload['latest_repay_at'])
Application.update(repay_at=repay_at).where(Application.id ==
application.id).execute()
RepaymentLog.create(application=application.id, is_bombed=True,
current_bomber=application.latest_bomber_id, cycle=application.
cycle, principal_part=principal_part, late_fee_part=
late_fee_part, repay_at=paid_at, ptp_bomber=application.
ptp_bomber, latest_call=application.latest_call, periods=
overdue_bill.periods if overdue_bill else None, overdue_bill_id
=overdue_bill.id if overdue_bill else None, partner_bill_id=
partner_bill_id)
phone_status = PhoneStatus.CONNECTED.value
real_relationship = RealRelationship.user_values()
commit = CallActionCommit.NO.value
number = CallActions.select(CallActions.number).where(CallActions.
phone_status == phone_status, CallActions.real_relationship <<
real_relationship, CallActions.commit == commit, CallActions.
application == application.id).order_by(-CallActions.created_at
).first()
if number:
Contact.update(call_priority=PriorityStatus.REPAY.value).where(
Contact.user_id == application.user_id, Contact.
call_priority == PriorityStatus.LAST.value).execute()
Contact.update(call_priority=PriorityStatus.LAST.value).where(
Contact.user_id == application.user_id, Contact.number ==
number.number).execute()
if not application.latest_bomber_id:
return
Inbox.create(title='application %s,sub_bill_id %s repaid' % (
application.external_id, sub_bill_id), content=
'application %s,sub_bill_id %s repaid' % (application.
external_id, sub_bill_id), receiver=application.
latest_bomber_id or application.last_bomber_id, category=
InboxCategory.REPAID.value)
@action(MessageAction.BILL_RELIEF)
def bill_relief(payload, msg_id):
"""已废弃"""
bill = payload['head_bill']
repay_at = str_no_utc_datetime(bill['latest_repay_at'])
updated_row = Application.update(repay_at=repay_at).where(Application.
id == bill['external_id']).execute()
logging.info('application %s bill relief done', bill['external_id'])
return updated_row
<mask token>
@action(MessageAction.BOMBER_CALC_OVERDUE_DAYS_OVER)
def calc_overdue_days_over(payload, msg_id):
"""
Call by BOMBER_CALC_SUMMARY
:param payload:
:param msg_id:
:return:
"""
now = fn.NOW()
origin_diff_days = fn.DATEDIFF(now, Application.origin_due_at)
overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))
query = Application.update(overdue_days=overdue_days).where(Application
.status << [ApplicationStatus.PROCESSING.value, ApplicationStatus.
UNCLAIMED.value, ApplicationStatus.AB_TEST.value], Application.
overdue_days > 95, Application.type == ApplicationType.CASH_LOAN.value)
updated_rows_count = query.execute()
logging.info('calc overdue days done, updated count: %s',
updated_rows_count)
try:
calc_overdue_days_over_instalment()
except Exception as e:
logging.error('calc_overdue_days_over_instalment_error: %s' % str(e))
apps = Application.filter(Application.status << [ApplicationStatus.
UNCLAIMED.value, ApplicationStatus.PROCESSING.value,
ApplicationStatus.AB_TEST.value], Application.overdue_days > 95,
Application.promised_date.is_null(True) | (fn.DATE(Application.
promised_date) < datetime.today().date()))
ids = [i.id for i in apps]
for idx in range(0, len(ids), 100):
send_to_default_q(MessageAction.BOMBER_AUTOMATIC_ESCALATION, {
'application_list': ids[idx:idx + 100]})
send_to_default_q(MessageAction.UPDATE_OLD_LOAN_APPLICATION, {})
def calc_overdue_days_over_instalment():
now = fn.NOW()
origin_diff_days = fn.DATEDIFF(now, OverdueBill.origin_due_at)
overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))
sub_bill_status_list = [ApplicationStatus.PROCESSING.value,
ApplicationStatus.UNCLAIMED.value, ApplicationStatus.AB_TEST.value]
for status in sub_bill_status_list:
query = OverdueBill.update(overdue_days=overdue_days).where(
OverdueBill.status == status, OverdueBill.overdue_days > 95)
updated_rows_count = query.execute()
logging.info(
'calc_overdue_days_over_instalment done,count:%s,status:%s' % (
updated_rows_count, status))
overdue_bills = OverdueBill.select(OverdueBill.collection_id,
OverdueBill.overdue_days).join(Application, JOIN_LEFT_OUTER, on
=OverdueBill.collection_id == Application.id).where(Application
.status == status, Application.type == ApplicationType.
CASH_LOAN_STAGING.value)
app_update = {}
for ob in overdue_bills:
if ob.collection_id not in app_update:
app_update[ob.collection_id] = ob.overdue_days
else:
ob_days = max(app_update[ob.collection_id], ob.overdue_days)
app_update[ob.collection_id] = ob_days
for aid, a_days in app_update.items():
q = Application.update(overdue_days=a_days).where(Application.
id == aid).execute()
logging.info('update instalment application done')
@action(MessageAction.BOMBER_CALC_OVERDUE_DAYS)
def calc_overdue_days(payload, msg_id):
"""
Call by BOMBER_CALC_SUMMARY
:param payload:
:param msg_id:
:return:
"""
now = fn.NOW()
origin_diff_days = fn.DATEDIFF(now, Application.origin_due_at)
overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))
query_unclaimed = Application.update(overdue_days=overdue_days).where(
Application.status == ApplicationStatus.UNCLAIMED.value,
Application.overdue_days <= 95, Application.type == ApplicationType
.CASH_LOAN.value)
updated_rows_count_unclaimed = query_unclaimed.execute()
logging.info('calc overdue days done, updated count: %s',
updated_rows_count_unclaimed)
query_processing = Application.update(overdue_days=overdue_days).where(
Application.status == ApplicationStatus.PROCESSING.value,
Application.overdue_days <= 95, Application.type == ApplicationType
.CASH_LOAN.value)
updated_rows_count_processing = query_processing.execute()
logging.info('calc overdue days done, updated count: %s',
updated_rows_count_processing)
query_test = Application.update(overdue_days=overdue_days).where(
Application.status == ApplicationStatus.AB_TEST.value, Application.
overdue_days <= 95, Application.type == ApplicationType.CASH_LOAN.value
)
updated_rows_count_test = query_test.execute()
logging.info('calc overdue days done, updated count: %s',
updated_rows_count_test)
calc_overdue_days_instalment()
apps = Application.select(Application.id).where(Application.status << [
ApplicationStatus.UNCLAIMED.value, ApplicationStatus.PROCESSING.
value, ApplicationStatus.AB_TEST.value], Application.overdue_days <=
95, Application.promised_date.is_null(True) | (fn.DATE(Application.
promised_date) < datetime.today().date()))
ids = [i.id for i in apps]
for idx in range(0, len(ids), 100):
send_to_default_q(MessageAction.BOMBER_AUTOMATIC_ESCALATION, {
'application_list': ids[idx:idx + 100]})
send_to_default_q(MessageAction.UPDATE_OLD_LOAN_APPLICATION, {})
Application.update(C1A_entry=datetime.now()).where(Application.status <<
[ApplicationStatus.UNCLAIMED.value, ApplicationStatus.PROCESSING.
value, ApplicationStatus.AB_TEST.value], Application.overdue_days == 4
).execute()
def calc_overdue_days_instalment():
now = fn.NOW()
origin_diff_days = fn.DATEDIFF(now, OverdueBill.origin_due_at)
overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))
sub_bill_status_list = [ApplicationStatus.PROCESSING.value,
ApplicationStatus.UNCLAIMED.value, ApplicationStatus.AB_TEST.value]
today_now_time = datetime.now()
month_first_day = today_now_time.replace(day=1, hour=1, minute=30,
second=0, microsecond=0)
for status in sub_bill_status_list:
query = OverdueBill.update(overdue_days=overdue_days).where(
OverdueBill.status == status, OverdueBill.overdue_days <= 95)
updated_rows_count = query.execute()
logging.info('calc_overdue_days_instalment done,count:%s,status:%s' %
(updated_rows_count, status))
overdue_bills = OverdueBill.select(OverdueBill.status, OverdueBill.
created_at, OverdueBill.collection_id, OverdueBill.overdue_days
).join(Application, JOIN_LEFT_OUTER, on=OverdueBill.
collection_id == Application.id).where(Application.status ==
status, Application.type == ApplicationType.CASH_LOAN_STAGING.value
)
app_update = {}
for ob in overdue_bills:
if (ob.status == ApplicationStatus.REPAID.value and ob.
created_at < month_first_day):
continue
if ob.collection_id not in app_update:
app_update[ob.collection_id] = ob.overdue_days
else:
ob_days = max(app_update[ob.collection_id], ob.overdue_days)
app_update[ob.collection_id] = ob_days
for aid, a_days in app_update.items():
q = Application.update(overdue_days=a_days).where(Application.
id == aid).execute()
logging.info('update instalment application done')
@action(MessageAction.BOMBER_AUTOMATIC_ESCALATION)
def automatic_escalation(payload, msg_id):
app_ids = payload.get('application_list', [])
if not app_ids:
return
apps = Application.select().where(Application.id.in_(app_ids),
Application.status != ApplicationStatus.REPAID.value)
for a in apps:
new_cycle = application_entry_different_calculations(a)
if a.overdue_days < 90:
logging.info(
'automatic_escalation_bomber_app_id:{},new_cycle:{},cycle:{},overdue_days:{}'
.format(a.id, new_cycle, a.cycle, a.overdue_days))
if new_cycle > a.cycle:
with db.atomic():
if a.latest_bomber_id or a.cycle in (Cycle.C1A.value, Cycle
.C1B.value):
bomber_id = (a.latest_bomber_id if a.latest_bomber_id else
a.cycle)
DispatchAppHistory.update(out_at=datetime.now(),
out_overdue_days=a.overdue_days).where(
DispatchAppHistory.application == a.id,
DispatchAppHistory.bomber_id == bomber_id).execute()
Escalation.create(application=a.id, type=EscalationType.
AUTOMATIC.value, status=ApprovalStatus.APPROVED.value,
current_cycle=a.cycle, escalate_to=new_cycle,
current_bomber_id=a.latest_bomber)
dis_app_update = DispatchApp.update(status=DisAppStatus.
ABNORMAL.value).where(DispatchApp.application == a.id)
dis_app_update.execute()
a.cycle = new_cycle
a.last_bomber = a.latest_bomber
a.status = ApplicationStatus.UNCLAIMED.value
a.latest_bomber = None
a.ptp_bomber = None
a.latest_call = None
a.called_times = 0
if new_cycle == Cycle.C1B.value:
a.C1B_entry = datetime.now()
elif new_cycle == Cycle.C2.value:
a.C2_entry = datetime.now()
elif new_cycle == Cycle.C3.value:
a.C3_entry = datetime.now()
a.save()
logging.info('automatic escalation done')
def application_entry_different_calculations(app):
    conf = {1: [1, 10], 2: [11, 30], 3: [31, 60], 4: [61, 90], 5: [91, 999999]}
for new_cycle, scopes in conf.items():
if scopes[0] <= app.overdue_days <= scopes[1]:
return new_cycle
return app.cycle
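# Hedged sketch: the boundary behaviour above can be checked without the ORM.
# `_FakeApp` is a hypothetical stand-in exposing the two attributes used.
from collections import namedtuple

_FakeApp = namedtuple('_FakeApp', ['overdue_days', 'cycle'])

def _demo_entry_calculation():
    # 10 overdue days stays in cycle 1; 11 crosses into cycle 2.
    assert application_entry_different_calculations(_FakeApp(10, 1)) == 1
    assert application_entry_different_calculations(_FakeApp(11, 1)) == 2
    # Anything past 90 days lands in cycle 5.
    assert application_entry_different_calculations(_FakeApp(120, 4)) == 5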
@action(MessageAction.BOMBER_CALC_SUMMARY)
def cron_summary(payload, msg_id):
"""已废弃"""
employees = Bomber.select(Bomber, Role).join(Role)
summary = {i.id: {'cycle': i.role.cycle, 'claimed': 0, 'completed': 0,
'cleared': 0, 'escalated': 0, 'transferred': 0, 'promised': 0,
'amount_recovered': Decimal(0), 'calls_made': 0, 'calls_connected':
0, 'sms_sent': 0} for i in employees}
now_date = date.today()
cal_date = now_date - timedelta(days=1)
claimed = Application.select(Application.latest_bomber, fn.COUNT(
Application.id).alias('claimed')).where(fn.DATE(Application.
claimed_at) == cal_date, Application.status << [ApplicationStatus.
PROCESSING.value, ApplicationStatus.REPAID.value], Application.
latest_bomber.is_null(False)).group_by(Application.latest_bomber)
cleared = Application.select(Application.latest_bomber, fn.COUNT(
Application.id).alias('cleared')).where(fn.DATE(Application.
finished_at) == cal_date, Application.status == ApplicationStatus.
REPAID.value, Application.latest_bomber.is_null(False)).group_by(
Application.latest_bomber)
completed = Application.select(Application.latest_bomber, fn.COUNT(
Application.id).alias('completed')).where(Application.
latest_bombing_time.is_null(False), fn.DATE(Application.
latest_bombing_time) == cal_date, Application.latest_bomber.is_null
(False)).group_by(Application.latest_bomber)
escalated = Escalation.select(Escalation.current_bomber, fn.COUNT(
Escalation.id).alias('escalated')).where(fn.DATE(Escalation.
created_at) == cal_date, Escalation.type == EscalationType.
AUTOMATIC.value, Escalation.current_bomber.is_null(False),
Escalation.status == ApprovalStatus.APPROVED.value).group_by(Escalation
.current_bomber)
transferred = Transfer.select(Transfer.operator, fn.COUNT(Transfer.id).
alias('transferred')).where(fn.DATE(Transfer.reviewed_at) ==
cal_date, Transfer.status == ApprovalStatus.APPROVED.value).group_by(
Transfer.operator)
promised = BombingHistory.select(BombingHistory.bomber, fn.COUNT(
BombingHistory.id).alias('promised')).where(fn.DATE(BombingHistory.
created_at) == cal_date, BombingHistory.result == BombingResult.
HAS_PROGRESS.value).group_by(BombingHistory.bomber)
amount_recovered = RepaymentLog.select(RepaymentLog.current_bomber, fn.
SUM(RepaymentLog.principal_part).alias('principal_part'), fn.SUM(
RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(
RepaymentLog.repay_at) == cal_date, RepaymentLog.is_bombed == True,
RepaymentLog.current_bomber.is_null(False)).group_by(RepaymentLog.
current_bomber)
calls_made = CallLog.select(CallLog.user_id, fn.COUNT(CallLog.record_id
).alias('calls_made')).where(fn.DATE(CallLog.time_start) ==
cal_date, CallLog.system_type == '1').group_by(CallLog.user_id)
calls_connected = CallLog.select(CallLog.user_id, fn.COUNT(CallLog.
record_id).alias('calls_connected')).where(fn.DATE(CallLog.
time_start) == cal_date, CallLog.duration > 10, CallLog.system_type ==
'1').group_by(CallLog.user_id)
sms_sent = ConnectHistory.select(ConnectHistory.operator, fn.COUNT(
ConnectHistory.id).alias('sms_sent')).where(ConnectHistory.type.in_
(ConnectType.sms()), ConnectHistory.created_at >= cal_date,
ConnectHistory.created_at < now_date).group_by(ConnectHistory.operator)
for i in claimed:
summary[i.latest_bomber_id]['claimed'] += i.claimed
for i in completed:
summary[i.latest_bomber_id]['completed'] += i.completed
for i in cleared:
summary[i.latest_bomber_id]['cleared'] += i.cleared
for i in escalated:
summary[i.current_bomber_id]['escalated'] += i.escalated
for i in transferred:
summary[i.operator_id]['transferred'] += i.transferred
for i in promised:
summary[i.bomber_id]['promised'] += i.promised
for i in amount_recovered:
        recovered = i.principal_part + i.late_fee_part
        summary[i.current_bomber_id]['amount_recovered'] += recovered
for i in calls_made:
summary[int(i.user_id)]['calls_made'] += i.calls_made
for i in calls_connected:
summary[int(i.user_id)]['calls_connected'] += i.calls_connected
for i in sms_sent:
summary[i.operator_id]['sms_sent'] += i.sms_sent
insert_args = []
for bomber_id, data in summary.items():
insert_args.append({'bomber': bomber_id, 'cycle': data['cycle'],
'claimed': data['claimed'], 'completed': data['completed'],
'cleared': data['cleared'], 'escalated': data['escalated'],
'transferred': data['transferred'], 'promised': data['promised'
], 'amount_recovered': data['amount_recovered'], 'calls_made':
data['calls_made'], 'calls_connected': data['calls_connected'],
'sms_sent': data['sms_sent'], 'date': cal_date})
if insert_args:
Summary.insert_many(insert_args).execute()
cycle_args = []
escalated_in = Escalation.select(Escalation.escalate_to, fn.COUNT(
Escalation.id).alias('escalated_in')).where(Escalation.status ==
ApprovalStatus.APPROVED.value, fn.DATE(Escalation.created_at) ==
cal_date).group_by(Escalation.escalate_to)
for i in escalated_in:
cycle_args.append({'cycle': i.escalate_to, 'escalated_in': i.
escalated_in, 'date': cal_date})
amount_recovered_total = RepaymentLog.select(RepaymentLog.cycle, fn.SUM
(RepaymentLog.principal_part).alias('principal_part'), fn.SUM(
RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(
RepaymentLog.repay_at) == cal_date).group_by(RepaymentLog.cycle)
for i in amount_recovered_total:
        recovered_total = i.principal_part + i.late_fee_part
        cycle_args.append({'cycle': i.cycle, 'amount_recovered_total':
            recovered_total, 'date': cal_date})
if cycle_args:
Summary.insert_many(cycle_args).execute()
logging.info('cal summary done')
send_to_default_q(MessageAction.BOMBER_CALC_OVERDUE_DAYS, {})
@action(MessageAction.BOMBER_CALC_SUMMARY2)
def cron_summary2(payload, msg_id):
"""已废弃,定时任务还在执行,具体情况待确定"""
cal_date = date.today() - timedelta(days=1)
employees = Bomber.select(Bomber, Role).join(Role)
auto_call_actions = AutoCallActions.select(AutoCallActions.bomber,
AutoCallActions.result, fn.COUNT(AutoCallActions.id).alias('count')
).where(fn.DATE(AutoCallActions.created_at) == cal_date)
amount_recovered = RepaymentLog.select(RepaymentLog.current_bomber, fn.
SUM(RepaymentLog.principal_part).alias('principal_part'), fn.SUM(
RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(
RepaymentLog.repay_at) == cal_date, RepaymentLog.current_bomber.
is_null(False), RepaymentLog.is_bombed == True)
cleared = Application.select(Application.latest_bomber, fn.COUNT(
Application.id).alias('cleared')).where(fn.DATE(Application.
finished_at) == cal_date, Application.status == ApplicationStatus.
REPAID.value, Application.latest_bomber.is_null(False))
auto_call_actions = auto_call_actions.group_by(AutoCallActions.bomber,
AutoCallActions.result)
amount_recovered = amount_recovered.group_by(RepaymentLog.current_bomber)
cleared = cleared.group_by(Application.latest_bomber)
summary = {e.id: {'cycle': e.role.cycle, 'answered_calls': 0, 'ptp': 0,
'follow_up': 0, 'not_useful': 0, 'cleared': 0, 'amount_recovered':
0} for e in employees}
for a in auto_call_actions:
summary[a.bomber_id]['answered_calls'] += a.count
if a.result == AutoCallResult.PTP.value:
summary[a.bomber_id]['ptp'] += a.count
if a.result == AutoCallResult.FOLLOW_UP.value:
summary[a.bomber_id]['follow_up'] += a.count
if a.result == AutoCallResult.NOT_USEFUL.value:
summary[a.bomber_id]['not_useful'] += a.count
for i in amount_recovered:
        recovered = i.principal_part + i.late_fee_part
        summary[i.current_bomber_id]['amount_recovered'] += recovered
for i in cleared:
summary[i.latest_bomber_id]['cleared'] += i.cleared
insert_args = []
for bomber_id, data in summary.items():
insert_args.append({'bomber': bomber_id, 'cycle': data['cycle'],
'answered_calls': data['answered_calls'], 'ptp': data['ptp'],
'follow_up': data['follow_up'], 'not_useful': data['not_useful'
], 'cleared': data['cleared'], 'amount_recovered': str(data[
'amount_recovered']), 'date': cal_date})
if insert_args:
Summary2.insert_many(insert_args).execute()
<mask token>
@action(MessageAction.BOMBER_DISCOUNT_APPROVED)
def bomber_discount_approved(payload, msg_id):
app_id = payload['id']
msg_type = payload['msg_type']
discount_to = payload['discount_to']
effective_to = payload['effective_to']
application = Application.filter(Application.id == app_id).first()
if not application:
logging.error(
'discount approved msg send failed application %s not found',
app_id)
return
template = Template.select(Template.text, Template.app).where(Template.
type == ConnectType.AUTO_SMS.value, Template.id << Template.
get_auto_sms_tpl(msg_type), Template.app == application.app).first()
if not template:
logging.error('discount approved msg send failed template %s not found'
, msg_type)
return
promised_date = None
if application.promised_date:
promised_date = application.promised_date.strftime('%d-%m-%Y')
tpl_data = {'user_name': application.user_name, 'due_days': application
.overdue_days, 'app_name': application.app, 'phone': application.
user_mobile_no, 'cs_number': cs_number_conf.get(application.app,
'02150202889'), 'promised_date': promised_date, 'discount_to':
discount_to, 'effective_to': effective_to}
content = template.text.format(**tpl_data)
data_list = [{'receiver': '62' + application.user_mobile_no, 'content':
content, 'title': ''}]
send_sms(data_list, msg_type, application.app)
<mask token>
@action(MessageAction.BOMBER_AUTO_CALL_LIST)
def bomber_auto_call_list(payload, msg_id):
with db.atomic():
bomber_dispatch_app()
dispatch_instalment_app()
dis_apps = DispatchApp.select(DispatchApp.application).where(
DispatchApp.status == DisAppStatus.NORMAL.value)
c1_apps = Application.select(Application.id, Application.cycle,
Application.follow_up_date, Application.called_times).where(Application
.status.not_in([ApplicationStatus.REPAID.value, ApplicationStatus.
AB_TEST.value]), Application.cycle == Cycle.C1A.value, Application.
is_rejected == False, Application.promised_date.is_null(True) | (fn
.DATE(Application.promised_date) < datetime.today().date())).order_by(
Application.overdue_days, Application.apply_at)
dis_apps_ids = [da.application_id for da in dis_apps]
insert_args = []
for a in c1_apps:
if a.id in dis_apps_ids:
continue
insert_args.append({'application': a.id, 'cycle': a.cycle,
'follow_up_date': a.follow_up_date, 'called_times': 1 if a.
called_times else 0, 'description': 'init'})
if not insert_args:
        logging.error('no applications need auto call')
with db.atomic():
AutoCallList.delete().execute()
for idx in range(0, len(insert_args), 100):
AutoCallList.insert_many(insert_args[idx:idx + 100]).execute()
for idx in range(0, len(insert_args), 100):
application_list = [i['application'] for i in insert_args[idx:idx +
100]]
send_to_default_q(MessageAction.BOMBER_AUTO_CALL_CONTACT, {
'application_list': application_list})
logging.info('bomber generate auto call list finished')
send_to_default_q(MessageAction.UPDATE_BOMBER_FOR_SPECIAL, {})
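# Hedged sketch of the 100-row batching used above, factored into a helper
# (the name `_chunks` is ours, not part of the original module):
def _chunks(seq, size=100):
    for idx in range(0, len(seq), size):
        yield seq[idx:idx + size]
# e.g. for batch in _chunks(insert_args): AutoCallList.insert_many(batch).execute()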
class ChangeBomberTool(object):
@staticmethod
def in_record(bomber_id, ids, bd):
subquery = Application.select(Application.amount, fn.NOW().alias(
'created_at'), fn.NOW().alias('updated_at'), Application.id.
alias('application_id'), R(str(bomber_id)).alias('bomber_id'),
fn.NOW().alias('entry_at'), R('null').alias('partner_id'), SQL(
'DATE_ADD(CURDATE(),INTERVAL 14 DAY)').alias(
'expected_out_time'), Application.overdue_days.alias(
'entry_overdue_days')).where(Application.status !=
ApplicationStatus.REPAID.value, Application.id << ids)
Application.update(latest_bomber=bomber_id).where(Application.id.
in_(ids)).execute()
application_list = list(subquery)
for idx in range(0, len(application_list), 1000):
applications = application_list[idx:idx + 1000]
insert_args = list(map(partial(lambda_result, dct=bd),
applications))
DispatchAppHistory.insert_many(insert_args).execute()
@staticmethod
def out_record(a, bd):
_id = str(a.id)
DispatchAppHistory.update(out_at=datetime.now(), out_overdue_days=a
.overdue_days, out_principal_pending=a.amount - Decimal(bd[_id]
.get('principal_paid')), out_late_fee_pending=bd[_id].get(
'late_fee') - bd[_id].get('late_fee_paid')).where(
DispatchAppHistory.application == a.id, DispatchAppHistory.
bomber_id == a.latest_bomber_id).execute()
a.last_bomber = a.latest_bomber
a.latest_bomber = None
a.ptp_bomber = None
a.latest_call = None
a.called_times = 0
a.save()
@staticmethod
def classify(l, b):
if len(l) == 1:
return l[0]
_l = filter(lambda x: x['bomber'] != b, l)
return min(_l, key=lambda x: len(x['ids']))
<mask token>
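# Hedged usage sketch for ChangeBomberTool.classify: among candidate buckets,
# skip the bomber being replaced and pick the lightest load (data invented):
def _demo_classify():
    candidates = [{'bomber': 1, 'ids': [11, 12]},
                  {'bomber': 2, 'ids': [21]},
                  {'bomber': 3, 'ids': [31, 32, 33]}]
    # bomber 2 is excluded; bomber 1 carries the fewest ids of the rest
    assert ChangeBomberTool.classify(candidates, 2)['bomber'] == 1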
def bomber_dispatch_app():
try:
c1a_dispatch_app()
except Exception as e:
logging.error('c1a_dispatch_app error:%s' % str(e))
    cycle = {1: 10, 2: 30, 3: 60, 4: 90}
apps = Application.select().where(fn.DATE(Application.C2_entry) == date
.today(), Application.type == ApplicationType.CASH_LOAN.value)
partners = Partner.select().where(Partner.status == PartnerStatus.
NORMAL.value, Partner.cycle == Cycle.C2.value)
apps_ids = [a.id for a in apps]
dispatch_inserts = []
start_index = 0
apps_length = len(apps_ids)
logging.warning('apps length %s' % str(apps_length))
for p in partners:
bombers = Bomber.select().where(Bomber.partner == p.id, Bomber.
status != BomberStatus.OUTER_LEADER.value, Bomber.is_del == 0)
gen = CycleIter([b.id for b in bombers])
existing_list = []
end_index = start_index + int(apps_length * p.app_percentage)
        logging.info('partner slice end index %s' % str(end_index))
if not apps_ids[start_index:end_index]:
continue
bills = BillService().bill_list(application_ids=apps_ids[
start_index:end_index])
bill_dict = {bill['application_id']: bill for bill in bills}
for a_id in apps_ids[start_index:end_index]:
bomber = average_gen(gen, existing_list)
q = DispatchApp.delete().where(DispatchApp.application == a_id
).execute()
dispatch_inserts.append({'application': a_id, 'bomber': bomber,
'partner': p.id})
application = Application.select().where(Application.id == a_id
).first()
application.latest_bomber = bomber
application.status = ApplicationStatus.AB_TEST.value
application.ptp_bomber = None
application.save()
day_next_cycle = cycle.get(application.cycle
) - application.overdue_days
DispatchAppHistory.create(application=a_id, partner_id=p.id,
bomber_id=bomber, entry_at=datetime.now(),
entry_overdue_days=application.overdue_days,
entry_principal_pending=application.amount - Decimal(
bill_dict[a_id].get('principal_paid')),
entry_late_fee_pending=Decimal(bill_dict[a_id].get(
'late_fee')) - Decimal(bill_dict[a_id].get('late_fee_paid')
), expected_out_time=date.today() + timedelta(days=
day_next_cycle))
start_index = end_index
with db.atomic():
for idx in range(0, len(dispatch_inserts), 100):
DispatchApp.insert_many(dispatch_inserts[idx:idx + 100]).execute()
config = SystemConfig.prefetch(SCI.AB_TEST_C2)
c2_bomber = config.get(SCI.AB_TEST_C2, SCI.AB_TEST_C2.default_value)
c2_bomber = get_cash_bomber(c2_bomber, Cycle.C2.value)
c2 = apps_ids[start_index:]
if c2:
bills = BillService().bill_list(application_ids=c2)
else:
bills = []
bill_dict = {bill['application_id']: bill for bill in bills}
    logging.info('c2 AB_test length: %s' % str(len(c2)))
gen = CycleIter(c2_bomber)
existing_list = []
for c in c2:
bomber = average_gen(gen, existing_list)
application = Application.filter(Application.id == c).first()
application.status = ApplicationStatus.AB_TEST.value
application.latest_bomber = bomber
application.ptp_bomber = None
application.save()
day_next_cycle = 46 - application.overdue_days
DispatchAppHistory.create(application=c, bomber_id=bomber, entry_at
=datetime.now(), entry_overdue_days=application.overdue_days,
entry_principal_pending=application.amount - bill_dict[c].get(
'principal_paid', 0), entry_late_fee_pending=bill_dict[c].get(
'late_fee', 0) - bill_dict[c].get('late_fee_paid', 0),
expected_out_time=date.today() + timedelta(days=day_next_cycle))
ab_test_other()
def c1a_dispatch_app():
today = datetime.today().date()
tomorrow = today + timedelta(days=1)
c1a_apps = Application.select().where(Application.status << [
ApplicationStatus.UNCLAIMED.value, ApplicationStatus.PROCESSING.
value], Application.dpd1_entry >= today, Application.dpd1_entry <
tomorrow, Application.type == ApplicationType.CASH_LOAN.value)
all_aids = [a.id for a in c1a_apps]
partners = Partner.select().where(Partner.status == PartnerStatus.
NORMAL.value, Partner.cycle == Cycle.C1A.value)
end = 0
for p in partners:
bombers = Bomber.select().where(Bomber.partner == p.id, Bomber.
is_del == 0)
start = end
end += int(len(all_aids) * p.app_percentage)
aids = all_aids[start:end]
bids = [b.id for b in bombers]
if not bids or not aids:
continue
average_number = get_average_number(len(aids), len(bids))
p_end = 0
for i, bid in enumerate(bids):
p_start = p_end
p_end += average_number[i]
b_aids = aids[p_start:p_end]
with db.atomic():
q = Application.update(latest_bomber=bid, status=
ApplicationStatus.AB_TEST.value).where(Application.id <<
b_aids).execute()
params = {'cycle': Cycle.C1A.value, 'dest_partner_id': p.id,
'application_ids': b_aids, 'dest_bomber_id': bid}
new_in_record(**params)
try:
dispatch_inserts = []
for aid in b_aids:
dispatch_inserts.append({'application': aid, 'bomber':
bid, 'partner': p.id, 'status': DisAppStatus.NORMAL
.value})
if dispatch_inserts:
q = DispatchApp.insert_many(dispatch_inserts).execute()
except Exception as e:
                logging.error('c1a dispatch write to dispatch_app error:%s' % str(e))
<mask token>
def get_cash_bomber(bids, cycle):
cash_bombers = Bomber.select().where(Bomber.id << bids, Bomber.is_del ==
0, Bomber.instalment != cycle)
cash_bids = [b.id for b in cash_bombers]
return cash_bids
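# Hedged pure-Python restatement of get_cash_bomber's filter, over plain
# dicts instead of Bomber rows (illustrative only):
def _filter_cash_bombers(bombers, cycle):
    return [b['id'] for b in bombers
            if not b['is_del'] and b['instalment'] != cycle]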
<mask token>
def out_and_in_record_instalment(**kwargs):
if not kwargs.get('application_ids'):
return
out_q = DispatchAppHistory.update(out_at=fn.NOW()).where(
DispatchAppHistory.application << kwargs['application_ids'],
DispatchAppHistory.out_at.is_null(True)).execute()
    cycle_period = {1: '10', 2: '30', 3: '60', 4: '90'}
period = cycle_period.get(kwargs['cycle'], '90 + t1.overdue_days')
kwargs['dest_partner_id'] = kwargs.get('dest_partner_id') or 'null'
subquery = Application.select(Application.amount, fn.NOW().alias(
'created_at'), fn.NOW().alias('updated_at'), Application.id.alias(
'application_id'), R(str(kwargs['dest_bomber_id'])).alias(
'bomber_id'), fn.NOW().alias('entry_at'), Application.overdue_days.
alias('entry_overdue_days'), R(str(kwargs['dest_partner_id'])).
alias('partner_id'), SQL(
'DATE_ADD(CURDATE(),INTERVAL (%s - t1.overdue_days) DAY)' % period)
.alias('expected_out_time')).where(Application.status !=
ApplicationStatus.REPAID.value, Application.id << kwargs[
'application_ids'])
application_list = list(subquery)
for idx in range(0, len(application_list), 50):
applications = application_list[idx:idx + 50]
app_ids = [i.application_id for i in applications]
overdue_bills = OverdueBill.select().where(OverdueBill.
collection_id << app_ids)
sub_bill_ids = [ob.sub_bill_id for ob in overdue_bills]
bill_list = BillService().sub_bill_list(bill_sub_ids=sub_bill_ids)
insert_args = lambad_instalment_result(bill_list, applications)
if not insert_args:
continue
DispatchAppHistory.insert_many(insert_args).execute()
<mask token>
def c1b_dispatch_in_record(**kwargs):
app_ids = kwargs.get('apps')
partner_id = kwargs.get('partner_id', 'null')
bill_dict = kwargs.get('bill_dict')
period = kwargs.get('period')
bomber_id = kwargs.get('bomber_id')
if not all([app_ids, partner_id, bill_dict, period]):
return False
bill_dict = {str(k): v for k, v in bill_dict.items()}
subquery = Application.select(Application.amount, fn.NOW().alias(
'created_at'), fn.NOW().alias('updated_at'), Application.id.alias(
'application_id'), R(str(bomber_id)).alias('bomber_id'), fn.NOW().
alias('entry_at'), Application.overdue_days.alias(
'entry_overdue_days'), R(str(partner_id)).alias('partner_id'), SQL(
'DATE_ADD(CURDATE(),INTERVAL (%s - t1.overdue_days) DAY)' % period)
.alias('expected_out_time')).where(Application.id << app_ids)
application_list = list(subquery)
for idx in range(0, len(application_list), 1000):
applications = application_list[idx:idx + 1000]
insert_args = list(map(partial(lambda_result, dct=bill_dict),
applications))
DispatchAppHistory.insert_many(insert_args).execute()
<mask token>
def check_call_history(application):
app_create_at = application.created_at + timedelta(days=4)
if datetime.today().date() > app_create_at.date():
call_actions = CallActions.select().where(CallActions.type == 0,
CallActions.application == application.id, CallActions.
created_at > datetime.now() - timedelta(days=5))
for call in call_actions:
if call.phone_status == PhoneStatus.CONNECTED.value:
return True
return False
return True
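# Hedged restatement of check_call_history's rule without the ORM: within
# 4 days of creation every application passes; after that it passes only if
# some type-0 call in the last 5 days actually connected.
def _passes_call_history(created_at, recent_phone_statuses):
    if datetime.today().date() <= (created_at + timedelta(days=4)).date():
        return True
    return PhoneStatus.CONNECTED.value in recent_phone_statuses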
@action(MessageAction.BOMBER_SCAVENGER)
def scavenger(payload, msg_id):
scavenger_time = -60
    scavenger_conf = SystemConfig.select().where(SystemConfig.key ==
        'SCAVENGER_TIME').first()
    if scavenger_conf and scavenger_conf.value.isdigit():
        scavenger_time = -int(scavenger_conf.value)
update_auto_call_list = AutoCallList.update(status=AutoListStatus.
PENDING.value, description='scavenger').where(AutoCallList.status ==
AutoListStatus.PROCESSING.value, AutoCallList.updated_at < datetime
.now() + timedelta(minutes=scavenger_time))
count = update_auto_call_list.execute()
logging.info('scavenger processed %s application', count)
mail_box_scavenger_time = -30
mail_box_scavenger = SystemConfig.select().where(SystemConfig.key ==
'MAIL_BOX_SCAVENGER_TIME').first()
if mail_box_scavenger and mail_box_scavenger.value.isdigit():
mail_box_scavenger_time = -int(mail_box_scavenger.value)
update_mail_box_call_list = AutoCallList.update(status=AutoListStatus.
PENDING.value).where(AutoCallList.status == AutoListStatus.MAILBOX.
value, AutoCallList.updated_at < datetime.now() + timedelta(minutes
=mail_box_scavenger_time))
mail_box_count = update_mail_box_call_list.execute()
logging.info('scavenger update mail box %s', mail_box_count)
update_auto_ivr = AutoIVR.update(status=AutoIVRStatus.AVAILABLE.value
).where(AutoIVR.status == AutoIVRStatus.PROCESSING.value, AutoIVR.
updated_at < datetime.now() + timedelta(minutes=-30))
ivr_result = update_auto_ivr.execute()
logging.info('scavenger update %s ivr' % ivr_result)
<mask token>
@action(MessageAction.REPORT_BOMBER_COLLECTION)
def report_bomber_collection(payload, msg_id):
start_date = ReportCollection.select(fn.MAX(ReportCollection.apply_date)
).scalar()
now = datetime.now()
if start_date and str(start_date) == str(now)[:10]:
return
end_date = str(now + timedelta(days=1))[:10]
start_date = str(now)[:10]
dct = dict(zip(CycleList.sql_values(), CycleList.table_values()))
all_overdue_loan_sql1 = """
SELECT ba.cycle, COUNT(ba.id)
FROM bomber.auto_call_list ba
GROUP BY 1;
"""
s_data1 = readonly_db.execute_sql(all_overdue_loan_sql1).fetchall()
d1 = OperatedDict(s_data1)
all_overdue_loan_sql2 = """
SELECT ba.cycle, COUNT(ba.id)
FROM bomber.auto_call_list ba
WHERE DATE(ba.follow_up_date) > CURDATE()
AND ba.called_counts = 0
GROUP BY 1;
"""
s_data2 = readonly_db.execute_sql(all_overdue_loan_sql2).fetchall()
d2 = OperatedDict(s_data2)
overdue_loans_entered_into_predict_call_system_sql = """
SELECT ba.cycle, COUNT(ba.id)
FROM bomber.auto_call_list ba
WHERE ba.called_counts >= 1
GROUP BY 1;
"""
s_data3 = readonly_db.execute_sql(
overdue_loans_entered_into_predict_call_system_sql).fetchall()
d3 = OperatedDict(s_data3)
loans_completed_sql = """
SELECT ba.cycle, COUNT(DISTINCT ba.application_id)
FROM bomber.auto_call_actions ba
WHERE DATE(ba.created_at) = CURDATE()
GROUP BY 1;
"""
s_data4 = readonly_db.execute_sql(loans_completed_sql).fetchall()
d4 = OperatedDict(s_data4)
connected_calls_automatic_sql = """
SELECT ba.cycle, COUNT(ba.application_id)
FROM bomber.auto_call_actions ba
WHERE DATE(ba.created_at) = CURDATE()
GROUP BY 1;
"""
s_data5 = readonly_db.execute_sql(connected_calls_automatic_sql).fetchall()
d5 = OperatedDict(s_data5)
connected_calls_manual_sql = """
SELECT bb.cycle, COUNT(bb.id)
FROM bomber.bombing_history bb
WHERE DATE(bb.created_at) = curdate()
AND (bb.bomber_id < 150 OR bb.bomber_id > 200)
GROUP BY bb.cycle;
"""
s_data6 = readonly_db.execute_sql(connected_calls_manual_sql).fetchall()
d6 = OperatedDict(s_data6)
    logging.info('Fetched report data directly from the database.')
c1 = d1 - d2
c2 = d3
c3 = c2 / c1
c4 = d4
c5 = c4 / c2
c6 = d5
c7 = c6 / c4
c8 = d6
c9 = OperatedDict(get_agent())
c10 = (c6 + c8) / c9
try:
c11 = average_call_duration_team(start_date, end_date)
except AttributeError:
c11 = {}
lst = []
for i in range(1, 5):
lst.append({'apply_date': start_date, 'cycle': dct[i],
'all_overdue_loan': c1.get(i, 0),
'overdue_loans_entered_into_predict_call_system': c2.get(i, 0),
'of_overdue_loans_entered_into_predict_call_system': round(c3.
get(i, 0) * 100, 1), 'loans_completed': c4.get(i, 0),
'of_completed_loans_in_predict_call_system': round(c5.get(i, 0) *
100, 1), 'connected_calls_automatic': c6.get(i, 0),
'connected_calls_automatic_completed_loans': round(c7.get(i, 0),
1), 'connected_calls_manual': c8.get(i, 0), 'agent': c9.get(i,
0), 'average_calls_agent': round(c10.get(i, 0), 1),
'average_call_duration_team': round(c11.get(i, 0), 1)})
ReportCollection.insert_many(lst).execute()
logging.info('report_bomber_collection:Done!')
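# Hedged sketch: `OperatedDict` (defined elsewhere in this project) appears
# to support element-wise arithmetic over cycle-keyed counts, as used above.
# A minimal stand-in under that assumption:
class _CycleDict(dict):
    def __sub__(self, other):
        return _CycleDict({k: v - other.get(k, 0) for k, v in self.items()})
    def __add__(self, other):
        keys = set(self) | set(other)
        return _CycleDict({k: self.get(k, 0) + other.get(k, 0) for k in keys})
    def __truediv__(self, other):
        return _CycleDict({k: (v / other[k] if other.get(k) else 0)
                           for k, v in self.items()})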
@action(MessageAction.BOMBER_AUTO_CALL_LIST_RECORD)
def bomber_auto_call_list_record(payload, msg_id):
"""记录一年的auto_call_list,删除前一天的数据,增加今天的数据"""
now = datetime.now()
if now > datetime.strptime('2020-02-01', '%Y-%m-%d'):
date_sql = """
SELECT DATE(created_at) FROM auto_call_list_record
GROUP BY DATE(created_at) limit 1
"""
del_date = db.execute_sql(date_sql).fetchone()[0]
del_sql = """
DELETE FROM auto_call_list_record WHERE date(created_at) = %s
"""
db.execute_sql(del_sql, [del_date])
sql = """
INSERT INTO auto_call_list_record
SELECT * FROM auto_call_list
"""
db.execute_sql(sql)
logging.info('bomber_auto_call_list_record done')
<mask token>
def new_out_record(**kwargs):
if not kwargs['application_ids']:
return
DispatchAppHistory.update(out_at=fn.NOW()).where(DispatchAppHistory.
bomber_id == kwargs['src_bomber_id'], DispatchAppHistory.
application << kwargs['application_ids'], DispatchAppHistory.out_at
.is_null(True)).execute()
if kwargs.get('month_dispatch'):
return
try:
Application.update(ptp_bomber=None).where(Application.id << kwargs[
'application_ids']).execute()
except Exception as e:
logging.error('new_out_record error:aids:%s,error:%s' % (kwargs[
'application_ids'], str(e)))
<mask token>
def end_old_application(old_app, paid=False):
if paid:
if old_app.status == OldLoanStatus.WAITING.value:
old_app.status = OldLoanStatus.PAID.value
old_app.save()
return
if old_app.status == OldLoanStatus.PROCESSING.value:
old_app.status = OldLoanStatus.PAID.value
old_app.save()
return old_app.application_id
end_date = old_app.end_date
now = datetime.now()
if now >= max(end_date, old_app.promised_date or now):
old_app.status = OldLoanStatus.FINISHED.value
old_app.save()
return old_app.application_id
<mask token>
def start_old_application(old_app, cancel=False):
application_id = old_app.application_id
if cancel and old_app.status == OldLoanStatus.PAID.value:
now = datetime.now()
if old_app.start_date is None:
old_app.status = OldLoanStatus.WAITING.value
elif now >= max(old_app.end_date, old_app.promised_date or now):
old_app.status = OldLoanStatus.FINISHED.value
DispatchAppHistory.update(out_at=max(old_app.end_date, old_app.
promised_date or now)).where(DispatchAppHistory.bomber_id ==
old_app.bomber_id, DispatchAppHistory.application ==
application_id).execute()
else:
old_app.status = OldLoanStatus.PROCESSING.value
DispatchAppHistory.update(out_at=None).where(DispatchAppHistory
.bomber_id == old_app.bomber_id, DispatchAppHistory.
application == application_id).execute()
old_app.save()
return
application = Application.get_or_none(Application.id == application_id,
Application.status != ApplicationStatus.REPAID.value, Application.
overdue_days > 90, Application.promised_date.is_null(True) | (fn.
DATE(Application.promised_date) < datetime.today().date()))
if not application:
        logging.error('Cannot set old application %s to start collecting',
            application_id)
return
if old_app.status in OldLoanStatus.no_available():
logging.info('%s has finished or paid', old_app.application_id)
return
config = SystemConfig.prefetch(SCI.OLD_APP_PERIOD)
sp = config.get(SCI.OLD_APP_PERIOD, SCI.OLD_APP_PERIOD.default_value)
old_app_bomber = SpecialBomber.OLD_APP_BOMBER.value
old_app.status = OldLoanStatus.PROCESSING.value
old_app.bomber_id = old_app_bomber
old_app.start_date = datetime.now()
if not old_app.end_date:
old_app.end_date = datetime.now() + timedelta(days=sp)
old_app.save()
in_record(dist_partner_id=None, dist_bomber_id=old_app_bomber,
application_ids=[old_app.application_id], expected_out_time=str(
old_app.end_date))
<mask token>
def run_member_sql(sql):
result = [0, 0]
try:
cursor = readonly_db.get_cursor()
cursor.execute(sql)
sql_result = cursor.fetchone()
if sql_result:
result = sql_result
except Exception as e:
        logging.info('run sql error: %s, sql: %s' % (str(e), str(sql)))
return result
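# Hedged usage sketch for run_member_sql; the table and columns below are
# assumptions for illustration, not a query taken from this module:
# paid_cnt, paid_sum = run_member_sql(
#     "select count(1), sum(principal_part) from bomber.repayment_log")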
<mask token>
def get_before_bomber(date_time):
begin_time = str(date_time - timedelta(days=7))
end_time = str(date_time)
old_sql = (
"""
select
sum(principal_pending+late_fee_pending+interest_pending) as amount
from
bill_java.overdue bb
where
created_at>'%s'
and created_at<date_add('%s',interval 1 day)
and overdue_days in (2,3)
"""
% (begin_time, begin_time))
old_data = run_one_sql(old_sql)
new_sql = (
"""
select
sum(principal_pending+late_fee_pending+interest_pending) as amount
from
bill_java.overdue bb
where
created_at> '%s'
and created_at<'%s'
and overdue_days=1;
"""
% (begin_time, end_time))
new_data = run_one_sql(new_sql)
dpd4_sql = (
"""
select
sum(principal_pending+late_fee_pending+interest_pending) as amount
from
bill_java.overdue bb
where
created_at>date_add('%s',interval 1 day)
and created_at< date_add('%s',interval 1 day)
and overdue_days=4;
"""
% (begin_time, end_time))
dpd4_data = run_one_sql(dpd4_sql)
dpd2_sql = (
"""
select
sum(principal_pending+late_fee_pending+interest_pending) as amount
from
bill_java.overdue bb
where
created_at>'%s'
and created_at< date_add('%s',interval 1 day)
and overdue_days in (2,3)
"""
% (end_time, end_time))
dpd2_data = run_one_sql(dpd2_sql)
all_money = old_data + new_data
repayment = all_money - dpd4_data - dpd2_data
pro = 0
if all_money:
pro = repayment / all_money * 100
RepaymentReport.create(time=begin_time, cycle=0, all_money=all_money,
proportion=pro, repayment=repayment)
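# Hedged worked example of the arithmetic above (values invented): if the
# old + new pending total is 1_000_000 and 250_000 is still outstanding at
# dpd4/dpd2, repayment is 750_000 and the stored proportion is 75.0 (%):
def _demo_before_bomber_rate():
    all_money, dpd4, dpd2 = 1_000_000, 150_000, 100_000
    repayment = all_money - dpd4 - dpd2
    assert repayment / all_money * 100 == 75.0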
<mask token>
def get_c1a_into_rate(date_time):
begin_time = date_time - timedelta(days=1)
end_time = date_time
for is_first_loan in FIRSTLOAN.values():
begin_date = begin_time
end_date = end_time
all_money_sql = (
"""
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id as application_id,ba.C1A_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.C1A_entry >= '%s'
and ba.C1A_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
inner join bill_java.overdue o
on a.application_id=o.application_id
and date(a.cdt)=date(o.created_at)
"""
% (begin_date, end_date, is_first_loan))
all_money = run_one_sql(all_money_sql)
begin_date = date_time - timedelta(days=19)
repayment_sql = (
"""
select
sum(b.principal_part+b.late_fee_part) as paid_amount,
cdt
from
(select
br.principal_part, br.late_fee_part,
date(cdt) as cdt, br.repay_at, br.application_id
from (
select ba.id, ba.C1A_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.C1A_entry >= '%s'
and ba.C1A_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 1 and date(br.repay_at) >= date(a.cdt)
group by 4, 5) b
group by 2
"""
% (begin_date, end_date, is_first_loan))
repayment = run_all_sql(repayment_sql)
if not repayment:
return
RepaymentReportInto.create(time=end_date - timedelta(days=1), cycle
=Cycle.C1A.value, all_money=round(all_money, 3), proportion='0',
repayment=0, is_first_loan=is_first_loan, contain_out=
ContainOut.CONTAIN.value)
for d in repayment:
repay = d[0] / 1000000
report = RepaymentReportInto.filter(RepaymentReportInto.time ==
d[1], RepaymentReportInto.cycle == Cycle.C1A.value,
RepaymentReportInto.is_first_loan == is_first_loan).first()
if report:
report.repayment = round(repay, 3)
pro = repay / report.all_money * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
<mask token>
def get_c2_into_rate(date_time):
begin_time = date_time - timedelta(days=1)
end_time = date_time
for is_first_loan in FIRSTLOAN.values():
begin_date = begin_time
end_date = end_time
all_money_sql = (
"""
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id,c2_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c2_entry >= '%s'
and ba.c2_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
inner join bill_java.overdue o
on a.id=o.application_id
and date(a.cdt)=date(o.created_at)
"""
% (begin_date, end_date, is_first_loan))
all_money = run_one_sql(all_money_sql)
not_contain_sql = (
"""
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id,c2_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c2_entry >= '%s'
and ba.c2_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
and not exists(select 1 from bomber.dispatch_app_history bd
where bd.application_id=ba.id
and bd.partner_id=1)
) a
inner join bill_java.overdue o on a.id=o.application_id
and date(a.cdt)=date(o.created_at)
"""
% (begin_date, end_date, is_first_loan))
not_contain_money = run_one_sql(not_contain_sql)
begin_date = date_time - timedelta(days=37)
repayment_sql = (
"""
select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et
from
(select br.principal_part,br.late_fee_part,
date(a.c2_entry) as et, br.application_id, br.repay_at
from (
select ba.id, ba.c2_entry
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c2_entry >= '%s'
and ba.c2_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 3
group by 4, 5) b
group by 2
"""
% (begin_date, end_date, is_first_loan))
repayment = run_all_sql(repayment_sql)
not_contain_repay_sql = (
"""
select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et
from
(select br.principal_part,br.late_fee_part,
date(a.c2_entry) as et, br.application_id, br.repay_at
from (
select ba.id, ba.c2_entry
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c2_entry >= '%s'
and ba.c2_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
and not exists(select 1 from bomber.dispatch_app_history bd
where bd.application_id=ba.id
and bd.partner_id=1)
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 3
group by 4, 5) b
group by 2
"""
% (begin_date, end_date, is_first_loan))
not_contain_repay = run_all_sql(not_contain_repay_sql)
if not not_contain_money and repayment:
return
for i in ContainOut.values():
if i == ContainOut.NOT_CONTAIN.value:
RepaymentReportInto.create(time=end_date - timedelta(days=1
), cycle=Cycle.C2.value, all_money=round(
not_contain_money, 3), proportion='0', repayment=0,
is_first_loan=is_first_loan, contain_out=ContainOut.
NOT_CONTAIN.value)
for repay in not_contain_repay:
repay_money = Decimal(0)
if repay[0]:
repay_money = repay[0]
repay_money = repay_money / 1000000
report = RepaymentReportInto.filter(RepaymentReportInto
.time == repay[1], RepaymentReportInto.
is_first_loan == is_first_loan, RepaymentReportInto
.contain_out == i, RepaymentReportInto.cycle ==
Cycle.C2.value).first()
if report and report.all_money:
report.repayment = round(repay_money, 3)
pro = repay_money / report.all_money * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
elif i == ContainOut.CONTAIN.value:
RepaymentReportInto.create(time=end_date - timedelta(days=1
), cycle=Cycle.C2.value, all_money=round(all_money, 3),
proportion='0', repayment=0, is_first_loan=
is_first_loan, contain_out=ContainOut.CONTAIN.value)
for repay in repayment:
repay_money = 0
if repay[0]:
repay_money = repay[0] / 1000000
report = RepaymentReportInto.filter(RepaymentReportInto
.time == repay[1], RepaymentReportInto.
is_first_loan == is_first_loan, RepaymentReportInto
.contain_out == i, RepaymentReportInto.cycle ==
Cycle.C2.value).first()
if report and report.all_money:
report.repayment = round(repay_money, 3)
pro = repay_money / report.all_money * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
def get_c3_into_rate(date_time):
begin_time = date_time - timedelta(days=1)
end_time = date_time
for is_first_loan in FIRSTLOAN.values():
begin_date = begin_time
end_date = end_time
all_money_sql = (
"""
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id, ba.c3_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c3_entry >= '%s'
and ba.c3_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
inner join bill_java.overdue o on a.id=o.application_id
and date(a.cdt)=date(o.created_at)
"""
% (begin_date, end_date, is_first_loan))
all_money = run_one_sql(all_money_sql)
begin_date = date_time - timedelta(days=30)
repayment_sql = (
"""
select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et
from
(select br.principal_part,br.late_fee_part,
date(a.c3_entry) as et, br.application_id, br.repay_at
from (
select ba.id, ba.c3_entry
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c3_entry >= '%s'
and ba.c3_entry < '%s'
and ba.type = 0
and da.is_first_loan = '%s'
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 4
group by 4, 5) b
group by 2
"""
% (begin_date, end_date, is_first_loan))
repayment = run_all_sql(repayment_sql)
RepaymentReportInto.create(time=end_date - timedelta(days=1), cycle
=Cycle.C3.value, all_money=round(all_money, 3), proportion='0',
repayment=0, is_first_loan=is_first_loan, contain_out=
ContainOut.CONTAIN.value)
if not repayment:
return
for repay in repayment:
repay_money = Decimal(0)
if repay[0]:
repay_money = repay[0]
repay_money = repay_money / 1000000
report = RepaymentReportInto.filter(RepaymentReportInto.time ==
repay[1], RepaymentReportInto.cycle == Cycle.C3.value,
RepaymentReportInto.is_first_loan == is_first_loan).first()
if report:
report.repayment = repay_money
pro = 0
if report.all_money and int(report.all_money):
pro = repay_money / report.all_money * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
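# Hedged worked example of the scaling above: repayment_log amounts appear
# to be stored in millionths, so a raw 75_000_000 becomes 75.0 units and,
# against all_money = 300.0, a proportion string of '25.0':
def _demo_into_rate_scaling():
    repay_money = 75_000_000 / 1_000_000
    assert repay_money == 75.0
    assert str(round(repay_money / 300.0 * 100, 2)) == '25.0'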
<mask token>
def get_static_bomber(begin_date):
active_date = begin_date - timedelta(days=8)
bombers = BomberR.select(BomberR.id, BomberR.role.alias('role'),
BomberR.last_active_at.alias('active')).where(BomberR.
last_active_at > active_date, BomberR.role << [1, 2, 4, 5, 6, 8, 9])
summary = []
for bomber in bombers:
summary.append({'time': begin_date, 'bomber_id': bomber.id, 'cycle':
bomber.role.cycle, 'work_ind': 0})
SummaryBomber.insert_many(summary).execute()
<mask token>
@time_logger
def get_call_and_made(end_date, begin_date, real_time_query=False):
call_sql = (
"""
select
bomber_id,
count(case when relationship is not null then application_id end)
as 'call_cnt',
count(distinct case when relationship is not null then
application_id end) as 'call_case',
count(case when phone_status=4 then application_id end) as 'connect',
count(distinct case when phone_status=4 then application_id end)
as 'connect_case'
from (
select bomber_id,application_id,phone_status, cycle, relationship
from bomber.call_actions ba
where created_at>'%s' and created_at<'%s'
and type in (0, 1)
) a
group by 1
"""
% (begin_date, end_date))
calls = run_all_sql(call_sql)
if real_time_query:
return calls
for call in calls:
bomber, call_cnt, case_made, connect_cnt, case_connect = call
SummaryBomber.update(case_made_cnt=case_made, call_cnt=call_cnt,
call_connect_cnt=connect_cnt, case_connect_cnt=case_connect).where(
SummaryBomber.bomber_id == bomber, SummaryBomber.time == begin_date
).execute()
return calls
@time_logger
def get_claimed_cnt(end_date, begin_date, real_time_query=False):
table_date = begin_date - timedelta(days=30)
claimed_sql = (
"""
SELECT
COUNT( `t1`.`application_id` ) AS cnt,
`t1`.`bomber_id` AS bomber_id
FROM
`dispatch_app_history` AS t1
WHERE
( `t1`.`out_at` > '%s' OR `t1`.`out_at` IS null )
AND ( `t1`.`bomber_id` != 1000 )
AND ( `t1`.`partner_id` IS null )
AND ( `t1`.`entry_at` > '%s' )
AND ( `t1`.`entry_at` < '%s' )
GROUP BY
`t1`.`bomber_id`
"""
% (begin_date, table_date, end_date))
claimeds = run_all_sql(claimed_sql)
if real_time_query:
return claimeds
for claimed in claimeds:
cnt, bomber_id = claimed
SummaryBomber.update(claimed_cnt=cnt).where(SummaryBomber.time ==
begin_date, SummaryBomber.bomber_id == bomber_id).execute()
return claimeds
def get_sms_data(end_data, begin_data):
all_sms = ConnectHistoryR.select(ConnectHistoryR.operator.alias(
'bomber_id'), fn.COUNT(ConnectHistoryR.application).alias('sms_send')
).where(ConnectHistoryR.created_at > begin_data, ConnectHistoryR.
created_at < end_data, ConnectHistoryR.type.in_(ConnectType.sms())
).group_by(ConnectHistoryR.operator)
for sms in all_sms:
SummaryBomber.update(sms_cnt=sms.sms_send).where(SummaryBomber.time ==
begin_data, SummaryBomber.bomber_id == sms.bomber_id).execute()
return all_sms
<mask token>
@action(MessageAction.SUMMARY_NEW)
def summary_new(payload, msg_id):
end_date = date.today()
begin_date = end_date - timedelta(days=1)
worker_log = WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs')
).where(WorkerLog.created_at >= end_date, WorkerLog.action ==
'SUMMARY_NEW').first()
if worker_log and worker_log.logs >= 5:
return
get_active_bomber(begin_date)
get_call_and_made(end_date, begin_date)
get_claimed_cnt(end_date, begin_date)
get_sms_data(end_date, begin_date)
get_ptp_data(end_date, begin_date)
get_recover_amount(end_date, begin_date)
get_unfollowed(begin_date)
get_unfollowed_call(begin_date)
def get_new_case_amount(begin_date, end_date):
all_case = DispatchAppHistoryR.select(fn.SUM(DispatchAppHistoryR.
entry_late_fee_pending + DispatchAppHistoryR.
entry_principal_pending).alias('pending'), DispatchAppHistoryR.
bomber_id, fn.COUNT(DispatchAppHistoryR.application).alias('cnt')
).where(DispatchAppHistoryR.entry_at > begin_date,
DispatchAppHistoryR.entry_at < end_date, DispatchAppHistoryR.
partner_id.is_null(True)).group_by(DispatchAppHistoryR.bomber_id)
for case in all_case:
SummaryBomber.update(new_case_amount_sum=case.pending, new_case_cnt
=case.cnt).where(SummaryBomber.bomber_id == case.bomber_id,
SummaryBomber.time == begin_date).execute()
return all_case
<mask token>
def get_kp_today(begin_date, end_date):
sql = (
"""
select bomber_id, count(distinct application_id)
from(
SELECT bomber_id, application_id
FROM bomber.auto_call_actions a
WHERE promised_date >= '%s' AND created_at < '%s'
AND EXISTS(select 1 from bomber.application ba
where a.application_id=ba.id
and (ba.finished_at is null
or ba.finished_at > '%s'))
UNION
SELECT bomber_id, application_id
FROM bomber.bombing_history b
WHERE promised_date >= '%s' AND created_at < '%s'
AND EXISTS(select 1 from bomber.application ba
where b.application_id=ba.id
and (ba.finished_at is null
or ba.finished_at > '%s'))) result
GROUP BY 1
"""
% (begin_date, end_date, begin_date, begin_date, end_date, begin_date)
)
kp_today = run_all_sql(sql)
for kp in kp_today:
SummaryBomber.update(KP_today_cnt=kp[1]).where(SummaryBomber.time ==
begin_date, SummaryBomber.bomber_id == kp[0]).execute()
<mask token>
def get_new_case_cleared(begin_date, end_date):
sql = (
"""
SELECT
ptp_bomber AS bomber_id,
sum( paid_amount ) AS pending
FROM
(SELECT
br.late_fee_part + br.principal_part AS paid_amount,
br.ptp_bomber
FROM bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.c1b_entry ) = date( br.repay_at )
AND br.ptp_bomber is not null
WHERE ba.c1b_entry > '%s'
AND ba.c1b_entry < '%s'
) a
GROUP BY 1
UNION
SELECT
ptp_bomber AS bomber_id,
sum( paid_amount ) AS pending
FROM
(SELECT
br.late_fee_part + br.principal_part AS paid_amount,
br.ptp_bomber
FROM bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.c2_entry ) = date( br.repay_at )
AND br.ptp_bomber is not null
WHERE ba.c2_entry > '%s'
AND ba.c2_entry < '%s'
) a
GROUP BY 1
UNION
SELECT
ptp_bomber AS bomber_id,
sum( paid_amount ) AS pending
FROM
(SELECT
br.late_fee_part + br.principal_part AS paid_amount,
br.ptp_bomber
FROM
bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.c3_entry ) = date( br.repay_at )
AND br.ptp_bomber is not null
WHERE ba.c3_entry > '%s'
AND ba.c3_entry < '%s'
) a
GROUP BY 1
"""
% (begin_date, end_date, begin_date, end_date, begin_date, end_date))
case_cleared_sums = run_all_sql(sql)
for clear in case_cleared_sums:
SummaryBomber.update(new_case_cleared_sum=clear[1]).where(
SummaryBomber.time == begin_date, SummaryBomber.bomber_id ==
clear[0]).execute()
@time_logger
def get_new_case_call(begin_date, end_date, real_query_time=False):
sql = (
"""
SELECT
bd.bomber_id,
count( DISTINCT bd.application_id )
FROM
bomber.dispatch_app_history bd
INNER JOIN bomber.call_actions bc
ON bd.application_id = bc.application_id
AND bd.bomber_id = bc.bomber_id
AND date( bd.entry_at ) = date( bc.created_at )
WHERE
entry_at > '%s'
AND entry_at < '%s'
AND partner_id IS NULL
GROUP BY 1
"""
% (begin_date, end_date))
new_case_calls = run_all_sql(sql)
if real_query_time:
return new_case_calls
for call in new_case_calls:
SummaryBomber.update(new_case_call_cnt=call[1]).where(SummaryBomber
.bomber_id == call[0], SummaryBomber.time == begin_date).execute()
return new_case_calls
@time_logger
def get_calltime_avg(begin_date, end_date, real_query_time=False):
autos_sql = (
"""
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS auto_talkduraction,
count( 1 ) AS auto_jt_cnt
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.username = bb.username
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND an.username != ' '
AND an.STATUS = 'ANSWERED'
AND bb.id IS NOT NULL
GROUP BY 1
"""
% (begin_date, end_date))
autos = run_all_sql(autos_sql)
manual_sql = (
"""
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS manual_talkduraction,
count( 1 ) AS manual_jt_cnt
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) )
AND an.STATUS = 'ANSWERED'
AND bb.id IS NOT NULL
AND an.recording is not null
GROUP BY 1
"""
% (begin_date, end_date, '5%', '3%'))
manuals = run_all_sql(manual_sql)
datas = autos + manuals
result = {}
for data in datas:
if data[0] in result.keys():
result[data[0]][0] += data[1]
result[data[0]][1] += data[2]
continue
result[data[0]] = [data[1], data[2]]
if real_query_time:
return result
for key, value in result.items():
SummaryBomber.update(calltime_case_sum=value[0], calltime_case_cnt=
value[1], calltime_case_avg=value[0] / value[1] if value[1] else 0
).where(SummaryBomber.time == begin_date, SummaryBomber.
bomber_id == key).execute()
return result
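# Hedged sketch of the merge step above: auto and manual rows are keyed by
# bomber_id and summed field-wise before the average is computed.
def _merge_call_rows(rows):
    merged = {}
    for bomber_id, duration, cnt in rows:
        total = merged.setdefault(bomber_id, [0, 0])
        total[0] += duration
        total[1] += cnt
    return merged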
def get_no_calltime_avg(begin_date, end_date):
manual_sql = (
"""
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS manual_talkduraction,
count( 1 ) AS manual_jt_cnt
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) )
AND (an.status!='ANSWERED' or an.recording is null)
AND bb.id IS NOT NULL
GROUP BY 1
"""
% (begin_date, end_date, '5%', '3%'))
manuals = run_all_sql(manual_sql)
for data in manuals:
SummaryBomber.update(calltime_no_case_sum=data[1],
calltime_no_case_cnt=data[2], calltime_no_case_avg=data[1] /
data[2] if data[2] else 0).where(SummaryBomber.time ==
begin_date, SummaryBomber.bomber_id == data[0]).execute()
@time_logger
def get_calltime_sum(begin_date, end_date, real_query_time=False):
autos_sql = (
"""
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS auto_talkduraction
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.username = bb.username
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND an.username != ' '
AND bb.id IS NOT NULL
GROUP BY 1
"""
% (begin_date, end_date))
autos = run_all_sql(autos_sql)
manual_sql = (
"""
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS manual_talkduraction
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) )
AND bb.id IS NOT NULL
GROUP BY 1
"""
% (begin_date, end_date, '5%', '3%'))
manuals = run_all_sql(manual_sql)
datas = autos + manuals
result = {}
for data in datas:
if data[0] in result.keys():
result[data[0]] += data[1]
continue
result[data[0]] = data[1]
if real_query_time:
return result
for key, value in result.items():
SummaryBomber.update(calltime_sum=value).where(SummaryBomber.time ==
begin_date, SummaryBomber.bomber_id == key).execute()
return result
<mask token>
@action(MessageAction.UPDATE_SUMMARY_NEW)
def update_summary_new(payload, msg_id):
end_date = date.today()
begin_date = end_date - timedelta(days=1)
worker_log = WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs')
).where(WorkerLog.created_at >= end_date, WorkerLog.action ==
'UPDATE_SUMMARY_NEW').first()
if worker_log and worker_log.logs >= 5:
return
get_new_case_amount(begin_date, end_date)
get_kp_cleared(begin_date, end_date)
get_kp_today(begin_date, end_date)
get_ptp_cnt(begin_date, end_date)
get_ptp_call_cnt(begin_date, end_date)
get_new_case_cleared(begin_date, end_date)
get_new_case_call(begin_date, end_date)
get_calltime_avg(begin_date, end_date)
get_no_calltime_avg(begin_date, end_date)
get_calltime_sum(begin_date, end_date)
def get_cycle_claimed(begin_date, end_date):
sql = (
"""
select cycle,count(1)
from bomber.application where cycle in (1,2,3,4)
and (finished_at is null or (finished_at>'%s'))
and created_at>'2018-09-01'
group by 1
"""
% begin_date)
result = run_all_sql(sql)
return result
@time_logger
def cycle_new_case(begin_date, end_date, real_time_query=False):
sql = (
"""
SELECT
1 AS cycle,
count( ba.id ),
sum( bo.principal_pending + late_fee_pending +
interest_pending ) AS pending
FROM
bomber.application ba
INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id
AND date( ba.created_at ) = bo.which_day_overdue
WHERE
ba.created_at > '%s'
AND ba.created_at < '%s'
UNION
SELECT
2 AS cycle,
count( 1 ),
sum( bo.principal_pending + late_fee_pending +
interest_pending ) AS pending
FROM
bomber.application ba
INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id
AND date( ba.c1b_entry ) = bo.which_day_overdue
WHERE
c1b_entry > '%s'
AND c1b_entry < '%s'
UNION
SELECT
3 AS cycle,
count( 1 ),
sum( bo.principal_pending + late_fee_pending +
interest_pending ) AS pending
FROM
bomber.application ba
INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id
AND date( ba.c2_entry ) = bo.which_day_overdue
WHERE
c2_entry > '%s'
AND c2_entry < '%s'
UNION
SELECT
4 AS cycle,
count( 1 ),
sum( bo.principal_pending + late_fee_pending +
interest_pending ) AS pending
FROM
bomber.application ba
INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id
AND date( ba.c3_entry ) = bo.which_day_overdue
WHERE
c3_entry > '%s'
AND c3_entry < '%s'
"""
% (begin_date, end_date, begin_date, end_date, begin_date,
end_date, begin_date, end_date))
all_datas = run_all_sql(sql)
if real_time_query:
return all_datas
for data in all_datas:
SummaryBomber.update(new_case_amount_sum=data[2], new_case_cnt=data[1]
).where(SummaryBomber.time == begin_date, SummaryBomber.
bomber_id == data[0], SummaryBomber.cycle == data[0]).execute()
return all_datas
@time_logger
def get_cycle_new_case_call(begin_date, end_date, real_time_query=False):
sql = (
"""
SELECT
1 AS cycle,
count( DISTINCT ba.id )
FROM
bomber.application ba
INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
AND date( ba.created_at ) = date( bc.created_at )
WHERE
ba.created_at > '%s'
AND ba.created_at < '%s'
UNION
SELECT
2 AS cycle,
count( DISTINCT ba.id )
FROM
bomber.application ba
INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
AND date( ba.c1b_entry ) = date( bc.created_at )
WHERE
ba.c1b_entry > '%s'
AND ba.c1b_entry < '%s'
UNION
SELECT
3 AS cycle,
count( DISTINCT ba.id )
FROM
bomber.application ba
INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
AND date( ba.c2_entry ) = date( bc.created_at )
WHERE
ba.c2_entry > '%s'
AND ba.c2_entry < '%s'
UNION
SELECT
4 AS cycle,
count( DISTINCT ba.id )
FROM
bomber.application ba
INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
AND date( ba.c3_entry ) = date( bc.created_at )
WHERE
ba.c3_entry > '%s'
AND ba.c3_entry < '%s'
"""
% (begin_date, end_date, begin_date, end_date, begin_date,
end_date, begin_date, end_date))
cycle_datas = run_all_sql(sql)
if real_time_query:
return cycle_datas
for data in cycle_datas:
SummaryBomber.update(new_case_call_cnt=data[1]).where(SummaryBomber
.time == begin_date, SummaryBomber.cycle == data[0],
SummaryBomber.bomber_id == data[0]).execute()
return cycle_datas
def get_cycle_new_case_cleared(begin_date, end_date):
sql = (
"""
SELECT
'1' AS cycle, count( DISTINCT id ),
sum( paid_amount ) AS pending
FROM
(SELECT ba.id, br.repay_at,
br.late_fee_part + br.principal_part AS paid_amount
FROM
bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.created_at ) = date( br.repay_at )
WHERE ba.created_at > '%s'
AND ba.created_at < '%s'
GROUP BY 1, 2 ) a
UNION
SELECT
'2' AS cycle, count( DISTINCT id ),
sum( paid_amount ) AS pending
FROM
(SELECT ba.id, br.repay_at,
br.late_fee_part + br.principal_part AS paid_amount
FROM
bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.c1b_entry ) = date( br.repay_at )
WHERE ba.c1b_entry > '%s'
AND ba.c1b_entry < '%s'
GROUP BY 1, 2) a
"""
% (begin_date, end_date, begin_date, end_date))
cycle_cleared = run_all_sql(sql)
for i in cycle_cleared:
SummaryBomber.update(new_case_cleared_sum=i[2]).where(SummaryBomber
.cycle == i[0], SummaryBomber.bomber_id == i[0], SummaryBomber.
time == begin_date).execute()
<mask token>
@action(MessageAction.SUMMARY_NEW_CYCLE)
def summary_new_cycle(payload, msg_id):
end_date = date.today()
begin_date = end_date - timedelta(days=1)
worker_log = WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs')
).where(WorkerLog.created_at >= end_date, WorkerLog.action ==
'SUMMARY_NEW_CYCLE').first()
if worker_log and worker_log.logs >= 5:
return
cycle_datas = SummaryBomber.select(fn.SUM(SummaryBomber.
new_case_amount_sum).alias('new_case_amount_sum'), fn.SUM(
SummaryBomber.new_case_cleared_sum).alias('new_case_cleared_sum'),
fn.SUM(SummaryBomber.case_made_cnt).alias('case_made_cnt'), fn.SUM(
SummaryBomber.case_connect_cnt).alias('case_connect_cnt'), fn.SUM(
SummaryBomber.promised_cnt).alias('promised_cnt'), fn.SUM(
SummaryBomber.promised_amount).alias('promised_amount'), fn.SUM(
SummaryBomber.cleared_cnt).alias('cleared_cnt'), fn.SUM(
SummaryBomber.cleared_amount).alias('cleared_amount'), fn.SUM(
SummaryBomber.new_case_cnt).alias('new_case_cnt'), fn.SUM(
SummaryBomber.new_case_call_cnt).alias('new_case_call_cnt'), fn.SUM
(SummaryBomber.unfollowed_cnt).alias('unfollowed_cnt'), fn.SUM(
SummaryBomber.unfollowed_call_cnt).alias('unfollowed_call_cnt'), fn
.SUM(SummaryBomber.call_cnt).alias('call_cnt'), fn.SUM(
SummaryBomber.sms_cnt).alias('sms_cnt'), fn.SUM(SummaryBomber.
call_connect_cnt).alias('call_connect_cnt'), fn.SUM(SummaryBomber.
ptp_today_cnt).alias('ptp_today_cnt'), fn.SUM(SummaryBomber.
ptp_today_call_cnt).alias('ptp_today_call_cnt'), fn.SUM(
SummaryBomber.ptp_next_cnt).alias('ptp_next_cnt'), fn.SUM(
SummaryBomber.ptp_next_call_cnt).alias('ptp_next_call_cnt'), fn.SUM
(SummaryBomber.KP_cleared_cnt).alias('KP_cleared_cnt'), fn.SUM(
SummaryBomber.KP_today_cnt).alias('KP_today_cnt'), fn.SUM(
SummaryBomber.work_ind).alias('work_ind'), fn.SUM(SummaryBomber.
calltime_sum).alias('calltime_sum'), fn.SUM(SummaryBomber.
calltime_case_sum).alias('calltime_case_sum'), fn.SUM(SummaryBomber
.calltime_case_cnt).alias('calltime_case_cnt'), fn.SUM(
SummaryBomber.calltime_no_case_sum).alias('calltime_no_case_sum'),
        fn.SUM(SummaryBomber.calltime_no_case_cnt).alias(
        'calltime_no_case_cnt'), fn.SUM(SummaryBomber.work_time_sum).alias(
        'work_time_sum'), SummaryBomber.cycle.alias('cycle')).where(
SummaryBomber.time == begin_date, SummaryBomber.cycle << Cycle.values()
).group_by(SummaryBomber.cycle)
for cycle_data in cycle_datas:
SummaryBomber.create(bomber_id=cycle_data.cycle, time=begin_date,
cycle=cycle_data.cycle, new_case_amount_sum=cycle_data.
new_case_amount_sum, new_case_cleared_sum=cycle_data.
new_case_cleared_sum, new_case_cleard_rate=0, case_made_cnt=
cycle_data.case_made_cnt, case_made_rate=0, case_connect_cnt=
cycle_data.case_connect_cnt, case_connect_rate=0, promised_cnt=
cycle_data.promised_cnt, promised_amount=cycle_data.
promised_amount, cleared_cnt=cycle_data.cleared_cnt,
cleared_amount=cycle_data.cleared_amount, new_case_cnt=
cycle_data.new_case_cnt, new_case_call_cnt=cycle_data.
new_case_call_cnt, unfollowed_cnt=cycle_data.unfollowed_cnt,
unfollowed_call_cnt=cycle_data.unfollowed_call_cnt, call_cnt=
cycle_data.call_cnt, sms_cnt=cycle_data.sms_cnt,
call_connect_cnt=cycle_data.call_connect_cnt, calltime_case_avg
=0, ptp_today_cnt=cycle_data.ptp_today_cnt, ptp_today_call_cnt=
cycle_data.ptp_today_call_cnt, ptp_next_cnt=cycle_data.
ptp_next_cnt, ptp_next_call_cnt=cycle_data.ptp_next_call_cnt,
KP_cleared_cnt=cycle_data.KP_cleared_cnt, KP_today_cnt=
cycle_data.KP_today_cnt, KP_cleared_rate=0, work_ind=cycle_data
.work_ind, calltime_sum=cycle_data.calltime_sum,
calltime_case_sum=cycle_data.calltime_case_sum,
calltime_case_cnt=cycle_data.calltime_case_cnt,
calltime_no_case_sum=cycle_data.calltime_no_case_sum,
calltime_no_case_cnt=cycle_data.calltime_no_case_cnt,
work_time_sum=cycle_data.work_time_sum)
cycle_claimed = get_cycle_claimed(begin_date, end_date)
for claimed in cycle_claimed:
SummaryBomber.update(claimed_cnt=claimed[1]).where(SummaryBomber.
time == begin_date, SummaryBomber.cycle == claimed[0],
SummaryBomber.bomber_id == claimed[0]).execute()
cycle_new_case(begin_date, end_date)
get_cycle_new_case_call(begin_date, end_date)
get_cycle_new_case_cleared(begin_date, end_date)
get_cycle_case_made_cnt(begin_date, end_date)
all_datas = SummaryBomber.filter(SummaryBomber.time == begin_date)
for data in all_datas:
cl_rat = (data.new_case_cleared_sum / data.new_case_amount_sum if
data.new_case_amount_sum else 0) * 100
data.new_case_cleard_rate = cl_rat
case_made_rate = (data.case_made_cnt / data.claimed_cnt if data.
claimed_cnt else 0) * 100
data.case_made_rate = case_made_rate
case_connect_rate = (data.case_connect_cnt / data.case_made_cnt if
data.case_made_cnt else 0) * 100
data.case_connect_rate = case_connect_rate
calltime_case_avg = (data.calltime_case_sum / data.
calltime_case_cnt if data.calltime_case_cnt else 0)
data.calltime_case_avg = calltime_case_avg
calltime_no_case_avg = (data.calltime_no_case_sum / data.
calltime_no_case_cnt if data.calltime_no_case_cnt else 0)
data.calltime_no_case_avg = calltime_no_case_avg
KP_cleared_rate = (data.KP_cleared_cnt / data.KP_today_cnt if data.
KP_today_cnt else 0) * 100
data.KP_cleared_rate = KP_cleared_rate
data.save()
<mask token>
def get_change_bomber():
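    """Read today's BomberLog entries for the C1B/C2/C3 roles and build, per
    cycle and group, the list of removed bombers (del_ids) plus the currently
    active bombers (new_ids). Returns a flat list of these dicts, or [] when
    nothing changed today.
    """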
    cycle_role_map = {5: Cycle.C1B.value, 6: Cycle.C2.value, 8: Cycle.C3.value}
result = {}
bomber_logs = BomberLog.select(BomberLog.bomber_id, BomberLog.role_id,
BomberLog.operation, Bomber.group_id).join(Bomber, JOIN_INNER, on=
BomberLog.bomber_id == Bomber.id).where(fn.DATE(BomberLog.
created_at) == date.today(), BomberLog.role_id << list(
cycle_role_map.keys()), BomberLog.operation << (0, 1), Bomber.
instalment == 0).dicts()
for b_log in bomber_logs:
cycle = cycle_role_map.get(b_log['role_id'])
group_id = b_log['group_id']
if cycle in result:
if group_id not in result[cycle]:
result[cycle][group_id] = {'cycle': cycle, 'del_ids': [],
'new_ids': []}
else:
result[cycle] = {group_id: {'cycle': cycle, 'del_ids': [],
'new_ids': []}}
if b_log['operation'] == 0:
result[cycle][group_id]['del_ids'].append(b_log['bomber_id'])
if result:
bombers = Bomber.select().where(Bomber.role.in_(list(cycle_role_map
.keys())), Bomber.is_del == 0, Bomber.instalment == 0)
for b in bombers:
cycle_result = result.get(cycle_role_map[b.role_id], {})
role_result = cycle_result.get(b.group_id)
if not role_result:
continue
role_result['new_ids'].append(b.id)
        result_list = []
        for cycle, group_dict in result.items():
            result_list.extend(list(group_dict.values()))
        return result_list
return []
<mask token>
def get_surplus_application(new_ids, del_ids, average_nums, classified_apps):
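    """Work out which applications must be redistributed: everything held by
    deleted bombers plus each remaining bomber's overflow above the average
    quota. Annotates every bomber's dict with need_num (negative means it must
    give apps away) and returns (surplus_apps, bombers sorted by need_num desc).
    """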
surplus_apps = []
for del_id in del_ids:
del_res = classified_apps.get(del_id, {})
p_list = del_res.get('p_list', [])
np_list = del_res.get('np_list', [])
del_res['need_num'] = -(len(p_list) + len(np_list))
del_res['to_list'] = np_list + p_list
surplus_apps.extend(p_list)
surplus_apps.extend(np_list)
for index, bid in enumerate(new_ids):
average = average_nums[index]
bomber_app = classified_apps.get(bid)
if not bomber_app:
bomber = Bomber.select(Bomber.partner_id).where(Bomber.id == bid
).first()
bomber_app = {'bid': bid, 'p_list': [], 'p_num': 0, 'np_list':
[], 'np_num': 0, 'need_num': average, 'partner_id': bomber.
partner_id if bomber else ''}
classified_apps[bid] = bomber_app
else:
p_num = len(bomber_app['p_list'])
np_num = len(bomber_app['np_list'])
if p_num > average:
bomber_app['need_num'] = -np_num
else:
bomber_app['need_num'] = average - (p_num + np_num)
bomber_app['p_num'] = p_num
bomber_app['np_num'] = np_num
if bomber_app['need_num'] < 0:
random.shuffle(bomber_app['np_list'])
res_over = bomber_app['np_list'][:-bomber_app['need_num']]
bomber_app['to_list'] = res_over
surplus_apps.extend(res_over)
classified_apps_list = sorted(classified_apps.values(), key=lambda x: x
['need_num'], reverse=True)
return surplus_apps, classified_apps_list
<mask token>
@action(MessageAction.BOMBER_CHANGE_DISPATCH_APPS)
def bomber_dispatch_applications(payload, msg_id):
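    """Redispatch applications after bomber changes, handling cash-loan and
    instalment bombers separately via change_bomber_dispatch_apps.
    """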
change_bombers = get_change_bomber()
instalment_change_bombers = get_instalment_change_bomber()
params = {ApplicationType.CASH_LOAN.value: change_bombers,
ApplicationType.CASH_LOAN_STAGING.value: instalment_change_bombers}
for type, bombers in params.items():
change_bomber_dispatch_apps(change_bombers=bombers, type=type)
@action(MessageAction.REPAIR_BOMBER)
def repair_bomber(payload, msg_id):
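    """Repair contact data: for each mobile/telephone number carried in the
    payload, attach it to every application whose user_mobile_no matches and
    record the number relationship.
    """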
app_mobile = payload['app_mobile']
username = payload.get('user_name')
logging.info('start repair bomber, number: %s' % app_mobile)
if 'mobile_no' in payload and payload['mobile_no']:
mobile = number_strip(str(payload['mobile_no']))[:64]
name = payload.get('mobile_name')
application = Application.filter(Application.user_mobile_no == mobile)
if application.exists():
repair_contact(app_mobile, application, username)
add_relationship(app_mobile, mobile, username, name)
if 'tel_no' in payload and payload['tel_no']:
tel_no = number_strip(str(payload['tel_no']))[:64]
name = payload.get('tel_name')
application = Application.filter(Application.user_mobile_no == tel_no)
if application.exists():
repair_contact(app_mobile, application, username)
add_relationship(app_mobile, tel_no, username, name)
<mask token>
def get_summary_daily_time():
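    """Pick the intraday window the summary run should cover (yesterday
    17:20-24:00, today 00:00-12:40, or today 12:40-17:20 depending on the
    current time) and return (begin_time, end_time, summary_date).
    """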
mid_time_t1 = datetime.strptime('12:40:00', '%H:%M:%S')
mid_time_t2 = datetime.strptime('17:20:00', '%H:%M:%S')
now_date = datetime.now()
now_date_time = now_date.time()
today_str = str(now_date.date())
if now_date_time < mid_time_t1.time():
yes_date = now_date - timedelta(days=1)
yes_date_str = str(yes_date.date())
begin_str = yes_date_str + ' 17:20:00'
end_str = today_str + ' 00:00:00'
elif mid_time_t1.time() <= now_date_time < mid_time_t2.time():
begin_str = today_str + ' 00:00:00'
end_str = today_str + ' 12:40:00'
else:
begin_str = today_str + ' 12:40:00'
end_str = today_str + ' 17:20:00'
begin_time = datetime.strptime(begin_str, '%Y-%m-%d %H:%M:%S')
end_time = datetime.strptime(end_str, '%Y-%m-%d %H:%M:%S')
summary_datetime = now_date - timedelta(minutes=30)
summary_date = summary_datetime.date()
return begin_time, end_time, summary_date
@action(MessageAction.SUMMARY_DAILY)
def summary_daily_data(payload, msg_id):
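    """Build per-bomber stats for the latest intraday window: call and PTP
    counts from CallActionsR plus repaid amounts from two repayment queries
    (C1 roles and later cycles), then bulk-insert the rows into SummaryDaily.
    """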
begin_time, end_time, summary_date = get_summary_daily_time()
call_actions = CallActionsR.select(CallActionsR.id, CallActionsR.
bomber_id, CallActionsR.application_id, CallActionsR.promised_date,
CallActionsR.cycle, CallActionsR.name, CallActionsR.number).where(
CallActionsR.created_at >= begin_time, CallActionsR.created_at <
end_time, CallActionsR.type << (0, 1))
summary_dailys = {}
for call in call_actions:
if call.bomber_id not in summary_dailys:
summary_dailys[call.bomber_id] = {'ptp_cnt': 0, 'call_cnt': 0,
'cycle': call.cycle, 'repayment': 0, 'bomber_id': call.
bomber_id, 'summary_date': str(summary_date)}
if call.name and call.number:
summary_dailys[call.bomber_id]['call_cnt'] += 1
if call.promised_date:
summary_dailys[call.bomber_id]['ptp_cnt'] += 1
C1_sql = (
"""
SELECT a.current_bomber_id,
sum(principal_part+late_fee_part) as pay_amount,a.cycle
from
(select a.cycle,a.current_bomber_id,b.username,a.principal_part,
a.late_fee_part,a.application_id,a.repay_at
FROM bomber.repayment_log a ,bomber.bomber b
WHERE a.repay_at >= '%s' AND a.repay_at <'%s'
AND a.current_bomber_id !=''
AND a.current_bomber_id = b.id
and b.role_id in (1,2,4,5)
and principal_part+late_fee_part>0
group by 6,7) a
GROUP BY a.cycle,a.current_bomber_id
"""
% (begin_time, end_time))
C1_repayment = run_all_sql(C1_sql)
other_sql = (
"""
select current_bomber_id,sum(pay_amount) as pay_amount,cycle
from (
select application_id,current_bomber_id,pay_amount,repay_at,cycle
from (
select br.application_id,br.current_bomber_id,
br.principal_part+br.late_fee_part as pay_amount,br.repay_at,
br.cycle
from bomber.repayment_log br
left join bomber.bomber bb on br.current_bomber_id=bb.id
where exists (select 1 from bomber.bombing_history bb
where br.current_bomber_id=bb.bomber_id
and br.application_id=bb.application_id
and bb.created_at<br.repay_at
and (bb.promised_date is not null
or bb.promised_amount is not null))
and br.repay_at >= '%s'
and br.repay_at < '%s'
and bb.role_id in (3,6,7,8,9)
and br.principal_part+br.late_fee_part > 0
group by 1,4
) a
group by 1,4) b
group by 1
"""
% (begin_time, end_time))
other_repayment = run_all_sql(other_sql)
all_repayment = C1_repayment + other_repayment
for res in all_repayment:
bomber_id, pay_amount, cycle = res
if bomber_id in summary_dailys:
summary_dailys[bomber_id]['repayment'] += pay_amount
else:
summary_dailys[bomber_id] = {'ptp_cnt': 0, 'call_cnt': 0,
'cycle': cycle, 'repayment': pay_amount, 'bomber_id':
bomber_id, 'summary_date': str(summary_date)}
insert_values = list(summary_dailys.values())
if insert_values:
SummaryDaily.insert_many(insert_values).execute()
<mask token>
def get_app_logs(apps):
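    """Group applications by their latest bomber (falling back to the cycle id
    when unassigned) and split them into promised (PTP date still valid today)
    and non-promised lists. Returns (app_logs, all_np_apps, all_p_apps).
    """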
app_logs = {}
all_np_apps = []
all_p_apps = []
for a in apps:
latest_bomber = a['latest_bomber']
latest_bomber = a['cycle'] if not latest_bomber else latest_bomber
if latest_bomber in app_logs:
app_logs[latest_bomber]['to_ids'].append(a['id'])
else:
app_logs[latest_bomber] = {'bomber_id': latest_bomber, 'to_ids':
[a['id']], 'np_ids': [], 'p_ids': []}
if a['promised_date'] and a['promised_date'].date() >= datetime.now(
).date():
app_logs[latest_bomber]['p_ids'].append(a['id'])
all_p_apps.append(a)
else:
app_logs[latest_bomber]['np_ids'].append(a['id'])
all_np_apps.append(a)
return app_logs, all_np_apps, all_p_apps
def month_dispatch_app_out_partner(cycle, apps, app_logs, np_apps):
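    """Hand a cycle's non-promised applications to outsourcing partners in
    proportion to each partner's app_percentage, dispatching them to the
    partner's bombers. Returns the applications left for in-house dispatch.
    """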
apps = list(apps)
np_apps = list(np_apps)
random.shuffle(np_apps)
apps_len = len(apps)
np_apps_len = len(np_apps)
end = 0
all_app_precentage = 0
partners = Partner.select().where(Partner.cycle == cycle, Partner.
status == PartnerStatus.NORMAL.value)
for p in partners:
all_app_precentage += p.app_percentage
for partner in partners:
bombers = Bomber.select().where(Bomber.partner == partner.id,
Bomber.is_del == 0, Bomber.status != BomberStatus.OUTER_LEADER.
value)
bids = {b.id: b for b in bombers}
if len(bids) == 0:
logging.info('cycle:%s,partner:%s,no bomber' % (cycle, partner.id))
continue
start = end
if np_apps_len >= int(apps_len * all_app_precentage):
end = start + int(apps_len * partner.app_percentage)
else:
end = start + int(np_apps_len * partner.app_percentage /
all_app_precentage)
partner_app = np_apps[start:end]
dispatch_apps_to_bomber(cycle, partner_app, bids, app_logs)
np_apps = np_apps[end:]
return np_apps
def month_dispatch_app_inner(cycle, np_apps, app_logs, p_apps):
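    """Dispatch the remaining applications to the in-house AB-test bombers
    configured in SystemConfig. C1A non-promised apps are instead released to
    the public pool (status PROCESSING, bombers cleared) with in/out records.
    """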
    sys_cycle = {1: 'AB_TEST_C1A', 2: 'AB_TEST_C1B', 3: 'AB_TEST_C2',
        4: 'AB_TEST_C3'}
sys_config = SystemConfig.get(SystemConfig.key == sys_cycle[cycle])
sys_values = json.loads(sys_config.value)
bombers = Bomber.select().where(Bomber.id << sys_values, Bomber.is_del == 0
)
if cycle in (Cycle.C1B.value, Cycle.C2.value, Cycle.C3.value):
bombers = bombers.where(Bomber.instalment == 0)
bids = {b.id: b for b in bombers}
if cycle == Cycle.C1A.value:
np_ids = [a['id'] for a in np_apps]
np = Application.update(status=ApplicationStatus.PROCESSING.value,
ptp_bomber=None, latest_bomber=None).where(Application.id << np_ids
).execute()
bomber_app_logs = app_logs.get(cycle, {})
out_param = {'application_ids': bomber_app_logs.get('to_ids', []),
'month_dispatch': 1, 'src_bomber_id': cycle}
new_out_record(**out_param)
in_param = {'cycle': cycle, 'application_ids': np_ids,
'dest_bomber_id': cycle}
new_in_record(**in_param)
bomber_app_logs['need_num'] = len(np_apps)
bomber_app_logs['form_ids'] = np_ids
bomber_app_logs['status'] = 1
else:
dispatch_apps_to_bomber(cycle, np_apps, bids, app_logs, False)
dispatch_apps_to_bomber(cycle, p_apps, bids, app_logs, False)
def dispatch_apps_to_bomber(cycle, apps, bids, app_logs, out_partner=True,
type=ApplicationType.CASH_LOAN.value):
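    """Shuffle the applications and split them evenly across the given
    bombers, updating ownership (promised apps keep their ptp_bomber), writing
    in/out dispatch records and, for partner bombers, refreshing DispatchApp.
    """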
apps = list(apps)
random.shuffle(apps)
bids_list = list(bids.keys())
if len(bids_list) <= 0:
logging.info('get_dispatch_app_to_bomber no bids')
return
average_num = get_average_number(len(apps), len(bids_list))
bomber_end = 0
with db.atomic():
for index, bid in enumerate(bids_list):
current_bomber = bids.get(bid)
bomber_app_logs = app_logs.get(bid, {})
bomber_start = bomber_end
bomber_end = bomber_start + average_num[index]
bomber_apps = apps[bomber_start:bomber_end]
from_p, from_np, from_ids, status = [], [], [], 0
for ba in bomber_apps:
promised_date = ba.get('promised_date')
from_ids.append(ba['id'])
if promised_date and promised_date.date() >= date.today():
from_p.append(ba['id'])
else:
from_np.append(ba['id'])
app_status = ApplicationStatus.AB_TEST.value
if (cycle == Cycle.C1A.value and not out_partner and type ==
ApplicationType.CASH_LOAN.value):
app_status = ApplicationStatus.PROCESSING.value
if from_p:
p = Application.update(ptp_bomber=bid, latest_bomber=bid,
status=app_status).where(Application.id << from_p).execute(
)
p_ids = bomber_app_logs.get('p_ids', []) + from_p
bomber_app_logs['p_ids'] = p_ids
if from_np:
np = Application.update(latest_bomber=bid, ptp_bomber=None,
status=ApplicationStatus.AB_TEST.value).where(
Application.id << from_np).execute()
np_ids = bomber_app_logs.get('np_ids', []) + from_np
bomber_app_logs['np_ids'] = np_ids
in_param = {'cycle': cycle, 'dest_partner_id': current_bomber.
partner_id, 'application_ids': from_ids, 'dest_bomber_id': bid}
if type == ApplicationType.CASH_LOAN.value:
out_param = {'src_bomber_id': bid, 'application_ids':
bomber_app_logs.get('to_ids', []), 'month_dispatch': 1}
new_out_record(**out_param)
new_in_record(**in_param)
else:
out_and_in_record_instalment(**in_param)
bomber_app_logs['status'] = 1
need_num = bomber_app_logs.get('need_num', 0) + average_num[index]
bomber_app_logs['need_num'] = need_num
all_form_ids = bomber_app_logs.get('form_ids', []) + from_ids
bomber_app_logs['form_ids'] = all_form_ids
if not out_partner:
continue
try:
DispatchApp.delete().where(DispatchApp.application.in_(
from_ids)).execute()
dispatch_ins = [{'application': id, 'partner':
current_bomber.partner_id, 'bomber': bid, 'status':
DisAppStatus.NORMAL.value} for id in from_ids]
DispatchApp.insert_many(dispatch_ins).execute()
except Exception as e:
logging.info(
'month_disapp_error error:%s,bid:%s,from_ids:%s' % (str
(e), bid, from_ids))
<mask token>
def calc_entry_time(overdue_days):
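    """Map overdue_days onto the collection-stage entry timestamps. The stage
    whose day range contains overdue_days gets datetime.now(); every other
    stage gets None. E.g. overdue_days=15 sets only C1B_entry.
    """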
app_entry_time = {}
overdue_entry = {'dpd1_entry': [1, 3], 'C1A_entry': [4, 10],
'C1B_entry': [11, 30], 'C2_entry': [31, 60], 'C3_entry': [61, 90]}
for key, value in overdue_entry.items():
if value[0] <= overdue_days <= value[1]:
app_entry_time[key] = datetime.now()
else:
app_entry_time[key] = None
return app_entry_time
<mask token>
@action(MessageAction.SUMMARY_BOMBER_OVERDUE)
def summary_bomber_overdue_everyday(payload, msg_id):
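    """Snapshot every active application of each cycle into BomberOverdue for
    today, blanking PTP and follow-up dates that are already in the past, and
    insert the rows in batches of 1000.
    """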
cycle_list = Cycle.values()
which_day = date.today()
for cycle in cycle_list:
apps = ApplicationR.select(ApplicationR.id, ApplicationR.cycle,
ApplicationR.ptp_bomber, ApplicationR.overdue_days,
ApplicationR.promised_date, ApplicationR.follow_up_date,
ApplicationR.external_id, OverdueBillR.status, OverdueBillR.
periods, OverdueBillR.sub_bill_id).join(OverdueBillR,
JOIN_LEFT_OUTER, on=ApplicationR.id == OverdueBillR.collection_id
).where(ApplicationR.status != ApplicationStatus.REPAID.value,
ApplicationR.no_active == 0, ApplicationR.cycle == cycle).dicts()
bomber_overdue_list = []
for app in apps:
status = app.get('status')
if status == ApplicationStatus.REPAID.value:
continue
ptp_bomber = app.get('ptp_bomber')
promised_date = app.get('promised_date')
follow_up_date = app.get('follow_up_date')
if not promised_date or promised_date.date() < date.today():
ptp_bomber = promised_date = None
if not follow_up_date or follow_up_date.date() < date.today():
follow_up_date = None
overdue_dict = {'collection_id': app.get('id'), 'external_id':
app.get('external_id'), 'sub_bill_id': app.get(
'sub_bill_id'), 'periods': app.get('periods'), 'cycle': app
.get('cycle') if app.get('cycle') else cycle, 'ptp_bomber':
ptp_bomber, 'promised_date': promised_date,
'follow_up_date': follow_up_date, 'which_day': which_day,
'overdue_days': app.get('overdue_days')}
bomber_overdue_list.append(overdue_dict)
try:
if bomber_overdue_list:
with db.atomic():
for index in range(0, len(bomber_overdue_list), 1000):
insert_list = bomber_overdue_list[index:index + 1000]
BomberOverdue.insert_many(insert_list).execute()
except Exception as e:
logging.info(
'summary_bomber_overdue_error,cycle:%s,which_day:%s,error:%s' %
(cycle, str(which_day), str(e)))
<mask token>
@action(MessageAction.BOMBER_TODAY_PTP_FOLLOW_SWITCH_OFF)
def today_ptp_auto_call_switch(payload, msg_id):
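    """Switch off the auto-call flag (BomberPtp.today_switch) for bombers who
    hold a PTP due today in cycles before C2.
    """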
today = datetime.today().date()
next_day = today + timedelta(days=1)
apps = ApplicationR.select(ApplicationR.latest_bomber).where(
ApplicationR.promised_date < next_day, ApplicationR.promised_date >=
today, ApplicationR.promised_date.is_null(False), ApplicationR.
status != ApplicationStatus.REPAID.value, ApplicationR.cycle <
Cycle.C2.value, ApplicationR.latest_bomber.is_null(False)).group_by(
ApplicationR.latest_bomber)
bids = [a.latest_bomber_id for a in apps]
if not bids:
return
q = BomberPtp.update(today_switch=BomberCallSwitch.OFF.value).where(
BomberPtp.auto_ext.is_null(False), BomberPtp.bomber_id << bids
).execute()
@action(MessageAction.BOMBER_TODAY_PTP_FOLLOW_SWITCH_ON)
def update_today_switch_every_day(payload, msg_id):
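    """Daily reset: re-enable the auto-call switch for every bomber that has
    an auto extension configured.
    """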
q = BomberPtp.update(today_switch=BomberCallSwitch.ON.value).where(
BomberPtp.auto_ext.is_null(False)).execute()
<mask token>
<mask token>
@action(MessageAction.BOMBER_HEALTH_CHECK)
def health_check(payload, msg_id):
pass
<mask token>
def dpd1_process(lst):
"""已废弃的方法"""
if not lst:
return
for key, l in lst.items():
rule = getattr(BeforeInBomber, key).value
query = AutoIVRActions.select(fn.DISTINCT(AutoIVRActions.loanid)
).where(AutoIVRActions.loanid.in_(l), AutoIVRActions.group.in_(
rule.get('group')), AutoIVRActions.callstate.in_(IVRCallStatus.
call_success()))
success_set = {i.loanid for i in query}
failed_list = list(set(l) - success_set)
post_params = {'$and': rule.get('$and'), 'app_list': failed_list}
resp = Hyperloop().post('/bomber/score/verify', json=post_params)
if not resp.ok:
logging.error('hyperloop score verification failed: %s, %s',
str(resp.status_code), str(resp.text))
logging.error('hyperloop score verification failed: %s', str(
post_params))
continue
logging.debug('hyperloop score verification success: %s', resp.content)
resp_json = resp.json()
app_list = resp_json['data']
if not app_list:
continue
for item in app_list:
if random.randint(0, 5) == 1:
send_to_default_q(MessageAction.APPLICATION_BOMBER, {'id':
int(item)})
@action(MessageAction.GET_IVR)
def get_ivr(payload, msg_id):
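    """Page overdue/upcoming bills out of BillService and queue them into
    AutoIVR. The IVR group is looked up from app name + su flag + days-to-due
    (negative days become 'PDPn'); IVRActionLog tracks paging progress so an
    interrupted run resumes at its last page.
    """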
logging.warning('start get_ivr')
sys_config = SystemConfig.select().where(SystemConfig.key ==
'DPD1-3_INTO_IVR').first()
now = date.today()
if sys_config and sys_config.value:
start = now - timedelta(days=3)
else:
start = now
end = now + timedelta(days=4)
item = IVRActionLog.filter(IVRActionLog.proc_date == now).first()
if not item:
AutoIVR.delete().execute()
current_page = 0
elif item.current_page >= item.total_page:
return
else:
current_page = item.current_page
auto_ivr = {'DanaCepat01': 1, 'DanaCepat00': 2, 'DanaCepat0PDP1': 3,
'PinjamUang01': 4, 'PinjamUang00': 5, 'PinjamUang0PDP1': 6,
'KtaKilat01': 7, 'KtaKilat00': 8, 'KtaKilat0PDP1': 9, 'DanaCepat11':
10, 'DanaCepat10': 11, 'DanaCepat1PDP1': 12, 'PinjamUang11': 13,
'PinjamUang10': 14, 'PinjamUang1PDP1': 15, 'KtaKilat11': 16,
'KtaKilat10': 17, 'KtaKilat1PDP1': 18, 'DanaCepat0PDP2': 19,
'DanaCepat0PDP3': 20, 'DanaCepat03': 21, 'PinjamUang0PDP2': 22,
'PinjamUang0PDP3': 23, 'PinjamUang03': 24, 'KtaKilat0PDP2': 25,
'KtaKilat0PDP3': 26, 'KtaKilat03': 27, 'DanaCepat1PDP2': 28,
'DanaCepat1PDP3': 29, 'PinjamUang1PDP2': 30, 'PinjamUang1PDP3': 31,
'KtaKilat1PDP2': 32, 'KtaKilat1PDP3': 33, 'DanaCepat13': 36,
'PinjamUang13': 37, 'KtaKilat13': 38, 'DanaCepat12': 39,
'PinjamUang12': 40, 'KtaKilat12': 41, 'DanaCepat02': 42,
'PinjamUang02': 43, 'KtaKilat02': 44, 'IKIDana01': 100, 'IKIDana00':
101, 'IKIDana0PDP1': 102, 'IKIDana11': 103, 'IKIDana10': 104,
'IKIDana1PDP1': 105, 'IKIDana0PDP2': 106, 'IKIDana0PDP3': 107,
'IKIDana03': 108, 'IKIDana1PDP2': 109, 'IKIDana1PDP3': 110,
'IKIDana13': 111, 'IKIDana12': 112, 'IKIDana02': 113}
current_page += 1
with db.atomic() as transaction:
while True:
bill_service = BillService()
ivr_action = bill_service.ivr_pages(page=current_page,
page_size=500, start_time=utc_datetime(str(start)),
end_time=utc_datetime(str(end)))
result = ivr_action['result']
page_size = int(ivr_action.get('page_size', 0))
total_page = int(ivr_action.get('total_page', 0))
insert_args = []
for a in result:
due_at = no_utc_datetime(a['due_at'])
days = (due_at.date() - now).days
if days == 2:
continue
if days > 0:
time = str(days)
else:
time = str(days).replace('-', 'PDP')
key = a['app_name'] + str(a['su']) + time
group = auto_ivr.get(key)
user_id = a['user_id']
try:
user_resp = AccountService().get_user(path_params={
'user_id': user_id})
if str(user_resp['mobile_no']) == str(a['user_mobile_no']):
numbers = a['user_mobile_no']
else:
numbers = a['user_mobile_no'] + ',' + user_resp.get(
'mobile_no')
                except Exception:
logging.error('Request Account Service Error.')
numbers = a['user_mobile_no']
insert_args.append({'application_id': a['id'], 'numbers':
numbers, 'group': group, 'user_id': user_id})
            if insert_args:
                AutoIVR.insert_many(insert_args).execute()
if current_page == 1:
IVRActionLog.create(total_page=total_page, proc_date=now,
page_size=page_size, current_page=current_page)
item = IVRActionLog.get(IVRActionLog.proc_date == now)
else:
item.current_page = current_page
item.page_size = page_size
item.total_page = total_page
item.save()
transaction.commit()
current_page += 1
if current_page > int(total_page):
break
if sys_config and sys_config.value:
try:
classfiy_dpd_ptp_apps()
except Exception as e:
logging.error('dpd1-3_test_error:%s' % str(e))
def ivr_t2_test():
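    """Keep only a configurable fraction (IVR_TEST_PROPORTION, default 20%) of
    the T2 IVR groups (39-44) available and mark the rest SUCCESS so they are
    not dialed.
    """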
t2_groups = [39, 40, 41, 42, 43, 44]
ivr_test_proportion = 0.2
sys_config = SystemConfig.select().where(SystemConfig.key ==
'IVR_TEST_PROPORTION').first()
if sys_config and sys_config.value:
ivr_test_proportion = float(sys_config.value)
t2_ivrs = AutoIVR.select().where(AutoIVR.group << t2_groups, AutoIVR.
status == AutoIVRStatus.AVAILABLE.value)
t2_dict = defaultdict(list)
for ivr in t2_ivrs:
t2_dict[ivr.group].append(ivr.id)
test_ivr_ids = []
for group, ivr_ids in t2_dict.items():
number = ceil(len(ivr_ids) * ivr_test_proportion)
test_ivr_ids += ivr_ids[:number]
if not test_ivr_ids:
return
q = AutoIVR.update(status=AutoIVRStatus.SUCCESS.value).where(AutoIVR.
group << t2_groups, AutoIVR.id.not_in(test_ivr_ids)).execute()
<mask token>
@action(MessageAction.APP_MERGE)
@deprecated(version='1.0', reason='This function will be removed soon')
def app_merge(payload, msg_id):
sql = """
select *
from (
select a.id as id
from dashboard.application as a
inner join repayment.bill2 as b on b.external_id = a.id
where not exists (
select 1
from battlefront.user_login_log as u
where u.created_at > '2018-08-16'
and u.user_id = a.user_id
)
and a.app = 'DanaCepat'
and a.is_first_loan = 1
and a.apply_at < '2018-08-23 20:50:00'
and b.overdue_days between 1 and 3
and b.status != 2) result
where not exists (
select 1
from bomber.application as a
where a.cycle = 1
and a.status = 4
and a.id = result.id
)
"""
cursor = readonly_db.get_cursor()
cursor.execute(sql)
new_data = cursor.fetchall()
cursor.close()
if new_data:
bomber = [103, 104]
for d in new_data:
app_id = {'id': d[0]}
application_overdue(app_id, None)
Application.update(status=ApplicationStatus.AB_TEST.value,
latest_bomber=random.choice(bomber), ptp_bomber=None).where(
Application.id == d[0]).execute()
logging.warning('add new app success')
ptp = date.today() - timedelta(days=1)
del_sql = (
"""
select a.id
from bomber.application as a
where exists(
select 1
from battlefront.user_login_log as u
where u.created_at > '2018-08-16'
and u.user_id = a.user_id
)
and a.cycle = 1
and a.status = 4
and (a.promised_date is null or a.promised_date < "%s")
"""
% ptp)
cursor = readonly_db.get_cursor()
cursor.execute(del_sql)
del_date = cursor.fetchall()
cursor.close()
    if not del_date:
        return
ids = list()
for d in del_date:
ids.append(d[0])
Application.update(status=ApplicationStatus.UNCLAIMED.value,
latest_bomber=None).where(Application.id << ids).execute()
@action(MessageAction.APPLICATION_BOMBER)
def application_overdue(payload, msg_id):
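    """Entry point for a newly overdue sub-bill: fetch bill, application and
    apply-history data from the upstream services, create or extend the local
    Application plus its OverdueBill, seed the first escalation record and
    import the applicant's contacts.
    """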
application_id = payload['id']
sub_bill_id = payload['bill_sub_id']
local_app = Application.select().where(Application.external_id ==
application_id).order_by(Application.finished_at).first()
if local_app and local_app.type != ApplicationType.CASH_LOAN_STAGING.value:
logging.info('application %s overdue, already exists', application_id)
add_contact(local_app)
return
if local_app and local_app.type == ApplicationType.CASH_LOAN_STAGING.value:
overdue_bill = OverdueBillR.select().where(OverdueBillR.sub_bill_id ==
sub_bill_id, OverdueBillR.external_id == application_id)
if overdue_bill.exists():
logging.info(
'application %s,sub_bill_id %s overdue, already exists' % (
application_id, sub_bill_id))
return
try:
sub_bill = BillService().sub_bill_list(bill_sub_ids=[sub_bill_id])
sub_bill = sub_bill[0]
except Exception:
logging.error(
'application %s overdue, get sub_bill info failed:Request To repayment Error'
, application_id)
return
if sub_bill['status'] == 2:
logging.error('application %s overdue, but bills already cleared',
application_id)
return
overdue_days = sub_bill.get('overdue_days', 0)
if overdue_days == 0:
logging.info('application {} no overdue'.format(str(application_id)))
return
gold_eye = GoldenEye().get('/applications/%s' % application_id)
if not gold_eye.ok:
logging.error('get application %s failed: Request to GoldenEye.',
application_id)
return
gold_app = gold_eye.json().get('data')
user_id = gold_app['user_id']
apply_history = Dashboard().get('/users/%s/apply-history' % user_id)
if not apply_history.ok:
logging.error(
'get user %s apply history failed: Request to Dashboard Failed.',
user_id)
return
history = apply_history.json().get('data')
    loan_success_times = len([1 for i in history if i['status'] in [80,
        90, 100, 70] and i['id'] != gold_app['id']])
id = application_id
type = ApplicationType.CASH_LOAN.value
bill_id = sub_bill.get('bill_id')
amount = sub_bill.get('amount')
amount_net = sub_bill.get('amount_net')
interest_rate = sub_bill.get('interest_rate')
overdue_days = sub_bill.get('overdue_days')
origin_due_at = sub_bill.get('origin_due_at')
sub_overdue_bill = {'collection_id': id, 'bill_id': bill_id,
'sub_bill_id': sub_bill_id, 'periods': sub_bill.get('periods'),
'overdue_days': overdue_days, 'origin_due_at': origin_due_at,
'amount': amount, 'amount_net': amount_net, 'interest_rate':
interest_rate, 'external_id': application_id}
if sub_bill['category'] == ApplicationType.CASH_LOAN_STAGING.value:
if local_app and local_app.status != ApplicationStatus.REPAID.value:
sub_overdue_bill['collection_id'] = local_app.id
local_app.amount += amount
local_app.amount_net += amount_net
local_app.save()
new_overdue = OverdueBill.create(**sub_overdue_bill)
logging.info('application %s,sub_bill_id:%s overdue created' %
(application_id, sub_bill_id))
return
else:
id = idg()
type = ApplicationType.CASH_LOAN_STAGING.value
sub_overdue_bill['collection_id'] = id
ptp_info = BombingHistory.filter(BombingHistory.application == id).first()
promised_amount = ptp_info and ptp_info.promised_amount
promised_date = ptp_info and ptp_info.promised_date
application = Application.create(id=id, user_id=gold_app['user_id'],
user_mobile_no=gold_app['user_mobile_no'], user_name=gold_app[
'id_name'], app=gold_app['app'], device_no=gold_app['device_no'],
contact=json.dumps(gold_app.get('contact')), apply_at=gold_app.get(
'apply_date'), id_ektp=gold_app.get('id_ektp'), birth_date=
birth_dt_ktp(gold_app.get('id_ektp')), gender=gender_ktpnum(
gold_app.get('id_ektp')), profile_province=(gold_app.get(
'profile_province') or {}).get('name'), profile_city=(gold_app.get(
'profile_city') or {}).get('name'), profile_district=(gold_app.get(
'profile_district') or {}).get('name'), profile_residence_time=
gold_app.get('profile_residence_time'), profile_residence_type=
gold_app.get('profile_residence_type'), profile_address=gold_app.
get('profile_address'), profile_education=gold_app.get(
'profile_education'), profile_college=(gold_app.get(
'profile_college') or {}).get('name'), job_name=gold_app.get(
'job_name'), job_tel=gold_app.get('job_tel'), job_bpjs=gold_app.get
('job_bpjs'), job_user_email=gold_app.get('job_user_email'),
job_type=gold_app.get('job_type'), job_industry=gold_app.get(
'job_industry'), job_department=gold_app.get('job_department'),
job_province=(gold_app.get('job_province') or {}).get('name'),
job_city=(gold_app.get('job_city') or {}).get('name'), job_district
=(gold_app.get('job_district') or {}).get('name'), job_address=
gold_app.get('job_address'), amount=amount, amount_net=amount_net,
interest_rate=interest_rate, term=gold_app.get('term'),
origin_due_at=origin_due_at, overdue_days=overdue_days, repay_at=
sub_bill.get('repay_at'), loan_success_times=loan_success_times,
arrived_at=datetime.now(), follow_up_date=datetime.now(),
promised_amount=promised_amount, promised_date=promised_date,
external_id=application_id, type=type, bill_id=bill_id, dpd1_entry=
datetime.now())
new_overdue = OverdueBill.create(**sub_overdue_bill)
logging.info('overdue application %s created', application_id)
Escalation.create(application=id, type=EscalationType.AUTOMATIC.value,
status=ApprovalStatus.APPROVED.value, current_cycle=0, escalate_to=1)
add_contact(application)
def add_contact(application):
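    """Import every phone number known for the applicant (apply info, extra
    phones, EC contacts, SMS/call logs, account-service and GoldenEye sources)
    into Contact, skipping numbers already on file, then forward the inserted
    and updated entries to the mongo contact importer.
    """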
logging.info('start add contact for application: %s', application.id)
contacts = Contact.filter(Contact.user_id == application.user_id)
existing_numbers = {contact.number for contact in contacts}
insert_contacts = list()
mon_insert_contact = {}
user_mobile_no = number_strip(application.user_mobile_no)
if user_mobile_no and user_mobile_no not in existing_numbers:
insert_contacts.append({'user_id': application.user_id, 'name':
application.user_name, 'number': user_mobile_no, 'relationship':
Relationship.APPLICANT.value, 'source': 'apply info',
'real_relationship': Relationship.APPLICANT.value})
existing_numbers.add(number_strip(application.user_mobile_no))
extra_phone = GoldenEye().get('/users/%s/extra-phone' % application.user_id
)
if not extra_phone.ok:
extra_phone = []
logging.error('get user %s extra contacts failed', application.user_id)
else:
extra_phone = extra_phone.json()['data']
if extra_phone:
for i in extra_phone:
number = number_strip(i['number'])[:64]
if not number:
continue
if number in existing_numbers:
continue
insert_contacts.append({'user_id': application.user_id, 'name':
application.user_name, 'number': number, 'relationship':
Relationship.APPLICANT.value, 'source': 'extra phone',
'real_relationship': Relationship.APPLICANT.value})
key = user_mobile_no, number, ContactType.A_EXTRA_PHONE.value
mon_insert_contact[key] = 1, 0, application.user_name
existing_numbers.add(number)
ec_contact = []
contact = json.loads(application.contact or '[]')
for i in contact:
if number_strip(i['mobile_no']
) not in existing_numbers and number_strip(i['mobile_no']):
ec_contact.append({'user_id': application.user_id, 'name': i[
'name'], 'number': number_strip(i['mobile_no']),
'relationship': Relationship.FAMILY.value, 'sub_relation':
SubRelation.EC.value, 'source': FamilyContactType.EC.value,
'real_relationship': Relationship.FAMILY.value})
key = user_mobile_no, number_strip(i['mobile_no']
), ContactType.F_EC.value
mon_insert_contact[key] = 1, 0, i['name']
existing_numbers.add(number_strip(i['mobile_no']))
if i['type'] != 1:
continue
if number_strip(i['tel_no']) not in existing_numbers and number_strip(i
['tel_no']):
ec_contact.append({'user_id': application.user_id, 'name': i[
'name'], 'number': number_strip(i['tel_no']),
'relationship': Relationship.FAMILY.value, 'sub_relation':
SubRelation.EC.value, 'source': FamilyContactType.EC.value,
'real_relationship': Relationship.FAMILY.value})
key = user_mobile_no, number_strip(i['tel_no']
), ContactType.F_EC.value
mon_insert_contact[key] = 1, 0, i['name']
existing_numbers.add(number_strip(i['tel_no']))
if ec_contact:
Contact.insert_many(ec_contact).execute()
if all((application.job_tel, number_strip(application.job_tel),
number_strip(application.job_tel) not in existing_numbers)):
insert_contacts.append({'user_id': application.user_id, 'name':
None, 'number': number_strip(application.job_tel),
'relationship': Relationship.COMPANY.value, 'source':
'basic info job_tel', 'real_relationship': Relationship.COMPANY
.value})
key = user_mobile_no, number_strip(application.job_tel
), ContactType.C_BASIC_INFO_JOB_TEL.value
mon_insert_contact[key] = 1, 0, None
existing_numbers.add(number_strip(application.job_tel))
sms_contacts = GoldenEye().get('/applications/%s/sms-contacts' %
application.external_id)
if not sms_contacts.ok:
sms_contacts = []
logging.info('get user %s sms contacts failed', application.external_id
)
else:
sms_contacts = sms_contacts.json()['data']
if sms_contacts:
for i in sms_contacts:
number = number_strip(i['number'])[:64]
if not number:
continue
if number in existing_numbers:
continue
insert_contacts.append({'user_id': application.user_id, 'name':
i['name'][:128], 'number': number, 'relationship':
Relationship.SUGGESTED.value, 'source': 'sms contacts',
'real_relationship': Relationship.SUGGESTED.value})
key = user_mobile_no, number, ContactType.S_SMS_CONTACTS.value
mon_insert_contact[key] = 1, 0, i['name'][:128]
existing_numbers.add(number)
if insert_contacts:
Contact.insert_many(insert_contacts).execute()
cf = GoldenEye().get('/applications/%s/call/frequency' % application.
external_id)
if not cf.ok:
call_frequency = []
logging.error('get application %s call frequency error',
application.external_id)
else:
call_frequency = cf.json()['data']
insert_contacts = []
fm = GoldenEye().get('/applications/%s/contact/family-member' %
application.external_id)
if not fm.ok:
family = []
logging.error('get application %s family-member info error',
application.external_id)
else:
family = fm.json()['data']
if family:
for i in family:
if not i.get('number'):
logging.info('family member %s' % str(i))
continue
number = number_strip(i['number'])[:64]
if not number:
continue
if number in existing_numbers:
continue
logging.info('family members: %s' % str(i))
insert_contacts.append({'user_id': application.user_id, 'name':
i['name'][:128], 'number': number, 'relationship':
Relationship.FAMILY.value, 'source': FamilyContactType.
CALLEC.value, 'total_count': i.get('total_count', 1),
'total_duration': i.get('total_duration', 0),
'real_relationship': Relationship.FAMILY.value})
key = user_mobile_no, number, ContactType.F_CALL_EC.value
mon_insert_contact[key] = i.get('total_count', 1), i.get(
'total_duration', 0), i['name'][:128]
existing_numbers.add(number)
mon_update_contact = {}
if call_frequency:
with db.atomic():
count = 1
for i in call_frequency:
number = number_strip(i['number'])[:64]
if not number:
continue
if number in existing_numbers:
                    Contact.update(total_count=i['total_count'],
                        total_duration=i['total_duration']).where(Contact.
                        number == number, Contact.user_id == application.
                        user_id).execute()
key = user_mobile_no, number
mon_update_contact[key] = i['total_count'], i[
'total_duration']
continue
if count < 6:
insert_contacts.append({'user_id': application.user_id,
'name': i['name'][:128], 'number': number,
'relationship': Relationship.FAMILY.value,
'total_count': i['total_count'], 'total_duration':
i['total_duration'], 'source': FamilyContactType.
CALLTOP5.value, 'real_relationship': Relationship.
FAMILY.value})
count += 1
key = user_mobile_no, number, ContactType.F_CALL_TOP5.value
mon_insert_contact[key] = i['total_count'], i[
'total_duration'], i['name'][:128]
else:
insert_contacts.append({'user_id': application.user_id,
'name': i['name'][:128], 'number': number,
'relationship': Relationship.SUGGESTED.value,
'total_count': i['total_count'], 'total_duration':
i['total_duration'], 'source': 'call frequency',
'real_relationship': Relationship.SUGGESTED.value})
key = (user_mobile_no, number, ContactType.
S_CALL_FREQUENCY.value)
mon_insert_contact[key] = i['total_count'], i[
'total_duration'], i['name'][:128]
existing_numbers.add(number)
if insert_contacts:
Contact.insert_many(insert_contacts).execute()
next_apply_list = AccountService().add_contact(application.user_id)
for next_apply in next_apply_list:
number = number_strip(str(next_apply))[:64]
if number and number not in existing_numbers:
Contact.create(user_id=application.user_id, name=application.
user_name, number=number, relationship=Relationship.
SUGGESTED.value, source='online profile phone',
real_relationship=Relationship.SUGGESTED.value)
key = (user_mobile_no, number, ContactType.
S_ONLINE_PROFILE_PHONE.value)
mon_insert_contact[key] = 1, 0, application.user_name
existing_numbers.add(number)
next_applicant = GoldenEye().get('/bomber/%s/dual_contact' %
application.user_id)
if not next_applicant.ok:
next_applicant = []
logging.error('get user %s dual_contact contacts failed' %
application.user_id)
else:
next_applicant = next_applicant.json()['data']
if next_applicant:
for i in next_applicant:
number = number_strip(str(i))[:64]
if number and number not in existing_numbers:
Contact.create(user_id=application.user_id, name=
application.user_name, number=number, relationship=
Relationship.APPLICANT.value, source='apply info',
real_relationship=Relationship.APPLICANT.value)
key = user_mobile_no, number, ContactType.A_APPLY_INFO.value
mon_insert_contact[key] = 1, 0, application.user_name
existing_numbers.add(number)
logging.info('get user %s dual_contact contacts success' %
application.user_id)
numbers = []
try:
numbers = AccountService().ktp_number(path_params={'user_id':
application.user_id})
except Exception as e:
logging.info('request ktp numbers failed %s' % str(e))
for n in numbers:
number = number_strip(str(n))[:64]
if number and number not in existing_numbers:
Contact.create(user_id=application.user_id, name=application.
user_name, number=number, relationship=Relationship.
APPLICANT.value, source='ktp number', real_relationship=
Relationship.APPLICANT.value)
key = user_mobile_no, number, ContactType.A_KTP_NUMBER.value
mon_insert_contact[key] = 1, 0, application.user_name
existing_numbers.add(number)
logging.info('get user %s dual_contact contacts success' %
application.user_id)
try:
ecs = GoldenEye().get('/applications/%s/contact/ec' % application.
external_id)
except Exception as e:
logging.info('request ec-member error: %s' % str(e))
try:
if not ecs.ok:
ec = []
logging.info('get application %s ec-member info error',
application.external_id)
else:
ec = ecs.json()['data']
if ec:
for e in ec:
number = number_strip(e['numbers'])[:64]
if not number:
continue
if number not in existing_numbers:
Contact.create(user_id=application.user_id, name=e[
'name'][:128], number=number, relationship=
Relationship.FAMILY.value, source=FamilyContactType
.CONTACTEC.value, real_relationship=Relationship.
FAMILY.value)
key = (user_mobile_no, number, ContactType.F_CONTACT_EC
.value)
mon_insert_contact[key] = 1, 0, e['name'][:128]
existing_numbers.add(number)
except Exception as e:
logging.info('add ec_member error:%s' % str(e))
try:
mn = GoldenEye().get('/applications/%s/contact/my_number' %
application.external_id)
except Exception as e:
logging.info('request my_number error: %s' % str(e))
try:
if not mn.ok:
my = []
logging.info('get application %s my_number info error',
application.external_id)
else:
my = mn.json()['data']
if my:
for m in my:
number = number_strip(m)[:64]
if not number:
continue
if number not in existing_numbers:
Contact.create(user_id=application.user_id, name=my[m][
:128], number=number, relationship=Relationship.
SUGGESTED.value, source='my number',
real_relationship=Relationship.SUGGESTED.value)
key = user_mobile_no, number, ContactType.S_MY_NUMBER.value
mon_insert_contact[key] = 1, 0, my[m][:128]
existing_numbers.add(number)
except Exception as e:
logging.info('add my_member error:%s' % str(e))
try:
cn = GoldenEye().get('/applications/%s/contact/company-number' %
application.external_id)
except Exception as e:
logging.info('request company-number error: %s' % str(e))
try:
if not cn.ok:
cn = []
logging.info('get application %s company_number info error',
application.external_id)
else:
cn = cn.json()['data']
if cn:
for c in cn:
number = c
if not number:
continue
if number not in existing_numbers:
Contact.create(user_id=application.user_id, name=cn[c][
:128], number=number, relationship=Relationship.
COMPANY.value, source='company', real_relationship=
Relationship.COMPANY.value)
key = user_mobile_no, number, ContactType.C_COMPANY.value
mon_insert_contact[key] = 1, 0, cn[c][:128]
existing_numbers.add(number)
except Exception as e:
logging.info('add company_member error:%s' % str(e))
try:
ol = AccountService().other_login_contact(userId=application.user_id)
except Exception as e:
logging.error('request other_login error: %s' % e)
ol = {}
try:
for o in ol:
number = number_strip(o)
if not number:
continue
if number not in existing_numbers:
Contact.create(user_id=application.user_id, name=ol[o][:128
], number=number, relationship=Relationship.SUGGESTED.
value, source='other_login', real_relationship=
Relationship.SUGGESTED.value)
key = user_mobile_no, number, ContactType.S_OTHER_LOGIN.value
mon_insert_contact[key] = 1, 0, ol[o][:128]
except Exception as e:
logging.error('add other_login number error:%s' % e)
logging.info('add contact for application %s finished', application.id)
if mon_insert_contact or mon_update_contact:
send_to_default_q(MessageAction.IMPORT_CONTACT_TO_MON, {
'user_mobile_no': user_mobile_no, 'insert_contact': str(
mon_insert_contact), 'update_contact': str(mon_update_contact),
'user_id': application.user_id, 'name': application.user_name})
<mask token>
def get_contact_from_mongo(number):
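    """Fetch the related numbers stored in mongo for a source number, keeping
    only rows whose source decodes to a known relation, and return them as
    plain dicts.
    """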
if not number:
return []
query = TotalContact.objects(src_number=number, source__in=TotalContact
.available()).order_by('source')
lst = []
for c in query:
relation = TotalContact.relationship(c.source)
if relation == -1:
continue
source = TotalContact.str_source(c.source)
if not source:
continue
lst.append({'related_number': c.dest_number, 'source': source,
'is_calc': c.is_calc, 'total_count': c.total_count,
'total_duration': c.total_duration, 'relation': relation,
'name': c.dest_name})
return lst
<mask token>
def check_key_not_none(payload, keys):
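    """Return True only if every listed key is present and not None in the
    payload; log and return False at the first missing key.
    """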
for key in keys:
if payload.get(key) is None:
logging.error('Missing args {}'.format(str(key)))
return False
return True
@action(MessageAction.BILL_PAID)
def bill_paid(payload, msg_id):
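    """Handle a repayment event: validate the payload, record a RepaymentLog
    entry against the responsible bomber, promote the number of the last
    connected call to repay priority, and drop an inbox note for the bomber.
    """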
validate = check_key_not_none(payload, ['external_id', 'late_fee_part',
'principal_part', 'paid_at', 'bill_sub_id', 'partner_bill_id'])
if not validate:
logging.error('payload key not fully pass in.')
return
external_id = payload['external_id']
late_fee_part = Decimal(payload['late_fee_part'])
principal_part = Decimal(payload['principal_part'])
paid_at = payload['paid_at']
partner_bill_id = payload['partner_bill_id']
logging.debug(
'application %s paid principal part %s, paid late fee part %s',
external_id, principal_part, late_fee_part)
application = Application.filter(Application.external_id == external_id
).order_by(-Application.created_at).first()
if not application:
logging.info('application %s paid, not found application', external_id)
return
sub_bill_id = payload['bill_sub_id']
overdue_bill = OverdueBillR.select().where(OverdueBillR.collection_id ==
application.id, OverdueBillR.sub_bill_id == sub_bill_id).first()
if (application.type == ApplicationType.CASH_LOAN_STAGING.value and not
overdue_bill):
logging.info('bill sub not in bomber %s', sub_bill_id)
return
with db.atomic():
repay_at = str_no_utc_datetime(payload['latest_repay_at'])
Application.update(repay_at=repay_at).where(Application.id ==
application.id).execute()
RepaymentLog.create(application=application.id, is_bombed=True,
current_bomber=application.latest_bomber_id, cycle=application.
cycle, principal_part=principal_part, late_fee_part=
late_fee_part, repay_at=paid_at, ptp_bomber=application.
ptp_bomber, latest_call=application.latest_call, periods=
overdue_bill.periods if overdue_bill else None, overdue_bill_id
=overdue_bill.id if overdue_bill else None, partner_bill_id=
partner_bill_id)
phone_status = PhoneStatus.CONNECTED.value
real_relationship = RealRelationship.user_values()
commit = CallActionCommit.NO.value
number = CallActions.select(CallActions.number).where(CallActions.
phone_status == phone_status, CallActions.real_relationship <<
real_relationship, CallActions.commit == commit, CallActions.
application == application.id).order_by(-CallActions.created_at
).first()
if number:
Contact.update(call_priority=PriorityStatus.REPAY.value).where(
Contact.user_id == application.user_id, Contact.
call_priority == PriorityStatus.LAST.value).execute()
Contact.update(call_priority=PriorityStatus.LAST.value).where(
Contact.user_id == application.user_id, Contact.number ==
number.number).execute()
if not application.latest_bomber_id:
return
Inbox.create(title='application %s,sub_bill_id %s repaid' % (
application.external_id, sub_bill_id), content=
'application %s,sub_bill_id %s repaid' % (application.
external_id, sub_bill_id), receiver=application.
latest_bomber_id or application.last_bomber_id, category=
InboxCategory.REPAID.value)
@action(MessageAction.BILL_RELIEF)
def bill_relief(payload, msg_id):
"""已废弃"""
bill = payload['head_bill']
repay_at = str_no_utc_datetime(bill['latest_repay_at'])
updated_row = Application.update(repay_at=repay_at).where(Application.
id == bill['external_id']).execute()
logging.info('application %s bill relief done', bill['external_id'])
return updated_row
<mask token>
@action(MessageAction.BOMBER_CALC_OVERDUE_DAYS_OVER)
def calc_overdue_days_over(payload, msg_id):
"""
Call by BOMBER_CALC_SUMMARY
:param payload:
:param msg_id:
:return:
"""
now = fn.NOW()
origin_diff_days = fn.DATEDIFF(now, Application.origin_due_at)
overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))
query = Application.update(overdue_days=overdue_days).where(Application
.status << [ApplicationStatus.PROCESSING.value, ApplicationStatus.
UNCLAIMED.value, ApplicationStatus.AB_TEST.value], Application.
overdue_days > 95, Application.type == ApplicationType.CASH_LOAN.value)
updated_rows_count = query.execute()
logging.info('calc overdue days done, updated count: %s',
updated_rows_count)
try:
calc_overdue_days_over_instalment()
except Exception as e:
logging.error('calc_overdue_days_over_instalment_error: %s' % str(e))
apps = Application.filter(Application.status << [ApplicationStatus.
UNCLAIMED.value, ApplicationStatus.PROCESSING.value,
ApplicationStatus.AB_TEST.value], Application.overdue_days > 95,
Application.promised_date.is_null(True) | (fn.DATE(Application.
promised_date) < datetime.today().date()))
ids = [i.id for i in apps]
for idx in range(0, len(ids), 100):
send_to_default_q(MessageAction.BOMBER_AUTOMATIC_ESCALATION, {
'application_list': ids[idx:idx + 100]})
send_to_default_q(MessageAction.UPDATE_OLD_LOAN_APPLICATION, {})
def calc_overdue_days_over_instalment():
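    """Instalment counterpart of calc_overdue_days_over: refresh overdue_days
    on sub-bills already past 95 days and write the per-application maximum
    back onto Application.
    """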
now = fn.NOW()
origin_diff_days = fn.DATEDIFF(now, OverdueBill.origin_due_at)
overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))
sub_bill_status_list = [ApplicationStatus.PROCESSING.value,
ApplicationStatus.UNCLAIMED.value, ApplicationStatus.AB_TEST.value]
for status in sub_bill_status_list:
query = OverdueBill.update(overdue_days=overdue_days).where(
OverdueBill.status == status, OverdueBill.overdue_days > 95)
updated_rows_count = query.execute()
logging.info(
'calc_overdue_days_over_instalment done,count:%s,status:%s' % (
updated_rows_count, status))
overdue_bills = OverdueBill.select(OverdueBill.collection_id,
OverdueBill.overdue_days).join(Application, JOIN_LEFT_OUTER, on
=OverdueBill.collection_id == Application.id).where(Application
.status == status, Application.type == ApplicationType.
CASH_LOAN_STAGING.value)
app_update = {}
for ob in overdue_bills:
if ob.collection_id not in app_update:
app_update[ob.collection_id] = ob.overdue_days
else:
ob_days = max(app_update[ob.collection_id], ob.overdue_days)
app_update[ob.collection_id] = ob_days
for aid, a_days in app_update.items():
q = Application.update(overdue_days=a_days).where(Application.
id == aid).execute()
logging.info('update instalment application done')
@action(MessageAction.BOMBER_CALC_OVERDUE_DAYS)
def calc_overdue_days(payload, msg_id):
"""
Call by BOMBER_CALC_SUMMARY
:param payload:
:param msg_id:
:return:
"""
now = fn.NOW()
origin_diff_days = fn.DATEDIFF(now, Application.origin_due_at)
overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))
query_unclaimed = Application.update(overdue_days=overdue_days).where(
Application.status == ApplicationStatus.UNCLAIMED.value,
Application.overdue_days <= 95, Application.type == ApplicationType
.CASH_LOAN.value)
updated_rows_count_unclaimed = query_unclaimed.execute()
logging.info('calc overdue days done, updated count: %s',
updated_rows_count_unclaimed)
query_processing = Application.update(overdue_days=overdue_days).where(
Application.status == ApplicationStatus.PROCESSING.value,
Application.overdue_days <= 95, Application.type == ApplicationType
.CASH_LOAN.value)
updated_rows_count_processing = query_processing.execute()
logging.info('calc overdue days done, updated count: %s',
updated_rows_count_processing)
query_test = Application.update(overdue_days=overdue_days).where(
Application.status == ApplicationStatus.AB_TEST.value, Application.
overdue_days <= 95, Application.type == ApplicationType.CASH_LOAN.value
)
updated_rows_count_test = query_test.execute()
logging.info('calc overdue days done, updated count: %s',
updated_rows_count_test)
calc_overdue_days_instalment()
apps = Application.select(Application.id).where(Application.status << [
ApplicationStatus.UNCLAIMED.value, ApplicationStatus.PROCESSING.
value, ApplicationStatus.AB_TEST.value], Application.overdue_days <=
95, Application.promised_date.is_null(True) | (fn.DATE(Application.
promised_date) < datetime.today().date()))
ids = [i.id for i in apps]
for idx in range(0, len(ids), 100):
send_to_default_q(MessageAction.BOMBER_AUTOMATIC_ESCALATION, {
'application_list': ids[idx:idx + 100]})
send_to_default_q(MessageAction.UPDATE_OLD_LOAN_APPLICATION, {})
Application.update(C1A_entry=datetime.now()).where(Application.status <<
[ApplicationStatus.UNCLAIMED.value, ApplicationStatus.PROCESSING.
value, ApplicationStatus.AB_TEST.value], Application.overdue_days == 4
).execute()
def calc_overdue_days_instalment():
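    """Refresh overdue_days (<= 95) on instalment sub-bills and propagate the
    per-application maximum, ignoring sub-bills repaid before the first day of
    the current month.
    """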
now = fn.NOW()
origin_diff_days = fn.DATEDIFF(now, OverdueBill.origin_due_at)
overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))
sub_bill_status_list = [ApplicationStatus.PROCESSING.value,
ApplicationStatus.UNCLAIMED.value, ApplicationStatus.AB_TEST.value]
today_now_time = datetime.now()
month_first_day = today_now_time.replace(day=1, hour=1, minute=30,
second=0, microsecond=0)
for status in sub_bill_status_list:
query = OverdueBill.update(overdue_days=overdue_days).where(
OverdueBill.status == status, OverdueBill.overdue_days <= 95)
updated_rows_count = query.execute()
logging.info('calc_overdue_days_instalment done,count:%s,status:%s' %
(updated_rows_count, status))
overdue_bills = OverdueBill.select(OverdueBill.status, OverdueBill.
created_at, OverdueBill.collection_id, OverdueBill.overdue_days
).join(Application, JOIN_LEFT_OUTER, on=OverdueBill.
collection_id == Application.id).where(Application.status ==
status, Application.type == ApplicationType.CASH_LOAN_STAGING.value
)
app_update = {}
for ob in overdue_bills:
if (ob.status == ApplicationStatus.REPAID.value and ob.
created_at < month_first_day):
continue
if ob.collection_id not in app_update:
app_update[ob.collection_id] = ob.overdue_days
else:
ob_days = max(app_update[ob.collection_id], ob.overdue_days)
app_update[ob.collection_id] = ob_days
for aid, a_days in app_update.items():
q = Application.update(overdue_days=a_days).where(Application.
id == aid).execute()
logging.info('update instalment application done')
@action(MessageAction.BOMBER_AUTOMATIC_ESCALATION)
def automatic_escalation(payload, msg_id):
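    """Escalate the given applications into the cycle implied by their
    overdue_days: close the open dispatch history, write an approved
    Escalation record, mark DispatchApp abnormal and reset ownership plus the
    cycle entry timestamp.
    """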
app_ids = payload.get('application_list', [])
if not app_ids:
return
apps = Application.select().where(Application.id.in_(app_ids),
Application.status != ApplicationStatus.REPAID.value)
for a in apps:
new_cycle = application_entry_different_calculations(a)
if a.overdue_days < 90:
logging.info(
'automatic_escalation_bomber_app_id:{},new_cycle:{},cycle:{},overdue_days:{}'
.format(a.id, new_cycle, a.cycle, a.overdue_days))
if new_cycle > a.cycle:
with db.atomic():
if a.latest_bomber_id or a.cycle in (Cycle.C1A.value, Cycle
.C1B.value):
bomber_id = (a.latest_bomber_id if a.latest_bomber_id else
a.cycle)
DispatchAppHistory.update(out_at=datetime.now(),
out_overdue_days=a.overdue_days).where(
DispatchAppHistory.application == a.id,
DispatchAppHistory.bomber_id == bomber_id).execute()
Escalation.create(application=a.id, type=EscalationType.
AUTOMATIC.value, status=ApprovalStatus.APPROVED.value,
current_cycle=a.cycle, escalate_to=new_cycle,
current_bomber_id=a.latest_bomber)
dis_app_update = DispatchApp.update(status=DisAppStatus.
ABNORMAL.value).where(DispatchApp.application == a.id)
dis_app_update.execute()
a.cycle = new_cycle
a.last_bomber = a.latest_bomber
a.status = ApplicationStatus.UNCLAIMED.value
a.latest_bomber = None
a.ptp_bomber = None
a.latest_call = None
a.called_times = 0
if new_cycle == Cycle.C1B.value:
a.C1B_entry = datetime.now()
elif new_cycle == Cycle.C2.value:
a.C2_entry = datetime.now()
elif new_cycle == Cycle.C3.value:
a.C3_entry = datetime.now()
a.save()
logging.info('automatic escalation done')
def application_entry_different_calculations(app):
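    """Return the cycle an application should sit in based on overdue_days
    (1-10 -> 1, 11-30 -> 2, 31-60 -> 3, 61-90 -> 4, 91+ -> 5); falls back to
    the current cycle when no range matches.
    """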
conf = {(1): [1, 10], (2): [11, 30], (3): [31, 60], (4): [61, 90], (5):
[91, 999999]}
for new_cycle, scopes in conf.items():
if scopes[0] <= app.overdue_days <= scopes[1]:
return new_cycle
return app.cycle
@action(MessageAction.BOMBER_CALC_SUMMARY)
def cron_summary(payload, msg_id):
"""已废弃"""
employees = Bomber.select(Bomber, Role).join(Role)
summary = {i.id: {'cycle': i.role.cycle, 'claimed': 0, 'completed': 0,
'cleared': 0, 'escalated': 0, 'transferred': 0, 'promised': 0,
'amount_recovered': Decimal(0), 'calls_made': 0, 'calls_connected':
0, 'sms_sent': 0} for i in employees}
now_date = date.today()
cal_date = now_date - timedelta(days=1)
claimed = Application.select(Application.latest_bomber, fn.COUNT(
Application.id).alias('claimed')).where(fn.DATE(Application.
claimed_at) == cal_date, Application.status << [ApplicationStatus.
PROCESSING.value, ApplicationStatus.REPAID.value], Application.
latest_bomber.is_null(False)).group_by(Application.latest_bomber)
cleared = Application.select(Application.latest_bomber, fn.COUNT(
Application.id).alias('cleared')).where(fn.DATE(Application.
finished_at) == cal_date, Application.status == ApplicationStatus.
REPAID.value, Application.latest_bomber.is_null(False)).group_by(
Application.latest_bomber)
completed = Application.select(Application.latest_bomber, fn.COUNT(
Application.id).alias('completed')).where(Application.
latest_bombing_time.is_null(False), fn.DATE(Application.
latest_bombing_time) == cal_date, Application.latest_bomber.is_null
(False)).group_by(Application.latest_bomber)
escalated = Escalation.select(Escalation.current_bomber, fn.COUNT(
Escalation.id).alias('escalated')).where(fn.DATE(Escalation.
created_at) == cal_date, Escalation.type == EscalationType.
AUTOMATIC.value, Escalation.current_bomber.is_null(False),
Escalation.status == ApprovalStatus.APPROVED.value).group_by(Escalation
.current_bomber)
transferred = Transfer.select(Transfer.operator, fn.COUNT(Transfer.id).
alias('transferred')).where(fn.DATE(Transfer.reviewed_at) ==
cal_date, Transfer.status == ApprovalStatus.APPROVED.value).group_by(
Transfer.operator)
promised = BombingHistory.select(BombingHistory.bomber, fn.COUNT(
BombingHistory.id).alias('promised')).where(fn.DATE(BombingHistory.
created_at) == cal_date, BombingHistory.result == BombingResult.
HAS_PROGRESS.value).group_by(BombingHistory.bomber)
amount_recovered = RepaymentLog.select(RepaymentLog.current_bomber, fn.
SUM(RepaymentLog.principal_part).alias('principal_part'), fn.SUM(
RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(
RepaymentLog.repay_at) == cal_date, RepaymentLog.is_bombed == True,
RepaymentLog.current_bomber.is_null(False)).group_by(RepaymentLog.
current_bomber)
calls_made = CallLog.select(CallLog.user_id, fn.COUNT(CallLog.record_id
).alias('calls_made')).where(fn.DATE(CallLog.time_start) ==
cal_date, CallLog.system_type == '1').group_by(CallLog.user_id)
calls_connected = CallLog.select(CallLog.user_id, fn.COUNT(CallLog.
record_id).alias('calls_connected')).where(fn.DATE(CallLog.
time_start) == cal_date, CallLog.duration > 10, CallLog.system_type ==
'1').group_by(CallLog.user_id)
sms_sent = ConnectHistory.select(ConnectHistory.operator, fn.COUNT(
ConnectHistory.id).alias('sms_sent')).where(ConnectHistory.type.in_
(ConnectType.sms()), ConnectHistory.created_at >= cal_date,
ConnectHistory.created_at < now_date).group_by(ConnectHistory.operator)
for i in claimed:
summary[i.latest_bomber_id]['claimed'] += i.claimed
for i in completed:
summary[i.latest_bomber_id]['completed'] += i.completed
for i in cleared:
summary[i.latest_bomber_id]['cleared'] += i.cleared
for i in escalated:
summary[i.current_bomber_id]['escalated'] += i.escalated
for i in transferred:
summary[i.operator_id]['transferred'] += i.transferred
for i in promised:
summary[i.bomber_id]['promised'] += i.promised
for i in amount_recovered:
amount_recovered = i.principal_part + i.late_fee_part
summary[i.current_bomber_id]['amount_recovered'] += amount_recovered
for i in calls_made:
summary[int(i.user_id)]['calls_made'] += i.calls_made
for i in calls_connected:
summary[int(i.user_id)]['calls_connected'] += i.calls_connected
for i in sms_sent:
summary[i.operator_id]['sms_sent'] += i.sms_sent
insert_args = []
for bomber_id, data in summary.items():
insert_args.append({'bomber': bomber_id, 'cycle': data['cycle'],
'claimed': data['claimed'], 'completed': data['completed'],
'cleared': data['cleared'], 'escalated': data['escalated'],
'transferred': data['transferred'], 'promised': data['promised'
], 'amount_recovered': data['amount_recovered'], 'calls_made':
data['calls_made'], 'calls_connected': data['calls_connected'],
'sms_sent': data['sms_sent'], 'date': cal_date})
if insert_args:
Summary.insert_many(insert_args).execute()
cycle_args = []
escalated_in = Escalation.select(Escalation.escalate_to, fn.COUNT(
Escalation.id).alias('escalated_in')).where(Escalation.status ==
ApprovalStatus.APPROVED.value, fn.DATE(Escalation.created_at) ==
cal_date).group_by(Escalation.escalate_to)
for i in escalated_in:
cycle_args.append({'cycle': i.escalate_to, 'escalated_in': i.
escalated_in, 'date': cal_date})
amount_recovered_total = RepaymentLog.select(RepaymentLog.cycle, fn.SUM
(RepaymentLog.principal_part).alias('principal_part'), fn.SUM(
RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(
RepaymentLog.repay_at) == cal_date).group_by(RepaymentLog.cycle)
for i in amount_recovered_total:
amount_recovered_total = i.principal_part + i.late_fee_part
cycle_args.append({'cycle': i.cycle, 'amount_recovered_total':
amount_recovered_total, 'date': cal_date})
if cycle_args:
Summary.insert_many(cycle_args).execute()
logging.info('cal summary done')
send_to_default_q(MessageAction.BOMBER_CALC_OVERDUE_DAYS, {})
@action(MessageAction.BOMBER_CALC_SUMMARY2)
def cron_summary2(payload, msg_id):
"""已废弃,定时任务还在执行,具体情况待确定"""
cal_date = date.today() - timedelta(days=1)
employees = Bomber.select(Bomber, Role).join(Role)
auto_call_actions = AutoCallActions.select(AutoCallActions.bomber,
AutoCallActions.result, fn.COUNT(AutoCallActions.id).alias('count')
).where(fn.DATE(AutoCallActions.created_at) == cal_date)
amount_recovered = RepaymentLog.select(RepaymentLog.current_bomber, fn.
SUM(RepaymentLog.principal_part).alias('principal_part'), fn.SUM(
RepaymentLog.late_fee_part).alias('late_fee_part')).where(fn.DATE(
RepaymentLog.repay_at) == cal_date, RepaymentLog.current_bomber.
is_null(False), RepaymentLog.is_bombed == True)
cleared = Application.select(Application.latest_bomber, fn.COUNT(
Application.id).alias('cleared')).where(fn.DATE(Application.
finished_at) == cal_date, Application.status == ApplicationStatus.
REPAID.value, Application.latest_bomber.is_null(False))
auto_call_actions = auto_call_actions.group_by(AutoCallActions.bomber,
AutoCallActions.result)
amount_recovered = amount_recovered.group_by(RepaymentLog.current_bomber)
cleared = cleared.group_by(Application.latest_bomber)
summary = {e.id: {'cycle': e.role.cycle, 'answered_calls': 0, 'ptp': 0,
'follow_up': 0, 'not_useful': 0, 'cleared': 0, 'amount_recovered':
0} for e in employees}
for a in auto_call_actions:
summary[a.bomber_id]['answered_calls'] += a.count
if a.result == AutoCallResult.PTP.value:
summary[a.bomber_id]['ptp'] += a.count
if a.result == AutoCallResult.FOLLOW_UP.value:
summary[a.bomber_id]['follow_up'] += a.count
if a.result == AutoCallResult.NOT_USEFUL.value:
summary[a.bomber_id]['not_useful'] += a.count
for i in amount_recovered:
amount_recovered = i.principal_part + i.late_fee_part
summary[i.current_bomber_id]['amount_recovered'] += amount_recovered
for i in cleared:
summary[i.latest_bomber_id]['cleared'] += i.cleared
insert_args = []
for bomber_id, data in summary.items():
insert_args.append({'bomber': bomber_id, 'cycle': data['cycle'],
'answered_calls': data['answered_calls'], 'ptp': data['ptp'],
'follow_up': data['follow_up'], 'not_useful': data['not_useful'
], 'cleared': data['cleared'], 'amount_recovered': str(data[
'amount_recovered']), 'date': cal_date})
if insert_args:
Summary2.insert_many(insert_args).execute()
@action(MessageAction.BOMBER_SYNC_CONTACTS)
def sync_suggested_contacts(payload, msg_id):
""" suggested contacts sync """
applications = Application.select(Application.id, Application.user_id
).where(Application.status << [ApplicationStatus.UNCLAIMED.value,
ApplicationStatus.PROCESSING.value])
logging.debug('start sync contact')
for a in applications:
sync_contacts(a)
logging.info('contact sync finished')
@action(MessageAction.BOMBER_DISCOUNT_APPROVED)
def bomber_discount_approved(payload, msg_id):
app_id = payload['id']
msg_type = payload['msg_type']
discount_to = payload['discount_to']
effective_to = payload['effective_to']
application = Application.filter(Application.id == app_id).first()
if not application:
logging.error(
'discount approved msg send failed application %s not found',
app_id)
return
template = Template.select(Template.text, Template.app).where(Template.
type == ConnectType.AUTO_SMS.value, Template.id << Template.
get_auto_sms_tpl(msg_type), Template.app == application.app).first()
if not template:
logging.error('discount approved msg send failed template %s not found'
, msg_type)
return
promised_date = None
if application.promised_date:
promised_date = application.promised_date.strftime('%d-%m-%Y')
tpl_data = {'user_name': application.user_name, 'due_days': application
.overdue_days, 'app_name': application.app, 'phone': application.
user_mobile_no, 'cs_number': cs_number_conf.get(application.app,
'02150202889'), 'promised_date': promised_date, 'discount_to':
discount_to, 'effective_to': effective_to}
content = template.text.format(**tpl_data)
data_list = [{'receiver': '62' + application.user_mobile_no, 'content':
content, 'title': ''}]
send_sms(data_list, msg_type, application.app)
@action(MessageAction.BOMBER_AUTO_CALL_LIST)
def bomber_auto_call_list(payload, msg_id):
with db.atomic():
bomber_dispatch_app()
dispatch_instalment_app()
dis_apps = DispatchApp.select(DispatchApp.application).where(
DispatchApp.status == DisAppStatus.NORMAL.value)
c1_apps = Application.select(Application.id, Application.cycle,
Application.follow_up_date, Application.called_times).where(Application
.status.not_in([ApplicationStatus.REPAID.value, ApplicationStatus.
AB_TEST.value]), Application.cycle == Cycle.C1A.value, Application.
is_rejected == False, Application.promised_date.is_null(True) | (fn
.DATE(Application.promised_date) < datetime.today().date())).order_by(
Application.overdue_days, Application.apply_at)
dis_apps_ids = [da.application_id for da in dis_apps]
insert_args = []
for a in c1_apps:
if a.id in dis_apps_ids:
continue
insert_args.append({'application': a.id, 'cycle': a.cycle,
'follow_up_date': a.follow_up_date, 'called_times': 1 if a.
called_times else 0, 'description': 'init'})
if not insert_args:
logging.error('no application need auto call')
with db.atomic():
AutoCallList.delete().execute()
for idx in range(0, len(insert_args), 100):
AutoCallList.insert_many(insert_args[idx:idx + 100]).execute()
for idx in range(0, len(insert_args), 100):
application_list = [i['application'] for i in insert_args[idx:idx +
100]]
send_to_default_q(MessageAction.BOMBER_AUTO_CALL_CONTACT, {
'application_list': application_list})
logging.info('bomber generate auto call list finished')
send_to_default_q(MessageAction.UPDATE_BOMBER_FOR_SPECIAL, {})
class ChangeBomberTool(object):
@staticmethod
def in_record(bomber_id, ids, bd):
subquery = Application.select(Application.amount, fn.NOW().alias(
'created_at'), fn.NOW().alias('updated_at'), Application.id.
alias('application_id'), R(str(bomber_id)).alias('bomber_id'),
fn.NOW().alias('entry_at'), R('null').alias('partner_id'), SQL(
'DATE_ADD(CURDATE(),INTERVAL 14 DAY)').alias(
'expected_out_time'), Application.overdue_days.alias(
'entry_overdue_days')).where(Application.status !=
ApplicationStatus.REPAID.value, Application.id << ids)
Application.update(latest_bomber=bomber_id).where(Application.id.
in_(ids)).execute()
application_list = list(subquery)
for idx in range(0, len(application_list), 1000):
applications = application_list[idx:idx + 1000]
insert_args = list(map(partial(lambda_result, dct=bd),
applications))
DispatchAppHistory.insert_many(insert_args).execute()
@staticmethod
def out_record(a, bd):
_id = str(a.id)
DispatchAppHistory.update(out_at=datetime.now(), out_overdue_days=a
.overdue_days, out_principal_pending=a.amount - Decimal(bd[_id]
.get('principal_paid')), out_late_fee_pending=bd[_id].get(
'late_fee') - bd[_id].get('late_fee_paid')).where(
DispatchAppHistory.application == a.id, DispatchAppHistory.
bomber_id == a.latest_bomber_id).execute()
a.last_bomber = a.latest_bomber
a.latest_bomber = None
a.ptp_bomber = None
a.latest_call = None
a.called_times = 0
a.save()
@staticmethod
def classify(l, b):
if len(l) == 1:
return l[0]
_l = filter(lambda x: x['bomber'] != b, l)
return min(_l, key=lambda x: len(x['ids']))
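# Sketch of the load-balancing rule in `classify` (hypothetical buckets):
# the current bomber `b` is excluded and the candidate holding the fewest
# applications wins.
#
#     >>> buckets = [{'bomber': 1, 'ids': [10, 11]},
#     ...            {'bomber': 2, 'ids': [12]}]
#     >>> ChangeBomberTool.classify(buckets, 1)
#     {'bomber': 2, 'ids': [12]}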
def bomber_dispatch_app():
try:
c1a_dispatch_app()
except Exception as e:
logging.error('c1a_dispatch_app error:%s' % str(e))
    cycle = {1: 10, 2: 30, 3: 60, 4: 90}
apps = Application.select().where(fn.DATE(Application.C2_entry) == date
.today(), Application.type == ApplicationType.CASH_LOAN.value)
partners = Partner.select().where(Partner.status == PartnerStatus.
NORMAL.value, Partner.cycle == Cycle.C2.value)
apps_ids = [a.id for a in apps]
dispatch_inserts = []
start_index = 0
apps_length = len(apps_ids)
logging.warning('apps length %s' % str(apps_length))
for p in partners:
bombers = Bomber.select().where(Bomber.partner == p.id, Bomber.
status != BomberStatus.OUTER_LEADER.value, Bomber.is_del == 0)
gen = CycleIter([b.id for b in bombers])
existing_list = []
end_index = start_index + int(apps_length * p.app_percentage)
        logging.info('partner end index %s' % str(end_index))
if not apps_ids[start_index:end_index]:
continue
bills = BillService().bill_list(application_ids=apps_ids[
start_index:end_index])
bill_dict = {bill['application_id']: bill for bill in bills}
for a_id in apps_ids[start_index:end_index]:
bomber = average_gen(gen, existing_list)
q = DispatchApp.delete().where(DispatchApp.application == a_id
).execute()
dispatch_inserts.append({'application': a_id, 'bomber': bomber,
'partner': p.id})
application = Application.select().where(Application.id == a_id
).first()
application.latest_bomber = bomber
application.status = ApplicationStatus.AB_TEST.value
application.ptp_bomber = None
application.save()
day_next_cycle = cycle.get(application.cycle
) - application.overdue_days
DispatchAppHistory.create(application=a_id, partner_id=p.id,
bomber_id=bomber, entry_at=datetime.now(),
entry_overdue_days=application.overdue_days,
entry_principal_pending=application.amount - Decimal(
bill_dict[a_id].get('principal_paid')),
entry_late_fee_pending=Decimal(bill_dict[a_id].get(
'late_fee')) - Decimal(bill_dict[a_id].get('late_fee_paid')
), expected_out_time=date.today() + timedelta(days=
day_next_cycle))
start_index = end_index
with db.atomic():
for idx in range(0, len(dispatch_inserts), 100):
DispatchApp.insert_many(dispatch_inserts[idx:idx + 100]).execute()
config = SystemConfig.prefetch(SCI.AB_TEST_C2)
c2_bomber = config.get(SCI.AB_TEST_C2, SCI.AB_TEST_C2.default_value)
c2_bomber = get_cash_bomber(c2_bomber, Cycle.C2.value)
c2 = apps_ids[start_index:]
if c2:
bills = BillService().bill_list(application_ids=c2)
else:
bills = []
bill_dict = {bill['application_id']: bill for bill in bills}
    logging.info('c2 AB_test length: %s' % str(len(c2)))
gen = CycleIter(c2_bomber)
existing_list = []
for c in c2:
bomber = average_gen(gen, existing_list)
application = Application.filter(Application.id == c).first()
application.status = ApplicationStatus.AB_TEST.value
application.latest_bomber = bomber
application.ptp_bomber = None
application.save()
day_next_cycle = 46 - application.overdue_days
DispatchAppHistory.create(application=c, bomber_id=bomber, entry_at
=datetime.now(), entry_overdue_days=application.overdue_days,
entry_principal_pending=application.amount - bill_dict[c].get(
'principal_paid', 0), entry_late_fee_pending=bill_dict[c].get(
'late_fee', 0) - bill_dict[c].get('late_fee_paid', 0),
expected_out_time=date.today() + timedelta(days=day_next_cycle))
ab_test_other()
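# day_next_cycle above is the number of days an application has left in its
# current cycle: the upper overdue-day bound per cycle ({1: 10, 2: 30,
# 3: 60, 4: 90}) minus the current overdue_days. For example, a C2 case
# (cycle 3) entering at 40 days overdue is expected out in 60 - 40 = 20 days.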
def c1a_dispatch_app():
today = datetime.today().date()
tomorrow = today + timedelta(days=1)
c1a_apps = Application.select().where(Application.status << [
ApplicationStatus.UNCLAIMED.value, ApplicationStatus.PROCESSING.
value], Application.dpd1_entry >= today, Application.dpd1_entry <
tomorrow, Application.type == ApplicationType.CASH_LOAN.value)
all_aids = [a.id for a in c1a_apps]
partners = Partner.select().where(Partner.status == PartnerStatus.
NORMAL.value, Partner.cycle == Cycle.C1A.value)
end = 0
for p in partners:
bombers = Bomber.select().where(Bomber.partner == p.id, Bomber.
is_del == 0)
start = end
end += int(len(all_aids) * p.app_percentage)
aids = all_aids[start:end]
bids = [b.id for b in bombers]
if not bids or not aids:
continue
average_number = get_average_number(len(aids), len(bids))
p_end = 0
for i, bid in enumerate(bids):
p_start = p_end
p_end += average_number[i]
b_aids = aids[p_start:p_end]
with db.atomic():
q = Application.update(latest_bomber=bid, status=
ApplicationStatus.AB_TEST.value).where(Application.id <<
b_aids).execute()
params = {'cycle': Cycle.C1A.value, 'dest_partner_id': p.id,
'application_ids': b_aids, 'dest_bomber_id': bid}
new_in_record(**params)
try:
dispatch_inserts = []
for aid in b_aids:
dispatch_inserts.append({'application': aid, 'bomber':
bid, 'partner': p.id, 'status': DisAppStatus.NORMAL
.value})
if dispatch_inserts:
q = DispatchApp.insert_many(dispatch_inserts).execute()
except Exception as e:
                logging.error('c1a dispatch: failed writing dispatch_app, error: %s' % str(e))
def get_cash_bomber(bids, cycle):
cash_bombers = Bomber.select().where(Bomber.id << bids, Bomber.is_del ==
0, Bomber.instalment != cycle)
cash_bids = [b.id for b in cash_bombers]
return cash_bids
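# get_cash_bomber narrows a candidate list to active (is_del == 0) bombers
# whose `instalment` marker differs from the given cycle, i.e. bombers not
# reserved for instalment collection in that cycle.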
def out_and_in_record_instalment(**kwargs):
if not kwargs.get('application_ids'):
return
out_q = DispatchAppHistory.update(out_at=fn.NOW()).where(
DispatchAppHistory.application << kwargs['application_ids'],
DispatchAppHistory.out_at.is_null(True)).execute()
    cycle_period = {1: '10', 2: '30', 3: '60', 4: '90'}
period = cycle_period.get(kwargs['cycle'], '90 + t1.overdue_days')
kwargs['dest_partner_id'] = kwargs.get('dest_partner_id') or 'null'
subquery = Application.select(Application.amount, fn.NOW().alias(
'created_at'), fn.NOW().alias('updated_at'), Application.id.alias(
'application_id'), R(str(kwargs['dest_bomber_id'])).alias(
'bomber_id'), fn.NOW().alias('entry_at'), Application.overdue_days.
alias('entry_overdue_days'), R(str(kwargs['dest_partner_id'])).
alias('partner_id'), SQL(
'DATE_ADD(CURDATE(),INTERVAL (%s - t1.overdue_days) DAY)' % period)
.alias('expected_out_time')).where(Application.status !=
ApplicationStatus.REPAID.value, Application.id << kwargs[
'application_ids'])
application_list = list(subquery)
for idx in range(0, len(application_list), 50):
applications = application_list[idx:idx + 50]
app_ids = [i.application_id for i in applications]
overdue_bills = OverdueBill.select().where(OverdueBill.
collection_id << app_ids)
sub_bill_ids = [ob.sub_bill_id for ob in overdue_bills]
bill_list = BillService().sub_bill_list(bill_sub_ids=sub_bill_ids)
insert_args = lambad_instalment_result(bill_list, applications)
if not insert_args:
continue
DispatchAppHistory.insert_many(insert_args).execute()
def c1b_dispatch_in_record(**kwargs):
app_ids = kwargs.get('apps')
partner_id = kwargs.get('partner_id', 'null')
bill_dict = kwargs.get('bill_dict')
period = kwargs.get('period')
bomber_id = kwargs.get('bomber_id')
if not all([app_ids, partner_id, bill_dict, period]):
return False
bill_dict = {str(k): v for k, v in bill_dict.items()}
subquery = Application.select(Application.amount, fn.NOW().alias(
'created_at'), fn.NOW().alias('updated_at'), Application.id.alias(
'application_id'), R(str(bomber_id)).alias('bomber_id'), fn.NOW().
alias('entry_at'), Application.overdue_days.alias(
'entry_overdue_days'), R(str(partner_id)).alias('partner_id'), SQL(
'DATE_ADD(CURDATE(),INTERVAL (%s - t1.overdue_days) DAY)' % period)
.alias('expected_out_time')).where(Application.id << app_ids)
application_list = list(subquery)
for idx in range(0, len(application_list), 1000):
applications = application_list[idx:idx + 1000]
insert_args = list(map(partial(lambda_result, dct=bill_dict),
applications))
DispatchAppHistory.insert_many(insert_args).execute()
@action(MessageAction.BOMBER_AUTO_CALL_CONTACT)
def bomber_auto_call_contact(payload, msg_id):
application_list = payload['application_list']
applications = []
for app_id in application_list:
applications.append(Application.filter(Application.id == app_id).
first())
with db.atomic():
for application in applications:
cycle = application.cycle
contacts = Contact.select().where(Contact.user_id ==
application.user_id, Contact.latest_status.not_in(
ContactStatus.no_use())).order_by(-Contact.useful, Contact.
relationship, -Contact.total_duration, -Contact.total_count)
level1 = []
level2 = []
level3 = []
level = []
for c in contacts:
if c.relationship == Relationship.APPLICANT.value:
level.append(c)
elif c.relationship == Relationship.FAMILY.value:
level1.append(c)
elif c.relationship == Relationship.COMPANY.value:
level2.append(c)
elif c.relationship == Relationship.SUGGESTED.value:
level3.append(c)
contacts = level + level2 + level1 + level3
numbers = []
fc_count = 0
app_calls = []
need_verify = False
for eac_contact in contacts:
if (eac_contact.relationship == Relationship.FAMILY.value and
eac_contact.useful == ContactsUseful.NONE.value):
need_verify = True
break
if need_verify:
logging.info('Found contact need update. app id {}'.format(
str(application.id)))
app_calls = AuditService().phone_invalid(cat=Relationship(1
).name, application_id=application.external_id)
call_history = True
c1b_family_dict = defaultdict(list)
for c in contacts:
if c.relationship == Relationship.COMPANY.value:
if cycle == Cycle.C1A.value:
call_history = check_call_history(application)
break
if cycle == Cycle.C1B.value:
if (c.source != CompanyContactType.
BASIC_INFO_JOB_TEL.value):
continue
if c.relationship == Relationship.FAMILY.value:
if cycle == Cycle.C1A.value:
call_history = check_call_history(application)
break
if c.useful == ContactsUseful.NONE.value:
c.useful = check_valid_phone(app_calls, c)
c.save()
if c.useful == ContactsUseful.INVALID.value:
logging.info('Found invalid contact. {}'.format(str
(c.id)))
continue
if cycle == Cycle.C1B.value:
c1b_family_dict[c.source].append(c.number)
continue
if c.relationship == Relationship.SUGGESTED.value:
if cycle not in (Cycle.C2.value, Cycle.C3.value):
break
if cycle == Cycle.C2.value and fc_count > 10:
break
if cycle == Cycle.C3.value and fc_count > 20:
break
fc_count += 1
numbers.append(c.number)
if len(numbers) == 0 or not call_history:
                src_contact = Contact.select().where(Contact.user_id ==
                    application.user_id, Contact.source.in_(
                    FamilyContactType.c1a_order()))
c1a_family_dict = defaultdict(list)
for e in src_contact:
c1a_family_dict[e.source].append(e.number)
for call_type in FamilyContactType.c1a_order():
numbers.extend(c1a_family_dict[call_type])
if cycle == Cycle.C1B.value:
for call_type in FamilyContactType.c1b_order():
numbers.extend(c1b_family_dict[call_type])
numbers = list(set(numbers))
update_query = AutoCallList.update(numbers=','.join(numbers)
).where(AutoCallList.application == application.id)
update_query.execute()
def check_call_history(application):
app_create_at = application.created_at + timedelta(days=4)
if datetime.today().date() > app_create_at.date():
call_actions = CallActions.select().where(CallActions.type == 0,
CallActions.application == application.id, CallActions.
created_at > datetime.now() - timedelta(days=5))
for call in call_actions:
if call.phone_status == PhoneStatus.CONNECTED.value:
return True
return False
return True
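# check_call_history: applications younger than 4 days always pass; older
# ones must show at least one connected outbound call (type 0) within the
# last 5 days, otherwise the caller falls back to the wider family-number
# list.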
@action(MessageAction.BOMBER_SCAVENGER)
def scavenger(payload, msg_id):
scavenger_time = -60
scavenger = SystemConfig.select().where(SystemConfig.key ==
'SCAVENGER_TIME').first()
if scavenger and scavenger.value.isdigit():
scavenger_time = -int(scavenger.value)
update_auto_call_list = AutoCallList.update(status=AutoListStatus.
PENDING.value, description='scavenger').where(AutoCallList.status ==
AutoListStatus.PROCESSING.value, AutoCallList.updated_at < datetime
.now() + timedelta(minutes=scavenger_time))
count = update_auto_call_list.execute()
logging.info('scavenger processed %s application', count)
mail_box_scavenger_time = -30
mail_box_scavenger = SystemConfig.select().where(SystemConfig.key ==
'MAIL_BOX_SCAVENGER_TIME').first()
if mail_box_scavenger and mail_box_scavenger.value.isdigit():
mail_box_scavenger_time = -int(mail_box_scavenger.value)
update_mail_box_call_list = AutoCallList.update(status=AutoListStatus.
PENDING.value).where(AutoCallList.status == AutoListStatus.MAILBOX.
value, AutoCallList.updated_at < datetime.now() + timedelta(minutes
=mail_box_scavenger_time))
mail_box_count = update_mail_box_call_list.execute()
logging.info('scavenger update mail box %s', mail_box_count)
update_auto_ivr = AutoIVR.update(status=AutoIVRStatus.AVAILABLE.value
).where(AutoIVR.status == AutoIVRStatus.PROCESSING.value, AutoIVR.
updated_at < datetime.now() + timedelta(minutes=-30))
ivr_result = update_auto_ivr.execute()
logging.info('scavenger update %s ivr' % ivr_result)
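# The recycle windows default to 60 / 30 / 30 minutes for auto-call,
# mailbox and IVR entries respectively; the first two can be tuned at
# runtime via the SCAVENGER_TIME and MAIL_BOX_SCAVENGER_TIME keys in
# system_config.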
@action(MessageAction.REPORT_BOMBER_COLLECTION)
def report_bomber_collection(payload, msg_id):
start_date = ReportCollection.select(fn.MAX(ReportCollection.apply_date)
).scalar()
now = datetime.now()
if start_date and str(start_date) == str(now)[:10]:
return
end_date = str(now + timedelta(days=1))[:10]
start_date = str(now)[:10]
dct = dict(zip(CycleList.sql_values(), CycleList.table_values()))
all_overdue_loan_sql1 = """
SELECT ba.cycle, COUNT(ba.id)
FROM bomber.auto_call_list ba
GROUP BY 1;
"""
s_data1 = readonly_db.execute_sql(all_overdue_loan_sql1).fetchall()
d1 = OperatedDict(s_data1)
all_overdue_loan_sql2 = """
SELECT ba.cycle, COUNT(ba.id)
FROM bomber.auto_call_list ba
WHERE DATE(ba.follow_up_date) > CURDATE()
AND ba.called_counts = 0
GROUP BY 1;
"""
s_data2 = readonly_db.execute_sql(all_overdue_loan_sql2).fetchall()
d2 = OperatedDict(s_data2)
overdue_loans_entered_into_predict_call_system_sql = """
SELECT ba.cycle, COUNT(ba.id)
FROM bomber.auto_call_list ba
WHERE ba.called_counts >= 1
GROUP BY 1;
"""
s_data3 = readonly_db.execute_sql(
overdue_loans_entered_into_predict_call_system_sql).fetchall()
d3 = OperatedDict(s_data3)
loans_completed_sql = """
SELECT ba.cycle, COUNT(DISTINCT ba.application_id)
FROM bomber.auto_call_actions ba
WHERE DATE(ba.created_at) = CURDATE()
GROUP BY 1;
"""
s_data4 = readonly_db.execute_sql(loans_completed_sql).fetchall()
d4 = OperatedDict(s_data4)
connected_calls_automatic_sql = """
SELECT ba.cycle, COUNT(ba.application_id)
FROM bomber.auto_call_actions ba
WHERE DATE(ba.created_at) = CURDATE()
GROUP BY 1;
"""
s_data5 = readonly_db.execute_sql(connected_calls_automatic_sql).fetchall()
d5 = OperatedDict(s_data5)
connected_calls_manual_sql = """
SELECT bb.cycle, COUNT(bb.id)
FROM bomber.bombing_history bb
WHERE DATE(bb.created_at) = curdate()
AND (bb.bomber_id < 150 OR bb.bomber_id > 200)
GROUP BY bb.cycle;
"""
s_data6 = readonly_db.execute_sql(connected_calls_manual_sql).fetchall()
d6 = OperatedDict(s_data6)
logging.info('Directly get data from database successfully.')
c1 = d1 - d2
c2 = d3
c3 = c2 / c1
c4 = d4
c5 = c4 / c2
c6 = d5
c7 = c6 / c4
c8 = d6
c9 = OperatedDict(get_agent())
c10 = (c6 + c8) / c9
try:
c11 = average_call_duration_team(start_date, end_date)
except AttributeError:
c11 = {}
lst = []
for i in range(1, 5):
lst.append({'apply_date': start_date, 'cycle': dct[i],
'all_overdue_loan': c1.get(i, 0),
'overdue_loans_entered_into_predict_call_system': c2.get(i, 0),
'of_overdue_loans_entered_into_predict_call_system': round(c3.
get(i, 0) * 100, 1), 'loans_completed': c4.get(i, 0),
'of_completed_loans_in_predict_call_system': round(c5.get(i, 0) *
100, 1), 'connected_calls_automatic': c6.get(i, 0),
'connected_calls_automatic_completed_loans': round(c7.get(i, 0),
1), 'connected_calls_manual': c8.get(i, 0), 'agent': c9.get(i,
0), 'average_calls_agent': round(c10.get(i, 0), 1),
'average_call_duration_team': round(c11.get(i, 0), 1)})
ReportCollection.insert_many(lst).execute()
logging.info('report_bomber_collection:Done!')
@action(MessageAction.BOMBER_AUTO_CALL_LIST_RECORD)
def bomber_auto_call_list_record(payload, msg_id):
"""记录一年的auto_call_list,删除前一天的数据,增加今天的数据"""
now = datetime.now()
if now > datetime.strptime('2020-02-01', '%Y-%m-%d'):
date_sql = """
SELECT DATE(created_at) FROM auto_call_list_record
GROUP BY DATE(created_at) limit 1
"""
del_date = db.execute_sql(date_sql).fetchone()[0]
del_sql = """
DELETE FROM auto_call_list_record WHERE date(created_at) = %s
"""
db.execute_sql(del_sql, [del_date])
sql = """
INSERT INTO auto_call_list_record
SELECT * FROM auto_call_list
"""
db.execute_sql(sql)
logging.info('bomber_auto_call_list_record done')
def new_out_record(**kwargs):
if not kwargs['application_ids']:
return
DispatchAppHistory.update(out_at=fn.NOW()).where(DispatchAppHistory.
bomber_id == kwargs['src_bomber_id'], DispatchAppHistory.
application << kwargs['application_ids'], DispatchAppHistory.out_at
.is_null(True)).execute()
if kwargs.get('month_dispatch'):
return
try:
Application.update(ptp_bomber=None).where(Application.id << kwargs[
'application_ids']).execute()
except Exception as e:
logging.error('new_out_record error:aids:%s,error:%s' % (kwargs[
'application_ids'], str(e)))
def end_old_application(old_app, paid=False):
if paid:
if old_app.status == OldLoanStatus.WAITING.value:
old_app.status = OldLoanStatus.PAID.value
old_app.save()
return
if old_app.status == OldLoanStatus.PROCESSING.value:
old_app.status = OldLoanStatus.PAID.value
old_app.save()
return old_app.application_id
end_date = old_app.end_date
now = datetime.now()
if now >= max(end_date, old_app.promised_date or now):
old_app.status = OldLoanStatus.FINISHED.value
old_app.save()
return old_app.application_id
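# Status transitions performed by end_old_application:
#   paid=True:  WAITING    -> PAID  (nothing returned)
#               PROCESSING -> PAID  (returns application_id)
#   paid=False: once `now` passes both end_date and any promised_date the
#               record becomes FINISHED (returns application_id).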
def in_record(**kwargs):
"""
:param kwargs: dist_partner_id, dist_bomber_id,
expected_out_time, application_ids
:return:
"""
kwargs['dist_partner_id'] = kwargs.get('dist_partner_id') or 'null'
subquery = Application.select(Application.amount, fn.NOW().alias(
'created_at'), fn.NOW().alias('updated_at'), Application.id.alias(
'application_id'), R(str(kwargs['dist_bomber_id'])).alias(
'bomber_id'), fn.NOW().alias('entry_at'), Application.overdue_days.
alias('entry_overdue_days'), R(str(kwargs['dist_partner_id'])).
alias('partner_id'), R('"{}"'.format(kwargs['expected_out_time'])).
alias('expected_out_time')).where(Application.status !=
ApplicationStatus.REPAID.value, Application.id << kwargs[
'application_ids'])
application_list = list(subquery)
for idx in range(0, len(application_list), 1000):
applications = application_list[idx:idx + 1000]
app_ids = [i.application_id for i in applications]
bill_list = BillService().bill_list(application_ids=app_ids)
bill_dict = {str(bill['application_id']): bill for bill in bill_list}
insert_args = list(map(partial(lambda_result, dct=bill_dict),
applications))
DispatchAppHistory.insert_many(insert_args).execute()
def start_old_application(old_app, cancel=False):
application_id = old_app.application_id
if cancel and old_app.status == OldLoanStatus.PAID.value:
now = datetime.now()
if old_app.start_date is None:
old_app.status = OldLoanStatus.WAITING.value
elif now >= max(old_app.end_date, old_app.promised_date or now):
old_app.status = OldLoanStatus.FINISHED.value
DispatchAppHistory.update(out_at=max(old_app.end_date, old_app.
promised_date or now)).where(DispatchAppHistory.bomber_id ==
old_app.bomber_id, DispatchAppHistory.application ==
application_id).execute()
else:
old_app.status = OldLoanStatus.PROCESSING.value
DispatchAppHistory.update(out_at=None).where(DispatchAppHistory
.bomber_id == old_app.bomber_id, DispatchAppHistory.
application == application_id).execute()
old_app.save()
return
application = Application.get_or_none(Application.id == application_id,
Application.status != ApplicationStatus.REPAID.value, Application.
overdue_days > 90, Application.promised_date.is_null(True) | (fn.
DATE(Application.promised_date) < datetime.today().date()))
if not application:
logging.error('Can not set old application %s to start collecting',
application_id)
return
if old_app.status in OldLoanStatus.no_available():
logging.info('%s has finished or paid', old_app.application_id)
return
config = SystemConfig.prefetch(SCI.OLD_APP_PERIOD)
sp = config.get(SCI.OLD_APP_PERIOD, SCI.OLD_APP_PERIOD.default_value)
old_app_bomber = SpecialBomber.OLD_APP_BOMBER.value
old_app.status = OldLoanStatus.PROCESSING.value
old_app.bomber_id = old_app_bomber
old_app.start_date = datetime.now()
if not old_app.end_date:
old_app.end_date = datetime.now() + timedelta(days=sp)
old_app.save()
in_record(dist_partner_id=None, dist_bomber_id=old_app_bomber,
application_ids=[old_app.application_id], expected_out_time=str(
old_app.end_date))
def run_member_sql(sql):
result = [0, 0]
try:
cursor = readonly_db.get_cursor()
cursor.execute(sql)
sql_result = cursor.fetchone()
if sql_result:
result = sql_result
except Exception as e:
        logging.error('run sql error: %s, %s' % (str(sql), str(e)))
return result
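# Usage sketch (hypothetical SQL; any statement whose first row carries two
# columns matches the [0, 0] fallback shape):
#
#     >>> total, cnt = run_member_sql(
#     ...     "select sum(amount), count(1) from bomber.application")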
def get_before_bomber(date_time):
begin_time = str(date_time - timedelta(days=7))
end_time = str(date_time)
old_sql = (
"""
select
sum(principal_pending+late_fee_pending+interest_pending) as amount
from
bill_java.overdue bb
where
created_at>'%s'
and created_at<date_add('%s',interval 1 day)
and overdue_days in (2,3)
"""
% (begin_time, begin_time))
old_data = run_one_sql(old_sql)
new_sql = (
"""
select
sum(principal_pending+late_fee_pending+interest_pending) as amount
from
bill_java.overdue bb
where
created_at> '%s'
and created_at<'%s'
and overdue_days=1;
"""
% (begin_time, end_time))
new_data = run_one_sql(new_sql)
dpd4_sql = (
"""
select
sum(principal_pending+late_fee_pending+interest_pending) as amount
from
bill_java.overdue bb
where
created_at>date_add('%s',interval 1 day)
and created_at< date_add('%s',interval 1 day)
and overdue_days=4;
"""
% (begin_time, end_time))
dpd4_data = run_one_sql(dpd4_sql)
dpd2_sql = (
"""
select
sum(principal_pending+late_fee_pending+interest_pending) as amount
from
bill_java.overdue bb
where
created_at>'%s'
and created_at< date_add('%s',interval 1 day)
and overdue_days in (2,3)
"""
% (end_time, end_time))
dpd2_data = run_one_sql(dpd2_sql)
all_money = old_data + new_data
repayment = all_money - dpd4_data - dpd2_data
pro = 0
if all_money:
pro = repayment / all_money * 100
RepaymentReport.create(time=begin_time, cycle=0, all_money=all_money,
proportion=pro, repayment=repayment)
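# The weekly recovery rate above reduces to
#     repayment  = (old_data + new_data) - dpd4_data - dpd2_data
#     proportion = repayment / all_money * 100
# e.g. 100 entering with 30 rolling to DPD4 and 10 still at DPD2/3 yields
# (100 - 30 - 10) / 100 * 100 = 60%.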
def get_c1a_into_rate(date_time):
begin_time = date_time - timedelta(days=1)
end_time = date_time
for is_first_loan in FIRSTLOAN.values():
begin_date = begin_time
end_date = end_time
all_money_sql = (
"""
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id as application_id,ba.C1A_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.C1A_entry >= '%s'
and ba.C1A_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
inner join bill_java.overdue o
on a.application_id=o.application_id
and date(a.cdt)=date(o.created_at)
"""
% (begin_date, end_date, is_first_loan))
all_money = run_one_sql(all_money_sql)
begin_date = date_time - timedelta(days=19)
repayment_sql = (
"""
select
sum(b.principal_part+b.late_fee_part) as paid_amount,
cdt
from
(select
br.principal_part, br.late_fee_part,
date(cdt) as cdt, br.repay_at, br.application_id
from (
select ba.id, ba.C1A_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.C1A_entry >= '%s'
and ba.C1A_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 1 and date(br.repay_at) >= date(a.cdt)
group by 4, 5) b
group by 2
"""
% (begin_date, end_date, is_first_loan))
repayment = run_all_sql(repayment_sql)
if not repayment:
return
RepaymentReportInto.create(time=end_date - timedelta(days=1), cycle
=Cycle.C1A.value, all_money=round(all_money, 3), proportion='0',
repayment=0, is_first_loan=is_first_loan, contain_out=
ContainOut.CONTAIN.value)
for d in repayment:
repay = d[0] / 1000000
report = RepaymentReportInto.filter(RepaymentReportInto.time ==
d[1], RepaymentReportInto.cycle == Cycle.C1A.value,
RepaymentReportInto.is_first_loan == is_first_loan).first()
if report:
report.repayment = round(repay, 3)
pro = repay / report.all_money * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
def get_c2_into_rate(date_time):
begin_time = date_time - timedelta(days=1)
end_time = date_time
for is_first_loan in FIRSTLOAN.values():
begin_date = begin_time
end_date = end_time
all_money_sql = (
"""
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id,c2_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c2_entry >= '%s'
and ba.c2_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
inner join bill_java.overdue o
on a.id=o.application_id
and date(a.cdt)=date(o.created_at)
"""
% (begin_date, end_date, is_first_loan))
all_money = run_one_sql(all_money_sql)
not_contain_sql = (
"""
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id,c2_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c2_entry >= '%s'
and ba.c2_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
and not exists(select 1 from bomber.dispatch_app_history bd
where bd.application_id=ba.id
and bd.partner_id=1)
) a
inner join bill_java.overdue o on a.id=o.application_id
and date(a.cdt)=date(o.created_at)
"""
% (begin_date, end_date, is_first_loan))
not_contain_money = run_one_sql(not_contain_sql)
begin_date = date_time - timedelta(days=37)
repayment_sql = (
"""
select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et
from
(select br.principal_part,br.late_fee_part,
date(a.c2_entry) as et, br.application_id, br.repay_at
from (
select ba.id, ba.c2_entry
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c2_entry >= '%s'
and ba.c2_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 3
group by 4, 5) b
group by 2
"""
% (begin_date, end_date, is_first_loan))
repayment = run_all_sql(repayment_sql)
not_contain_repay_sql = (
"""
select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et
from
(select br.principal_part,br.late_fee_part,
date(a.c2_entry) as et, br.application_id, br.repay_at
from (
select ba.id, ba.c2_entry
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c2_entry >= '%s'
and ba.c2_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
and not exists(select 1 from bomber.dispatch_app_history bd
where bd.application_id=ba.id
and bd.partner_id=1)
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 3
group by 4, 5) b
group by 2
"""
% (begin_date, end_date, is_first_loan))
not_contain_repay = run_all_sql(not_contain_repay_sql)
if not not_contain_money and repayment:
return
for i in ContainOut.values():
if i == ContainOut.NOT_CONTAIN.value:
RepaymentReportInto.create(time=end_date - timedelta(days=1
), cycle=Cycle.C2.value, all_money=round(
not_contain_money, 3), proportion='0', repayment=0,
is_first_loan=is_first_loan, contain_out=ContainOut.
NOT_CONTAIN.value)
for repay in not_contain_repay:
repay_money = Decimal(0)
if repay[0]:
repay_money = repay[0]
repay_money = repay_money / 1000000
report = RepaymentReportInto.filter(RepaymentReportInto
.time == repay[1], RepaymentReportInto.
is_first_loan == is_first_loan, RepaymentReportInto
.contain_out == i, RepaymentReportInto.cycle ==
Cycle.C2.value).first()
if report and report.all_money:
report.repayment = round(repay_money, 3)
pro = repay_money / report.all_money * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
elif i == ContainOut.CONTAIN.value:
RepaymentReportInto.create(time=end_date - timedelta(days=1
), cycle=Cycle.C2.value, all_money=round(all_money, 3),
proportion='0', repayment=0, is_first_loan=
is_first_loan, contain_out=ContainOut.CONTAIN.value)
for repay in repayment:
repay_money = 0
if repay[0]:
repay_money = repay[0] / 1000000
report = RepaymentReportInto.filter(RepaymentReportInto
.time == repay[1], RepaymentReportInto.
is_first_loan == is_first_loan, RepaymentReportInto
.contain_out == i, RepaymentReportInto.cycle ==
Cycle.C2.value).first()
if report and report.all_money:
report.repayment = round(repay_money, 3)
pro = repay_money / report.all_money * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
def get_c3_into_rate(date_time):
begin_time = date_time - timedelta(days=1)
end_time = date_time
for is_first_loan in FIRSTLOAN.values():
begin_date = begin_time
end_date = end_time
all_money_sql = (
"""
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id, ba.c3_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c3_entry >= '%s'
and ba.c3_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
inner join bill_java.overdue o on a.id=o.application_id
and date(a.cdt)=date(o.created_at)
"""
% (begin_date, end_date, is_first_loan))
all_money = run_one_sql(all_money_sql)
begin_date = date_time - timedelta(days=30)
repayment_sql = (
"""
select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et
from
(select br.principal_part,br.late_fee_part,
date(a.c3_entry) as et, br.application_id, br.repay_at
from (
select ba.id, ba.c3_entry
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c3_entry >= '%s'
and ba.c3_entry < '%s'
and ba.type = 0
and da.is_first_loan = '%s'
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 4
group by 4, 5) b
group by 2
"""
% (begin_date, end_date, is_first_loan))
repayment = run_all_sql(repayment_sql)
RepaymentReportInto.create(time=end_date - timedelta(days=1), cycle
=Cycle.C3.value, all_money=round(all_money, 3), proportion='0',
repayment=0, is_first_loan=is_first_loan, contain_out=
ContainOut.CONTAIN.value)
if not repayment:
return
for repay in repayment:
repay_money = Decimal(0)
if repay[0]:
repay_money = repay[0]
repay_money = repay_money / 1000000
report = RepaymentReportInto.filter(RepaymentReportInto.time ==
repay[1], RepaymentReportInto.cycle == Cycle.C3.value,
RepaymentReportInto.is_first_loan == is_first_loan).first()
if report:
report.repayment = repay_money
pro = 0
if report.all_money and int(report.all_money):
pro = repay_money / report.all_money * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
def get_static_bomber(begin_date):
active_date = begin_date - timedelta(days=8)
bombers = BomberR.select(BomberR.id, BomberR.role.alias('role'),
BomberR.last_active_at.alias('active')).where(BomberR.
last_active_at > active_date, BomberR.role << [1, 2, 4, 5, 6, 8, 9])
summary = []
for bomber in bombers:
summary.append({'time': begin_date, 'bomber_id': bomber.id, 'cycle':
bomber.role.cycle, 'work_ind': 0})
SummaryBomber.insert_many(summary).execute()
@time_logger
def get_call_and_made(end_date, begin_date, real_time_query=False):
call_sql = (
"""
select
bomber_id,
count(case when relationship is not null then application_id end)
as 'call_cnt',
count(distinct case when relationship is not null then
application_id end) as 'call_case',
count(case when phone_status=4 then application_id end) as 'connect',
count(distinct case when phone_status=4 then application_id end)
as 'connect_case'
from (
select bomber_id,application_id,phone_status, cycle, relationship
from bomber.call_actions ba
where created_at>'%s' and created_at<'%s'
and type in (0, 1)
) a
group by 1
"""
% (begin_date, end_date))
calls = run_all_sql(call_sql)
if real_time_query:
return calls
for call in calls:
bomber, call_cnt, case_made, connect_cnt, case_connect = call
SummaryBomber.update(case_made_cnt=case_made, call_cnt=call_cnt,
call_connect_cnt=connect_cnt, case_connect_cnt=case_connect).where(
SummaryBomber.bomber_id == bomber, SummaryBomber.time == begin_date
).execute()
return calls
@time_logger
def get_claimed_cnt(end_date, begin_date, real_time_query=False):
table_date = begin_date - timedelta(days=30)
claimed_sql = (
"""
SELECT
COUNT( `t1`.`application_id` ) AS cnt,
`t1`.`bomber_id` AS bomber_id
FROM
`dispatch_app_history` AS t1
WHERE
( `t1`.`out_at` > '%s' OR `t1`.`out_at` IS null )
AND ( `t1`.`bomber_id` != 1000 )
AND ( `t1`.`partner_id` IS null )
AND ( `t1`.`entry_at` > '%s' )
AND ( `t1`.`entry_at` < '%s' )
GROUP BY
`t1`.`bomber_id`
"""
% (begin_date, table_date, end_date))
claimeds = run_all_sql(claimed_sql)
if real_time_query:
return claimeds
for claimed in claimeds:
cnt, bomber_id = claimed
SummaryBomber.update(claimed_cnt=cnt).where(SummaryBomber.time ==
begin_date, SummaryBomber.bomber_id == bomber_id).execute()
return claimeds
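# Several reporting helpers here share the real_time_query / real_query_time
# switch: when True they return the raw rows for ad-hoc inspection, when
# False they persist the aggregates into SummaryBomber for the reporting day.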
def get_sms_data(end_date, begin_date):
    all_sms = ConnectHistoryR.select(ConnectHistoryR.operator.alias(
        'bomber_id'), fn.COUNT(ConnectHistoryR.application).alias('sms_send')
        ).where(ConnectHistoryR.created_at > begin_date, ConnectHistoryR.
        created_at < end_date, ConnectHistoryR.type.in_(ConnectType.sms())
        ).group_by(ConnectHistoryR.operator)
    for sms in all_sms:
        SummaryBomber.update(sms_cnt=sms.sms_send).where(SummaryBomber.time ==
            begin_date, SummaryBomber.bomber_id == sms.bomber_id).execute()
    return all_sms
@action(MessageAction.SUMMARY_NEW)
def summary_new(payload, msg_id):
end_date = date.today()
begin_date = end_date - timedelta(days=1)
worker_log = WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs')
).where(WorkerLog.created_at >= end_date, WorkerLog.action ==
'SUMMARY_NEW').first()
if worker_log and worker_log.logs >= 5:
return
get_active_bomber(begin_date)
get_call_and_made(end_date, begin_date)
get_claimed_cnt(end_date, begin_date)
get_sms_data(end_date, begin_date)
get_ptp_data(end_date, begin_date)
get_recover_amount(end_date, begin_date)
get_unfollowed(begin_date)
get_unfollowed_call(begin_date)
def get_new_case_amount(begin_date, end_date):
all_case = DispatchAppHistoryR.select(fn.SUM(DispatchAppHistoryR.
entry_late_fee_pending + DispatchAppHistoryR.
entry_principal_pending).alias('pending'), DispatchAppHistoryR.
bomber_id, fn.COUNT(DispatchAppHistoryR.application).alias('cnt')
).where(DispatchAppHistoryR.entry_at > begin_date,
DispatchAppHistoryR.entry_at < end_date, DispatchAppHistoryR.
partner_id.is_null(True)).group_by(DispatchAppHistoryR.bomber_id)
for case in all_case:
SummaryBomber.update(new_case_amount_sum=case.pending, new_case_cnt
=case.cnt).where(SummaryBomber.bomber_id == case.bomber_id,
SummaryBomber.time == begin_date).execute()
return all_case
def get_kp_today(begin_date, end_date):
sql = (
"""
select bomber_id, count(distinct application_id)
from(
SELECT bomber_id, application_id
FROM bomber.auto_call_actions a
WHERE promised_date >= '%s' AND created_at < '%s'
AND EXISTS(select 1 from bomber.application ba
where a.application_id=ba.id
and (ba.finished_at is null
or ba.finished_at > '%s'))
UNION
SELECT bomber_id, application_id
FROM bomber.bombing_history b
WHERE promised_date >= '%s' AND created_at < '%s'
AND EXISTS(select 1 from bomber.application ba
where b.application_id=ba.id
and (ba.finished_at is null
or ba.finished_at > '%s'))) result
GROUP BY 1
"""
% (begin_date, end_date, begin_date, begin_date, end_date, begin_date)
)
kp_today = run_all_sql(sql)
for kp in kp_today:
SummaryBomber.update(KP_today_cnt=kp[1]).where(SummaryBomber.time ==
begin_date, SummaryBomber.bomber_id == kp[0]).execute()
def get_new_case_cleared(begin_date, end_date):
sql = (
"""
SELECT
ptp_bomber AS bomber_id,
sum( paid_amount ) AS pending
FROM
(SELECT
br.late_fee_part + br.principal_part AS paid_amount,
br.ptp_bomber
FROM bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.c1b_entry ) = date( br.repay_at )
AND br.ptp_bomber is not null
WHERE ba.c1b_entry > '%s'
AND ba.c1b_entry < '%s'
) a
GROUP BY 1
UNION
SELECT
ptp_bomber AS bomber_id,
sum( paid_amount ) AS pending
FROM
(SELECT
br.late_fee_part + br.principal_part AS paid_amount,
br.ptp_bomber
FROM bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.c2_entry ) = date( br.repay_at )
AND br.ptp_bomber is not null
WHERE ba.c2_entry > '%s'
AND ba.c2_entry < '%s'
) a
GROUP BY 1
UNION
SELECT
ptp_bomber AS bomber_id,
sum( paid_amount ) AS pending
FROM
(SELECT
br.late_fee_part + br.principal_part AS paid_amount,
br.ptp_bomber
FROM
bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.c3_entry ) = date( br.repay_at )
AND br.ptp_bomber is not null
WHERE ba.c3_entry > '%s'
AND ba.c3_entry < '%s'
) a
GROUP BY 1
"""
% (begin_date, end_date, begin_date, end_date, begin_date, end_date))
case_cleared_sums = run_all_sql(sql)
for clear in case_cleared_sums:
SummaryBomber.update(new_case_cleared_sum=clear[1]).where(
SummaryBomber.time == begin_date, SummaryBomber.bomber_id ==
clear[0]).execute()
@time_logger
def get_new_case_call(begin_date, end_date, real_query_time=False):
sql = (
"""
SELECT
bd.bomber_id,
count( DISTINCT bd.application_id )
FROM
bomber.dispatch_app_history bd
INNER JOIN bomber.call_actions bc
ON bd.application_id = bc.application_id
AND bd.bomber_id = bc.bomber_id
AND date( bd.entry_at ) = date( bc.created_at )
WHERE
entry_at > '%s'
AND entry_at < '%s'
AND partner_id IS NULL
GROUP BY 1
"""
% (begin_date, end_date))
new_case_calls = run_all_sql(sql)
if real_query_time:
return new_case_calls
for call in new_case_calls:
SummaryBomber.update(new_case_call_cnt=call[1]).where(SummaryBomber
.bomber_id == call[0], SummaryBomber.time == begin_date).execute()
return new_case_calls
@time_logger
def get_calltime_avg(begin_date, end_date, real_query_time=False):
autos_sql = (
"""
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS auto_talkduraction,
count( 1 ) AS auto_jt_cnt
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.username = bb.username
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND an.username != ' '
AND an.STATUS = 'ANSWERED'
AND bb.id IS NOT NULL
GROUP BY 1
"""
% (begin_date, end_date))
autos = run_all_sql(autos_sql)
manual_sql = (
"""
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS manual_talkduraction,
count( 1 ) AS manual_jt_cnt
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) )
AND an.STATUS = 'ANSWERED'
AND bb.id IS NOT NULL
AND an.recording is not null
GROUP BY 1
"""
% (begin_date, end_date, '5%', '3%'))
manuals = run_all_sql(manual_sql)
datas = autos + manuals
result = {}
for data in datas:
if data[0] in result.keys():
result[data[0]][0] += data[1]
result[data[0]][1] += data[2]
continue
result[data[0]] = [data[1], data[2]]
if real_query_time:
return result
for key, value in result.items():
SummaryBomber.update(calltime_case_sum=value[0], calltime_case_cnt=
value[1], calltime_case_avg=value[0] / value[1] if value[1] else 0
).where(SummaryBomber.time == begin_date, SummaryBomber.
bomber_id == key).execute()
return result
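# Merge sketch: `autos` and `manuals` are rows of (bomber_id, seconds, cnt);
# the loop folds them into {bomber_id: [seconds, cnt]} so a bomber active on
# both lines is summed once, e.g. (7, 120, 3) + (7, 60, 2) -> {7: [180, 5]}.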
def get_no_calltime_avg(begin_date, end_date):
manual_sql = (
"""
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS manual_talkduraction,
count( 1 ) AS manual_jt_cnt
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) )
AND (an.status!='ANSWERED' or an.recording is null)
AND bb.id IS NOT NULL
GROUP BY 1
"""
% (begin_date, end_date, '5%', '3%'))
manuals = run_all_sql(manual_sql)
for data in manuals:
SummaryBomber.update(calltime_no_case_sum=data[1],
calltime_no_case_cnt=data[2], calltime_no_case_avg=data[1] /
data[2] if data[2] else 0).where(SummaryBomber.time ==
begin_date, SummaryBomber.bomber_id == data[0]).execute()
@time_logger
def get_calltime_sum(begin_date, end_date, real_query_time=False):
autos_sql = (
"""
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS auto_talkduraction
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.username = bb.username
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND an.username != ' '
AND bb.id IS NOT NULL
GROUP BY 1
"""
% (begin_date, end_date))
autos = run_all_sql(autos_sql)
manual_sql = (
"""
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS manual_talkduraction
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) )
AND bb.id IS NOT NULL
GROUP BY 1
"""
% (begin_date, end_date, '5%', '3%'))
manuals = run_all_sql(manual_sql)
datas = autos + manuals
result = {}
for data in datas:
if data[0] in result.keys():
result[data[0]] += data[1]
continue
result[data[0]] = data[1]
if real_query_time:
return result
for key, value in result.items():
SummaryBomber.update(calltime_sum=value).where(SummaryBomber.time ==
begin_date, SummaryBomber.bomber_id == key).execute()
return result
def get_unfollowed_call(begin_date):
sql = (
"""
SELECT
bomber_id,
count(1)
FROM
(
SELECT
bd.application_id,
date(bd.entry_at) AS entry_at,
bd.bomber_id,
date(bd.out_at) AS out_at
FROM
bomber.dispatch_app_history bd
WHERE
(
out_at > date_add('%(begin_date)s', INTERVAL 1 DAY)
OR out_at IS NULL
)
AND entry_at < date_add('%(begin_date)s', INTERVAL 1 DAY)
AND entry_at > date_sub('%(begin_date)s', INTERVAL 30 DAY)
AND partner_id IS NULL
AND NOT EXISTS (
SELECT
1
FROM
bomber.call_actions bc
WHERE
bd.bomber_id = bc.bomber_id
AND bc.application_id = bd.application_id
AND bc.created_at < '%(begin_date)s'
)
) a
WHERE
EXISTS (
SELECT
1
FROM
bomber.call_actions bc
WHERE
a.application_id = bc.application_id
AND a.bomber_id = bc.bomber_id
AND bc.created_at > '%(begin_date)s'
AND bc.created_at < date_add('%(begin_date)s', INTERVAL 1 DAY)
AND bc.created_at >= a.entry_at
)
OR EXISTS (
SELECT
1
FROM
bomber.application ba
WHERE
ba.id = a.application_id
AND ba.finished_at > '%(begin_date)s'
AND ba.finished_at < date_add('%(begin_date)s', INTERVAL 1 DAY)
)
GROUP BY
1
"""
% {'begin_date': begin_date})
data = run_all_sql(sql)
result = defaultdict(int)
for d in data:
result[d[0]] += d[1]
bomber_list = []
for key, value in result.items():
bomber_list.append(key)
SummaryBomber.update(unfollowed_call_cnt=SummaryBomber.
new_case_call_cnt + value).where(SummaryBomber.time ==
begin_date, SummaryBomber.bomber_id == key).execute()
update_sql = SummaryBomber.update(unfollowed_call_cnt=SummaryBomber.
new_case_call_cnt).where(SummaryBomber.time == begin_date)
if bomber_list:
update_sql = update_sql.where(SummaryBomber.bomber_id.not_in(
bomber_list))
update_sql.execute()
return result
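# Bombers with at least one unfollowed-call hit get
# new_case_call_cnt + value written back; every other bomber for the day is
# reset to plain new_case_call_cnt by the trailing bulk update.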
@action(MessageAction.UPDATE_SUMMARY_NEW)
def update_summary_new(payload, msg_id):
end_date = date.today()
begin_date = end_date - timedelta(days=1)
worker_log = WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs')
).where(WorkerLog.created_at >= end_date, WorkerLog.action ==
'UPDATE_SUMMARY_NEW').first()
if worker_log and worker_log.logs >= 5:
return
get_new_case_amount(begin_date, end_date)
get_kp_cleared(begin_date, end_date)
get_kp_today(begin_date, end_date)
get_ptp_cnt(begin_date, end_date)
get_ptp_call_cnt(begin_date, end_date)
get_new_case_cleared(begin_date, end_date)
get_new_case_call(begin_date, end_date)
get_calltime_avg(begin_date, end_date)
get_no_calltime_avg(begin_date, end_date)
get_calltime_sum(begin_date, end_date)
def get_cycle_claimed(begin_date, end_date):
sql = (
"""
select cycle,count(1)
from bomber.application where cycle in (1,2,3,4)
and (finished_at is null or (finished_at>'%s'))
and created_at>'2018-09-01'
group by 1
"""
% begin_date)
result = run_all_sql(sql)
return result
@time_logger
def cycle_new_case(begin_date, end_date, real_time_query=False):
sql = (
"""
SELECT
1 AS cycle,
count( ba.id ),
sum( bo.principal_pending + late_fee_pending +
interest_pending ) AS pending
FROM
bomber.application ba
INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id
AND date( ba.created_at ) = bo.which_day_overdue
WHERE
ba.created_at > '%s'
AND ba.created_at < '%s'
UNION
SELECT
2 AS cycle,
count( 1 ),
sum( bo.principal_pending + late_fee_pending +
interest_pending ) AS pending
FROM
bomber.application ba
INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id
AND date( ba.c1b_entry ) = bo.which_day_overdue
WHERE
c1b_entry > '%s'
AND c1b_entry < '%s'
UNION
SELECT
3 AS cycle,
count( 1 ),
sum( bo.principal_pending + late_fee_pending +
interest_pending ) AS pending
FROM
bomber.application ba
INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id
AND date( ba.c2_entry ) = bo.which_day_overdue
WHERE
c2_entry > '%s'
AND c2_entry < '%s'
UNION
SELECT
4 AS cycle,
count( 1 ),
sum( bo.principal_pending + late_fee_pending +
interest_pending ) AS pending
FROM
bomber.application ba
INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id
AND date( ba.c3_entry ) = bo.which_day_overdue
WHERE
c3_entry > '%s'
AND c3_entry < '%s'
"""
% (begin_date, end_date, begin_date, end_date, begin_date,
end_date, begin_date, end_date))
all_datas = run_all_sql(sql)
if real_time_query:
return all_datas
for data in all_datas:
SummaryBomber.update(new_case_amount_sum=data[2], new_case_cnt=data[1]
).where(SummaryBomber.time == begin_date, SummaryBomber.
bomber_id == data[0], SummaryBomber.cycle == data[0]).execute()
return all_datas
@time_logger
def get_cycle_new_case_call(begin_date, end_date, real_time_query=False):
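    """Count, per cycle, the new-entry applications that received a call
    on their entry day; optionally return the raw rows."""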
sql = (
"""
SELECT
1 AS cycle,
count( DISTINCT ba.id )
FROM
bomber.application ba
INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
AND date( ba.created_at ) = date( bc.created_at )
WHERE
ba.created_at > '%s'
AND ba.created_at < '%s'
UNION
SELECT
2 AS cycle,
count( DISTINCT ba.id )
FROM
bomber.application ba
INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
AND date( ba.c1b_entry ) = date( bc.created_at )
WHERE
ba.c1b_entry > '%s'
AND ba.c1b_entry < '%s'
UNION
SELECT
3 AS cycle,
count( DISTINCT ba.id )
FROM
bomber.application ba
INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
AND date( ba.c2_entry ) = date( bc.created_at )
WHERE
ba.c2_entry > '%s'
AND ba.c2_entry < '%s'
UNION
SELECT
4 AS cycle,
count( DISTINCT ba.id )
FROM
bomber.application ba
INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
AND date( ba.c3_entry ) = date( bc.created_at )
WHERE
ba.c3_entry > '%s'
AND ba.c3_entry < '%s'
"""
% (begin_date, end_date, begin_date, end_date, begin_date,
end_date, begin_date, end_date))
cycle_datas = run_all_sql(sql)
if real_time_query:
return cycle_datas
for data in cycle_datas:
SummaryBomber.update(new_case_call_cnt=data[1]).where(SummaryBomber
.time == begin_date, SummaryBomber.cycle == data[0],
SummaryBomber.bomber_id == data[0]).execute()
return cycle_datas
def get_cycle_new_case_cleared(begin_date, end_date):
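    """Sum the repayments made on the entry day of new C1A/C1B cases and
    store them as new_case_cleared_sum."""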
sql = (
"""
SELECT
'1' AS cycle, count( DISTINCT id ),
sum( paid_amount ) AS pending
FROM
(SELECT ba.id, br.repay_at,
br.late_fee_part + br.principal_part AS paid_amount
FROM
bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.created_at ) = date( br.repay_at )
WHERE ba.created_at > '%s'
AND ba.created_at < '%s'
GROUP BY 1, 2 ) a
UNION
SELECT
'2' AS cycle, count( DISTINCT id ),
sum( paid_amount ) AS pending
FROM
(SELECT ba.id, br.repay_at,
br.late_fee_part + br.principal_part AS paid_amount
FROM
bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.c1b_entry ) = date( br.repay_at )
WHERE ba.c1b_entry > '%s'
AND ba.c1b_entry < '%s'
GROUP BY 1, 2) a
"""
% (begin_date, end_date, begin_date, end_date))
cycle_cleared = run_all_sql(sql)
for i in cycle_cleared:
SummaryBomber.update(new_case_cleared_sum=i[2]).where(SummaryBomber
.cycle == i[0], SummaryBomber.bomber_id == i[0], SummaryBomber.
time == begin_date).execute()
def get_cycle_case_made_cnt(begin_date, end_date):
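    """Count distinct applications touched per cycle, via either the auto
    call list or manual call actions."""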
sql = (
"""
select cycle,count(distinct application) from (
select distinct cycle,application from bomber.auto_call_list_record
where created_at >= '%s'
and created_at < '%s'
and called_counts <> 0
and cycle in (1,2,3,4)
union
select distinct cycle,application_id from bomber.call_actions
where created_at >= '%s'
and created_at < '%s'
and cycle in (1,2,3,4)
) c
group by 1
"""
% (begin_date, end_date, begin_date, end_date))
case_made_datas = run_all_sql(sql)
for case_made_data in case_made_datas:
SummaryBomber.update(case_made_cnt=case_made_data[1]).where(
SummaryBomber.time == begin_date, SummaryBomber.cycle ==
case_made_data[0], SummaryBomber.bomber_id == case_made_data[0]
).execute()
@action(MessageAction.SUMMARY_NEW_CYCLE)
def summary_new_cycle(payload, msg_id):
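    """Aggregate yesterday's per-bomber rows into per-cycle summary rows.

    The cycle id doubles as the pseudo bomber_id for cycle-level rows;
    derived rates (cleared/made/connect rates, average call times) are
    back-filled at the end.
    """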
end_date = date.today()
begin_date = end_date - timedelta(days=1)
worker_log = WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs')
).where(WorkerLog.created_at >= end_date, WorkerLog.action ==
'SUMMARY_NEW_CYCLE').first()
if worker_log and worker_log.logs >= 5:
return
cycle_datas = SummaryBomber.select(fn.SUM(SummaryBomber.
new_case_amount_sum).alias('new_case_amount_sum'), fn.SUM(
SummaryBomber.new_case_cleared_sum).alias('new_case_cleared_sum'),
fn.SUM(SummaryBomber.case_made_cnt).alias('case_made_cnt'), fn.SUM(
SummaryBomber.case_connect_cnt).alias('case_connect_cnt'), fn.SUM(
SummaryBomber.promised_cnt).alias('promised_cnt'), fn.SUM(
SummaryBomber.promised_amount).alias('promised_amount'), fn.SUM(
SummaryBomber.cleared_cnt).alias('cleared_cnt'), fn.SUM(
SummaryBomber.cleared_amount).alias('cleared_amount'), fn.SUM(
SummaryBomber.new_case_cnt).alias('new_case_cnt'), fn.SUM(
SummaryBomber.new_case_call_cnt).alias('new_case_call_cnt'), fn.SUM
(SummaryBomber.unfollowed_cnt).alias('unfollowed_cnt'), fn.SUM(
SummaryBomber.unfollowed_call_cnt).alias('unfollowed_call_cnt'), fn
.SUM(SummaryBomber.call_cnt).alias('call_cnt'), fn.SUM(
SummaryBomber.sms_cnt).alias('sms_cnt'), fn.SUM(SummaryBomber.
call_connect_cnt).alias('call_connect_cnt'), fn.SUM(SummaryBomber.
ptp_today_cnt).alias('ptp_today_cnt'), fn.SUM(SummaryBomber.
ptp_today_call_cnt).alias('ptp_today_call_cnt'), fn.SUM(
SummaryBomber.ptp_next_cnt).alias('ptp_next_cnt'), fn.SUM(
SummaryBomber.ptp_next_call_cnt).alias('ptp_next_call_cnt'), fn.SUM
(SummaryBomber.KP_cleared_cnt).alias('KP_cleared_cnt'), fn.SUM(
SummaryBomber.KP_today_cnt).alias('KP_today_cnt'), fn.SUM(
SummaryBomber.work_ind).alias('work_ind'), fn.SUM(SummaryBomber.
calltime_sum).alias('calltime_sum'), fn.SUM(SummaryBomber.
calltime_case_sum).alias('calltime_case_sum'), fn.SUM(SummaryBomber
.calltime_case_cnt).alias('calltime_case_cnt'), fn.SUM(
SummaryBomber.calltime_no_case_sum).alias('calltime_no_case_sum'),
        fn.SUM(SummaryBomber.calltime_no_case_cnt).alias(
        'calltime_no_case_cnt'), fn.SUM(SummaryBomber.work_time_sum).alias(
        'work_time_sum'), SummaryBomber.cycle.alias('cycle')).where(
SummaryBomber.time == begin_date, SummaryBomber.cycle << Cycle.values()
).group_by(SummaryBomber.cycle)
for cycle_data in cycle_datas:
SummaryBomber.create(bomber_id=cycle_data.cycle, time=begin_date,
cycle=cycle_data.cycle, new_case_amount_sum=cycle_data.
new_case_amount_sum, new_case_cleared_sum=cycle_data.
new_case_cleared_sum, new_case_cleard_rate=0, case_made_cnt=
cycle_data.case_made_cnt, case_made_rate=0, case_connect_cnt=
cycle_data.case_connect_cnt, case_connect_rate=0, promised_cnt=
cycle_data.promised_cnt, promised_amount=cycle_data.
promised_amount, cleared_cnt=cycle_data.cleared_cnt,
cleared_amount=cycle_data.cleared_amount, new_case_cnt=
cycle_data.new_case_cnt, new_case_call_cnt=cycle_data.
new_case_call_cnt, unfollowed_cnt=cycle_data.unfollowed_cnt,
unfollowed_call_cnt=cycle_data.unfollowed_call_cnt, call_cnt=
cycle_data.call_cnt, sms_cnt=cycle_data.sms_cnt,
call_connect_cnt=cycle_data.call_connect_cnt, calltime_case_avg
=0, ptp_today_cnt=cycle_data.ptp_today_cnt, ptp_today_call_cnt=
cycle_data.ptp_today_call_cnt, ptp_next_cnt=cycle_data.
ptp_next_cnt, ptp_next_call_cnt=cycle_data.ptp_next_call_cnt,
KP_cleared_cnt=cycle_data.KP_cleared_cnt, KP_today_cnt=
cycle_data.KP_today_cnt, KP_cleared_rate=0, work_ind=cycle_data
.work_ind, calltime_sum=cycle_data.calltime_sum,
calltime_case_sum=cycle_data.calltime_case_sum,
calltime_case_cnt=cycle_data.calltime_case_cnt,
calltime_no_case_sum=cycle_data.calltime_no_case_sum,
calltime_no_case_cnt=cycle_data.calltime_no_case_cnt,
work_time_sum=cycle_data.work_time_sum)
cycle_claimed = get_cycle_claimed(begin_date, end_date)
for claimed in cycle_claimed:
SummaryBomber.update(claimed_cnt=claimed[1]).where(SummaryBomber.
time == begin_date, SummaryBomber.cycle == claimed[0],
SummaryBomber.bomber_id == claimed[0]).execute()
cycle_new_case(begin_date, end_date)
get_cycle_new_case_call(begin_date, end_date)
get_cycle_new_case_cleared(begin_date, end_date)
get_cycle_case_made_cnt(begin_date, end_date)
all_datas = SummaryBomber.filter(SummaryBomber.time == begin_date)
for data in all_datas:
cl_rat = (data.new_case_cleared_sum / data.new_case_amount_sum if
data.new_case_amount_sum else 0) * 100
data.new_case_cleard_rate = cl_rat
case_made_rate = (data.case_made_cnt / data.claimed_cnt if data.
claimed_cnt else 0) * 100
data.case_made_rate = case_made_rate
case_connect_rate = (data.case_connect_cnt / data.case_made_cnt if
data.case_made_cnt else 0) * 100
data.case_connect_rate = case_connect_rate
calltime_case_avg = (data.calltime_case_sum / data.
calltime_case_cnt if data.calltime_case_cnt else 0)
data.calltime_case_avg = calltime_case_avg
calltime_no_case_avg = (data.calltime_no_case_sum / data.
calltime_no_case_cnt if data.calltime_no_case_cnt else 0)
data.calltime_no_case_avg = calltime_no_case_avg
KP_cleared_rate = (data.KP_cleared_cnt / data.KP_today_cnt if data.
KP_today_cnt else 0) * 100
data.KP_cleared_rate = KP_cleared_rate
data.save()
<mask token>
def get_change_bomber():
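    """Collect today's bomber add/remove events for the C1B/C2/C3 roles.

    Returns a list of {'cycle', 'del_ids', 'new_ids'} dicts, one per
    (cycle, group) pair, or an empty list when nothing changed today.
    """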
    cycle_role_map = {5: Cycle.C1B.value, 6: Cycle.C2.value, 8: Cycle.C3.value}
result = {}
bomber_logs = BomberLog.select(BomberLog.bomber_id, BomberLog.role_id,
BomberLog.operation, Bomber.group_id).join(Bomber, JOIN_INNER, on=
BomberLog.bomber_id == Bomber.id).where(fn.DATE(BomberLog.
created_at) == date.today(), BomberLog.role_id << list(
cycle_role_map.keys()), BomberLog.operation << (0, 1), Bomber.
instalment == 0).dicts()
for b_log in bomber_logs:
cycle = cycle_role_map.get(b_log['role_id'])
group_id = b_log['group_id']
if cycle in result:
if group_id not in result[cycle]:
result[cycle][group_id] = {'cycle': cycle, 'del_ids': [],
'new_ids': []}
else:
result[cycle] = {group_id: {'cycle': cycle, 'del_ids': [],
'new_ids': []}}
if b_log['operation'] == 0:
result[cycle][group_id]['del_ids'].append(b_log['bomber_id'])
if result:
bombers = Bomber.select().where(Bomber.role.in_(list(cycle_role_map
.keys())), Bomber.is_del == 0, Bomber.instalment == 0)
for b in bombers:
cycle_result = result.get(cycle_role_map[b.role_id], {})
role_result = cycle_result.get(b.group_id)
if not role_result:
continue
role_result['new_ids'].append(b.id)
        result_list = []
        for cycle, group_dict in result.items():
            result_list.extend(list(group_dict.values()))
        return result_list
return []
<mask token>
def get_average_number(app_nums, bomber_nums):
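    """Split app_nums into bomber_nums near-equal shares.

    The remainder is spread one-by-one over the first buckets and the
    list is shuffled so no bomber consistently receives the extra apps,
    e.g. get_average_number(10, 3) returns a shuffle of [4, 3, 3].
    """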
average = app_nums // bomber_nums
remainder = app_nums % bomber_nums
average_list = [average for i in range(bomber_nums)]
if remainder == 0:
return average_list
for i in range(remainder):
average_list[i] += 1
random.shuffle(average_list)
return average_list
<mask token>
def get_surplus_application(new_ids, del_ids, average_nums, classified_apps):
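    """Work out which applications have to move when bombers change.

    Removed bombers release all of their apps; remaining bombers above
    their average give up random non-PTP apps, and every entry records
    how many apps it still needs. Returns (surplus_apps, sorted entries).
    """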
surplus_apps = []
for del_id in del_ids:
del_res = classified_apps.get(del_id, {})
p_list = del_res.get('p_list', [])
np_list = del_res.get('np_list', [])
del_res['need_num'] = -(len(p_list) + len(np_list))
del_res['to_list'] = np_list + p_list
surplus_apps.extend(p_list)
surplus_apps.extend(np_list)
for index, bid in enumerate(new_ids):
average = average_nums[index]
bomber_app = classified_apps.get(bid)
if not bomber_app:
bomber = Bomber.select(Bomber.partner_id).where(Bomber.id == bid
).first()
bomber_app = {'bid': bid, 'p_list': [], 'p_num': 0, 'np_list':
[], 'np_num': 0, 'need_num': average, 'partner_id': bomber.
partner_id if bomber else ''}
classified_apps[bid] = bomber_app
else:
p_num = len(bomber_app['p_list'])
np_num = len(bomber_app['np_list'])
if p_num > average:
bomber_app['need_num'] = -np_num
else:
bomber_app['need_num'] = average - (p_num + np_num)
bomber_app['p_num'] = p_num
bomber_app['np_num'] = np_num
if bomber_app['need_num'] < 0:
random.shuffle(bomber_app['np_list'])
res_over = bomber_app['np_list'][:-bomber_app['need_num']]
bomber_app['to_list'] = res_over
surplus_apps.extend(res_over)
classified_apps_list = sorted(classified_apps.values(), key=lambda x: x
['need_num'], reverse=True)
return surplus_apps, classified_apps_list
<mask token>
@action(MessageAction.BOMBER_CHANGE_DISPATCH_APPS)
def bomber_dispatch_applications(payload, msg_id):
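    """Entry point for re-dispatching applications after bomber changes,
    covering both the cash-loan and instalment application types."""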
change_bombers = get_change_bomber()
instalment_change_bombers = get_instalment_change_bomber()
params = {ApplicationType.CASH_LOAN.value: change_bombers,
ApplicationType.CASH_LOAN_STAGING.value: instalment_change_bombers}
for type, bombers in params.items():
change_bomber_dispatch_apps(change_bombers=bombers, type=type)
@action(MessageAction.REPAIR_BOMBER)
def repair_bomber(payload, msg_id):
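    """Repair a number on the bomber side: refresh the contacts of the
    matching applications via repair_contact and record the relationship
    via add_relationship."""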
app_mobile = payload['app_mobile']
username = payload.get('user_name')
logging.info('start repair bomber, number: %s' % app_mobile)
if 'mobile_no' in payload and payload['mobile_no']:
mobile = number_strip(str(payload['mobile_no']))[:64]
name = payload.get('mobile_name')
application = Application.filter(Application.user_mobile_no == mobile)
if application.exists():
repair_contact(app_mobile, application, username)
add_relationship(app_mobile, mobile, username, name)
if 'tel_no' in payload and payload['tel_no']:
tel_no = number_strip(str(payload['tel_no']))[:64]
name = payload.get('tel_name')
application = Application.filter(Application.user_mobile_no == tel_no)
if application.exists():
repair_contact(app_mobile, application, username)
add_relationship(app_mobile, tel_no, username, name)
<mask token>
def get_summary_daily_time():
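    """Pick the reporting window for the current intra-day summary run.

    The day is cut at 12:40 and 17:20; depending on the current time the
    window is (yesterday 17:20, 00:00), (00:00, 12:40) or (12:40, 17:20),
    and the summary is attributed to the date of now minus 30 minutes.
    """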
mid_time_t1 = datetime.strptime('12:40:00', '%H:%M:%S')
mid_time_t2 = datetime.strptime('17:20:00', '%H:%M:%S')
now_date = datetime.now()
now_date_time = now_date.time()
today_str = str(now_date.date())
if now_date_time < mid_time_t1.time():
yes_date = now_date - timedelta(days=1)
yes_date_str = str(yes_date.date())
begin_str = yes_date_str + ' 17:20:00'
end_str = today_str + ' 00:00:00'
elif mid_time_t1.time() <= now_date_time < mid_time_t2.time():
begin_str = today_str + ' 00:00:00'
end_str = today_str + ' 12:40:00'
else:
begin_str = today_str + ' 12:40:00'
end_str = today_str + ' 17:20:00'
begin_time = datetime.strptime(begin_str, '%Y-%m-%d %H:%M:%S')
end_time = datetime.strptime(end_str, '%Y-%m-%d %H:%M:%S')
summary_datetime = now_date - timedelta(minutes=30)
summary_date = summary_datetime.date()
return begin_time, end_time, summary_date
@action(MessageAction.SUMMARY_DAILY)
def summary_daily_data(payload, msg_id):
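    """Build the intra-day per-bomber summary (calls, PTPs, repayments)
    for the window returned by get_summary_daily_time and bulk-insert it
    into SummaryDaily."""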
begin_time, end_time, summary_date = get_summary_daily_time()
call_actions = CallActionsR.select(CallActionsR.id, CallActionsR.
bomber_id, CallActionsR.application_id, CallActionsR.promised_date,
CallActionsR.cycle, CallActionsR.name, CallActionsR.number).where(
CallActionsR.created_at >= begin_time, CallActionsR.created_at <
end_time, CallActionsR.type << (0, 1))
summary_dailys = {}
for call in call_actions:
if call.bomber_id not in summary_dailys:
summary_dailys[call.bomber_id] = {'ptp_cnt': 0, 'call_cnt': 0,
'cycle': call.cycle, 'repayment': 0, 'bomber_id': call.
bomber_id, 'summary_date': str(summary_date)}
if call.name and call.number:
summary_dailys[call.bomber_id]['call_cnt'] += 1
if call.promised_date:
summary_dailys[call.bomber_id]['ptp_cnt'] += 1
C1_sql = (
"""
SELECT a.current_bomber_id,
sum(principal_part+late_fee_part) as pay_amount,a.cycle
from
(select a.cycle,a.current_bomber_id,b.username,a.principal_part,
a.late_fee_part,a.application_id,a.repay_at
FROM bomber.repayment_log a ,bomber.bomber b
WHERE a.repay_at >= '%s' AND a.repay_at <'%s'
AND a.current_bomber_id !=''
AND a.current_bomber_id = b.id
and b.role_id in (1,2,4,5)
and principal_part+late_fee_part>0
group by 6,7) a
GROUP BY a.cycle,a.current_bomber_id
"""
% (begin_time, end_time))
C1_repayment = run_all_sql(C1_sql)
other_sql = (
"""
select current_bomber_id,sum(pay_amount) as pay_amount,cycle
from (
select application_id,current_bomber_id,pay_amount,repay_at,cycle
from (
select br.application_id,br.current_bomber_id,
br.principal_part+br.late_fee_part as pay_amount,br.repay_at,
br.cycle
from bomber.repayment_log br
left join bomber.bomber bb on br.current_bomber_id=bb.id
where exists (select 1 from bomber.bombing_history bb
where br.current_bomber_id=bb.bomber_id
and br.application_id=bb.application_id
and bb.created_at<br.repay_at
and (bb.promised_date is not null
or bb.promised_amount is not null))
and br.repay_at >= '%s'
and br.repay_at < '%s'
and bb.role_id in (3,6,7,8,9)
and br.principal_part+br.late_fee_part > 0
group by 1,4
) a
group by 1,4) b
group by 1
"""
% (begin_time, end_time))
other_repayment = run_all_sql(other_sql)
all_repayment = C1_repayment + other_repayment
for res in all_repayment:
bomber_id, pay_amount, cycle = res
if bomber_id in summary_dailys:
summary_dailys[bomber_id]['repayment'] += pay_amount
else:
summary_dailys[bomber_id] = {'ptp_cnt': 0, 'call_cnt': 0,
'cycle': cycle, 'repayment': pay_amount, 'bomber_id':
bomber_id, 'summary_date': str(summary_date)}
insert_values = list(summary_dailys.values())
if insert_values:
SummaryDaily.insert_many(insert_values).execute()
<mask token>
def get_app_logs(apps):
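    """Group applications by their latest bomber (falling back to the
    cycle id) and split them into PTP and non-PTP lists."""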
app_logs = {}
all_np_apps = []
all_p_apps = []
for a in apps:
latest_bomber = a['latest_bomber']
latest_bomber = a['cycle'] if not latest_bomber else latest_bomber
if latest_bomber in app_logs:
app_logs[latest_bomber]['to_ids'].append(a['id'])
else:
app_logs[latest_bomber] = {'bomber_id': latest_bomber, 'to_ids':
[a['id']], 'np_ids': [], 'p_ids': []}
if a['promised_date'] and a['promised_date'].date() >= datetime.now(
).date():
app_logs[latest_bomber]['p_ids'].append(a['id'])
all_p_apps.append(a)
else:
app_logs[latest_bomber]['np_ids'].append(a['id'])
all_np_apps.append(a)
return app_logs, all_np_apps, all_p_apps
def month_dispatch_app_out_partner(cycle, apps, app_logs, np_apps):
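    """Dispatch the monthly batch to outsourcing partners according to
    their app_percentage; returns the non-PTP apps left over for the
    in-house dispatch."""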
apps = list(apps)
np_apps = list(np_apps)
random.shuffle(np_apps)
apps_len = len(apps)
np_apps_len = len(np_apps)
end = 0
    all_app_percentage = 0
partners = Partner.select().where(Partner.cycle == cycle, Partner.
status == PartnerStatus.NORMAL.value)
for p in partners:
        all_app_percentage += p.app_percentage
for partner in partners:
bombers = Bomber.select().where(Bomber.partner == partner.id,
Bomber.is_del == 0, Bomber.status != BomberStatus.OUTER_LEADER.
value)
bids = {b.id: b for b in bombers}
if len(bids) == 0:
logging.info('cycle:%s,partner:%s,no bomber' % (cycle, partner.id))
continue
start = end
        if np_apps_len >= int(apps_len * all_app_percentage):
end = start + int(apps_len * partner.app_percentage)
else:
end = start + int(np_apps_len * partner.app_percentage /
                all_app_percentage)
partner_app = np_apps[start:end]
dispatch_apps_to_bomber(cycle, partner_app, bids, app_logs)
np_apps = np_apps[end:]
return np_apps
def month_dispatch_app_inner(cycle, np_apps, app_logs, p_apps):
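    """Dispatch the remaining monthly batch to the in-house AB-test
    bombers; C1A non-PTP apps are simply reset to PROCESSING under the
    cycle itself instead of a concrete bomber."""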
    sys_cycle = {1: 'AB_TEST_C1A', 2: 'AB_TEST_C1B', 3: 'AB_TEST_C2',
        4: 'AB_TEST_C3'}
sys_config = SystemConfig.get(SystemConfig.key == sys_cycle[cycle])
sys_values = json.loads(sys_config.value)
bombers = Bomber.select().where(Bomber.id << sys_values, Bomber.is_del == 0
)
if cycle in (Cycle.C1B.value, Cycle.C2.value, Cycle.C3.value):
bombers = bombers.where(Bomber.instalment == 0)
bids = {b.id: b for b in bombers}
if cycle == Cycle.C1A.value:
np_ids = [a['id'] for a in np_apps]
np = Application.update(status=ApplicationStatus.PROCESSING.value,
ptp_bomber=None, latest_bomber=None).where(Application.id << np_ids
).execute()
bomber_app_logs = app_logs.get(cycle, {})
out_param = {'application_ids': bomber_app_logs.get('to_ids', []),
'month_dispatch': 1, 'src_bomber_id': cycle}
new_out_record(**out_param)
in_param = {'cycle': cycle, 'application_ids': np_ids,
'dest_bomber_id': cycle}
new_in_record(**in_param)
bomber_app_logs['need_num'] = len(np_apps)
bomber_app_logs['form_ids'] = np_ids
bomber_app_logs['status'] = 1
else:
dispatch_apps_to_bomber(cycle, np_apps, bids, app_logs, False)
dispatch_apps_to_bomber(cycle, p_apps, bids, app_logs, False)
def dispatch_apps_to_bomber(cycle, apps, bids, app_logs, out_partner=True,
type=ApplicationType.CASH_LOAN.value):
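    """Shuffle apps, split them evenly across bids via get_average_number,
    and write the moves back with in/out dispatch records; PTP apps keep
    their ptp_bomber while non-PTP apps are reset to AB_TEST/PROCESSING."""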
apps = list(apps)
random.shuffle(apps)
bids_list = list(bids.keys())
if len(bids_list) <= 0:
logging.info('get_dispatch_app_to_bomber no bids')
return
average_num = get_average_number(len(apps), len(bids_list))
bomber_end = 0
with db.atomic():
for index, bid in enumerate(bids_list):
current_bomber = bids.get(bid)
bomber_app_logs = app_logs.get(bid, {})
bomber_start = bomber_end
bomber_end = bomber_start + average_num[index]
bomber_apps = apps[bomber_start:bomber_end]
from_p, from_np, from_ids, status = [], [], [], 0
for ba in bomber_apps:
promised_date = ba.get('promised_date')
from_ids.append(ba['id'])
if promised_date and promised_date.date() >= date.today():
from_p.append(ba['id'])
else:
from_np.append(ba['id'])
app_status = ApplicationStatus.AB_TEST.value
if (cycle == Cycle.C1A.value and not out_partner and type ==
ApplicationType.CASH_LOAN.value):
app_status = ApplicationStatus.PROCESSING.value
if from_p:
p = Application.update(ptp_bomber=bid, latest_bomber=bid,
status=app_status).where(Application.id << from_p).execute(
)
p_ids = bomber_app_logs.get('p_ids', []) + from_p
bomber_app_logs['p_ids'] = p_ids
if from_np:
np = Application.update(latest_bomber=bid, ptp_bomber=None,
status=ApplicationStatus.AB_TEST.value).where(
Application.id << from_np).execute()
np_ids = bomber_app_logs.get('np_ids', []) + from_np
bomber_app_logs['np_ids'] = np_ids
in_param = {'cycle': cycle, 'dest_partner_id': current_bomber.
partner_id, 'application_ids': from_ids, 'dest_bomber_id': bid}
if type == ApplicationType.CASH_LOAN.value:
out_param = {'src_bomber_id': bid, 'application_ids':
bomber_app_logs.get('to_ids', []), 'month_dispatch': 1}
new_out_record(**out_param)
new_in_record(**in_param)
else:
out_and_in_record_instalment(**in_param)
bomber_app_logs['status'] = 1
need_num = bomber_app_logs.get('need_num', 0) + average_num[index]
bomber_app_logs['need_num'] = need_num
all_form_ids = bomber_app_logs.get('form_ids', []) + from_ids
bomber_app_logs['form_ids'] = all_form_ids
if not out_partner:
continue
try:
DispatchApp.delete().where(DispatchApp.application.in_(
from_ids)).execute()
dispatch_ins = [{'application': id, 'partner':
current_bomber.partner_id, 'bomber': bid, 'status':
DisAppStatus.NORMAL.value} for id in from_ids]
DispatchApp.insert_many(dispatch_ins).execute()
except Exception as e:
logging.info(
'month_disapp_error error:%s,bid:%s,from_ids:%s' % (str
(e), bid, from_ids))
<mask token>
def calc_entry_time(overdue_days):
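    """Map overdue_days to cycle entry timestamps: the stage whose day
    range contains overdue_days gets datetime.now(), all others None."""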
app_entry_time = {}
overdue_entry = {'dpd1_entry': [1, 3], 'C1A_entry': [4, 10],
'C1B_entry': [11, 30], 'C2_entry': [31, 60], 'C3_entry': [61, 90]}
for key, value in overdue_entry.items():
if value[0] <= overdue_days <= value[1]:
app_entry_time[key] = datetime.now()
else:
app_entry_time[key] = None
return app_entry_time
<mask token>
@action(MessageAction.SUMMARY_BOMBER_OVERDUE)
def summary_bomber_overdue_everyday(payload, msg_id):
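    """Snapshot every active application's overdue state per cycle into
    BomberOverdue, clearing expired promised/follow-up dates, inserted
    in batches of 1000."""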
cycle_list = Cycle.values()
which_day = date.today()
for cycle in cycle_list:
apps = ApplicationR.select(ApplicationR.id, ApplicationR.cycle,
ApplicationR.ptp_bomber, ApplicationR.overdue_days,
ApplicationR.promised_date, ApplicationR.follow_up_date,
ApplicationR.external_id, OverdueBillR.status, OverdueBillR.
periods, OverdueBillR.sub_bill_id).join(OverdueBillR,
JOIN_LEFT_OUTER, on=ApplicationR.id == OverdueBillR.collection_id
).where(ApplicationR.status != ApplicationStatus.REPAID.value,
ApplicationR.no_active == 0, ApplicationR.cycle == cycle).dicts()
bomber_overdue_list = []
for app in apps:
status = app.get('status')
if status == ApplicationStatus.REPAID.value:
continue
ptp_bomber = app.get('ptp_bomber')
promised_date = app.get('promised_date')
follow_up_date = app.get('follow_up_date')
if not promised_date or promised_date.date() < date.today():
ptp_bomber = promised_date = None
if not follow_up_date or follow_up_date.date() < date.today():
follow_up_date = None
overdue_dict = {'collection_id': app.get('id'), 'external_id':
app.get('external_id'), 'sub_bill_id': app.get(
'sub_bill_id'), 'periods': app.get('periods'), 'cycle': app
.get('cycle') if app.get('cycle') else cycle, 'ptp_bomber':
ptp_bomber, 'promised_date': promised_date,
'follow_up_date': follow_up_date, 'which_day': which_day,
'overdue_days': app.get('overdue_days')}
bomber_overdue_list.append(overdue_dict)
try:
if bomber_overdue_list:
with db.atomic():
for index in range(0, len(bomber_overdue_list), 1000):
insert_list = bomber_overdue_list[index:index + 1000]
BomberOverdue.insert_many(insert_list).execute()
except Exception as e:
logging.info(
'summary_bomber_overdue_error,cycle:%s,which_day:%s,error:%s' %
(cycle, str(which_day), str(e)))
<mask token>
@action(MessageAction.BOMBER_TODAY_PTP_FOLLOW_SWITCH_OFF)
def today_ptp_auto_call_switch(payload, msg_id):
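    """Switch off today's auto call for bombers with an auto extension
    who hold a PTP due today (cycles below C2)."""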
today = datetime.today().date()
next_day = today + timedelta(days=1)
apps = ApplicationR.select(ApplicationR.latest_bomber).where(
ApplicationR.promised_date < next_day, ApplicationR.promised_date >=
today, ApplicationR.promised_date.is_null(False), ApplicationR.
status != ApplicationStatus.REPAID.value, ApplicationR.cycle <
Cycle.C2.value, ApplicationR.latest_bomber.is_null(False)).group_by(
ApplicationR.latest_bomber)
bids = [a.latest_bomber_id for a in apps]
if not bids:
return
q = BomberPtp.update(today_switch=BomberCallSwitch.OFF.value).where(
BomberPtp.auto_ext.is_null(False), BomberPtp.bomber_id << bids
).execute()
@action(MessageAction.BOMBER_TODAY_PTP_FOLLOW_SWITCH_ON)
def update_today_switch_every_day(payload, msg_id):
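    """Reset today_switch back to ON for every bomber with an auto extension."""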
q = BomberPtp.update(today_switch=BomberCallSwitch.ON.value).where(
BomberPtp.auto_ext.is_null(False)).execute()
<mask token>
import traceback
from functools import partial
import json
import logging
from collections import defaultdict
from itertools import cycle as CycleIter
from datetime import datetime, date, timedelta
from decimal import Decimal
import random
from copy import deepcopy
from math import ceil
import boto3
import bottle
from peewee import fn, SQL, JOIN_LEFT_OUTER, JOIN_INNER, R
from mongoengine import Q
from deprecated.sphinx import deprecated
from bomber.api import (
AccountService,
MessageService,
AuditService,
BillService,
Dashboard,
GoldenEye,
Hyperloop,
Message,
Scout)
from bomber.constant_mapping import (
AutoCallMessageCycle,
ApplicationStatus,
RealRelationship,
BomberCallSwitch,
CallActionCommit,
ApplicantSource,
ApplicationType,
EscalationType,
ApprovalStatus,
AutoListStatus,
AutoCallResult,
BeforeInBomber,
PriorityStatus,
InboxCategory,
OldLoanStatus,
BombingResult,
ContactStatus,
SpecialBomber,
PartnerStatus,
Relationship,
ConnectType,
SubRelation,
PhoneStatus,
ContactType,
SmsChannel,
ContainOut,
FIRSTLOAN,
AppName,
RipeInd,
Cycle,
ContactsUseful,
DisAppStatus,
BomberStatus,
PartnerType)
from bomber.controllers.templates import cs_number_conf
from bomber.controllers.report_calculation.collection_tool import (
average_call_duration_team
)
from bomber.controllers.report_calculation.collection_agent import get_agent
from bomber.db import db, readonly_db
from bomber.models_readonly import (
DispatchAppHistoryR,
AutoCallActionsR,
ConnectHistoryR,
ApplicationR,
CallActionsR,
OverdueBillR,
BomberR)
from bomber.models import (
ManualCallListStatus,
RepaymentReportInto,
OldLoanApplication,
DispatchAppHistory,
CompanyContactType,
FamilyContactType,
ReportCollection,
RepaymentReport,
AutoCallActions,
DispatchAppLogs,
ConnectHistory,
BombingHistory,
ManualCallList,
AutoIVRActions,
SummaryBomber,
SummaryDaily,
IVRCallStatus,
BomberOverdue,
AutoCallList,
AutoIVRStatus,
SystemConfig,
RepaymentLog,
IVRActionLog,
TotalContact,
Application,
CallActions,
DispatchApp,
OverdueBill,
Escalation,
BomberPtp,
WorkerLog,
BomberLog,
CycleList,
Template,
Transfer,
Summary2,
AutoIVR,
Partner,
Contact,
CallLog,
Summary,
Bomber,
Inbox,
Role,
SCI,
)
from bomber.sns import MessageAction, send_to_default_q
from bomber.utils import (
get_cycle_by_overdue_days,
str_no_utc_datetime,
no_utc_datetime,
gender_ktpnum,
list_to_dict,
birth_dt_ktp,
number_strip,
utc_datetime,
OperatedDict,
average_gen,
time_logger,
idg,
)
from bomber.report_work import get_every_cycle_report
app = bottle.default_app()
client = boto3.client('sqs')
# externally exposed dict: key - action name; value - list of handler functions
actions = {}
def action(msg_action):
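    """Register a handler for the given MessageAction; several handlers
    may share one action name and are stored in the module-level
    ``actions`` dict."""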
action_name = msg_action.value.lower()
if action_name not in actions:
actions[action_name] = []
def wrapper(func):
actions[action_name].append(func)
return func
return wrapper
@action(MessageAction.BOMBER_HEALTH_CHECK)
def health_check(payload, msg_id):
pass
def dpd1_classify(item, lst):
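    """Bucket an application id into its DPD1 rule group, keyed by app
    name and the item's su value."""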
app_name = str(item['app_name']).upper()
key = '{}_{}_DPD1'.format(app_name, str(item['su']))
if key in BeforeInBomber.keys():
lst[key].append(item['id'])
return lst
def dpd1_process(lst):
"""已废弃的方法"""
if not lst:
return
for key, l in lst.items():
rule = getattr(BeforeInBomber, key).value
query = (AutoIVRActions
.select(fn.DISTINCT(AutoIVRActions.loanid))
.where(AutoIVRActions.loanid.in_(l),
AutoIVRActions.group.in_(rule.get('group')),
AutoIVRActions.callstate
.in_(IVRCallStatus.call_success())))
success_set = {i.loanid for i in query}
failed_list = list(set(l) - success_set)
post_params = {
'$and': rule.get('$and'),
'app_list': failed_list
}
resp = Hyperloop().post("/bomber/score/verify", json=post_params)
if not resp.ok:
logging.error(
'hyperloop score verification failed: %s, %s',
str(resp.status_code),
str(resp.text)
)
logging.error('hyperloop score verification failed: %s',
str(post_params))
continue
logging.debug('hyperloop score verification success: %s', resp.content)
resp_json = resp.json()
        # DPD1 applications enter bomber ahead of schedule
app_list = resp_json['data']
if not app_list:
continue
for item in app_list:
            # A/B test: randomly pick a share of users to enter collection early
if random.randint(0, 5) == 1:
send_to_default_q(
MessageAction.APPLICATION_BOMBER,
{'id': int(item)}
)
# auto_ivr: the automatic outbound-call (IVR) system
@action(MessageAction.GET_IVR)
def get_ivr(payload, msg_id):
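    """Page through the bills due in the coming days and queue them into
    the auto-IVR table, grouped by app name, overdue count and days."""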
logging.warning('start get_ivr')
sys_config = (SystemConfig.select()
.where(SystemConfig.key == 'DPD1-3_INTO_IVR')
.first())
    # collect all the loan ids
now = date.today()
    # overdue users no longer go through IVR but enter collection directly,
    # so the time window was changed to stop fetching that data
if sys_config and sys_config.value:
start = now - timedelta(days=3)
else:
start = now
end = now + timedelta(days=4)
    # TODO: use redis
item = IVRActionLog.filter(IVRActionLog.proc_date == now).first()
if not item:
        # clear the IVR data when starting over
AutoIVR.delete().execute()
current_page = 0
elif item.current_page >= item.total_page:
return
else:
current_page = item.current_page
    # overdue grouping: app name + overdue count + overdue days
auto_ivr = {
'DanaCepat01': 1,
'DanaCepat00': 2,
'DanaCepat0PDP1': 3,
'PinjamUang01': 4,
'PinjamUang00': 5,
'PinjamUang0PDP1': 6,
'KtaKilat01': 7,
'KtaKilat00': 8,
'KtaKilat0PDP1': 9,
'DanaCepat11': 10,
'DanaCepat10': 11,
'DanaCepat1PDP1': 12,
'PinjamUang11': 13,
'PinjamUang10': 14,
'PinjamUang1PDP1': 15,
'KtaKilat11': 16,
'KtaKilat10': 17,
'KtaKilat1PDP1': 18,
'DanaCepat0PDP2': 19,
'DanaCepat0PDP3': 20,
'DanaCepat03': 21,
'PinjamUang0PDP2': 22,
'PinjamUang0PDP3': 23,
'PinjamUang03': 24,
'KtaKilat0PDP2': 25,
'KtaKilat0PDP3': 26,
'KtaKilat03': 27,
'DanaCepat1PDP2': 28,
'DanaCepat1PDP3': 29,
'PinjamUang1PDP2': 30,
'PinjamUang1PDP3': 31,
'KtaKilat1PDP2': 32,
'KtaKilat1PDP3': 33,
'DanaCepat13': 36,
'PinjamUang13': 37,
'KtaKilat13': 38,
'DanaCepat12': 39,
'PinjamUang12': 40,
'KtaKilat12': 41,
'DanaCepat02': 42,
'PinjamUang02': 43,
'KtaKilat02': 44,
'IKIDana01': 100,
'IKIDana00': 101,
'IKIDana0PDP1': 102,
'IKIDana11': 103,
'IKIDana10': 104,
'IKIDana1PDP1': 105,
'IKIDana0PDP2': 106,
'IKIDana0PDP3': 107,
'IKIDana03': 108,
'IKIDana1PDP2': 109,
'IKIDana1PDP3': 110,
'IKIDana13': 111,
'IKIDana12': 112,
'IKIDana02': 113,
}
current_page += 1
with db.atomic() as transaction:
while True:
bill_service = BillService()
            # fetch bill_sub.origin_due_at due from today through the next 4 days
ivr_action = bill_service.ivr_pages(
page=current_page,
page_size=500,
start_time=utc_datetime(str(start)),
end_time=utc_datetime(str(end)))
result = ivr_action['result']
page_size = int(ivr_action.get('page_size', 0))
total_page = int(ivr_action.get('total_page', 0))
insert_args = []
for a in result:
due_at = no_utc_datetime(a['due_at'])
days = (due_at.date() - now).days
if days == 2:
continue
if days > 0:
time = str(days)
else:
                    # the window above already excludes two-day-overdue applications
time = str(days).replace('-', 'PDP')
                # su: how many times this user has been overdue
key = a['app_name'] + str(a['su']) + time
group = auto_ivr.get(key)
user_id = a['user_id']
try:
user_resp = (AccountService()
.get_user(path_params={'user_id': user_id}))
if str(user_resp['mobile_no']) == str(a['user_mobile_no']):
numbers = a['user_mobile_no']
else:
numbers = (a['user_mobile_no'] +
',' + user_resp.get('mobile_no'))
except:
logging.error('Request Account Service Error.')
numbers = a['user_mobile_no']
insert_args.append({
'application_id': a['id'],
'numbers': numbers,
'group': group,
'user_id': user_id})
AutoIVR.insert_many(insert_args).execute()
if current_page == 1:
IVRActionLog.create(total_page=total_page,
proc_date=now,
page_size=page_size,
current_page=current_page)
                # for unknown reasons create() does not return the new object here
item = IVRActionLog.get(IVRActionLog.proc_date == now)
else:
item.current_page = current_page
item.page_size = page_size
item.total_page = total_page
item.save()
transaction.commit()
current_page += 1
if current_page > int(total_page):
break
# try:
# ivr_t2_test()
# except Exception as e:
# logging.error("ivr_test_error:%s"%str(e))
if sys_config and sys_config.value:
try:
classfiy_dpd_ptp_apps()
except Exception as e:
logging.error("dpd1-3_test_error:%s"%str(e))
# test code: send t-2 applications into IVR
def ivr_t2_test():
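    """Keep only a configured test proportion of t-2 IVR records active
    and mark the rest as SUCCESS."""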
t2_groups = [39, 40, 41, 42, 43, 44]
ivr_test_proportion = 0.2
sys_config = (SystemConfig.select()
.where(SystemConfig.key == 'IVR_TEST_PROPORTION')
.first())
if sys_config and sys_config.value:
ivr_test_proportion = float(sys_config.value)
    # fetch all t-2 applications
t2_ivrs = (AutoIVR.select()
.where(AutoIVR.group << t2_groups,
AutoIVR.status == AutoIVRStatus.AVAILABLE.value))
t2_dict = defaultdict(list)
    # take a fixed proportion of applications from each group
for ivr in t2_ivrs:
t2_dict[ivr.group].append(ivr.id)
test_ivr_ids = []
for group, ivr_ids in t2_dict.items():
number = ceil(len(ivr_ids) * ivr_test_proportion)
test_ivr_ids += ivr_ids[:number]
if not test_ivr_ids:
return
    # update the IVR status
q = (AutoIVR.update(status=AutoIVRStatus.SUCCESS.value)
.where(AutoIVR.group << t2_groups,
AutoIVR.id.not_in(test_ivr_ids))
.execute())
# filter out applications that already have a PTP in bomber
def classfiy_dpd_ptp_apps():
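    """Drop PTP'd applications from the IVR queue and keep only a test
    share of the remaining DPD1-3 records active."""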
dpd_group = AutoIVR.dpd_groups()
dpd1_3_ivr_pro = 0.2
sys_config = (SystemConfig.select()
.where(SystemConfig.key == 'DPD1-3_IVR_TEST')
.first())
if sys_config and sys_config.value:
dpd1_3_ivr_pro = float(sys_config.value)
    # fetch the applications that already have a PTP
apps = (ApplicationR.select(ApplicationR.external_id)
.where(ApplicationR.overdue_days < 4,
ApplicationR.status != ApplicationStatus.REPAID.value,
ApplicationR.promised_date >= date.today(),
ApplicationR.promised_date.is_null(False)))
apps_ids = [a.external_id for a in apps]
    # remove PTP'd applications from IVR
if apps_ids:
d = (AutoIVR.delete()
.where(AutoIVR.application_id.in_(apps_ids),
AutoIVR.group.in_(dpd_group))
.execute())
    # all DPD1-3 applications
ivrs = (AutoIVR.select().where(AutoIVR.group.in_(dpd_group)))
ivrs_dict = defaultdict(list)
for ivr in ivrs:
ivrs_dict[ivr.group].append(ivr.id)
test_ivrs = []
for group, ivr_ids in ivrs_dict.items():
number = ceil(len(ivr_ids) * dpd1_3_ivr_pro)
test_ivrs += ivr_ids[:number]
if not test_ivrs:
return
    # update the status of the records not under test
q = (AutoIVR.update(status=AutoIVRStatus.SUCCESS.value)
.where(AutoIVR.group.in_(dpd_group),
AutoIVR.id.not_in(test_ivrs))
.execute())
# special handling for the APP merge
@action(MessageAction.APP_MERGE)
@deprecated(version='1.0', reason='This function will be removed soon')
def app_merge(payload, msg_id):
    # pull applications with DPD below 4 into bomber early
sql = """
select *
from (
select a.id as id
from dashboard.application as a
inner join repayment.bill2 as b on b.external_id = a.id
where not exists (
select 1
from battlefront.user_login_log as u
where u.created_at > '2018-08-16'
and u.user_id = a.user_id
)
and a.app = 'DanaCepat'
and a.is_first_loan = 1
and a.apply_at < '2018-08-23 20:50:00'
and b.overdue_days between 1 and 3
and b.status != 2) result
where not exists (
select 1
from bomber.application as a
where a.cycle = 1
and a.status = 4
and a.id = result.id
)
"""
cursor = readonly_db.get_cursor()
cursor.execute(sql)
new_data = cursor.fetchall()
cursor.close()
if new_data:
bomber = [103, 104]
for d in new_data:
app_id = {'id': d[0]}
application_overdue(app_id, None)
            # randomly assign the newly added applications to the collectors
(Application
.update(status=ApplicationStatus.AB_TEST.value,
latest_bomber=random.choice(bomber),
ptp_bomber=None
)
.where(Application.id == d[0])
).execute()
logging.warning('add new app success')
    # users who logged in again and have no PTP are removed from manual collection
ptp = date.today() - timedelta(days=1)
del_sql = """
select a.id
from bomber.application as a
where exists(
select 1
from battlefront.user_login_log as u
where u.created_at > '2018-08-16'
and u.user_id = a.user_id
)
and a.cycle = 1
and a.status = 4
and (a.promised_date is null or a.promised_date < "%s")
""" % ptp
cursor = readonly_db.get_cursor()
cursor.execute(del_sql)
del_date = cursor.fetchall()
cursor.close()
    if not del_date:
        return
ids = list()
for d in del_date:
ids.append(d[0])
(Application
.update(status=ApplicationStatus.UNCLAIMED.value,
latest_bomber=None)
.where(Application.id << ids)).execute()
@action(MessageAction.APPLICATION_BOMBER)
def application_overdue(payload, msg_id):
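    """Create or refresh the local collection order when a (sub-)bill
    goes overdue, then collect the applicant's contacts."""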
application_id = payload['id']
sub_bill_id = payload['bill_sub_id']
local_app = (Application.select()
.where(Application.external_id == application_id)
.order_by(Application.finished_at)
.first())
    # single-period loan whose collection order already exists
if local_app and local_app.type != ApplicationType.CASH_LOAN_STAGING.value:
logging.info('application %s overdue, already exists', application_id)
add_contact(local_app)
return
    # for an instalment loan, check whether the sub-bill already exists
if local_app and local_app.type == ApplicationType.CASH_LOAN_STAGING.value:
overdue_bill = (OverdueBillR.select()
.where(OverdueBillR.sub_bill_id == sub_bill_id,
OverdueBillR.external_id == application_id))
if overdue_bill.exists():
logging.info(
'application %s,sub_bill_id %s overdue, already exists' %
(application_id, sub_bill_id))
return
try:
sub_bill = BillService().sub_bill_list(bill_sub_ids=[sub_bill_id])
sub_bill = sub_bill[0]
except Exception:
logging.error('application %s overdue, get sub_bill info failed:'
'Request To repayment Error', application_id)
return
if sub_bill['status'] == 2:
logging.error('application %s overdue, but bills already cleared',
application_id)
return
overdue_days = sub_bill.get('overdue_days', 0)
if overdue_days == 0:
        logging.info('application {} is not overdue'
                     .format(str(application_id)))
return
gold_eye = GoldenEye().get('/applications/%s' % application_id)
if not gold_eye.ok:
logging.error('get application %s failed: Request to GoldenEye.',
application_id)
return
gold_app = gold_eye.json().get('data')
user_id = gold_app['user_id']
apply_history = Dashboard().get('/users/%s/apply-history' % user_id)
if not apply_history.ok:
logging.error('get user %s apply history failed: Request '
'to Dashboard Failed.', user_id)
return
history = apply_history.json().get('data')
loan_success_times = len([1 for i in history
if i['status'] in [80, 90, 100, 70] and
i['id'] != gold_app['id']])
id = application_id
type = ApplicationType.CASH_LOAN.value
bill_id = sub_bill.get("bill_id")
amount = sub_bill.get("amount")
amount_net = sub_bill.get('amount_net')
interest_rate = sub_bill.get('interest_rate')
overdue_days = sub_bill.get('overdue_days')
origin_due_at = sub_bill.get('origin_due_at')
sub_overdue_bill = {
"collection_id": id,
"bill_id": bill_id,
"sub_bill_id": sub_bill_id,
"periods": sub_bill.get("periods"),
"overdue_days": overdue_days,
"origin_due_at": origin_due_at,
"amount": amount,
"amount_net": amount_net,
"interest_rate": interest_rate,
"external_id": application_id
}
    # generate the collection id according to the order type
if sub_bill['category'] == ApplicationType.CASH_LOAN_STAGING.value:
if local_app and local_app.status != ApplicationStatus.REPAID.value:
sub_overdue_bill["collection_id"] = local_app.id
local_app.amount += amount
local_app.amount_net += amount_net
local_app.save()
new_overdue = OverdueBill.create(**sub_overdue_bill)
logging.info(
"application %s,sub_bill_id:%s overdue created" %
(application_id, sub_bill_id))
return
else:
id = idg()
type = ApplicationType.CASH_LOAN_STAGING.value
sub_overdue_bill["collection_id"] = id
ptp_info = BombingHistory.filter(BombingHistory.application == id).first()
promised_amount = ptp_info and ptp_info.promised_amount
promised_date = ptp_info and ptp_info.promised_date
application = Application.create(
id=id,
user_id=gold_app['user_id'],
user_mobile_no=gold_app['user_mobile_no'],
user_name=gold_app['id_name'],
app=gold_app['app'],
device_no=gold_app['device_no'],
contact=json.dumps(gold_app.get('contact')),
apply_at=gold_app.get('apply_date'),
id_ektp=gold_app.get('id_ektp'),
birth_date=birth_dt_ktp(gold_app.get('id_ektp')),
gender=gender_ktpnum(gold_app.get('id_ektp')),
profile_province=(gold_app.get('profile_province') or {}).get('name'),
profile_city=(gold_app.get('profile_city') or {}).get('name'),
profile_district=(gold_app.get('profile_district') or {}).get('name'),
profile_residence_time=gold_app.get('profile_residence_time'),
profile_residence_type=gold_app.get('profile_residence_type'),
profile_address=gold_app.get('profile_address'),
profile_education=gold_app.get('profile_education'),
profile_college=(gold_app.get('profile_college') or {}).get('name'),
job_name=gold_app.get('job_name'),
job_tel=gold_app.get('job_tel'),
job_bpjs=gold_app.get('job_bpjs'),
job_user_email=gold_app.get('job_user_email'),
job_type=gold_app.get('job_type'),
job_industry=gold_app.get('job_industry'),
job_department=gold_app.get('job_department'),
job_province=(gold_app.get('job_province') or {}).get('name'),
job_city=(gold_app.get('job_city') or {}).get('name'),
job_district=(gold_app.get('job_district') or {}).get('name'),
job_address=gold_app.get('job_address'),
amount=amount,
amount_net=amount_net,
interest_rate=interest_rate,
# late_fee_rate=bill.get('late_fee_rate'),
# late_fee_initial=late_fee_initial,
# late_fee=late_fee,
# interest=interest,
term=gold_app.get('term'),
origin_due_at=origin_due_at,
# due_at=bill.get('due_at'),
overdue_days=overdue_days,
repay_at=sub_bill.get('repay_at'),
# principal_paid=principal_paid,
# late_fee_paid=late_fee_paid,
# repaid=repaid,
# unpaid=unpaid,
loan_success_times=loan_success_times,
arrived_at=datetime.now(),
follow_up_date=datetime.now(),
promised_amount=promised_amount,
promised_date=promised_date,
external_id=application_id,
type=type,
bill_id=bill_id,
dpd1_entry=datetime.now()
)
new_overdue = OverdueBill.create(**sub_overdue_bill)
logging.info('overdue application %s created', application_id)
# new overdue application equals to 'escalate from 0 to 1'
Escalation.create(
application=id,
type=EscalationType.AUTOMATIC.value,
status=ApprovalStatus.APPROVED.value,
current_cycle=0,
escalate_to=1,
)
add_contact(application)
def add_contact(application):
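    """Collect every known phone number for the applicant.

    Numbers come from apply info, extra phones, EC contacts, SMS and
    call-frequency data, company info and account services; they are
    deduped against existing Contact rows and the delta is forwarded to
    mongo via IMPORT_CONTACT_TO_MON.
    """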
logging.info('start add contact for application: %s', application.id)
    # add contact information
contacts = Contact.filter(
Contact.user_id == application.user_id,
)
existing_numbers = {contact.number for contact in contacts}
insert_contacts = list()
mon_insert_contact = {}
# applicant
user_mobile_no = number_strip(application.user_mobile_no)
if user_mobile_no and user_mobile_no not in existing_numbers:
insert_contacts.append({
'user_id': application.user_id,
'name': application.user_name,
'number': user_mobile_no,
'relationship': Relationship.APPLICANT.value,
'source': 'apply info',
'real_relationship': Relationship.APPLICANT.value
})
existing_numbers.add(number_strip(application.user_mobile_no))
extra_phone = GoldenEye().get(
'/users/%s/extra-phone' % application.user_id
)
if not extra_phone.ok:
extra_phone = []
logging.error('get user %s extra contacts failed',
application.user_id)
else:
extra_phone = extra_phone.json()['data']
if extra_phone:
for i in extra_phone:
number = number_strip(i['number'])[:64]
if not number:
continue
if number in existing_numbers:
continue
insert_contacts.append({
'user_id': application.user_id,
'name': application.user_name,
'number': number,
'relationship': Relationship.APPLICANT.value,
'source': 'extra phone',
'real_relationship': Relationship.APPLICANT.value
})
key = user_mobile_no, number, ContactType.A_EXTRA_PHONE.value
mon_insert_contact[key] = 1, 0, application.user_name
existing_numbers.add(number)
# family
# ec contact
ec_contact = []
contact = json.loads(application.contact or '[]')
for i in contact:
if (number_strip(i['mobile_no']) not in existing_numbers and
number_strip(i['mobile_no'])):
ec_contact.append({
'user_id': application.user_id,
'name': i['name'],
'number': number_strip(i['mobile_no']),
'relationship': Relationship.FAMILY.value,
'sub_relation': SubRelation.EC.value,
'source': FamilyContactType.EC.value,
'real_relationship': Relationship.FAMILY.value
})
key = (user_mobile_no,
number_strip(i['mobile_no']),
ContactType.F_EC.value)
mon_insert_contact[key] = 1, 0, i['name']
existing_numbers.add(number_strip(i['mobile_no']))
if i['type'] != 1:
continue
if (number_strip(i['tel_no']) not in existing_numbers and
number_strip(i['tel_no'])):
ec_contact.append({
'user_id': application.user_id,
'name': i['name'],
'number': number_strip(i['tel_no']),
'relationship': Relationship.FAMILY.value,
'sub_relation': SubRelation.EC.value,
'source': FamilyContactType.EC.value,
'real_relationship': Relationship.FAMILY.value
})
key = (user_mobile_no,
number_strip(i['tel_no']),
ContactType.F_EC.value)
mon_insert_contact[key] = 1, 0, i['name']
existing_numbers.add(number_strip(i['tel_no']))
if ec_contact:
Contact.insert_many(ec_contact).execute()
# company
if all((application.job_tel,
number_strip(application.job_tel),
number_strip(application.job_tel) not in existing_numbers)):
insert_contacts.append({
'user_id': application.user_id,
'name': None,
'number': number_strip(application.job_tel),
'relationship': Relationship.COMPANY.value,
'source': 'basic info job_tel',
'real_relationship': Relationship.COMPANY.value
})
key = (user_mobile_no,
number_strip(application.job_tel),
ContactType.C_BASIC_INFO_JOB_TEL.value)
mon_insert_contact[key] = 1, 0, None
existing_numbers.add(number_strip(application.job_tel))
# suggested
sms_contacts = GoldenEye().get(
'/applications/%s/sms-contacts' % application.external_id
)
if not sms_contacts.ok:
sms_contacts = []
logging.info('get user %s sms contacts failed', application.external_id)
else:
sms_contacts = sms_contacts.json()['data']
if sms_contacts:
for i in sms_contacts:
number = number_strip(i['number'])[:64]
if not number:
continue
if number in existing_numbers:
continue
insert_contacts.append({
'user_id': application.user_id,
'name': i['name'][:128],
'number': number,
'relationship': Relationship.SUGGESTED.value,
'source': 'sms contacts',
'real_relationship': Relationship.SUGGESTED.value
})
key = (user_mobile_no,
number,
ContactType.S_SMS_CONTACTS.value)
mon_insert_contact[key] = 1, 0, i['name'][:128]
existing_numbers.add(number)
if insert_contacts:
Contact.insert_many(insert_contacts).execute()
cf = GoldenEye().get(
'/applications/%s/call/frequency' % application.external_id
)
if not cf.ok:
call_frequency = []
logging.error('get application %s call frequency error',
application.external_id)
else:
call_frequency = cf.json()['data']
    # the structure differs, so rebuild the list
insert_contacts = []
fm = GoldenEye().get(
'/applications/%s/contact/family-member' % application.external_id
)
if not fm.ok:
family = []
logging.error('get application %s family-member info error',
application.external_id)
else:
family = fm.json()['data']
if family:
for i in family:
if not (i.get('number')):
logging.info('family member %s' % str(i))
continue
number = number_strip(i['number'])[:64]
if not number:
continue
if number in existing_numbers:
continue
logging.info('family members: %s' % str(i))
insert_contacts.append({
'user_id': application.user_id,
'name': i['name'][:128],
'number': number,
'relationship': Relationship.FAMILY.value,
'source': FamilyContactType.CALLEC.value,
'total_count': i.get('total_count', 1),
'total_duration': i.get('total_duration', 0),
'real_relationship': Relationship.FAMILY.value
})
key = user_mobile_no, number, ContactType.F_CALL_EC.value
mon_insert_contact[key] = (i.get('total_count', 1),
i.get('total_duration', 0),
i['name'][:128])
existing_numbers.add(number)
mon_update_contact = {}
if call_frequency:
with db.atomic():
count = 1
for i in call_frequency:
number = number_strip(i['number'])[:64]
if not number:
continue
if number in existing_numbers:
                    # note: without .execute() this update never runs
                    (Contact
                     .update(total_count=i['total_count'],
                             total_duration=i['total_duration'])
                     .where(Contact.number == number,
                            Contact.user_id == application.user_id)
                     .execute())
key = user_mobile_no, number
mon_update_contact[key] = (i['total_count'],
i['total_duration'])
continue
                # mark the five most frequently called numbers as family members
if count < 6:
insert_contacts.append({
'user_id': application.user_id,
'name': i['name'][:128],
'number': number,
'relationship': Relationship.FAMILY.value,
'total_count': i['total_count'],
'total_duration': i['total_duration'],
'source': FamilyContactType.CALLTOP5.value,
'real_relationship': Relationship.FAMILY.value
})
count += 1
key = user_mobile_no, number, ContactType.F_CALL_TOP5.value
mon_insert_contact[key] = (i['total_count'],
i['total_duration'],
i['name'][:128])
else:
insert_contacts.append({
'user_id': application.user_id,
'name': i['name'][:128],
'number': number,
'relationship': Relationship.SUGGESTED.value,
'total_count': i['total_count'],
'total_duration': i['total_duration'],
'source': 'call frequency',
'real_relationship': Relationship.SUGGESTED.value
})
key = (user_mobile_no,
number,
ContactType.S_CALL_FREQUENCY.value)
mon_insert_contact[key] = (i['total_count'],
i['total_duration'],
i['name'][:128])
existing_numbers.add(number)
if insert_contacts:
Contact.insert_many(insert_contacts).execute()
    # add the credit-verification numbers to the applicant's own contacts
next_apply_list = (AccountService().add_contact(application.user_id))
for next_apply in next_apply_list:
number = number_strip(str(next_apply))[:64]
if number and number not in existing_numbers:
Contact.create(
user_id=application.user_id,
name=application.user_name,
number=number,
relationship=Relationship.SUGGESTED.value,
source='online profile phone',
real_relationship=Relationship.SUGGESTED.value
)
key = (user_mobile_no,
number,
ContactType.S_ONLINE_PROFILE_PHONE.value)
mon_insert_contact[key] = 1, 0, application.user_name
existing_numbers.add(number)
    # add the other number of a dual-SIM phone to the applicant queue
next_applicant = GoldenEye().get(
'/bomber/%s/dual_contact' % application.user_id
)
if not next_applicant.ok:
next_applicant = []
logging.error('get user %s dual_contact contacts failed'
% application.user_id)
else:
next_applicant = next_applicant.json()['data']
if next_applicant:
for i in next_applicant:
number = number_strip(str(i))[:64]
if number and number not in existing_numbers:
Contact.create(
user_id=application.user_id,
name=application.user_name,
number=number,
relationship=Relationship.APPLICANT.value,
source='apply info',
real_relationship=Relationship.APPLICANT.value
)
key = user_mobile_no, number, ContactType.A_APPLY_INFO.value
mon_insert_contact[key] = 1, 0, application.user_name
existing_numbers.add(number)
logging.info('get user %s dual_contact contacts success' %
application.user_id)
# add new contact
    # add the other numbers registered under the same KTP to the applicant
numbers = []
try:
numbers = (AccountService()
.ktp_number(path_params={'user_id': application.user_id}))
except Exception as e:
logging.info('request ktp numbers failed %s' % str(e))
for n in numbers:
number = number_strip(str(n))[:64]
if number and number not in existing_numbers:
Contact.create(
user_id=application.user_id,
name=application.user_name,
number=number,
relationship=Relationship.APPLICANT.value,
source='ktp number',
real_relationship=Relationship.APPLICANT.value
)
key = (user_mobile_no,
number,
ContactType.A_KTP_NUMBER.value)
mon_insert_contact[key] = 1, 0, application.user_name
existing_numbers.add(number)
    logging.info('get user %s ktp number contacts success'
                 % application.user_id)
    # mark contacts whose is_family is true as EC
try:
ecs = GoldenEye().get(
'/applications/%s/contact/ec' % application.external_id
)
    except Exception as e:
        logging.info('request ec-member error: %s' % str(e))
        ecs = None
    try:
        if not ecs or not ecs.ok:
ec = []
logging.info('get application %s ec-member info error',
application.external_id)
else:
ec = ecs.json()['data']
if ec:
for e in ec:
number = number_strip(e['numbers'])[:64]
if not number:
continue
if number not in existing_numbers:
Contact.create(
user_id=application.user_id,
name=e['name'][:128],
number=number,
relationship=Relationship.FAMILY.value,
source=FamilyContactType.CONTACTEC.value,
real_relationship=Relationship.FAMILY.value
)
key = (user_mobile_no,
number,
ContactType.F_CONTACT_EC.value)
mon_insert_contact[key] = 1, 0, e['name'][:128]
existing_numbers.add(number)
except Exception as e:
logging.info('add ec_member error:%s' % str(e))
    # mark contacts whose is_me is true as the applicant's own number
try:
mn = GoldenEye().get(
'/applications/%s/contact/my_number' % application.external_id
)
    except Exception as e:
        logging.info('request my_number error: %s' % str(e))
        mn = None
    try:
        if not mn or not mn.ok:
my = []
logging.info('get application %s my_number info error',
application.external_id)
else:
my = mn.json()['data']
if my:
for m in my:
number = number_strip(m)[:64]
if not number:
continue
if number not in existing_numbers:
Contact.create(
user_id=application.user_id,
name=my[m][:128],
number=number,
relationship=Relationship.SUGGESTED.value,
source='my number',
real_relationship=Relationship.SUGGESTED.value
)
key = user_mobile_no, number, ContactType.S_MY_NUMBER.value
mon_insert_contact[key] = 1, 0, my[m][:128]
existing_numbers.add(number)
except Exception as e:
logging.info('add my_member error:%s' % str(e))
    # fetch the company's numbers
try:
cn = GoldenEye().get(
'/applications/%s/contact/company-number' % application.external_id
)
    except Exception as e:
        logging.info('request company-number error: %s' % str(e))
        cn = None
    try:
        if not cn or not cn.ok:
cn = []
logging.info('get application %s company_number info error',
application.external_id)
else:
cn = cn.json()['data']
if cn:
for c in cn:
number = c
if not number:
continue
if number not in existing_numbers:
Contact.create(
user_id=application.user_id,
name=cn[c][:128],
number=number,
relationship=Relationship.COMPANY.value,
source='company',
real_relationship=Relationship.COMPANY.value
)
key = user_mobile_no, number, ContactType.C_COMPANY.value
mon_insert_contact[key] = 1, 0, cn[c][:128]
existing_numbers.add(number)
except Exception as e:
logging.info('add company_member error:%s' % str(e))
    # fetch the SIM contacts from other devices the user logged in on
    # and add them to the applicant's numbers
try:
ol = (AccountService()
.other_login_contact(userId=application.user_id))
except Exception as e:
logging.error('request other_login error: %s' % e)
ol = {}
try:
for o in ol:
number = number_strip(o)
if not number:
continue
if number not in existing_numbers:
Contact.create(
user_id=application.user_id,
name=ol[o][:128],
number=number,
relationship=Relationship.SUGGESTED.value,
source='other_login',
real_relationship=Relationship.SUGGESTED.value
)
key = (user_mobile_no,
number,
ContactType.S_OTHER_LOGIN.value)
mon_insert_contact[key] = 1, 0, ol[o][:128]
except Exception as e:
logging.error('add other_login number error:%s' % e)
logging.info('add contact for application %s finished', application.id)
if mon_insert_contact or mon_update_contact:
send_to_default_q(MessageAction.IMPORT_CONTACT_TO_MON,
{
'user_mobile_no': user_mobile_no,
'insert_contact': str(mon_insert_contact),
'update_contact': str(mon_update_contact),
'user_id': application.user_id,
'name': application.user_name
})
@action(MessageAction.IMPORT_CONTACT_TO_MON)
def import_contact_to_mon(payload, msg_id):
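    """Mirror the freshly collected contacts into mongo's TotalContact,
    then trigger dedup and the reverse sync back into Contact."""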
user_mobile_no = payload['user_mobile_no']
insert_contact = eval(payload['insert_contact'])
update_contact = eval(payload['update_contact'])
user_id = payload['user_id']
name = payload['name']
if not (insert_contact or update_contact or user_mobile_no):
logging.error("Invalid params")
drop_duplicated_contact({'numbers': [user_mobile_no]}, None)
send_to_default_q(MessageAction.CONTACT_FROM_TOTAL, {
'number': user_mobile_no,
'user_id': user_id
})
return
contacts = TotalContact.objects(src_number=user_mobile_no, is_calc=False)
insert_list = []
for c in contacts:
key = (user_mobile_no, c.dest_number, c.source)
if key in insert_contact:
insert_contact.pop(key)
for (sn, dn, s), (tc, td, na) in insert_contact.items():
insert_list.append({
'src_number': sn,
'src_name': name,
'dest_number': dn,
'dest_name': na,
'source': s,
'total_count': tc,
'total_duration': td
})
if insert_list:
insert_count = len((TotalContact
.objects
.insert([TotalContact(**dct)
for dct in insert_list])))
logging.info("insert success %s", insert_count)
update_count = 0
for (sn, dn), (tc, td) in update_contact.items():
result = (TotalContact
.objects(src_number=sn, dest_number=dn, is_calc=False)
.update(total_count=tc, total_duration=td))
if result:
update_count += 1
logging.info("update success %s", update_count)
drop_duplicated_contact({'numbers': [user_mobile_no]}, None)
send_to_default_q(MessageAction.CONTACT_FROM_TOTAL, {
'number': user_mobile_no,
'user_id': user_id
})
@action(MessageAction.DROP_DUPLICATED_CONTACT)
def drop_duplicated_contact(payload, msg_id):
"""
    When deduplicating, compare total_count first, then total_duration.
:param payload:
:param msg_id:
:return:
"""
numbers = payload.get('numbers', [])
    if not numbers:
        logging.error("no numbers to drop")
        return
query = (TotalContact
.objects(Q(src_number__in=numbers) | Q(dest_number__in=numbers)))
contact_list = defaultdict(list)
delete_list = []
insert_list = []
for c in query:
if c.src_number == c.dest_number:
delete_list.append(c.id)
key = c.src_number, c.dest_number, c.source
contact_list[key].append({
'id': c.id,
'src_number': c.src_number,
'dest_number': c.dest_number,
'total_count': c.total_count,
'total_duration': c.total_duration,
'is_calc': c.is_calc,
'source': c.source,
'src_name': c.src_name,
'dest_name': c.dest_name
})
contact_list2 = deepcopy(contact_list)
for key, info in contact_list.items():
_info = sorted(info,
key=lambda x: (not x['is_calc'],
x['total_count'],
x['total_duration']),
reverse=True)
rs = _info[0]
if not rs['is_calc']:
contact_list2[(key[1], key[0], key[2])].append({
'src_number': rs['dest_number'],
'dest_number': rs['src_number'],
'total_count': rs['total_count'],
'total_duration': rs['total_duration'],
'is_calc': True,
'source': rs['source'],
'id': '',
'src_name': rs['dest_name'],
'dest_name': rs['src_name']
})
delete_ids = [i['id'] for i in _info[1:] if i['id']]
delete_list.extend(delete_ids)
for key, info in contact_list2.items():
_info = sorted(info,
key=lambda x: (not x['is_calc'],
x['total_count'],
x['total_duration']),
reverse=True)
rs = _info[0]
        # The first pass already flushed every non-reversed record
if not rs['is_calc']:
continue
if not rs['id']:
rs.pop('id')
insert_list.append(rs)
delete_ids = [i['id'] for i in _info[1:] if i['id']]
delete_list.extend(delete_ids)
if delete_list:
delete_count = TotalContact.objects(id__in=delete_list).delete()
logging.info("numbers %s: delete success %s", numbers, delete_count)
if insert_list:
insert_count = len((TotalContact
.objects
.insert([TotalContact(**dct)
for dct in insert_list])))
logging.info("numbers %s: insert success %s", numbers, insert_count)
def get_contact_from_mongo(number):
if not number:
return []
query = (TotalContact
.objects(src_number=number,
source__in=TotalContact.available())
.order_by('source'))
lst = []
for c in query:
relation = TotalContact.relationship(c.source)
if relation == -1:
continue
source = TotalContact.str_source(c.source)
if not source:
continue
lst.append({
'related_number': c.dest_number,
'source': source,
'is_calc': c.is_calc,
'total_count': c.total_count,
'total_duration': c.total_duration,
'relation': relation,
'name': c.dest_name
})
return lst
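# Example element of the list returned by get_contact_from_mongo (values are
# illustrative; the actual 'source' and 'relation' values come from
# TotalContact.str_source and TotalContact.relationship):
# {'related_number': '628123456789', 'source': 'call frequency',
#  'is_calc': False, 'total_count': 3, 'total_duration': 120,
#  'relation': 2, 'name': 'Budi'}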
@action(MessageAction.CONTACT_FROM_TOTAL)
def contact_from_total(payload, msg_id):
number = payload.get('number')
user_id = payload.get('user_id')
if not (number and user_id):
logging.error("Invalid params")
return
result = get_contact_from_mongo(number)
if not result:
logging.error("contact from mongo is none")
return
contacts = Contact.filter(Contact.user_id == user_id)
existing_numbers = {contact.number for contact in contacts}
contact_list = []
for c in result:
number = number_strip(c['related_number'])
if number in existing_numbers:
continue
contact_list.append({
'user_id': user_id,
'name': c['name'],
'number': number,
'relationship': c['relation'],
'source': c['source'],
'total_duration': c['total_duration'],
'total_count': c['total_count'],
'real_relationship': c['relation']
})
existing_numbers.add(number)
if contact_list:
Contact.insert_many(contact_list).execute()
@action(MessageAction.BILL_REVOKE)
def bill_revoke(payload, msg_id):
application_id = payload['external_id']
if 'bill_sub_id' not in payload:
bill_revoke_old(application_id)
return
    # sub-bill id
sub_bill_id = payload['bill_sub_id']
    # unique repayment identifier from the Java side
partner_bill_id = payload['partner_bill_id']
application = (Application
.filter(Application.external_id == application_id).first())
if application.type == ApplicationType.CASH_LOAN_STAGING.value:
        # Look up the collection case via the sub-bill
        application = (Application.select(Application)
                       .join(OverdueBill, JOIN_LEFT_OUTER,
                             on=Application.id == OverdueBill.collection_id)
.where(OverdueBill.external_id == application_id,
OverdueBill.sub_bill_id == sub_bill_id)
.first())
if not application:
logging.info('application %s paid, not found application',
application_id)
return
try:
        bill = BillService().sub_bill_list(bill_sub_ids=[sub_bill_id])
bill = bill[0]
except Exception:
logging.error('application %s overdue, get bill info failed: '
'Request To Repayment Error', application_id)
raise RuntimeError('Get repayment bills failed. {}'
.format(str(application.id)))
if bill.get('overdue_days') > 0 and bill.get('status') != 2:
Application.update(
status=ApplicationStatus.UNCLAIMED.value
).where(Application.id == application.id).execute()
    # Fetch the sub-bill (overdue bill) record
overdue_bill = (OverdueBill
.filter(OverdueBill.external_id == application_id,
OverdueBill.sub_bill_id == sub_bill_id)
.first())
if not overdue_bill:
logging.info("not find overdue_bill,sub_bill_id:%s,appid:%s" %
(sub_bill_id, application_id))
return
if overdue_bill.status == ApplicationStatus.REPAID.value:
overdue_bill.status = ApplicationStatus.UNCLAIMED.value
overdue_bill.finished_at = None
overdue_bill.save()
        # Mark the matching repayment log as inactive
        RepaymentLog.update(
            no_active=1
).where(RepaymentLog.partner_bill_id == partner_bill_id,
RepaymentLog.overdue_bill_id == overdue_bill.id).execute()
# Handle legacy-data messages
def bill_revoke_old(external_id):
application = (Application.select()
.where(Application.id == external_id)
.first())
if not application:
logging.info("not get application")
return
try:
bill = BillService().bill_dict(
application_id=external_id)
except Exception:
logging.error('application %s overdue, get bill info failed: '
'Request To Repayment Error', external_id)
return
    if bill.get('overdue_days') > 0 and bill.get("status") != 2:
q = (Application
.update(status=ApplicationStatus.UNCLAIMED.value,
repay_at=bill.get('repay_at'))
.where(Application.id == external_id).execute())
p = (OverdueBill.update(status=ApplicationStatus.UNCLAIMED.value)
.where(OverdueBill.collection_id == external_id).execute())
return
def check_key_not_none(payload, keys):
for key in keys:
if payload.get(key) is None:
logging.error('Missing args {}'.format(str(key)))
return False
return True
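# e.g. check_key_not_none({'external_id': 1, 'paid_at': None},
#                         ['external_id', 'paid_at'])
# logs the missing 'paid_at' and returns False.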
# Repayment
@action(MessageAction.BILL_PAID)
def bill_paid(payload, msg_id):
# Don't use validator, it will throw exception
validate = check_key_not_none(payload,
['external_id', 'late_fee_part',
                                   'principal_part', 'paid_at', 'bill_sub_id',
'partner_bill_id'])
if not validate:
logging.error('payload key not fully pass in.')
return
external_id = payload['external_id']
late_fee_part = Decimal(payload['late_fee_part'])
principal_part = Decimal(payload['principal_part'])
paid_at = payload['paid_at']
partner_bill_id = payload['partner_bill_id']
logging.debug('application %s paid principal part %s, paid late fee '
'part %s', external_id, principal_part, late_fee_part)
application = (Application
.filter(Application.external_id == external_id)
.order_by(-Application.created_at)
.first())
if not application:
        logging.info('application %s paid, not found application', external_id)
return
    # Fetch the installment period
sub_bill_id = payload['bill_sub_id']
overdue_bill = (OverdueBillR.select()
.where(OverdueBillR.collection_id == application.id,
OverdueBillR.sub_bill_id == sub_bill_id)
.first())
if (application.type == ApplicationType.CASH_LOAN_STAGING.value
and not overdue_bill):
logging.info("bill sub not in bomber %s",sub_bill_id)
return
with db.atomic():
repay_at = str_no_utc_datetime(payload['latest_repay_at'])
Application.update(
repay_at=repay_at
).where(Application.id == application.id).execute()
        # After the predictive-dialer launch, every repayment counts as is_bombed = True
RepaymentLog.create(
application=application.id,
is_bombed=True,
current_bomber=application.latest_bomber_id,
cycle=application.cycle,
principal_part=principal_part,
late_fee_part=late_fee_part,
repay_at=paid_at,
ptp_bomber=application.ptp_bomber,
latest_call=application.latest_call,
periods=overdue_bill.periods if overdue_bill else None,
overdue_bill_id=overdue_bill.id if overdue_bill else None,
partner_bill_id=partner_bill_id
)
        # Smart collection: re-rank the numbers used for collection calls
phone_status = PhoneStatus.CONNECTED.value
real_relationship = RealRelationship.user_values()
commit = CallActionCommit.NO.value
number = (CallActions.select(CallActions.number)
.where(CallActions.phone_status == phone_status,
CallActions.real_relationship << real_relationship,
CallActions.commit == commit,
CallActions.application == application.id)
.order_by(-CallActions.created_at)
.first())
if number:
(Contact.update(call_priority=PriorityStatus.REPAY.value)
.where(Contact.user_id == application.user_id,
Contact.call_priority == PriorityStatus.LAST.value)
).execute()
(Contact.update(call_priority=PriorityStatus.LAST.value)
.where(Contact.user_id == application.user_id,
Contact.number == number.number)
).execute()
if not application.latest_bomber_id:
return
Inbox.create(
title='application %s,sub_bill_id %s repaid' % (
application.external_id, sub_bill_id),
content='application %s,sub_bill_id %s repaid' % (
application.external_id, sub_bill_id),
receiver=(application.latest_bomber_id or
application.last_bomber_id),
category=InboxCategory.REPAID.value,
)
@action(MessageAction.BILL_RELIEF)
def bill_relief(payload, msg_id):
"""已废弃"""
bill = payload['head_bill']
repay_at = str_no_utc_datetime(bill['latest_repay_at'])
updated_row = Application.update(
repay_at=repay_at,
).where(Application.id == bill['external_id']).execute()
logging.info('application %s bill relief done', bill['external_id'])
return updated_row
# Repayment completed
@action(MessageAction.BILL_CLEARED)
@action(MessageAction.BILL_CLEARED_BEFORE_CONFIRM)
def bill_cleared(payload, msg_id):
"""
    BILL_CLEARED_BEFORE_CONFIRM is only used inside the bomber system: when MST
    clears a bill, its status is first set to repaid so it stops being collected
"""
external_id = payload.get('external_id')
sub_bill_id = payload.get('bill_sub_id')
if not external_id:
logging.warning('payload has no external_id. {}'.format(str(payload)))
return
    # Once fully repaid, stop dialing this application via IVR
AutoIVR.update(
status=AutoIVRStatus.REPAID.value
).where(AutoIVR.application_id == external_id).execute()
try:
bill = BillService().sub_bill_list(bill_sub_ids=[sub_bill_id])
bill = bill[0]
except Exception:
        logging.error('application %s get bill info failed: '
                      'Request To Repayment Error', external_id)
return
application = Application.filter(
Application.external_id == external_id,
Application.status << [ApplicationStatus.PROCESSING.value,
ApplicationStatus.UNCLAIMED.value,
ApplicationStatus.BAD_DEBT.value,
ApplicationStatus.AB_TEST.value]
).first()
if not application:
logging.info('application %s repay clear, not found bomber record',
external_id)
return
with db.atomic():
        # Update the status of the sub-bill repaid this time
        sub_bill_update = (OverdueBill.update(
            status=ApplicationStatus.REPAID.value,
            finished_at=datetime.now())
.where(OverdueBill.collection_id == application.id,
OverdueBill.sub_bill_id == sub_bill_id)
.execute())
        # For installment cases, check whether every sub-bill is repaid
overdue_bill = (OverdueBill.select()
.where(OverdueBill.collection_id == application.id,
OverdueBill.status != 2,
OverdueBill.sub_bill_id != sub_bill_id))
if overdue_bill.exists():
if application.latest_bomber_id:
Inbox.create(
title='application %s sub_bill_id %s cleared' % (
application.external_id, sub_bill_id),
content='application %s sub_bill_id %s cleared' % (
application.external_id, sub_bill_id),
receiver=application.latest_bomber_id,
category=InboxCategory.CLEARED.value,
)
return
        # Sync the completed repayment to the outsourcing partner
partner = DispatchApp.filter(DispatchApp.application == application.id)
if partner.exists():
DispatchApp.update(
status=DisAppStatus.ABNORMAL.value
).where(DispatchApp.application == application.id).execute()
        # Update the application's status in the auto-dialer queue
AutoCallList.update(
status=AutoListStatus.REMOVED.value,
description='bill clear'
).where(AutoCallList.application == application.id).execute()
application.status = ApplicationStatus.REPAID.value
application.finished_at = datetime.now()
application.paid_at = datetime.now()
        # overdue_days <= 0 means the loan never became overdue; it should not be in bomber
if int(bill.get("overdue_days")) <= 0:
application.no_active = 1
(RepaymentLog.update(no_active=1)
.where(RepaymentLog.application == application.id)
.execute())
application.save()
bomber_id = application.latest_bomber_id
        # C1B cases re-enter at month-end clearing and must be checked out when
        # paid; the cycle value (2 for C1B) doubles as the default bomber_id
        if (application.cycle in (Cycle.C1A.value, Cycle.C1B.value) and
not bomber_id):
bomber_id = application.cycle
if not bomber_id:
return
(DispatchAppHistory.update(
out_at=datetime.now()
).where(
DispatchAppHistory.application == application.id,
DispatchAppHistory.bomber_id == bomber_id)).execute()
if not application.latest_bomber_id:
return
item = (OldLoanApplication
.get_or_none(OldLoanApplication.status ==
OldLoanStatus.PROCESSING.value,
OldLoanApplication.application_id ==
application.id))
if item:
end_old_application(item, paid=True)
out_record(src_bomber_id=bomber_id,
application_ids=[item.application_id])
Inbox.create(
title='application %s cleared' % application.external_id,
content='application %s cleared' % application.external_id,
receiver=application.latest_bomber_id,
category=InboxCategory.CLEARED.value,
)
# Sync bill2
@action(MessageAction.OVERDUE_BILL_SYNC)
def overdue_bill_sync(payload, msg_id):
"""已废弃"""
bill2_list = payload
updated_count = 0
with db.atomic():
for bill in bill2_list:
principal = Decimal(bill['principal'])
repay_at = str_no_utc_datetime(bill['latest_repay_at'])
updated_count += Application.update(
amount=principal,
repay_at=repay_at,
).where(Application.id == bill['external_id']).execute()
logging.info('overdue sync done, updated count: %s', updated_count)
@action(MessageAction.BOMBER_CALC_OVERDUE_DAYS_OVER)
def calc_overdue_days_over(payload, msg_id):
"""
Call by BOMBER_CALC_SUMMARY
:param payload:
:param msg_id:
:return:
"""
    # Update cases more than 95 days overdue
now = fn.NOW()
origin_diff_days = fn.DATEDIFF(now, Application.origin_due_at)
overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))
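    # i.e. overdue_days = max(DATEDIFF(NOW(), origin_due_at), 0), so the value
    # is clamped at zero and never goes negative.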
query = (Application
.update(overdue_days=overdue_days)
.where(Application.status <<
[ApplicationStatus.PROCESSING.value,
ApplicationStatus.UNCLAIMED.value,
ApplicationStatus.AB_TEST.value],
Application.overdue_days > 95,
Application.type == ApplicationType.CASH_LOAN.value))
updated_rows_count = query.execute()
logging.info('calc overdue days done, updated count: %s',
updated_rows_count)
try:
calc_overdue_days_over_instalment()
except Exception as e:
logging.error("calc_overdue_days_over_instalment_error: %s"%str(e))
# 计算overdue_days后自动触发升级
apps = Application.filter(
Application.status << [ApplicationStatus.UNCLAIMED.value,
ApplicationStatus.PROCESSING.value,
ApplicationStatus.AB_TEST.value],
Application.overdue_days > 95,
Application.promised_date.is_null(True) |
(fn.DATE(Application.promised_date) < datetime.today().date()))
ids = [i.id for i in apps]
for idx in range(0, len(ids), 100):
send_to_default_q(
MessageAction.BOMBER_AUTOMATIC_ESCALATION,
{'application_list': ids[idx:idx + 100]})
send_to_default_q(MessageAction.UPDATE_OLD_LOAN_APPLICATION, {})
# Recalculate overdue days for installment cases already past 95 days
def calc_overdue_days_over_instalment():
now = fn.NOW()
origin_diff_days = fn.DATEDIFF(now, OverdueBill.origin_due_at)
overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))
sub_bill_status_list = [ApplicationStatus.PROCESSING.value,
ApplicationStatus.UNCLAIMED.value,
ApplicationStatus.AB_TEST.value]
for status in sub_bill_status_list:
        # Update overdue days
query = (OverdueBill.update(overdue_days=overdue_days)
.where(OverdueBill.status == status,
OverdueBill.overdue_days > 95))
updated_rows_count = query.execute()
logging.info("calc_overdue_days_over_instalment done,count:%s,status:%s" %
(updated_rows_count, status))
        # Fetch all sub-bill info
overdue_bills = (OverdueBill
.select(OverdueBill.collection_id,
OverdueBill.overdue_days)
.join(Application, JOIN_LEFT_OUTER,
on=OverdueBill.collection_id == Application.id)
.where(Application.status == status,
(Application.type ==
ApplicationType.CASH_LOAN_STAGING.value)))
        # For each installment case take the max overdue days across sub-bills
app_update = {}
for ob in overdue_bills:
if ob.collection_id not in app_update:
app_update[ob.collection_id] = ob.overdue_days
else:
ob_days = max(app_update[ob.collection_id], ob.overdue_days)
app_update[ob.collection_id] = ob_days
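        # e.g. sub-bills at 96 and 120 overdue days roll up to 120 on the
        # parent collection case.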
        # Write the overdue days back to the collection cases
for aid, a_days in app_update.items():
q = (Application.update(overdue_days=a_days)
.where(Application.id == aid)
.execute())
logging.info("update instalment application done")
@action(MessageAction.BOMBER_CALC_OVERDUE_DAYS)
def calc_overdue_days(payload, msg_id):
"""
Call by BOMBER_CALC_SUMMARY
:param payload:
:param msg_id:
:return:
"""
now = fn.NOW()
origin_diff_days = fn.DATEDIFF(now, Application.origin_due_at)
overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))
query_unclaimed = (Application
.update(overdue_days=overdue_days)
.where(Application.status ==
ApplicationStatus.UNCLAIMED.value,
Application.overdue_days <= 95,
(Application.type ==
ApplicationType.CASH_LOAN.value)))
updated_rows_count_unclaimed = query_unclaimed.execute()
logging.info('calc overdue days done, updated count: %s',
updated_rows_count_unclaimed)
query_processing = (Application
.update(overdue_days=overdue_days)
.where(Application.status ==
ApplicationStatus.PROCESSING.value,
Application.overdue_days <= 95,
(Application.type ==
ApplicationType.CASH_LOAN.value)))
updated_rows_count_processing = query_processing.execute()
logging.info('calc overdue days done, updated count: %s',
updated_rows_count_processing)
query_test = (Application
.update(overdue_days=overdue_days)
.where(Application.status ==
ApplicationStatus.AB_TEST.value,
Application.overdue_days <= 95,
(Application.type ==
ApplicationType.CASH_LOAN.value)))
updated_rows_count_test = query_test.execute()
logging.info('calc overdue days done, updated count: %s',
updated_rows_count_test)
    # Calculate overdue days for installment bills
calc_overdue_days_instalment()
    # After recalculating overdue_days, trigger automatic escalation
apps = Application.select(Application.id).where(
Application.status << [ApplicationStatus.UNCLAIMED.value,
ApplicationStatus.PROCESSING.value,
ApplicationStatus.AB_TEST.value],
Application.overdue_days <= 95,
Application.promised_date.is_null(True) |
(fn.DATE(Application.promised_date) < datetime.today().date()))
ids = [i.id for i in apps]
for idx in range(0, len(ids), 100):
send_to_default_q(
MessageAction.BOMBER_AUTOMATIC_ESCALATION,
{'application_list': ids[idx:idx + 100]})
send_to_default_q(MessageAction.UPDATE_OLD_LOAN_APPLICATION, {})
    # After overdue_days is settled, stamp C1A_entry (cases at 4 overdue days enter C1A)
Application.update(
C1A_entry=datetime.now()
).where(
Application.status << [ApplicationStatus.UNCLAIMED.value,
ApplicationStatus.PROCESSING.value,
ApplicationStatus.AB_TEST.value],
Application.overdue_days == 4
).execute()
# Calculate overdue days for installment cases
def calc_overdue_days_instalment():
now = fn.NOW()
origin_diff_days = fn.DATEDIFF(now, OverdueBill.origin_due_at)
overdue_days = fn.GREATEST(origin_diff_days, SQL('0'))
sub_bill_status_list = [ApplicationStatus.PROCESSING.value,
ApplicationStatus.UNCLAIMED.value,
ApplicationStatus.AB_TEST.value]
    # First day of the current month
today_now_time = datetime.now()
month_first_day = today_now_time.replace(day=1,
hour=1,
minute=30,
second=0,
microsecond=0)
for status in sub_bill_status_list:
        # Update overdue days
        query = (OverdueBill.update(overdue_days=overdue_days)
.where(OverdueBill.status == status,
OverdueBill.overdue_days <= 95))
updated_rows_count = query.execute()
logging.info("calc_overdue_days_instalment done,count:%s,status:%s" %
(updated_rows_count, status))
        # Fetch all sub-bill info
overdue_bills = (OverdueBill
.select(OverdueBill.status,
OverdueBill.created_at,
OverdueBill.collection_id,
OverdueBill.overdue_days)
.join(Application, JOIN_LEFT_OUTER,
on=OverdueBill.collection_id == Application.id)
.where(Application.status == status,
(Application.type ==
ApplicationType.CASH_LOAN_STAGING.value)))
        # For each installment case take the max overdue days across sub-bills
app_update = {}
for ob in overdue_bills:
            # Skip installments repaid before this month began
if (ob.status == ApplicationStatus.REPAID.value and
ob.created_at < month_first_day):
continue
if ob.collection_id not in app_update:
app_update[ob.collection_id] = ob.overdue_days
else:
                ob_days = max(app_update[ob.collection_id], ob.overdue_days)
app_update[ob.collection_id] = ob_days
        # Write the overdue days back to the collection cases
        for aid, a_days in app_update.items():
            q = (Application.update(overdue_days=a_days)
.where(Application.id == aid)
.execute())
logging.info("update instalment application done")
@action(MessageAction.BOMBER_AUTOMATIC_ESCALATION)
def automatic_escalation(payload, msg_id):
app_ids = payload.get('application_list', [])
if not app_ids:
return
    # Filter out already-repaid orders
apps = (Application.select()
.where(Application.id.in_(app_ids),
Application.status != ApplicationStatus.REPAID.value))
for a in apps:
new_cycle = application_entry_different_calculations(a)
if a.overdue_days < 90:
logging.info(
"automatic_escalation_bomber_app_id:{},new_cycle:{},cycle:{},overdue_days:{}".format(
a.id, new_cycle, a.cycle, a.overdue_days))
if new_cycle > a.cycle:
with db.atomic():
if (a.latest_bomber_id or
a.cycle in (Cycle.C1A.value, Cycle.C1B.value)):
bomber_id = (a.latest_bomber_id
if a.latest_bomber_id else a.cycle)
(DispatchAppHistory.update(
out_at=datetime.now(),
out_overdue_days=a.overdue_days,
).where(
DispatchAppHistory.application == a.id,
DispatchAppHistory.bomber_id == bomber_id
)).execute()
Escalation.create(
application=a.id,
type=EscalationType.AUTOMATIC.value,
status=ApprovalStatus.APPROVED.value,
current_cycle=a.cycle,
escalate_to=new_cycle,
current_bomber_id=a.latest_bomber,
)
                # On escalation, mark outsourced cases abnormal in dispatch_app
                dis_app_update = (DispatchApp
                                  .update(status=DisAppStatus.ABNORMAL.value)
.where(DispatchApp.application == a.id))
dis_app_update.execute()
a.cycle = new_cycle
a.last_bomber = a.latest_bomber
a.status = ApplicationStatus.UNCLAIMED.value
a.latest_bomber = None
a.ptp_bomber = None
a.latest_call = None
                # Reset the call counter after escalation
a.called_times = 0
if new_cycle == Cycle.C1B.value:
a.C1B_entry = datetime.now()
elif new_cycle == Cycle.C2.value:
a.C2_entry = datetime.now()
elif new_cycle == Cycle.C3.value:
a.C3_entry = datetime.now()
a.save()
logging.info('automatic escalation done')
# Move some cases' C1B entry point to day 10
def application_entry_different_calculations(app):
conf = {
1: [1, 10],
2: [11, 30],
3: [31, 60],
4: [61, 90],
5: [91, 999999],
}
    for new_cycle, scopes in conf.items():
if scopes[0] <= app.overdue_days <= scopes[1]:
return new_cycle
return app.cycle
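# e.g. an application with overdue_days == 15 falls in scope [11, 30] and maps
# to cycle 2 (C1B); overdue_days == 61 falls in [61, 90] and maps to cycle 4.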
@action(MessageAction.BOMBER_CALC_SUMMARY)
def cron_summary(payload, msg_id):
"""已废弃"""
employees = Bomber.select(Bomber, Role).join(Role)
summary = {
i.id: {
'cycle': i.role.cycle,
'claimed': 0,
'completed': 0,
'cleared': 0,
'escalated': 0,
'transferred': 0,
'promised': 0,
'amount_recovered': Decimal(0),
'calls_made': 0,
'calls_connected': 0,
'sms_sent': 0,
}
for i in employees
}
    # Runs daily at 02:15 and computes the previous day's stats
now_date = date.today()
cal_date = now_date - timedelta(days=1)
    # Cases claimed that day
claimed = (Application
.select(Application.latest_bomber,
fn.COUNT(Application.id).alias('claimed'))
.where(fn.DATE(Application.claimed_at) == cal_date,
Application.status <<
[ApplicationStatus.PROCESSING.value,
ApplicationStatus.REPAID.value],
Application.latest_bomber.is_null(False))
.group_by(Application.latest_bomber))
    # Cases repaid that day
cleared = (Application
.select(Application.latest_bomber,
fn.COUNT(Application.id).alias('cleared'))
.where(fn.DATE(Application.finished_at) == cal_date,
Application.status == ApplicationStatus.REPAID.value,
Application.latest_bomber.is_null(False))
.group_by(Application.latest_bomber))
    # Cases followed up (bombed) that day
completed = (Application
.select(Application.latest_bomber,
fn.COUNT(Application.id).alias('completed'))
.where(Application.latest_bombing_time.is_null(False),
fn.DATE(Application.latest_bombing_time) == cal_date,
Application.latest_bomber.is_null(False))
.group_by(Application.latest_bomber))
    # Cases escalated into the next cycle that day
escalated = (Escalation
.select(Escalation.current_bomber,
fn.COUNT(Escalation.id).alias('escalated'))
.where(fn.DATE(Escalation.created_at) == cal_date,
Escalation.type == EscalationType.AUTOMATIC.value,
Escalation.current_bomber.is_null(False),
Escalation.status == ApprovalStatus.APPROVED.value)
.group_by(Escalation.current_bomber))
    # Cases transferred away from each operator that day
transferred = (Transfer
.select(Transfer.operator,
fn.COUNT(Transfer.id).alias('transferred'))
.where(fn.DATE(Transfer.reviewed_at) == cal_date,
Transfer.status == ApprovalStatus.APPROVED.value)
.group_by(Transfer.operator))
    # PTP cases that showed progress that day
promised = (
BombingHistory
.select(BombingHistory.bomber,
fn.COUNT(BombingHistory.id).alias('promised'))
.where(fn.DATE(BombingHistory.created_at) == cal_date,
BombingHistory.result == BombingResult.HAS_PROGRESS.value)
.group_by(BombingHistory.bomber)
)
    # Amount recovered that day
amount_recovered = (RepaymentLog
.select(RepaymentLog.current_bomber,
fn.SUM(RepaymentLog.principal_part)
.alias('principal_part'),
fn.SUM(RepaymentLog.late_fee_part)
.alias('late_fee_part'))
.where(fn.DATE(RepaymentLog.repay_at) == cal_date,
RepaymentLog.is_bombed == True,
RepaymentLog.current_bomber.is_null(False))
.group_by(RepaymentLog.current_bomber))
    # The CallLog table is deprecated
calls_made = (CallLog
.select(CallLog.user_id,
fn.COUNT(CallLog.record_id).alias('calls_made'))
.where(fn.DATE(CallLog.time_start) == cal_date,
CallLog.system_type == '1')
.group_by(CallLog.user_id))
    # The CallLog table is deprecated
calls_connected = (CallLog
.select(CallLog.user_id,
fn.COUNT(CallLog.record_id)
.alias('calls_connected'))
.where(fn.DATE(CallLog.time_start) == cal_date,
CallLog.duration > 10,
CallLog.system_type == '1').
group_by(CallLog.user_id))
    # All SMS sent that day
sms_sent = (ConnectHistory
.select(ConnectHistory.operator,
fn.COUNT(ConnectHistory.id).alias('sms_sent'))
.where(ConnectHistory.type.in_(ConnectType.sms()),
ConnectHistory.created_at >= cal_date,
ConnectHistory.created_at < now_date
)
.group_by(ConnectHistory.operator))
for i in claimed:
summary[i.latest_bomber_id]['claimed'] += i.claimed
for i in completed:
summary[i.latest_bomber_id]['completed'] += i.completed
for i in cleared:
summary[i.latest_bomber_id]['cleared'] += i.cleared
for i in escalated:
summary[i.current_bomber_id]['escalated'] += i.escalated
for i in transferred:
summary[i.operator_id]['transferred'] += i.transferred
for i in promised:
summary[i.bomber_id]['promised'] += i.promised
for i in amount_recovered:
        recovered = i.principal_part + i.late_fee_part
        summary[i.current_bomber_id]['amount_recovered'] += recovered
for i in calls_made:
summary[int(i.user_id)]['calls_made'] += i.calls_made
for i in calls_connected:
summary[int(i.user_id)]['calls_connected'] += i.calls_connected
for i in sms_sent:
summary[i.operator_id]['sms_sent'] += i.sms_sent
insert_args = []
for bomber_id, data in summary.items():
insert_args.append({
'bomber': bomber_id,
'cycle': data['cycle'],
'claimed': data['claimed'],
'completed': data['completed'],
'cleared': data['cleared'],
'escalated': data['escalated'],
'transferred': data['transferred'],
'promised': data['promised'],
'amount_recovered': data['amount_recovered'],
'calls_made': data['calls_made'],
'calls_connected': data['calls_connected'],
'sms_sent': data['sms_sent'],
'date': cal_date,
})
if insert_args:
Summary.insert_many(insert_args).execute()
cycle_args = []
    # Calculate new cases entering each cycle (aggregated by cycle)
escalated_in = (Escalation
.select(Escalation.escalate_to,
fn.COUNT(Escalation.id).alias('escalated_in'))
.where(Escalation.status == ApprovalStatus.APPROVED.value,
fn.DATE(Escalation.created_at) == cal_date)
.group_by(Escalation.escalate_to))
for i in escalated_in:
cycle_args.append({
'cycle': i.escalate_to,
'escalated_in': i.escalated_in,
'date': cal_date,
})
amount_recovered_total = (
RepaymentLog
.select(RepaymentLog.cycle,
fn.SUM(RepaymentLog.principal_part).alias('principal_part'),
fn.SUM(RepaymentLog.late_fee_part).alias('late_fee_part'))
.where(fn.DATE(RepaymentLog.repay_at) == cal_date)
.group_by(RepaymentLog.cycle)
)
for i in amount_recovered_total:
        recovered_total = i.principal_part + i.late_fee_part
        cycle_args.append({
            'cycle': i.cycle,
            'amount_recovered_total': recovered_total,
'date': cal_date,
})
if cycle_args:
Summary.insert_many(cycle_args).execute()
logging.info('cal summary done')
    # After the summary run, refresh overdue days to trigger auto-escalation
send_to_default_q(MessageAction.BOMBER_CALC_OVERDUE_DAYS, {})
@action(MessageAction.BOMBER_CALC_SUMMARY2)
def cron_summary2(payload, msg_id):
"""已废弃,定时任务还在执行,具体情况待确定"""
cal_date = date.today() - timedelta(days=1)
employees = Bomber.select(Bomber, Role).join(Role)
auto_call_actions = (
AutoCallActions
.select(
AutoCallActions.bomber,
AutoCallActions.result,
fn.COUNT(AutoCallActions.id).alias('count')
)
.where(fn.DATE(AutoCallActions.created_at) == cal_date)
)
amount_recovered = (RepaymentLog
.select(RepaymentLog.current_bomber,
fn.SUM(RepaymentLog.principal_part)
.alias('principal_part'),
fn.SUM(RepaymentLog.late_fee_part)
.alias('late_fee_part'))
.where(fn.DATE(RepaymentLog.repay_at) == cal_date,
RepaymentLog.current_bomber.is_null(False),
RepaymentLog.is_bombed == True))
cleared = (Application
.select(Application.latest_bomber,
fn.COUNT(Application.id).alias('cleared'))
.where(fn.DATE(Application.finished_at) == cal_date,
Application.status == ApplicationStatus.REPAID.value,
Application.latest_bomber.is_null(False)))
auto_call_actions = auto_call_actions.group_by(
AutoCallActions.bomber, AutoCallActions.result
)
amount_recovered = amount_recovered.group_by(RepaymentLog.current_bomber)
cleared = cleared.group_by(Application.latest_bomber)
summary = {
e.id: {
'cycle': e.role.cycle,
'answered_calls': 0,
'ptp': 0,
'follow_up': 0,
'not_useful': 0,
'cleared': 0,
'amount_recovered': 0,
}
for e in employees
}
for a in auto_call_actions:
summary[a.bomber_id]['answered_calls'] += a.count
if a.result == AutoCallResult.PTP.value:
summary[a.bomber_id]['ptp'] += a.count
if a.result == AutoCallResult.FOLLOW_UP.value:
summary[a.bomber_id]['follow_up'] += a.count
if a.result == AutoCallResult.NOT_USEFUL.value:
summary[a.bomber_id]['not_useful'] += a.count
for i in amount_recovered:
        recovered = i.principal_part + i.late_fee_part
        summary[i.current_bomber_id]['amount_recovered'] += recovered
for i in cleared:
summary[i.latest_bomber_id]['cleared'] += i.cleared
insert_args = []
for bomber_id, data in summary.items():
insert_args.append({
'bomber': bomber_id,
'cycle': data['cycle'],
'answered_calls': data['answered_calls'],
'ptp': data['ptp'],
'follow_up': data['follow_up'],
'not_useful': data['not_useful'],
'cleared': data['cleared'],
'amount_recovered': str(data['amount_recovered']),
'date': cal_date,
})
if insert_args:
Summary2.insert_many(insert_args).execute()
@action(MessageAction.BOMBER_SYNC_CONTACTS)
def sync_suggested_contacts(payload, msg_id):
""" suggested contacts sync """
applications = (Application
.select(Application.id, Application.user_id)
.where(Application.status <<
[ApplicationStatus.UNCLAIMED.value,
ApplicationStatus.PROCESSING.value]))
logging.debug('start sync contact')
for a in applications:
sync_contacts(a)
logging.info('contact sync finished')
def sync_contacts(application):
logging.info('application %s start sync contact', application.id)
    # Add contact info
contacts = Contact.filter(Contact.user_id == application.user_id)
existing_numbers = {contact.number for contact in contacts}
# sms contacts
insert_contacts = []
sms_contacts = GoldenEye().get(
'/applications/%s/sms-contacts' % application.external_id
)
if not sms_contacts.ok:
sms_contacts = []
logging.info('get user %s sms contacts failed', application.external_id)
else:
sms_contacts = sms_contacts.json()['data']
for i in sms_contacts:
if i['number'] in existing_numbers:
continue
insert_contacts.append({
'user_id': application.user_id,
'name': i['name'],
'number': i['number'],
'relationship': Relationship.SUGGESTED.value,
'source': 'sms contacts',
'real_relationship': Relationship.SUGGESTED.value
})
existing_numbers.add(i['number'])
if insert_contacts:
Contact.insert_many(insert_contacts).execute()
# call frequency
insert_contacts = []
cf = GoldenEye().get(
'/applications/%s/call/frequency' % application.external_id
)
if not cf.ok:
call_frequency = []
logging.error('get application %s call frequency error',
application.external_id)
else:
call_frequency = cf.json()['data']
with db.atomic():
for i in call_frequency:
if i['number'] in existing_numbers:
                (Contact
                 .update(total_count=i['total_count'],
                         total_duration=i['total_duration'])
                 .where(Contact.number == i['number'],
                        Contact.user_id == application.user_id)
                 .execute())
continue
insert_contacts.append({
'user_id': application.user_id,
'name': i['name'],
'number': i['number'],
'relationship': Relationship.SUGGESTED.value,
'total_count': i['total_count'],
'total_duration': i['total_duration'],
'source': 'call frequency',
'real_relationship': Relationship.SUGGESTED.value
})
if insert_contacts:
Contact.insert_many(insert_contacts).execute()
@action(MessageAction.BOMBER_AUTO_SMS)
@deprecated(version='1.0', reason='This function will be removed soon')
def bomber_auto_sms(payload, msg_id):
day_diff = int(payload['day_diff'])
custom_type = payload.get('custom_type')
msg_type = payload['msg_type']
logging.info('auto sms %s sending', msg_type)
applications = (
Application
.select()
.where(Application.overdue_days == day_diff,
Application.status << [ApplicationStatus.PROCESSING.value,
ApplicationStatus.UNCLAIMED.value],
Application.promised_date.is_null(True) |
(fn.DATE(Application.promised_date) < datetime.today().date()))
)
if custom_type == 'new':
applications = applications.where(Application.loan_success_times < 3)
if custom_type == 'old':
applications = applications.where(Application.loan_success_times >= 3)
templates = (
Template.select(Template.text, Template.app)
.where(Template.type == ConnectType.AUTO_SMS.value,
Template.id << Template.get_auto_sms_tpl(msg_type))
)
tpl_text = dict()
for tpl in templates:
tpl_text[tpl.app] = tpl.text
data_list = []
for a in applications:
tpl_data = {
'user_name': a.user_name,
'due_days': a.overdue_days,
'app_name': a.app,
'phone': a.user_mobile_no,
'cs_number': cs_number_conf.get(a.app, '02150202889'),
}
content = tpl_text[a.app].format(**tpl_data)
data_list.append({
'phone': '62' + a.user_mobile_no,
'content': content,
'app': a.app,
})
if not data_list:
logging.info('auto sms %s do not need sending', msg_type)
return
send_sms(data_list, msg_type, SmsChannel.NUSA.value)
@action(MessageAction.BOMBER_AUTO_MESSAGE_DAILY)
def bomber_auto_message_daily(payload, msg_id):
app_dict = dict(zip(AppName.keys(), AppName.values()))
    # Call records that were successfully auto-dialed today
auto_call_list = AutoCallActionsR \
.select(AutoCallActionsR.application_id) \
.where(fn.DATE(AutoCallActionsR.created_at) == fn.CURDATE())
applications = (
ApplicationR
.select()
.where(ApplicationR.overdue_days < 30,
ApplicationR.overdue_days > 4,
ApplicationR.type == ApplicationType.CASH_LOAN.value,
ApplicationR.status << [ApplicationStatus.PROCESSING.value,
ApplicationStatus.UNCLAIMED.value,
ApplicationStatus.AB_TEST.value],
ApplicationR.promised_date.is_null(True) |
(fn.DATE(ApplicationR.promised_date) < datetime.today().date()),
~(ApplicationR.id << auto_call_list))
)
stage_list1 = range(*AutoCallMessageCycle.NEW_STAGE1.value['scope'], 3) #5,8,11,14
stage_list2 = range(*AutoCallMessageCycle.STAGE2.value['scope'], 3) #15,18
stage_list3 = range(*AutoCallMessageCycle.STAGE3.value['scope'], 3)
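    # Each stage_list expands its scope to every third day; e.g. assuming a
    # NEW_STAGE1 scope of (5, 15), stage_list1 yields 5, 8, 11, 14 (the scope
    # tuples live on AutoCallMessageCycle).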
sms_list = defaultdict(list)
fcm_list = defaultdict(list)
for a in applications:
overdue_type = ''
if a.overdue_days in stage_list1:
if a.loan_success_times < 3:
overdue_type = AutoCallMessageCycle.NEW_STAGE1.value['type']
else:
overdue_type = AutoCallMessageCycle.OLD_STAGE1.value['type']
if a.overdue_days in stage_list2:
overdue_type = AutoCallMessageCycle.STAGE2.value['type']
if a.overdue_days in stage_list3:
overdue_type = AutoCallMessageCycle.STAGE3.value['type']
if overdue_type == '':
continue
# format app name
app_name = app_dict.get(a.app.upper(), AppName.default().value)
try:
tpl_id = Template.get_daily_auto_sms_tpl(overdue_type, app_name)
except KeyError:
logging.warning('Key error {}, id is {}'.format(
(overdue_type, app_name), a.id))
continue
data_map = {
'user_name': a.user_name,
'app_name': app_name,
'overdue_days': a.overdue_days,
'cs_number': cs_number_conf.get(a.app, '')
}
sms_list[(overdue_type, tpl_id, a.app)].append({
'receiver': '62' + a.user_mobile_no,
'data_map': data_map
})
fcm_list[(overdue_type, tpl_id, a.app)].append({
'receiver': a.user_id,
'data_map': data_map
})
for (msg_type, tpl_id, app_name), data_list in sms_list.items():
auto_send_sms_and_fcm(data_list, tpl_id, app_name, "SMS")
    for (msg_type, tpl_id, app_name), data_list in fcm_list.items():
auto_send_sms_and_fcm(data_list, tpl_id, app_name, "FCM")
# Overdue SMS for installment loans
@action(MessageAction.BOMBER_INSTALMENT_AUTO_MESSAGE_DAILY)
def bomber_instalment_auto_message_daily(payload, msg_id):
applications = (ApplicationR.select(ApplicationR.id,
ApplicationR.app,
ApplicationR.user_id,
ApplicationR.user_name,
ApplicationR.user_mobile_no,
ApplicationR.loan_success_times,
OverdueBillR.status,
OverdueBillR.sub_bill_id,
OverdueBillR.overdue_days, )
.join(OverdueBillR, JOIN_LEFT_OUTER,
on=ApplicationR.id == OverdueBillR.collection_id)
.where(ApplicationR.type ==
ApplicationType.CASH_LOAN_STAGING.value,
ApplicationR.status != ApplicationStatus.REPAID.value,
ApplicationR.overdue_days < 90,
ApplicationR.promised_date.is_null(True) |
(fn.DATE(
ApplicationR.promised_date) < datetime.today().date()),
)
.dicts())
    # Work out the real overdue days and outstanding amounts
app_overdues = {}
for app in applications:
if app["status"] == ApplicationStatus.REPAID.value:
continue
if app["id"] in app_overdues:
overdue_days = app_overdues[app["id"]]["overdue_days"]
app_overdues[app["id"]]["overdue_days"] = max(app["overdue_days"],
overdue_days)
app_overdues[app["id"]]["bill_sub_ids"].append(app["sub_bill_id"])
else:
app_overdues[app["id"]] = {
"app_name": app["app"],
"user_id": app["user_id"],
"user_name": app["user_name"],
"overdue_days": app["overdue_days"],
"bill_sub_ids": [app["sub_bill_id"]],
"phone": '62' + app["user_mobile_no"],
"loan_success_times": app["loan_success_times"],
"cs_number": cs_number_conf.get(app["app"], '02150202889')
}
    # Collect the cases that need an SMS and compute their unpaid amounts
sms_dict = {}
sub_bill_ids = []
send_message = defaultdict(list)
send_fcm = defaultdict(list)
for aid, app in app_overdues.items():
message_id = Template.get_daily_instalment_auto_sms_tpl(
overdue_days=app["overdue_days"],
loan_times=app["loan_success_times"]
)
if message_id:
app["tpl_id"] = message_id
sms_dict[aid] = app
sub_bill_ids.extend(app["bill_sub_ids"])
if not sms_dict:
logging.info("no application need send sms")
return
sub_bills = []
try:
        for index in range(0, len(sub_bill_ids), 30):
sub_bill = BillService().sub_bill_list(
bill_sub_ids=sub_bill_ids[index:index+30])
sub_bills += sub_bill
except Exception as e:
logging.info("send sms get bill error:%s" % str(e))
return
sub_bills_dict = {int(sb["id"]): sb for sb in sub_bills}
for aid, app in sms_dict.items():
amount = 0
for sbid in app["bill_sub_ids"]:
amount += sub_bills_dict.get(sbid, {}).get("unpaid", 0)
data_map = {
"user_name": app["user_name"],
"app_name": app["app_name"],
"overdue_days": app["overdue_days"],
"cs_number": app["cs_number"],
"amount": str(amount)
}
send_message[(app['tpl_id'], app["app_name"])].append({
"receiver": app["phone"],
"data_map": data_map
})
send_fcm[(app['tpl_id'], app["app_name"])].append({
"receiver": app["user_id"],
"data_map": data_map
})
for (tpl_id, app_name), data_list in send_message.items():
auto_send_sms_and_fcm(data_list, tpl_id, app_name, "SMS")
    for (tpl_id, app_name), data_list in send_fcm.items():
auto_send_sms_and_fcm(data_list, tpl_id, app_name, "FCM")
def auto_send_sms_and_fcm(data_list, tpl_id, app_name, message_type):
if not data_list:
return
    # 200 records per request
for idx in range(0, len(data_list), 200):
request_json = {
"app_name": app_name,
"failed_retry": True,
"is_masking": True,
"list": data_list[idx: idx+200],
"message_level": 1,
"message_type": message_type,
"sms_type": 4 if message_type == "SMS" else 0,
"type_id": tpl_id
}
try:
result = MessageService().send_batch_template(**request_json)
if not result.get("result"):
                logging.error("send_batch_template failed: type %s, "
                              "tpl_id %s, app %s, result %s",
                              message_type, tpl_id, app_name, result)
        except Exception as e:
            logging.error("send_batch_template error: type %s, tpl_id %s, "
                          "app %s, error %s",
                          message_type, tpl_id, app_name, str(e))
            return
    logging.info("auto send %s done, tpl_id %s, app %s, total %s",
                 message_type, tpl_id, app_name, len(data_list))
def get_danamall_msg_service(app_name, message_service):
if app_name == AppName.DANAMALL.value:
# token = app.config['service.message.%s.token' % app_name.lower()]
message_service = Message(version=app_name)
return message_service
# Collectors send SMS reminding customers of their promised repayment date
@action(MessageAction.BOMBER_REMIND_PROMISE)
def bomber_remind_promise(payload, msg_id):
day_diff = int(payload['day_diff'])
msg_type = payload['msg_type']
logging.info('auto sms %s sending', msg_type)
applications = (
Application
.select()
.where(
fn.DATEDIFF(fn.NOW(), Application.promised_date) == day_diff,
Application.status << [
ApplicationStatus.UNCLAIMED.value,
ApplicationStatus.PROCESSING.value,
]
)
)
templates = (
Template
.select(Template.text, Template.app)
.where(Template.type == ConnectType.AUTO_SMS.value,
Template.id << Template.get_auto_sms_tpl(msg_type))
)
tpl_text = {tpl.app: tpl.text for tpl in templates}
message_date_dict = defaultdict(list)
for a in applications:
tpl_data = {
'user_name': a.user_name,
'due_days': a.overdue_days,
'app_name': a.app,
'phone': a.user_mobile_no,
'cs_number': cs_number_conf.get(a.app, '02150202889'),
'promised_date': a.promised_date.strftime('%d-%m-%Y'),
}
content = tpl_text[a.app].format(**tpl_data)
message_date_dict[a.app].append(
{
"content": content,
"receiver": '62' + a.user_mobile_no,
"title": ""
}
)
for app_name, data_list in message_date_dict.items():
send_sms(data_list, msg_type, app_name)
@action(MessageAction.BOMBER_DISCOUNT_APPROVED)
def bomber_discount_approved(payload, msg_id):
app_id = payload['id']
msg_type = payload['msg_type']
discount_to = payload['discount_to']
effective_to = payload['effective_to']
application = Application.filter(Application.id == app_id).first()
if not application:
logging.error('discount approved msg send failed '
'application %s not found', app_id)
return
template = (
Template
.select(Template.text, Template.app)
.where(Template.type == ConnectType.AUTO_SMS.value,
Template.id << Template.get_auto_sms_tpl(msg_type),
Template.app == application.app)
.first()
)
if not template:
logging.error('discount approved msg send failed '
'template %s not found', msg_type)
return
promised_date = None
if application.promised_date:
promised_date = application.promised_date.strftime('%d-%m-%Y')
tpl_data = {
'user_name': application.user_name,
'due_days': application.overdue_days,
'app_name': application.app,
'phone': application.user_mobile_no,
'cs_number': cs_number_conf.get(application.app, '02150202889'),
'promised_date': promised_date,
'discount_to': discount_to,
'effective_to': effective_to,
}
content = template.text.format(**tpl_data)
data_list = [{
'receiver': '62' + application.user_mobile_no,
'content': content,
'title': "",
}]
send_sms(data_list, msg_type, application.app)
# Send customized SMS in batches
def send_sms(data_list, msg_type, app_name):
if not data_list:
return
for index in range(0, len(data_list), 200):
req_data = {
"app_name": app_name,
"failed_retry": True,
"is_masking": True,
"list": data_list[index: index+200],
"message_level": 0,
"message_type": "SMS",
"sms_type": 3
}
        # initialize so the except handler below can log it safely
        result = None
        try:
            result = MessageService().send_batch(**req_data)
            if not result.get("result"):
                logging.error(
                    "send_sms_failed:%s,req:%s,res:%s",
                    msg_type, req_data, result)
except Exception as e:
logging.error(
"send_sms_error:%s,req:%s,res:%s,error:%s" % (
msg_type, req_data, result, str(e)))
return
logging.info("send_sms_success:%s", msg_type)
# Build the auto-call list and dispatch cases
@action(MessageAction.BOMBER_AUTO_CALL_LIST)
def bomber_auto_call_list(payload, msg_id):
with db.atomic():
        # Dispatch single-period cases: each cycle's outsourcing partners get
        # their share, and the remainder goes to designated in-house bombers.
        # Outsourcing stages are distinguished by partner, and outsourced
        # accounts are identified via bomber.partner_id.
        bomber_dispatch_app()
        # Dispatch installment cases; the instalment field identifies the
        # bombers for each stage
        dispatch_instalment_app()
        # Dispatch records
dis_apps = (DispatchApp
.select(DispatchApp.application)
.where(DispatchApp.status == DisAppStatus.NORMAL.value))
c1_apps = (
Application
.select(Application.id,
Application.cycle,
Application.follow_up_date,
Application.called_times)
.where(
Application.status.not_in([ApplicationStatus.REPAID.value,
ApplicationStatus.AB_TEST.value]),
Application.cycle == Cycle.C1A.value,
Application.is_rejected == False, # noqa
Application.promised_date.is_null(True) |
(fn.DATE(Application.promised_date) < datetime.today().date())
).order_by(Application.overdue_days, Application.apply_at)
)
dis_apps_ids = [da.application_id for da in dis_apps]
insert_args = []
for a in c1_apps:
if a.id in dis_apps_ids:
continue
insert_args.append({
'application': a.id,
'cycle': a.cycle,
'follow_up_date': a.follow_up_date,
'called_times': 1 if a.called_times else 0,
'description': 'init'
})
if not insert_args:
logging.error('no application need auto call')
    # Scan the application table and insert rows into auto_call_list
with db.atomic():
AutoCallList.delete().execute()
for idx in range(0, len(insert_args), 100):
AutoCallList.insert_many(insert_args[idx:idx + 100]).execute()
for idx in range(0, len(insert_args), 100):
application_list = [
i['application']
for i in insert_args[idx:idx + 100]
]
        # Fetch phone numbers that passed validation
send_to_default_q(
MessageAction.BOMBER_AUTO_CALL_CONTACT,
{'application_list': application_list}
)
logging.info('bomber generate auto call list finished')
    # Redistribute cases without a PTP at specific overdue days, i.e. long-backlogged cases
send_to_default_q(
MessageAction.UPDATE_BOMBER_FOR_SPECIAL,
{})
class ChangeBomberTool(object):
@staticmethod
def in_record(bomber_id, ids, bd):
subquery = (Application
.select(Application.amount,
fn.NOW().alias('created_at'),
fn.NOW().alias('updated_at'),
Application.id.alias('application_id'),
R(str(bomber_id)).alias('bomber_id'),
fn.NOW().alias('entry_at'),
R('null').alias('partner_id'),
SQL('DATE_ADD(CURDATE(),INTERVAL 14 DAY)')
.alias('expected_out_time'),
Application.overdue_days.alias(
'entry_overdue_days'))
.where(Application.status !=
ApplicationStatus.REPAID.value,
Application.id << ids))
(Application
.update(latest_bomber=bomber_id)
.where(Application.id.in_(ids))
.execute())
application_list = list(subquery)
for idx in range(0, len(application_list), 1000):
applications = application_list[idx:idx + 1000]
insert_args = list(map(partial(lambda_result, dct=bd),
applications))
DispatchAppHistory.insert_many(insert_args).execute()
@staticmethod
def out_record(a, bd):
_id = str(a.id)
(DispatchAppHistory.update(
out_at=datetime.now(),
out_overdue_days=a.overdue_days,
out_principal_pending=(
a.amount -
Decimal(bd[_id].get('principal_paid'))),
out_late_fee_pending=(
bd[_id].get('late_fee') -
bd[_id].get('late_fee_paid')),
)
.where(
DispatchAppHistory.application == a.id,
DispatchAppHistory.bomber_id == a.latest_bomber_id
)).execute()
a.last_bomber = a.latest_bomber
a.latest_bomber = None
a.ptp_bomber = None
a.latest_call = None
a.called_times = 0
a.save()
@staticmethod
def classify(l, b):
if len(l) == 1:
return l[0]
_l = filter(lambda x: x['bomber'] != b, l)
return min(_l, key=lambda x: len(x['ids']))
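    # e.g. classify([{'bomber': 1, 'ids': [7, 8]}, {'bomber': 2, 'ids': []}], 1)
    # skips bomber 1 (the current holder) and returns the least-loaded
    # remaining bucket, here bomber 2's.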
@action(MessageAction.UPDATE_BOMBER_FOR_SPECIAL)
def update_bomber_for_special(payload, msg_id):
"""
    Cycle 1b: daily, reassign DPD21 cases not under an active PTP to another collector
    Cycle 2: daily, reassign DPD46 cases not under an active PTP to another collector
    Cycle 3: daily, reassign DPD76 cases not under an active PTP to another collector
:param payload:
:param msg_id:
:return:
"""
filter_list = {Cycle.C1B.value: {"overdue_days": 21, "role_id": 5},
Cycle.C2.value: {"overdue_days": 46, "role_id": 6},
Cycle.C3.value: {"overdue_days": 76, "role_id": 8}}
cbt = ChangeBomberTool()
for cycle, values in filter_list.items():
overdue_days = values["overdue_days"]
bombers = (Bomber.select()
.where(Bomber.role == values["role_id"],
Bomber.instalment == 0,
Bomber.is_del == 0))
        bids = {b.id: b for b in bombers}
apps = (Application.select()
.where(Application.cycle == cycle,
Application.type == ApplicationType.CASH_LOAN.value,
Application.overdue_days == overdue_days,
Application.status == ApplicationStatus.AB_TEST.value,
Application.promised_date.is_null(True) |
(fn.DATE(Application.promised_date) < date.today()),
Application.latest_bomber_id.in_(list(bids.keys()))))
classify_dict = defaultdict(list)
for b in bombers:
classify_dict[b.group_id].append({"bomber": b.id, "ids": []})
with db.atomic():
app_ids = [i.id for i in apps]
if app_ids and bids:
bills = BillService().bill_list(application_ids=app_ids)
bill_dict = {str(bill['application_id']): bill for bill in
bills}
for i in apps:
current_bomber = bids.get(i.latest_bomber_id)
if not current_bomber:
continue
classify_list = classify_dict.get(current_bomber.group_id)
d = cbt.classify(classify_list, i.latest_bomber_id)
d["ids"].append(i.id)
cbt.out_record(i, bill_dict)
for group_id, cl_list in classify_dict.items():
for item in cl_list:
cbt.in_record(item["bomber"], item["ids"], bill_dict)
else:
logging.info(
"cycle:{} empty application list {} or bomber list {}".format(
cycle, app_ids, list(bids.keys())))
try:
update_bomber_for_special_instalment()
except Exception as e:
logging.error("special_instalment_error:%s"%str(e))
# Special dispatch for installment C2/C3 cases
def update_bomber_for_special_instalment():
filter_list = {Cycle.C1B.value: 21, Cycle.C2.value: 46, Cycle.C3.value: 76}
    for cycle, overdue_days in filter_list.items():
        # Fetch collectors designated for installments
bombers = (Bomber.select().where(Bomber.instalment == cycle,
Bomber.is_del == 0))
        bids = {b.id: b for b in bombers}
        # Fetch collection cases
apps = (Application.select()
.where(Application.cycle == cycle,
Application.status == ApplicationStatus.AB_TEST.value,
Application.type ==
ApplicationType.CASH_LOAN_STAGING.value,
Application.overdue_days == overdue_days,
Application.promised_date.is_null(True) |
(fn.DATE(Application.promised_date) < date.today()),
Application.latest_bomber_id.in_(list(bids.keys()))))
classify_dict = defaultdict(list)
for b in bombers:
classify_dict[b.group_id].append({"bomber":b.id, "ids":[]})
for a in apps:
current_bomber = bids.get(a.latest_bomber_id)
if not current_bomber:
continue
classify_list = classify_dict.get(current_bomber.group_id)
d = ChangeBomberTool.classify(classify_list, a.latest_bomber_id)
d["ids"].append(a.id)
with db.atomic():
            for group_id, classify_list in classify_dict.items():
for cl in classify_list:
aids = cl["ids"]
if not aids:
continue
latest_bomber_id = cl["bomber"]
                    q = (Application.update(latest_bomber=latest_bomber_id,
                                            last_bomber=Application.latest_bomber)
.where(Application.id << aids)
.execute())
record_param = {
"cycle": cycle,
"application_ids": aids,
"dest_bomber_id": latest_bomber_id,
}
out_and_in_record_instalment(**record_param)
def bomber_dispatch_app():
    # Dispatch single-period C1A cases to outsourcing; partners must be configured
try:
c1a_dispatch_app()
except Exception as e:
logging.error("c1a_dispatch_app error:%s"%str(e))
cycle = {
1: 10,
2: 30,
3: 60,
4: 90
}
    # Single-period outsourcing: Cycle.C2, overdue day 31
apps = (Application.select()
.where(fn.DATE(Application.C2_entry) == date.today(),
Application.type == ApplicationType.CASH_LOAN.value))
partners = (Partner.select()
.where(Partner.status == PartnerStatus.NORMAL.value,
Partner.cycle == Cycle.C2.value))
apps_ids = [a.id for a in apps]
dispatch_inserts = []
start_index = 0
apps_length = len(apps_ids)
logging.warning('apps length %s' % str(apps_length))
    for p in partners:  # currently only one partner
bombers = (Bomber.select()
.where(Bomber.partner == p.id,
Bomber.status != BomberStatus.OUTER_LEADER.value,
Bomber.is_del == 0))
gen = CycleIter([b.id for b in bombers])
existing_list = []
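        # CycleIter is assumed to round-robin over the bomber ids; average_gen
        # (used below) draws the next id while consulting existing_list to keep
        # the per-bomber load even.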
end_index = start_index + int(apps_length * p.app_percentage)
logging.info('partner length %s' % str(end_index))
if not apps_ids[start_index:end_index]:
continue
bills = BillService().bill_list(
application_ids=apps_ids[start_index:end_index])
bill_dict = {bill['application_id']: bill for bill in bills}
for a_id in apps_ids[start_index:end_index]:
bomber = average_gen(gen, existing_list)
q = (DispatchApp.delete()
.where(DispatchApp.application == a_id)
.execute())
dispatch_inserts.append({
'application': a_id,
'bomber': bomber,
'partner': p.id,
})
            # After outsourcing the case, snapshot the data for analysis
application = (Application.select()
.where(Application.id == a_id)).first()
application.latest_bomber = bomber
application.status = ApplicationStatus.AB_TEST.value
application.ptp_bomber = None
application.save()
day_next_cycle = (cycle.get(application.cycle) -
application.overdue_days)
DispatchAppHistory.create(
application=a_id,
partner_id=p.id,
bomber_id=bomber,
entry_at=datetime.now(),
entry_overdue_days=application.overdue_days,
entry_principal_pending=(
application.amount -
Decimal(bill_dict[a_id].get('principal_paid'))),
entry_late_fee_pending=(
Decimal(bill_dict[a_id].get('late_fee')) -
Decimal(bill_dict[a_id].get('late_fee_paid'))),
expected_out_time=(date.today() +
timedelta(days=day_next_cycle))
)
start_index = end_index
with db.atomic():
for idx in range(0, len(dispatch_inserts), 100):
DispatchApp.insert_many(dispatch_inserts[idx:idx + 100]).execute()
    # A/B test dispatch (manually worked cases)
config = SystemConfig.prefetch(SCI.AB_TEST_C2)
c2_bomber = config.get(SCI.AB_TEST_C2, SCI.AB_TEST_C2.default_value)
    # Remaining single-period cases go to designated in-house bomber ids [76, 100, 106, 107, 213, 215, 216, 221, 222, 223, 226, 235]
c2_bomber = get_cash_bomber(c2_bomber, Cycle.C2.value)
    # application ids from the Python-side DB
c2 = apps_ids[start_index:]
if c2:
bills = BillService().bill_list(
application_ids=c2)
else:
bills = []
    # bills from the Java-side DB
bill_dict = {bill['application_id']: bill for bill in bills}
logging.info('c2 AB_test length: %s' % str(c2))
gen = CycleIter(c2_bomber)
existing_list = []
for c in c2:
bomber = average_gen(gen, existing_list)
application = Application.filter(Application.id == c).first()
application.status = ApplicationStatus.AB_TEST.value
application.latest_bomber = bomber
application.ptp_bomber = None
application.save()
day_next_cycle = 46 - application.overdue_days
DispatchAppHistory.create(
application=c,
bomber_id=bomber,
entry_at=datetime.now(),
entry_overdue_days=application.overdue_days,
entry_principal_pending=(application.amount
- bill_dict[c].get('principal_paid', 0)),
entry_late_fee_pending=(
bill_dict[c].get('late_fee', 0) -
bill_dict[c].get('late_fee_paid', 0)),
expected_out_time=(date.today() + timedelta(days=day_next_cycle))
)
ab_test_other()
# Part of the single-period cases go to outsourcing; in-house C1A skips dispatch and goes straight to auto-call
def c1a_dispatch_app():
today = datetime.today().date()
tomorrow = today + timedelta(days=1)
    # Fetch single-period cases
c1a_apps = (Application.select()
.where(Application.status << [ApplicationStatus.UNCLAIMED.value,
ApplicationStatus.PROCESSING.value],
Application.dpd1_entry >= today,
Application.dpd1_entry < tomorrow,
Application.type == ApplicationType.CASH_LOAN.value))
all_aids = [a.id for a in c1a_apps]
    # Fetch outsourcing partners
partners = (Partner.select()
.where(Partner.status == PartnerStatus.NORMAL.value,
Partner.cycle == Cycle.C1A.value))
end = 0
for p in partners:
        # Fetch bombers directly via partner
bombers = (Bomber.select()
.where(Bomber.partner == p.id,
Bomber.is_del == 0))
start = end
end += int(len(all_aids) * p.app_percentage)
aids = all_aids[start:end]
bids = [b.id for b in bombers]
if not bids or not aids:
continue
        # Number of cases each outsourced bomber should receive
        average_number = get_average_number(len(aids), len(bids))
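        # get_average_number is assumed to split the cases into near-equal
        # shares, e.g. 10 cases over 3 bombers -> [4, 3, 3].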
p_end = 0
        for i, bid in enumerate(bids):
p_start = p_end
p_end += average_number[i]
b_aids = aids[p_start:p_end]
with db.atomic():
q = (Application
                     .update(latest_bomber=bid,
                             status=ApplicationStatus.AB_TEST.value)
.where(Application.id << b_aids)
.execute())
params = {
"cycle": Cycle.C1A.value,
"dest_partner_id": p.id,
"application_ids": b_aids,
"dest_bomber_id": bid
}
new_in_record(**params)
try:
dispatch_inserts = []
for aid in b_aids:
dispatch_inserts.append({'application': aid,
'bomber': bid,
'partner': p.id,
'status': DisAppStatus.NORMAL.value})
if dispatch_inserts:
q = (DispatchApp.insert_many(dispatch_inserts).execute())
except Exception as e:
logging.error("c1a分件写入dispatch_app error:%s"%str(e))
def ab_test_other():
cycle_upper = {
1: 10,
2: 30,
3: 60,
4: 76
}
c1b = (Application.select()
.where(fn.DATE(Application.C1B_entry) == date.today(),
Application.type == ApplicationType.CASH_LOAN.value)
.order_by(-Application.overdue_days)
)
c1b_id = [a.id for a in c1b]
dis_app_update = (DispatchApp.update(status=DisAppStatus.ABNORMAL.value)
.where(DispatchApp.application.in_(c1b_id)))
dis_app_update.execute()
c3 = (Application.select()
.where(fn.DATE(Application.C3_entry) == date.today(),
Application.type == ApplicationType.CASH_LOAN.value))
all_id = [b.id for b in c3]
try:
        # Assign part of the C3 cases to outsourcing
partners = (Partner.select()
.where(Partner.status == PartnerStatus.NORMAL.value,
Partner.cycle == Cycle.C3.value))
start_index, end_index, out_apps = 0, 0, {}
for p in partners:
end_index += int(len(all_id) * p.app_percentage)
out_apps[p.id] = all_id[start_index:end_index]
start_index = end_index
c3_id = all_id[end_index:]
allot_c3_case(out_apps)
    except Exception:
c3_id = all_id
config = SystemConfig.prefetch(SCI.AB_TEST_C1B, SCI.AB_TEST_C3)
c1b_bomber = config.get(SCI.AB_TEST_C1B, SCI.AB_TEST_C1B.default_value)
c3_bomber = config.get(SCI.AB_TEST_C3, SCI.AB_TEST_C3.default_value)
    # Filter out collectors who handle installments
c3_bomber = get_cash_bomber(c3_bomber, Cycle.C3.value)
data = [{'ids': c1b_id, 'bomber': c1b_bomber, 'index': 0, 'cycle': 2},
{'ids': c3_id, 'bomber': c3_bomber, 'index': 1, 'cycle': 4}]
for d in data:
applications = d.get('ids')
length = len(applications)
end = int(length * d.get('index'))
gen = CycleIter(d.get('bomber'))
existing_list = []
if not applications:
continue
bills = BillService().bill_list(
application_ids=applications)
bill_dict = {bill['application_id']: bill for bill in bills}
for a in applications[:end]:
bomber = average_gen(gen, existing_list)
application = Application.filter(Application.id == a).first()
application.status = ApplicationStatus.AB_TEST.value
application.latest_bomber = bomber
application.ptp_bomber = None
application.save()
day_next_cycle = (cycle_upper.get(application.cycle) -
application.overdue_days)
DispatchAppHistory.create(
application=a,
bomber_id=bomber,
entry_at=datetime.now(),
entry_overdue_days=application.overdue_days,
entry_principal_pending=(application.amount -
bill_dict[a]['principal_paid']),
entry_late_fee_pending=(bill_dict[a]['late_fee'] -
bill_dict[a]['late_fee_paid']),
expected_out_time=(date.today() +
timedelta(days=day_next_cycle))
)
        # Dispatch applications to outsourced teams per the partner table config.
if d.get('cycle') == Cycle.C1B.value:
c1b_wb_partner = (Partner.select()
.where(Partner.cycle == Cycle.C1B.value,
Partner.status ==
PartnerStatus.NORMAL.value))
            # Get the C1B outsourced teams
            c1b_wb_p_dict = {str(p.id): p.app_percentage for p in c1b_wb_partner}
c1b_wb_pids = list(map(int, c1b_wb_p_dict.keys()))
c1b_wb_bombers = (Bomber.select()
.where(Bomber.is_del == 0,
Bomber.partner_id << c1b_wb_pids,
Bomber.password.is_null(False)))
            # Collect each outsourced team's members and its share of cases
c1b_wb_pba = {}
apps_num = len(applications)
for cb in c1b_wb_bombers:
cb_key = str(cb.partner_id)
if cb_key in c1b_wb_pba:
c1b_wb_pba[cb_key]["bids"].append(cb.id)
else:
                    # Look up the team's percentage and compute its case count
start = end
percentage = c1b_wb_p_dict.get(cb_key, 0)
end = start + ceil(apps_num * percentage)
c1b_wb_pba[cb_key] = {
"bids": [cb.id],
"pid": cb.partner_id,
"apps": applications[start:end]
}
            # The remaining new C1B cash-loan cases stay in-house
inner_c1b_apps = applications[end:]
dispatch_c1b_inner_apps(aids=inner_c1b_apps,
bills=bill_dict,
period=cycle_upper.get(Cycle.C1B.value))
for pid,c1b_wb in c1b_wb_pba.items():
c1b_wb_apps = c1b_wb["apps"]
c1b_wb_bids = c1b_wb["bids"]
average_nums = get_average_number(len(c1b_wb_apps),
len(c1b_wb_bids))
bid_end = 0
for b_index,bid in enumerate(c1b_wb_bids):
bid_start = bid_end
bid_end = bid_start + average_nums[b_index]
bid_apps = c1b_wb_apps[bid_start:bid_end]
logging.info("c1b_分件:bid:%s,bid_apps:%s"%(bid, bid_apps))
with db.atomic():
app_sql = (Application.update(latest_bomber=bid,
status=ApplicationStatus.AB_TEST.value,
ptp_bomber=None)
.where(Application.id << bid_apps))
app_sql.execute()
params = {
"apps":bid_apps,
"partner_id": int(pid),
"bill_dict": bill_dict,
"period": cycle_upper.get(Cycle.C1B.value),
"bomber_id":bid
}
c1b_dispatch_in_record(**params)
try:
for aid in bid_apps:
dispatch_inserts = {
'application': aid,
'bomber': bid,
'partner': int(pid),
'status': DisAppStatus.NORMAL.value,
}
q = (DispatchApp.update(**dispatch_inserts)
.where(DispatchApp.application == aid)
.execute())
if not q:
DispatchApp.create(**dispatch_inserts)
except Exception as e:
logging.error("dispatchApp插入失败:%s"%str(e))
def allot_c3_case(out_data):
dispatch_inserts = []
for key, value in out_data.items():
if not value:
continue
bombers = (Bomber
.filter(Bomber.partner == key,
Bomber.status == BomberStatus.OUTER.value,
Bomber.is_del == 0))
bomber_ids = [b.id for b in bombers]
bomber = CycleIter(bomber_ids)
bills = BillService().bill_list(application_ids=value)
bill_dict = {bill['application_id']: bill for bill in bills}
for v in value:
bomber_id = bomber.__next__()
q = (DispatchApp.delete()
.where(DispatchApp.application == v)
.execute())
dispatch_inserts.append({
'application': v,
'bomber': bomber_id,
'partner': key,
})
            # Back up the data for later analysis
application = (Application.filter(Application.id == v)).first()
application.latest_bomber = bomber_id
application.ptp_bomber = None
application.status = ApplicationStatus.AB_TEST.value
application.save()
            # A C3 case moves to the next cycle at 90 overdue days
day_next_cycle = (90 - application.overdue_days)
DispatchAppHistory.create(
application=v,
partner_id=key,
bomber_id=bomber_id,
entry_at=datetime.now(),
entry_overdue_days=application.overdue_days,
entry_principal_pending=(
application.amount -
Decimal(bill_dict[v].get('principal_paid'))),
entry_late_fee_pending=(
Decimal(bill_dict[v].get('late_fee')) -
Decimal(bill_dict[v].get('late_fee_paid'))),
expected_out_time=(
date.today() + timedelta(days=day_next_cycle))
)
with db.atomic():
for idx in range(0, len(dispatch_inserts), 100):
DispatchApp.insert_many(dispatch_inserts[idx:idx + 100]).execute()
# Get the bombers who only collect single-period (cash) loans
def get_cash_bomber(bids, cycle):
cash_bombers = (Bomber.select()
.where(Bomber.id << bids,
Bomber.is_del == 0,
Bomber.instalment != cycle))
cash_bids = [b.id for b in cash_bombers]
return cash_bids
# Dispatch single-period C1B cases to in-house staff
def dispatch_c1b_inner_apps(aids, bills, period=30):
    # Fetch the staff who should receive cases
bombers = (Bomber.select()
.where(Bomber.role_id == 5,
Bomber.is_del == 0,
Bomber.instalment == 0))
bids = [b.id for b in bombers]
if not aids or not bids:
return
avg_num = get_average_number(len(aids),len(bids))
end = 0
with db.atomic():
for index,b in enumerate(bids):
start = end
end = start + avg_num[index]
b_aids = aids[start:end]
app_sql = (Application.update(latest_bomber=b,
status=ApplicationStatus.AB_TEST.value,
ptp_bomber=None)
.where(Application.id << b_aids))
app_sql.execute()
params = {
"apps": b_aids,
"bill_dict": bills,
"period": period,
"bomber_id": b
}
c1b_dispatch_in_record(**params)
# Dispatch installment cases to staff
def dispatch_instalment_app():
cycle_list = [Cycle.C1B.value, Cycle.C2.value, Cycle.C3.value,Cycle.M3.value]
    # Each day, fetch the applications in each cycle that are still undispatched
for cycle in cycle_list:
apps = (Application.select()
.where(Application.cycle == cycle,
Application.latest_bomber.is_null(True),
Application.status != ApplicationStatus.REPAID.value,
(Application.type ==
ApplicationType.CASH_LOAN_STAGING.value)))
aids = [a.id for a in apps]
if not aids:
continue
        # Fetch the designated bombers for this cycle
bombers = (Bomber.select()
.where(Bomber.is_del == 0,
Bomber.instalment == cycle))
bids = [b.id for b in bombers]
if not bids:
continue
        average_nums = get_average_number(len(aids), len(bids))
end = 0
for i,bid in enumerate(bids):
start = end
end = start + average_nums[i]
bid_apps = aids[start:end]
with db.atomic():
                # Update status and ownership
                q = (Application.update(ptp_bomber = None,
                        latest_bomber = bid,  # newest assigned bomber id
                        last_bomber = Application.latest_bomber,  # previously assigned bomber
                        status = ApplicationStatus.AB_TEST.value)  # manually handled case
.where(Application.id << bid_apps)
.execute())
record_param = {"cycle": cycle,
"application_ids": bid_apps,
"dest_bomber_id": bid}
out_and_in_record_instalment(**record_param)
# Entry and exit records for installment cases
def out_and_in_record_instalment(**kwargs):
if not kwargs.get("application_ids"):
return
    # Write the exit records first
out_q = (DispatchAppHistory.update(out_at = fn.NOW())
.where(DispatchAppHistory.application << kwargs['application_ids'],
DispatchAppHistory.out_at.is_null(True))
.execute())
    # Then the entry records
cycle_period = {
1: '10',
2: '30',
3: '60',
4: '90'
}
period = cycle_period.get(kwargs['cycle'], '90 + t1.overdue_days')
kwargs['dest_partner_id'] = kwargs.get('dest_partner_id') or 'null'
subquery = (Application
.select(Application.amount,
fn.NOW().alias('created_at'),
fn.NOW().alias('updated_at'),
Application.id.alias('application_id'),
R(str(kwargs['dest_bomber_id'])).alias('bomber_id'),
fn.NOW().alias('entry_at'),
Application.overdue_days.alias('entry_overdue_days'),
R(str(kwargs['dest_partner_id'])).alias('partner_id'),
(SQL('DATE_ADD(CURDATE(),INTERVAL (%s -'
' t1.overdue_days) DAY)' % period))
.alias('expected_out_time'))
.where(Application.status != ApplicationStatus.REPAID.value,
Application.id << kwargs['application_ids']))
application_list = list(subquery)
for idx in range(0, len(application_list), 50):
applications = application_list[idx:idx + 50]
app_ids = [i.application_id for i in applications]
        # Fetch all related overdue_bill rows
overdue_bills = (OverdueBill.select()
.where(OverdueBill.collection_id << app_ids))
sub_bill_ids = [ob.sub_bill_id for ob in overdue_bills]
bill_list = BillService().sub_bill_list(bill_sub_ids=sub_bill_ids)
insert_args = lambad_instalment_result(bill_list, applications)
if not insert_args:
continue
DispatchAppHistory.insert_many(insert_args).execute()
# Format installment entry-record results
def lambad_instalment_result(bill_list,applications):
bill_dict = {}
insert_args = []
    # Compute the amounts pending at entry
for sub_bill in bill_list:
bill_id = sub_bill["bill_id"]
principal_pending = sub_bill["amount"] - sub_bill['principal_paid']
late_fee_pending = sub_bill["late_fee"] - sub_bill["late_fee_paid"]
if bill_id in bill_dict:
bill_dict[bill_id]["entry_principal_pending"] += principal_pending
bill_dict[bill_id]["entry_late_fee_pending"] += late_fee_pending
else:
bill_dict[bill_id] = {
"entry_principal_pending": principal_pending,
"entry_late_fee_pending": late_fee_pending
}
for app in applications:
bill_entry = bill_dict.get(app.bill_id, {})
entry_principal_pending = bill_entry.get("entry_principal_pending", 0)
entry_late_fee_pending = bill_entry.get("entry_late_fee_pending", 0)
insert_dict = {
'created_at': app.created_at,
'updated_at': app.updated_at,
'application': app.application_id,
'bomber_id': app.bomber_id,
'entry_at': app.entry_at,
'entry_overdue_days': app.entry_overdue_days,
'partner_id': app.partner_id,
'expected_out_time': app.expected_out_time,
'entry_principal_pending': entry_principal_pending,
'entry_late_fee_pending': entry_late_fee_pending
}
insert_args.append(insert_dict)
return insert_args
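# Write DispatchAppHistory entry records for a batch of C1B applications
# assigned to one bomber, optionally under an outsourcing partner.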
def c1b_dispatch_in_record(**kwargs):
app_ids = kwargs.get("apps")
partner_id = kwargs.get("partner_id","null")
bill_dict = kwargs.get("bill_dict")
period = kwargs.get("period")
bomber_id = kwargs.get('bomber_id')
if not all([app_ids, partner_id, bill_dict, period]):
return False
    bill_dict = {str(k): v for k, v in bill_dict.items()}
subquery = (Application
.select(Application.amount,
fn.NOW().alias('created_at'),
fn.NOW().alias('updated_at'),
Application.id.alias('application_id'),
R(str(bomber_id)).alias('bomber_id'),
fn.NOW().alias('entry_at'),
Application.overdue_days.alias('entry_overdue_days'),
R(str(partner_id)).alias('partner_id'),
(SQL('DATE_ADD(CURDATE(),INTERVAL (%s -'
' t1.overdue_days) DAY)' % period))
.alias('expected_out_time'))
.where(Application.id << app_ids))
application_list = list(subquery)
for idx in range(0,len(application_list),1000):
applications = application_list[idx:idx+1000]
insert_args = list(map(partial(lambda_result,
dct=bill_dict),
applications))
DispatchAppHistory.insert_many(insert_args).execute()
# Build the phone-number queue for auto-call contacts
@action(MessageAction.BOMBER_AUTO_CALL_CONTACT)
def bomber_auto_call_contact(payload, msg_id):
application_list = payload['application_list']
applications = []
for app_id in application_list:
applications.append(Application.filter(Application.id == app_id)
.first())
    # Build the contact queue for each application
with db.atomic():
for application in applications:
cycle = application.cycle
            # Adjusted query conditions
contacts = (
Contact
.select()
.where(Contact.user_id == application.user_id,
Contact.latest_status.not_in(ContactStatus.no_use()))
.order_by(-Contact.useful,
Contact.relationship,
-Contact.total_duration,
-Contact.total_count)
)
level1 = []
level2 = []
level3 = []
level = []
for c in contacts:
if c.relationship == Relationship.APPLICANT.value:
level.append(c)
elif c.relationship == Relationship.FAMILY.value:
level1.append(c)
elif c.relationship == Relationship.COMPANY.value:
level2.append(c)
elif c.relationship == Relationship.SUGGESTED.value:
level3.append(c)
contacts = level + level2 + level1 + level3
numbers = []
fc_count = 0
            # Pre-check whether the phone numbers are reachable before calling
app_calls = []
need_verify = False
for eac_contact in contacts:
if (eac_contact.relationship == Relationship.FAMILY.value and
eac_contact.useful == ContactsUseful.NONE.value):
need_verify = True
break
if need_verify:
logging.info('Found contact need update. app id {}'
.format(str(application.id)))
app_calls = AuditService().phone_invalid(cat=Relationship(1).name,
application_id=application.external_id)
call_history = True
c1b_family_dict = defaultdict(list)
for c in contacts:
if c.relationship == Relationship.COMPANY.value:
if cycle == Cycle.C1A.value:
call_history = check_call_history(application)
break
if cycle == Cycle.C1B.value:
                        # For now, C1B company contacts only use the number the applicant provided
if c.source != CompanyContactType.BASIC_INFO_JOB_TEL.value:
continue
if c.relationship == Relationship.FAMILY.value:
if cycle == Cycle.C1A.value:
call_history = check_call_history(application)
break
# Update contact useful
if c.useful == ContactsUseful.NONE.value:
c.useful = check_valid_phone(app_calls, c)
c.save()
if c.useful == ContactsUseful.INVALID.value:
logging.info('Found invalid contact. {}'
.format(str(c.id)))
continue
                # Family contacts need to be ordered by source
if cycle == Cycle.C1B.value:
c1b_family_dict[c.source].append(c.number)
continue
if c.relationship == Relationship.SUGGESTED.value:
if cycle not in (Cycle.C2.value, Cycle.C3.value):
break
if cycle == Cycle.C2.value and fc_count > 10:
break
if cycle == Cycle.C3.value and fc_count > 20:
break
fc_count += 1
numbers.append(c.number)
            # If the cycle-1 applicant's numbers are unusable, add EC contacts
if len(numbers) == 0 or not call_history:
src_contact = (
Contact.select()
.where(Contact.user_id == application.user_id,
                           Contact.source.in_(FamilyContactType.c1a_order())))
                # If no C1A call connected within five days, dial in the new
                # order; the source types grew from the original 2 to 4
c1a_family_dict = defaultdict(list)
for e in src_contact:
c1a_family_dict[e.source].append(e.number)
for call_type in FamilyContactType.c1a_order():
numbers.extend(c1a_family_dict[call_type])
if cycle == Cycle.C1B.value:
for call_type in FamilyContactType.c1b_order():
numbers.extend(c1b_family_dict[call_type])
numbers = list(set(numbers))
update_query = (
AutoCallList
.update(numbers=','.join(numbers))
.where(AutoCallList.application == application.id)
)
update_query.execute()
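# A contact number is marked INVALID when the audit service reported it as an
# unreachable tel_no/mobile_no; otherwise it stays AVAILABLE.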
def check_valid_phone(phone_list, contact):
useful = ContactsUseful.AVAILABLE.value
for each_phone in phone_list:
if contact.number == each_phone.get('tel_no') or \
contact.number == each_phone.get('mobile_no'):
useful = ContactsUseful.INVALID.value
break
return useful
# For C1A cases, open up EC contacts if no call connected within five days
def check_call_history(application):
app_create_at = application.created_at + timedelta(days=4)
if datetime.today().date() > app_create_at.date():
call_actions = (CallActions.select()
.where(CallActions.type == 0,
CallActions.application == application.id,
CallActions.created_at >
(datetime.now() - timedelta(days=5))))
for call in call_actions:
if call.phone_status == PhoneStatus.CONNECTED.value:
return True
return False
return True
# When now minus updated_at exceeds SCAVENGER_TIME, the scavenger resets the status
@action(MessageAction.BOMBER_SCAVENGER)
def scavenger(payload, msg_id):
scavenger_time = -60
scavenger = (SystemConfig.select()
.where(SystemConfig.key == 'SCAVENGER_TIME')
.first())
if scavenger and scavenger.value.isdigit():
scavenger_time = -int(scavenger.value)
update_auto_call_list = (
AutoCallList
.update(status=AutoListStatus.PENDING.value,
description='scavenger')
.where(
AutoCallList.status == AutoListStatus.PROCESSING.value,
AutoCallList.updated_at <
datetime.now() + timedelta(minutes=scavenger_time),
)
)
count = update_auto_call_list.execute()
logging.info('scavenger processed %s application', count)
    # Reset auto-call entries stuck in the mailbox status
mail_box_scavenger_time = -30
mail_box_scavenger = (SystemConfig.select()
.where(SystemConfig.key == 'MAIL_BOX_SCAVENGER_TIME')
.first())
if mail_box_scavenger and mail_box_scavenger.value.isdigit():
mail_box_scavenger_time = -int(mail_box_scavenger.value)
update_mail_box_call_list = (
AutoCallList.update(status=AutoListStatus.PENDING.value)
.where(AutoCallList.status == AutoListStatus.MAILBOX.value,
AutoCallList.updated_at <
datetime.now() + timedelta(minutes=mail_box_scavenger_time))
)
mail_box_count = update_mail_box_call_list.execute()
logging.info("scavenger update mail box %s", mail_box_count)
    # If no IVR callback arrives within 30 minutes, reset the IVR status
update_auto_ivr = (
AutoIVR
.update(status=AutoIVRStatus.AVAILABLE.value)
.where(AutoIVR.status == AutoIVRStatus.PROCESSING.value,
AutoIVR.updated_at < datetime.now() + timedelta(minutes=-30)
)
)
ivr_result = update_auto_ivr.execute()
logging.info("scavenger update %s ivr"%ivr_result)
@action(MessageAction.BOMBER_CLEAR_OVERDUE_PTP)
def bomber_clear_overdue_ptp(payload, msg_id):
    # C1B, C2 and C3 have no predictive outbound calling, so after a PTP is
    # cleared these cases must return to outsourcing or AB test.
    # When the promised repayment date has passed, switch C1B/C2/C3 cases
    # back to manual handling.
update_overdue_ptp_ab = (
Application.update(
status=ApplicationStatus.AB_TEST.value,
).where(
fn.DATE(Application.promised_date) < datetime.today().date(),
Application.status == ApplicationStatus.PROCESSING.value,
Application.cycle << [Cycle.C1B.value, Cycle.C2.value, Cycle.C3.value]
)
)
count1 = update_overdue_ptp_ab.execute()
logging.info('bomber overdue ptp for C1B C2 and C3 cleared: %s', count1)
now_and_yesterday = ((datetime.today() + timedelta(days=1)).date(),
datetime.today().date())
overdue_1a1b_cs_ptp = (CallActions
.select()
.where(fn.DATE(CallActions.promised_date)
.in_(now_and_yesterday),
CallActions.bomber_id == 72))
update_overdue_1a1b_cs_ptp = (
Application
.update(status=ApplicationStatus.UNCLAIMED.value)
.where(Application.status == ApplicationStatus.PROCESSING.value,
Application.cycle == Cycle.C1A.value,
Application.id.in_(overdue_1a1b_cs_ptp)))
logging.debug("bomber c1a c1b cs ptp: %s", update_overdue_1a1b_cs_ptp)
count2 = update_overdue_1a1b_cs_ptp.execute()
logging.info('bomber c1a c1b cs overdue ptp cleared: %s', count2)
update_overdue_ptp = (
Application
.update(
status=ApplicationStatus.UNCLAIMED.value,
).where(
fn.DATE(Application.promised_date) < datetime.today().date(),
Application.status == ApplicationStatus.PROCESSING.value,
Application.cycle == Cycle.C1A.value,
)
)
count = update_overdue_ptp.execute()
logging.info('bomber overdue ptp cleared: %s', count)
@action(MessageAction.REPORT_BOMBER_COLLECTION)
def report_bomber_collection(payload, msg_id):
start_date = (ReportCollection
.select(fn.MAX(ReportCollection.apply_date))
.scalar())
now = datetime.now()
if start_date and str(start_date) == str(now)[:10]:
return
end_date = str(now + timedelta(days=1))[:10]
start_date = str(now)[:10]
dct = dict(zip(CycleList.sql_values(), CycleList.table_values()))
all_overdue_loan_sql1 = """
SELECT ba.cycle, COUNT(ba.id)
FROM bomber.auto_call_list ba
GROUP BY 1;
"""
s_data1 = readonly_db.execute_sql(all_overdue_loan_sql1).fetchall()
d1 = OperatedDict(s_data1)
all_overdue_loan_sql2 = """
SELECT ba.cycle, COUNT(ba.id)
FROM bomber.auto_call_list ba
WHERE DATE(ba.follow_up_date) > CURDATE()
AND ba.called_counts = 0
GROUP BY 1;
"""
s_data2 = readonly_db.execute_sql(all_overdue_loan_sql2).fetchall()
d2 = OperatedDict(s_data2)
overdue_loans_entered_into_predict_call_system_sql = """
SELECT ba.cycle, COUNT(ba.id)
FROM bomber.auto_call_list ba
WHERE ba.called_counts >= 1
GROUP BY 1;
"""
s_data3 = readonly_db.execute_sql(
overdue_loans_entered_into_predict_call_system_sql).fetchall()
d3 = OperatedDict(s_data3)
loans_completed_sql = """
SELECT ba.cycle, COUNT(DISTINCT ba.application_id)
FROM bomber.auto_call_actions ba
WHERE DATE(ba.created_at) = CURDATE()
GROUP BY 1;
"""
s_data4 = readonly_db.execute_sql(loans_completed_sql).fetchall()
d4 = OperatedDict(s_data4)
connected_calls_automatic_sql = """
SELECT ba.cycle, COUNT(ba.application_id)
FROM bomber.auto_call_actions ba
WHERE DATE(ba.created_at) = CURDATE()
GROUP BY 1;
"""
s_data5 = readonly_db.execute_sql(connected_calls_automatic_sql).fetchall()
d5 = OperatedDict(s_data5)
connected_calls_manual_sql = """
SELECT bb.cycle, COUNT(bb.id)
FROM bomber.bombing_history bb
WHERE DATE(bb.created_at) = curdate()
AND (bb.bomber_id < 150 OR bb.bomber_id > 200)
GROUP BY bb.cycle;
"""
s_data6 = readonly_db.execute_sql(connected_calls_manual_sql).fetchall()
d6 = OperatedDict(s_data6)
logging.info('Directly get data from database successfully.')
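    # OperatedDict appears to support element-wise arithmetic keyed by cycle
    # (assumption from usage): e.g. c3 = c2 / c1 is, per cycle, the share of
    # the effective backlog that entered the predictive dialer.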
c1 = d1 - d2
c2 = d3
c3 = c2 / c1
c4 = d4
c5 = c4 / c2
c6 = d5
c7 = c6 / c4
c8 = d6
c9 = OperatedDict(get_agent())
c10 = (c6 + c8) / c9
try:
c11 = average_call_duration_team(start_date, end_date)
except AttributeError:
c11 = {}
lst = []
for i in range(1, 5):
lst.append({
'apply_date': start_date,
'cycle': dct[i],
'all_overdue_loan': c1.get(i, 0),
'overdue_loans_entered_into_predict_call_system': c2.get(i, 0),
'of_overdue_loans_entered_into_predict_call_system':
round(c3.get(i, 0) * 100, 1),
'loans_completed': c4.get(i, 0),
'of_completed_loans_in_predict_call_system':
round(c5.get(i, 0) * 100, 1),
'connected_calls_automatic': c6.get(i, 0),
'connected_calls_automatic_completed_loans':
round(c7.get(i, 0), 1),
'connected_calls_manual': c8.get(i, 0),
'agent': c9.get(i, 0),
'average_calls_agent': round(c10.get(i, 0), 1),
'average_call_duration_team': round(c11.get(i, 0), 1)
})
ReportCollection.insert_many(lst).execute()
logging.info('report_bomber_collection:Done!')
@action(MessageAction.BOMBER_AUTO_CALL_LIST_RECORD)
def bomber_auto_call_list_record(payload, msg_id):
"""记录一年的auto_call_list,删除前一天的数据,增加今天的数据"""
now = datetime.now()
if now > datetime.strptime('2020-02-01', '%Y-%m-%d'):
date_sql = """
SELECT DATE(created_at) FROM auto_call_list_record
GROUP BY DATE(created_at) limit 1
"""
del_date = db.execute_sql(date_sql).fetchone()[0]
del_sql = """
DELETE FROM auto_call_list_record WHERE date(created_at) = %s
"""
db.execute_sql(del_sql, [del_date])
sql = """
INSERT INTO auto_call_list_record
SELECT * FROM auto_call_list
"""
db.execute_sql(sql)
logging.info("bomber_auto_call_list_record done")
@action(MessageAction.BOMBER_MANUAL_CALL_LIST)
def bomber_manual_call_list(payload, msg_id):
"""
    Manual case dispatch, driven by ManualCallList batch records
:param payload:
:param msg_id:
:return:
"""
batch_id = payload.get('batch_id')
if batch_id is None:
logging.warning('Invalid batch id')
return
query = (ManualCallList
.select()
.where(ManualCallList.batch_id == batch_id,
ManualCallList.status << ManualCallListStatus.available()))
if not query.exists():
logging.warning('Empty application id list')
return
for q in query:
application_ids = json.loads(q.application_ids or '[]')
# where
cycle = 0
where_list = [(Application.id << application_ids),
Application.latest_bomber_id == q.src_bomber_id]
src_params = json.loads(q.src_params or '{}')
if "cycle" in src_params:
where_list.append(Application.cycle == src_params['cycle'])
cycle = src_params['cycle']
if "status" in src_params:
where_list.append(Application.status == src_params['status'])
# update
update_dict = {'latest_bomber': q.dest_bomber_id}
dest_params = json.loads(q.dest_params or '{}')
if "cycle" in dest_params:
update_dict['cycle'] = dest_params['cycle']
cycle = dest_params['cycle']
if "status" in dest_params:
update_dict['status'] = dest_params['status']
with db.atomic():
try:
# update dispatch_app
if q.update_dispatch_app:
if q.dest_partner_id is None:
raise ValueError('unallowed operation')
(DispatchApp
.delete()
.where(DispatchApp.application_id.in_(application_ids))
.execute())
(DispatchApp
.insert_many([{
'application': i,
'partner': q.dest_partner_id,
'bomber': q.dest_bomber_id,
'status': DisAppStatus.NORMAL.value}
for i in application_ids])
.execute())
application_success_row = (
Application
.update(**update_dict)
.where(*where_list)
.execute()
)
if application_success_row == 0:
raise ValueError('Invalid parameter')
(ManualCallList
.update(
status=ManualCallListStatus.SUCCESS.value,
length=application_success_row)
.where(ManualCallList.id == q.id)
.execute())
out_and_in_record(
src_bomber_id=q.src_bomber_id,
application_ids=application_ids,
dest_partner_id=q.dest_partner_id,
dest_bomber_id=q.dest_bomber_id,
cycle=cycle
)
except Exception:
db.rollback()
(ManualCallList
.update(
status=ManualCallListStatus.FAILED.value,
length=0)
.where(ManualCallList.id == q.id)
.execute())
logging.error("PRINT BOMBER_MANUAL_CALL_LIST ERROR:\n%s",
traceback.format_exc())
continue
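# Turn one entry-record subquery row plus its bill snapshot into a
# DispatchAppHistory row, computing principal and late fee pending at entry.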
def lambda_result(item, dct):
a = str(item.application_id)
entry_principal_pending = (Decimal(item.amount or 0) -
dct[a]['principal_paid'])
entry_late_fee_pending = dct[a]['late_fee'] - dct[a]['late_fee_paid']
return {
'created_at': item.created_at,
'updated_at': item.updated_at,
'application': a,
'bomber_id': item.bomber_id,
'entry_at': item.entry_at,
'entry_overdue_days': item.entry_overdue_days,
'partner_id': item.partner_id,
'expected_out_time': item.expected_out_time,
'entry_principal_pending': entry_principal_pending,
'entry_late_fee_pending': entry_late_fee_pending
}
def out_and_in_record(**kwargs):
"""
    Exit (out) and entry (in) records for applications in the collection system
"""
new_out_record(**kwargs)
new_in_record(**kwargs)
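# Minimal usage sketch (assumed values, mirroring the call in
# bomber_manual_call_list):
#   out_and_in_record(src_bomber_id=1, dest_bomber_id=2, dest_partner_id=None,
#                     application_ids=[1001, 1002], cycle=Cycle.C1A.value)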
def new_out_record(**kwargs):
if not kwargs['application_ids']:
return
(DispatchAppHistory
.update(out_at=fn.NOW())
.where(DispatchAppHistory.bomber_id == kwargs['src_bomber_id'],
DispatchAppHistory.application << kwargs['application_ids'],
DispatchAppHistory.out_at.is_null(True))
.execute())
    # For month-end dispatch, ptp_bomber should not be cleared
if kwargs.get("month_dispatch"):
return
    # On exit, clear ptp_bomber for cases with an active PTP
try:
(Application.update(ptp_bomber=None)
.where(Application.id << kwargs["application_ids"])
.execute())
except Exception as e:
logging.error("new_out_record error:aids:%s,error:%s" %
(kwargs["application_ids"],str(e)))
def new_in_record(**kwargs):
cycle_period = {
1: '10',
2: '30',
3: '60',
4: '90'
}
period = cycle_period.get(kwargs['cycle'], '90 + t1.overdue_days')
kwargs['dest_partner_id'] = kwargs.get('dest_partner_id') or 'null'
subquery = (Application
.select(Application.amount,
fn.NOW().alias('created_at'),
fn.NOW().alias('updated_at'),
Application.id.alias('application_id'),
R(str(kwargs['dest_bomber_id'])).alias('bomber_id'),
fn.NOW().alias('entry_at'),
Application.overdue_days.alias('entry_overdue_days'),
R(str(kwargs['dest_partner_id'])).alias('partner_id'),
(SQL('DATE_ADD(CURDATE(),INTERVAL (%s -'
' t1.overdue_days) DAY)' % period))
.alias('expected_out_time'))
.where(Application.status != ApplicationStatus.REPAID.value,
Application.id << kwargs['application_ids']))
application_list = list(subquery)
for idx in range(0, len(application_list), 1000):
applications = application_list[idx:idx + 1000]
app_ids = [i.application_id for i in applications]
bill_list = BillService().bill_list(application_ids=app_ids)
bill_dict = {str(bill['application_id']): bill for bill in bill_list}
insert_args = list(map(partial(lambda_result,
dct=bill_dict),
applications))
DispatchAppHistory.insert_many(insert_args).execute()
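# Close out an old-loan application: mark it PAID on repayment, or FINISHED
# once both end_date and any promised date have passed; return the application
# id when the special bomber should release the case.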
def end_old_application(old_app, paid=False):
if paid:
if old_app.status == OldLoanStatus.WAITING.value:
old_app.status = OldLoanStatus.PAID.value
old_app.save()
return
if old_app.status == OldLoanStatus.PROCESSING.value:
old_app.status = OldLoanStatus.PAID.value
old_app.save()
return old_app.application_id
end_date = old_app.end_date
now = datetime.now()
if now >= max(end_date, old_app.promised_date or now):
old_app.status = OldLoanStatus.FINISHED.value
old_app.save()
return old_app.application_id
@action(MessageAction.UPDATE_OLD_LOAN_APPLICATION)
def update_old_loan_application(payload, msg_id):
items = (Application
.select(Application, OldLoanApplication)
.join(OldLoanApplication,
JOIN_INNER,
on=(Application.id ==
OldLoanApplication.application_id).alias('old_app'))
.where(OldLoanApplication.status
.in_(OldLoanStatus.available())))
out_list = []
for application in items:
if application.overdue_days > 90:
if application.old_app.status == OldLoanStatus.WAITING.value:
start_old_application(application.old_app)
else:
out_list.append(application.old_app)
success_list = [end_old_application(item) for item in out_list]
app_ids = list(filter(None, success_list))
if app_ids:
bomber_id = SpecialBomber.OLD_APP_BOMBER.value
out_record(src_bomber_id=bomber_id, application_ids=app_ids)
def in_record(**kwargs):
"""
:param kwargs: dist_partner_id, dist_bomber_id,
expected_out_time, application_ids
:return:
"""
    # TODO: unify entry-record creation
kwargs['dist_partner_id'] = kwargs.get('dist_partner_id') or 'null'
subquery = (Application
.select(Application.amount,
fn.NOW().alias('created_at'),
fn.NOW().alias('updated_at'),
Application.id.alias('application_id'),
R(str(kwargs['dist_bomber_id'])).alias('bomber_id'),
fn.NOW().alias('entry_at'),
Application.overdue_days.alias('entry_overdue_days'),
R(str(kwargs['dist_partner_id'])).alias('partner_id'),
R('"{}"'.format(kwargs['expected_out_time']))
.alias('expected_out_time'))
.where(Application.status != ApplicationStatus.REPAID.value,
Application.id << kwargs['application_ids']))
application_list = list(subquery)
for idx in range(0, len(application_list), 1000):
applications = application_list[idx:idx + 1000]
app_ids = [i.application_id for i in applications]
bill_list = BillService().bill_list(application_ids=app_ids)
bill_dict = {str(bill['application_id']): bill for bill in bill_list}
insert_args = list(map(partial(lambda_result, dct=bill_dict),
applications))
DispatchAppHistory.insert_many(insert_args).execute()
def out_record(**kwargs):
"""
:param kwargs: src_bomber_id, application_ids
:return:
"""
    # TODO: unify exit-record creation
if not kwargs.get('application_ids'):
return
(DispatchAppHistory
.update(out_at=fn.NOW())
.where(DispatchAppHistory.bomber_id == kwargs['src_bomber_id'],
DispatchAppHistory.application << kwargs['application_ids'])
.execute())
    # On exit, clear ptp_bomber for cases with an active PTP
try:
(Application.update(ptp_bomber=None)
.where(Application.id << kwargs["application_ids"])
.execute())
except Exception as e:
logging.error("out_record error:aids:%s,error:%s" %
(kwargs["application_ids"], str(e)))
def start_old_application(old_app, cancel=False):
application_id = old_app.application_id
if cancel and (old_app.status == OldLoanStatus.PAID.value):
now = datetime.now()
if old_app.start_date is None:
            # Has not yet entered the 500 pool
old_app.status = OldLoanStatus.WAITING.value
elif now >= max(old_app.end_date, old_app.promised_date or now):
            # By cancellation time the user has already left the 500 pool
old_app.status = OldLoanStatus.FINISHED.value
(DispatchAppHistory
.update(out_at=max(old_app.end_date,
old_app.promised_date or now))
.where(DispatchAppHistory.bomber_id == old_app.bomber_id,
DispatchAppHistory.application == application_id)
.execute())
else:
            # Still in the 500 pool
old_app.status = OldLoanStatus.PROCESSING.value
(DispatchAppHistory
.update(out_at=None)
.where(DispatchAppHistory.bomber_id == old_app.bomber_id,
DispatchAppHistory.application == application_id)
.execute())
old_app.save()
return
application = (
Application
.get_or_none(Application.id == application_id,
Application.status != ApplicationStatus.REPAID.value,
Application.overdue_days > 90,
Application.promised_date.is_null(True) |
(fn.DATE(Application.promised_date) <
datetime.today().date())))
if not application:
logging.error("Can not set old application %s to start collecting",
application_id)
return
if old_app.status in OldLoanStatus.no_available():
logging.info("%s has finished or paid", old_app.application_id)
return
config = SystemConfig.prefetch(SCI.OLD_APP_PERIOD)
sp = config.get(SCI.OLD_APP_PERIOD,
SCI.OLD_APP_PERIOD.default_value)
old_app_bomber = SpecialBomber.OLD_APP_BOMBER.value
old_app.status = OldLoanStatus.PROCESSING.value
old_app.bomber_id = old_app_bomber
old_app.start_date = datetime.now()
    # Only set end_date here if it has not been set before
if not old_app.end_date:
old_app.end_date = datetime.now() + timedelta(days=sp)
old_app.save()
in_record(dist_partner_id=None, dist_bomber_id=old_app_bomber,
application_ids=[old_app.application_id],
expected_out_time=str(old_app.end_date))
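# Maintain the old-loan pool for an application: merge newly reported
# applicant numbers into Contact, extend the window for cases already in
# collection, and start collection when the case qualifies.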
@action(MessageAction.OLD_LOAN_APPLICATION)
def old_loan_application(payload, msg_id):
application_id = payload.get('application_id')
numbers = payload.get('numbers', [])
    if not (application_id and numbers):
        logging.error("empty application id: %s, or invalid numbers: %s",
                      application_id, numbers)
        return
application = Application.get_or_none(Application.id == application_id)
if (application and
application.status == ApplicationStatus.REPAID.value):
logging.error("application %s has paid", application_id)
return
gold_eye = GoldenEye().get('/applications/%s' % application_id)
if not gold_eye.ok:
raise RuntimeError('Get golden eye user failed. {}'
.format(str(application_id)))
gold_app = gold_eye.json().get('data')
user_id = gold_app['user_id']
user_name = gold_app['id_name']
    # Look up the bill category; installment bills are not linked to OldLoanApplication
try:
bill = BillService().bill_dict(application_id=application_id)
except Exception:
logging.error(
'application %s get bill info failed,old_loan_application',
application_id)
return
source_contacts = (Contact
.filter(Contact.user_id == user_id,
Contact.relationship ==
Relationship.APPLICANT.value,
Contact.source ==
ApplicantSource.NEW_APPLICANT.value))
source_contact_set = {i.number for i in source_contacts}
    # Skip the steps below for installment loans
if bill["category"] != ApplicationType.CASH_LOAN_STAGING.value:
        # Fetch the existing new-applicant numbers
old_app = OldLoanApplication.get_or_none(
OldLoanApplication.application_id == application_id,
OldLoanApplication.status.in_(OldLoanStatus.available())
)
if not old_app:
old_app = OldLoanApplication.create(application_id=application_id,
user_id=user_id,
numbers=','.join(numbers))
else:
_numbers = old_app.numbers.split(',')
            # De-duplicate and drop empty numbers
old_app.numbers = ','.join(set([nu for nu in (_numbers + numbers)
if nu]))
            # For cases already in collection, extend end_date by 7 days
if old_app.status == OldLoanStatus.PROCESSING.value:
old_app.end_date = old_app.end_date + timedelta(days=7)
old_app.save()
new_contact = set(numbers) - source_contact_set
insert_args = [{'user_id': user_id,
'name': user_name,
'number': i,
'relationship': Relationship.APPLICANT.value,
'source': ApplicantSource.NEW_APPLICANT.value,
'real_relationship': Relationship.APPLICANT.value
} for i in new_contact]
if insert_args:
Contact.insert_many(insert_args).execute()
if bill["category"] == ApplicationType.CASH_LOAN_STAGING.value:
return
start_old_application(old_app)
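# Thin read-only SQL helpers below: each swallows errors and returns a safe
# default. Amounts are stored in micro-units, hence the / 1000000 scaling.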
def run_one_sql(sql):
try:
cursor = readonly_db.get_cursor()
cursor.execute(sql)
result = cursor.fetchone()[0] / 1000000
except Exception as e:
        logging.info('run sql error: %s, sql: %s' % (str(e), str(sql)))
result = Decimal(0)
return result
def run_member_sql(sql):
result = [0, 0]
try:
cursor = readonly_db.get_cursor()
cursor.execute(sql)
sql_result = cursor.fetchone()
if sql_result:
result = sql_result
except Exception as e:
        logging.info('run sql error: %s, sql: %s' % (str(e), str(sql)))
return result
def run_all_sql(sql):
try:
cursor = readonly_db.get_cursor()
cursor.execute(sql)
result = cursor.fetchall()
except Exception as e:
        logging.info('run sql error: %s, sql: %s' % (str(e), str(sql)))
result = []
return result
# Recover rate for dpd1-3, pending-collection dimension (deprecated)
def get_before_bomber(date_time):
begin_time = str(date_time - timedelta(days=7))
end_time = str(date_time)
    # Pending amount of cases that already existed on Monday
old_sql = """
select
sum(principal_pending+late_fee_pending+interest_pending) as amount
from
bill_java.overdue bb
where
created_at>'%s'
and created_at<date_add('%s',interval 1 day)
and overdue_days in (2,3)
""" % (begin_time, begin_time)
old_data = run_one_sql(old_sql)
    # Amount of cases newly reaching dpd1 each day
new_sql = """
select
sum(principal_pending+late_fee_pending+interest_pending) as amount
from
bill_java.overdue bb
where
created_at> '%s'
and created_at<'%s'
and overdue_days=1;
""" % (begin_time, end_time)
new_data = run_one_sql(new_sql)
    # Amount entering dpd4 each day
dpd4_sql = """
select
sum(principal_pending+late_fee_pending+interest_pending) as amount
from
bill_java.overdue bb
where
created_at>date_add('%s',interval 1 day)
and created_at< date_add('%s',interval 1 day)
and overdue_days=4;
""" % (begin_time, end_time)
dpd4_data = run_one_sql(dpd4_sql)
    # dpd2/3 amounts still pending at the end of the week
dpd2_sql = """
select
sum(principal_pending+late_fee_pending+interest_pending) as amount
from
bill_java.overdue bb
where
created_at>'%s'
and created_at< date_add('%s',interval 1 day)
and overdue_days in (2,3)
""" % (end_time, end_time)
dpd2_data = run_one_sql(dpd2_sql)
all_money = old_data + new_data
repayment = all_money - dpd4_data - dpd2_data
pro = 0
if all_money:
pro = (repayment / all_money) * 100
RepaymentReport.create(
time=begin_time,
cycle=0,
all_money=all_money,
proportion=pro,
repayment=repayment
)
# Refresh the recover_rate report weekly (pending-collection dimension)
@action(MessageAction.RECOVER_RATE_WEEK_MONEY)
def recover_rate_week_money(payload, msg_id):
    # Count today's RECOVER_RATE_WEEK_MONEY runs
worker_log = (WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs'))
.where(WorkerLog.created_at >= date.today(),
WorkerLog.action == 'RECOVER_RATE_WEEK_MONEY')
.first())
if worker_log.logs >= 5:
return
logging.info('start cal recover_rate_week_money')
date_time = date.today()
get_every_cycle_report(date_time)
# Recover rate for dpd1-3, collection-entry dimension
def get_before_bomber_rate(date_time):
begin_time = date_time - timedelta(days=1)
end_time = date_time
for is_first_loan in FIRSTLOAN.values():
begin_date = begin_time
end_date = end_time
for i in range(2, 5):
money_sql = """
select
sum(bo1.principal_pending+bo1.late_fee_pending+
bo1.interest_pending) as dpd1_pending,
sum(bo2.principal_pending+bo2.late_fee_pending+
bo2.interest_pending) as dpd4_pending
from bill_java.overdue bo1
left join dashboard.application da
on bo1.application_id=da.id
left join bill_java.overdue bo2
on bo1.application_id=bo2.application_id
and bo2.overdue_days=%s and bo2.status = 1
where bo1.overdue_days=1
and bo1.status = 1
and bo1.which_day_overdue>='%s'
and bo1.which_day_overdue<'%s'
and da.is_first_loan = %s
and bo1.stage_num is null
""" % (i, begin_date, end_date, is_first_loan)
try:
cursor = readonly_db.get_cursor()
cursor.execute(money_sql)
money = cursor.fetchone()
all_money = money[0] / 1000000
dpd4_money = money[1] / 1000000
except Exception as e:
logging.info('get all_money error: %s' % str(e))
all_money = 0
dpd4_money = 0
repayment = all_money - dpd4_money
if begin_date == date_time - timedelta(days=1):
RepaymentReportInto.create(
time=begin_date,
cycle=0,
all_money=round(all_money, 3),
proportion='0',
repayment=round(repayment, 3),
is_first_loan=is_first_loan,
contain_out=ContainOut.CONTAIN.value
)
else:
pro = '0'
if all_money:
pro = (repayment / all_money) * 100
pro = str(round(pro, 2))
RepaymentReportInto.update(
repayment=round(repayment, 3),
proportion=pro
).where(
RepaymentReportInto.time == begin_date,
RepaymentReportInto.cycle == 0,
RepaymentReportInto.is_first_loan == is_first_loan
).execute()
end_date = begin_date
begin_date = begin_date - timedelta(days=1)
# Recover rate for C1A, collection-entry dimension
def get_c1a_into_rate(date_time):
begin_time = date_time - timedelta(days=1)
end_time = date_time
for is_first_loan in FIRSTLOAN.values():
begin_date = begin_time
end_date = end_time
all_money_sql = """
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id as application_id,ba.C1A_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.C1A_entry >= '%s'
and ba.C1A_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
inner join bill_java.overdue o
on a.application_id=o.application_id
and date(a.cdt)=date(o.created_at)
""" % (begin_date, end_date, is_first_loan)
all_money = run_one_sql(all_money_sql)
begin_date = date_time - timedelta(days=19)
repayment_sql = """
select
sum(b.principal_part+b.late_fee_part) as paid_amount,
cdt
from
(select
br.principal_part, br.late_fee_part,
date(cdt) as cdt, br.repay_at, br.application_id
from (
select ba.id, ba.C1A_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.C1A_entry >= '%s'
and ba.C1A_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 1 and date(br.repay_at) >= date(a.cdt)
group by 4, 5) b
group by 2
""" % (begin_date, end_date, is_first_loan)
repayment = run_all_sql(repayment_sql)
if not repayment:
return
RepaymentReportInto.create(
time=end_date - timedelta(days=1),
cycle=Cycle.C1A.value,
all_money=round(all_money, 3),
proportion='0',
repayment=0,
is_first_loan=is_first_loan,
contain_out=ContainOut.CONTAIN.value
)
for d in repayment:
repay = d[0] / 1000000
report = RepaymentReportInto.filter(
RepaymentReportInto.time == d[1],
RepaymentReportInto.cycle == Cycle.C1A.value,
RepaymentReportInto.is_first_loan == is_first_loan
).first()
if report:
report.repayment = round(repay, 3)
pro = (repay / report.all_money) * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
# Recover rate for C1B, collection-entry dimension
def get_c1b_into_rate(date_time):
begin_time = date_time - timedelta(days=1)
end_time = date_time
for is_first_loan in FIRSTLOAN.values():
begin_date = begin_time
end_date = end_time
all_money_sql = """
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id as application_id,c1b_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c1b_entry >= '%s'
and ba.c1b_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
inner join bill_java.overdue o on a.application_id=o.application_id
and date(a.cdt)=date(o.created_at)
""" % (begin_date, end_date, is_first_loan)
all_money = run_one_sql(all_money_sql)
not_contain_sql = """
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id as application_id,c1b_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c1b_entry >= '%s'
and ba.c1b_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
and not exists(select 1 from bomber.dispatch_app_history bd
where bd.application_id=ba.id and bd.partner_id=5)
) a
inner join bill_java.overdue o on a.application_id=o.application_id
and date(a.cdt)=date(o.created_at)
""" % (begin_date, end_date, is_first_loan)
not_contain_money = run_one_sql(not_contain_sql)
begin_date = date_time - timedelta(days=22)
repayment_sql = """
select sum(b.principal_part+b.late_fee_part) as paid_amount,et
from
(select br.principal_part, br.late_fee_part,
date(a.c1b_entry) as et, br.application_id, br.repay_at
from (
select ba.id, ba.c1b_entry
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c1b_entry >= '%s'
and ba.c1b_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 2
group by 4, 5) b
group by 2;
""" % (begin_date, end_date, is_first_loan)
repayment = run_all_sql(repayment_sql)
not_contain_repay_sql = """
select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et
from
(select br.principal_part,br.late_fee_part,
date(a.c1b_entry) as et, br.application_id, br.repay_at
from (
select ba.id, ba.c1b_entry
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c1b_entry >= '%s'
and ba.c1b_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
and not exists(select 1 from bomber.dispatch_app_history bd
where bd.application_id=ba.id
and bd.partner_id=5)
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 2
group by 4, 5) b
group by 2
""" % (begin_date, end_date, is_first_loan)
not_contain_repay = run_all_sql(not_contain_repay_sql)
if not not_contain_repay and not repayment:
return
for i in ContainOut.values():
if i == ContainOut.NOT_CONTAIN.value:
RepaymentReportInto.create(
time=end_date - timedelta(days=1),
cycle=Cycle.C1B.value,
all_money=round(not_contain_money, 3),
proportion='0',
repayment=0,
is_first_loan=is_first_loan,
contain_out=ContainOut.NOT_CONTAIN.value
)
for repay in not_contain_repay:
repay_money = 0
if repay[0]:
repay_money = repay[0] / 1000000
report = RepaymentReportInto.filter(
RepaymentReportInto.time == repay[1],
RepaymentReportInto.is_first_loan == is_first_loan,
RepaymentReportInto.contain_out == i,
RepaymentReportInto.cycle == Cycle.C1B.value
).first()
if report and report.all_money:
report.repayment = round(repay_money, 3)
pro = (repay_money / report.all_money) * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
elif i == ContainOut.CONTAIN.value:
RepaymentReportInto.create(
time=end_date - timedelta(days=1),
cycle=Cycle.C1B.value,
all_money=round(all_money, 3),
proportion='0',
repayment=0,
is_first_loan=is_first_loan,
contain_out=ContainOut.CONTAIN.value
)
for repay in repayment:
repay_money = 0
if repay[0]:
repay_money = repay[0] / 1000000
report = RepaymentReportInto.filter(
RepaymentReportInto.time == repay[1],
RepaymentReportInto.is_first_loan == is_first_loan,
RepaymentReportInto.contain_out == i,
RepaymentReportInto.cycle == Cycle.C1B.value
).first()
if report and report.all_money:
report.repayment = round(repay_money, 3)
pro = (repay_money / report.all_money) * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
# Recover rate for C2, collection-entry dimension
def get_c2_into_rate(date_time):
begin_time = date_time - timedelta(days=1)
end_time = date_time
for is_first_loan in FIRSTLOAN.values():
begin_date = begin_time
end_date = end_time
all_money_sql = """
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id,c2_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c2_entry >= '%s'
and ba.c2_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
inner join bill_java.overdue o
on a.id=o.application_id
and date(a.cdt)=date(o.created_at)
""" % (begin_date, end_date, is_first_loan)
all_money = run_one_sql(all_money_sql)
not_contain_sql = """
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id,c2_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c2_entry >= '%s'
and ba.c2_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
and not exists(select 1 from bomber.dispatch_app_history bd
where bd.application_id=ba.id
and bd.partner_id=1)
) a
inner join bill_java.overdue o on a.id=o.application_id
and date(a.cdt)=date(o.created_at)
""" % (begin_date, end_date, is_first_loan)
not_contain_money = run_one_sql(not_contain_sql)
begin_date = date_time - timedelta(days=37)
repayment_sql = """
select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et
from
(select br.principal_part,br.late_fee_part,
date(a.c2_entry) as et, br.application_id, br.repay_at
from (
select ba.id, ba.c2_entry
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c2_entry >= '%s'
and ba.c2_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 3
group by 4, 5) b
group by 2
""" % (begin_date, end_date, is_first_loan)
repayment = run_all_sql(repayment_sql)
not_contain_repay_sql = """
select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et
from
(select br.principal_part,br.late_fee_part,
date(a.c2_entry) as et, br.application_id, br.repay_at
from (
select ba.id, ba.c2_entry
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c2_entry >= '%s'
and ba.c2_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
and not exists(select 1 from bomber.dispatch_app_history bd
where bd.application_id=ba.id
and bd.partner_id=1)
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 3
group by 4, 5) b
group by 2
""" % (begin_date, end_date, is_first_loan)
not_contain_repay = run_all_sql(not_contain_repay_sql)
        if not not_contain_repay and not repayment:
return
for i in ContainOut.values():
if i == ContainOut.NOT_CONTAIN.value:
RepaymentReportInto.create(
time=end_date - timedelta(days=1),
cycle=Cycle.C2.value,
all_money=round(not_contain_money, 3),
proportion='0',
repayment=0,
is_first_loan=is_first_loan,
contain_out=ContainOut.NOT_CONTAIN.value
)
for repay in not_contain_repay:
repay_money = Decimal(0)
if repay[0]:
repay_money = repay[0]
repay_money = repay_money / 1000000
report = RepaymentReportInto.filter(
RepaymentReportInto.time == repay[1],
RepaymentReportInto.is_first_loan == is_first_loan,
RepaymentReportInto.contain_out == i,
RepaymentReportInto.cycle == Cycle.C2.value
).first()
if report and report.all_money:
report.repayment = round(repay_money, 3)
pro = (repay_money / report.all_money) * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
elif i == ContainOut.CONTAIN.value:
RepaymentReportInto.create(
time=end_date - timedelta(days=1),
cycle=Cycle.C2.value,
all_money=round(all_money, 3),
proportion='0',
repayment=0,
is_first_loan=is_first_loan,
contain_out=ContainOut.CONTAIN.value
)
for repay in repayment:
repay_money = 0
if repay[0]:
repay_money = repay[0] / 1000000
report = RepaymentReportInto.filter(
RepaymentReportInto.time == repay[1],
RepaymentReportInto.is_first_loan == is_first_loan,
RepaymentReportInto.contain_out == i,
RepaymentReportInto.cycle == Cycle.C2.value
).first()
if report and report.all_money:
report.repayment = round(repay_money, 3)
pro = (repay_money / report.all_money) * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
# Recover rate for C3, collection-entry dimension
def get_c3_into_rate(date_time):
begin_time = date_time - timedelta(days=1)
end_time = date_time
for is_first_loan in FIRSTLOAN.values():
begin_date = begin_time
end_date = end_time
all_money_sql = """
select sum(o.principal_pending+o.late_fee_pending+
o.interest_pending) as pending_amount
from (
select ba.id, ba.c3_entry as cdt
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c3_entry >= '%s'
and ba.c3_entry < '%s'
and ba.type = 0
and da.is_first_loan = %s
) a
inner join bill_java.overdue o on a.id=o.application_id
and date(a.cdt)=date(o.created_at)
""" % (begin_date, end_date, is_first_loan)
all_money = run_one_sql(all_money_sql)
begin_date = date_time - timedelta(days=30)
repayment_sql = """
select sum(b.principal_part+b.late_fee_part) as paid_amount, b.et
from
(select br.principal_part,br.late_fee_part,
date(a.c3_entry) as et, br.application_id, br.repay_at
from (
select ba.id, ba.c3_entry
from bomber.application ba
left join dashboard.application da on ba.id=da.id
where ba.c3_entry >= '%s'
and ba.c3_entry < '%s'
and ba.type = 0
and da.is_first_loan = '%s'
) a
left join bomber.repayment_log br on br.application_id = a.id
and br.cycle = 4
group by 4, 5) b
group by 2
""" % (begin_date, end_date, is_first_loan)
repayment = run_all_sql(repayment_sql)
RepaymentReportInto.create(
time=end_date - timedelta(days=1),
cycle=Cycle.C3.value,
all_money=round(all_money, 3),
proportion='0',
repayment=0,
is_first_loan=is_first_loan,
contain_out=ContainOut.CONTAIN.value
)
if not repayment:
return
for repay in repayment:
repay_money = Decimal(0)
if repay[0]:
repay_money = repay[0]
repay_money = repay_money / 1000000
report = RepaymentReportInto.filter(
RepaymentReportInto.time == repay[1],
RepaymentReportInto.cycle == Cycle.C3.value,
RepaymentReportInto.is_first_loan == is_first_loan
).first()
if report:
report.repayment = repay_money
pro = 0
if report.all_money and int(report.all_money):
pro = (repay_money / report.all_money) * 100
pro = str(round(pro, 2))
report.proportion = pro
report.save()
# Refresh the recover_rate report daily (collection-entry dimension)
@action(MessageAction.RECOVER_RATE_WEEK_MONEY_INTO)
def recover_rate_week_money_into(payload, msg_id):
worker_log = (WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs'))
.where(WorkerLog.created_at >= date.today(),
WorkerLog.action == 'RECOVER_RATE_WEEK_MONEY_INTO')
.first())
if worker_log and worker_log.logs >= 5:
return
date_time = date.today()
get_before_bomber_rate(date_time)
get_c1a_into_rate(date_time)
get_c1b_into_rate(date_time)
get_c2_into_rate(date_time)
get_c3_into_rate(date_time)
    # Flip data that has matured from unripe to ripe
ripe_days = {0: 3, 1: 7, 2: 20, 3: 30, 4: 30}
for i in range(0, 5):
repe_date = date.today() - timedelta(days=ripe_days[i])
(RepaymentReportInto
.update(ripe_ind=RipeInd.RIPE.value)
.where(RepaymentReportInto.time < repe_date,
RepaymentReportInto.cycle == i)
).execute()
# ----------- Metrics that existed in the original summary (summary_bomber) -----------
# Build the base rows
def get_static_bomber(begin_date):
active_date = begin_date - timedelta(days=8)
bombers = (BomberR
.select(BomberR.id,
BomberR.role.alias('role'),
BomberR.last_active_at.alias('active'))
.where(BomberR.last_active_at > active_date,
BomberR.role << [1, 2, 4, 5, 6, 8,9]))
summary = []
for bomber in bombers:
summary.append({
'time': begin_date,
'bomber_id': bomber.id,
'cycle': bomber.role.cycle,
'work_ind': 0
})
SummaryBomber.insert_many(summary).execute()
# Some metrics must be computed on the evening of the same day
@action(MessageAction.SUMMARY_CREATE)
def summary_create(payload, msg_id):
begin_date = date.today()
worker_log = (WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs'))
.where(WorkerLog.created_at >= begin_date,
WorkerLog.action == 'SUMMARY_CREATE')
.first())
if worker_log and worker_log.logs >= 5:
return
get_static_bomber(begin_date)
# Mark the bombers who worked today
def get_active_bomber(begin_date):
bombers = (BomberR
.select(BomberR.id)
.where(BomberR.last_active_at >= begin_date))
for bomber in bombers:
(SummaryBomber.update(work_ind=1)
.where(SummaryBomber.time == begin_date,
SummaryBomber.bomber_id == bomber.id)
).execute()
# Daily per-bomber call counts and distinct cases called
@time_logger
def get_call_and_made(end_date, begin_date, real_time_query=False):
call_sql = """
select
bomber_id,
count(case when relationship is not null then application_id end)
as 'call_cnt',
count(distinct case when relationship is not null then
application_id end) as 'call_case',
count(case when phone_status=4 then application_id end) as 'connect',
count(distinct case when phone_status=4 then application_id end)
as 'connect_case'
from (
select bomber_id,application_id,phone_status, cycle, relationship
from bomber.call_actions ba
where created_at>'%s' and created_at<'%s'
and type in (0, 1)
) a
group by 1
""" % (begin_date, end_date)
calls = run_all_sql(call_sql)
if real_time_query:
return calls
for call in calls:
bomber, call_cnt, case_made, connect_cnt, case_connect = call
(SummaryBomber.update(
case_made_cnt=case_made,
call_cnt=call_cnt,
call_connect_cnt=connect_cnt,
case_connect_cnt=case_connect)
.where(
SummaryBomber.bomber_id == bomber,
SummaryBomber.time == begin_date)
).execute()
return calls
# Daily per-bomber count of claimed (pending) cases
@time_logger
def get_claimed_cnt(end_date, begin_date, real_time_query=False):
table_date = begin_date - timedelta(days=30)
claimed_sql = """
SELECT
COUNT( `t1`.`application_id` ) AS cnt,
`t1`.`bomber_id` AS bomber_id
FROM
`dispatch_app_history` AS t1
WHERE
( `t1`.`out_at` > '%s' OR `t1`.`out_at` IS null )
AND ( `t1`.`bomber_id` != 1000 )
AND ( `t1`.`partner_id` IS null )
AND ( `t1`.`entry_at` > '%s' )
AND ( `t1`.`entry_at` < '%s' )
GROUP BY
`t1`.`bomber_id`
""" % (begin_date, table_date, end_date)
claimeds = run_all_sql(claimed_sql)
if real_time_query:
return claimeds
for claimed in claimeds:
cnt, bomber_id = claimed
(SummaryBomber.update(claimed_cnt=cnt)
.where(SummaryBomber.time == begin_date,
SummaryBomber.bomber_id == bomber_id)
).execute()
return claimeds
# SMS statistics per bomber
def get_sms_data(end_date, begin_date):
all_sms = (ConnectHistoryR
.select(ConnectHistoryR.operator.alias('bomber_id'),
fn.COUNT(ConnectHistoryR.application).alias('sms_send'))
                      .where(ConnectHistoryR.created_at > begin_date,
                             ConnectHistoryR.created_at < end_date,
ConnectHistoryR.type.in_(ConnectType.sms()))
.group_by(ConnectHistoryR.operator))
for sms in all_sms:
(SummaryBomber.update(sms_cnt=sms.sms_send)
         .where(SummaryBomber.time == begin_date,
SummaryBomber.bomber_id == sms.bomber_id)
).execute()
return all_sms
# PTP (promise-to-pay) statistics
@time_logger
def get_ptp_data(end_date, begin_date, real_query_time=False):
sql = """
SELECT
a.bomber_id,
sum( a.promised_amount ) AS ptp_amount,
count( application_id )
FROM
bomber.auto_call_actions a
LEFT JOIN bomber.bomber c ON a.bomber_id = c.id
WHERE
a.created_at >= '%s'
AND a.created_at < '%s'
AND a.promised_date != ''
GROUP BY 1
UNION
SELECT
a.bomber_id,
ifnull( sum( a.promised_amount ), 0 ) AS ptp_amount,
count( application_id )
FROM
bomber.bombing_history a
LEFT JOIN bomber.bomber c ON a.bomber_id = c.id
WHERE
        bomber_id NOT BETWEEN 151 AND 177
        AND bomber_id NOT BETWEEN 181 AND 183
AND bomber_id != 72
AND a.created_at >= '%s'
AND a.created_at < '%s'
AND a.promised_date != ''
GROUP BY 1
""" % (begin_date, end_date, begin_date, end_date)
ptp_datas = run_all_sql(sql)
if real_query_time:
return ptp_datas
result = {}
for ptp in ptp_datas:
bomber_id, amount, cnt = ptp
if bomber_id in result.keys():
result[bomber_id][0] += amount
result[bomber_id][1] += cnt
continue
result[bomber_id] = [amount, cnt]
for key, value in result.items():
(SummaryBomber
.update(
promised_cnt=value[1],
promised_amount=value[0]
).where(
SummaryBomber.time == begin_date,
SummaryBomber.bomber_id == key
)).execute()
return ptp_datas
# Recovered amount and recovered case counts
@time_logger
def get_recover_amount(end_date, begin_date, real_time_query=False):
C1_sql = """
SELECT a.current_bomber_id,
sum(principal_part+late_fee_part) as pay_amount,
count(distinct application_id)
from
(select a.cycle,a.current_bomber_id,b.username,a.principal_part,
a.late_fee_part,a.application_id,a.repay_at
FROM bomber.repayment_log a ,bomber.bomber b
WHERE a.repay_at >= '%s' AND a.repay_at <'%s'
AND a.current_bomber_id !=''
AND a.current_bomber_id = b.id
and b.role_id in (1,4)
and principal_part+late_fee_part>0
group by 6,7) a
GROUP BY a.cycle,a.current_bomber_id
""" % (begin_date, end_date)
C1_results = run_all_sql(C1_sql)
if not real_time_query:
for C1_result in C1_results:
bomber_id, amount, cnt = C1_result
(SummaryBomber.update(
cleared_cnt=cnt,
cleared_amount=amount
).where(
SummaryBomber.bomber_id == bomber_id,
SummaryBomber.time == begin_date
)).execute()
other_sql = """
select current_bomber_id,sum(pay_amount) as pay_amount,
count(distinct application_id)
from (
select application_id,current_bomber_id,pay_amount,repay_at
from (
select br.application_id,br.current_bomber_id,
br.principal_part+br.late_fee_part as pay_amount,br.repay_at
from bomber.repayment_log br
left join bomber.bomber bb on br.current_bomber_id=bb.id
where exists (select 1 from bomber.bombing_history bb
where br.current_bomber_id=bb.bomber_id
and br.application_id=bb.application_id
and bb.created_at<br.repay_at
and (bb.promised_date is not null
or bb.promised_amount is not null))
and br.repay_at >= '%s'
and br.repay_at < '%s'
and bb.role_id in (2,3,5,6,7,8,9)
and br.principal_part+br.late_fee_part > 0
group by 1,4
) a
group by 1,4) b
group by 1
""" % (begin_date, end_date)
sql_results = run_all_sql(other_sql)
if not real_time_query:
for sql_result in sql_results:
bomber_id, amount, cnt = sql_result
(SummaryBomber.update(
cleared_cnt=cnt,
cleared_amount=amount
).where(
SummaryBomber.bomber_id == bomber_id,
SummaryBomber.time == begin_date
)).execute()
result = sql_results + C1_results
return result
# New summary report data (computed in stages; this handles the first part)
@action(MessageAction.SUMMARY_NEW)
def summary_new(payload, msg_id):
end_date = date.today()
begin_date = end_date - timedelta(days=1)
worker_log = (WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs'))
.where(WorkerLog.created_at >= end_date,
WorkerLog.action == 'SUMMARY_NEW')
.first())
if worker_log and worker_log.logs >= 5:
return
get_active_bomber(begin_date)
get_call_and_made(end_date, begin_date)
get_claimed_cnt(end_date, begin_date)
get_sms_data(end_date, begin_date)
get_ptp_data(end_date, begin_date)
get_recover_amount(end_date, begin_date)
get_unfollowed(begin_date)
get_unfollowed_call(begin_date)
# ---------------------- Compute the remaining summary-bomber metrics ----------------------
# Get new-case counts and amounts
def get_new_case_amount(begin_date, end_date):
all_case = (DispatchAppHistoryR
.select(fn.SUM(DispatchAppHistoryR.entry_late_fee_pending +
DispatchAppHistoryR.entry_principal_pending)
.alias('pending'),
DispatchAppHistoryR.bomber_id,
fn.COUNT(DispatchAppHistoryR.application).alias('cnt'))
.where(DispatchAppHistoryR.entry_at > begin_date,
DispatchAppHistoryR.entry_at < end_date,
DispatchAppHistoryR.partner_id.is_null(True))
.group_by(DispatchAppHistoryR.bomber_id))
for case in all_case:
SummaryBomber.update(
new_case_amount_sum=case.pending,
new_case_cnt=case.cnt
).where(
SummaryBomber.bomber_id == case.bomber_id,
SummaryBomber.time == begin_date
).execute()
return all_case
# Get KP (kept-promise) metrics
def get_kp_cleared(begin_date, end_date):
auto_call_sql = """
SELECT
a.current_bomber_id, count( b.application_id )
FROM
(SELECT
current_bomber_id, principal_part, late_fee_part,
repay_at, application_id
FROM
bomber.repayment_log
WHERE
repay_at >= '%s'
AND repay_at < '%s'
GROUP BY 4, 5 ) a
LEFT JOIN (
SELECT
cycle, bomber_id, promised_amount, promised_date,
application_id, created_at
FROM
bomber.auto_call_actions
WHERE
created_at >= date_sub( '%s', INTERVAL 7 DAY )
AND created_at < '%s'
AND promised_date IS NOT NULL
) b ON a.current_bomber_id = b.bomber_id
AND a.application_id = b.application_id
AND date( a.repay_at ) <= date( b.promised_date )
AND date( a.repay_at ) >= date( b.created_at )
LEFT JOIN bomber.bomber c ON a.current_bomber_id = c.id
WHERE
b.promised_date >= '%s'
GROUP BY 1
""" % (begin_date, end_date, begin_date, end_date, begin_date)
auto_call_results = run_all_sql(auto_call_sql)
manual_sql = """
SELECT
a.current_bomber_id, count( b.application_id )
FROM
(SELECT
current_bomber_id, principal_part, late_fee_part,
repay_at, application_id, created_at
FROM
bomber.repayment_log
WHERE
repay_at >= '%s'
AND repay_at < '%s'
AND principal_part + late_fee_part > 0
GROUP BY 2, 5 ) a
LEFT JOIN (
SELECT
cycle, bomber_id, promised_amount, promised_date,
application_id, created_at
FROM
bomber.bombing_history
WHERE
created_at >= date_sub( '%s', INTERVAL 7 DAY )
AND created_at < '%s'
AND promised_date IS NOT NULL
) b ON a.current_bomber_id = b.bomber_id
AND a.application_id = b.application_id
AND date( a.repay_at ) <= date( b.promised_date )
AND date( a.repay_at ) >= date( b.created_at )
LEFT JOIN bomber.bomber c ON a.current_bomber_id = c.id
WHERE
b.promised_date >= '%s'
GROUP BY 1
""" % (begin_date, end_date, begin_date, end_date, begin_date)
manual_results = run_all_sql(manual_sql)
sql_result = auto_call_results + manual_results
result = {}
for data in sql_result:
if data[0] in result.keys():
result[data[0]] += data[1]
continue
result[data[0]] = data[1]
for key, value in result.items():
(SummaryBomber
.update(
KP_cleared_cnt=value
).where(
SummaryBomber.bomber_id == key,
SummaryBomber.time == begin_date)
).execute()
# Get cases under PTP today (the denominator of the KP rate)
def get_kp_today(begin_date, end_date):
sql = """
select bomber_id, count(distinct application_id)
from(
SELECT bomber_id, application_id
FROM bomber.auto_call_actions a
WHERE promised_date >= '%s' AND created_at < '%s'
AND EXISTS(select 1 from bomber.application ba
where a.application_id=ba.id
and (ba.finished_at is null
or ba.finished_at > '%s'))
UNION
SELECT bomber_id, application_id
FROM bomber.bombing_history b
WHERE promised_date >= '%s' AND created_at < '%s'
AND EXISTS(select 1 from bomber.application ba
where b.application_id=ba.id
and (ba.finished_at is null
or ba.finished_at > '%s'))) result
GROUP BY 1
""" % (begin_date, end_date, begin_date, begin_date, end_date, begin_date)
kp_today = run_all_sql(sql)
for kp in kp_today:
(SummaryBomber.update(
KP_today_cnt=kp[1]
).where(
SummaryBomber.time == begin_date,
SummaryBomber.bomber_id == kp[0]
)).execute()
# Get PTP details (cases due today and cases due the next day)
def get_ptp_cnt(begin_date, end_date):
today_due = []
for sql_date in (begin_date, end_date):
sql = """
select bomber_id,count(distinct application_id) as cnt from
        ( # auto-call records, excluding cases whose PTP date was later changed
select application_id,bomber_id,created_at
from bomber.auto_call_actions ba
            where promised_date ='%s' # filter out cases whose PTP was renewed manually
and not exists ( select 1 from bomber.bombing_history bb
where bb.application_id = ba.application_id
and bb.bomber_id = ba.bomber_id
and bb.created_at>ba.created_at
and bb.promised_date is not null
and bb.created_at < '%s')
            union # history records; exclude cases whose renewed PTP moved them off today's list
select b.application_id,b.bomber_id,a.cdt
from bomber.bombing_history b
inner join (
select application_id,bomber_id,max(created_at) as cdt
from bomber.bombing_history bb
where bb.created_at>date_sub('%s',interval 7 day)
and bb.created_at<'%s'
and promised_date is not null
group by 1,2) a
on b.application_id=a.application_id
and b.bomber_id=a.bomber_id and a.cdt=b.created_at
where b.promised_date ='%s'
            union # PTPs made today for today
select b.application_id,b.bomber_id,b.created_at
from bomber.bombing_history b
where b.promised_date ='%s'
and b.created_at>'%s'
and b.created_at<date_add('%s',interval 1 day)
) a
where exists(select 1 from bomber.application ba
where ba.id=a.application_id
and ((ba.finished_at is null)
or (ba.finished_at > '%s')))
group by 1
""" % (sql_date, begin_date, begin_date, begin_date, sql_date,
sql_date, begin_date, begin_date, begin_date)
datas = run_all_sql(sql)
if sql_date == begin_date:
today_due = datas
for data in datas:
(SummaryBomber.update(
ptp_today_cnt=data[1]
).where(
SummaryBomber.time == begin_date,
SummaryBomber.bomber_id == data[0]
)).execute()
continue
nextday_due = datas
for data in datas:
(SummaryBomber.update(
ptp_next_cnt=data[1]
).where(
SummaryBomber.time == begin_date,
SummaryBomber.bomber_id == data[0]
)).execute()
return [today_due, nextday_due]
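# Note on the return shape: get_ptp_cnt returns [today_due, nextday_due], where
# each element is a list of (bomber_id, cnt) rows from run_all_sql. A
# hypothetical example: [[(7, 3)], [(7, 5)]] means bomber 7 has 3 PTPs due on
# begin_date and 5 due on end_date.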
# Get PTP follow-up details
def get_ptp_call_cnt(begin_date, end_date):
today_followed = []
for sql_data in (begin_date, end_date):
sql = """
select b.bomber_id,count(distinct b.application_id) as cnt
from (
select a.* from
(
select application_id,bomber_id,created_at
from bomber.auto_call_actions ba
            where promised_date ='%s' # filter out cases whose PTP was renewed manually
and not exists (select 1 from bomber.bombing_history bb
where bb.application_id = ba.application_id
and bb.bomber_id = ba.bomber_id
and bb.created_at>ba.created_at
and bb.promised_date is not null
and bb.created_at < '%s')
            union # history records; exclude cases whose renewed PTP moved them off today's list
select b.application_id,b.bomber_id,a.cdt
from bomber.bombing_history b
inner join (
select application_id,bomber_id,max(created_at) as cdt
from bomber.bombing_history bb
where bb.created_at>date_sub('%s',interval 7 day)
and bb.created_at<'%s'
and promised_date is not null
group by 1,2) a
on b.application_id=a.application_id
and b.bomber_id=a.bomber_id and a.cdt=b.created_at
where b.promised_date ='%s'
            union # PTPs made today for today
select b.application_id,b.bomber_id,b.created_at
from bomber.bombing_history b
where b.promised_date ='%s'
and b.created_at>'%s'
and b.created_at<date_add('%s',interval 1 day)
) a
where exists(select 1 from bomber.application ba
where ba.id=a.application_id
and ((ba.finished_at is null)
or (ba.finished_at > '%s')))
and exists(select 1 from bomber.call_actions bc
where a.application_id = bc.application_id
and a.bomber_id = bc.bomber_id
and bc.created_at>'%s'
and bc.created_at< date_add('%s',interval 1 day)
and bc.created_at>=a.created_at)
union
select a.* from
(
select application_id,bomber_id,created_at
from bomber.auto_call_actions ba
            where promised_date ='%s' # filter out cases whose PTP was renewed manually
and not exists ( select 1 from bomber.bombing_history bb
where bb.application_id = ba.application_id
and bb.bomber_id = ba.bomber_id
and bb.created_at>ba.created_at
and bb.promised_date is not null
and bb.created_at < '%s')
            union # history records; exclude cases whose renewed PTP moved them off today's list
select b.application_id,b.bomber_id,a.cdt
from bomber.bombing_history b
inner join (
select application_id,bomber_id,max(created_at) as cdt
from bomber.bombing_history bb
where bb.created_at>date_sub('%s',interval 7 day)
and bb.created_at<'%s'
and promised_date is not null
group by 1,2) a
on b.application_id=a.application_id
and b.bomber_id=a.bomber_id and a.cdt=b.created_at
where b.promised_date ='%s'
            union # PTPs made today for today
select b.application_id,b.bomber_id,b.created_at
from bomber.bombing_history b
where b.promised_date ='%s'
and b.created_at>'%s'
and b.created_at<date_add('%s',interval 1 day)
) a
where exists(select 1 from bomber.application ba
where ba.id=a.application_id
and ba.finished_at > '%s'
and ba.finished_at< date_add('%s',interval 1 day))
) b
group by 1
""" % (sql_data, begin_date, begin_date, begin_date, sql_data,
sql_data, begin_date, begin_date, begin_date, begin_date,
begin_date, sql_data, begin_date, begin_date, begin_date,
sql_data, sql_data, begin_date, begin_date, begin_date,
begin_date)
datas = run_all_sql(sql)
if sql_data == begin_date:
today_followed = datas
for data in datas:
(SummaryBomber.update(
ptp_today_call_cnt=data[1]
).where(
SummaryBomber.bomber_id == data[0],
SummaryBomber.time == begin_date
)).execute()
continue
nextday_followed = datas
for data in datas:
(SummaryBomber.update(
ptp_next_call_cnt=data[1]
).where(
SummaryBomber.bomber_id == data[0],
SummaryBomber.time == begin_date
)).execute()
return [today_followed, nextday_followed]
# Get new-case repayment amounts (only C2 and C3 have the notion of new-case repayment)
def get_new_case_cleared(begin_date, end_date):
sql = """
SELECT
ptp_bomber AS bomber_id,
sum( paid_amount ) AS pending
FROM
(SELECT
br.late_fee_part + br.principal_part AS paid_amount,
br.ptp_bomber
FROM bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.c1b_entry ) = date( br.repay_at )
AND br.ptp_bomber is not null
WHERE ba.c1b_entry > '%s'
AND ba.c1b_entry < '%s'
) a
GROUP BY 1
UNION
SELECT
ptp_bomber AS bomber_id,
sum( paid_amount ) AS pending
FROM
(SELECT
br.late_fee_part + br.principal_part AS paid_amount,
br.ptp_bomber
FROM bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.c2_entry ) = date( br.repay_at )
AND br.ptp_bomber is not null
WHERE ba.c2_entry > '%s'
AND ba.c2_entry < '%s'
) a
GROUP BY 1
UNION
SELECT
ptp_bomber AS bomber_id,
sum( paid_amount ) AS pending
FROM
(SELECT
br.late_fee_part + br.principal_part AS paid_amount,
br.ptp_bomber
FROM
bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.c3_entry ) = date( br.repay_at )
AND br.ptp_bomber is not null
WHERE ba.c3_entry > '%s'
AND ba.c3_entry < '%s'
) a
GROUP BY 1
""" % (begin_date, end_date, begin_date, end_date,begin_date, end_date)
case_cleared_sums = run_all_sql(sql)
for clear in case_cleared_sums:
(SummaryBomber.update(
new_case_cleared_sum=clear[1]
).where(
SummaryBomber.time == begin_date,
SummaryBomber.bomber_id == clear[0]
)).execute()
# New cases followed up on the day of entry
@time_logger
def get_new_case_call(begin_date, end_date, real_query_time=False):
sql = """
SELECT
bd.bomber_id,
count( DISTINCT bd.application_id )
FROM
bomber.dispatch_app_history bd
INNER JOIN bomber.call_actions bc
ON bd.application_id = bc.application_id
AND bd.bomber_id = bc.bomber_id
AND date( bd.entry_at ) = date( bc.created_at )
WHERE
entry_at > '%s'
AND entry_at < '%s'
AND partner_id IS NULL
GROUP BY 1
""" % (begin_date, end_date)
new_case_calls = run_all_sql(sql)
if real_query_time:
return new_case_calls
for call in new_case_calls:
(SummaryBomber.update(
new_case_call_cnt=call[1]
).where(
SummaryBomber.bomber_id == call[0],
SummaryBomber.time == begin_date
)).execute()
return new_case_calls
# Get average call duration per connected case
@time_logger
def get_calltime_avg(begin_date, end_date, real_query_time=False):
autos_sql = """
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS auto_talkduraction,
count( 1 ) AS auto_jt_cnt
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.username = bb.username
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND an.username != ' '
AND an.STATUS = 'ANSWERED'
AND bb.id IS NOT NULL
GROUP BY 1
""" % (begin_date, end_date)
autos = run_all_sql(autos_sql)
manual_sql = """
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS manual_talkduraction,
count( 1 ) AS manual_jt_cnt
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) )
AND an.STATUS = 'ANSWERED'
AND bb.id IS NOT NULL
AND an.recording is not null
GROUP BY 1
""" % (begin_date, end_date, '5%', '3%')
manuals = run_all_sql(manual_sql)
datas = autos + manuals
result = {}
for data in datas:
if data[0] in result.keys():
result[data[0]][0] += data[1]
result[data[0]][1] += data[2]
continue
result[data[0]] = [data[1], data[2]]
if real_query_time:
return result
for key, value in result.items():
(SummaryBomber.update(
calltime_case_sum=value[0],
calltime_case_cnt=value[1],
calltime_case_avg=value[0] / value[1] if value[1] else 0
).where(
SummaryBomber.time == begin_date,
SummaryBomber.bomber_id == key
)).execute()
return result
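# A sketch of the accumulator shape used above (hypothetical values):
# autos == [(7, 120, 4)] and manuals == [(7, 60, 2)] merge into
# result[7] == [180, 6], and the stored average is 180 / 6 == 30 seconds.
# The `if value[1] else 0` guard avoids dividing by zero for bombers with a
# zero answered-call count.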
# Get waiting-time data (call time not attributable to connected cases)
def get_no_calltime_avg(begin_date, end_date):
manual_sql = """
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS manual_talkduraction,
count( 1 ) AS manual_jt_cnt
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) )
AND (an.status!='ANSWERED' or an.recording is null)
AND bb.id IS NOT NULL
GROUP BY 1
""" % (begin_date, end_date, '5%', '3%')
manuals = run_all_sql(manual_sql)
for data in manuals:
(SummaryBomber.update(
calltime_no_case_sum=data[1],
calltime_no_case_cnt=data[2],
calltime_no_case_avg=data[1] / data[2] if data[2] else 0
).where(
SummaryBomber.time == begin_date,
SummaryBomber.bomber_id == data[0]
)).execute()
# Get total call duration
@time_logger
def get_calltime_sum(begin_date, end_date, real_query_time=False):
autos_sql = """
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS auto_talkduraction
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.username = bb.username
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND an.username != ' '
AND bb.id IS NOT NULL
GROUP BY 1
""" % (begin_date, end_date)
autos = run_all_sql(autos_sql)
manual_sql = """
SELECT
bb.id AS bomber_id,
sum( talkduraction ) AS manual_talkduraction
FROM
auto_call.newcdr an
LEFT JOIN bomber.bomber bb ON an.callfrom = bb.ext
WHERE
an.timestart >= '%s'
AND an.timestart < '%s'
AND ( ( an.callfrom LIKE '%s' ) OR ( an.callfrom LIKE '%s' ) )
AND bb.id IS NOT NULL
GROUP BY 1
""" % (begin_date, end_date, '5%', '3%')
manuals = run_all_sql(manual_sql)
datas = autos + manuals
result = {}
for data in datas:
if data[0] in result.keys():
result[data[0]] += data[1]
continue
result[data[0]] = data[1]
if real_query_time:
return result
for key, value in result.items():
(SummaryBomber.update(
calltime_sum=value
).where(
SummaryBomber.time == begin_date,
SummaryBomber.bomber_id == key
)).execute()
return result
# Cases not yet followed up as of the day
def get_unfollowed(begin_date):
sql = """
SELECT
bomber_id,
count(1)
FROM
(
SELECT
bd.application_id,
date(bd.entry_at) AS entry_at,
bd.bomber_id,
date(bd.out_at) AS out_at
FROM
bomber.dispatch_app_history bd
WHERE
(
out_at > date_add('%(begin_date)s', INTERVAL 1 DAY)
OR out_at IS NULL
)
AND entry_at < date_add('%(begin_date)s', INTERVAL 1 DAY)
AND entry_at > date_sub('%(begin_date)s', INTERVAL 30 DAY)
AND partner_id IS NULL
AND NOT EXISTS (
SELECT
1
FROM
bomber.call_actions bc
WHERE
bd.bomber_id = bc.bomber_id
AND bc.application_id = bd.application_id
AND bc.created_at < '%(begin_date)s'
)
) a
GROUP BY
1
""" % {'begin_date': begin_date}
data = run_all_sql(sql)
result = defaultdict(int)
for d in data:
result[d[0]] += d[1]
bomber_list = []
for key, value in result.items():
bomber_list.append(key)
(SummaryBomber.update(
unfollowed_cnt=SummaryBomber.new_case_cnt + value
).where(
SummaryBomber.time == begin_date,
SummaryBomber.bomber_id == key
)).execute()
    # Remaining bomber_ids default to new_case_cnt
(SummaryBomber.update(
unfollowed_cnt=SummaryBomber.new_case_cnt
).where(
SummaryBomber.time == begin_date,
SummaryBomber.bomber_id.not_in(bomber_list)
)).execute()
# Among the unfollowed cases, those followed up during the day
def get_unfollowed_call(begin_date):
sql = """
SELECT
bomber_id,
count(1)
FROM
(
SELECT
bd.application_id,
date(bd.entry_at) AS entry_at,
bd.bomber_id,
date(bd.out_at) AS out_at
FROM
bomber.dispatch_app_history bd
WHERE
(
out_at > date_add('%(begin_date)s', INTERVAL 1 DAY)
OR out_at IS NULL
)
AND entry_at < date_add('%(begin_date)s', INTERVAL 1 DAY)
AND entry_at > date_sub('%(begin_date)s', INTERVAL 30 DAY)
AND partner_id IS NULL
AND NOT EXISTS (
SELECT
1
FROM
bomber.call_actions bc
WHERE
bd.bomber_id = bc.bomber_id
AND bc.application_id = bd.application_id
AND bc.created_at < '%(begin_date)s'
)
) a
WHERE
EXISTS (
SELECT
1
FROM
bomber.call_actions bc
WHERE
a.application_id = bc.application_id
AND a.bomber_id = bc.bomber_id
AND bc.created_at > '%(begin_date)s'
AND bc.created_at < date_add('%(begin_date)s', INTERVAL 1 DAY)
AND bc.created_at >= a.entry_at
)
OR EXISTS (
SELECT
1
FROM
bomber.application ba
WHERE
ba.id = a.application_id
AND ba.finished_at > '%(begin_date)s'
AND ba.finished_at < date_add('%(begin_date)s', INTERVAL 1 DAY)
)
GROUP BY
1
""" % {'begin_date': begin_date}
data = run_all_sql(sql)
result = defaultdict(int)
for d in data:
result[d[0]] += d[1]
bomber_list = []
for key, value in result.items():
bomber_list.append(key)
(SummaryBomber.update(
unfollowed_call_cnt=SummaryBomber.new_case_call_cnt + value
).where(
SummaryBomber.time == begin_date,
SummaryBomber.bomber_id == key
)).execute()
    # Remaining bomber_ids default to new_case_call_cnt
update_sql = (SummaryBomber
.update(unfollowed_call_cnt=SummaryBomber.new_case_call_cnt)
.where(SummaryBomber.time == begin_date))
if bomber_list:
update_sql = update_sql.where(SummaryBomber.bomber_id
.not_in(bomber_list))
update_sql.execute()
return result
# Update summary with new data (computes the remaining summary_bomber metrics)
@action(MessageAction.UPDATE_SUMMARY_NEW)
def update_summary_new(payload, msg_id):
end_date = date.today()
begin_date = end_date - timedelta(days=1)
worker_log = (WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs'))
.where(WorkerLog.created_at >= end_date,
WorkerLog.action == 'UPDATE_SUMMARY_NEW')
.first())
if worker_log and worker_log.logs >= 5:
return
get_new_case_amount(begin_date, end_date)
get_kp_cleared(begin_date, end_date)
get_kp_today(begin_date, end_date)
get_ptp_cnt(begin_date, end_date)
get_ptp_call_cnt(begin_date, end_date)
get_new_case_cleared(begin_date, end_date)
get_new_case_call(begin_date, end_date)
get_calltime_avg(begin_date, end_date)
get_no_calltime_avg(begin_date, end_date)
get_calltime_sum(begin_date, end_date)
# -------------------------------- Cycle-level data --------------------------
def get_cycle_claimed(begin_date, end_date):
sql = """
select cycle,count(1)
from bomber.application where cycle in (1,2,3,4)
and (finished_at is null or (finished_at>'%s'))
and created_at>'2018-09-01'
group by 1
""" % begin_date
result = run_all_sql(sql)
return result
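# Convention for the cycle-level rows below: the cycle number itself is stored
# in SummaryBomber.bomber_id (note the paired `bomber_id == data[0]` and
# `cycle == data[0]` filters), so cycle aggregates live alongside per-bomber
# rows without a separate table.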
# Get cycle-level new-case counts and amounts
@time_logger
def cycle_new_case(begin_date, end_date, real_time_query=False):
sql = """
SELECT
1 AS cycle,
count( ba.id ),
sum( bo.principal_pending + late_fee_pending +
interest_pending ) AS pending
FROM
bomber.application ba
INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id
AND date( ba.created_at ) = bo.which_day_overdue
WHERE
ba.created_at > '%s'
AND ba.created_at < '%s'
UNION
SELECT
2 AS cycle,
count( 1 ),
sum( bo.principal_pending + late_fee_pending +
interest_pending ) AS pending
FROM
bomber.application ba
INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id
AND date( ba.c1b_entry ) = bo.which_day_overdue
WHERE
c1b_entry > '%s'
AND c1b_entry < '%s'
UNION
SELECT
3 AS cycle,
count( 1 ),
sum( bo.principal_pending + late_fee_pending +
interest_pending ) AS pending
FROM
bomber.application ba
INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id
AND date( ba.c2_entry ) = bo.which_day_overdue
WHERE
c2_entry > '%s'
AND c2_entry < '%s'
UNION
SELECT
4 AS cycle,
count( 1 ),
sum( bo.principal_pending + late_fee_pending +
interest_pending ) AS pending
FROM
bomber.application ba
INNER JOIN bill_java.overdue bo ON ba.id = bo.application_id
AND date( ba.c3_entry ) = bo.which_day_overdue
WHERE
c3_entry > '%s'
AND c3_entry < '%s'
""" % (begin_date, end_date, begin_date, end_date,
begin_date, end_date, begin_date, end_date)
all_datas = run_all_sql(sql)
if real_time_query:
return all_datas
for data in all_datas:
(SummaryBomber.update(
new_case_amount_sum=data[2],
new_case_cnt=data[1]
).where(
SummaryBomber.time == begin_date,
SummaryBomber.bomber_id == data[0],
SummaryBomber.cycle == data[0]
)).execute()
return all_datas
# New cases followed up on the day of entry (cycle level)
@time_logger
def get_cycle_new_case_call(begin_date, end_date, real_time_query=False):
sql = """
SELECT
1 AS cycle,
count( DISTINCT ba.id )
FROM
bomber.application ba
INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
AND date( ba.created_at ) = date( bc.created_at )
WHERE
ba.created_at > '%s'
AND ba.created_at < '%s'
UNION
SELECT
2 AS cycle,
count( DISTINCT ba.id )
FROM
bomber.application ba
INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
AND date( ba.c1b_entry ) = date( bc.created_at )
WHERE
ba.c1b_entry > '%s'
AND ba.c1b_entry < '%s'
UNION
SELECT
3 AS cycle,
count( DISTINCT ba.id )
FROM
bomber.application ba
INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
AND date( ba.c2_entry ) = date( bc.created_at )
WHERE
ba.c2_entry > '%s'
AND ba.c2_entry < '%s'
UNION
SELECT
4 AS cycle,
count( DISTINCT ba.id )
FROM
bomber.application ba
INNER JOIN bomber.call_actions bc ON ba.id = bc.application_id
AND date( ba.c3_entry ) = date( bc.created_at )
WHERE
ba.c3_entry > '%s'
AND ba.c3_entry < '%s'
""" % (begin_date, end_date, begin_date, end_date,
begin_date, end_date, begin_date, end_date)
cycle_datas = run_all_sql(sql)
if real_time_query:
return cycle_datas
for data in cycle_datas:
(SummaryBomber.update(
new_case_call_cnt=data[1]
).where(
SummaryBomber.time == begin_date,
SummaryBomber.cycle == data[0],
SummaryBomber.bomber_id == data[0]
)).execute()
return cycle_datas
def get_cycle_new_case_cleared(begin_date, end_date):
sql = """
SELECT
'1' AS cycle, count( DISTINCT id ),
sum( paid_amount ) AS pending
FROM
(SELECT ba.id, br.repay_at,
br.late_fee_part + br.principal_part AS paid_amount
FROM
bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.created_at ) = date( br.repay_at )
WHERE ba.created_at > '%s'
AND ba.created_at < '%s'
GROUP BY 1, 2 ) a
UNION
SELECT
'2' AS cycle, count( DISTINCT id ),
sum( paid_amount ) AS pending
FROM
(SELECT ba.id, br.repay_at,
br.late_fee_part + br.principal_part AS paid_amount
FROM
bomber.application ba
INNER JOIN bomber.repayment_log br ON ba.id = br.application_id
AND date( ba.c1b_entry ) = date( br.repay_at )
WHERE ba.c1b_entry > '%s'
AND ba.c1b_entry < '%s'
GROUP BY 1, 2) a
""" % (begin_date, end_date, begin_date, end_date)
cycle_cleared = run_all_sql(sql)
for i in cycle_cleared:
(SummaryBomber.update(
new_case_cleared_sum=i[2]
).where(
SummaryBomber.cycle == i[0],
SummaryBomber.bomber_id == i[0],
SummaryBomber.time == begin_date
)).execute()
def get_cycle_case_made_cnt(begin_date, end_date):
sql = """
select cycle,count(distinct application) from (
select distinct cycle,application from bomber.auto_call_list_record
where created_at >= '%s'
and created_at < '%s'
and called_counts <> 0
and cycle in (1,2,3,4)
union
select distinct cycle,application_id from bomber.call_actions
where created_at >= '%s'
and created_at < '%s'
and cycle in (1,2,3,4)
) c
group by 1
""" % (begin_date, end_date, begin_date, end_date)
case_made_datas = run_all_sql(sql)
for case_made_data in case_made_datas:
(SummaryBomber.update(
case_made_cnt=case_made_data[1]
).where(
SummaryBomber.time == begin_date,
SummaryBomber.cycle == case_made_data[0],
SummaryBomber.bomber_id == case_made_data[0]
)).execute()
# Aggregate the data at the cycle dimension
@action(MessageAction.SUMMARY_NEW_CYCLE)
def summary_new_cycle(payload, msg_id):
end_date = date.today()
begin_date = end_date - timedelta(days=1)
worker_log = (WorkerLog.select(fn.COUNT(WorkerLog.action).alias('logs'))
.where(WorkerLog.created_at >= end_date,
WorkerLog.action == 'SUMMARY_NEW_CYCLE')
.first())
if worker_log and worker_log.logs >= 5:
return
cycle_datas = (SummaryBomber
.select(fn.SUM(SummaryBomber.new_case_amount_sum)
.alias('new_case_amount_sum'),
fn.SUM(SummaryBomber.new_case_cleared_sum)
.alias('new_case_cleared_sum'),
fn.SUM(SummaryBomber.case_made_cnt)
.alias('case_made_cnt'),
fn.SUM(SummaryBomber.case_connect_cnt)
.alias('case_connect_cnt'),
fn.SUM(SummaryBomber.promised_cnt)
.alias('promised_cnt'),
fn.SUM(SummaryBomber.promised_amount)
.alias('promised_amount'),
fn.SUM(SummaryBomber.cleared_cnt)
.alias('cleared_cnt'),
fn.SUM(SummaryBomber.cleared_amount)
.alias('cleared_amount'),
fn.SUM(SummaryBomber.new_case_cnt)
.alias('new_case_cnt'),
fn.SUM(SummaryBomber.new_case_call_cnt)
.alias('new_case_call_cnt'),
fn.SUM(SummaryBomber.unfollowed_cnt)
.alias('unfollowed_cnt'),
fn.SUM(SummaryBomber.unfollowed_call_cnt)
.alias('unfollowed_call_cnt'),
fn.SUM(SummaryBomber.call_cnt).alias('call_cnt'),
fn.SUM(SummaryBomber.sms_cnt).alias('sms_cnt'),
fn.SUM(SummaryBomber.call_connect_cnt)
.alias('call_connect_cnt'),
fn.SUM(SummaryBomber.ptp_today_cnt)
.alias('ptp_today_cnt'),
fn.SUM(SummaryBomber.ptp_today_call_cnt)
.alias('ptp_today_call_cnt'),
fn.SUM(SummaryBomber.ptp_next_cnt)
.alias('ptp_next_cnt'),
fn.SUM(SummaryBomber.ptp_next_call_cnt)
.alias('ptp_next_call_cnt'),
fn.SUM(SummaryBomber.KP_cleared_cnt)
.alias('KP_cleared_cnt'),
fn.SUM(SummaryBomber.KP_today_cnt)
.alias('KP_today_cnt'),
fn.SUM(SummaryBomber.work_ind).alias('work_ind'),
fn.SUM(SummaryBomber.calltime_sum)
.alias('calltime_sum'),
fn.SUM(SummaryBomber.calltime_case_sum)
.alias('calltime_case_sum'),
fn.SUM(SummaryBomber.calltime_case_cnt)
.alias('calltime_case_cnt'),
                             fn.SUM(SummaryBomber.calltime_no_case_sum)
                             .alias('calltime_no_case_sum'),
                             fn.SUM(SummaryBomber.calltime_no_case_cnt)
                             .alias('calltime_no_case_cnt'),
                             fn.SUM(SummaryBomber.work_time_sum)
                             .alias('work_time_sum'),  # read by create() below
                             SummaryBomber.cycle.alias('cycle'))
.where(SummaryBomber.time == begin_date,
SummaryBomber.cycle << Cycle.values())
.group_by(SummaryBomber.cycle))
for cycle_data in cycle_datas:
SummaryBomber.create(
bomber_id=cycle_data.cycle,
time=begin_date,
cycle=cycle_data.cycle,
            new_case_amount_sum=cycle_data.new_case_amount_sum,  # new-case amount (as above)
            new_case_cleared_sum=cycle_data.new_case_cleared_sum,  # new-case repayment (as above)
            new_case_cleard_rate=0,
            case_made_cnt=cycle_data.case_made_cnt,  # cases dialed
            case_made_rate=0,
            case_connect_cnt=cycle_data.case_connect_cnt,  # cases connected
            case_connect_rate=0,
            promised_cnt=cycle_data.promised_cnt,  # PTP case count
            promised_amount=cycle_data.promised_amount,  # PTP amount
            cleared_cnt=cycle_data.cleared_cnt,  # repaid-case count
            cleared_amount=cycle_data.cleared_amount,  # repaid amount
            new_case_cnt=cycle_data.new_case_cnt,  # new-case count (cycles 1/2 computed later)
            new_case_call_cnt=cycle_data.new_case_call_cnt,  # new-case call count (as above)
            unfollowed_cnt=cycle_data.unfollowed_cnt,
            unfollowed_call_cnt=cycle_data.unfollowed_call_cnt,
            call_cnt=cycle_data.call_cnt,  # calls made
            sms_cnt=cycle_data.sms_cnt,  # SMS sent
            call_connect_cnt=cycle_data.call_connect_cnt,  # calls connected
            calltime_case_avg=0,  # avg duration per connected case (all rates computed later)
            ptp_today_cnt=cycle_data.ptp_today_cnt,  # PTPs due today
            ptp_today_call_cnt=cycle_data.ptp_today_call_cnt,  # today's due PTPs followed up
            ptp_next_cnt=cycle_data.ptp_next_cnt,  # PTPs due the next day
            ptp_next_call_cnt=cycle_data.ptp_next_call_cnt,  # next-day due PTPs followed up
            KP_cleared_cnt=cycle_data.KP_cleared_cnt,  # KP repaid-case count
            KP_today_cnt=cycle_data.KP_today_cnt,  # cases under PTP today
            KP_cleared_rate=0,
            work_ind=cycle_data.work_ind,  # whether the bomber worked that day
            calltime_sum=cycle_data.calltime_sum,  # total call duration
            calltime_case_sum=cycle_data.calltime_case_sum,
            calltime_case_cnt=cycle_data.calltime_case_cnt,
            calltime_no_case_sum=cycle_data.calltime_no_case_sum,
            calltime_no_case_cnt=cycle_data.calltime_no_case_cnt,
            work_time_sum=cycle_data.work_time_sum  # working hours
)
cycle_claimed = get_cycle_claimed(begin_date, end_date)
for claimed in cycle_claimed:
(SummaryBomber.update(
claimed_cnt=claimed[1]
).where(
SummaryBomber.time == begin_date,
SummaryBomber.cycle == claimed[0],
SummaryBomber.bomber_id == claimed[0]
)).execute()
    # New-case counts and amounts
    cycle_new_case(begin_date, end_date)
    # New-case follow-up counts
    get_cycle_new_case_call(begin_date, end_date)
    # New-case repayment amounts
    get_cycle_new_case_cleared(begin_date, end_date)
    # Recompute the cycle's dialed-case count (predictive outbound calls all count as dialed)
    get_cycle_case_made_cnt(begin_date, end_date)
    # Compute the derived metrics (the various rates)
    all_datas = (SummaryBomber.filter(SummaryBomber.time == begin_date))
for data in all_datas:
cl_rat = (data.new_case_cleared_sum / data.new_case_amount_sum
if data.new_case_amount_sum else 0) * 100
data.new_case_cleard_rate = cl_rat
case_made_rate = (data.case_made_cnt / data.claimed_cnt
if data.claimed_cnt else 0) * 100
data.case_made_rate = case_made_rate
case_connect_rate = (data.case_connect_cnt / data.case_made_cnt
if data.case_made_cnt else 0) * 100
data.case_connect_rate = case_connect_rate
calltime_case_avg = (data.calltime_case_sum / data.calltime_case_cnt
if data.calltime_case_cnt else 0)
data.calltime_case_avg = calltime_case_avg
calltime_no_case_avg = (data.calltime_no_case_sum /
data.calltime_no_case_cnt
if data.calltime_no_case_cnt else 0)
data.calltime_no_case_avg = calltime_no_case_avg
KP_cleared_rate = (data.KP_cleared_cnt / data.KP_today_cnt
if data.KP_today_cnt else 0) * 100
data.KP_cleared_rate = KP_cleared_rate
data.save()
@action(MessageAction.MODIFY_BILL)
def modify_bill(payload, msg_id):
application_id = payload.get('external_id')
principal_paid = Decimal(payload.get('principal_paid', 0))
late_fee = Decimal(payload.get('late_fee', 0))
late_fee_paid = Decimal(payload.get('late_fee_paid', 0))
overdue_days = payload.get('overdue_days')
sub_bill_id = payload.get('bill_sub_id')
partner_bill_id = payload.get('partner_bill_id')
if not application_id:
logging.warning('payload has no external_id. {}'.format(str(payload)))
return
if not overdue_days:
logging.info("application %s not overdue" % application_id)
return
item = (OldLoanApplication
.get_or_none(OldLoanApplication.application_id ==
application_id))
if item:
start_old_application(item, cancel=True)
overdue_bill = (OverdueBill.select()
.where(OverdueBill.external_id == application_id,
OverdueBill.sub_bill_id == sub_bill_id)
.first())
application = (Application.filter(Application.id == application_id)
.first())
if not overdue_bill:
if not application:
logging.info('application %s not in bomber, let it in bomber now',
application_id)
send_to_default_q(MessageAction.APPLICATION_BOMBER, {
'id': application_id,
'bill_sub_id': sub_bill_id
})
return
else:
application = (Application
.filter(Application.id == overdue_bill.collection_id)
.first())
with db.atomic():
application.status = ApplicationStatus.UNCLAIMED.value
application.finished_at = None
application.paid_at = None
application.save()
if overdue_bill:
overdue_bill.status = ApplicationStatus.UNCLAIMED.value
overdue_bill.finished_at = None
overdue_bill.save()
repayment = (RepaymentLog.update(no_active = 1)
.where(RepaymentLog.application == application.id,
RepaymentLog.partner_bill_id == partner_bill_id,
RepaymentLog.overdue_bill_id == overdue_bill.id))
else:
repayment = (RepaymentLog.update(no_active=1)
.where(RepaymentLog.application == application.id,
RepaymentLog.partner_bill_id == partner_bill_id))
repayment_num = repayment.execute()
logging.info("modify_bill no active repayment count:%s" % repayment_num)
if not application.latest_bomber_id:
return
bomber_id = application.latest_bomber_id
(DispatchAppHistory.update(
out_at=None,
out_overdue_days=overdue_days,
out_principal_pending=(application.amount - principal_paid),
out_late_fee_pending=(late_fee - late_fee_paid)
).where(
DispatchAppHistory.application == application.id,
DispatchAppHistory.bomber_id == bomber_id)).execute()
# Get the ids of changed bombers
def get_change_bomber():
cycle_role_map = {5: Cycle.C1B.value, 6: Cycle.C2.value, 8: Cycle.C3.value}
result = {}
bomber_logs = (BomberLog.select(BomberLog.bomber_id,
BomberLog.role_id,
BomberLog.operation,
Bomber.group_id)
.join(Bomber, JOIN_INNER,
on=BomberLog.bomber_id == Bomber.id)
.where(fn.DATE(BomberLog.created_at) == date.today(),
                          BomberLog.role_id << list(cycle_role_map.keys()),  # C1B, C2, C3
                          BomberLog.operation << (0, 1),  # 0 = delete, 1 = create, 3 = modify
                          Bomber.instalment == 0)  # staff on non-installment (single-period) cases
.dicts())
for b_log in bomber_logs:
cycle = cycle_role_map.get(b_log["role_id"])
group_id = b_log["group_id"]
if cycle in result:
if group_id not in result[cycle]:
result[cycle][group_id] = {
"cycle": cycle,
"del_ids": [],
"new_ids": []
}
else:
result[cycle] = {group_id: {
"cycle": cycle,
"del_ids": [],
"new_ids": []}
}
if b_log["operation"] == 0:
result[cycle][group_id]["del_ids"].append(b_log["bomber_id"])
    # a non-empty result means there were staffing changes
if result:
bombers = (Bomber.select()
.where(Bomber.role.in_(list(cycle_role_map.keys())),
Bomber.is_del == 0,
Bomber.instalment == 0))
for b in bombers:
cycle_result = result.get(cycle_role_map[b.role_id], {})
role_result = cycle_result.get(b.group_id)
if not role_result:
continue
role_result["new_ids"].append(b.id)
        result_list = []
        for cycle, group_dict in result.items():
            result_list.extend(list(group_dict.values()))
        return result_list
return []
# Get all applications
def get_total_application(cycle, del_ids, new_ids,
type=ApplicationType.CASH_LOAN.value):
bomber_list = del_ids + new_ids
all_apps = (Application.select(Application.id,
Application.latest_bomber_id.alias(
"latest_bomber_id"),
Application.promised_date,
Bomber.partner_id.alias("partner_id"))
.join(Bomber, JOIN_LEFT_OUTER,
Application.latest_bomber == Bomber.id)
.where(Application.cycle == cycle,
Application.status != ApplicationStatus.REPAID.value,
Application.latest_bomber_id << bomber_list,
Application.type == type)
.order_by(Application.id)
.dicts())
return all_apps
# Build the per-bomber allocation list, i.e. the average number of cases per bomber
def get_average_number(app_nums, bomber_nums):
average = app_nums // bomber_nums
remainder = app_nums % bomber_nums
average_list = [average for i in range(bomber_nums)]
if remainder == 0:
return average_list
for i in range(remainder):
average_list[i] += 1
    # Shuffle so the extra cases do not always go to the first few bombers
random.shuffle(average_list)
return average_list
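# Example (doctest-style, ignoring the shuffle order):
#     >>> sorted(get_average_number(10, 3), reverse=True)
#     [4, 3, 3]
# 10 cases over 3 bombers: each gets 10 // 3 == 3, and the remainder of 1 is
# handed to one bomber picked at random by the shuffle.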
# Group and count applications
def classified_statistic_apps(apps):
result = {}
    # Group the data by the case's bomber_id
for app in apps:
        # Separate cases with a PTP from those without
latest_bomber_id = app["latest_bomber_id"]
if latest_bomber_id not in result:
result[latest_bomber_id] = {
"bid":latest_bomber_id,
"p_list": [],
"np_list": [],
"partner_id": app["partner_id"] if app["partner_id"] else "",
}
promised_date = app.get("promised_date")
if not promised_date or promised_date.date() < date.today():
result[latest_bomber_id]['np_list'].append(app["id"])
else:
result[latest_bomber_id]['p_list'].append(app["id"])
return result
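# Shape of the mapping returned above (hypothetical ids):
#     {11: {"bid": 11, "p_list": [1001], "np_list": [1002, 1003],
#           "partner_id": ""}}
# i.e. bomber 11 holds one case with a live PTP and two without.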
# Collect surplus cases and compute how many cases each bomber needs
def get_surplus_application(new_ids, del_ids, average_nums, classified_apps):
surplus_apps = []
    # If the id is in the deletion list, redistribute all of its cases
for del_id in del_ids:
del_res = classified_apps.get(del_id,{})
p_list = del_res.get("p_list", [])
np_list = del_res.get("np_list", [])
del_res["need_num"] = -(len(p_list) + len(np_list))
del_res["to_list"] = np_list + p_list
surplus_apps.extend(p_list)
surplus_apps.extend(np_list)
    # Count each bomber's PTP and non-PTP cases, and how many more cases they need
for index, bid in enumerate(new_ids):
average = average_nums[index]
bomber_app = classified_apps.get(bid)
if not bomber_app:
            # Look up the partner_id
bomber = (Bomber.select(Bomber.partner_id)
.where(Bomber.id == bid)
.first())
bomber_app = {
"bid": bid,
"p_list": [],
"p_num": 0,
"np_list": [],
"np_num": 0,
"need_num": average,
"partner_id": bomber.partner_id if bomber else ''
}
classified_apps[bid] = bomber_app
else:
p_num = len(bomber_app["p_list"])
np_num = len(bomber_app["np_list"])
            # If PTP cases already exceed the average, push all remaining non-PTP cases to the surplus list
if p_num > average:
bomber_app["need_num"] = - np_num
else:
bomber_app["need_num"] = average - (p_num + np_num)
bomber_app["p_num"] = p_num
bomber_app["np_num"] = np_num
        # Move surplus cases into the surplus list
        if bomber_app["need_num"] < 0:
            # Shuffle so the dispatched cases' overdue days stay as even as possible
            random.shuffle(bomber_app["np_list"])
res_over = bomber_app["np_list"][:-bomber_app["need_num"]]
bomber_app["to_list"] = res_over
surplus_apps.extend(res_over)
    # Sort by need_num
classified_apps_list = sorted(classified_apps.values(),
key=lambda x:x["need_num"],
reverse=True)
return surplus_apps, classified_apps_list
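# Semantics of "need_num" computed above: a positive value means the bomber
# should receive that many cases from the surplus pool; a negative value means
# the bomber holds -need_num cases too many, and those ids were already pushed
# into surplus_apps (deleted bombers give up everything, so their need_num is
# minus their whole caseload).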
# Update the database and dispatch the cases
def update_applications(surplus_apps, classified_apps, cycle):
    # Shuffle the surplus cases
random.shuffle(surplus_apps)
for app in classified_apps:
status = 0
try:
if app["need_num"] > 0:
from_list = surplus_apps[:app["need_num"]]
                # Remove the taken elements from surplus_apps
for i in from_list: surplus_apps.remove(i)
app["from_list"] = from_list
with db.atomic():
q = Application.update(
{Application.latest_bomber_id: app["bid"]}).where(
Application.id.in_(from_list))
q.execute()
                # Record case entry for the dispatch
in_record_params = {
"dest_bomber_id": app["bid"],
"application_ids": from_list,
"dest_partner_id": app["partner_id"],
"cycle": cycle,
}
new_in_record(**in_record_params)
status = 1
elif app["need_num"] < 0:
            # Record case exit for the dispatch
out_record_params = {
"src_bomber_id": app["bid"],
"application_ids": app["to_list"]
}
new_out_record(**out_record_params)
status = 1
else:
status = 1
        except Exception as e:
            logging.error("dispatch error, params:%s, error:%s" % (app, str(e)))
        # Record the operation log
log_params = {
"bomber_id": app["bid"],
"form_ids": json.dumps(app.get("from_list", [])),
"to_ids": json.dumps(app.get("to_list", [])),
"need_num": app.get("need_num"),
"np_ids": json.dumps(app.get("np_list", [])),
"p_ids": json.dumps(app.get("p_list", [])),
"status": status
}
DispatchAppLogs.create(**log_params)
return classified_apps
# Staffing changes: dispatch installment collection cases
def get_instalment_change_bomber():
result ={}
bomber_logs = (BomberLog.select(BomberLog.bomber_id,
BomberLog.operation,
Bomber.instalment,
Bomber.group_id)
.join(Bomber, JOIN_INNER,
on=BomberLog.bomber_id == Bomber.id)
.where(fn.DATE(BomberLog.created_at) == date.today(),
BomberLog.operation << [0,1],
Bomber.instalment > 0)
.dicts())
for bl in bomber_logs:
cycle = bl["instalment"]
group_id = bl["group_id"]
if cycle not in result:
result[cycle] = {group_id: {
"cycle": cycle,
"del_ids": [],
"new_ids": []
}}
else:
if group_id not in result[cycle]:
result[cycle][group_id] = {
"cycle": cycle,
"del_ids": [],
"new_ids": []}
if bl["operation"] == 0:
result[cycle][group_id]["del_ids"].append(bl["bomber_id"])
if result:
instalments = list(result.keys())
bombers = (Bomber.select()
.where(Bomber.instalment << instalments,
Bomber.is_del == 0))
for b in bombers:
cycle_result = result.get(b.instalment, {})
group_result = cycle_result.get(b.group_id)
if not group_result:
continue
group_result["new_ids"].append(b.id)
result_list = []
for cycle,group_dict in result.items():
result_list.extend(list(group_dict.values()))
return result_list
return []
def instalment_update_applications(surplus_apps, classified_apps, cycle):
end = 0
for app in classified_apps:
if app["need_num"] <= 0:
continue
start = end
end = start + app["need_num"]
aids = surplus_apps[start:end]
app["from_list"] = aids
status = 0
with db.atomic():
q = (Application.update(last_bomber = Application.latest_bomber,
latest_bomber = app["bid"],
ptp_bomber = None)
.where(Application.id << aids)
.execute())
            # Entry and exit records
record_param = {
"cycle": cycle,
"application_ids": aids,
"dest_bomber_id": app["bid"],
"dest_partner_id": app["partner_id"],
}
out_and_in_record_instalment(**record_param)
status = 1
        # Record the operation log
log_params = {
"bomber_id": app["bid"],
"form_ids": json.dumps(app.get("from_list", [])),
"to_ids": json.dumps(app.get("to_list", [])),
"need_num": app.get("need_num"),
"np_ids": json.dumps(app.get("np_list", [])),
"p_ids": json.dumps(app.get("p_list", [])),
"status": status
}
DispatchAppLogs.create(**log_params)
return classified_apps
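# The slicing above hands out surplus cases as consecutive windows. A
# hypothetical run: with surplus_apps == [a, b, c, d, e] and two bombers whose
# need_num values are 3 and 2, the first gets [a, b, c] (start=0, end=3) and
# the second gets [d, e] (start=3, end=5).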
# Execute the dispatch triggered by staffing changes
def change_bomber_dispatch_apps(change_bombers,
type=ApplicationType.CASH_LOAN.value):
if not change_bombers:
return
for bombers in change_bombers:
del_ids = bombers.get("del_ids", [])
new_ids = bombers.get("new_ids", [])
cycle = bombers.get("cycle")
if not all([new_ids, cycle]):
            logging.info(
                "invalid dispatch info, bomber:%s, type:%s" % (bombers, type))
continue
        # Get all applications
apps = get_total_application(cycle, del_ids, new_ids, type)
if not apps:
            logging.info(
                "dispatch found no matching applications, bomber:%s, type:%s"
                % (bombers, type))
continue
        # Get the per-bomber allocation list
average_nums = get_average_number(len(apps), len(new_ids))
        # Group and count the applications
classified_apps = classified_statistic_apps(apps)
        # Compute each bomber's needed cases and the surplus
        surplus_apps, classified_apps = get_surplus_application(new_ids,
                                                                del_ids,
                                                                average_nums,
                                                                classified_apps)
        # Dispatch the cases and update the database
        if type == ApplicationType.CASH_LOAN.value:
            result = update_applications(surplus_apps, classified_apps, cycle)
        elif type == ApplicationType.CASH_LOAN_STAGING.value:
            result = instalment_update_applications(surplus_apps,
                                                    classified_apps,
                                                    cycle)
        else:
            logging.info("staffing-change dispatch, unknown type:%s" % type)
            continue
        logging.info("staffing-change dispatch result:%s, type:%s" % (result, type))
# Dispatch cases when bomber staffing changes
@action(MessageAction.BOMBER_CHANGE_DISPATCH_APPS)
def bomber_dispatch_applications(payload, msg_id):
    # Detect staffing changes from today's bomber logs; deletions are recorded in bomber_log
change_bombers = get_change_bomber()
instalment_change_bombers = get_instalment_change_bomber()
params = {ApplicationType.CASH_LOAN.value: change_bombers,
ApplicationType.CASH_LOAN_STAGING.value: instalment_change_bombers}
for type,bombers in params.items():
change_bomber_dispatch_apps(change_bombers=bombers,type=type)
@action(MessageAction.REPAIR_BOMBER)
def repair_bomber(payload, msg_id):
app_mobile = payload['app_mobile']
username = payload.get('user_name')
logging.info('start repair bomber, number: %s' % app_mobile)
    # Get the EC (emergency contact) the user provided, check whether the number is in collection, and store the relationship
if 'mobile_no' in payload and payload['mobile_no']:
mobile = number_strip(str(payload['mobile_no']))[:64]
name = payload.get('mobile_name')
application = Application.filter(Application.user_mobile_no == mobile)
if application.exists():
repair_contact(app_mobile, application, username)
add_relationship(app_mobile, mobile, username, name)
if 'tel_no' in payload and payload['tel_no']:
tel_no = number_strip(str(payload['tel_no']))[:64]
name = payload.get('tel_name')
application = Application.filter(Application.user_mobile_no == tel_no)
if application.exists():
repair_contact(app_mobile, application, username)
add_relationship(app_mobile, tel_no, username, name)
def repair_contact(number, application, name):
    # If the provided EC has been overdue before, add the number to contacts
application = application.first()
contact = (Contact
.filter(Contact.user_id == application.user_id,
Contact.number == number))
if not contact.exists():
Contact.create(
user_id=application.user_id,
name=name,
number=number,
relationship=Relationship.FAMILY.value,
source='repair ec',
real_relationship=Relationship.FAMILY.value
)
logging.info('add repair contact success, number: %s' % number)
def add_relationship(number, ec_number, username, name):
    # Store the relationship
query = (TotalContact
.objects(src_number=str(number),
dest_number=ec_number,
source=20,
is_calc=False
)
.first())
if not query:
TotalContact(
src_number=str(number),
src_name=username,
dest_number=ec_number,
dest_name=name,
source=20).save()
logging.info('add relationship success, number: %s' % number)
# Get the time window to summarize
def get_summary_daily_time():
mid_time_t1 = datetime.strptime('12:40:00', '%H:%M:%S')
mid_time_t2 = datetime.strptime('17:20:00', '%H:%M:%S')
now_date = datetime.now()
now_date_time = now_date.time()
today_str = str(now_date.date())
if now_date_time < mid_time_t1.time():
yes_date = now_date - timedelta(days=1)
yes_date_str = str(yes_date.date())
begin_str = yes_date_str + ' 17:20:00'
end_str = today_str + ' 00:00:00'
elif mid_time_t1.time() <= now_date_time < mid_time_t2.time():
begin_str = today_str + ' 00:00:00'
end_str = today_str + ' 12:40:00'
else:
begin_str = today_str + ' 12:40:00'
end_str = today_str + ' 17:20:00'
begin_time = datetime.strptime(begin_str, '%Y-%m-%d %H:%M:%S')
end_time = datetime.strptime(end_str, '%Y-%m-%d %H:%M:%S')
    # Record which day the summary covers
summary_datetime = now_date-timedelta(minutes=30)
summary_date = summary_datetime.date()
return begin_time, end_time, summary_date
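# The three windows selected above, by wall-clock time of the run:
#   before 12:40      -> yesterday 17:20:00 .. today 00:00:00
#   12:40 .. 17:20    -> today 00:00:00 .. today 12:40:00
#   after 17:20       -> today 12:40:00 .. today 17:20:00
# summary_date is taken 30 minutes back so the after-midnight run is
# attributed to the previous day.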
# Refresh the day's data at 12:40, at 17:20, and after midnight
@action(MessageAction.SUMMARY_DAILY)
def summary_daily_data(payload, msg_id):
begin_time, end_time, summary_date = get_summary_daily_time()
call_actions = (CallActionsR.select(CallActionsR.id,
CallActionsR.bomber_id,
CallActionsR.application_id,
CallActionsR.promised_date,
CallActionsR.cycle,
CallActionsR.name,
CallActionsR.number)
.where(CallActionsR.created_at >= begin_time,
CallActionsR.created_at < end_time,
CallActionsR.type << (0,1)))
summary_dailys = {}
for call in call_actions:
if call.bomber_id not in summary_dailys:
summary_dailys[call.bomber_id] = {'ptp_cnt': 0,
'call_cnt': 0,
'cycle': call.cycle,
'repayment': 0,
'bomber_id': call.bomber_id,
'summary_date':str(summary_date)}
        # C2/C3 PTP cases produce an extra row without number and name
if call.name and call.number:
summary_dailys[call.bomber_id]['call_cnt'] += 1
if call.promised_date:
summary_dailys[call.bomber_id]['ptp_cnt'] += 1
    # Get repayment info
C1_sql = """
SELECT a.current_bomber_id,
sum(principal_part+late_fee_part) as pay_amount,a.cycle
from
(select a.cycle,a.current_bomber_id,b.username,a.principal_part,
a.late_fee_part,a.application_id,a.repay_at
FROM bomber.repayment_log a ,bomber.bomber b
WHERE a.repay_at >= '%s' AND a.repay_at <'%s'
AND a.current_bomber_id !=''
AND a.current_bomber_id = b.id
and b.role_id in (1,2,4,5)
and principal_part+late_fee_part>0
group by 6,7) a
GROUP BY a.cycle,a.current_bomber_id
""" % (begin_time, end_time)
C1_repayment = run_all_sql(C1_sql)
other_sql = """
select current_bomber_id,sum(pay_amount) as pay_amount,cycle
from (
select application_id,current_bomber_id,pay_amount,repay_at,cycle
from (
select br.application_id,br.current_bomber_id,
br.principal_part+br.late_fee_part as pay_amount,br.repay_at,
br.cycle
from bomber.repayment_log br
left join bomber.bomber bb on br.current_bomber_id=bb.id
where exists (select 1 from bomber.bombing_history bb
where br.current_bomber_id=bb.bomber_id
and br.application_id=bb.application_id
and bb.created_at<br.repay_at
and (bb.promised_date is not null
or bb.promised_amount is not null))
and br.repay_at >= '%s'
and br.repay_at < '%s'
and bb.role_id in (3,6,7,8,9)
and br.principal_part+br.late_fee_part > 0
group by 1,4
) a
group by 1,4) b
group by 1
""" % (begin_time, end_time)
other_repayment = run_all_sql(other_sql)
all_repayment = C1_repayment + other_repayment
for res in all_repayment:
bomber_id,pay_amount,cycle = res
if bomber_id in summary_dailys:
summary_dailys[bomber_id]['repayment'] += pay_amount
else:
summary_dailys[bomber_id] = {'ptp_cnt': 0,
'call_cnt': 0,
'cycle': cycle,
'repayment': pay_amount,
'bomber_id': bomber_id,
'summary_date': str(summary_date)
}
insert_values = list(summary_dailys.values())
if insert_values:
SummaryDaily.insert_many(insert_values).execute()
# Get all unfinished cases in this cycle
def get_cycle_all_no_paid_app(cycle, type=None):
apps = (Application
.select(Application.id,
Application.latest_bomber_id,
Application.ptp_bomber,
Application.promised_date,
Application.cycle)
.where(Application.cycle == cycle,
Application.status != ApplicationStatus.REPAID.value,
Application.type == type)
.dicts())
dis_app_ids = [a['id'] for a in apps]
    # Update the status of these cases in dispatch_app
with db.atomic():
for idx in range(0, len(dis_app_ids), 1000):
ids = dis_app_ids[idx:idx + 1000]
q = (DispatchApp.update(status = DisAppStatus.ABNORMAL.value)
.where(DispatchApp.application << ids)
.execute())
return apps
# Organize apps by bomber_id
def get_app_logs(apps):
app_logs = {}
all_np_apps = []
all_p_apps = []
for a in apps:
latest_bomber = a["latest_bomber"]
        # fall back to the cycle number when the case has no latest_bomber
latest_bomber = a["cycle"] if not latest_bomber else latest_bomber
if latest_bomber in app_logs:
app_logs[latest_bomber]["to_ids"].append(a["id"])
else:
app_logs[latest_bomber] = {"bomber_id": latest_bomber,
"to_ids": [a["id"]],
"np_ids": [],
"p_ids": []}
if (a["promised_date"] and
a["promised_date"].date() >= datetime.now().date()):
app_logs[latest_bomber]["p_ids"].append(a["id"])
all_p_apps.append(a)
else:
app_logs[latest_bomber]["np_ids"].append(a["id"])
all_np_apps.append(a)
return app_logs, all_np_apps, all_p_apps
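# Shape of app_logs built above (hypothetical ids): each bomber (or the bare
# cycle number when latest_bomber is empty) maps to
#     {"bomber_id": 11, "to_ids": [1001, 1002], "np_ids": [1002], "p_ids": [1001]}
# while all_np_apps / all_p_apps keep the full application dicts for dispatch.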
# Month-end dispatch to outsourced (partner) staff
def month_dispatch_app_out_partner(cycle, apps, app_logs, np_apps):
    # Shuffle the cases
    apps = list(apps)
    np_apps = list(np_apps)
    random.shuffle(np_apps)
    apps_len = len(apps)
    np_apps_len = len(np_apps)
    end = 0
    all_app_percentage = 0
    # Get all partners for this cycle
    partners = (Partner.select()
                .where(Partner.cycle == cycle,
                       Partner.status == PartnerStatus.NORMAL.value))
    for p in partners:
        all_app_percentage += p.app_percentage
for partner in partners:
        # Get the partner's bombers
bombers = (Bomber.select()
.where(Bomber.partner == partner.id,
Bomber.is_del == 0,
Bomber.status != BomberStatus.OUTER_LEADER.value))
bids = {b.id:b for b in bombers}
if len(bids) == 0:
logging.info("cycle:%s,partner:%s,no bomber"%(cycle, partner.id))
continue
        start = end
        if np_apps_len >= int(apps_len * all_app_percentage):
            end = start + int(apps_len * partner.app_percentage)
        else:
            end = (start +
                   int(np_apps_len * partner.app_percentage / all_app_percentage))
        # All cases this partner team should receive
        partner_app = np_apps[start:end]
        dispatch_apps_to_bomber(cycle, partner_app, bids, app_logs)
    # Remaining cases go to in-house staff
    np_apps = np_apps[end:]
    return np_apps
# Dispatch to in-house staff
def month_dispatch_app_inner(cycle,np_apps,app_logs,p_apps):
sys_cycle = {1: 'AB_TEST_C1A',
2: 'AB_TEST_C1B',
3: 'AB_TEST_C2',
4: 'AB_TEST_C3'}
    # Get the in-house staff
sys_config = SystemConfig.get(SystemConfig.key == sys_cycle[cycle])
sys_values = json.loads(sys_config.value)
bombers = (Bomber.select().where(Bomber.id << sys_values,
Bomber.is_del == 0))
if cycle in (Cycle.C1B.value, Cycle.C2.value, Cycle.C3.value):
bombers = bombers.where(Bomber.instalment == 0)
bids = {b.id:b for b in bombers}
    # C1A cases without a PTP go into the auto-call queue
if cycle == Cycle.C1A.value:
np_ids = [a["id"] for a in np_apps]
        # Update the cases without a PTP
np = (Application
.update(status = ApplicationStatus.PROCESSING.value,
ptp_bomber = None,
latest_bomber = None)
.where(Application.id << np_ids)
.execute())
bomber_app_logs = app_logs.get(cycle, {})
        # Month-end dispatch: auto-call cases also need entry and exit records
out_param = {
"application_ids": bomber_app_logs.get("to_ids", []),
"month_dispatch": 1,
"src_bomber_id": cycle,
}
new_out_record(**out_param)
in_param = {
"cycle": cycle,
"application_ids": np_ids,
"dest_bomber_id": cycle
}
new_in_record(**in_param)
bomber_app_logs["need_num"] = len(np_apps)
bomber_app_logs["form_ids"] = np_ids
bomber_app_logs["status"] = 1
else:
dispatch_apps_to_bomber(cycle, np_apps, bids, app_logs, False)
dispatch_apps_to_bomber(cycle, p_apps, bids, app_logs, False)
# Dispatch cases to bombers
def dispatch_apps_to_bomber(cycle,apps,bids,app_logs,out_partner=True,
type=ApplicationType.CASH_LOAN.value):
apps = list(apps)
random.shuffle(apps)
    # Compute how many cases each bomber should get
bids_list = list(bids.keys())
if len(bids_list) <= 0:
logging.info("get_dispatch_app_to_bomber no bids")
return
average_num = get_average_number(len(apps), len(bids_list))
bomber_end = 0
with db.atomic():
for index, bid in enumerate(bids_list):
current_bomber = bids.get(bid)
bomber_app_logs = app_logs.get(bid, {})
bomber_start = bomber_end
bomber_end = bomber_start + average_num[index]
bomber_apps = apps[bomber_start:bomber_end]
from_p, from_np, from_ids,status = [], [], [], 0
            # Split each bomber's cases into PTP and non-PTP
for ba in bomber_apps:
promised_date = ba.get("promised_date")
from_ids.append(ba["id"])
if promised_date and promised_date.date() >= date.today():
from_p.append(ba["id"])
else:
from_np.append(ba["id"])
app_status = ApplicationStatus.AB_TEST.value
            # In-house C1A PTP cases need a special status
if (cycle == Cycle.C1A.value and not out_partner
and type == ApplicationType.CASH_LOAN.value):
app_status = ApplicationStatus.PROCESSING.value
if from_p:
p = (Application
.update(ptp_bomber=bid,
latest_bomber=bid,
status=app_status)
.where(Application.id << from_p)
.execute())
p_ids = bomber_app_logs.get("p_ids", []) + from_p
bomber_app_logs["p_ids"] = p_ids
if from_np:
np = (Application
.update(latest_bomber=bid,
ptp_bomber=None,
status=ApplicationStatus.AB_TEST.value)
.where(Application.id << from_np)
.execute())
np_ids = bomber_app_logs.get("np_ids", []) + from_np
bomber_app_logs["np_ids"] = np_ids
in_param = {"cycle": cycle,
"dest_partner_id": current_bomber.partner_id,
"application_ids": from_ids,
"dest_bomber_id": bid,
}
if type == ApplicationType.CASH_LOAN.value:
out_param = {"src_bomber_id": bid,
"application_ids": bomber_app_logs.get("to_ids",[]),
"month_dispatch":1
}
                # Exit record
new_out_record(**out_param)
                # Entry record
new_in_record(**in_param)
else:
out_and_in_record_instalment(**in_param)
bomber_app_logs["status"] = 1
need_num = bomber_app_logs.get("need_num", 0) + average_num[index]
bomber_app_logs["need_num"] = need_num
all_form_ids = bomber_app_logs.get("form_ids", []) + from_ids
bomber_app_logs["form_ids"] = all_form_ids
            # For in-house dispatch, skip the steps below
if not out_partner:
continue
            # Partner-dispatched cases are tracked in dispatch_app: delete the old records, then insert new ones
try:
(DispatchApp.delete()
.where(DispatchApp.application.in_(from_ids))
.execute())
dispatch_ins = [{"application": id,
"partner": current_bomber.partner_id,
"bomber": bid,
"status": DisAppStatus.NORMAL.value,
} for id in from_ids]
(DispatchApp.insert_many(dispatch_ins).execute())
except Exception as e:
logging.info(
"month_disapp_error error:%s,bid:%s,from_ids:%s" %
(str(e), bid, from_ids))
# Compute each case's overdue days and update its cycle accordingly
def calc_instalment_apps_cycle():
cycle_list = [Cycle.C2.value, Cycle.C3.value]
for cycle in cycle_list:
apps = (ApplicationR.select(ApplicationR.id,
ApplicationR.cycle,
ApplicationR.overdue_days.alias("ods"),
ApplicationR.latest_bomber,
OverdueBillR.status,
OverdueBillR.overdue_days.alias("oods"))
.join(OverdueBillR, JOIN_LEFT_OUTER,
on=ApplicationR.id == OverdueBillR.collection_id)
.where(ApplicationR.cycle == cycle,
ApplicationR.type ==
ApplicationType.CASH_LOAN_STAGING.value,
ApplicationR.status != ApplicationStatus.REPAID.value)
.dicts())
        # Compute the case's true overdue_days
lower_apps = {}
for app in apps:
if app["status"] == ApplicationStatus.REPAID.value:
continue
aid = app["id"]
if aid in lower_apps:
lower_apps[aid]["ods"] = max(app["oods"], app["ods"])
else:
lower_apps[aid] = {
"id": aid,
"cycle": cycle,
"ods": app["oods"],
}
        # Check whether each app's overdue days still match its current cycle
for aid,app in lower_apps.items():
new_cycle = get_cycle_by_overdue_days(app["ods"])
if new_cycle != cycle:
update_param = {"cycle":new_cycle,
"overdue_days":app["ods"]}
entry_time = calc_entry_time(app["ods"])
update_param.update(entry_time)
                # Update the collection case
(Application.update(**update_param)
.where(Application.id == aid)
.execute())
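# get_cycle_by_overdue_days is defined elsewhere in this module; a minimal
# sketch of what it presumably does, assuming thresholds consistent with
# calc_entry_time below (C1A 4-10, C1B 11-30, C2 31-60, C3 61-90 days):
#
#     def get_cycle_by_overdue_days(overdue_days):
#         if overdue_days <= 10:
#             return Cycle.C1A.value
#         if overdue_days <= 30:
#             return Cycle.C1B.value
#         if overdue_days <= 60:
#             return Cycle.C2.value
#         return Cycle.C3.value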
# After a cycle downgrade, update the entry timestamps below based on overdue days
def calc_entry_time(overdue_days):
app_entry_time = {}
overdue_entry = {
"dpd1_entry": [1, 3],
"C1A_entry": [4, 10],
"C1B_entry": [11, 30],
"C2_entry": [31, 60],
"C3_entry": [61, 90]
}
for key,value in overdue_entry.items():
if value[0] <= overdue_days <= value[1]:
app_entry_time[key] = datetime.now()
else:
app_entry_time[key] = None
return app_entry_time
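# Example (hypothetical): calc_entry_time(15) sets only C1B_entry, since 15
# falls in the [11, 30] bucket:
#     {"dpd1_entry": None, "C1A_entry": None, "C1B_entry": <now>,
#      "C2_entry": None, "C3_entry": None}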
# Installment-case dispatch
def instalment_month_dispatch_app():
sys_cycle = {1: 'AB_TEST_C1A',
2: 'AB_TEST_C1B',
3: 'AB_TEST_C2',
4: 'AB_TEST_C3'}
    # Downgrade cycles first
calc_instalment_apps_cycle()
instalment_cycle_list = Cycle.values()[:4]
for cycle in instalment_cycle_list:
apps = get_cycle_all_no_paid_app(cycle,
ApplicationType.CASH_LOAN_STAGING.value)
if not apps:
logging.info("instalment_month_dispatch no get apps,cycle:%s"%cycle)
continue
app_logs, all_np_apps, all_p_apps = get_app_logs(apps)
        # Get the staff to dispatch to
if cycle == Cycle.C1A.value:
sys_config = SystemConfig.get(SystemConfig.key == sys_cycle[cycle])
sys_values = json.loads(sys_config.value)
bombers = (Bomber.select().where(Bomber.id << sys_values,
Bomber.is_del == 0))
else:
bombers = (Bomber.select().where(Bomber.is_del == 0,
Bomber.instalment == cycle))
bids = {b.id:b for b in bombers}
if not bids:
logging.info("instalment_month_dispatch no bomber,cycle:%s"%cycle)
continue
dispatch_apps_to_bomber(cycle = cycle,
apps = all_p_apps,
bids = bids,
app_logs = app_logs,
out_partner = False,
type = ApplicationType.CASH_LOAN_STAGING.value)
if cycle in (Cycle.C1B.value, Cycle.C2.value, Cycle.C3.value):
dispatch_apps_to_bomber(cycle=cycle,
apps=all_np_apps,
bids=bids,
app_logs=app_logs,
out_partner=False,
type=ApplicationType.CASH_LOAN_STAGING.value)
else:
            # Non-PTP cases still need an entry record
            np_ids = [a["id"] for a in all_np_apps]
            np = (Application.update(status=ApplicationStatus.UNCLAIMED.value,
                                     ptp_bomber=None,
                                     latest_bomber=None)
                  .where(Application.id << np_ids,
                         Application.status != ApplicationStatus.REPAID.value)
.execute())
in_param = {
"cycle": cycle,
"application_ids": np_ids,
"dest_bomber_id": cycle
}
out_and_in_record_instalment(**in_param)
        # If any applications were moved down a cycle, record that in the history too
try:
dispatch_apps_logs = []
            for bid, app in app_logs.items():
alg = {
"bomber_id": bid,
"need_num": -len(app.get("to_ids", [])),
"form_ids": json.dumps(app.get("form_ids", [])),
"to_ids": json.dumps(app.get("to_ids", [])),
"np_ids": json.dumps(app.get("np_ids", [])),
"p_ids": json.dumps(app.get("p_ids", [])),
"status": 1
}
if bid in bids:
alg["need_num"] = app.get("need_num", 0)
dispatch_apps_logs.append(alg)
if dispatch_apps_logs:
DispatchAppLogs.insert_many(dispatch_apps_logs).execute()
except Exception as e:
logging.info(
"instalment_dispatch_app_month log error.cycle:%s,error:%s" % (
cycle, str(e)))
# Redistribute all applications at the end of each month
@action(MessageAction.MONTH_DISPATCH_APP)
def month_dispatch_app(payload, msg_id):
    # Only run on the 1st of the month
    if datetime.today().day != 1:
        logging.info("Today is not the 1st; monthly dispatch will not run")
return
cycle_list = [Cycle.C1A.value,
Cycle.C1B.value,
Cycle.C2.value,
Cycle.C3.value]
with db.atomic():
for cycle in cycle_list:
apps = get_cycle_all_no_paid_app(cycle,
ApplicationType.CASH_LOAN.value)
            if not apps:
                logging.info("month_dispatch_app got no apps, cycle:%s" % cycle)
                continue
app_logs, all_np_apps, all_p_apps = get_app_logs(apps)
            np_apps = month_dispatch_app_out_partner(cycle=cycle,
                                                     apps=apps,
                                                     app_logs=app_logs,
                                                     np_apps=all_np_apps)
            if not np_apps and not all_p_apps:
                logging.info("month_dispatch_app got no inner apps, cycle:%s",
                             cycle)
                continue
            month_dispatch_app_inner(cycle, np_apps, app_logs, all_p_apps)
            # Record the dispatch logs in the table
try:
dispatch_apps_logs = []
                for bid, app in app_logs.items():
alg = {
"bomber_id": bid,
"need_num": app.get("need_num",0),
"form_ids": json.dumps(app.get("form_ids", [])),
"to_ids": json.dumps(app.get("to_ids", [])),
"np_ids": json.dumps(app.get("np_ids", [])),
"p_ids": json.dumps(app.get("p_ids", [])),
"status": 1
}
dispatch_apps_logs.append(alg)
for idx in range(0, len(dispatch_apps_logs), 10):
DispatchAppLogs.insert_many(
dispatch_apps_logs[idx:idx + 10]).execute()
except Exception as e:
                logging.error(
                    "insert dispatch_log error:%s,cycle:%s" % (str(e), cycle))
try:
instalment_month_dispatch_app()
except Exception as e:
logging.info("instalment_month_dispatch_error:%s"%str(e))
# Daily scheduled summary of collection application info
@action(MessageAction.SUMMARY_BOMBER_OVERDUE)
def summary_bomber_overdue_everyday(payload, msg_id):
cycle_list = Cycle.values()
which_day = date.today()
    # Get the unfinished applications for each cycle
for cycle in cycle_list:
apps = (ApplicationR.select(ApplicationR.id,
ApplicationR.cycle,
ApplicationR.ptp_bomber,
ApplicationR.overdue_days,
ApplicationR.promised_date,
ApplicationR.follow_up_date,
ApplicationR.external_id,
OverdueBillR.status,
OverdueBillR.periods,
OverdueBillR.sub_bill_id)
.join(OverdueBillR, JOIN_LEFT_OUTER,
                      on=ApplicationR.id == OverdueBillR.collection_id)
.where(ApplicationR.status != ApplicationStatus.REPAID.value,
ApplicationR.no_active == 0,
ApplicationR.cycle == cycle)
.dicts())
bomber_overdue_list = []
for app in apps:
status = app.get("status")
if status == ApplicationStatus.REPAID.value:
continue
ptp_bomber = app.get("ptp_bomber")
promised_date = app.get("promised_date")
follow_up_date = app.get("follow_up_date")
if not promised_date or promised_date.date() < date.today():
ptp_bomber = promised_date = None
if not follow_up_date or follow_up_date.date() < date.today():
follow_up_date = None
overdue_dict = {
"collection_id": app.get("id"),
"external_id": app.get("external_id"),
"sub_bill_id": app.get("sub_bill_id"),
"periods": app.get("periods"),
"cycle": app.get("cycle") if app.get("cycle") else cycle,
"ptp_bomber": ptp_bomber,
"promised_date": promised_date,
"follow_up_date": follow_up_date,
"which_day": which_day,
"overdue_days": app.get("overdue_days")
}
bomber_overdue_list.append(overdue_dict)
try:
if bomber_overdue_list:
with db.atomic():
for index in range(0, len(bomber_overdue_list), 1000):
insert_list = bomber_overdue_list[index: index+1000]
BomberOverdue.insert_many(insert_list).execute()
except Exception as e:
logging.info(
"summary_bomber_overdue_error,cycle:%s,which_day:%s,error:%s"%(
cycle,str(which_day),str(e)))
# Every minute, tally each collector's promise-to-pay (PTP) application count
@action(MessageAction.BOMBER_PTP_REAL_TIME_SUMMARY)
def bomber_ptp_real_time_summary(payload, msg_id):
ptp_switch_number = 200
sys_ptp_switch = (SystemConfig.select()
.where(SystemConfig.key == 'PTP_SWITCH_NUMBER')
.first())
if sys_ptp_switch and sys_ptp_switch.value.isdigit():
ptp_switch_number = int(sys_ptp_switch.value)
today = datetime.today().date()
ptp_apps = (ApplicationR.select(fn.COUNT(ApplicationR.id).alias('ptp_cnt'),
ApplicationR.latest_bomber)
.where(ApplicationR.status != ApplicationStatus.REPAID.value,
ApplicationR.cycle < Cycle.C2.value,
ApplicationR.promised_date >= today,
ApplicationR.latest_bomber.is_null(False))
.group_by(ApplicationR.latest_bomber))
bomber_ptps = (BomberPtp.select(BomberPtp.bomber_id))
bomber_ptp_bids = [b.bomber_id for b in bomber_ptps]
insert_result = []
for app in ptp_apps:
ptp_switch = BomberCallSwitch.ON.value
if app.ptp_cnt >= ptp_switch_number:
ptp_switch = BomberCallSwitch.OFF.value
params = {"bomber_id": app.latest_bomber_id,
"ptp_cnt": app.ptp_cnt,
"ptp_switch": ptp_switch,
"auto_ext": app.latest_bomber.auto_ext}
if app.latest_bomber_id in bomber_ptp_bids:
try:
q = (BomberPtp.update(**params)
.where(BomberPtp.bomber_id==app.latest_bomber_id)
.execute())
            except Exception as e:
                logging.error(
                    "ptp_real_time_summary_error:%s,data:%s,bid:%s" % (
                        str(e), params, app.latest_bomber_id))
else:
insert_result.append(params)
if insert_result:
BomberPtp.insert_many(insert_result).execute()
# At 10:00, 14:00 and 16:30 each day, auto-dial is switched off; a collector can receive auto-dial calls again only after finishing follow-ups on the auto-dialed applications
@action(MessageAction.BOMBER_TODAY_PTP_FOLLOW_SWITCH_OFF)
def today_ptp_auto_call_switch(payload, msg_id):
today = datetime.today().date()
next_day = today + timedelta(days=1)
    # Get collectors who have applications with a promise-to-pay due today
apps = (ApplicationR.select(ApplicationR.latest_bomber)
.where(ApplicationR.promised_date < next_day,
ApplicationR.promised_date >= today,
ApplicationR.promised_date.is_null(False),
ApplicationR.status != ApplicationStatus.REPAID.value,
ApplicationR.cycle < Cycle.C2.value,
ApplicationR.latest_bomber.is_null(False))
.group_by(ApplicationR.latest_bomber))
bids = [a.latest_bomber_id for a in apps]
if not bids:
return
q = (BomberPtp.update(today_switch=BomberCallSwitch.OFF.value)
.where(BomberPtp.auto_ext.is_null(False),
BomberPtp.bomber_id << bids)
.execute())
# At 8:00 every morning, reset collectors' auto-dial switch
@action(MessageAction.BOMBER_TODAY_PTP_FOLLOW_SWITCH_ON)
def update_today_switch_every_day(payload, msg_id):
q = (BomberPtp.update(today_switch=BomberCallSwitch.ON.value)
.where(BomberPtp.auto_ext.is_null(False))
.execute())
# Notify bomber when a user updates their phone number
@action(MessageAction.USER_UPDATE_PHONE)
def user_change_phone(payload, msg_id):
user_id = payload.get("user_id")
new_mobile_no = payload.get("new_mobile_no")
    if not all([user_id, new_mobile_no]):
        logging.info("user_change_phone: missing user_id or new mobile number")
        return
source = 'applicant updated number'
contacts = (Contact.select()
.where(Contact.user_id == int(user_id)))
    if not contacts.exists():
        logging.info("No contact records found for this user")
        return
new_contact = contacts.where(Contact.number == new_mobile_no,
Contact.source == source)
    if new_contact.exists():
        logging.info("The user's new mobile number already exists")
        return
contact = contacts.order_by(-Contact.created_at).first()
    Contact.create(user_id=contact.user_id,
                   name=contact.name,
                   number=new_mobile_no,
                   source=source,
                   relationship=Relationship.APPLICANT.value,
                   real_relationship=Relationship.APPLICANT.value)
| [
60,
69,
74,
80,
146
] |
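The record above repeatedly batches insert_many calls (chunks of 10 or 1000 rows) inside a transaction to keep individual queries small. A minimal, runnable sketch of that pattern with peewee and an in-memory SQLite table; the LogEntry model and chunk size are illustrative assumptions, not the project's actual schema:

# Minimal sketch of the chunked bulk-insert pattern used above (peewee,
# illustrative stand-in model -- not the project's actual schema).
from peewee import SqliteDatabase, Model, CharField, IntegerField

db = SqliteDatabase(":memory:")

class LogEntry(Model):  # stand-in for DispatchAppLogs / BomberOverdue
    bomber_id = IntegerField()
    form_ids = CharField()

    class Meta:
        database = db

db.create_tables([LogEntry])

rows = [{"bomber_id": i, "form_ids": "[]"} for i in range(250)]
CHUNK = 100  # assumed batch size; the record itself uses 10 and 1000
with db.atomic():  # one transaction, many small INSERT statements
    for idx in range(0, len(rows), CHUNK):
        LogEntry.insert_many(rows[idx:idx + CHUNK]).execute()

print(LogEntry.select().count())  # -> 250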
1,959 | c185a88332e39c561649f087f01fd3b704e7010b | <mask token>
def upgrade():
bind = op.get_bind()
urls = bind.execute(
'SELECT p.id as pid, r.id as rid, r.uri as uri FROM oauth2_identity_provider p JOIN resource r ON p.api_resource_id = r.id'
)
op.add_column('oauth2_identity_provider', sa.Column('uri', sa.String(),
nullable=True))
for url in urls:
bind.execute(
f"UPDATE oauth2_identity_provider SET uri = '{url[2]}' WHERE id = {url[0]}"
)
bind.execute(
"UPDATE oauth2_identity_provider SET uri = 'https://github.com' WHERE name = 'github'"
)
op.alter_column('oauth2_identity_provider', 'uri', nullable=False)
op.create_unique_constraint(None, 'oauth2_identity_provider', ['uri'])
<mask token>
| <mask token>
def upgrade():
bind = op.get_bind()
urls = bind.execute(
'SELECT p.id as pid, r.id as rid, r.uri as uri FROM oauth2_identity_provider p JOIN resource r ON p.api_resource_id = r.id'
)
op.add_column('oauth2_identity_provider', sa.Column('uri', sa.String(),
nullable=True))
for url in urls:
bind.execute(
f"UPDATE oauth2_identity_provider SET uri = '{url[2]}' WHERE id = {url[0]}"
)
bind.execute(
"UPDATE oauth2_identity_provider SET uri = 'https://github.com' WHERE name = 'github'"
)
op.alter_column('oauth2_identity_provider', 'uri', nullable=False)
op.create_unique_constraint(None, 'oauth2_identity_provider', ['uri'])
def downgrade():
op.drop_constraint(None, 'oauth2_identity_provider', type_='unique')
op.drop_column('oauth2_identity_provider', 'uri')
| <mask token>
revision = '52561c782d96'
down_revision = 'cdf9f34b764c'
branch_labels = None
depends_on = None
def upgrade():
bind = op.get_bind()
urls = bind.execute(
'SELECT p.id as pid, r.id as rid, r.uri as uri FROM oauth2_identity_provider p JOIN resource r ON p.api_resource_id = r.id'
)
op.add_column('oauth2_identity_provider', sa.Column('uri', sa.String(),
nullable=True))
for url in urls:
bind.execute(
f"UPDATE oauth2_identity_provider SET uri = '{url[2]}' WHERE id = {url[0]}"
)
bind.execute(
"UPDATE oauth2_identity_provider SET uri = 'https://github.com' WHERE name = 'github'"
)
op.alter_column('oauth2_identity_provider', 'uri', nullable=False)
op.create_unique_constraint(None, 'oauth2_identity_provider', ['uri'])
def downgrade():
op.drop_constraint(None, 'oauth2_identity_provider', type_='unique')
op.drop_column('oauth2_identity_provider', 'uri')
| <mask token>
from alembic import op
import sqlalchemy as sa
revision = '52561c782d96'
down_revision = 'cdf9f34b764c'
branch_labels = None
depends_on = None
def upgrade():
bind = op.get_bind()
urls = bind.execute(
'SELECT p.id as pid, r.id as rid, r.uri as uri FROM oauth2_identity_provider p JOIN resource r ON p.api_resource_id = r.id'
)
op.add_column('oauth2_identity_provider', sa.Column('uri', sa.String(),
nullable=True))
for url in urls:
bind.execute(
f"UPDATE oauth2_identity_provider SET uri = '{url[2]}' WHERE id = {url[0]}"
)
bind.execute(
"UPDATE oauth2_identity_provider SET uri = 'https://github.com' WHERE name = 'github'"
)
op.alter_column('oauth2_identity_provider', 'uri', nullable=False)
op.create_unique_constraint(None, 'oauth2_identity_provider', ['uri'])
def downgrade():
op.drop_constraint(None, 'oauth2_identity_provider', type_='unique')
op.drop_column('oauth2_identity_provider', 'uri')
| """Add uri on identity provider
Revision ID: 52561c782d96
Revises: cdf9f34b764c
Create Date: 2022-03-11 10:16:39.583434
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '52561c782d96'
down_revision = 'cdf9f34b764c'
branch_labels = None
depends_on = None
def upgrade():
bind = op.get_bind()
# get api urls
urls = bind.execute("SELECT p.id as pid, r.id as rid, r.uri as uri "
"FROM oauth2_identity_provider p JOIN resource r ON p.api_resource_id = r.id")
# add URI
op.add_column('oauth2_identity_provider', sa.Column('uri', sa.String(), nullable=True))
# set api_url as default URI
for url in urls:
bind.execute(f"UPDATE oauth2_identity_provider SET uri = '{url[2]}' WHERE id = {url[0]}")
# patch Github URI
bind.execute("UPDATE oauth2_identity_provider SET uri = 'https://github.com' WHERE name = 'github'")
# add constraints
op.alter_column('oauth2_identity_provider', 'uri', nullable=False)
op.create_unique_constraint(None, 'oauth2_identity_provider', ['uri'])
def downgrade():
# remove URI
op.drop_constraint(None, 'oauth2_identity_provider', type_='unique')
op.drop_column('oauth2_identity_provider', 'uri')
| [
1,
2,
3,
4,
5
] |
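The migration above splices values straight into SQL with f-strings; with bound parameters via sqlalchemy.text the same updates avoid quoting and injection pitfalls. A hedged, self-contained sketch against a throwaway in-memory SQLite engine (in a real migration the bind would come from op.get_bind(), as above); table and column names mirror the record, the example URI is invented:

# Sketch: the record's UPDATEs rewritten with bound parameters.
import sqlalchemy as sa

engine = sa.create_engine("sqlite:///:memory:")
with engine.begin() as bind:
    bind.execute(sa.text(
        "CREATE TABLE oauth2_identity_provider "
        "(id INTEGER PRIMARY KEY, name TEXT, uri TEXT)"))
    bind.execute(sa.text(
        "INSERT INTO oauth2_identity_provider (id, name) VALUES (1, 'github')"))
    # values travel as parameters, not as spliced-in SQL text
    bind.execute(
        sa.text("UPDATE oauth2_identity_provider SET uri = :uri WHERE id = :pid"),
        {"uri": "https://example.org/api", "pid": 1})
    bind.execute(
        sa.text("UPDATE oauth2_identity_provider SET uri = :uri WHERE name = :name"),
        {"uri": "https://github.com", "name": "github"})
    print(bind.execute(sa.text(
        "SELECT uri FROM oauth2_identity_provider")).fetchall())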
1,960 | 61cfc583cd87ac0528cb07f4e051392167414920 | <mask token>
| <mask token>
while x <= 24:
if x % 5 == 0:
x = x + 1
continue
print(x)
x = x + 1
| x = 1
while x <= 24:
if x % 5 == 0:
x = x + 1
continue
print(x)
x = x + 1
| null | null | [
0,
1,
2
] |
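The loop in the record above prints 1 through 24 while skipping multiples of 5; the continue-plus-reincrement dance can be dropped entirely with a for loop, for comparison:

# Equivalent to the record's while loop: print 1..24, skipping multiples of 5.
for x in range(1, 25):
    if x % 5 != 0:
        print(x)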
1,961 | cd4f22b8e2188e8019e7324e80d64a7b95f8f956 | <mask token>
class CouchTests2(unittest.TestCase):
<mask token>
def test_bar(self):
self.assertEqual(1, 1)
| <mask token>
class CouchTests2(unittest.TestCase):
def test_foo(self):
self.assertEqual(1, 1)
def test_bar(self):
self.assertEqual(1, 1)
| __author__ = 'Administrator'
<mask token>
class CouchTests2(unittest.TestCase):
def test_foo(self):
self.assertEqual(1, 1)
def test_bar(self):
self.assertEqual(1, 1)
| __author__ = 'Administrator'
import unittest
class CouchTests2(unittest.TestCase):
def test_foo(self):
self.assertEqual(1, 1)
def test_bar(self):
self.assertEqual(1, 1)
| null | [
2,
3,
4,
5
] |
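The test class in this record defines two passing assertions but no entry point; a conventional runner line (an assumed addition -- the original may rely on an external test runner) would let the file execute directly:

# Assumed entry point so the test module can be run directly.
if __name__ == "__main__":
    unittest.main()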
1,962 | 030bc0c7bdbbb09f722ffe4c82866726062f5317 | <mask token>
class Game:
def __init__(self):
pygame.init()
global CLOCK, SURFACE
CLOCK = pygame.time.Clock()
SURFACE = pygame.display.set_mode((WINDOW_WIDTH, WINDOW_HEIGHT))
self.mouse_x = 0
self.mouse_y = 0
pygame.display.set_caption('Minesweeper by Alyssa Moore 2017')
self.board = self.get_board()
self.revealed_cells = self.generate_data(False)
self.flags = self.generate_data(False)
self.questionmarks = self.generate_data(False)
self.game_over = False
self.timer = Stopwatch()
SURFACE.fill(BG_COLOR)
<mask token>
<mask token>
@staticmethod
def generate_data(val):
clear = []
for i in range(GRID_WIDTH):
clear.append([val] * GRID_HEIGHT)
return clear
<mask token>
<mask token>
def draw_board(self, board, revealed, flags, questionmarks):
for cell_x in range(GRID_WIDTH):
for cell_y in range(GRID_HEIGHT):
left, top = self.get_top_left_coordinates(cell_x, cell_y)
if not revealed[cell_x][cell_y]:
pygame.draw.rect(SURFACE, CELL_COLOR, (left, top,
CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))
if flags[cell_x][cell_y]:
half = int(CELL_SIDE_LENGTH * 0.5)
pygame.draw.polygon(SURFACE, FLAG_COLOR, [(half +
left, top), (left, top + CELL_SIDE_LENGTH -
CELL_MARGIN / 2), (left + CELL_SIDE_LENGTH -
CELL_MARGIN / 2, top + CELL_SIDE_LENGTH -
CELL_MARGIN / 2)])
elif questionmarks[cell_x][cell_y]:
quarter = int(CELL_SIDE_LENGTH * 0.25)
pygame.draw.rect(SURFACE, GRAY, (left, top,
CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))
fontsize = int(CELL_SIDE_LENGTH)
font = pygame.font.SysFont('times new roman', fontsize)
label = font.render('?', 1, BLACK)
SURFACE.blit(label, (left + quarter, top))
else:
shape, color = self.get_shape_and_color(board, cell_x,
cell_y)
self.draw_icon(shape, color, cell_x, cell_y)
<mask token>
@staticmethod
def get_shape_and_color(board, cell_x, cell_y):
return board[cell_x][cell_y][0], board[cell_x][cell_y][1]
<mask token>
<mask token>
<mask token>
class Stopwatch:
def __init__(self):
self.seconds = 0
self.running = False
self.latest_time = None
def start(self):
if not self.running:
self.running = True
self.latest_time = time.time()
def get_seconds(self):
t1 = self.seconds
if self.running:
t1 += time.time() - self.latest_time
return int(t1)
def pause(self):
if self.running:
self.running = False
self.seconds += time.time() - self.latest_time
<mask token>
| <mask token>
class Game:
def __init__(self):
pygame.init()
global CLOCK, SURFACE
CLOCK = pygame.time.Clock()
SURFACE = pygame.display.set_mode((WINDOW_WIDTH, WINDOW_HEIGHT))
self.mouse_x = 0
self.mouse_y = 0
pygame.display.set_caption('Minesweeper by Alyssa Moore 2017')
self.board = self.get_board()
self.revealed_cells = self.generate_data(False)
self.flags = self.generate_data(False)
self.questionmarks = self.generate_data(False)
self.game_over = False
self.timer = Stopwatch()
SURFACE.fill(BG_COLOR)
<mask token>
@staticmethod
def get_board():
icons = []
mines = 0
for x in range(GRID_WIDTH):
for y in range(GRID_HEIGHT):
if mines < NUM_MINES:
icons.append((MINE, RED))
mines += 1
else:
icons.append((CLEAR, WHITE))
random.shuffle(icons)
board = []
for x in range(GRID_WIDTH):
column = []
for y in range(GRID_HEIGHT):
column.append(icons[0])
del icons[0]
board.append(column)
for x in range(GRID_WIDTH):
for y in range(GRID_HEIGHT):
mines = 0
if x > 0:
if y > 0:
if board[x - 1][y - 1][0] == MINE:
mines += 1
if board[x - 1][y][0] == MINE:
mines += 1
if y < GRID_HEIGHT - 1:
if board[x - 1][y + 1][0] == MINE:
mines += 1
if x < GRID_WIDTH - 1:
if y > 0:
if board[x + 1][y - 1][0] == MINE:
mines += 1
if board[x + 1][y][0] == MINE:
mines += 1
if y < GRID_HEIGHT - 1:
if board[x + 1][y + 1][0] == MINE:
mines += 1
if y > 0:
if board[x][y - 1][0] == MINE:
mines += 1
if y < GRID_HEIGHT - 1:
if board[x][y + 1][0] == MINE:
mines += 1
if board[x][y][0] != MINE:
if mines in range(1, 9):
board[x][y] = str(mines), WHITE
return board
@staticmethod
def generate_data(val):
clear = []
for i in range(GRID_WIDTH):
clear.append([val] * GRID_HEIGHT)
return clear
@staticmethod
def get_top_left_coordinates(row, column):
left = row * (CELL_SIDE_LENGTH + CELL_MARGIN) + X_BOARD_MARGIN
top = column * (CELL_SIDE_LENGTH + CELL_MARGIN) + Y_BOARD_MARGIN
return left, top
def get_cell_at_pixel(self, x, y):
for cell_x in range(GRID_WIDTH):
for cell_y in range(GRID_HEIGHT):
left, top = self.get_top_left_coordinates(cell_x, cell_y)
cell_rect = pygame.Rect(left, top, CELL_SIDE_LENGTH,
CELL_SIDE_LENGTH)
if cell_rect.collidepoint(x, y):
return cell_x, cell_y
return None, None
def draw_board(self, board, revealed, flags, questionmarks):
for cell_x in range(GRID_WIDTH):
for cell_y in range(GRID_HEIGHT):
left, top = self.get_top_left_coordinates(cell_x, cell_y)
if not revealed[cell_x][cell_y]:
pygame.draw.rect(SURFACE, CELL_COLOR, (left, top,
CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))
if flags[cell_x][cell_y]:
half = int(CELL_SIDE_LENGTH * 0.5)
pygame.draw.polygon(SURFACE, FLAG_COLOR, [(half +
left, top), (left, top + CELL_SIDE_LENGTH -
CELL_MARGIN / 2), (left + CELL_SIDE_LENGTH -
CELL_MARGIN / 2, top + CELL_SIDE_LENGTH -
CELL_MARGIN / 2)])
elif questionmarks[cell_x][cell_y]:
quarter = int(CELL_SIDE_LENGTH * 0.25)
pygame.draw.rect(SURFACE, GRAY, (left, top,
CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))
fontsize = int(CELL_SIDE_LENGTH)
font = pygame.font.SysFont('times new roman', fontsize)
label = font.render('?', 1, BLACK)
SURFACE.blit(label, (left + quarter, top))
else:
shape, color = self.get_shape_and_color(board, cell_x,
cell_y)
self.draw_icon(shape, color, cell_x, cell_y)
def draw_icon(self, shape, color, cell_x, cell_y):
quarter = int(CELL_SIDE_LENGTH * 0.25)
left, top = self.get_top_left_coordinates(cell_x, cell_y)
if shape == CLEAR:
pygame.draw.rect(SURFACE, color, (left, top, CELL_SIDE_LENGTH,
CELL_SIDE_LENGTH))
elif shape == MINE:
pygame.draw.ellipse(SURFACE, color, (left, top,
CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))
else:
pygame.draw.rect(SURFACE, color, (left, top, CELL_SIDE_LENGTH,
CELL_SIDE_LENGTH))
fontsize = int(CELL_SIDE_LENGTH)
font = pygame.font.SysFont('times new roman', fontsize)
label = font.render(shape, 1, BLACK)
SURFACE.blit(label, (left + quarter, top))
@staticmethod
def get_shape_and_color(board, cell_x, cell_y):
return board[cell_x][cell_y][0], board[cell_x][cell_y][1]
<mask token>
def reveal_cells(self, x, y, board, revealed, flags, questionmarks):
if revealed[x][y]:
return
if flags[x][y]:
return
revealed[x][y] = True
if board[x][y][0] != CLEAR:
return
if x > 0:
if y > 0:
self.reveal_cells(x - 1, y - 1, board, revealed, flags,
questionmarks)
self.reveal_cells(x - 1, y, board, revealed, flags, questionmarks)
if y < GRID_HEIGHT - 1:
self.reveal_cells(x - 1, y + 1, board, revealed, flags,
questionmarks)
if x < GRID_WIDTH - 1:
if y > 0:
self.reveal_cells(x + 1, y - 1, board, revealed, flags,
questionmarks)
self.reveal_cells(x + 1, y, board, revealed, flags, questionmarks)
if y < GRID_HEIGHT - 1:
self.reveal_cells(x + 1, y + 1, board, revealed, flags,
questionmarks)
if y > 0:
self.reveal_cells(x, y - 1, board, revealed, flags, questionmarks)
if y < GRID_HEIGHT - 1:
self.reveal_cells(x, y + 1, board, revealed, flags, questionmarks)
@staticmethod
def create_menu():
font = pygame.font.SysFont('times new roman', 20)
label = font.render(' High scores', 1, BLACK)
pygame.draw.rect(SURFACE, GRAY, (500, 125, 105, 50))
SURFACE.blit(label, (500, 135))
class Stopwatch:
def __init__(self):
self.seconds = 0
self.running = False
self.latest_time = None
def start(self):
if not self.running:
self.running = True
self.latest_time = time.time()
def get_seconds(self):
t1 = self.seconds
if self.running:
t1 += time.time() - self.latest_time
return int(t1)
def pause(self):
if self.running:
self.running = False
self.seconds += time.time() - self.latest_time
<mask token>
| <mask token>
class Game:
def __init__(self):
pygame.init()
global CLOCK, SURFACE
CLOCK = pygame.time.Clock()
SURFACE = pygame.display.set_mode((WINDOW_WIDTH, WINDOW_HEIGHT))
self.mouse_x = 0
self.mouse_y = 0
pygame.display.set_caption('Minesweeper by Alyssa Moore 2017')
self.board = self.get_board()
self.revealed_cells = self.generate_data(False)
self.flags = self.generate_data(False)
self.questionmarks = self.generate_data(False)
self.game_over = False
self.timer = Stopwatch()
SURFACE.fill(BG_COLOR)
def main(self):
while True:
left_click = False
right_click = False
SURFACE.fill(BG_COLOR)
self.draw_board(self.board, self.revealed_cells, self.flags,
self.questionmarks)
self.create_menu()
font = pygame.font.SysFont('times new roman', 25)
self.timer.start()
t1 = self.timer.get_seconds()
label = font.render(str(t1), 1, MAGENTA)
SURFACE.blit(label, (50, 50))
for event in pygame.event.get():
if event.type == pygame.locals.QUIT:
pygame.quit()
sys.exit()
elif event.type == pygame.locals.MOUSEMOTION:
self.mouse_x, self.mouse_y = event.pos
elif event.type == pygame.locals.MOUSEBUTTONDOWN and event.button == 1:
self.mouse_x, self.mouse_y = event.pos
print(self.mouse_x, self.mouse_y)
left_click = True
elif event.type == pygame.locals.MOUSEBUTTONDOWN and event.button == 3:
self.mouse_x, self.mouse_y = event.pos
right_click = True
if self.game_over and right_click:
self.board = self.get_board()
self.revealed_cells = self.generate_data(False)
self.flags = self.generate_data(False)
self.questionmarks = self.generate_data(False)
self.game_over = False
self.timer = Stopwatch()
right_click = False
if self.game_over:
self.timer.pause()
score = self.timer.get_seconds()
a_x = X_BOARD_MARGIN + GRID_WIDTH / 4 * CELL_SIDE_LENGTH
b_y = (Y_BOARD_MARGIN + Y_BOARD_MARGIN / 4 + GRID_HEIGHT *
CELL_SIDE_LENGTH + GRID_HEIGHT * CELL_MARGIN)
font = pygame.font.SysFont('times new roman', 25)
if win:
label = font.render('Congratulations, you won!', 1, GREEN)
SURFACE.blit(label, (a_x - 75, b_y))
label = font.render('Score: ' + str(score), 1, GREEN)
SURFACE.blit(label, (a_x + 200, b_y))
else:
label = font.render('GAME OVER', 1, RED)
SURFACE.blit(label, (a_x + 10, b_y))
label = font.render('Press RIGHT mouse button', 1, YELLOW)
SURFACE.blit(label, (a_x - 50, b_y + 25))
cell_x, cell_y = self.get_cell_at_pixel(self.mouse_x, self.mouse_y)
if cell_x is not None and cell_y is not None:
if not self.revealed_cells[cell_x][cell_y
] and not self.game_over:
self.highlight_cell(cell_x, cell_y)
if not self.revealed_cells[cell_x][cell_y
] and left_click and not self.game_over:
if not self.flags[cell_x][cell_y
] and not self.questionmarks[cell_x][cell_y]:
self.flags[cell_x][cell_y] = False
if self.board[cell_x][cell_y][0] == MINE:
self.revealed_cells = self.generate_data(True)
self.game_over = True
elif self.board[cell_x][cell_y][0] == CLEAR:
self.reveal_cells(cell_x, cell_y, self.board,
self.revealed_cells, self.flags, self.
questionmarks)
else:
self.revealed_cells[cell_x][cell_y] = True
self.draw_board(self.board, self.revealed_cells,
self.flags, self.questionmarks)
if not self.revealed_cells[cell_x][cell_y
] and right_click and not self.game_over:
if self.flags[cell_x][cell_y]:
self.flags[cell_x][cell_y] = False
self.questionmarks[cell_x][cell_y] = True
elif self.questionmarks[cell_x][cell_y]:
self.questionmarks[cell_x][cell_y] = False
self.flags[cell_x][cell_y] = False
else:
self.flags[cell_x][cell_y] = True
self.questionmarks[cell_x][cell_y] = False
self.draw_board(self.board, self.revealed_cells, self.
flags, self.questionmarks)
win = True
for x in range(GRID_WIDTH):
for y in range(GRID_HEIGHT):
if self.board[x][y][0] == MINE and not self.flags[x][y
] or self.board[x][y][0
] != MINE and not self.revealed_cells[x][y]:
win = False
if win:
self.game_over = True
pygame.display.update()
CLOCK.tick(FPS)
@staticmethod
def get_board():
icons = []
mines = 0
for x in range(GRID_WIDTH):
for y in range(GRID_HEIGHT):
if mines < NUM_MINES:
icons.append((MINE, RED))
mines += 1
else:
icons.append((CLEAR, WHITE))
random.shuffle(icons)
board = []
for x in range(GRID_WIDTH):
column = []
for y in range(GRID_HEIGHT):
column.append(icons[0])
del icons[0]
board.append(column)
for x in range(GRID_WIDTH):
for y in range(GRID_HEIGHT):
mines = 0
if x > 0:
if y > 0:
if board[x - 1][y - 1][0] == MINE:
mines += 1
if board[x - 1][y][0] == MINE:
mines += 1
if y < GRID_HEIGHT - 1:
if board[x - 1][y + 1][0] == MINE:
mines += 1
if x < GRID_WIDTH - 1:
if y > 0:
if board[x + 1][y - 1][0] == MINE:
mines += 1
if board[x + 1][y][0] == MINE:
mines += 1
if y < GRID_HEIGHT - 1:
if board[x + 1][y + 1][0] == MINE:
mines += 1
if y > 0:
if board[x][y - 1][0] == MINE:
mines += 1
if y < GRID_HEIGHT - 1:
if board[x][y + 1][0] == MINE:
mines += 1
if board[x][y][0] != MINE:
if mines in range(1, 9):
board[x][y] = str(mines), WHITE
return board
@staticmethod
def generate_data(val):
clear = []
for i in range(GRID_WIDTH):
clear.append([val] * GRID_HEIGHT)
return clear
@staticmethod
def get_top_left_coordinates(row, column):
left = row * (CELL_SIDE_LENGTH + CELL_MARGIN) + X_BOARD_MARGIN
top = column * (CELL_SIDE_LENGTH + CELL_MARGIN) + Y_BOARD_MARGIN
return left, top
def get_cell_at_pixel(self, x, y):
for cell_x in range(GRID_WIDTH):
for cell_y in range(GRID_HEIGHT):
left, top = self.get_top_left_coordinates(cell_x, cell_y)
cell_rect = pygame.Rect(left, top, CELL_SIDE_LENGTH,
CELL_SIDE_LENGTH)
if cell_rect.collidepoint(x, y):
return cell_x, cell_y
return None, None
def draw_board(self, board, revealed, flags, questionmarks):
for cell_x in range(GRID_WIDTH):
for cell_y in range(GRID_HEIGHT):
left, top = self.get_top_left_coordinates(cell_x, cell_y)
if not revealed[cell_x][cell_y]:
pygame.draw.rect(SURFACE, CELL_COLOR, (left, top,
CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))
if flags[cell_x][cell_y]:
half = int(CELL_SIDE_LENGTH * 0.5)
pygame.draw.polygon(SURFACE, FLAG_COLOR, [(half +
left, top), (left, top + CELL_SIDE_LENGTH -
CELL_MARGIN / 2), (left + CELL_SIDE_LENGTH -
CELL_MARGIN / 2, top + CELL_SIDE_LENGTH -
CELL_MARGIN / 2)])
elif questionmarks[cell_x][cell_y]:
quarter = int(CELL_SIDE_LENGTH * 0.25)
pygame.draw.rect(SURFACE, GRAY, (left, top,
CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))
fontsize = int(CELL_SIDE_LENGTH)
font = pygame.font.SysFont('times new roman', fontsize)
label = font.render('?', 1, BLACK)
SURFACE.blit(label, (left + quarter, top))
else:
shape, color = self.get_shape_and_color(board, cell_x,
cell_y)
self.draw_icon(shape, color, cell_x, cell_y)
def draw_icon(self, shape, color, cell_x, cell_y):
quarter = int(CELL_SIDE_LENGTH * 0.25)
left, top = self.get_top_left_coordinates(cell_x, cell_y)
if shape == CLEAR:
pygame.draw.rect(SURFACE, color, (left, top, CELL_SIDE_LENGTH,
CELL_SIDE_LENGTH))
elif shape == MINE:
pygame.draw.ellipse(SURFACE, color, (left, top,
CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))
else:
pygame.draw.rect(SURFACE, color, (left, top, CELL_SIDE_LENGTH,
CELL_SIDE_LENGTH))
fontsize = int(CELL_SIDE_LENGTH)
font = pygame.font.SysFont('times new roman', fontsize)
label = font.render(shape, 1, BLACK)
SURFACE.blit(label, (left + quarter, top))
@staticmethod
def get_shape_and_color(board, cell_x, cell_y):
return board[cell_x][cell_y][0], board[cell_x][cell_y][1]
<mask token>
def reveal_cells(self, x, y, board, revealed, flags, questionmarks):
if revealed[x][y]:
return
if flags[x][y]:
return
revealed[x][y] = True
if board[x][y][0] != CLEAR:
return
if x > 0:
if y > 0:
self.reveal_cells(x - 1, y - 1, board, revealed, flags,
questionmarks)
self.reveal_cells(x - 1, y, board, revealed, flags, questionmarks)
if y < GRID_HEIGHT - 1:
self.reveal_cells(x - 1, y + 1, board, revealed, flags,
questionmarks)
if x < GRID_WIDTH - 1:
if y > 0:
self.reveal_cells(x + 1, y - 1, board, revealed, flags,
questionmarks)
self.reveal_cells(x + 1, y, board, revealed, flags, questionmarks)
if y < GRID_HEIGHT - 1:
self.reveal_cells(x + 1, y + 1, board, revealed, flags,
questionmarks)
if y > 0:
self.reveal_cells(x, y - 1, board, revealed, flags, questionmarks)
if y < GRID_HEIGHT - 1:
self.reveal_cells(x, y + 1, board, revealed, flags, questionmarks)
@staticmethod
def create_menu():
font = pygame.font.SysFont('times new roman', 20)
label = font.render(' High scores', 1, BLACK)
pygame.draw.rect(SURFACE, GRAY, (500, 125, 105, 50))
SURFACE.blit(label, (500, 135))
class Stopwatch:
def __init__(self):
self.seconds = 0
self.running = False
self.latest_time = None
def start(self):
if not self.running:
self.running = True
self.latest_time = time.time()
def get_seconds(self):
t1 = self.seconds
if self.running:
t1 += time.time() - self.latest_time
return int(t1)
def pause(self):
if self.running:
self.running = False
self.seconds += time.time() - self.latest_time
<mask token>
| import sys
import random
import pygame
import pygame.locals
import time
CELL_SIDE_LENGTH = 40
CELL_MARGIN = 2
GRID_HEIGHT = 10
GRID_WIDTH = 10
X_BOARD_MARGIN = 50
Y_BOARD_MARGIN = 75
MENU_MARGIN = 100
DIFFICULTY = 0.1
FPS = 30
NUM_MINES = 1 + int(GRID_WIDTH * GRID_HEIGHT * DIFFICULTY)
WINDOW_HEIGHT = (CELL_SIDE_LENGTH * GRID_HEIGHT + CELL_MARGIN * GRID_HEIGHT +
Y_BOARD_MARGIN * 2)
WINDOW_WIDTH = (CELL_SIDE_LENGTH * GRID_WIDTH + CELL_MARGIN * GRID_WIDTH +
X_BOARD_MARGIN * 2 + MENU_MARGIN)
RED = 255, 0, 0
YELLOW = 255, 255, 0
GREEN = 0, 255, 0
MIDGREEN = 40, 190, 40
CYAN = 0, 255, 255
BLUE = 0, 0, 255
DARKBLUE = 20, 20, 60
MAGENTA = 255, 0, 255
BLACK = 0, 0, 0
WHITE = 255, 255, 255
GRAY = 200, 200, 200
BG_COLOR = DARKBLUE
CELL_COLOR = GRAY
HIGHLIGHT_COLOR = CYAN
FLAG_COLOR = MIDGREEN
FLAG = 'flag'
MINE = 'mine'
CLEAR = 'clear'
class Game:
def __init__(self):
pygame.init()
global CLOCK, SURFACE
CLOCK = pygame.time.Clock()
SURFACE = pygame.display.set_mode((WINDOW_WIDTH, WINDOW_HEIGHT))
self.mouse_x = 0
self.mouse_y = 0
pygame.display.set_caption('Minesweeper by Alyssa Moore 2017')
self.board = self.get_board()
self.revealed_cells = self.generate_data(False)
self.flags = self.generate_data(False)
self.questionmarks = self.generate_data(False)
self.game_over = False
self.timer = Stopwatch()
SURFACE.fill(BG_COLOR)
def main(self):
while True:
left_click = False
right_click = False
SURFACE.fill(BG_COLOR)
self.draw_board(self.board, self.revealed_cells, self.flags,
self.questionmarks)
self.create_menu()
font = pygame.font.SysFont('times new roman', 25)
self.timer.start()
t1 = self.timer.get_seconds()
label = font.render(str(t1), 1, MAGENTA)
SURFACE.blit(label, (50, 50))
for event in pygame.event.get():
if event.type == pygame.locals.QUIT:
pygame.quit()
sys.exit()
elif event.type == pygame.locals.MOUSEMOTION:
self.mouse_x, self.mouse_y = event.pos
elif event.type == pygame.locals.MOUSEBUTTONDOWN and event.button == 1:
self.mouse_x, self.mouse_y = event.pos
print(self.mouse_x, self.mouse_y)
left_click = True
elif event.type == pygame.locals.MOUSEBUTTONDOWN and event.button == 3:
self.mouse_x, self.mouse_y = event.pos
right_click = True
if self.game_over and right_click:
self.board = self.get_board()
self.revealed_cells = self.generate_data(False)
self.flags = self.generate_data(False)
self.questionmarks = self.generate_data(False)
self.game_over = False
self.timer = Stopwatch()
right_click = False
if self.game_over:
self.timer.pause()
score = self.timer.get_seconds()
a_x = X_BOARD_MARGIN + GRID_WIDTH / 4 * CELL_SIDE_LENGTH
b_y = (Y_BOARD_MARGIN + Y_BOARD_MARGIN / 4 + GRID_HEIGHT *
CELL_SIDE_LENGTH + GRID_HEIGHT * CELL_MARGIN)
font = pygame.font.SysFont('times new roman', 25)
if win:
label = font.render('Congratulations, you won!', 1, GREEN)
SURFACE.blit(label, (a_x - 75, b_y))
label = font.render('Score: ' + str(score), 1, GREEN)
SURFACE.blit(label, (a_x + 200, b_y))
else:
label = font.render('GAME OVER', 1, RED)
SURFACE.blit(label, (a_x + 10, b_y))
label = font.render('Press RIGHT mouse button', 1, YELLOW)
SURFACE.blit(label, (a_x - 50, b_y + 25))
cell_x, cell_y = self.get_cell_at_pixel(self.mouse_x, self.mouse_y)
if cell_x is not None and cell_y is not None:
if not self.revealed_cells[cell_x][cell_y
] and not self.game_over:
self.highlight_cell(cell_x, cell_y)
if not self.revealed_cells[cell_x][cell_y
] and left_click and not self.game_over:
if not self.flags[cell_x][cell_y
] and not self.questionmarks[cell_x][cell_y]:
self.flags[cell_x][cell_y] = False
if self.board[cell_x][cell_y][0] == MINE:
self.revealed_cells = self.generate_data(True)
self.game_over = True
elif self.board[cell_x][cell_y][0] == CLEAR:
self.reveal_cells(cell_x, cell_y, self.board,
self.revealed_cells, self.flags, self.
questionmarks)
else:
self.revealed_cells[cell_x][cell_y] = True
self.draw_board(self.board, self.revealed_cells,
self.flags, self.questionmarks)
if not self.revealed_cells[cell_x][cell_y
] and right_click and not self.game_over:
if self.flags[cell_x][cell_y]:
self.flags[cell_x][cell_y] = False
self.questionmarks[cell_x][cell_y] = True
elif self.questionmarks[cell_x][cell_y]:
self.questionmarks[cell_x][cell_y] = False
self.flags[cell_x][cell_y] = False
else:
self.flags[cell_x][cell_y] = True
self.questionmarks[cell_x][cell_y] = False
self.draw_board(self.board, self.revealed_cells, self.
flags, self.questionmarks)
win = True
for x in range(GRID_WIDTH):
for y in range(GRID_HEIGHT):
if self.board[x][y][0] == MINE and not self.flags[x][y
] or self.board[x][y][0
] != MINE and not self.revealed_cells[x][y]:
win = False
if win:
self.game_over = True
pygame.display.update()
CLOCK.tick(FPS)
@staticmethod
def get_board():
icons = []
mines = 0
for x in range(GRID_WIDTH):
for y in range(GRID_HEIGHT):
if mines < NUM_MINES:
icons.append((MINE, RED))
mines += 1
else:
icons.append((CLEAR, WHITE))
random.shuffle(icons)
board = []
for x in range(GRID_WIDTH):
column = []
for y in range(GRID_HEIGHT):
column.append(icons[0])
del icons[0]
board.append(column)
for x in range(GRID_WIDTH):
for y in range(GRID_HEIGHT):
mines = 0
if x > 0:
if y > 0:
if board[x - 1][y - 1][0] == MINE:
mines += 1
if board[x - 1][y][0] == MINE:
mines += 1
if y < GRID_HEIGHT - 1:
if board[x - 1][y + 1][0] == MINE:
mines += 1
if x < GRID_WIDTH - 1:
if y > 0:
if board[x + 1][y - 1][0] == MINE:
mines += 1
if board[x + 1][y][0] == MINE:
mines += 1
if y < GRID_HEIGHT - 1:
if board[x + 1][y + 1][0] == MINE:
mines += 1
if y > 0:
if board[x][y - 1][0] == MINE:
mines += 1
if y < GRID_HEIGHT - 1:
if board[x][y + 1][0] == MINE:
mines += 1
if board[x][y][0] != MINE:
if mines in range(1, 9):
board[x][y] = str(mines), WHITE
return board
@staticmethod
def generate_data(val):
clear = []
for i in range(GRID_WIDTH):
clear.append([val] * GRID_HEIGHT)
return clear
@staticmethod
def get_top_left_coordinates(row, column):
left = row * (CELL_SIDE_LENGTH + CELL_MARGIN) + X_BOARD_MARGIN
top = column * (CELL_SIDE_LENGTH + CELL_MARGIN) + Y_BOARD_MARGIN
return left, top
def get_cell_at_pixel(self, x, y):
for cell_x in range(GRID_WIDTH):
for cell_y in range(GRID_HEIGHT):
left, top = self.get_top_left_coordinates(cell_x, cell_y)
cell_rect = pygame.Rect(left, top, CELL_SIDE_LENGTH,
CELL_SIDE_LENGTH)
if cell_rect.collidepoint(x, y):
return cell_x, cell_y
return None, None
def draw_board(self, board, revealed, flags, questionmarks):
for cell_x in range(GRID_WIDTH):
for cell_y in range(GRID_HEIGHT):
left, top = self.get_top_left_coordinates(cell_x, cell_y)
if not revealed[cell_x][cell_y]:
pygame.draw.rect(SURFACE, CELL_COLOR, (left, top,
CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))
if flags[cell_x][cell_y]:
half = int(CELL_SIDE_LENGTH * 0.5)
pygame.draw.polygon(SURFACE, FLAG_COLOR, [(half +
left, top), (left, top + CELL_SIDE_LENGTH -
CELL_MARGIN / 2), (left + CELL_SIDE_LENGTH -
CELL_MARGIN / 2, top + CELL_SIDE_LENGTH -
CELL_MARGIN / 2)])
elif questionmarks[cell_x][cell_y]:
quarter = int(CELL_SIDE_LENGTH * 0.25)
pygame.draw.rect(SURFACE, GRAY, (left, top,
CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))
fontsize = int(CELL_SIDE_LENGTH)
font = pygame.font.SysFont('times new roman', fontsize)
label = font.render('?', 1, BLACK)
SURFACE.blit(label, (left + quarter, top))
else:
shape, color = self.get_shape_and_color(board, cell_x,
cell_y)
self.draw_icon(shape, color, cell_x, cell_y)
def draw_icon(self, shape, color, cell_x, cell_y):
quarter = int(CELL_SIDE_LENGTH * 0.25)
left, top = self.get_top_left_coordinates(cell_x, cell_y)
if shape == CLEAR:
pygame.draw.rect(SURFACE, color, (left, top, CELL_SIDE_LENGTH,
CELL_SIDE_LENGTH))
elif shape == MINE:
pygame.draw.ellipse(SURFACE, color, (left, top,
CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))
else:
pygame.draw.rect(SURFACE, color, (left, top, CELL_SIDE_LENGTH,
CELL_SIDE_LENGTH))
fontsize = int(CELL_SIDE_LENGTH)
font = pygame.font.SysFont('times new roman', fontsize)
label = font.render(shape, 1, BLACK)
SURFACE.blit(label, (left + quarter, top))
@staticmethod
def get_shape_and_color(board, cell_x, cell_y):
return board[cell_x][cell_y][0], board[cell_x][cell_y][1]
def highlight_cell(self, cell_x, cell_y):
left, top = self.get_top_left_coordinates(cell_x, cell_y)
pygame.draw.rect(SURFACE, HIGHLIGHT_COLOR, (left - CELL_MARGIN / 2,
top - CELL_MARGIN / 2, CELL_SIDE_LENGTH + CELL_MARGIN,
CELL_SIDE_LENGTH + CELL_MARGIN), 2)
def reveal_cells(self, x, y, board, revealed, flags, questionmarks):
if revealed[x][y]:
return
if flags[x][y]:
return
revealed[x][y] = True
if board[x][y][0] != CLEAR:
return
if x > 0:
if y > 0:
self.reveal_cells(x - 1, y - 1, board, revealed, flags,
questionmarks)
self.reveal_cells(x - 1, y, board, revealed, flags, questionmarks)
if y < GRID_HEIGHT - 1:
self.reveal_cells(x - 1, y + 1, board, revealed, flags,
questionmarks)
if x < GRID_WIDTH - 1:
if y > 0:
self.reveal_cells(x + 1, y - 1, board, revealed, flags,
questionmarks)
self.reveal_cells(x + 1, y, board, revealed, flags, questionmarks)
if y < GRID_HEIGHT - 1:
self.reveal_cells(x + 1, y + 1, board, revealed, flags,
questionmarks)
if y > 0:
self.reveal_cells(x, y - 1, board, revealed, flags, questionmarks)
if y < GRID_HEIGHT - 1:
self.reveal_cells(x, y + 1, board, revealed, flags, questionmarks)
@staticmethod
def create_menu():
font = pygame.font.SysFont('times new roman', 20)
label = font.render(' High scores', 1, BLACK)
pygame.draw.rect(SURFACE, GRAY, (500, 125, 105, 50))
SURFACE.blit(label, (500, 135))
class Stopwatch:
def __init__(self):
self.seconds = 0
self.running = False
self.latest_time = None
def start(self):
if not self.running:
self.running = True
self.latest_time = time.time()
def get_seconds(self):
t1 = self.seconds
if self.running:
t1 += time.time() - self.latest_time
return int(t1)
def pause(self):
if self.running:
self.running = False
self.seconds += time.time() - self.latest_time
g = Game()
g.main()
| import sys
import random
import pygame
import pygame.locals
import time
# TODO high scores, difficulties
# Absolutes (in pixels where not otherwise stated)
CELL_SIDE_LENGTH = 40 # Side length of each cell
CELL_MARGIN = 2 # Gap between cells
GRID_HEIGHT = 10 # How many cells are in the grid
GRID_WIDTH = 10
X_BOARD_MARGIN = 50 # Gap between grid and sides of board
Y_BOARD_MARGIN = 75
MENU_MARGIN = 100 # Amount of space on the right dedicated to the menu
DIFFICULTY = 0.1 # Ratio of bombs (10% by default)
FPS = 30 # frames per second (window refresh speed)
# Relatives (so board size can easily be changed)
NUM_MINES = 1 + int(GRID_WIDTH * GRID_HEIGHT * DIFFICULTY) # Default about 10% of the board is mines
WINDOW_HEIGHT = (CELL_SIDE_LENGTH * GRID_HEIGHT) + (CELL_MARGIN * GRID_HEIGHT) + (Y_BOARD_MARGIN * 2)
WINDOW_WIDTH = (CELL_SIDE_LENGTH * GRID_WIDTH) + (CELL_MARGIN * GRID_WIDTH) + (X_BOARD_MARGIN * 2) + MENU_MARGIN
# R G B (not all used, but kept so theme can easily be changed)
RED = (255, 0, 0)
YELLOW = (255, 255, 0)
GREEN = (0, 255, 0)
MIDGREEN = (40, 190, 40)
CYAN = (0, 255, 255)
BLUE = (0, 0, 255)
DARKBLUE = (20, 20, 60)
MAGENTA = (255, 0, 255)
BLACK = (0, 0, 0)
WHITE = (255, 255, 255)
GRAY = (200, 200, 200)
BG_COLOR = DARKBLUE # Background color
CELL_COLOR = GRAY # Universal cover color
HIGHLIGHT_COLOR = CYAN # Cell the cursor is currently hovering over
FLAG_COLOR = MIDGREEN
# Symbols
FLAG = 'flag'
MINE = 'mine'
CLEAR = 'clear'
class Game:
def __init__(self):
pygame.init()
global CLOCK, SURFACE
CLOCK = pygame.time.Clock()
SURFACE = pygame.display.set_mode((WINDOW_WIDTH, WINDOW_HEIGHT))
self.mouse_x = 0 # Stores x-coordinate of mouse event
self.mouse_y = 0 # Stores y-coordinate of mouse event
pygame.display.set_caption('Minesweeper by Alyssa Moore 2017')
self.board = self.get_board()
self.revealed_cells = self.generate_data(False)
self.flags = self.generate_data(False)
self.questionmarks = self.generate_data(False)
self.game_over = False
self.timer = Stopwatch()
SURFACE.fill(BG_COLOR)
def main(self):
while True:
left_click = False
right_click = False
SURFACE.fill(BG_COLOR)
self.draw_board(self.board, self.revealed_cells, self.flags, self.questionmarks)
self.create_menu()
font = pygame.font.SysFont("times new roman", 25)
# Timer (will be used to implement high scores)
self.timer.start()
t1 = self.timer.get_seconds()
label = font.render(str(t1), 1, MAGENTA)
SURFACE.blit(label, (50, 50))
# Mouse event handling
for event in pygame.event.get():
if event.type == pygame.locals.QUIT:
pygame.quit()
sys.exit() # Even if the window closes, we still need to manually stop the processes
elif event.type == pygame.locals.MOUSEMOTION:
self.mouse_x, self.mouse_y = event.pos # For hovering info
elif event.type == pygame.locals.MOUSEBUTTONDOWN and event.button == 1: # Left click
self.mouse_x, self.mouse_y = event.pos
print(self.mouse_x, self.mouse_y)
left_click = True
elif event.type == pygame.locals.MOUSEBUTTONDOWN and event.button == 3: # Right click
self.mouse_x, self.mouse_y = event.pos
right_click = True
# If user decided to start over, reinitialize game
if self.game_over and right_click:
self.board = self.get_board()
self.revealed_cells = self.generate_data(False)
self.flags = self.generate_data(False)
self.questionmarks = self.generate_data(False)
self.game_over = False
self.timer = Stopwatch()
right_click = False
# TODO tweak spacing on text
if self.game_over:
self.timer.pause()
score = self.timer.get_seconds()
a_x = X_BOARD_MARGIN + ((GRID_WIDTH / 4) * CELL_SIDE_LENGTH)
b_y = Y_BOARD_MARGIN + (Y_BOARD_MARGIN / 4) + (GRID_HEIGHT * CELL_SIDE_LENGTH) + (GRID_HEIGHT * CELL_MARGIN)
font = pygame.font.SysFont("times new roman", 25)
if win:
label = font.render('Congratulations, you won!', 1, GREEN)
SURFACE.blit(label, (a_x - 75, b_y))
label = font.render('Score: ' + str(score), 1, GREEN)
SURFACE.blit(label, (a_x + 200, b_y))
else:
label = font.render('GAME OVER', 1, RED)
SURFACE.blit(label, (a_x + 10, b_y))
label = font.render('Press RIGHT mouse button', 1, YELLOW)
SURFACE.blit(label, (a_x - 50, b_y + 25))
cell_x, cell_y = self.get_cell_at_pixel(self.mouse_x, self.mouse_y)
if cell_x is not None and cell_y is not None: # If mouse is hovering over a cell during mouse event
# Highlight cell
if not self.revealed_cells[cell_x][cell_y] and not self.game_over:
self.highlight_cell(cell_x, cell_y)
# Digging somewhere
if not self.revealed_cells[cell_x][cell_y] and left_click and not self.game_over:
# So you can't accidentally click a flagged/question mark space
if not self.flags[cell_x][cell_y] and not self.questionmarks[cell_x][cell_y]:
self.flags[cell_x][cell_y] = False
if self.board[cell_x][cell_y][0] == MINE: # If you dig a mine, reveal all cells & game over
self.revealed_cells = self.generate_data(True)
self.game_over = True
elif self.board[cell_x][cell_y][0] == CLEAR: # If you dig a clear cell, reveal that cell
self.reveal_cells(cell_x, cell_y, self.board, self.revealed_cells, self.flags, self.questionmarks)
else:
self.revealed_cells[cell_x][cell_y] = True # Set the cell as revealed
# Redraw board after mouse event
self.draw_board(self.board, self.revealed_cells, self.flags, self.questionmarks)
# Placing a flag- if flag already there, change flag to question mark.
# If question mark already there, turn to nothing. If nothing there, turn on flag
if not self.revealed_cells[cell_x][cell_y] and right_click and not self.game_over:
if self.flags[cell_x][cell_y]:
self.flags[cell_x][cell_y] = False
self.questionmarks[cell_x][cell_y] = True
elif self.questionmarks[cell_x][cell_y]:
self.questionmarks[cell_x][cell_y] = False
self.flags[cell_x][cell_y] = False
else:
self.flags[cell_x][cell_y] = True
self.questionmarks[cell_x][cell_y] = False
# Flag is drawn in this method call
self.draw_board(self.board, self.revealed_cells, self.flags, self.questionmarks)
# This block decides whether or not the player has won yet after a mouse event
win = True
for x in range(GRID_WIDTH): # If a cell is a mine and not flagged, or if a cell is clear
for y in range(GRID_HEIGHT): # but not revealed, then the game is not yet over
if (self.board[x][y][0] == MINE and not self.flags[x][y]) or (
self.board[x][y][0] != MINE and not self.revealed_cells[x][y]):
win = False
if win:
self.game_over = True
# Redraw the screen and wait for clock tick
pygame.display.update()
CLOCK.tick(FPS)
@staticmethod
def get_board():
icons = []
mines = 0
# Bottom of board is made of only mines and clear cells, which is then selectively covered for gameplay
# Making randomized array
for x in range(GRID_WIDTH):
for y in range(GRID_HEIGHT):
if mines < NUM_MINES:
icons.append((MINE, RED))
mines += 1
else:
icons.append((CLEAR, WHITE))
random.shuffle(icons)
# Create static under-board
board = []
for x in range(GRID_WIDTH):
column = []
for y in range(GRID_HEIGHT):
column.append(icons[0])
del icons[0] # so the next icon[0] is the one after this
board.append(column)
# This block determines how many mines are around each cell, and adds the number to the board's array
for x in range(GRID_WIDTH):
for y in range(GRID_HEIGHT):
mines = 0
if x > 0:
if y > 0: # If not on the left edge AND not on top edge
if board[x - 1][y - 1][0] == MINE:
mines += 1
if board[x - 1][y][0] == MINE:
mines += 1
if y < GRID_HEIGHT - 1:
if board[x - 1][y + 1][0] == MINE:
mines += 1
if x < GRID_WIDTH - 1:
if y > 0: # If not on right edge AND not on top edge
if board[x + 1][y - 1][0] == MINE:
mines += 1
if board[x + 1][y][0] == MINE:
mines += 1
if y < GRID_HEIGHT - 1:
if board[x + 1][y + 1][0] == MINE:
mines += 1
                if y > 0:  # Check the cell directly above (any column), if not on top edge
if board[x][y - 1][0] == MINE:
mines += 1
if y < GRID_HEIGHT - 1: # If not on riht or left edge AND on bottom edge
if board[x][y + 1][0] == MINE:
mines += 1
# If the cell is clear and there are mines around it, add the number of mines to board array
if board[x][y][0] != MINE:
if mines in range(1, 9):
board[x][y] = (str(mines), WHITE)
return board
# Used to show full board on game over & reset board on game start
@staticmethod
def generate_data(val):
clear = []
for i in range(GRID_WIDTH):
clear.append([val] * GRID_HEIGHT)
return clear
# Convert row, column coordinates into x, y pixel coordinates (for drawing shapes)
@staticmethod
def get_top_left_coordinates(row, column):
left = row * (CELL_SIDE_LENGTH + CELL_MARGIN) + X_BOARD_MARGIN
top = column * (CELL_SIDE_LENGTH + CELL_MARGIN) + Y_BOARD_MARGIN
return left, top
# Convert x, y pixel coordinates to row, column coordinates (for mouse hovering)
def get_cell_at_pixel(self, x, y):
for cell_x in range(GRID_WIDTH):
for cell_y in range(GRID_HEIGHT):
left, top = self.get_top_left_coordinates(cell_x, cell_y)
cell_rect = pygame.Rect(left, top, CELL_SIDE_LENGTH, CELL_SIDE_LENGTH)
if cell_rect.collidepoint(x, y): # If currently hovering over a cell
return cell_x, cell_y
return None, None # If not currently hovering over a cell
# Redraws board after mouse event
def draw_board(self, board, revealed, flags, questionmarks):
for cell_x in range(GRID_WIDTH):
for cell_y in range(GRID_HEIGHT):
left, top = self.get_top_left_coordinates(cell_x, cell_y)
# Symbols not added on board creation must be drawn here: "unrevealed" boxes, flags, and question marks
if not revealed[cell_x][cell_y]:
# Draw a gray box over unrevealed cell, so value isn't affected but user can't see the value
pygame.draw.rect(SURFACE, CELL_COLOR, (left, top, CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))
if flags[cell_x][cell_y]:
half = int(CELL_SIDE_LENGTH * 0.5) # Relative point halfway through cell
# top point, bottom left point, bottom right point
pygame.draw.polygon(SURFACE, FLAG_COLOR, [(half + left, top),
(left, top + CELL_SIDE_LENGTH - CELL_MARGIN/2),
(left + CELL_SIDE_LENGTH - CELL_MARGIN/2, top +
CELL_SIDE_LENGTH - CELL_MARGIN/2)])
elif questionmarks[cell_x][cell_y]:
quarter = int(CELL_SIDE_LENGTH * 0.25)
pygame.draw.rect(SURFACE, GRAY, (left, top, CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))
fontsize = int(CELL_SIDE_LENGTH)
font = pygame.font.SysFont("times new roman", fontsize)
label = font.render("?", 1, BLACK)
SURFACE.blit(label, (left + quarter, top))
else: # Draw revealed cells
shape, color = self.get_shape_and_color(board, cell_x, cell_y)
self.draw_icon(shape, color, cell_x, cell_y)
# Draws icon passed to it in the stated cell
def draw_icon(self, shape, color, cell_x, cell_y):
# Relative point of quarter-way through cell
quarter = int(CELL_SIDE_LENGTH * 0.25)
left, top = self.get_top_left_coordinates(cell_x, cell_y) # Drawing of all images starts at top left corner
# Draw the shapes
if shape == CLEAR:
pygame.draw.rect(SURFACE, color, (left, top, CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))
elif shape == MINE:
pygame.draw.ellipse(SURFACE, color, (left, top, CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))
# Flag shape & question mark in draw_board because they are activated via mouse event
else: # Clear with num
pygame.draw.rect(SURFACE, color, (left, top, CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))
fontsize = int(CELL_SIDE_LENGTH)
font = pygame.font.SysFont("times new roman", fontsize)
label = font.render(shape, 1, BLACK) # a cell with number corresponds to shapes "1", "2", etc.
SURFACE.blit(label, (left + quarter, top))
# Returns the shape and color of icon to be created in draw_icon method
@staticmethod
def get_shape_and_color(board, cell_x, cell_y):
# shape value for cell x, y is stored in board[x][y][0], color value in board[x][y][1]
return board[cell_x][cell_y][0], board[cell_x][cell_y][1]
# Draws a box around the cell the mouse is hovering over, 'highlighting' it
def highlight_cell(self, cell_x, cell_y):
left, top = self.get_top_left_coordinates(cell_x, cell_y)
# Changes with cell size, but line width is hard-set at 2px (last argument)
pygame.draw.rect(SURFACE, HIGHLIGHT_COLOR, (left - (CELL_MARGIN / 2), top - (CELL_MARGIN / 2),
CELL_SIDE_LENGTH + CELL_MARGIN, CELL_SIDE_LENGTH + CELL_MARGIN), 2)
# Reveals clear cells next to clear cell the user clicked (and clear cells next to those cells, etc.)
def reveal_cells(self, x, y, board, revealed, flags, questionmarks):
if revealed[x][y]: # If the cell is already revealed, do nothing
return
if flags[x][y]: # If the cell already has a flag on it, do nothing
return
revealed[x][y] = True
if board[x][y][0] != CLEAR:
return
if x > 0:
if y > 0:
self.reveal_cells(x - 1, y - 1, board, revealed, flags, questionmarks)
self.reveal_cells(x - 1, y, board, revealed, flags, questionmarks)
if y < GRID_HEIGHT - 1:
self.reveal_cells(x - 1, y + 1, board, revealed, flags, questionmarks)
if x < GRID_WIDTH - 1:
if y > 0:
self.reveal_cells(x + 1, y - 1, board, revealed, flags, questionmarks)
self.reveal_cells(x + 1, y, board, revealed, flags, questionmarks)
if y < GRID_HEIGHT - 1:
self.reveal_cells(x + 1, y + 1, board, revealed, flags, questionmarks)
if y > 0:
self.reveal_cells(x, y - 1, board, revealed, flags, questionmarks)
if y < GRID_HEIGHT - 1:
self.reveal_cells(x, y + 1, board, revealed, flags, questionmarks)
@staticmethod
def create_menu():
font = pygame.font.SysFont("times new roman", 20)
label = font.render(" High scores", 1, BLACK)
pygame.draw.rect(SURFACE, GRAY, (500, 125, 105, 50)) # view high scores
SURFACE.blit(label, (500, 135))
class Stopwatch:
def __init__(self):
self.seconds = 0
self.running = False
self.latest_time = None
def start(self):
if not self.running:
self.running = True
self.latest_time = time.time()
def get_seconds(self):
t1 = self.seconds
if self.running:
t1 += time.time() - self.latest_time
return int(t1)
def pause(self):
if self.running:
self.running = False
self.seconds += time.time() - self.latest_time
g = Game()
g.main()
| [
10,
16,
17,
21,
22
] |
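Game.reveal_cells in the record above recurses once per neighbouring cell, so a large, mostly clear board can exceed Python's default recursion limit (about 1000 frames). A minimal iterative flood-fill sketch with an explicit queue -- standalone with a toy grid and its own CLEAR constant, deliberately not wired into the Game class:

# Iterative flood fill equivalent to Game.reveal_cells, avoiding deep
# recursion on large boards. Standalone sketch with a toy 4x4 grid.
from collections import deque

CLEAR = 'clear'

def reveal_cells_iterative(x, y, board, revealed, flags, width, height):
    queue = deque([(x, y)])
    while queue:
        cx, cy = queue.popleft()
        if revealed[cx][cy] or flags[cx][cy]:
            continue
        revealed[cx][cy] = True
        if board[cx][cy][0] != CLEAR:
            continue  # numbered/mine cells stop the flood, as in the original
        for dx in (-1, 0, 1):
            for dy in (-1, 0, 1):
                nx, ny = cx + dx, cy + dy
                if 0 <= nx < width and 0 <= ny < height:
                    queue.append((nx, ny))

board = [[(CLEAR, 'white')] * 4 for _ in range(4)]
board[3][3] = ('mine', 'red')
revealed = [[False] * 4 for _ in range(4)]
flags = [[False] * 4 for _ in range(4)]
reveal_cells_iterative(0, 0, board, revealed, flags, 4, 4)
print(sum(cell for row in revealed for cell in row))  # -> 16, all cells revealed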
1,963 | 1844cfb3e174454e0e95d91e4e55679caddcd56e | <mask token>
| from . import common_wizard
| null | null | null | [
0,
1
] |
1,964 | f15ce7cec032ace65604771fa56e3d9969c98209 | <mask token>
| <mask token>
class Migration(migrations.Migration):
<mask token>
<mask token>
<mask token>
| <mask token>
class Migration(migrations.Migration):
initial = True
dependencies = []
operations = [migrations.CreateModel(name='Proceso', fields=[('id',
models.AutoField(auto_created=True, primary_key=True, serialize=
False, verbose_name='ID')), ('name', models.CharField(max_length=
128, verbose_name='Nombre')), ('description', models.CharField(
max_length=256, verbose_name='Descripción')), ('deleted', models.
BooleanField(default=False))])]
| from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = []
operations = [migrations.CreateModel(name='Proceso', fields=[('id',
models.AutoField(auto_created=True, primary_key=True, serialize=
False, verbose_name='ID')), ('name', models.CharField(max_length=
128, verbose_name='Nombre')), ('description', models.CharField(
max_length=256, verbose_name='Descripción')), ('deleted', models.
BooleanField(default=False))])]
| # -*- coding: utf-8 -*-
# Generated by Django 1.11.10 on 2018-08-03 10:25
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Proceso',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=128, verbose_name='Nombre')),
('description', models.CharField(max_length=256, verbose_name='Descripci\xf3n')),
('deleted', models.BooleanField(default=False)),
],
),
]
| [
0,
1,
2,
3,
4
] |
1,965 | 9eef202a42bfc10b2f52d1b9153d664c5046c13f | <mask token>
class CB030Ticker(Device):
def __init__(self, args, **options):
super().__init__(args=args, name='CB030Ticker', required_options=[
'address'], **options)
self.size = 4096
self._tick_cycles = int(self.emu.cycle_rate / 100)
self.reset()
def reset(self):
self._stop()
self._tick_fired = False
def access(self, operation, offset, size, value):
if offset < 2048:
self._stop()
else:
self._start()
def _stop(self):
self.callback_cancel('tick')
self._ticker_on = False
<mask token>
def _tick(self):
if self._ticker_on:
self._tick_fired = True
self.assert_ipl()
def get_vector(self):
if self._tick_fired:
self._tick_fired = False
return M68K_IRQ_AUTOVECTOR
return M68K_IRQ_SPURIOUS
<mask token>
| <mask token>
class CB030Remap(Device):
def __init__(self, args, **options):
super().__init__(args=args, name='CB030Remap', required_options=[
'address'], **options)
self.size = 4096
self._did_remap = False
self._dram_size = args.dram_size
def access(self, operation, offset, size, value):
if not self._did_remap:
self.emu.remove_memory(base=0)
            self.emu.add_memory(base=0, size=self._dram_size * 1024 * 1024)
            self._did_remap = True  # mark done so DRAM is mapped only once
        return 0
class CB030Ticker(Device):
def __init__(self, args, **options):
super().__init__(args=args, name='CB030Ticker', required_options=[
'address'], **options)
self.size = 4096
self._tick_cycles = int(self.emu.cycle_rate / 100)
self.reset()
def reset(self):
self._stop()
self._tick_fired = False
def access(self, operation, offset, size, value):
if offset < 2048:
self._stop()
else:
self._start()
def _stop(self):
self.callback_cancel('tick')
self._ticker_on = False
def _start(self):
if not self._ticker_on:
self.callback_every(self._tick_cycles, 'tick', self._tick)
self._ticker_on = True
def _tick(self):
if self._ticker_on:
self._tick_fired = True
self.assert_ipl()
def get_vector(self):
if self._tick_fired:
self._tick_fired = False
return M68K_IRQ_AUTOVECTOR
return M68K_IRQ_SPURIOUS
<mask token>
| <mask token>
def add_arguments(parser):
parser.add_argument('--rom', type=str, help='ROM image')
parser.add_argument('--dram-size', type=int, default=16, help=
'DRAM size; boards may have 16, 64 or 128M')
parser.add_argument('--cf-width', type=int, default=8, help=
'CompactFlash interface width, 8 or 16')
CompactFlash.add_arguments(parser)
MC68681.add_arguments(parser)
class CB030Remap(Device):
def __init__(self, args, **options):
super().__init__(args=args, name='CB030Remap', required_options=[
'address'], **options)
self.size = 4096
self._did_remap = False
self._dram_size = args.dram_size
def access(self, operation, offset, size, value):
if not self._did_remap:
self.emu.remove_memory(base=0)
self.emu.add_memory(base=0, size=self._dram_size * 1024 * 1024)
return 0
class CB030Ticker(Device):
def __init__(self, args, **options):
super().__init__(args=args, name='CB030Ticker', required_options=[
'address'], **options)
self.size = 4096
self._tick_cycles = int(self.emu.cycle_rate / 100)
self.reset()
def reset(self):
self._stop()
self._tick_fired = False
def access(self, operation, offset, size, value):
if offset < 2048:
self._stop()
else:
self._start()
def _stop(self):
self.callback_cancel('tick')
self._ticker_on = False
def _start(self):
if not self._ticker_on:
self.callback_every(self._tick_cycles, 'tick', self._tick)
self._ticker_on = True
def _tick(self):
if self._ticker_on:
self._tick_fired = True
self.assert_ipl()
def get_vector(self):
if self._tick_fired:
self._tick_fired = False
return M68K_IRQ_AUTOVECTOR
return M68K_IRQ_SPURIOUS
def configure(args):
"""create and configure an emulator"""
emu = Emulator(args, cpu='68030', frequency=24 * 1000 * 1000)
emu.add_memory(base=0, size=512 * 1024, writable=False, from_file=args.rom)
emu.add_memory(base=4261412864, size=512 * 1024, writable=False,
from_file=args.rom)
emu.add_device(args, MC68681, address=4294963200, interrupt=m68k.IRQ_2,
register_arrangement='16-bit-doubled')
emu.add_device(args, CompactFlash, address=4294959104,
register_arrangement='8-bit' if args.cf_width == 8 else '16-bit')
emu.add_device(args, CB030Remap, address=4294934528)
emu.add_device(args, CB030Ticker, address=4294938624, interrupt=m68k.IRQ_6)
return emu
| from emulator import Emulator
from device import Device
from devices.compactflash import CompactFlash
from devices.mc68681 import MC68681
from musashi import m68k
def add_arguments(parser):
parser.add_argument('--rom', type=str, help='ROM image')
parser.add_argument('--dram-size', type=int, default=16, help=
'DRAM size; boards may have 16, 64 or 128M')
parser.add_argument('--cf-width', type=int, default=8, help=
'CompactFlash interface width, 8 or 16')
CompactFlash.add_arguments(parser)
MC68681.add_arguments(parser)
class CB030Remap(Device):
def __init__(self, args, **options):
super().__init__(args=args, name='CB030Remap', required_options=[
'address'], **options)
self.size = 4096
self._did_remap = False
self._dram_size = args.dram_size
def access(self, operation, offset, size, value):
if not self._did_remap:
self.emu.remove_memory(base=0)
self.emu.add_memory(base=0, size=self._dram_size * 1024 * 1024)
return 0
class CB030Ticker(Device):
def __init__(self, args, **options):
super().__init__(args=args, name='CB030Ticker', required_options=[
'address'], **options)
self.size = 4096
self._tick_cycles = int(self.emu.cycle_rate / 100)
self.reset()
def reset(self):
self._stop()
self._tick_fired = False
def access(self, operation, offset, size, value):
if offset < 2048:
self._stop()
else:
self._start()
def _stop(self):
self.callback_cancel('tick')
self._ticker_on = False
def _start(self):
if not self._ticker_on:
self.callback_every(self._tick_cycles, 'tick', self._tick)
self._ticker_on = True
def _tick(self):
if self._ticker_on:
self._tick_fired = True
self.assert_ipl()
def get_vector(self):
if self._tick_fired:
self._tick_fired = False
return M68K_IRQ_AUTOVECTOR
return M68K_IRQ_SPURIOUS
def configure(args):
"""create and configure an emulator"""
emu = Emulator(args, cpu='68030', frequency=24 * 1000 * 1000)
emu.add_memory(base=0, size=512 * 1024, writable=False, from_file=args.rom)
emu.add_memory(base=4261412864, size=512 * 1024, writable=False,
from_file=args.rom)
emu.add_device(args, MC68681, address=4294963200, interrupt=m68k.IRQ_2,
register_arrangement='16-bit-doubled')
emu.add_device(args, CompactFlash, address=4294959104,
register_arrangement='8-bit' if args.cf_width == 8 else '16-bit')
emu.add_device(args, CB030Remap, address=4294934528)
emu.add_device(args, CB030Ticker, address=4294938624, interrupt=m68k.IRQ_6)
return emu
| from emulator import Emulator
from device import Device
from devices.compactflash import CompactFlash
from devices.mc68681 import MC68681
from musashi import m68k
def add_arguments(parser):
parser.add_argument('--rom',
type=str,
help='ROM image')
parser.add_argument('--dram-size',
type=int,
default=16,
help='DRAM size; boards may have 16, 64 or 128M')
parser.add_argument('--cf-width',
type=int,
default=8,
help='CompactFlash interface width, 8 or 16')
CompactFlash.add_arguments(parser)
MC68681.add_arguments(parser)
class CB030Remap(Device):
def __init__(self, args, **options):
super().__init__(args=args,
name='CB030Remap',
required_options=['address'],
**options)
# no registers, just a 4k aperture
self.size = 0x1000
self._did_remap = False
self._dram_size = args.dram_size
def access(self, operation, offset, size, value):
if not self._did_remap:
# remove the low alias of the EEPROM
self.emu.remove_memory(base=0)
# and add the previously-masked DRAM
self.emu.add_memory(base=0x0000000, size=self._dram_size * 1024 * 1024)
return 0
class CB030Ticker(Device):
def __init__(self, args, **options):
super().__init__(args=args,
name='CB030Ticker',
required_options=['address'],
**options)
# no registers, just a 4k aperture
self.size = 0x1000
# core clock @ 24MHz, 100Hz tick rate
self._tick_cycles = int(self.emu.cycle_rate / 100)
self.reset()
def reset(self):
self._stop()
self._tick_fired = False
def access(self, operation, offset, size, value):
if offset < 0x800:
self._stop()
else:
self._start()
def _stop(self):
self.callback_cancel('tick')
self._ticker_on = False
def _start(self):
if not self._ticker_on:
self.callback_every(self._tick_cycles, 'tick', self._tick)
self._ticker_on = True
def _tick(self):
if self._ticker_on:
self._tick_fired = True
self.assert_ipl()
def get_vector(self):
if self._tick_fired:
self._tick_fired = False
return M68K_IRQ_AUTOVECTOR
return M68K_IRQ_SPURIOUS
def configure(args):
"""create and configure an emulator"""
emu = Emulator(args,
cpu='68030',
frequency=24 * 1000 * 1000)
# initially only the EEPROM exists; aliased at 0 all the way up to 0xfe000000
# we only map the low and high aliases, as the intermediates aren't interesting
emu.add_memory(base=0, size=512 * 1024, writable=False, from_file=args.rom)
emu.add_memory(base=0xfe000000, size=512 * 1024, writable=False, from_file=args.rom)
emu.add_device(args,
MC68681,
address=0xfffff000,
interrupt=m68k.IRQ_2,
register_arrangement='16-bit-doubled')
emu.add_device(args,
CompactFlash,
address=0xffffe000,
register_arrangement='8-bit' if args.cf_width == 8 else '16-bit')
emu.add_device(args,
CB030Remap,
address=0xffff8000)
emu.add_device(args,
CB030Ticker,
address=0xffff9000,
interrupt=m68k.IRQ_6)
return emu
| [
7,
11,
13,
14,
15
] |
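A quick sanity check of the ticker period used in the record above (assumption: cycle_rate equals the 24 MHz frequency passed to Emulator in configure()):

cycle_rate = 24 * 1000 * 1000      # CPU frequency from configure()
tick_cycles = cycle_rate // 100    # 100 Hz tick, as in CB030Ticker.__init__
print(tick_cycles)                 # 240000 cycles between tick callbacks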
1,966 | f9b53df799b3e6b71282c84a625ea5915ccb8014 | <mask token>
| <mask token>
def square_root(n):
start = 1
end = n
if n == 0 or n == 1:
return n
while start <= end:
mid = (start + end) // 2
if mid * mid == n:
return mid
elif mid * mid < n:
start = mid + 1
else:
end = mid - 1
| """
This is a big integer challenge. You are given an integer which is a **perfect
square**. It is composed of 40 or more digits. Compose a function which will
find the exact square root of this integer.
### Examples
square_root(152415787532388367501905199875019052100) ➞ 12345678901234567890
square_root(10203040506070809101112131413121110090807060504030201) ➞ 101010101010101010101010101
### Notes
* All test cases are perfect squares.
* A **good fortune** bonus awaits you if you are able to complete this challenge without importing anything.
"""
def square_root(n):
start = 1
end = n
if n == 0 or n == 1:
return n
while start <= end:
mid = (start+end)//2
if mid*mid == n:
return mid
elif mid*mid < n:
start = mid+1
else:
end = mid-1
| null | null | [
0,
1,
2
] |
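Since Python 3.8 the standard library computes the same exact integer square root directly; a minimal check against the challenge's first example:

import math

n = 152415787532388367501905199875019052100
root = math.isqrt(n)       # exact floor square root for arbitrary-size ints
assert root * root == n    # a perfect square, so the floor is the exact root
print(root)                # 12345678901234567890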
1,967 | d21b89285d4b4c73a08bda746cea31b5a13d1050 | <mask token>
| <mask token>
urlpatterns = [path('product', views.ProductCreateAndList.as_view()), path(
'product/<int:pk>', views.ProductRetrieve.as_view())]
| from django.urls import path
from . import views
urlpatterns = [path('product', views.ProductCreateAndList.as_view()), path(
'product/<int:pk>', views.ProductRetrieve.as_view())]
| from django.urls import path
from . import views
urlpatterns = [
path('product', views.ProductCreateAndList.as_view()),
path('product/<int:pk>', views.ProductRetrieve.as_view()),
]
| null | [
0,
1,
2,
3
] |
1,968 | 6f3de70267956a6c7c3c5b261cf591051de4c548 | <mask token>
| <mask token>
def do_pack():
timestamp = datetime.utcnow().strftime('%Y%m%d%H%M%S')
archive = 'web_static_' + timestamp + '.tgz'
local('mkdir -p versions')
local('tar -cvzf versions/{} web_static/'.format(archive))
my_file = Path('versions/{}'.format(archive))
if my_file.is_file():
return my_file
else:
return None
| <mask token>
from datetime import datetime, time
from fabric.api import *
from pathlib import Path
def do_pack():
timestamp = datetime.utcnow().strftime('%Y%m%d%H%M%S')
archive = 'web_static_' + timestamp + '.tgz'
local('mkdir -p versions')
local('tar -cvzf versions/{} web_static/'.format(archive))
my_file = Path('versions/{}'.format(archive))
if my_file.is_file():
return my_file
else:
return None
| #!/usr/bin/python3
"""
This module contains a Fabric function definition.
"""
from datetime import datetime, time
from fabric.api import *
from pathlib import Path
def do_pack():
timestamp = datetime.utcnow().strftime("%Y%m%d%H%M%S")
archive = "web_static_" + timestamp + ".tgz"
local("mkdir -p versions")
local("tar -cvzf versions/{} web_static/".format(archive))
my_file = Path("versions/{}".format(archive))
if my_file.is_file():
return my_file
else:
return None
| null | [
0,
1,
2,
3
] |
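The same archive can be produced without Fabric; a minimal standard-library sketch (assumption: run from the project root with a web_static/ directory present):

import tarfile
from datetime import datetime
from pathlib import Path

Path('versions').mkdir(exist_ok=True)
archive = 'versions/web_static_{}.tgz'.format(datetime.utcnow().strftime('%Y%m%d%H%M%S'))
with tarfile.open(archive, 'w:gz') as tar:
    tar.add('web_static')   # same payload as `tar -cvzf ... web_static/`
print(archive)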
1,969 | 4791b210f328dff5d48ff5afc381a98a5a1a2b7b | <mask token>
def get_channel_urls(url):
wb_data = requests.get(url)
wb_data.encoding = 'utf-8'
soup = BeautifulSoup(wb_data.text, 'lxml')
links = soup.select('body > div.navWrap.clearfix > div > ul > li > a')
for link in links:
page_url = url_host + link.get('href')
url_list1.insert_one({'url': page_url})
print(page_url)
<mask token>
| <mask token>
def get_channel_urls(url):
wb_data = requests.get(url)
wb_data.encoding = 'utf-8'
soup = BeautifulSoup(wb_data.text, 'lxml')
links = soup.select('body > div.navWrap.clearfix > div > ul > li > a')
for link in links:
page_url = url_host + link.get('href')
url_list1.insert_one({'url': page_url})
print(page_url)
get_channel_urls(start_url)
<mask token>
| <mask token>
client = pymongo.MongoClient('localhost', 27017)
ku = client['ku']
url_list1 = ku['url_list_index']
start_url = 'http://news.ccsu.cn/index.htm'
url_host = 'http://news.ccsu.cn/'
def get_channel_urls(url):
wb_data = requests.get(url)
wb_data.encoding = 'utf-8'
soup = BeautifulSoup(wb_data.text, 'lxml')
links = soup.select('body > div.navWrap.clearfix > div > ul > li > a')
for link in links:
page_url = url_host + link.get('href')
url_list1.insert_one({'url': page_url})
print(page_url)
get_channel_urls(start_url)
ccsu_list = """
http://news.ccsu.cn/index.htm
http://news.ccsu.cn/zdyw.htm
http://news.ccsu.cn/xysx.htm
http://news.ccsu.cn/mtjj.htm
http://news.ccsu.cn/xywh.htm
http://news.ccsu.cn/hdzt.htm
http://news.ccsu.cn/zdrw.htm
http://news.ccsu.cn/xbzx.htm
http://news.ccsu.cn/tzgg.htm
http://news.ccsu.cn/zlxz.htm
http://news.ccsu.cn/jxzd.htm
"""
| from bs4 import BeautifulSoup
import requests
import pymongo
client = pymongo.MongoClient('localhost', 27017)
ku = client['ku']
url_list1 = ku['url_list_index']
start_url = 'http://news.ccsu.cn/index.htm'
url_host = 'http://news.ccsu.cn/'
def get_channel_urls(url):
wb_data = requests.get(url)
wb_data.encoding = 'utf-8'
soup = BeautifulSoup(wb_data.text, 'lxml')
links = soup.select('body > div.navWrap.clearfix > div > ul > li > a')
for link in links:
page_url = url_host + link.get('href')
url_list1.insert_one({'url': page_url})
print(page_url)
get_channel_urls(start_url)
ccsu_list = """
http://news.ccsu.cn/index.htm
http://news.ccsu.cn/zdyw.htm
http://news.ccsu.cn/xysx.htm
http://news.ccsu.cn/mtjj.htm
http://news.ccsu.cn/xywh.htm
http://news.ccsu.cn/hdzt.htm
http://news.ccsu.cn/zdrw.htm
http://news.ccsu.cn/xbzx.htm
http://news.ccsu.cn/tzgg.htm
http://news.ccsu.cn/zlxz.htm
http://news.ccsu.cn/jxzd.htm
"""
| from bs4 import BeautifulSoup
import requests
import pymongo
client = pymongo.MongoClient('localhost', 27017)
ku = client['ku']
url_list1 = ku['url_list_index']
start_url="http://news.ccsu.cn/index.htm"
url_host="http://news.ccsu.cn/"
def get_channel_urls(url):
wb_data = requests.get(url)
wb_data.encoding = 'utf-8'
soup = BeautifulSoup(wb_data.text, "lxml")
links= soup.select("body > div.navWrap.clearfix > div > ul > li > a")
#print(links)
for link in links:
page_url =url_host + link.get("href")
url_list1.insert_one({'url': page_url})
print(page_url)
#print(link.text)
get_channel_urls(start_url)
ccsu_list = '''
http://news.ccsu.cn/index.htm
http://news.ccsu.cn/zdyw.htm
http://news.ccsu.cn/xysx.htm
http://news.ccsu.cn/mtjj.htm
http://news.ccsu.cn/xywh.htm
http://news.ccsu.cn/hdzt.htm
http://news.ccsu.cn/zdrw.htm
http://news.ccsu.cn/xbzx.htm
http://news.ccsu.cn/tzgg.htm
http://news.ccsu.cn/zlxz.htm
http://news.ccsu.cn/jxzd.htm
''' | [
1,
2,
3,
4,
5
] |
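The core of the scraper above is a CSS selector plus URL join; a self-contained demo on inline HTML (html.parser is used so no lxml install is needed, and the snippet markup is a hypothetical stand-in for the real page):

from bs4 import BeautifulSoup

html = '<div class="navWrap clearfix"><div><ul><li><a href="zdyw.htm">news</a></li></ul></div></div>'
soup = BeautifulSoup(html, 'html.parser')
for link in soup.select('div.navWrap.clearfix > div > ul > li > a'):
    print('http://news.ccsu.cn/' + link.get('href'))   # http://news.ccsu.cn/zdyw.htm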
1,970 | f98120d191e9e4b92984a6b59b25b1331b5d8c3a | <mask token>
| <mask token>
for i in range(1, pessoas + 1, 1):
    tempo = int(input('Digite o tempo:'))
    if i == 1:
        tempo1 = tempo
    elif i == pessoas:
        f = tempo + 10
<mask token>
print(x)
| pessoas = int(input('Digite o numero de pessoas que passa pela escada rolante:')
    )
for i in range(1, pessoas + 1, 1):
    tempo = int(input('Digite o tempo:'))
    if i == 1:
        tempo1 = tempo
    elif i == pessoas:
        f = tempo + 10
x = f - tempo1
print(x)
| # -*- coding: utf-8 -*-
pessoas=int(input('Digite o numero de pessoas que passa pela escada rolante:'))
for i in range(1,pessoas+1,1):
    tempo=int(input('Digite o tempo:'))
    if i==1:
        tempo1=tempo
    elif i==pessoas:
        f=tempo+10
x=f-tempo1
print(x) | null | [
0,
1,
2,
3
] |
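A cleaned-up sketch of the same exercise (assumption, taken from the logic above: the answer is the last rider's start time plus the 10-unit ride, minus the first rider's start time):

pessoas = int(input('Digite o numero de pessoas: '))
tempos = [int(input('Digite o tempo: ')) for _ in range(pessoas)]
print(tempos[-1] + 10 - tempos[0])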
1,971 | b4787d65fb8adf5dc6a99c1a13922c8f9acc2087 | <mask token>
| <mask token>
class BackendSerializer(serializers.ModelSerializer):
class Meta:
model = Backend
fields = '__all__'
| from rest_framework import serializers
from .models import Backend
class BackendSerializer(serializers.ModelSerializer):
class Meta:
model = Backend
fields = '__all__'
| null | null | [
0,
1,
2
] |
1,972 | e5cc556d4258ef5c85f7bc5149cdd33471493bdb | <mask token>
| <mask token>
for en in list:
ef = re.sub('en', 'ef', en)
efAli = re.sub('en', 'efAli', en)
cmd = 'proc2d %s %s_filt.mrc apix=1.501 lp=20' % (ef, ef[:-4])
subprocess.Popen(cmd, shell=True).wait()
cmd = 'alignhuge %s_filt.mrc %s %s' % (ef[:-4], en, efAli)
subprocess.Popen(cmd, shell=True).wait()
cmd = 'rm %s_filt.mrc %s' % (ef[:-4], efAli)
subprocess.Popen(cmd, shell=True).wait()
| <mask token>
list = glob.glob('*en.mrc')
for en in list:
ef = re.sub('en', 'ef', en)
efAli = re.sub('en', 'efAli', en)
cmd = 'proc2d %s %s_filt.mrc apix=1.501 lp=20' % (ef, ef[:-4])
subprocess.Popen(cmd, shell=True).wait()
cmd = 'alignhuge %s_filt.mrc %s %s' % (ef[:-4], en, efAli)
subprocess.Popen(cmd, shell=True).wait()
cmd = 'rm %s_filt.mrc %s' % (ef[:-4], efAli)
subprocess.Popen(cmd, shell=True).wait()
| import os
import shutil
import glob
import re
import subprocess
list = glob.glob('*en.mrc')
for en in list:
ef = re.sub('en', 'ef', en)
efAli = re.sub('en', 'efAli', en)
cmd = 'proc2d %s %s_filt.mrc apix=1.501 lp=20' % (ef, ef[:-4])
subprocess.Popen(cmd, shell=True).wait()
cmd = 'alignhuge %s_filt.mrc %s %s' % (ef[:-4], en, efAli)
subprocess.Popen(cmd, shell=True).wait()
cmd = 'rm %s_filt.mrc %s' % (ef[:-4], efAli)
subprocess.Popen(cmd, shell=True).wait()
| #!/usr/bin/env python
import os
import shutil
import glob
import re
import subprocess
list = glob.glob("*en.mrc")
for en in list:
ef = re.sub("en","ef",en)
efAli = re.sub("en","efAli",en)
cmd='proc2d %s %s_filt.mrc apix=1.501 lp=20' %(ef,ef[:-4])
subprocess.Popen(cmd,shell=True).wait()
cmd="alignhuge %s_filt.mrc %s %s" %(ef[:-4],en,efAli)
subprocess.Popen(cmd,shell=True).wait()
cmd='rm %s_filt.mrc %s' %(ef[:-4],efAli)
subprocess.Popen(cmd,shell=True).wait()
| [
0,
1,
2,
3,
4
] |
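The filename mapping in the record above uses re.sub('en', 'ef', ...), which replaces every occurrence of 'en', not just the suffix; a small demonstration of the pattern and its pitfall (file names here are hypothetical):

import re

print(re.sub('en', 'ef', 'tomo01_en.mrc'))        # tomo01_ef.mrc  (intended)
print(re.sub('en', 'ef', 'lens02_en.mrc'))        # lefs02_ef.mrc  (mangled!)
print(re.sub(r'_en\.', '_ef.', 'lens02_en.mrc'))  # lens02_ef.mrc  (anchored fix)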
1,973 | f4d4be174bed2704c0ad12eea2f0cd64eaaa0aaa | <mask token>
def gen_ft_parser():
ft_parser = argparse.ArgumentParser(description=
'Generate a Character-Feature Translation Table')
ft_parser.add_argument('alphabet_file', metavar='alphabet_file', type=
str, help=
'A file contianing all the characters that will appear in the translation table.'
)
ft_parser.add_argument('save_file', metavar='save_path', type=str, help
='The feature table filename.')
return ft_parser
def construct_alphabet(alpha_string):
symbols = set(alpha_string)
alphabet = ''.join(sorted(c for c in string.printable if c in symbols))
return numpy.array(list(alphabet))
def load_alphabet(alphabet_file):
with open(alphabet_file) as alphabet:
alphabet = alphabet.read(100000).replace('\n', ' ')
return construct_alphabet(alphabet)
def gen_row(c, key):
row = [False] * (len(key) + 1)
row[key[c.lower()]] = True
row[-1] = c.isupper()
return row
<mask token>
| <mask token>
def gen_ft_parser():
ft_parser = argparse.ArgumentParser(description=
'Generate a Character-Feature Translation Table')
ft_parser.add_argument('alphabet_file', metavar='alphabet_file', type=
str, help=
        'A file containing all the characters that will appear in the translation table.'
)
ft_parser.add_argument('save_file', metavar='save_path', type=str, help
='The feature table filename.')
return ft_parser
def construct_alphabet(alpha_string):
symbols = set(alpha_string)
alphabet = ''.join(sorted(c for c in string.printable if c in symbols))
return numpy.array(list(alphabet))
def load_alphabet(alphabet_file):
with open(alphabet_file) as alphabet:
alphabet = alphabet.read(100000).replace('\n', ' ')
return construct_alphabet(alphabet)
def gen_row(c, key):
row = [False] * (len(key) + 1)
row[key[c.lower()]] = True
row[-1] = c.isupper()
return row
def build_table(alphabet):
code = ''.join(sorted(set(''.join(alphabet).lower())))
key = {c: i for i, c in enumerate(code)}
table = numpy.zeros((len(alphabet), len(key) + 1))
for i, c in enumerate(alphabet):
table[i] = gen_row(c, key)
return table
def main(args):
table = build_table(load_alphabet(args.alphabet_file))
numpy.save(args.save_file, table)
<mask token>
| <mask token>
def gen_ft_parser():
ft_parser = argparse.ArgumentParser(description=
'Generate a Character-Feature Translation Table')
ft_parser.add_argument('alphabet_file', metavar='alphabet_file', type=
str, help=
        'A file containing all the characters that will appear in the translation table.'
)
ft_parser.add_argument('save_file', metavar='save_path', type=str, help
='The feature table filename.')
return ft_parser
def construct_alphabet(alpha_string):
symbols = set(alpha_string)
alphabet = ''.join(sorted(c for c in string.printable if c in symbols))
return numpy.array(list(alphabet))
def load_alphabet(alphabet_file):
with open(alphabet_file) as alphabet:
alphabet = alphabet.read(100000).replace('\n', ' ')
return construct_alphabet(alphabet)
def gen_row(c, key):
row = [False] * (len(key) + 1)
row[key[c.lower()]] = True
row[-1] = c.isupper()
return row
def build_table(alphabet):
code = ''.join(sorted(set(''.join(alphabet).lower())))
key = {c: i for i, c in enumerate(code)}
table = numpy.zeros((len(alphabet), len(key) + 1))
for i, c in enumerate(alphabet):
table[i] = gen_row(c, key)
return table
def main(args):
table = build_table(load_alphabet(args.alphabet_file))
numpy.save(args.save_file, table)
if __name__ == '__main__':
main(gen_ft_parser().parse_args())
| import argparse
import string
import numpy
def gen_ft_parser():
ft_parser = argparse.ArgumentParser(description=
'Generate a Character-Feature Translation Table')
ft_parser.add_argument('alphabet_file', metavar='alphabet_file', type=
str, help=
        'A file containing all the characters that will appear in the translation table.'
)
ft_parser.add_argument('save_file', metavar='save_path', type=str, help
='The feature table filename.')
return ft_parser
def construct_alphabet(alpha_string):
symbols = set(alpha_string)
alphabet = ''.join(sorted(c for c in string.printable if c in symbols))
return numpy.array(list(alphabet))
def load_alphabet(alphabet_file):
with open(alphabet_file) as alphabet:
alphabet = alphabet.read(100000).replace('\n', ' ')
return construct_alphabet(alphabet)
def gen_row(c, key):
row = [False] * (len(key) + 1)
row[key[c.lower()]] = True
row[-1] = c.isupper()
return row
def build_table(alphabet):
code = ''.join(sorted(set(''.join(alphabet).lower())))
key = {c: i for i, c in enumerate(code)}
table = numpy.zeros((len(alphabet), len(key) + 1))
for i, c in enumerate(alphabet):
table[i] = gen_row(c, key)
return table
def main(args):
table = build_table(load_alphabet(args.alphabet_file))
numpy.save(args.save_file, table)
if __name__ == '__main__':
main(gen_ft_parser().parse_args())
| #!/usr/bin/python
import argparse
import string
import numpy
def gen_ft_parser():
ft_parser = argparse.ArgumentParser(
description='Generate a Character-Feature Translation Table')
ft_parser.add_argument('alphabet_file', metavar='alphabet_file',
                           type=str, help='A file containing all the characters that will '
'appear in the translation table.')
ft_parser.add_argument('save_file', metavar='save_path',
type=str, help='The feature table filename.')
return ft_parser
def construct_alphabet(alpha_string):
symbols = set(alpha_string)
alphabet = ''.join(sorted(c for c in string.printable if c in symbols))
return numpy.array(list(alphabet))
def load_alphabet(alphabet_file):
with open(alphabet_file) as alphabet:
alphabet = alphabet.read(100000).replace('\n', ' ')
return construct_alphabet(alphabet)
def gen_row(c, key):
row = [False] * (len(key) + 1)
row[key[c.lower()]] = True
row[-1] = c.isupper()
return row
def build_table(alphabet):
code = ''.join(sorted(set(''.join(alphabet).lower())))
key = {c:i for i, c in enumerate(code)}
table = numpy.zeros((len(alphabet), len(key) + 1))
for i, c in enumerate(alphabet):
table[i] = gen_row(c, key)
return table
def main(args):
table = build_table(load_alphabet(args.alphabet_file))
numpy.save(args.save_file, table)
if __name__ == "__main__":
main(gen_ft_parser().parse_args())
| [
4,
6,
7,
8,
9
] |
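A tiny worked example of the encoding build_table produces: a one-hot column over the lowercased alphabet plus a trailing is-uppercase flag:

import numpy

alphabet = ['a', 'A', 'b']
code = sorted(set(c.lower() for c in alphabet))       # ['a', 'b']
key = {c: i for i, c in enumerate(code)}
table = numpy.zeros((len(alphabet), len(key) + 1))
for i, c in enumerate(alphabet):
    table[i, key[c.lower()]] = 1        # one-hot letter column
    table[i, -1] = c.isupper()          # case flag
print(table)
# [[1. 0. 0.]
#  [1. 0. 1.]
#  [0. 1. 0.]]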
1,974 | 93b712c60ba4bfa81d967ec59035b6fb7793ce87 | class User:
<mask token>
<mask token>
def greet_user(self):
if self.gender.lower() == 'male':
print('Greetings, Mr. ' + self.last_name.title() + '!')
elif self.gender.lower() == 'female':
print('Greetings, Miss ' + self.last_name.title() + '!')
<mask token>
| class User:
<mask token>
def describe_user(self):
print('The name of the user is ' + self.full_name + '.')
print("The user's gender is " + self.gender + '.')
print('The user is ' + str(self.age) + ' years old.')
def greet_user(self):
if self.gender.lower() == 'male':
print('Greetings, Mr. ' + self.last_name.title() + '!')
elif self.gender.lower() == 'female':
print('Greetings, Miss ' + self.last_name.title() + '!')
<mask token>
| class User:
def __init__(self, first, last, gender, age):
self.first_name = first
self.last_name = last
self.gender = gender
self.age = age
self.full_name = self.first_name + ' ' + self.last_name
def describe_user(self):
print('The name of the user is ' + self.full_name + '.')
print("The user's gender is " + self.gender + '.')
print('The user is ' + str(self.age) + ' years old.')
def greet_user(self):
if self.gender.lower() == 'male':
print('Greetings, Mr. ' + self.last_name.title() + '!')
elif self.gender.lower() == 'female':
print('Greetings, Miss ' + self.last_name.title() + '!')
<mask token>
| class User:
def __init__(self, first, last, gender, age):
self.first_name = first
self.last_name = last
self.gender = gender
self.age = age
self.full_name = self.first_name + ' ' + self.last_name
def describe_user(self):
print('The name of the user is ' + self.full_name + '.')
print("The user's gender is " + self.gender + '.')
print('The user is ' + str(self.age) + ' years old.')
def greet_user(self):
if self.gender.lower() == 'male':
print('Greetings, Mr. ' + self.last_name.title() + '!')
elif self.gender.lower() == 'female':
print('Greetings, Miss ' + self.last_name.title() + '!')
user1 = User('zhichao', 'li', 'male', 27)
user2 = User('juan', 'zhang', 'female', 28)
user3 = User('Tian', 'ZHANG', 'male', 26)
user1.describe_user()
user1.greet_user()
user2.describe_user()
user2.greet_user()
user3.describe_user()
user3.greet_user()
| class User():
def __init__(self, first, last, gender, age):
self.first_name = first
self.last_name = last
self.gender = gender
self.age = age
self.full_name = self.first_name + " " + self.last_name
def describe_user(self):
print("The name of the user is " + self.full_name + ".")
print("The user's gender is " + self.gender + ".")
print("The user is " + str(self.age) + " years old.")
def greet_user(self):
if self.gender.lower() == "male":
print("Greetings, Mr. " + self.last_name.title() + "!")
elif self.gender.lower() == "female":
print("Greetings, Miss " + self.last_name.title() + "!")
user1 = User("zhichao", "li", "male", 27)
user2 = User("juan", "zhang", "female", 28)
user3 = User("Tian", "ZHANG", "male", 26)
user1.describe_user()
user1.greet_user()
user2.describe_user()
user2.greet_user()
user3.describe_user()
user3.greet_user()
| [
2,
3,
4,
6,
7
] |
1,975 | 96e64b715dbfc1c59ba44d608ad2694b165017b5 | <mask token>
| <mask token>
logging.basicConfig(level=logging.DEBUG, format=
'%(asctime)s - %(levelname)s - %(message)s')
<mask token>
| <mask token>
logging.basicConfig(level=logging.DEBUG, format=
'%(asctime)s - %(levelname)s - %(message)s')
q = 'levamisole inhibitor'
p = PaperProcessor(q)
| from paper_processor import PaperProcessor
import logging
logging.basicConfig(level=logging.DEBUG, format=
'%(asctime)s - %(levelname)s - %(message)s')
q = 'levamisole inhibitor'
p = PaperProcessor(q)
| from paper_processor import PaperProcessor
import logging
logging.basicConfig(
level=logging.DEBUG,
format='%(asctime)s - %(levelname)s - %(message)s')
q = "levamisole inhibitor"
p = PaperProcessor(q)
| [
0,
1,
2,
3,
4
] |
1,976 | 1d5db3db319e67e050036e718bbe0c538365d229 | <mask token>
class FitSim(object):
<mask token>
def __init__(self, participant_choice_property='Actions',
participant_reward_property='Rewards', model_fitting_variable=
'ActionProb', task_stimuli_property=None, fit_subset=None,
action_options_property=None, float_error_response_value=1 / 1e+100):
self.participant_choice_property = participant_choice_property
self.participant_reward_property = participant_reward_property
self.model_fitting_variable = model_fitting_variable
self.task_stimuli_property = task_stimuli_property
self.action_options_property = action_options_property
self.float_error_response_value = float_error_response_value
self.fit_subset = fit_subset
self.fit_subset_described = self._preprocess_fit_subset(fit_subset)
self.Name = self.find_name()
self.sim_info = {'Name': self.Name, 'participant_choice_property':
participant_choice_property, 'participant_reward_property':
participant_reward_property, 'task_stimuli_property':
task_stimuli_property, 'action_options_property':
action_options_property, 'model_fitting_variable':
model_fitting_variable, 'float_error_response_value':
float_error_response_value, 'fit_subset': fit_subset}
self.model = None
self.initial_parameter_values = None
self.model_parameter_names = None
self.model_other_properties = None
self.participant_observations = None
self.participant_actions = None
self.participant_rewards = None
def fitness(self, *model_parameters):
"""
Used by a fitter to generate the list of values characterising how well the model parameters describe the
participants actions.
Parameters
----------
model_parameters : list of floats
A list of the parameters used by the model in the order previously defined
Returns
-------
model_performance : list of floats
The choices made by the model that will be used to characterise the quality of the fit.
See Also
--------
fitAlgs.fitSims.FitSim.participant : Fits participant data
fitAlgs.fitAlg.fitAlg : The general fitting class
fitAlgs.fitAlg.fitAlg.fitness : The function that this one is called by
"""
try:
model_instance = self.fitted_model(*model_parameters)
except FloatingPointError:
message = utils.errorResp()
logger = logging.getLogger('Fitter')
logger.warning(
u"""{0}
. Abandoning fitting with parameters: {1} Returning an action choice probability for each trialstep of {2}"""
.format(message, repr(self.get_model_parameters(*
model_parameters)), repr(self.float_error_response_value)))
return np.ones(np.array(self.participant_rewards).shape
) * self.float_error_response_value
except ValueError as e:
logger = logging.getLogger('Fitter')
            logger.warning(
                '{0} in fitted model. Abandoning fitting with parameters: {1} Returning an action choice probability for each trialstep of {2} - {3}, - {4}'
                .format(type(e), repr(self.get_model_parameters(*
                model_parameters)), repr(self.float_error_response_value),
                str(e), e.args))
return np.ones(np.array(self.participant_rewards).shape
) * self.float_error_response_value
model_data = model_instance.returnTaskState()
model_choice_probabilities = model_data[self.model_fitting_variable]
if self.fit_subset_described is None:
model_performance = model_choice_probabilities
else:
model_performance = model_choice_probabilities[self.
fit_subset_described]
if np.isnan(model_performance).any():
logger = logging.getLogger('Fitter')
message = (
'model performance values contain ``Not a Number`` (NaN), i.e. the model had a problem.'
)
logger.warning(message +
'.\n Abandoning fitting with parameters: ' + repr(self.
get_model_parameters(*model_parameters)) +
' Returning an action choice probability for each trialstep of '
+ repr(self.float_error_response_value))
return np.ones(np.array(self.participant_rewards).shape
) * self.float_error_response_value
return model_performance
<mask token>
@staticmethod
def participant_sequence_generation(participant_data, choice_property,
reward_property, stimuli_property, action_options_property):
"""
Finds the stimuli in the participant data and returns formatted observations
Parameters
----------
participant_data : dict
The participant data
choice_property : string
The participant data key of their action choices.
reward_property : string
The participant data key of the participant reward data
stimuli_property : string or None or list of strings
            A list of the keys in participant_data representing participant stimuli
        action_options_property : string or None or list of strings, ints or None
            The name of the key in participant_data where the list of valid actions
can be found. If ``None`` then the action list is considered to
stay constant. If a list then the list will be taken as the list
of actions that can be taken at every trialstep. If the list is
shorter than the number of trialsteps, then it will be considered
to be a list of valid actions for each trialstep.
Returns
-------
participant_sequence : list of three element tuples
Each list element contains the observation, action and feedback for each trial taken
by the participant
"""
actions = participant_data[choice_property]
rewards = participant_data[reward_property]
participant_data_length = len(actions)
partDataShape = None
if stimuli_property is None:
stimuli_data = [None] * participant_data_length
elif isinstance(stimuli_property, str):
stimuli_data = np.array(participant_data[stimuli_property])
partDataShape = stimuli_data.shape
elif isinstance(stimuli_property, list):
if len(stimuli_property) > 1:
stimuli_data = np.array([participant_data[s] for s in
stimuli_property]).T
else:
stimuli_data = participant_data[stimuli_property[0]]
partDataShape = stimuli_data.shape
else:
raise StimuliError('Unknown representation of stimuli')
if partDataShape:
if max(partDataShape) != partDataShape[0]:
stimuli_data = stimuli_data.T
if isinstance(action_options_property, str
) and action_options_property in participant_data:
available_actions = participant_data[action_options_property]
elif action_options_property is None or len(action_options_property
) != participant_data_length:
available_actions = [action_options_property
] * participant_data_length
else:
available_actions = action_options_property
mismatches = [(True if trial_available_actions is not None and
trial_action not in trial_available_actions else False) for
trial_action, trial_available_actions in zip(actions,
available_actions)]
if any(mismatches):
mismatch_actions = [a for a, m in zip(actions, mismatches) if m is
True]
mismatch_available_actions = [a for a, m in zip(
available_actions, mismatches) if m is True]
raise ActionError(
"""An action is chosen that is not listed as available for the trial
{}
{}"""
.format(mismatch_actions, mismatch_available_actions))
observations = [(s, a) for s, a in zip(stimuli_data, available_actions)
]
return observations, actions, rewards
def info(self):
"""
        The dictionary describing the fitter's algorithm chosen
        Returns
        -------
        fitInfo : dict
            The dictionary of fitter class information
"""
return self.sim_info
def find_name(self):
"""
Returns the name of the class
"""
return self.__class__.__name__
def fitted_model(self, *model_parameters):
"""
Simulating a model run with specific parameter values
Parameters
----------
*model_parameters : floats
The model parameters provided in the order defined in the model setup
Returns
-------
model_instance : model.modelTemplate.Model class instance
"""
model_arguments = self.get_model_properties(*model_parameters)
model_instance = self.model(**model_arguments)
model_instance = self._simulation_run(model_instance, self.
participant_observations, self.participant_actions, self.
participant_rewards)
return model_instance
def get_model_properties(self, *model_parameters):
"""
Compiles the kwarg model arguments based on the model_parameters and
previously specified other parameters
Parameters
----------
model_parameters : list of floats
The parameter values in the order extracted from the modelSetup parameter dictionary
Returns
-------
model_properties : dict
The kwarg model arguments
"""
model_properties = self.get_model_parameters(*model_parameters)
for k, v in self.model_other_properties.items():
model_properties[k] = copy.deepcopy(v)
return model_properties
def get_model_parameters(self, *model_parameters):
"""
Compiles the model parameter arguments based on the model parameters
Parameters
----------
model_parameters : list of floats
The parameter values in the order extracted from the modelSetup parameter dictionary
Returns
-------
parameters : dict
The kwarg model parameter arguments
"""
parameters = {k: v for k, v in zip(self.model_parameter_names,
model_parameters)}
return parameters
@staticmethod
def _simulation_run(model_instance, observations, actions, rewards):
"""
Simulates the events of a simulation from the perspective of a model
Parameters
----------
model_instance : model.modelTemplate.modelTemplate class instance
observations : list of tuples
The sequence of (stimuli, valid actions) for each trial
actions : list
The sequence of participant actions for each trial
rewards : list
The sequence of participant rewards for each trial
model_instance : model.modelTemplate.Model class instance
The same instance that is returned
Returns
-------
model_instance : model.modelTemplate.Model class instance
The same instance that was passed in
"""
for observation, action, reward in zip(observations, actions, rewards):
model_instance.observe(observation)
model_instance.overrideActionChoice(action)
model_instance.feedback(reward)
return model_instance
@staticmethod
def _preprocess_fit_subset(fit_subset):
"""
Prepare as many possible combinations of fit_subset as possible.
If it needs knowledge of the rewards, return ``[]``
Parameters
----------
fit_subset : ``float('Nan')``, ``None``, ``"rewarded"``, ``"unrewarded"``, ``"all"`` or list of int
Describes which, if any, subset of trials will be used to evaluate the performance of the model.
This can either be described as a list of trial numbers or, by passing
- ``"all"`` or ``None`` for fitting all trials
- ``float('Nan')`` or ``"unrewarded"`` for all those trials whose feedback was ``float('Nan')``
- ``"rewarded"`` for those who had feedback that was not ``float('Nan')``
Returns
-------
fit_subset_described : None, or list of ints
A description of the trials to be used, with ``None`` being all of them.
If more information was needed ``[]`` was returned
"""
if fit_subset is None:
fit_subset_described = None
elif isinstance(fit_subset, (list, np.ndarray)):
fit_subset_described = fit_subset
elif fit_subset == 'rewarded':
fit_subset_described = []
elif fit_subset == 'unrewarded':
fit_subset_described = []
elif fit_subset == 'all':
fit_subset_described = None
elif isinstance(fit_subset, float) and np.isnan(fit_subset):
fit_subset_described = []
else:
raise FitSubsetError('{} is not a known fit_subset'.format(
fit_subset))
return fit_subset_described
@staticmethod
def _set_fit_subset(fit_subset, part_rewards):
"""
Identify any fit_subset options that required part_rewards, i.e. subsets of trials where there was or was not
``np.nan`` as the feedback.
Parameters
----------
fit_subset : ``float('Nan')``, ``"rewarded"``, ``"unrewarded"``
Describes which, subset of trials will be used to evaluate the performance of the model.
This can either be described by passing
- ``float('Nan')`` or ``"unrewarded"`` for all those trials whose feedback was ``float('Nan')``
- ``"rewarded"`` for those who had feedback that was not ``float('Nan')``
part_rewards: list of float
The rewards received by the participant
Returns
-------
fit_subset_described : list of bool the length of part_reward
A description of the trials to be used
"""
if fit_subset == 'rewarded':
fit_subset_described = ~np.isnan(part_rewards)
elif fit_subset == 'unrewarded':
fit_subset_described = np.isnan(part_rewards)
elif isinstance(fit_subset, float) and np.isnan(fit_subset):
fit_subset_described = np.isnan(part_rewards)
else:
raise FitSubsetError('{} is not a known fit_subset'.format(
fit_subset))
return fit_subset_described
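# A minimal sketch (an assumption about the downstream fitter, not part of
# the class above): fitness() returns per-trial choice probabilities, which
# a fitting algorithm typically folds into a negative log-likelihood:
#     import numpy as np
#     probs = np.array([0.6, 0.3, 0.8])   # hypothetical fitness() output
#     nll = -np.sum(np.log(probs))        # smaller = better parameter fit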
| <mask token>
class StimuliError(Exception):
pass
class FitSim(object):
"""
A class for fitting data by passing the participant data through the model.
    This has been set up for fitting action-response models.
Parameters
----------
participant_choice_property : string, optional
The participant data key of their action choices. Default ``'Actions'``
participant_reward_property : string, optional
The participant data key of the participant reward data. Default ``'Rewards'``
model_fitting_variable : string, optional
The key to be compared in the model data. Default ``'ActionProb'``
task_stimuli_property : list of strings or None, optional
The keys containing the stimuli seen by the
participant before taking a decision on an action. Default ``None``
action_options_property : string or None or list of ints, optional
        The name of the key in participant_data where the list of valid actions
can be found. If ``None`` then the action list is considered to
stay constant. If a list then the list will be taken as the list
of actions that can be taken at each instance. Default ``None``
float_error_response_value : float, optional
        If a floating point error occurs when running a fit, the fitter function
        will return this value for each trialstep. Default is ``1/1e100``
fit_subset : ``float('Nan')``, ``None``, ``"rewarded"``, ``"unrewarded"``, ``"all"`` or list of int, optional
Describes which, if any, subset of trials will be used to evaluate the performance of the model.
This can either be described as a list of trial numbers or, by passing
- ``"all"`` for fitting all trials
- ``float('Nan')`` or ``"unrewarded"`` for all those trials whose feedback was ``float('Nan')``
- ``"rewarded"`` for those who had feedback that was not ``float('Nan')``
Default ``None``, which means all trials will be used.
Attributes
----------
Name : string
The name of the fitting type
See Also
--------
fitAlgs.fitAlg.FitAlg : The general fitting class
"""
def __init__(self, participant_choice_property='Actions',
participant_reward_property='Rewards', model_fitting_variable=
'ActionProb', task_stimuli_property=None, fit_subset=None,
action_options_property=None, float_error_response_value=1 / 1e+100):
self.participant_choice_property = participant_choice_property
self.participant_reward_property = participant_reward_property
self.model_fitting_variable = model_fitting_variable
self.task_stimuli_property = task_stimuli_property
self.action_options_property = action_options_property
self.float_error_response_value = float_error_response_value
self.fit_subset = fit_subset
self.fit_subset_described = self._preprocess_fit_subset(fit_subset)
self.Name = self.find_name()
self.sim_info = {'Name': self.Name, 'participant_choice_property':
participant_choice_property, 'participant_reward_property':
participant_reward_property, 'task_stimuli_property':
task_stimuli_property, 'action_options_property':
action_options_property, 'model_fitting_variable':
model_fitting_variable, 'float_error_response_value':
float_error_response_value, 'fit_subset': fit_subset}
self.model = None
self.initial_parameter_values = None
self.model_parameter_names = None
self.model_other_properties = None
self.participant_observations = None
self.participant_actions = None
self.participant_rewards = None
def fitness(self, *model_parameters):
"""
Used by a fitter to generate the list of values characterising how well the model parameters describe the
participants actions.
Parameters
----------
model_parameters : list of floats
A list of the parameters used by the model in the order previously defined
Returns
-------
model_performance : list of floats
The choices made by the model that will be used to characterise the quality of the fit.
See Also
--------
fitAlgs.fitSims.FitSim.participant : Fits participant data
fitAlgs.fitAlg.fitAlg : The general fitting class
fitAlgs.fitAlg.fitAlg.fitness : The function that this one is called by
"""
try:
model_instance = self.fitted_model(*model_parameters)
except FloatingPointError:
message = utils.errorResp()
logger = logging.getLogger('Fitter')
logger.warning(
u"""{0}
. Abandoning fitting with parameters: {1} Returning an action choice probability for each trialstep of {2}"""
.format(message, repr(self.get_model_parameters(*
model_parameters)), repr(self.float_error_response_value)))
return np.ones(np.array(self.participant_rewards).shape
) * self.float_error_response_value
except ValueError as e:
logger = logging.getLogger('Fitter')
            logger.warning(
                '{0} in fitted model. Abandoning fitting with parameters: {1} Returning an action choice probability for each trialstep of {2} - {3}, - {4}'
                .format(type(e), repr(self.get_model_parameters(*
                model_parameters)), repr(self.float_error_response_value),
                str(e), e.args))
return np.ones(np.array(self.participant_rewards).shape
) * self.float_error_response_value
model_data = model_instance.returnTaskState()
model_choice_probabilities = model_data[self.model_fitting_variable]
if self.fit_subset_described is None:
model_performance = model_choice_probabilities
else:
model_performance = model_choice_probabilities[self.
fit_subset_described]
if np.isnan(model_performance).any():
logger = logging.getLogger('Fitter')
message = (
'model performance values contain ``Not a Number`` (NaN), i.e. the model had a problem.'
)
logger.warning(message +
'.\n Abandoning fitting with parameters: ' + repr(self.
get_model_parameters(*model_parameters)) +
' Returning an action choice probability for each trialstep of '
+ repr(self.float_error_response_value))
return np.ones(np.array(self.participant_rewards).shape
) * self.float_error_response_value
return model_performance
def prepare_sim(self, model, model_parameters, model_properties,
participant_data):
"""
Set up the simulation of a model following the behaviour of a participant
Parameters
----------
model : model.modelTemplate.Model inherited class
The model you wish to try and fit values to
model_parameters : dict
The model initial parameters
model_properties : dict
The model static properties
participant_data : dict
The participant data
Returns
-------
fitness
"""
self.model = model
self.initial_parameter_values = list(model_parameters.values())
self.model_parameter_names = list(model_parameters.keys())
self.model_other_properties = model_properties
participant_sequence = self.participant_sequence_generation(
participant_data, self.participant_choice_property, self.
participant_reward_property, self.task_stimuli_property, self.
action_options_property)
(self.participant_observations, self.participant_actions, self.
participant_rewards) = participant_sequence
if (not self.fit_subset_described and self.fit_subset_described is not
None):
self.fit_subset_described = self._set_fit_subset(self.
fit_subset, self.participant_rewards)
return self.fitness
@staticmethod
def participant_sequence_generation(participant_data, choice_property,
reward_property, stimuli_property, action_options_property):
"""
Finds the stimuli in the participant data and returns formatted observations
Parameters
----------
participant_data : dict
The participant data
choice_property : string
The participant data key of their action choices.
reward_property : string
The participant data key of the participant reward data
stimuli_property : string or None or list of strings
            A list of the keys in participant_data representing participant stimuli
        action_options_property : string or None or list of strings, ints or None
            The name of the key in participant_data where the list of valid actions
can be found. If ``None`` then the action list is considered to
stay constant. If a list then the list will be taken as the list
of actions that can be taken at every trialstep. If the list is
shorter than the number of trialsteps, then it will be considered
to be a list of valid actions for each trialstep.
Returns
-------
participant_sequence : list of three element tuples
Each list element contains the observation, action and feedback for each trial taken
by the participant
"""
actions = participant_data[choice_property]
rewards = participant_data[reward_property]
participant_data_length = len(actions)
partDataShape = None
if stimuli_property is None:
stimuli_data = [None] * participant_data_length
elif isinstance(stimuli_property, str):
stimuli_data = np.array(participant_data[stimuli_property])
partDataShape = stimuli_data.shape
elif isinstance(stimuli_property, list):
if len(stimuli_property) > 1:
stimuli_data = np.array([participant_data[s] for s in
stimuli_property]).T
else:
stimuli_data = participant_data[stimuli_property[0]]
partDataShape = stimuli_data.shape
else:
raise StimuliError('Unknown representation of stimuli')
if partDataShape:
if max(partDataShape) != partDataShape[0]:
stimuli_data = stimuli_data.T
if isinstance(action_options_property, str
) and action_options_property in participant_data:
available_actions = participant_data[action_options_property]
elif action_options_property is None or len(action_options_property
) != participant_data_length:
available_actions = [action_options_property
] * participant_data_length
else:
available_actions = action_options_property
mismatches = [(True if trial_available_actions is not None and
trial_action not in trial_available_actions else False) for
trial_action, trial_available_actions in zip(actions,
available_actions)]
if any(mismatches):
mismatch_actions = [a for a, m in zip(actions, mismatches) if m is
True]
mismatch_available_actions = [a for a, m in zip(
available_actions, mismatches) if m is True]
raise ActionError(
"""An action is chosen that is not listed as available for the trial
{}
{}"""
.format(mismatch_actions, mismatch_available_actions))
observations = [(s, a) for s, a in zip(stimuli_data, available_actions)
]
return observations, actions, rewards
def info(self):
"""
        The dictionary describing the fitter's algorithm chosen
        Returns
        -------
        fitInfo : dict
            The dictionary of fitter class information
"""
return self.sim_info
def find_name(self):
"""
Returns the name of the class
"""
return self.__class__.__name__
def fitted_model(self, *model_parameters):
"""
Simulating a model run with specific parameter values
Parameters
----------
*model_parameters : floats
The model parameters provided in the order defined in the model setup
Returns
-------
model_instance : model.modelTemplate.Model class instance
"""
model_arguments = self.get_model_properties(*model_parameters)
model_instance = self.model(**model_arguments)
model_instance = self._simulation_run(model_instance, self.
participant_observations, self.participant_actions, self.
participant_rewards)
return model_instance
def get_model_properties(self, *model_parameters):
"""
Compiles the kwarg model arguments based on the model_parameters and
previously specified other parameters
Parameters
----------
model_parameters : list of floats
The parameter values in the order extracted from the modelSetup parameter dictionary
Returns
-------
model_properties : dict
The kwarg model arguments
"""
model_properties = self.get_model_parameters(*model_parameters)
for k, v in self.model_other_properties.items():
model_properties[k] = copy.deepcopy(v)
return model_properties
def get_model_parameters(self, *model_parameters):
"""
Compiles the model parameter arguments based on the model parameters
Parameters
----------
model_parameters : list of floats
The parameter values in the order extracted from the modelSetup parameter dictionary
Returns
-------
parameters : dict
The kwarg model parameter arguments
"""
parameters = {k: v for k, v in zip(self.model_parameter_names,
model_parameters)}
return parameters
@staticmethod
def _simulation_run(model_instance, observations, actions, rewards):
"""
Simulates the events of a simulation from the perspective of a model
Parameters
----------
model_instance : model.modelTemplate.modelTemplate class instance
observations : list of tuples
The sequence of (stimuli, valid actions) for each trial
actions : list
The sequence of participant actions for each trial
rewards : list
The sequence of participant rewards for each trial
model_instance : model.modelTemplate.Model class instance
The same instance that is returned
Returns
-------
model_instance : model.modelTemplate.Model class instance
The same instance that was passed in
"""
for observation, action, reward in zip(observations, actions, rewards):
model_instance.observe(observation)
model_instance.overrideActionChoice(action)
model_instance.feedback(reward)
return model_instance
@staticmethod
def _preprocess_fit_subset(fit_subset):
"""
Prepare as many possible combinations of fit_subset as possible.
If it needs knowledge of the rewards, return ``[]``
Parameters
----------
fit_subset : ``float('Nan')``, ``None``, ``"rewarded"``, ``"unrewarded"``, ``"all"`` or list of int
Describes which, if any, subset of trials will be used to evaluate the performance of the model.
This can either be described as a list of trial numbers or, by passing
- ``"all"`` or ``None`` for fitting all trials
- ``float('Nan')`` or ``"unrewarded"`` for all those trials whose feedback was ``float('Nan')``
- ``"rewarded"`` for those who had feedback that was not ``float('Nan')``
Returns
-------
fit_subset_described : None, or list of ints
A description of the trials to be used, with ``None`` being all of them.
If more information was needed ``[]`` was returned
"""
if fit_subset is None:
fit_subset_described = None
elif isinstance(fit_subset, (list, np.ndarray)):
fit_subset_described = fit_subset
elif fit_subset == 'rewarded':
fit_subset_described = []
elif fit_subset == 'unrewarded':
fit_subset_described = []
elif fit_subset == 'all':
fit_subset_described = None
elif isinstance(fit_subset, float) and np.isnan(fit_subset):
fit_subset_described = []
else:
raise FitSubsetError('{} is not a known fit_subset'.format(
fit_subset))
return fit_subset_described
@staticmethod
def _set_fit_subset(fit_subset, part_rewards):
"""
Identify any fit_subset options that required part_rewards, i.e. subsets of trials where there was or was not
``np.nan`` as the feedback.
Parameters
----------
fit_subset : ``float('Nan')``, ``"rewarded"``, ``"unrewarded"``
Describes which, subset of trials will be used to evaluate the performance of the model.
This can either be described by passing
- ``float('Nan')`` or ``"unrewarded"`` for all those trials whose feedback was ``float('Nan')``
- ``"rewarded"`` for those who had feedback that was not ``float('Nan')``
part_rewards: list of float
The rewards received by the participant
Returns
-------
fit_subset_described : list of bool the length of part_reward
A description of the trials to be used
"""
if fit_subset == 'rewarded':
fit_subset_described = ~np.isnan(part_rewards)
elif fit_subset == 'unrewarded':
fit_subset_described = np.isnan(part_rewards)
elif isinstance(fit_subset, float) and np.isnan(fit_subset):
fit_subset_described = np.isnan(part_rewards)
else:
raise FitSubsetError('{} is not a known fit_subset'.format(
fit_subset))
return fit_subset_described
| <mask token>
class ActionError(Exception):
pass
class StimuliError(Exception):
pass
class FitSim(object):
"""
A class for fitting data by passing the participant data through the model.
    This has been set up for fitting action-response models.
Parameters
----------
participant_choice_property : string, optional
The participant data key of their action choices. Default ``'Actions'``
participant_reward_property : string, optional
The participant data key of the participant reward data. Default ``'Rewards'``
model_fitting_variable : string, optional
The key to be compared in the model data. Default ``'ActionProb'``
task_stimuli_property : list of strings or None, optional
The keys containing the stimuli seen by the
participant before taking a decision on an action. Default ``None``
action_options_property : string or None or list of ints, optional
        The name of the key in participant_data where the list of valid actions
can be found. If ``None`` then the action list is considered to
stay constant. If a list then the list will be taken as the list
of actions that can be taken at each instance. Default ``None``
float_error_response_value : float, optional
        If a floating point error occurs when running a fit, the fitter function
        will return this value for each trialstep. Default is ``1/1e100``
fit_subset : ``float('Nan')``, ``None``, ``"rewarded"``, ``"unrewarded"``, ``"all"`` or list of int, optional
Describes which, if any, subset of trials will be used to evaluate the performance of the model.
This can either be described as a list of trial numbers or, by passing
- ``"all"`` for fitting all trials
- ``float('Nan')`` or ``"unrewarded"`` for all those trials whose feedback was ``float('Nan')``
- ``"rewarded"`` for those who had feedback that was not ``float('Nan')``
Default ``None``, which means all trials will be used.
Attributes
----------
Name : string
The name of the fitting type
See Also
--------
fitAlgs.fitAlg.FitAlg : The general fitting class
"""
def __init__(self, participant_choice_property='Actions',
participant_reward_property='Rewards', model_fitting_variable=
'ActionProb', task_stimuli_property=None, fit_subset=None,
action_options_property=None, float_error_response_value=1 / 1e+100):
self.participant_choice_property = participant_choice_property
self.participant_reward_property = participant_reward_property
self.model_fitting_variable = model_fitting_variable
self.task_stimuli_property = task_stimuli_property
self.action_options_property = action_options_property
self.float_error_response_value = float_error_response_value
self.fit_subset = fit_subset
self.fit_subset_described = self._preprocess_fit_subset(fit_subset)
self.Name = self.find_name()
self.sim_info = {'Name': self.Name, 'participant_choice_property':
participant_choice_property, 'participant_reward_property':
participant_reward_property, 'task_stimuli_property':
task_stimuli_property, 'action_options_property':
action_options_property, 'model_fitting_variable':
model_fitting_variable, 'float_error_response_value':
float_error_response_value, 'fit_subset': fit_subset}
self.model = None
self.initial_parameter_values = None
self.model_parameter_names = None
self.model_other_properties = None
self.participant_observations = None
self.participant_actions = None
self.participant_rewards = None
def fitness(self, *model_parameters):
"""
Used by a fitter to generate the list of values characterising how well the model parameters describe the
participants actions.
Parameters
----------
model_parameters : list of floats
A list of the parameters used by the model in the order previously defined
Returns
-------
model_performance : list of floats
The choices made by the model that will be used to characterise the quality of the fit.
See Also
--------
fitAlgs.fitSims.FitSim.participant : Fits participant data
fitAlgs.fitAlg.fitAlg : The general fitting class
fitAlgs.fitAlg.fitAlg.fitness : The function that this one is called by
"""
try:
model_instance = self.fitted_model(*model_parameters)
except FloatingPointError:
message = utils.errorResp()
logger = logging.getLogger('Fitter')
logger.warning(
u"""{0}
. Abandoning fitting with parameters: {1} Returning an action choice probability for each trialstep of {2}"""
.format(message, repr(self.get_model_parameters(*
model_parameters)), repr(self.float_error_response_value)))
return np.ones(np.array(self.participant_rewards).shape
) * self.float_error_response_value
except ValueError as e:
logger = logging.getLogger('Fitter')
            logger.warning(
                '{0} in fitted model. Abandoning fitting with parameters: {1} Returning an action choice probability for each trialstep of {2} - {3}, - {4}'
                .format(type(e), repr(self.get_model_parameters(*
                model_parameters)), repr(self.float_error_response_value),
                str(e), e.args))  # str(e): Python 3 exceptions have no .message
return np.ones(np.array(self.participant_rewards).shape
) * self.float_error_response_value
model_data = model_instance.returnTaskState()
model_choice_probabilities = model_data[self.model_fitting_variable]
if self.fit_subset_described is None:
model_performance = model_choice_probabilities
else:
model_performance = model_choice_probabilities[self.
fit_subset_described]
if np.isnan(model_performance).any():
logger = logging.getLogger('Fitter')
message = (
'model performance values contain ``Not a Number`` (NaN), i.e. the model had a problem.'
)
logger.warning(message +
'.\n Abandoning fitting with parameters: ' + repr(self.
get_model_parameters(*model_parameters)) +
' Returning an action choice probability for each trialstep of '
+ repr(self.float_error_response_value))
return np.ones(np.array(self.participant_rewards).shape
) * self.float_error_response_value
return model_performance
def prepare_sim(self, model, model_parameters, model_properties,
participant_data):
"""
Set up the simulation of a model following the behaviour of a participant
Parameters
----------
model : model.modelTemplate.Model inherited class
The model you wish to try and fit values to
model_parameters : dict
The model initial parameters
model_properties : dict
The model static properties
participant_data : dict
The participant data
Returns
-------
fitness
"""
self.model = model
self.initial_parameter_values = list(model_parameters.values())
self.model_parameter_names = list(model_parameters.keys())
self.model_other_properties = model_properties
participant_sequence = self.participant_sequence_generation(
participant_data, self.participant_choice_property, self.
participant_reward_property, self.task_stimuli_property, self.
action_options_property)
(self.participant_observations, self.participant_actions, self.
participant_rewards) = participant_sequence
if (not self.fit_subset_described and self.fit_subset_described is not
None):
self.fit_subset_described = self._set_fit_subset(self.
fit_subset, self.participant_rewards)
return self.fitness
@staticmethod
def participant_sequence_generation(participant_data, choice_property,
reward_property, stimuli_property, action_options_property):
"""
Finds the stimuli in the participant data and returns formatted observations
Parameters
----------
participant_data : dict
The participant data
choice_property : string
The participant data key of their action choices.
reward_property : string
The participant data key of the participant reward data
stimuli_property : string or None or list of strings
A list of the keys in partData representing participant stimuli
        action_options_property : string, None, or list of strings or ints
The name of the key in partData where the list of valid actions
can be found. If ``None`` then the action list is considered to
stay constant. If a list then the list will be taken as the list
of actions that can be taken at every trialstep. If the list is
shorter than the number of trialsteps, then it will be considered
to be a list of valid actions for each trialstep.
Returns
-------
participant_sequence : list of three element tuples
Each list element contains the observation, action and feedback for each trial taken
by the participant
"""
actions = participant_data[choice_property]
rewards = participant_data[reward_property]
participant_data_length = len(actions)
partDataShape = None
if stimuli_property is None:
stimuli_data = [None] * participant_data_length
elif isinstance(stimuli_property, str):
stimuli_data = np.array(participant_data[stimuli_property])
partDataShape = stimuli_data.shape
elif isinstance(stimuli_property, list):
if len(stimuli_property) > 1:
stimuli_data = np.array([participant_data[s] for s in
stimuli_property]).T
else:
stimuli_data = participant_data[stimuli_property[0]]
partDataShape = stimuli_data.shape
else:
raise StimuliError('Unknown representation of stimuli')
if partDataShape:
if max(partDataShape) != partDataShape[0]:
stimuli_data = stimuli_data.T
if isinstance(action_options_property, str
) and action_options_property in participant_data:
available_actions = participant_data[action_options_property]
elif action_options_property is None or len(action_options_property
) != participant_data_length:
available_actions = [action_options_property
] * participant_data_length
else:
available_actions = action_options_property
mismatches = [(True if trial_available_actions is not None and
trial_action not in trial_available_actions else False) for
trial_action, trial_available_actions in zip(actions,
available_actions)]
if any(mismatches):
mismatch_actions = [a for a, m in zip(actions, mismatches) if m is
True]
mismatch_available_actions = [a for a, m in zip(
available_actions, mismatches) if m is True]
raise ActionError(
"""An action is chosen that is not listed as available for the trial
{}
{}"""
.format(mismatch_actions, mismatch_available_actions))
observations = [(s, a) for s, a in zip(stimuli_data, available_actions)
]
return observations, actions, rewards
def info(self):
"""
        The dictionary describing the chosen fitter algorithm
        Returns
        -------
        fitInfo : dict
            The dictionary of fitter class information
"""
return self.sim_info
def find_name(self):
"""
Returns the name of the class
"""
return self.__class__.__name__
def fitted_model(self, *model_parameters):
"""
Simulating a model run with specific parameter values
Parameters
----------
*model_parameters : floats
The model parameters provided in the order defined in the model setup
Returns
-------
model_instance : model.modelTemplate.Model class instance
"""
model_arguments = self.get_model_properties(*model_parameters)
model_instance = self.model(**model_arguments)
model_instance = self._simulation_run(model_instance, self.
participant_observations, self.participant_actions, self.
participant_rewards)
return model_instance
def get_model_properties(self, *model_parameters):
"""
Compiles the kwarg model arguments based on the model_parameters and
previously specified other parameters
Parameters
----------
model_parameters : list of floats
The parameter values in the order extracted from the modelSetup parameter dictionary
Returns
-------
model_properties : dict
The kwarg model arguments
"""
model_properties = self.get_model_parameters(*model_parameters)
for k, v in self.model_other_properties.items():
model_properties[k] = copy.deepcopy(v)
return model_properties
def get_model_parameters(self, *model_parameters):
"""
Compiles the model parameter arguments based on the model parameters
Parameters
----------
model_parameters : list of floats
The parameter values in the order extracted from the modelSetup parameter dictionary
Returns
-------
parameters : dict
The kwarg model parameter arguments
"""
parameters = {k: v for k, v in zip(self.model_parameter_names,
model_parameters)}
return parameters
@staticmethod
def _simulation_run(model_instance, observations, actions, rewards):
"""
Simulates the events of a simulation from the perspective of a model
Parameters
----------
model_instance : model.modelTemplate.modelTemplate class instance
observations : list of tuples
The sequence of (stimuli, valid actions) for each trial
actions : list
The sequence of participant actions for each trial
rewards : list
The sequence of participant rewards for each trial
Returns
-------
model_instance : model.modelTemplate.Model class instance
The same instance that was passed in
"""
for observation, action, reward in zip(observations, actions, rewards):
model_instance.observe(observation)
model_instance.overrideActionChoice(action)
model_instance.feedback(reward)
return model_instance
@staticmethod
def _preprocess_fit_subset(fit_subset):
"""
Prepare as many possible combinations of fit_subset as possible.
If it needs knowledge of the rewards, return ``[]``
Parameters
----------
fit_subset : ``float('Nan')``, ``None``, ``"rewarded"``, ``"unrewarded"``, ``"all"`` or list of int
Describes which, if any, subset of trials will be used to evaluate the performance of the model.
This can either be described as a list of trial numbers or, by passing
- ``"all"`` or ``None`` for fitting all trials
- ``float('Nan')`` or ``"unrewarded"`` for all those trials whose feedback was ``float('Nan')``
- ``"rewarded"`` for those who had feedback that was not ``float('Nan')``
Returns
-------
fit_subset_described : None, or list of ints
A description of the trials to be used, with ``None`` being all of them.
If more information was needed ``[]`` was returned
"""
if fit_subset is None:
fit_subset_described = None
elif isinstance(fit_subset, (list, np.ndarray)):
fit_subset_described = fit_subset
elif fit_subset == 'rewarded':
fit_subset_described = []
elif fit_subset == 'unrewarded':
fit_subset_described = []
elif fit_subset == 'all':
fit_subset_described = None
elif isinstance(fit_subset, float) and np.isnan(fit_subset):
fit_subset_described = []
else:
raise FitSubsetError('{} is not a known fit_subset'.format(
fit_subset))
return fit_subset_described
@staticmethod
def _set_fit_subset(fit_subset, part_rewards):
"""
Identify any fit_subset options that required part_rewards, i.e. subsets of trials where there was or was not
``np.nan`` as the feedback.
Parameters
----------
fit_subset : ``float('Nan')``, ``"rewarded"``, ``"unrewarded"``
        Describes which subset of trials will be used to evaluate the performance of the model.
This can either be described by passing
- ``float('Nan')`` or ``"unrewarded"`` for all those trials whose feedback was ``float('Nan')``
- ``"rewarded"`` for those who had feedback that was not ``float('Nan')``
part_rewards: list of float
The rewards received by the participant
Returns
-------
fit_subset_described : list of bool the length of part_reward
A description of the trials to be used
"""
if fit_subset == 'rewarded':
fit_subset_described = ~np.isnan(part_rewards)
elif fit_subset == 'unrewarded':
fit_subset_described = np.isnan(part_rewards)
elif isinstance(fit_subset, float) and np.isnan(fit_subset):
fit_subset_described = np.isnan(part_rewards)
else:
raise FitSubsetError('{} is not a known fit_subset'.format(
fit_subset))
return fit_subset_described
| <mask token>
class FitSubsetError(Exception):
pass
class ActionError(Exception):
pass
class StimuliError(Exception):
pass
class FitSim(object):
"""
A class for fitting data by passing the participant data through the model.
This has been setup for fitting action-response models
Parameters
----------
participant_choice_property : string, optional
The participant data key of their action choices. Default ``'Actions'``
participant_reward_property : string, optional
The participant data key of the participant reward data. Default ``'Rewards'``
model_fitting_variable : string, optional
The key to be compared in the model data. Default ``'ActionProb'``
task_stimuli_property : list of strings or None, optional
The keys containing the stimuli seen by the
participant before taking a decision on an action. Default ``None``
action_options_property : string or None or list of ints, optional
The name of the key in partData where the list of valid actions
can be found. If ``None`` then the action list is considered to
stay constant. If a list then the list will be taken as the list
of actions that can be taken at each instance. Default ``None``
float_error_response_value : float, optional
        If a floating point error occurs when running a fit, the fitter function
        will return this value for each trialstep. Default is ``1/1e100``
fit_subset : ``float('Nan')``, ``None``, ``"rewarded"``, ``"unrewarded"``, ``"all"`` or list of int, optional
Describes which, if any, subset of trials will be used to evaluate the performance of the model.
This can either be described as a list of trial numbers or, by passing
- ``"all"`` for fitting all trials
- ``float('Nan')`` or ``"unrewarded"`` for all those trials whose feedback was ``float('Nan')``
- ``"rewarded"`` for those who had feedback that was not ``float('Nan')``
Default ``None``, which means all trials will be used.
Attributes
----------
Name : string
The name of the fitting type
See Also
--------
fitAlgs.fitAlg.FitAlg : The general fitting class
"""
def __init__(self, participant_choice_property='Actions',
participant_reward_property='Rewards', model_fitting_variable=
'ActionProb', task_stimuli_property=None, fit_subset=None,
action_options_property=None, float_error_response_value=1 / 1e+100):
self.participant_choice_property = participant_choice_property
self.participant_reward_property = participant_reward_property
self.model_fitting_variable = model_fitting_variable
self.task_stimuli_property = task_stimuli_property
self.action_options_property = action_options_property
self.float_error_response_value = float_error_response_value
self.fit_subset = fit_subset
self.fit_subset_described = self._preprocess_fit_subset(fit_subset)
self.Name = self.find_name()
self.sim_info = {'Name': self.Name, 'participant_choice_property':
participant_choice_property, 'participant_reward_property':
participant_reward_property, 'task_stimuli_property':
task_stimuli_property, 'action_options_property':
action_options_property, 'model_fitting_variable':
model_fitting_variable, 'float_error_response_value':
float_error_response_value, 'fit_subset': fit_subset}
self.model = None
self.initial_parameter_values = None
self.model_parameter_names = None
self.model_other_properties = None
self.participant_observations = None
self.participant_actions = None
self.participant_rewards = None
def fitness(self, *model_parameters):
"""
Used by a fitter to generate the list of values characterising how well the model parameters describe the
        participant's actions.
Parameters
----------
model_parameters : list of floats
A list of the parameters used by the model in the order previously defined
Returns
-------
model_performance : list of floats
The choices made by the model that will be used to characterise the quality of the fit.
See Also
--------
fitAlgs.fitSims.FitSim.participant : Fits participant data
fitAlgs.fitAlg.fitAlg : The general fitting class
fitAlgs.fitAlg.fitAlg.fitness : The function that this one is called by
"""
try:
model_instance = self.fitted_model(*model_parameters)
except FloatingPointError:
message = utils.errorResp()
logger = logging.getLogger('Fitter')
logger.warning(
u"""{0}
. Abandoning fitting with parameters: {1} Returning an action choice probability for each trialstep of {2}"""
.format(message, repr(self.get_model_parameters(*
model_parameters)), repr(self.float_error_response_value)))
return np.ones(np.array(self.participant_rewards).shape
) * self.float_error_response_value
except ValueError as e:
logger = logging.getLogger('Fitter')
            logger.warning(
                '{0} in fitted model. Abandoning fitting with parameters: {1} Returning an action choice probability for each trialstep of {2} - {3}, - {4}'
                .format(type(e), repr(self.get_model_parameters(*
                model_parameters)), repr(self.float_error_response_value),
                str(e), e.args))  # str(e): Python 3 exceptions have no .message
return np.ones(np.array(self.participant_rewards).shape
) * self.float_error_response_value
model_data = model_instance.returnTaskState()
model_choice_probabilities = model_data[self.model_fitting_variable]
if self.fit_subset_described is None:
model_performance = model_choice_probabilities
else:
model_performance = model_choice_probabilities[self.
fit_subset_described]
if np.isnan(model_performance).any():
logger = logging.getLogger('Fitter')
message = (
'model performance values contain ``Not a Number`` (NaN), i.e. the model had a problem.'
)
logger.warning(message +
'.\n Abandoning fitting with parameters: ' + repr(self.
get_model_parameters(*model_parameters)) +
' Returning an action choice probability for each trialstep of '
+ repr(self.float_error_response_value))
return np.ones(np.array(self.participant_rewards).shape
) * self.float_error_response_value
return model_performance
def prepare_sim(self, model, model_parameters, model_properties,
participant_data):
"""
Set up the simulation of a model following the behaviour of a participant
Parameters
----------
model : model.modelTemplate.Model inherited class
The model you wish to try and fit values to
model_parameters : dict
The model initial parameters
model_properties : dict
The model static properties
participant_data : dict
The participant data
Returns
-------
fitness
"""
self.model = model
self.initial_parameter_values = list(model_parameters.values())
self.model_parameter_names = list(model_parameters.keys())
self.model_other_properties = model_properties
participant_sequence = self.participant_sequence_generation(
participant_data, self.participant_choice_property, self.
participant_reward_property, self.task_stimuli_property, self.
action_options_property)
(self.participant_observations, self.participant_actions, self.
participant_rewards) = participant_sequence
if (not self.fit_subset_described and self.fit_subset_described is not
None):
self.fit_subset_described = self._set_fit_subset(self.
fit_subset, self.participant_rewards)
return self.fitness
@staticmethod
def participant_sequence_generation(participant_data, choice_property,
reward_property, stimuli_property, action_options_property):
"""
Finds the stimuli in the participant data and returns formatted observations
Parameters
----------
participant_data : dict
The participant data
choice_property : string
The participant data key of their action choices.
reward_property : string
The participant data key of the participant reward data
stimuli_property : string or None or list of strings
A list of the keys in partData representing participant stimuli
        action_options_property : string, None, or list of strings or ints
The name of the key in partData where the list of valid actions
can be found. If ``None`` then the action list is considered to
stay constant. If a list then the list will be taken as the list
of actions that can be taken at every trialstep. If the list is
shorter than the number of trialsteps, then it will be considered
to be a list of valid actions for each trialstep.
Returns
-------
participant_sequence : list of three element tuples
Each list element contains the observation, action and feedback for each trial taken
by the participant
"""
actions = participant_data[choice_property]
rewards = participant_data[reward_property]
participant_data_length = len(actions)
partDataShape = None
if stimuli_property is None:
stimuli_data = [None] * participant_data_length
elif isinstance(stimuli_property, str):
stimuli_data = np.array(participant_data[stimuli_property])
partDataShape = stimuli_data.shape
elif isinstance(stimuli_property, list):
if len(stimuli_property) > 1:
stimuli_data = np.array([participant_data[s] for s in
stimuli_property]).T
else:
stimuli_data = participant_data[stimuli_property[0]]
partDataShape = stimuli_data.shape
else:
raise StimuliError('Unknown representation of stimuli')
if partDataShape:
if max(partDataShape) != partDataShape[0]:
stimuli_data = stimuli_data.T
if isinstance(action_options_property, str
) and action_options_property in participant_data:
available_actions = participant_data[action_options_property]
elif action_options_property is None or len(action_options_property
) != participant_data_length:
available_actions = [action_options_property
] * participant_data_length
else:
available_actions = action_options_property
mismatches = [(True if trial_available_actions is not None and
trial_action not in trial_available_actions else False) for
trial_action, trial_available_actions in zip(actions,
available_actions)]
if any(mismatches):
mismatch_actions = [a for a, m in zip(actions, mismatches) if m is
True]
mismatch_available_actions = [a for a, m in zip(
available_actions, mismatches) if m is True]
raise ActionError(
"""An action is chosen that is not listed as available for the trial
{}
{}"""
.format(mismatch_actions, mismatch_available_actions))
observations = [(s, a) for s, a in zip(stimuli_data, available_actions)
]
return observations, actions, rewards
def info(self):
"""
        The dictionary describing the chosen fitter algorithm
        Returns
        -------
        fitInfo : dict
            The dictionary of fitter class information
"""
return self.sim_info
def find_name(self):
"""
Returns the name of the class
"""
return self.__class__.__name__
def fitted_model(self, *model_parameters):
"""
Simulating a model run with specific parameter values
Parameters
----------
*model_parameters : floats
The model parameters provided in the order defined in the model setup
Returns
-------
model_instance : model.modelTemplate.Model class instance
"""
model_arguments = self.get_model_properties(*model_parameters)
model_instance = self.model(**model_arguments)
model_instance = self._simulation_run(model_instance, self.
participant_observations, self.participant_actions, self.
participant_rewards)
return model_instance
def get_model_properties(self, *model_parameters):
"""
Compiles the kwarg model arguments based on the model_parameters and
previously specified other parameters
Parameters
----------
model_parameters : list of floats
The parameter values in the order extracted from the modelSetup parameter dictionary
Returns
-------
model_properties : dict
The kwarg model arguments
"""
model_properties = self.get_model_parameters(*model_parameters)
for k, v in self.model_other_properties.items():
model_properties[k] = copy.deepcopy(v)
return model_properties
def get_model_parameters(self, *model_parameters):
"""
Compiles the model parameter arguments based on the model parameters
Parameters
----------
model_parameters : list of floats
The parameter values in the order extracted from the modelSetup parameter dictionary
Returns
-------
parameters : dict
The kwarg model parameter arguments
"""
parameters = {k: v for k, v in zip(self.model_parameter_names,
model_parameters)}
return parameters
@staticmethod
def _simulation_run(model_instance, observations, actions, rewards):
"""
Simulates the events of a simulation from the perspective of a model
Parameters
----------
model_instance : model.modelTemplate.modelTemplate class instance
observations : list of tuples
The sequence of (stimuli, valid actions) for each trial
actions : list
The sequence of participant actions for each trial
rewards : list
The sequence of participant rewards for each trial
Returns
-------
model_instance : model.modelTemplate.Model class instance
The same instance that was passed in
"""
for observation, action, reward in zip(observations, actions, rewards):
model_instance.observe(observation)
model_instance.overrideActionChoice(action)
model_instance.feedback(reward)
return model_instance
@staticmethod
def _preprocess_fit_subset(fit_subset):
"""
Prepare as many possible combinations of fit_subset as possible.
If it needs knowledge of the rewards, return ``[]``
Parameters
----------
fit_subset : ``float('Nan')``, ``None``, ``"rewarded"``, ``"unrewarded"``, ``"all"`` or list of int
Describes which, if any, subset of trials will be used to evaluate the performance of the model.
This can either be described as a list of trial numbers or, by passing
- ``"all"`` or ``None`` for fitting all trials
- ``float('Nan')`` or ``"unrewarded"`` for all those trials whose feedback was ``float('Nan')``
- ``"rewarded"`` for those who had feedback that was not ``float('Nan')``
Returns
-------
fit_subset_described : None, or list of ints
A description of the trials to be used, with ``None`` being all of them.
If more information was needed ``[]`` was returned
"""
if fit_subset is None:
fit_subset_described = None
elif isinstance(fit_subset, (list, np.ndarray)):
fit_subset_described = fit_subset
elif fit_subset == 'rewarded':
fit_subset_described = []
elif fit_subset == 'unrewarded':
fit_subset_described = []
elif fit_subset == 'all':
fit_subset_described = None
elif isinstance(fit_subset, float) and np.isnan(fit_subset):
fit_subset_described = []
else:
raise FitSubsetError('{} is not a known fit_subset'.format(
fit_subset))
return fit_subset_described
@staticmethod
def _set_fit_subset(fit_subset, part_rewards):
"""
Identify any fit_subset options that required part_rewards, i.e. subsets of trials where there was or was not
``np.nan`` as the feedback.
Parameters
----------
fit_subset : ``float('Nan')``, ``"rewarded"``, ``"unrewarded"``
        Describes which subset of trials will be used to evaluate the performance of the model.
This can either be described by passing
- ``float('Nan')`` or ``"unrewarded"`` for all those trials whose feedback was ``float('Nan')``
- ``"rewarded"`` for those who had feedback that was not ``float('Nan')``
part_rewards: list of float
The rewards received by the participant
Returns
-------
fit_subset_described : list of bool the length of part_reward
A description of the trials to be used
"""
if fit_subset == 'rewarded':
fit_subset_described = ~np.isnan(part_rewards)
elif fit_subset == 'unrewarded':
fit_subset_described = np.isnan(part_rewards)
elif isinstance(fit_subset, float) and np.isnan(fit_subset):
fit_subset_described = np.isnan(part_rewards)
else:
raise FitSubsetError('{} is not a known fit_subset'.format(
fit_subset))
return fit_subset_described
| # -*- coding: utf-8 -*-
"""
:Author: Dominic Hunt
"""
import numpy as np
import logging
import itertools
import copy
import types
import utils
class FitSubsetError(Exception):
pass
class ActionError(Exception):
pass
class StimuliError(Exception):
pass
class FitSim(object):
"""
A class for fitting data by passing the participant data through the model.
This has been setup for fitting action-response models
Parameters
----------
participant_choice_property : string, optional
The participant data key of their action choices. Default ``'Actions'``
participant_reward_property : string, optional
The participant data key of the participant reward data. Default ``'Rewards'``
model_fitting_variable : string, optional
The key to be compared in the model data. Default ``'ActionProb'``
task_stimuli_property : list of strings or None, optional
The keys containing the stimuli seen by the
participant before taking a decision on an action. Default ``None``
action_options_property : string or None or list of ints, optional
The name of the key in partData where the list of valid actions
can be found. If ``None`` then the action list is considered to
stay constant. If a list then the list will be taken as the list
of actions that can be taken at each instance. Default ``None``
float_error_response_value : float, optional
        If a floating point error occurs when running a fit, the fitter function
        will return this value for each trialstep. Default is ``1/1e100``
fit_subset : ``float('Nan')``, ``None``, ``"rewarded"``, ``"unrewarded"``, ``"all"`` or list of int, optional
Describes which, if any, subset of trials will be used to evaluate the performance of the model.
This can either be described as a list of trial numbers or, by passing
- ``"all"`` for fitting all trials
- ``float('Nan')`` or ``"unrewarded"`` for all those trials whose feedback was ``float('Nan')``
- ``"rewarded"`` for those who had feedback that was not ``float('Nan')``
Default ``None``, which means all trials will be used.
Attributes
----------
Name : string
The name of the fitting type
See Also
--------
fitAlgs.fitAlg.FitAlg : The general fitting class
"""
def __init__(self,
participant_choice_property='Actions',
participant_reward_property='Rewards',
model_fitting_variable='ActionProb',
task_stimuli_property=None,
fit_subset=None,
action_options_property=None,
float_error_response_value=1 / 1e100
):
self.participant_choice_property = participant_choice_property
self.participant_reward_property = participant_reward_property
self.model_fitting_variable = model_fitting_variable
self.task_stimuli_property = task_stimuli_property
self.action_options_property = action_options_property
self.float_error_response_value = float_error_response_value
self.fit_subset = fit_subset
self.fit_subset_described = self._preprocess_fit_subset(fit_subset)
self.Name = self.find_name()
self.sim_info = {'Name': self.Name,
'participant_choice_property': participant_choice_property,
'participant_reward_property': participant_reward_property,
'task_stimuli_property': task_stimuli_property,
'action_options_property': action_options_property,
'model_fitting_variable': model_fitting_variable,
'float_error_response_value': float_error_response_value,
'fit_subset': fit_subset}
self.model = None
self.initial_parameter_values = None
self.model_parameter_names = None
self.model_other_properties = None
self.participant_observations = None
self.participant_actions = None
self.participant_rewards = None
def fitness(self, *model_parameters):
"""
Used by a fitter to generate the list of values characterising how well the model parameters describe the
        participant's actions.
Parameters
----------
model_parameters : list of floats
A list of the parameters used by the model in the order previously defined
Returns
-------
model_performance : list of floats
The choices made by the model that will be used to characterise the quality of the fit.
See Also
--------
fitAlgs.fitSims.FitSim.participant : Fits participant data
fitAlgs.fitAlg.fitAlg : The general fitting class
fitAlgs.fitAlg.fitAlg.fitness : The function that this one is called by
"""
try:
model_instance = self.fitted_model(*model_parameters)
except FloatingPointError:
message = utils.errorResp()
logger = logging.getLogger('Fitter')
logger.warning(
u"{0}\n. Abandoning fitting with parameters: {1} Returning an action choice probability for each trialstep of {2}".format(message,
repr(
self.get_model_parameters(
*model_parameters)),
repr(
self.float_error_response_value)))
return np.ones(np.array(self.participant_rewards).shape) * self.float_error_response_value
except ValueError as e:
logger = logging.getLogger('Fitter')
            logger.warning(
                "{0} in fitted model. Abandoning fitting with parameters: {1} Returning an action choice probability for each trialstep of {2} - {3}, - {4}".format(
                    type(e),
                    repr(self.get_model_parameters(*model_parameters)),
                    repr(self.float_error_response_value),
                    str(e),  # Python 3 exceptions have no .message attribute
                    e.args))
return np.ones(np.array(self.participant_rewards).shape) * self.float_error_response_value
# Pull out the values to be compared
model_data = model_instance.returnTaskState()
model_choice_probabilities = model_data[self.model_fitting_variable]
if self.fit_subset_described is None:
model_performance = model_choice_probabilities
else:
model_performance = model_choice_probabilities[self.fit_subset_described]
if np.isnan(model_performance).any():
logger = logging.getLogger('Fitter')
message = "model performance values contain ``Not a Number`` (NaN), i.e. the model had a problem."
logger.warning(message + ".\n Abandoning fitting with parameters: "
+ repr(self.get_model_parameters(*model_parameters))
+ " Returning an action choice probability for each trialstep of "
+ repr(self.float_error_response_value))
return np.ones(np.array(self.participant_rewards).shape) * self.float_error_response_value
return model_performance
def prepare_sim(self, model, model_parameters, model_properties, participant_data):
"""
Set up the simulation of a model following the behaviour of a participant
Parameters
----------
model : model.modelTemplate.Model inherited class
The model you wish to try and fit values to
model_parameters : dict
The model initial parameters
model_properties : dict
The model static properties
participant_data : dict
The participant data
Returns
-------
fitness
"""
self.model = model
self.initial_parameter_values = list(model_parameters.values())
self.model_parameter_names = list(model_parameters.keys())
self.model_other_properties = model_properties
participant_sequence = self.participant_sequence_generation(participant_data,
self.participant_choice_property,
self.participant_reward_property,
self.task_stimuli_property,
self.action_options_property)
self.participant_observations, self.participant_actions, self.participant_rewards = participant_sequence
if not self.fit_subset_described and self.fit_subset_described is not None:
self.fit_subset_described = self._set_fit_subset(self.fit_subset, self.participant_rewards)
return self.fitness
@staticmethod
def participant_sequence_generation(participant_data,
choice_property,
reward_property,
stimuli_property,
action_options_property):
"""
Finds the stimuli in the participant data and returns formatted observations
Parameters
----------
participant_data : dict
The participant data
choice_property : string
The participant data key of their action choices.
reward_property : string
The participant data key of the participant reward data
stimuli_property : string or None or list of strings
A list of the keys in partData representing participant stimuli
        action_options_property : string, None, or list of strings or ints
The name of the key in partData where the list of valid actions
can be found. If ``None`` then the action list is considered to
stay constant. If a list then the list will be taken as the list
of actions that can be taken at every trialstep. If the list is
shorter than the number of trialsteps, then it will be considered
to be a list of valid actions for each trialstep.
Returns
-------
participant_sequence : list of three element tuples
Each list element contains the observation, action and feedback for each trial taken
by the participant
"""
actions = participant_data[choice_property]
rewards = participant_data[reward_property]
participant_data_length = len(actions)
partDataShape = None
if stimuli_property is None:
stimuli_data = [None] * participant_data_length
elif isinstance(stimuli_property, str):
stimuli_data = np.array(participant_data[stimuli_property])
partDataShape = stimuli_data.shape
elif isinstance(stimuli_property, list):
if len(stimuli_property) > 1:
stimuli_data = np.array([participant_data[s] for s in stimuli_property]).T
else:
stimuli_data = participant_data[stimuli_property[0]]
partDataShape = stimuli_data.shape
else:
raise StimuliError('Unknown representation of stimuli')
if partDataShape:
if max(partDataShape) != partDataShape[0]:
stimuli_data = stimuli_data.T
if isinstance(action_options_property, str) and action_options_property in participant_data:
available_actions = participant_data[action_options_property]
elif action_options_property is None or len(action_options_property) != participant_data_length:
available_actions = [action_options_property] * participant_data_length
else:
available_actions = action_options_property
mismatches = [True if (trial_available_actions is not None and trial_action not in trial_available_actions)
else False
for trial_action, trial_available_actions in zip(actions, available_actions)]
if any(mismatches):
mismatch_actions = [a for a, m in zip(actions, mismatches) if m is True]
mismatch_available_actions = [a for a, m in zip(available_actions, mismatches) if m is True]
raise ActionError('An action is chosen that is not listed as available for the trial \n{}\n {}'.format(mismatch_actions,
mismatch_available_actions))
observations = [(s, a) for s, a in zip(stimuli_data, available_actions)]
return observations, actions, rewards
def info(self):
"""
        The dictionary describing the chosen fitter algorithm
        Returns
        -------
        fitInfo : dict
            The dictionary of fitter class information
"""
return self.sim_info
def find_name(self):
"""
Returns the name of the class
"""
return self.__class__.__name__
def fitted_model(self, *model_parameters):
"""
Simulating a model run with specific parameter values
Parameters
----------
*model_parameters : floats
The model parameters provided in the order defined in the model setup
Returns
-------
model_instance : model.modelTemplate.Model class instance
"""
model_arguments = self.get_model_properties(*model_parameters)
model_instance = self.model(**model_arguments)
model_instance = self._simulation_run(model_instance,
self.participant_observations,
self.participant_actions,
self.participant_rewards)
return model_instance
def get_model_properties(self, *model_parameters):
"""
Compiles the kwarg model arguments based on the model_parameters and
previously specified other parameters
Parameters
----------
model_parameters : list of floats
The parameter values in the order extracted from the modelSetup parameter dictionary
Returns
-------
model_properties : dict
The kwarg model arguments
"""
model_properties = self.get_model_parameters(*model_parameters)
for k, v in self.model_other_properties.items():
model_properties[k] = copy.deepcopy(v)
return model_properties
def get_model_parameters(self, *model_parameters):
"""
Compiles the model parameter arguments based on the model parameters
Parameters
----------
model_parameters : list of floats
The parameter values in the order extracted from the modelSetup parameter dictionary
Returns
-------
parameters : dict
The kwarg model parameter arguments
"""
parameters = {k: v for k, v in zip(self.model_parameter_names, model_parameters)}
return parameters
@staticmethod
def _simulation_run(model_instance, observations, actions, rewards):
"""
Simulates the events of a simulation from the perspective of a model
Parameters
----------
model_instance : model.modelTemplate.modelTemplate class instance
observations : list of tuples
The sequence of (stimuli, valid actions) for each trial
actions : list
The sequence of participant actions for each trial
rewards : list
The sequence of participant rewards for each trial
Returns
-------
model_instance : model.modelTemplate.Model class instance
The same instance that was passed in
"""
for observation, action, reward in zip(observations, actions, rewards):
model_instance.observe(observation)
model_instance.overrideActionChoice(action)
model_instance.feedback(reward)
return model_instance
@staticmethod
def _preprocess_fit_subset(fit_subset):
"""
Prepare as many possible combinations of fit_subset as possible.
If it needs knowledge of the rewards, return ``[]``
Parameters
----------
fit_subset : ``float('Nan')``, ``None``, ``"rewarded"``, ``"unrewarded"``, ``"all"`` or list of int
Describes which, if any, subset of trials will be used to evaluate the performance of the model.
This can either be described as a list of trial numbers or, by passing
- ``"all"`` or ``None`` for fitting all trials
- ``float('Nan')`` or ``"unrewarded"`` for all those trials whose feedback was ``float('Nan')``
- ``"rewarded"`` for those who had feedback that was not ``float('Nan')``
Returns
-------
fit_subset_described : None, or list of ints
A description of the trials to be used, with ``None`` being all of them.
If more information was needed ``[]`` was returned
"""
if fit_subset is None:
fit_subset_described = None
elif isinstance(fit_subset, (list, np.ndarray)):
fit_subset_described = fit_subset
elif fit_subset == "rewarded":
fit_subset_described = []
elif fit_subset == "unrewarded":
fit_subset_described = []
elif fit_subset == "all":
fit_subset_described = None
elif isinstance(fit_subset, float) and np.isnan(fit_subset):
fit_subset_described = []
else:
raise FitSubsetError('{} is not a known fit_subset'.format(fit_subset))
return fit_subset_described
@staticmethod
def _set_fit_subset(fit_subset, part_rewards):
"""
Identify any fit_subset options that required part_rewards, i.e. subsets of trials where there was or was not
``np.nan`` as the feedback.
Parameters
----------
fit_subset : ``float('Nan')``, ``"rewarded"``, ``"unrewarded"``
        Describes which subset of trials will be used to evaluate the performance of the model.
This can either be described by passing
- ``float('Nan')`` or ``"unrewarded"`` for all those trials whose feedback was ``float('Nan')``
- ``"rewarded"`` for those who had feedback that was not ``float('Nan')``
part_rewards: list of float
The rewards received by the participant
Returns
-------
fit_subset_described : list of bool the length of part_reward
A description of the trials to be used
"""
if fit_subset == "rewarded":
fit_subset_described = ~np.isnan(part_rewards)
elif fit_subset == "unrewarded":
fit_subset_described = np.isnan(part_rewards)
elif isinstance(fit_subset, float) and np.isnan(fit_subset):
fit_subset_described = np.isnan(part_rewards)
else:
raise FitSubsetError('{} is not a known fit_subset'.format(fit_subset))
return fit_subset_described
| [
12,
15,
16,
17,
19
] |
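For context, here is a minimal usage sketch of the FitSim class defined above. DummyModel is a hypothetical stand-in (not part of the original code) for a model.modelTemplate.Model subclass; the only assumption is that a model exposes the observe, overrideActionChoice, feedback and returnTaskState methods used by _simulation_run.

import numpy as np

class DummyModel(object):
    # Hypothetical stand-in for a model.modelTemplate.Model subclass.
    def __init__(self, alpha=0.5):
        self.alpha = alpha
        self.probs = []

    def observe(self, observation):
        pass  # a real model would update its state from (stimuli, valid actions)

    def overrideActionChoice(self, action):
        self.probs.append(self.alpha)  # record a stand-in choice probability

    def feedback(self, reward):
        pass  # a real model would learn from the reward here

    def returnTaskState(self):
        return {'ActionProb': np.array(self.probs)}

participant = {'Actions': [0, 1, 0], 'Rewards': [1.0, 0.0, 1.0]}
fit_sim = FitSim()
fitness = fit_sim.prepare_sim(DummyModel, {'alpha': 0.5}, {}, participant)
print(fitness(0.5))  # -> one choice probability per trial, here [0.5 0.5 0.5]

A fitting algorithm would then minimise or maximise some statistic of this array over candidate parameter values.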
1,977 | e32c73abdcd384ee7c369182527cca6495f067b3 | <mask token>
| <mask token>
def getData(request):
index = request.GET.get('index')
    msg = 'No data found'
if ExtExecute.objects.filter(query_code=index):
ext = ExtExecute.objects.filter(query_code=index).first()
result = getpicture(ext.upload_file.path)
        if result:
            result = [request.META.get('HTTP_HOST') + pic for pic in result]
        subject = ext.extSubmit.subProject
        dataset = ext.sampleinfoext_set.all()
        type = 1
elif LibExecute.objects.filter(query_code=index):
result = getpicture(LibExecute.objects.filter(query_code=index).
first().upload_file.path)
        if result:
            result = [request.META.get('HTTP_HOST') + pic for pic in result]
        subject = LibExecute.objects.filter(query_code=index).first(
            ).libSubmit.subProject
        dataset = LibExecute.objects.filter(query_code=index).first(
            ).sampleinfolib_set.all()
        type = 2
elif SeqExecute.objects.filter(query_code=index):
subject = SeqExecute.objects.filter(query_code=index).first(
).seqSubmit.subProject
dataset = SeqExecute.objects.filter(query_code=index).first(
).sampleinfoseq_set.all()
type = 3
return render(request, 'Showdata.html', {'data': dataset, 'type':
type, 'subject': subject})
else:
return render(request, 'Showdata.html', {'error': msg})
return render(request, 'Showdata.html', {'data': dataset, 'type': type,
'subject': subject, 'pic': result})
| <mask token>
def getpicture(word):
    root, ext = os.path.splitext(word)  # splitext handles names with extra dots
    if ext.lower() not in ['.doc', '.docx']:
        return None
    word_zip = root + '.zip'
path = ''
for i in word.split('/')[0:-1]:
path += i
path += '/'
path += 'tem/'
if not os.path.exists(path):
os.rename(word, word_zip)
f = zipfile.ZipFile(word_zip, 'r')
for file in f.filelist:
f.extract(file, path)
f.close()
os.rename(word_zip, word)
pic = os.listdir(os.path.join(path, 'word/media'))
result = []
result_ = []
for i in pic:
result.append(os.path.join(path, 'word/media/') + i)
for j in result:
url = '/media/' + j.split('/media/')[1] + '/media/' + j.split(
'/media/')[2]
result_.append(url)
return result_
else:
pic = os.listdir(os.path.join(path, 'word/media'))
result = []
result_ = []
for i in pic:
result.append(os.path.join(path, 'word/media/') + i)
for j in result:
url = '/media/' + j.split('/media/')[1] + '/media/' + j.split(
'/media/')[2]
result_.append(url)
return result_
def getData(request):
index = request.GET.get('index')
    msg = 'No data found'
if ExtExecute.objects.filter(query_code=index):
ext = ExtExecute.objects.filter(query_code=index).first()
result = getpicture(ext.upload_file.path)
        if result:
            result = [request.META.get('HTTP_HOST') + pic for pic in result]
        subject = ext.extSubmit.subProject
        dataset = ext.sampleinfoext_set.all()
        type = 1
elif LibExecute.objects.filter(query_code=index):
result = getpicture(LibExecute.objects.filter(query_code=index).
first().upload_file.path)
        if result:
            result = [request.META.get('HTTP_HOST') + pic for pic in result]
        subject = LibExecute.objects.filter(query_code=index).first(
            ).libSubmit.subProject
        dataset = LibExecute.objects.filter(query_code=index).first(
            ).sampleinfolib_set.all()
        type = 2
elif SeqExecute.objects.filter(query_code=index):
subject = SeqExecute.objects.filter(query_code=index).first(
).seqSubmit.subProject
dataset = SeqExecute.objects.filter(query_code=index).first(
).sampleinfoseq_set.all()
type = 3
return render(request, 'Showdata.html', {'data': dataset, 'type':
type, 'subject': subject})
else:
return render(request, 'Showdata.html', {'error': msg})
return render(request, 'Showdata.html', {'data': dataset, 'type': type,
'subject': subject, 'pic': result})
| import datetime
from django.shortcuts import render
from lims.models import *
import os
import zipfile
def getpicture(word):
    root, ext = os.path.splitext(word)  # splitext handles names with extra dots
    if ext.lower() not in ['.doc', '.docx']:
        return None
    word_zip = root + '.zip'
path = ''
for i in word.split('/')[0:-1]:
path += i
path += '/'
path += 'tem/'
if not os.path.exists(path):
os.rename(word, word_zip)
f = zipfile.ZipFile(word_zip, 'r')
for file in f.filelist:
f.extract(file, path)
f.close()
os.rename(word_zip, word)
pic = os.listdir(os.path.join(path, 'word/media'))
result = []
result_ = []
for i in pic:
result.append(os.path.join(path, 'word/media/') + i)
for j in result:
url = '/media/' + j.split('/media/')[1] + '/media/' + j.split(
'/media/')[2]
result_.append(url)
return result_
else:
pic = os.listdir(os.path.join(path, 'word/media'))
result = []
result_ = []
for i in pic:
result.append(os.path.join(path, 'word/media/') + i)
for j in result:
url = '/media/' + j.split('/media/')[1] + '/media/' + j.split(
'/media/')[2]
result_.append(url)
return result_
def getData(request):
index = request.GET.get('index')
    msg = 'No data found'
if ExtExecute.objects.filter(query_code=index):
ext = ExtExecute.objects.filter(query_code=index).first()
result = getpicture(ext.upload_file.path)
        if result:
            result = [request.META.get('HTTP_HOST') + pic for pic in result]
        subject = ext.extSubmit.subProject
        dataset = ext.sampleinfoext_set.all()
        type = 1
elif LibExecute.objects.filter(query_code=index):
result = getpicture(LibExecute.objects.filter(query_code=index).
first().upload_file.path)
        if result:
            result = [request.META.get('HTTP_HOST') + pic for pic in result]
        subject = LibExecute.objects.filter(query_code=index).first(
            ).libSubmit.subProject
        dataset = LibExecute.objects.filter(query_code=index).first(
            ).sampleinfolib_set.all()
        type = 2
elif SeqExecute.objects.filter(query_code=index):
subject = SeqExecute.objects.filter(query_code=index).first(
).seqSubmit.subProject
dataset = SeqExecute.objects.filter(query_code=index).first(
).sampleinfoseq_set.all()
type = 3
return render(request, 'Showdata.html', {'data': dataset, 'type':
type, 'subject': subject})
else:
return render(request, 'Showdata.html', {'error': msg})
return render(request, 'Showdata.html', {'data': dataset, 'type': type,
'subject': subject, 'pic': result})
| import datetime
from django.shortcuts import render
from lims.models import *
import os
import zipfile
def getpicture(word):
if word.split(".")[1] not in ["doc","docx"]:
return None
word_zip = word.split(".")[0] + ".zip"
path = ""
for i in word.split("/")[0:-1]:
path += i
path += "/"
path += "tem/"
if not os.path.exists(path):
os.rename(word,word_zip)
f = zipfile.ZipFile(word_zip,"r")
for file in f.filelist:
f.extract(file,path)
f.close()
os.rename(word_zip,word)
pic = os.listdir(os.path.join(path,"word/media"))
result = []
result_ = []
for i in pic:
result.append(os.path.join(path,"word/media/") + i)
for j in result:
url = "/media/" + j.split("/media/")[1] + "/media/" + j.split("/media/")[2]
result_.append(url)
return result_
else:
pic = os.listdir(os.path.join(path, "word/media"))
result = []
result_ = []
for i in pic:
result.append(os.path.join(path, "word/media/") + i)
for j in result:
url = "/media/" + j.split("/media/")[1] + "/media/" +j.split("/media/")[2]
result_.append(url)
return result_
def getData(request):
index = request.GET.get("index")
msg = "未查找到数据"
if ExtExecute.objects.filter(query_code=index):
ext = ExtExecute.objects.filter(query_code=index).first()
result = getpicture(ext.upload_file.path)
        if result:
            result = [request.META.get("HTTP_HOST") + pic for pic in result]
        subject = ext.extSubmit.subProject
        dataset = ext.sampleinfoext_set.all()
        type = 1
elif LibExecute.objects.filter(query_code=index):
result = getpicture(LibExecute.objects.filter(query_code=index).first().upload_file.path)
        if result:
            result = [request.META.get("HTTP_HOST") + pic for pic in result]
        subject = LibExecute.objects.filter(query_code=index).first().libSubmit.subProject
        dataset = LibExecute.objects.filter(query_code=index).first().sampleinfolib_set.all()
        type = 2
elif SeqExecute.objects.filter(query_code=index):
subject = SeqExecute.objects.filter(query_code=index).first().seqSubmit.subProject
dataset = SeqExecute.objects.filter(query_code=index).first().sampleinfoseq_set.all()
type = 3
return render(request, "Showdata.html", {"data": dataset, "type": type, "subject": subject})
else:
return render(request,"Showdata.html",{"error":msg})
return render(request,"Showdata.html",{"data":dataset,"type":type,"subject":subject,"pic":result}) | [
0,
1,
2,
3,
4
] |
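The getpicture function in the record above works because a .docx file is an ordinary ZIP archive whose embedded images live under word/media. A minimal sketch of the same idea follows (the function name and paths are illustrative); note that zipfile reads the archive by content, so the rename-to-.zip step in the original is not actually required.

import os
import zipfile

def extract_docx_images(docx_path, out_dir):
    # Copy every image embedded in a .docx into out_dir and return their paths.
    extracted = []
    with zipfile.ZipFile(docx_path, 'r') as archive:  # a .docx is just a ZIP
        for name in archive.namelist():
            if name.startswith('word/media/'):
                archive.extract(name, out_dir)
                extracted.append(os.path.join(out_dir, name))
    return extracted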
1,978 | 93c465f017542cfe9cbc55da0ae5a9e34663cf32 | # -*- coding: utf-8 -*-
#########################################################################
## This scaffolding model makes your app work on Google App Engine too
## File is released under public domain and you can use without limitations
#########################################################################
if not request.env.web2py_runtime_gae:
## if NOT running on Google App Engine use SQLite or other DB
db = DAL('sqlite://storage.sqlite')
else:
## connect to Google BigTable (optional 'google:datastore://namespace')
db = DAL('google:datastore')
## store sessions and tickets there
session.connect(request, response, db = db)
## or store session in Memcache, Redis, etc.
## from gluon.contrib.memdb import MEMDB
## from google.appengine.api.memcache import Client
## session.connect(request, response, db = MEMDB(Client()))
## by default give a view/generic.extension to all actions from localhost
## none otherwise. a pattern can be 'controller/function.extension'
response.generic_patterns = ['*'] if request.is_local else []
#########################################################################
## Here is sample code if you need for
## - email capabilities
## - authentication (registration, login, logout, ... )
## - authorization (role based authorization)
## - services (xml, csv, json, xmlrpc, jsonrpc, amf, rss)
## - old style crud actions
## (more options discussed in gluon/tools.py)
#########################################################################
from gluon.tools import Auth, Crud, Service, PluginManager, prettydate
auth = Auth(db, hmac_key=Auth.get_or_create_key())
crud, service, plugins = Crud(db), Service(), PluginManager()
## create all tables needed by auth if not custom tables
auth.define_tables()
## configure email
mail=auth.settings.mailer
mail.settings.server = 'logging' or 'smtp.gmail.com:587'
mail.settings.sender = '[email protected]'
mail.settings.login = 'username:password'
## configure auth policy
auth.settings.registration_requires_verification = False
auth.settings.registration_requires_approval = False
auth.settings.reset_password_requires_verification = True
## if you need to use OpenID, Facebook, MySpace, Twitter, Linkedin, etc.
## register with janrain.com, write your domain:api_key in private/janrain.key
from gluon.contrib.login_methods.rpx_account import use_janrain
use_janrain(auth,filename='private/janrain.key')
#########################################################################
## Define your tables below (or better in another model file) for example
##
## >>> db.define_table('mytable',Field('myfield','string'))
##
## Fields can be 'string','text','password','integer','double','boolean'
## 'date','time','datetime','blob','upload', 'reference TABLENAME'
## There is an implicit 'id integer autoincrement' field
## Consult manual for more options, validators, etc.
##
## More API examples for controllers:
##
## >>> db.mytable.insert(myfield='value')
## >>> rows=db(db.mytable.myfield=='value').select(db.mytable.ALL)
## >>> for row in rows: print row.id, row.myfield
#########################################################################
db=SQLDB("sqlite://db.db")
from plugin_ckeditor import CKEditor
ckeditor = CKEditor(db)
ckeditor.define_tables()
db.define_table('home',
Field('image','upload'),
Field('description', length=2096),
Field('biography', length=2096))
db.define_table('personal',
Field('first_name'),
Field('surname'),
Field('image','upload'),
Field('description', 'text',length=2096),
Field('biography', 'text',length=2096),
Field('email'))
db.personal.biography.widget=ckeditor.widget
db.personal.description.widget=ckeditor.widget
db.personal.image.represent=lambda image,row: A(IMG(_src=URL('download',args=image),_height="120"),_href=URL('show_image',args=row.id, vars=request.vars))
db.personal.description.represent=lambda d,r:XML(d)
db.personal.biography.represent=lambda b,r:XML(b)
def idx(id):
return A(IMG(_src=URL('download',args=db(db.image.show==id).select().first().thumb),_height="120"),_href=URL('category',args=id, vars=request.vars))
db.define_table('show',
SQLField('name'))
db.show.name.requires=[IS_NOT_EMPTY(),IS_NOT_IN_DB(db,db.show.name)]
#db.show.id.represent=lambda id,row: A(IMG(_src=URL('download',args=db().select(db.image.show==id).first().file),_height="120"),_href=URL('show_image',args=row.id, vars=request.vars))
db.show.id.represent=lambda id, row:idx(id)
db.show.id.label=' '
db.show.name.label='Gallery'
db.define_table('image',
Field('show',db.show),
Field('title'),
Field('size'),
Field('media'),
Field('price'),
Field('file','upload'),
Field('thumb','upload',writable=False))
def no_none(x):
    if x is None:
        return " "
    else:
        return x
def thumbnail(infile):
import os, sys
from PIL import Image
size = 128, 128
outfile = os.path.splitext(infile)[0] + "tn"
im = Image.open(infile)
im.thumbnail(size)
im.save(outfile, "JPEG")
return outfile+".jpg"
class RESIZE(object):
def __init__(self,nx=160,ny=80,error_message='niepoprawny plik'):
(self.nx,self.ny,self.error_message)=(nx,ny,error_message)
def __call__(self,value):
if isinstance(value, str) and len(value)==0:
return (value,None)
from PIL import Image
import cStringIO
try:
img = Image.open(value.file)
img.thumbnail((self.nx,self.ny), Image.ANTIALIAS)
s = cStringIO.StringIO()
img.save(s, 'JPEG', quality=100)
s.seek(0)
value.file = s
except:
return (value, self.error_message)
else:
return (value, None)
def THUMB(image, nx=120, ny=120):
from PIL import Image
import os
img = Image.open(request.folder + 'uploads/' + image)
img.thumbnail((nx,ny), Image.ANTIALIAS)
root,ext = os.path.splitext(image)
thumb='%s_thumb%s' %(root, ext)
img.save(request.folder + 'uploads/' + thumb)
return thumb
db.image.show.requires=IS_IN_DB(db,db.show.id,'%(name)s')
db.image.id.readable=False
db.image.file.represent=lambda file,row: A(IMG(_src=URL('download',args=file),_height="120"),_href=URL('show_image',args=row.id, vars=request.vars))
db.image.show.represent=lambda show, row:db.show[show].name
db.image.size.represent=lambda size, row:no_none(size)
db.image.media.represent=lambda media, row:no_none(media)
db.image.title.label='Image name'
db.image.file.label=' '
db.image.thumb.label=' '
db.image.thumb.compute=lambda r:THUMB(r['file'])
db.image.thumb.represent=lambda thumb,row: A(IMG(_src=URL('download',args=thumb),_height="120"),_href=URL('show_image',args=row.id, vars=request.vars))
| null | null | null | null | [
0
] |
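The RESIZE, thumbnail and THUMB helpers in the record above all rely on PIL's Image.thumbnail, which resizes an image in place while preserving its aspect ratio. A standalone sketch of the pattern (file names are illustrative; newer Pillow releases spell the resampling filter Image.LANCZOS, for which the older Image.ANTIALIAS used in the record is an alias):

import os
from PIL import Image

def make_thumb(path, nx=120, ny=120):
    # Write a <name>_thumb<ext> file next to the original and return its path.
    img = Image.open(path)
    img.thumbnail((nx, ny), Image.LANCZOS)  # in-place resize, keeps aspect ratio
    root, ext = os.path.splitext(path)
    thumb = '%s_thumb%s' % (root, ext)
    img.save(thumb)  # output format inferred from the file extension
    return thumb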
1,979 | fcbbffe0682da9f2131fdddbef606dcae3303ce9 | <mask token>
| <mask token>
print(float(my_int))
<mask token>
| greeting = 'My name is '
your_name = ''
best_string = 'I am '
your_age = 6
my_int = 5
print(float(my_int))
pi = 3.1415
| # Create two integer variables and print their sum. What is the type of the
# result?
# Now, create a float variable and print its sum with an integer variable. What
# is the type of the result.
# Divide your smallest integer value by your largest integer value. Is the
# result what you expected? Now, do the same with your float variable and an
# integer variable. What to you get?
# Fill in the blanks, try adding the following two string variables and print
# the result. What do you get?
greeting = "My name is "
your_name = ""
# Try adding the following variables.
best_string = "I am "
your_age = 6
# Although Python can add integers and floats, it can't add strings and integers.
# In order to do this, we need to convert the integer variable to a string using
# the str keyword
# Uncomment the line below and check that it works.
# print(best_string + str(your_age))
# You can create complex string by using multiple string additions.
# Uncomment the line below and see the result.
# print(best_string + str(your_age) + " years old")
# We can also use the float keyword and the int keyword to convert variables to
# floats and ints respectively.
my_int = 5
print(float(my_int))
# Now, convert pi to an int.
pi = 3.1415
| null | [
0,
1,
2,
3
] |
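A quick runnable sketch of the conversions this exercise file is driving at
(the concrete values are arbitrary):

my_int = 5
pi = 3.1415
print(my_int + 2)        # 7 -- int + int stays int
print(my_int + pi)       # 8.1415 -- int + float promotes to float
print(1 / 3)             # 0.3333... -- division in Python 3 is true division
print("I am " + str(6) + " years old")   # str() makes the concatenation legal
print(int(pi))           # 3 -- int() truncates toward zero
# "I am " + 6 without str() raises:
# TypeError: can only concatenate str (not "int") to str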
1,980 | 40b3c403f99044eb61740d62eda15ddd08b0f739 | <mask token>
class Script(BaseScript):
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
| <mask token>
class Script(BaseScript):
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
def execute_cli(self, **kwargs):
v = self.cli('show version', cached=True)
for platform, ver in [('ESCOM L', self.rx_ver_escom_l), ('ESCOM',
self.rx_ver), ('ESCOM', self.rx_ver1)]:
match = ver.search(v)
if match:
break
else:
raise NotImplementedError
if platform == 'ESCOM L':
hw_match = self.rx_hw_escom_l.search(v)
return {'vendor': 'Iskratel', 'version': match.group('version'),
'platform': platform, 'image': hw_match.group('image'),
'attributes': {'Boot PROM': hw_match.group('bootprom'),
'HW version': hw_match.group('hardware'), 'Serial Number':
hw_match.group('serial')}}
r = {'vendor': 'Iskratel', 'version': match.group('version'),
'attributes': {'Boot PROM': match.group('bootprom'),
'HW version': match.group('hardware')}}
v = self.cli('show system', cached=True)
match = self.rx_platform.search(v)
if not match:
match = self.rx_platform1.search(v)
r['platform'] = match.group('platform')
v = self.cli('show system id', cached=True)
match = self.rx_serial.search(v)
if match:
r['attributes']['Serial Number'] = match.group('serial')
return r
| <mask token>
class Script(BaseScript):
name = 'Iskratel.ESCOM.get_version'
cache = True
interface = IGetVersion
rx_ver = re.compile(
'^\\s*SW version\\s+(?P<version>\\S+).*\\n^\\s*Boot version\\s+(?P<bootprom>\\S+).*\\n^\\s*HW version\\s+(?P<hardware>\\S+).*\\n'
, re.MULTILINE)
rx_ver1 = re.compile(
'^\\s+1\\s+(?P<version>\\S+)\\s+(?P<bootprom>\\S+)\\s+(?P<hardware>\\S+)'
, re.MULTILINE)
rx_ver_escom_l = re.compile(
'SI3000 ESCOM L Series Software,\\s*Version\\s(?P<version>\\S+) Build (?P<version_build>\\S+),'
, re.MULTILINE)
rx_hw_escom_l = re.compile(
'ROM:\\s*System Bootstrap, Version\\s*(?P<bootprom>\\S+),\\s*hardware version:\\s*(?P<hardware>\\S+)\\nSerial num:(?P<serial>\\S+), ID num:(?P<id_number>\\S+)\\nSystem image file is \\"(?P<image>\\S+)\\"'
, re.MULTILINE)
rx_platform = re.compile('^\\s*System Description:\\s+(?P<platform>.+)\\n',
re.MULTILINE)
rx_platform1 = re.compile('^\\s+1\\s+(?P<platform>\\S+)\\s*\\n', re.
MULTILINE)
rx_serial = re.compile('^\\s*Serial number : (?P<serial>\\S+)')
def execute_cli(self, **kwargs):
v = self.cli('show version', cached=True)
for platform, ver in [('ESCOM L', self.rx_ver_escom_l), ('ESCOM',
self.rx_ver), ('ESCOM', self.rx_ver1)]:
match = ver.search(v)
if match:
break
else:
raise NotImplementedError
if platform == 'ESCOM L':
hw_match = self.rx_hw_escom_l.search(v)
return {'vendor': 'Iskratel', 'version': match.group('version'),
'platform': platform, 'image': hw_match.group('image'),
'attributes': {'Boot PROM': hw_match.group('bootprom'),
'HW version': hw_match.group('hardware'), 'Serial Number':
hw_match.group('serial')}}
r = {'vendor': 'Iskratel', 'version': match.group('version'),
'attributes': {'Boot PROM': match.group('bootprom'),
'HW version': match.group('hardware')}}
v = self.cli('show system', cached=True)
match = self.rx_platform.search(v)
if not match:
match = self.rx_platform1.search(v)
r['platform'] = match.group('platform')
v = self.cli('show system id', cached=True)
match = self.rx_serial.search(v)
if match:
r['attributes']['Serial Number'] = match.group('serial')
return r
| import re
from noc.core.script.base import BaseScript
from noc.sa.interfaces.igetversion import IGetVersion
class Script(BaseScript):
name = 'Iskratel.ESCOM.get_version'
cache = True
interface = IGetVersion
rx_ver = re.compile(
'^\\s*SW version\\s+(?P<version>\\S+).*\\n^\\s*Boot version\\s+(?P<bootprom>\\S+).*\\n^\\s*HW version\\s+(?P<hardware>\\S+).*\\n'
, re.MULTILINE)
rx_ver1 = re.compile(
'^\\s+1\\s+(?P<version>\\S+)\\s+(?P<bootprom>\\S+)\\s+(?P<hardware>\\S+)'
, re.MULTILINE)
rx_ver_escom_l = re.compile(
'SI3000 ESCOM L Series Software,\\s*Version\\s(?P<version>\\S+) Build (?P<version_build>\\S+),'
, re.MULTILINE)
rx_hw_escom_l = re.compile(
'ROM:\\s*System Bootstrap, Version\\s*(?P<bootprom>\\S+),\\s*hardware version:\\s*(?P<hardware>\\S+)\\nSerial num:(?P<serial>\\S+), ID num:(?P<id_number>\\S+)\\nSystem image file is \\"(?P<image>\\S+)\\"'
, re.MULTILINE)
rx_platform = re.compile('^\\s*System Description:\\s+(?P<platform>.+)\\n',
re.MULTILINE)
rx_platform1 = re.compile('^\\s+1\\s+(?P<platform>\\S+)\\s*\\n', re.
MULTILINE)
rx_serial = re.compile('^\\s*Serial number : (?P<serial>\\S+)')
def execute_cli(self, **kwargs):
v = self.cli('show version', cached=True)
for platform, ver in [('ESCOM L', self.rx_ver_escom_l), ('ESCOM',
self.rx_ver), ('ESCOM', self.rx_ver1)]:
match = ver.search(v)
if match:
break
else:
raise NotImplementedError
if platform == 'ESCOM L':
hw_match = self.rx_hw_escom_l.search(v)
return {'vendor': 'Iskratel', 'version': match.group('version'),
'platform': platform, 'image': hw_match.group('image'),
'attributes': {'Boot PROM': hw_match.group('bootprom'),
'HW version': hw_match.group('hardware'), 'Serial Number':
hw_match.group('serial')}}
r = {'vendor': 'Iskratel', 'version': match.group('version'),
'attributes': {'Boot PROM': match.group('bootprom'),
'HW version': match.group('hardware')}}
v = self.cli('show system', cached=True)
match = self.rx_platform.search(v)
if not match:
match = self.rx_platform1.search(v)
r['platform'] = match.group('platform')
v = self.cli('show system id', cached=True)
match = self.rx_serial.search(v)
if match:
r['attributes']['Serial Number'] = match.group('serial')
return r
| # ---------------------------------------------------------------------
# Iskratel.ESCOM.get_version
# ---------------------------------------------------------------------
# Copyright (C) 2007-2018 The NOC Project
# See LICENSE for details
# ---------------------------------------------------------------------
# Python modules
import re
# NOC modules
from noc.core.script.base import BaseScript
from noc.sa.interfaces.igetversion import IGetVersion
class Script(BaseScript):
name = "Iskratel.ESCOM.get_version"
cache = True
interface = IGetVersion
rx_ver = re.compile(
r"^\s*SW version\s+(?P<version>\S+).*\n"
r"^\s*Boot version\s+(?P<bootprom>\S+).*\n"
r"^\s*HW version\s+(?P<hardware>\S+).*\n",
re.MULTILINE,
)
rx_ver1 = re.compile(
r"^\s+1\s+(?P<version>\S+)\s+(?P<bootprom>\S+)\s+(?P<hardware>\S+)", re.MULTILINE
)
rx_ver_escom_l = re.compile(
r"SI3000 ESCOM L Series Software,\s*Version\s(?P<version>\S+) Build (?P<version_build>\S+),",
re.MULTILINE,
)
rx_hw_escom_l = re.compile(
r"ROM:\s*System Bootstrap, Version\s*(?P<bootprom>\S+),\s*hardware version:\s*(?P<hardware>\S+)\n"
r"Serial num:(?P<serial>\S+), ID num:(?P<id_number>\S+)\n"
r"System image file is \"(?P<image>\S+)\"",
re.MULTILINE,
)
rx_platform = re.compile(r"^\s*System Description:\s+(?P<platform>.+)\n", re.MULTILINE)
rx_platform1 = re.compile(r"^\s+1\s+(?P<platform>\S+)\s*\n", re.MULTILINE)
rx_serial = re.compile(r"^\s*Serial number : (?P<serial>\S+)")
def execute_cli(self, **kwargs):
v = self.cli("show version", cached=True)
for platform, ver in [
("ESCOM L", self.rx_ver_escom_l),
("ESCOM", self.rx_ver),
("ESCOM", self.rx_ver1),
]:
match = ver.search(v)
if match:
break
else:
raise NotImplementedError
if platform == "ESCOM L":
hw_match = self.rx_hw_escom_l.search(v)
return {
"vendor": "Iskratel",
"version": match.group("version"),
"platform": platform,
"image": hw_match.group("image"),
"attributes": {
"Boot PROM": hw_match.group("bootprom"),
"HW version": hw_match.group("hardware"),
"Serial Number": hw_match.group("serial"),
},
}
r = {
"vendor": "Iskratel",
"version": match.group("version"),
"attributes": {
"Boot PROM": match.group("bootprom"),
"HW version": match.group("hardware"),
},
}
v = self.cli("show system", cached=True)
match = self.rx_platform.search(v)
if not match:
match = self.rx_platform1.search(v)
r["platform"] = match.group("platform")
v = self.cli("show system id", cached=True)
match = self.rx_serial.search(v)
if match:
r["attributes"]["Serial Number"] = match.group("serial")
return r
| [
1,
2,
3,
4,
5
] |
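The version script above is built entirely on re.search with named groups; a
standalone sketch of the same pattern against a fabricated banner (the banner
text is an assumption, not captured ESCOM output):

import re

banner = "SW version    1.5.44\nBoot version  1.0.2\nHW version    01.01\n"
rx = re.compile(
    r"^\s*SW version\s+(?P<version>\S+).*\n"
    r"^\s*Boot version\s+(?P<bootprom>\S+).*\n"
    r"^\s*HW version\s+(?P<hardware>\S+).*\n",
    re.MULTILINE,
)
match = rx.search(banner)
if match:
    print(match.group("version"), match.group("bootprom"), match.group("hardware"))
    # -> 1.5.44 1.0.2 01.01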
1,981 | b668945820abe893b92fdf26ccd8563ccff804ee | <mask token>
class DatasetLoader(object):
<mask token>
<mask token>
<mask token>
<mask token>
| <mask token>
class DatasetLoader(object):
<mask token>
def __init__(self, ds_id, ds_name, ds_desc, ds_columns=None):
if ds_columns is None:
columns = ['user_id', 'item_id', 'values', 'timestamp']
else:
columns = ds_columns
self.id = ds_id
self.name = ds_name
self.desc = ds_desc
train_path = self.base_path + self.name + str(self.id) + '.base'
test_path = self.base_path + self.name + str(self.id) + '.test'
self.train = pd.read_csv(train_path, header=None, delim_whitespace=True
)
self.train.columns = columns
self.test = pd.read_csv(test_path, header=None, delim_whitespace=True)
self.test.columns = columns
self.train_ratings = Ratings(self.to_lists(self.train))
self.test_ratings = Ratings(self.to_lists(self.test))
def to_lists(self, ds):
"""
        :param ds: pandas DataFrame holding the train or the test set
:return: dataset in form of three list saved in a dict {users:u, items:i, values:v}
"""
lists = {'users': ds['user_id'].values, 'items': ds['item_id'].
values, 'values': ds['values'].values}
return lists
def __str__(self):
return (
f'Dataset Id: {self.id}, File Name: {self.name}, Description: {self.desc}. train size: {len(self.train)}, test size: {len(self.test)}'
)
| <mask token>
class DatasetLoader(object):
base_path = './dataset/'
def __init__(self, ds_id, ds_name, ds_desc, ds_columns=None):
if ds_columns is None:
columns = ['user_id', 'item_id', 'values', 'timestamp']
else:
columns = ds_columns
self.id = ds_id
self.name = ds_name
self.desc = ds_desc
train_path = self.base_path + self.name + str(self.id) + '.base'
test_path = self.base_path + self.name + str(self.id) + '.test'
self.train = pd.read_csv(train_path, header=None, delim_whitespace=True
)
self.train.columns = columns
self.test = pd.read_csv(test_path, header=None, delim_whitespace=True)
self.test.columns = columns
self.train_ratings = Ratings(self.to_lists(self.train))
self.test_ratings = Ratings(self.to_lists(self.test))
def to_lists(self, ds):
"""
        :param ds: pandas DataFrame holding the train or the test set
:return: dataset in form of three list saved in a dict {users:u, items:i, values:v}
"""
lists = {'users': ds['user_id'].values, 'items': ds['item_id'].
values, 'values': ds['values'].values}
return lists
def __str__(self):
return (
f'Dataset Id: {self.id}, File Name: {self.name}, Description: {self.desc}. train size: {len(self.train)}, test size: {len(self.test)}'
)
| <mask token>
import pandas as pd
from Ratings import Ratings
class DatasetLoader(object):
base_path = './dataset/'
def __init__(self, ds_id, ds_name, ds_desc, ds_columns=None):
if ds_columns is None:
columns = ['user_id', 'item_id', 'values', 'timestamp']
else:
columns = ds_columns
self.id = ds_id
self.name = ds_name
self.desc = ds_desc
train_path = self.base_path + self.name + str(self.id) + '.base'
test_path = self.base_path + self.name + str(self.id) + '.test'
self.train = pd.read_csv(train_path, header=None, delim_whitespace=True
)
self.train.columns = columns
self.test = pd.read_csv(test_path, header=None, delim_whitespace=True)
self.test.columns = columns
self.train_ratings = Ratings(self.to_lists(self.train))
self.test_ratings = Ratings(self.to_lists(self.test))
def to_lists(self, ds):
"""
        :param ds: pandas DataFrame holding the train or the test set
:return: dataset in form of three list saved in a dict {users:u, items:i, values:v}
"""
lists = {'users': ds['user_id'].values, 'items': ds['item_id'].
values, 'values': ds['values'].values}
return lists
def __str__(self):
return (
f'Dataset Id: {self.id}, File Name: {self.name}, Description: {self.desc}. train size: {len(self.train)}, test size: {len(self.test)}'
)
| """
Class: Dataset
This class is responsible of loading datasets
After initializing using load method the class results two parameter:
train: contains train set
test: contains test set
It's able of returning data structure in form of three lists:
- users
- items
- values (which are ratings)
"""
import pandas as pd
from Ratings import Ratings
class DatasetLoader(object):
# Default path where dataset files are located
base_path = './dataset/'
def __init__(self, ds_id, ds_name, ds_desc, ds_columns=None):
if ds_columns is None:
columns = ['user_id', 'item_id', 'values', 'timestamp']
else:
columns = ds_columns
self.id = ds_id
self.name = ds_name
self.desc = ds_desc
train_path = self.base_path + self.name + str(self.id) + '.base'
test_path = self.base_path + self.name + str(self.id) + '.test'
self.train = pd.read_csv(train_path, header=None, delim_whitespace=True)
self.train.columns = columns
self.test = pd.read_csv(test_path, header=None, delim_whitespace=True)
self.test.columns = columns
self.train_ratings = Ratings(self.to_lists(self.train))
self.test_ratings = Ratings(self.to_lists(self.test))
def to_lists(self, ds):
"""
        :param ds: pandas DataFrame holding the train or the test set
:return: dataset in form of three list saved in a dict {users:u, items:i, values:v}
"""
#ds = getattr(self, ds_type)
lists = {
'users': ds['user_id'].values,
'items': ds['item_id'].values,
'values': ds['values'].values
}
return lists
def __str__(self):
return f'Dataset Id: {self.id}, File Name: {self.name}, Description: {self.desc}. \
train size: {len(self.train)}, test size: {len(self.test)}'
# Testing Area
# m_lens = DatasetLoader(2, 'u', 'MovieLens dataset, fold 1')
# print(len(m_lens.train))
# print(len(m_lens.test))
# print(m_lens)
| [
1,
4,
5,
6,
7
] |
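A hedged usage sketch for DatasetLoader; it assumes ./dataset/u2.base and
./dataset/u2.test exist in the MovieLens ml-100k layout and that Ratings simply
wraps the dict handed to it:

loader = DatasetLoader(2, 'u', 'MovieLens dataset, fold 2')
print(loader)                            # sizes come from __str__ above
lists = loader.to_lists(loader.train)
print(lists['users'][:5], lists['values'][:5])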
1,982 | 51711c9293f8b5d9dc4d299569da04e2d1bc0064 |
# Procedures for automatic COBD calculation.
# The useful ones are:
# - get_heuristic4_OBD() as a heuristic one [the only heuristic one here that does not miss out solutions]
# - getOBD2plus4() as the fastest exhaustive one [uses two filtering techniques for early detection of graphs without an OBD]
import itertools
import time
import pickle
import numpy
import networkx as nx
import matplotlib.pyplot as plt
def insertOBDlabels(P, obd):
allOK = True
for n in P.nodes():
label = None
for i in range(len(obd)): # obd is a list of elements (lists), if n is in i-th element, then i is its label
if n in obd[i]:
label = i
if label == None:
allOK = False
print "Warning: not all nodes are in the provided OBD."
break
P.node[n]['OBDlabel'] = label
return allOK
def OBDnodeCondition(n, P):
"""assumes that nodes have ['OBDlabel'] set already (this is why insertOBDlabels() must be called beforehand) """
condition = True
higherNeighborLabel = None
for neigh in P.neighbors(n):
if P.node[neigh]['OBDlabel'] == P.node[n]['OBDlabel']:
condition = False
break
elif P.node[neigh]['OBDlabel'] > P.node[n]['OBDlabel']:
if higherNeighborLabel == None:
higherNeighborLabel = P.node[neigh]['OBDlabel']
else:
if P.node[neigh]['OBDlabel'] != higherNeighborLabel:
condition = False
break
return condition
def OBDcorrect(P, obd):
correct = True
ans = insertOBDlabels(P, obd) # adds 'OBDlabel' to each node in P, according to decomposition obd
if ans == False:
correct = False
else:
for n in P.nodes():
if not OBDnodeCondition(n, P): #do all the neighbors have different labels, and all with higher label have the same one?
correct = False
break
return correct
def connectedOBD(P, obd):
    '''tests whether the obd is such that each node with a higher level is connected to some node with a lower level (needed in our depth-first kind of algorithm)'''
connected = True
seen = []
if len(obd[0]) > 1:
connected = False
##print "Warning: more than one root element in obd."
else:
seen.append(obd[0][0])
for i in range(len(obd)):
if i == 0:
pass
else:
for el in obd[i]:
test = False
neighbors = P.neighbors(el)
for neigh in neighbors:
if neigh in seen:
test = True
if test == False:
connected = False
else:
seen.append(el)
return connected
# create all possible permutations of elements (IDs) - and on each permutation then try all possible splits....first with len(P) parts (optimal) and then lower.
def split_list(data, n):
#""" splits a list into n parts in all possible ways
#>>> list(split_list([1, 2, 3, 4], 2))
#[[[1], [2, 3, 4]], [[1, 2], [3, 4]], [[1, 2, 3], [4]]]
#>>> list(split_list([1, 2, 3, 4], 3))
#[[[1], [2], [3, 4]], [[1], [2, 3], [4]], [[1, 2], [3], [4]]]"""
from itertools import combinations, chain
for splits in combinations(range(1, len(data)), n-1):
result = []
prev = None
for split in chain(splits, [None]):
result.append(data[prev:split])
prev = split
yield result
def getOBD(P):
result = None
found = False
IDs = []
for n in P.nodes():
IDs.append(P.node[n]['id'])
# we will try with largest possible decomposition size and then go lower, if nothing is found
decomp_size = len(IDs)
while decomp_size > 0:
# now we go over all possible permutations of IDs
permutations = itertools.permutations(IDs) # this has to be recreated each time we go over it again
for perm in permutations:
splits = split_list(list(perm), decomp_size)
for s in splits:
# now this is our candidate OBD
if ( OBDcorrect(P, s) and connectedOBD(P, s) ): # connectedOBD is additional condition because of our depth-first approach
result = s
found = True
if found == True: break;
if found == True: break;
if found == True: break;
decomp_size = decomp_size -1
if found == False:
##print "OBD was not found for this pattern."
result = None
return result
#------------------------------HEURISTIC 1--------------------------------
def heuristic1_label_OBD(n, P, current_label):
P.node[n]['OBDlabel'] = current_label
current_label = current_label + 1
neighbors = P.neighbors(n)
for neigh in neighbors:
if 'OBDlabel' in P.node[neigh].keys():
if P.node[neigh]['OBDlabel'] > current_label:
current_label = P.node[neigh]['OBDlabel']
# we got maximum of current label or any node that neighbors have - now we label them all with that
for neigh in neighbors:
if 'OBDlabel' in P.node[neigh].keys():
if P.node[neigh]['OBDlabel'] >= P.node[n]['OBDlabel']:
heuristic1_label_OBD(neigh, P, current_label)
else: # if set and smaller than mine, leave them alone
pass
else: # if not set, then not lower and not labelled
heuristic1_label_OBD(neigh, P, current_label)
def produceOBDlist(P):
"""expects pattern P which has OBDlabel set for all the nodes. OBDlist is created accoring to labels (some might be skipped! so this is taken into account)"""
# first we'll get all OBD labels, so that we can see how many different ones are there...
output = []
OBDlabels = set() # set, so that we do not collect duplicate labels
for n in P.nodes():
OBDlabels.add(P.node[n]['OBDlabel'])
OBDlabels = list(OBDlabels) # now we have a list of labels without duplicates
OBDlabels.sort() # in-place sorting (OBDlabels is changed)
for el in OBDlabels:
innerlist = []
for n in P.nodes():
if P.node[n]['OBDlabel'] == el:
innerlist.append(n)
output.append(innerlist)
return output
def get_heuristic1_OBD(P):
heuristic1_label_OBD(P.nodes()[0], P, 1)
obd = produceOBDlist(P)
if ( OBDcorrect(P, obd) and connectedOBD(P, obd) ):
return obd
else:
return None
# result will be put into ['OBDlabel'] of nodes in P, so you have to create then the proper format...
#------------------------------HEURISTIC 2--------------------------------
def heuristic2_label_OBD(n, P, label, critical=None):
"""heuristic approach with backtracking"""
print "trying to label " + str(n) + " with " + str(label)
nodes_labeled = []
if ('critical' in P.node[n].keys()) and (P.node[n]['critical']==True) and (P.node[n]['OBDlabel'] != label) :
print "FAIL on critical and not the same label."
return (False, []) # being critical, we could avoid failure only if the label to set would be the same (it happens)
else:
P.node[n]['OBDlabel'] = label
nodes_labeled.append(n) # this is a list that gets passed through recursions
if critical == True:
P.node[n]['critical'] = True
# labeling part done
flag_critical = False # if I will label more than one neighbor from now on, then the labels will be critical (not to be changed by others)
new_label = label + 1
neighbors = P.neighbors(n)
for neigh in neighbors:
if 'OBDlabel' in P.node[neigh].keys():
if P.node[neigh]['OBDlabel'] > new_label:
new_label = P.node[neigh]['OBDlabel']
# we got maximum of current label or any node that neighbors have - now we label them all with that
neighbors_to_label = []
for neigh in neighbors:
if 'OBDlabel' in P.node[neigh].keys():
if (P.node[neigh]['OBDlabel'] >= P.node[n]['OBDlabel']) or (P.node[neigh]['OBDlabel'] == None): # now they can have it, but set to None (because of removal in failers)
neighbors_to_label.append(neigh)
else: # if set and smaller than mine, leave them alone
pass
else: # if not set, then not lower and not labelled
neighbors_to_label.append(neigh)
# now we have all the neighbors that need to be labeled
if len(neighbors_to_label) > 1:
flag_critical = True
# and now the recursive step - labeling all these nodes
permutations = itertools.permutations(neighbors_to_label) # iterator : gets exhausted as we access elements
for perm in permutations:
print "trying perm: " + str(perm)
this_run_success = True
this_run_labeled = []
for el in perm:
(s, nl) = heuristic2_label_OBD(el, P, new_label, flag_critical)
this_run_labeled = this_run_labeled + nl
if s == False:
this_run_success = False
break
if this_run_success == False:
# then unlabel all that were labelled up to now
for nn in this_run_labeled:
print "removing label of " + str(nn)
P.node[nn]['OBDlabel'] = None
P.node[nn]['critical'] = False
else: # obviously success is True, we managed to label all others...
nodes_labeled = nodes_labeled + this_run_labeled
print "Win in labeling neighbors of " + str(n)
                return (True, nodes_labeled)
        # if no permutation is successful, we end up returning the last line
        print "FAIL of all permutations from " + str(n)
        return (False, nodes_labeled)
def get_heuristic2_OBD(P):
heuristic2_label_OBD(P.nodes()[0], P, 1)
#------------------------------HEURISTIC 2B--------------------------------
def heuristic2B_label_OBD(n, P, label, critical=None):
"""heuristic approach with backtracking"""
nodes_labeled = []
flag_critical = False # if I will label more than one neighbor from now on, then the labels will be critical (not to be changed by others)
new_label = label + 1
neighbors = P.neighbors(n)
for neigh in neighbors:
if 'OBDlabel' in P.node[neigh].keys(): # if it has a label
if P.node[neigh]['OBDlabel'] > new_label: # and it is higher than what I would use for labeling
new_label = P.node[neigh]['OBDlabel']
# we got maximum of current label or any node that neighbors have - now we label them all with that
neighbors_to_label = []
for neigh in neighbors:
if 'OBDlabel' in P.node[neigh].keys():
if (P.node[neigh]['OBDlabel'] >= P.node[n]['OBDlabel']) or (P.node[neigh]['OBDlabel'] == None): # now they can have it, but set to None (because of removal in failers)
neighbors_to_label.append(neigh)
else: # if set and smaller than mine, leave them alone
pass
else: # if not set, then not lower and not labelled
neighbors_to_label.append(neigh)
# now we have all the neighbors that need to be labeled
if len(neighbors_to_label) > 1:
flag_critical = True
# and now labeling all these nodes
for neigh in neighbors_to_label:
if ('critical' in P.node[neigh].keys()) and (P.node[neigh]['critical']==True) and (P.node[neigh]['OBDlabel'] != new_label) :
return (False, nodes_labeled) # being critical, we could avoid failure only if the label to set would be the same (it happens)
else:
P.node[neigh]['OBDlabel'] = new_label
nodes_labeled.append(neigh) # this is a list that gets passed through recursions
if flag_critical == True:
P.node[neigh]['critical'] = True
# labeling part done
# and now recursive step - going into each neighbor to continue, in any order if necessary
permutations = itertools.permutations(neighbors_to_label) # iterator : gets exhausted as we access elements
for perm in permutations:
this_run_success = True
this_run_labeled = []
for el in perm:
(s, nl) = heuristic2B_label_OBD(el, P, new_label, flag_critical)
this_run_labeled = this_run_labeled + nl
if s == False:
this_run_success = False
if this_run_success == False:
# then unlabel all that were labelled up to now
for nn in this_run_labeled:
P.node[nn]['OBDlabel'] = None
P.node[nn]['critical'] = False
else: # obviously success is True, we managed to label all others...
nodes_labeled = nodes_labeled + this_run_labeled
            return (True, nodes_labeled)
# if no permutation is successful, we end up returning the last line
return (False, nodes_labeled)
def get_heuristic2B_OBD(P):
# in this version we label the root before recursion
for n in P.nodes():
root = n
P.node[root]['OBDlabel'] = 1
(success, result) = heuristic2B_label_OBD(root, P, 1)
if success:
obd = produceOBDlist(P)
if ( OBDcorrect(P, obd) and connectedOBD(P, obd) ):
return obd
else:
for no in P.nodes():
P.node[no]['OBDlabel'] = None
P.node[no]['critical'] = False
else: # in case of failure of all attempts with this node as a root - we have to clean up all flags and labels before the new root is tried
for nn in P.nodes():
P.node[nn]['OBDlabel'] = None
P.node[nn]['critical'] = False
# if we did not return any solution before, then None was found
return None
#----------------------------------------------------------------------------------
#------------------------------exhaustive 2--------------------------------
def any_neighbors(nodelist, G):
"""If any two nodes in the nodelist are neighbors in graph G, it outputs TRUE, otherwise FALSE."""
outcome = False
#neighbors = P.neighbors(n)
for i in range(len(nodelist)):
for j in range(i+1, len(nodelist)):
if G.has_edge(nodelist[i], nodelist[j]) or G.has_edge(nodelist[j], nodelist[i]):
##if nodelist[j] in G.neighbors(nodelist[i]):
outcome = True
return outcome
return outcome
def getOBD2(P):
result = None
found = False
IDs = []
for n in P.nodes():
IDs.append(P.node[n]['id'])
# we will try with largest possible decomposition size and then go lower, if nothing is found
decomp_size = len(IDs)
while decomp_size > 0:
# now we go over all possible permutations of IDs
permutations = itertools.permutations(IDs) # this has to be recreated each time we go over it again
for perm in permutations:
splits = split_list(list(perm), decomp_size)
for s in splits:
# now this is our candidate OBD
# -------speedup A: checking for neighbors in elements of split
noneighbors = True
for nodelist in s:
if len(nodelist)>1:
if any_neighbors(nodelist, P):
noneighbors = False
# -------
if noneighbors and OBDcorrect(P, s) and connectedOBD(P, s): # connectedOBD is additional condition because of our depth-first approach
result = s
found = True
if found == True: break;
if found == True: break;
if found == True: break;
decomp_size = decomp_size -1
if found == False:
result = None
return result
#----------------------------------------------------------------------------------
#------------------------------exhaustive 3--------------------------------
def size_degree_check(obd, P):
"""for every node in OBD calculates its [degree(n) - linksToNodesAlreadyInOBD]
and verifies whether in the remaining part of OBD there is an element of at least that size (all bigger must have equal label)"""
outcome = True
flatOBD = [item for sublist in obd for item in sublist] # we get a flat list from a list of lists
seen = []
for i in range(len(flatOBD)):
n = flatOBD[i]
linksback = 0
for el in seen:
if P.has_edge(el, n) or P.has_edge(n, el):
linksback = linksback + 1
out_degree = P.degree(n) - linksback
# now verify whether we have such strength in the rest of obd
targetElement = None
for elobd in obd:
if n in elobd:
targetElement = elobd
# we now in which element is n - now check from here on
remaining_obd = obd[obd.index(targetElement)+1:]
sizes = [len(x) for x in remaining_obd]
if (len(sizes)>0) and (max(sizes) < out_degree):
outcome = False
return outcome
seen.append(n)
return outcome
def getOBD3(P):
result = None
found = False
max_degree = max(list(P.degree().values()))
IDs = []
for n in P.nodes():
IDs.append(P.node[n]['id'])
# we will try with largest possible decomposition size and then go lower, if nothing is found
decomp_size = len(IDs)
while decomp_size > 0:
# now we go over all possible permutations of IDs
permutations = itertools.permutations(IDs) # this has to be recreated each time we go over it again
for perm in permutations:
splits = split_list(list(perm), decomp_size)
for s in splits:
# now this is our candidate OBD
# -------speedup B: checking sizes of decomposition elements against out-degrees
sizeCheck = size_degree_check(s, P)
# -------
if sizeCheck and OBDcorrect(P, s) and connectedOBD(P, s): # connectedOBD is additional condition because of our depth-first approach
result = s
found = True
if found == True: break;
if found == True: break;
if found == True: break;
decomp_size = decomp_size -1
if found == False:
result = None
return result
#----------------------------------------------------------------------------------
#------------------------------exhaustive 4--------------------------------
def any_triangles(G):
"""checks and outputs (True, False) whether there are any triangles in graph G"""
for x in G.nodes():
for y in G.nodes():
for z in G.nodes():
if (x != y) and (x !=z) and (y!=z):
if (G.has_edge(x, y) or G.has_edge(y, x)) and (G.has_edge(x, z) or G.has_edge(z, x)) and (G.has_edge(z, y) or G.has_edge(y, z)):
return True
# if all triplets were checked and we did not find a triangle, then we can only return False
return False
def getOBD4(P):
if any_triangles(P):
return None
result = None
found = False
max_degree = max(list(P.degree().values()))
IDs = []
for n in P.nodes():
IDs.append(P.node[n]['id'])
# we will try with largest possible decomposition size and then go lower, if nothing is found
decomp_size = len(IDs)
while decomp_size > 0:
# now we go over all possible permutations of IDs
permutations = itertools.permutations(IDs) # this has to be recreated each time we go over it again
for perm in permutations:
splits = split_list(list(perm), decomp_size)
for s in splits:
# now this is our candidate OBD
if OBDcorrect(P, s) and connectedOBD(P, s): # connectedOBD is additional condition because of our depth-first approach
result = s
found = True
if found == True: break;
if found == True: break;
if found == True: break;
decomp_size = decomp_size -1
if found == False:
result = None
return result
#----------------------------------------------------------------------------------
#------------------------------exhaustive 2plus4--------------------------
def getOBD2plus4(P):
if any_triangles(P):
return None
result = None
found = False
IDs = []
for n in P.nodes():
IDs.append(P.node[n]['id'])
# we will try with largest possible decomposition size and then go lower, if nothing is found
decomp_size = len(IDs)
while decomp_size > 0:
# now we go over all possible permutations of IDs
permutations = itertools.permutations(IDs) # this has to be recreated each time we go over it again
for perm in permutations:
splits = split_list(list(perm), decomp_size)
for s in splits:
# now this is our candidate OBD
# -------speedup A: checking for neighbors in elements of split
noneighbors = True
for nodelist in s:
if len(nodelist)>1:
if any_neighbors(nodelist, P):
noneighbors = False
# -------
if noneighbors and OBDcorrect(P, s) and connectedOBD(P, s): # connectedOBD is additional condition because of our depth-first approach
result = s
found = True
if found == True: break;
if found == True: break;
if found == True: break;
decomp_size = decomp_size -1
if found == False:
result = None
return result
#----------------------------------------------------------------------------------
#------------------------------HEURISTIC 3--------------------------------
def to_graph(l):
""" l is a list of lists"""
G = nx.Graph()
for part in l:
# each sublist is a bunch of nodes
G.add_nodes_from(part)
# it also imlies a number of edges:
G.add_edges_from(to_edges(part))
return G
def to_edges(l):
"""
        treat `l` as a Graph and return its edges
to_edges(['a','b','c','d']) -> [(a,b), (b,c),(c,d)]
"""
it = iter(l)
last = next(it)
for current in it:
yield last, current
last = current
#G = to_graph(l)
#print connected_components(G)
def partitions(set_):
if not set_:
yield []
return
for i in xrange(2**len(set_)/2):
parts = [set(), set()]
for item in set_:
parts[i&1].add(item)
i >>= 1
for b in partitions(parts[1]):
yield [parts[0]]+b
#for p in partitions(["a", "b", "c", "d"]):
#print p
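# A hand-checked trace for a two-element input:
# list(partitions(["a", "b"])) -> [[{'a', 'b'}], [{'b'}, {'a'}]]
# i.e. every set partition is produced exactly once; an n-element input yields
# Bell(n) partitions, which is only affordable because the callers below apply
# it to the small neighbor-component sets of a pattern graph.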
def h3_step(d, P, label):
## print "started with decomp element %s" % str(d)
# trenutna dekompozicija d na P, hocem celotno od tu dalje
# d is a list like [2, 3]
# first we check if d has any neighbors:
if any_neighbors(d, P):
## print "Fail because neighbors detected in %s" % str(d)
return (False, [])
else:
#---now lets get the situation
labeledOnes = []
for n in d:
if (('OBDlabel' in P.node[n].keys()) and (P.node[n]['OBDlabel'] != None)):
labeledOnes.append(n)
if len(labeledOnes) == len(d):
return (True, []) # was done already from some other decomp. element
elif ((len(labeledOnes) < len(d)) and (len(labeledOnes) > 0)): # so, if some are labeled, but not all
return (False, [])
else: # none are labeled
for n in d:
P.node[n]['OBDlabel'] = label
new_label = label + 1
all_labeled = d
output = [d]
neighbors_to_d = [] # this will be a list of lists, for each element e in d it will hold e's neighbors that are not labeled yet
for el in d:
neighbors_to_d.append([x for x in P.neighbors(el) if (('OBDlabel' not in P.node[x].keys()) or (P.node[x]['OBDlabel']==None) or (P.node[x]['OBDlabel']>=P.node[el]['OBDlabel'])) ])
if neighbors_to_d == []:
## print "Success, because no more unlabeled neighbors for %s" % str(d)
return (True, [d])
#now we'll merge them according to connected components
tempG = to_graph(neighbors_to_d)
components = nx.connected_components(tempG)
# components contains all groups of nodes that can have different decomposition labels, at least according to local information
# we try with the most defragmented components, and then merge them (PARTITIONING) if it fails in later steps
# when all partitions are exhausted, we report failure back
indices = set(range(len(components))) # set of indices will be partitioned
## print "components: %s" % str(components)
## print "indices: %s" % str(indices)
for partits in partitions(indices):
for par in itertools.permutations(partits):
# par is one partition of indeces, like: [ set([0]) , set([1]) , set([2]) ] or [ [0], [1,2] ] that correspond to e.g. [ [1], [2,3,4] ]
## print "trying par: %s" % str(par)
this_try = True # all decomposition elements in partition have to succeed
all_decomps = []
this_try_labeled = []
for d_next_inds in par:
d_next_inds = list(d_next_inds) # we make a list back from a set
# now we have to merge the components with these indices into a decomposition element candidate
d_next = []
for i in d_next_inds:
d_next = d_next + components[i]
# d_next is now the new candidate partition class
## print "and trying the next decomp candidate in next recursive step: %s" % str(d_next)
(success, partial_decomp) = h3_step(d_next, P, new_label)
if success == True:
all_decomps = all_decomps + partial_decomp
this_try_labeled = this_try_labeled + partial_decomp
# XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX problem: several possible solutions and not all elements are duplicates!!!
else:
this_try = False
if this_try == True: # obviously this partition was OK in recursions
output = output + all_decomps
## print "Success in recursion below. Outputting %s" % str(output)
return (True, output)
else:
for alist in this_try_labeled:
for nodeid in alist:
P.node[nodeid]['OBDlabel'] = None
# if we came to here it means all partitions of indices of components were exhausted without solution
## print "Fail because all options exhausted"
return (False, output)
def get_heuristic3_OBD(P):
#
for n in P.nodes():
root = n
(success, result) = h3_step([root], P, 1)
if success:
#----might have duplicates, so we'll remove them
nice_result = []
for el in result:
if el not in nice_result:
nice_result.append(el)
## print "as success we get OBD: %s" % str(nice_result)
if ( OBDcorrect(P, nice_result) and connectedOBD(P, nice_result) ):
return nice_result
else:
pass
## print "The produced OBD was either not correct or not connected"
## print "----------------------------------"
#----cleaning after this root node was not successful
for nn in P.nodes():
if ('OBDlabel' in P.node[nn].keys()):
P.node[nn]['OBDlabel'] = None
#-----------------
# if we did not return any solution before, then None was found
return None
#----------------------------------------------------------------------------------
#------------HEURISTIC 4 ---------------------------------------------
def get_components(partOBD, P):
flat_partialOBD = [item for sublist in partOBD for item in sublist] # we get a flat list from a list of lists
#
meta_neighbors = [] # this will contain all contents of neighbors_to_d for all d-s
for d in partOBD:
neighbors_to_d = [] # this will be a list of lists, for each element e in d it will hold e's neighbors that are not labeled yet
for el in d:
neighbors_to_d.append([x for x in P.neighbors(el) if (x not in flat_partialOBD)])
meta_neighbors = meta_neighbors + neighbors_to_d
#now we'll merge them according to connected components
tempG = to_graph(meta_neighbors)
components = nx.connected_components(tempG)
return components
def labelon(partialOBD, P):
## print "came into labelon() with partialOBD: %s" % str(partialOBD)
flat_partialOBD = [item for sublist in partialOBD for item in sublist] # we get a flat list from a list of lists
if len(flat_partialOBD) == len(P.nodes()): # check for the end of recursion
## print "and YES, we are at recursion end"
if ( OBDcorrect(P, partialOBD) and connectedOBD(P, partialOBD) ):
## print "and even correct and connected - FINISH."
return partialOBD
else:
## print "but not correct OBD or not connected"
return None
else: # else: get all candidates to continue (next connected components) and try on all of them
components = list(get_components(partialOBD, P))
# now to partialOBD we add each component separately, but also each possible merging of these components, including full merge
candidates = [] # this will hold all such candidates, each candidate is a list of vertices
for L in range(1, len(components)+1):
for subset in itertools.combinations(components, L):
cand = subset # but this is a list of lists - we have to flatten it
candFlat = [x for sub in cand for x in sub]
candidates.append(candFlat)
for c in candidates:
new_partial_OBD = partialOBD + [c]
## print "starting recursive call with new_partialOBD: %s" % str(new_partial_OBD)
result = labelon(new_partial_OBD, P)
## print "back from recursion call for new_partialOBD: %s" % str(new_partial_OBD)
## print "and result is: %s" % str(result)
if result != None:
return result
# if I came here without returning something , then nothing was found below me
return None
def get_heuristic4_OBD(P, startNode = None):
#
if startNode == None:
for n in P.nodes():
## print "starting with node %s" % str(n)
result = labelon([[n]], P)
if result != None:
return result
return None
else:
result = labelon([[startNode]], P)
if result != None:
return result
return None
####pattern_file_name = "pattern1.gml"
##pattern_file_name = "graph6c_15.gml"
## ./problemAnalysis/graph8c_random_663.gml
####P = nx.read_gml(pattern_file_name)
####print "reading done."
#pattern_file_name = "./graphs/7c/graph7c_104.gml"; P = nx.read_gml(pattern_file_name); get_heuristic3_OBD(P)
# OBdecomp = [ [0], [1] , [2, 3], [4], [5] ]
##start = time.time()
##res = get_heuristic1_OBD(P)
##stop = time.time()
##
##print res
##print "Calculation took %.2f seconds." % (stop-start)
# call with: > python OBDsearch.py patternX.gml [resultfile.obd] [computer_name]
##if __name__=="__main__":
## import sys
## pattern_file_name = sys.argv[1]
## result_file_name = None
## computer_name = None
## if len(sys.argv)>2:
## result_file_name = sys.argv[2]
## if len(sys.argv)>3:
## computer_name = sys.argv[3]
## P = nx.read_gml(pattern_file_name)
## start = time.time()
## obd = getOBD(P)
## stop = time.time()
## if obd != None:
## print obd
## else:
## print "None, OBD not found."
## if result_file_name != None:
## resultfile = open(result_file_name, 'w')
## resultfile.write(str(obd)); resultfile.write('\n')
## if computer_name !=None:
## resultfile.write("Finding OBD took %.2f seconds on %s." % (stop-start, computer_name))
## else:
## resultfile.write("Finding OBD took %.2f seconds." % (stop-start))
| null | null | null | null | [
0
] |
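split_list() above is the workhorse of every exhaustive variant: it enumerates
each way to cut an ordered candidate into n contiguous decomposition elements.
A standalone check, runnable under Python 2 or 3:

from itertools import combinations, chain

def split_list(data, n):
    for splits in combinations(range(1, len(data)), n - 1):
        result, prev = [], None
        for split in chain(splits, [None]):
            result.append(data[prev:split])
            prev = split
        yield result

print(list(split_list([1, 2, 3, 4], 2)))
# [[[1], [2, 3, 4]], [[1, 2], [3, 4]], [[1, 2, 3], [4]]]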
1,983 | 22b2ebdbb48caa593bece030d238089a0aa27053 | <mask token>
def item(request):
if not request.session.get('is_login', None):
return redirect('/item/item')
else:
item_list = Item.objects.all()
return render(request, 'item/item.html', locals())
<mask token>
def add_unit(request):
if request.method == 'GET':
last_unit_info = Unit.objects.last()
return render(request, 'item/add_unit.html', locals())
else:
unit_index = request.POST.get('unit_index')
unit_name = request.POST.get('unit_name')
new_unit = Unit(unit_index=int(unit_index), unit_name=unit_name)
new_unit.save()
return redirect('/item/unit/')
def edit_unit(request):
if request.method == 'GET':
nid = request.GET.get('nid')
unit_info = Unit.objects.get(id=nid)
return render(request, 'item/edit_unit.html', locals())
else:
nid = request.GET.get('nid')
unit_index = request.POST.get('unit_index')
unit_name = request.POST.get('unit_name')
unit_info = Unit.objects.get(id=nid)
unit_info.unit_index = unit_index
unit_info.unit_name = unit_name
unit_info.save()
return redirect('/item/unit/')
def del_unit(request):
nid = request.GET.get('nid')
unit_info = Unit.objects.filter(id=nid)
unit_info.delete()
return redirect('/item/unit/')
| <mask token>
def item(request):
if not request.session.get('is_login', None):
return redirect('/item/item')
else:
item_list = Item.objects.all()
return render(request, 'item/item.html', locals())
<mask token>
def edit_item(request):
if request.method == 'GET':
nid = request.GET.get('nid')
item_info = Item.objects.get(id=nid)
unit_list = Unit.objects.all()
return render(request, 'item/edit_item.html', locals())
else:
nid = request.GET.get('nid')
item_index = request.POST.get('item_index')
item_chinese_name = request.POST.get('item_chinese_name')
item_english_name = request.POST.get('item_english_name')
item_method = request.POST.get('item_method')
item_unit = request.POST.get('item_unit')
is_calc = request.POST.get('is_calc')
is_use = request.POST.get('is_use')
unit_info = Unit.objects.get(id=item_unit)
item_info = Item.objects.get(id=nid)
item_info.item_index = item_index
item_info.item_chinese_name = item_chinese_name
item_info.item_english_name = item_english_name
item_info.item_method = item_method
item_info.item_unit = unit_info
item_info.is_calc = str_to_bool(is_calc)
item_info.is_use = str_to_bool(is_use)
item_info.save()
return redirect('/item/item/')
<mask token>
def add_unit(request):
if request.method == 'GET':
last_unit_info = Unit.objects.last()
return render(request, 'item/add_unit.html', locals())
else:
unit_index = request.POST.get('unit_index')
unit_name = request.POST.get('unit_name')
new_unit = Unit(unit_index=int(unit_index), unit_name=unit_name)
new_unit.save()
return redirect('/item/unit/')
def edit_unit(request):
if request.method == 'GET':
nid = request.GET.get('nid')
unit_info = Unit.objects.get(id=nid)
return render(request, 'item/edit_unit.html', locals())
else:
nid = request.GET.get('nid')
unit_index = request.POST.get('unit_index')
unit_name = request.POST.get('unit_name')
unit_info = Unit.objects.get(id=nid)
unit_info.unit_index = unit_index
unit_info.unit_name = unit_name
unit_info.save()
return redirect('/item/unit/')
def del_unit(request):
nid = request.GET.get('nid')
unit_info = Unit.objects.filter(id=nid)
unit_info.delete()
return redirect('/item/unit/')
| <mask token>
def item(request):
if not request.session.get('is_login', None):
return redirect('/item/item')
else:
item_list = Item.objects.all()
return render(request, 'item/item.html', locals())
<mask token>
def edit_item(request):
if request.method == 'GET':
nid = request.GET.get('nid')
item_info = Item.objects.get(id=nid)
unit_list = Unit.objects.all()
return render(request, 'item/edit_item.html', locals())
else:
nid = request.GET.get('nid')
item_index = request.POST.get('item_index')
item_chinese_name = request.POST.get('item_chinese_name')
item_english_name = request.POST.get('item_english_name')
item_method = request.POST.get('item_method')
item_unit = request.POST.get('item_unit')
is_calc = request.POST.get('is_calc')
is_use = request.POST.get('is_use')
unit_info = Unit.objects.get(id=item_unit)
item_info = Item.objects.get(id=nid)
item_info.item_index = item_index
item_info.item_chinese_name = item_chinese_name
item_info.item_english_name = item_english_name
item_info.item_method = item_method
item_info.item_unit = unit_info
item_info.is_calc = str_to_bool(is_calc)
item_info.is_use = str_to_bool(is_use)
item_info.save()
return redirect('/item/item/')
<mask token>
def unit(request):
if not request.session.get('is_login', None):
return redirect('/item/unit')
else:
unit_list = Unit.objects.all()
return render(request, 'item/unit.html', locals())
def add_unit(request):
if request.method == 'GET':
last_unit_info = Unit.objects.last()
return render(request, 'item/add_unit.html', locals())
else:
unit_index = request.POST.get('unit_index')
unit_name = request.POST.get('unit_name')
new_unit = Unit(unit_index=int(unit_index), unit_name=unit_name)
new_unit.save()
return redirect('/item/unit/')
def edit_unit(request):
if request.method == 'GET':
nid = request.GET.get('nid')
unit_info = Unit.objects.get(id=nid)
return render(request, 'item/edit_unit.html', locals())
else:
nid = request.GET.get('nid')
unit_index = request.POST.get('unit_index')
unit_name = request.POST.get('unit_name')
unit_info = Unit.objects.get(id=nid)
unit_info.unit_index = unit_index
unit_info.unit_name = unit_name
unit_info.save()
return redirect('/item/unit/')
def del_unit(request):
nid = request.GET.get('nid')
unit_info = Unit.objects.filter(id=nid)
unit_info.delete()
return redirect('/item/unit/')
| <mask token>
def str_to_bool(s):
return True if s.lower() == 'true' else False
def item(request):
if not request.session.get('is_login', None):
return redirect('/item/item')
else:
item_list = Item.objects.all()
return render(request, 'item/item.html', locals())
def add_item(request):
if request.method == 'GET':
last_item_info = Item.objects.last()
unit_list = Unit.objects.all()
return render(request, 'item/add_item.html', locals())
else:
item_index = request.POST.get('item_index')
item_chinese_name = request.POST.get('item_chinese_name')
item_english_name = request.POST.get('item_english_name')
item_method = request.POST.get('item_method')
item_unit = request.POST.get('item_unit')
is_calc = request.POST.get('is_calc')
is_use = request.POST.get('is_use')
unit_info = Unit.objects.get(id=item_unit)
new_item = Item(item_index=int(item_index), item_chinese_name=
item_chinese_name, item_english_name=item_english_name,
item_method=item_method, item_unit=unit_info, is_calc=
str_to_bool(is_calc), is_use=str_to_bool(is_use))
new_item.save()
return redirect('/item/item/')
def edit_item(request):
if request.method == 'GET':
nid = request.GET.get('nid')
item_info = Item.objects.get(id=nid)
unit_list = Unit.objects.all()
return render(request, 'item/edit_item.html', locals())
else:
nid = request.GET.get('nid')
item_index = request.POST.get('item_index')
item_chinese_name = request.POST.get('item_chinese_name')
item_english_name = request.POST.get('item_english_name')
item_method = request.POST.get('item_method')
item_unit = request.POST.get('item_unit')
is_calc = request.POST.get('is_calc')
is_use = request.POST.get('is_use')
unit_info = Unit.objects.get(id=item_unit)
item_info = Item.objects.get(id=nid)
item_info.item_index = item_index
item_info.item_chinese_name = item_chinese_name
item_info.item_english_name = item_english_name
item_info.item_method = item_method
item_info.item_unit = unit_info
item_info.is_calc = str_to_bool(is_calc)
item_info.is_use = str_to_bool(is_use)
item_info.save()
return redirect('/item/item/')
<mask token>
def unit(request):
if not request.session.get('is_login', None):
return redirect('/item/unit')
else:
unit_list = Unit.objects.all()
return render(request, 'item/unit.html', locals())
def add_unit(request):
if request.method == 'GET':
last_unit_info = Unit.objects.last()
return render(request, 'item/add_unit.html', locals())
else:
unit_index = request.POST.get('unit_index')
unit_name = request.POST.get('unit_name')
new_unit = Unit(unit_index=int(unit_index), unit_name=unit_name)
new_unit.save()
return redirect('/item/unit/')
def edit_unit(request):
if request.method == 'GET':
nid = request.GET.get('nid')
unit_info = Unit.objects.get(id=nid)
return render(request, 'item/edit_unit.html', locals())
else:
nid = request.GET.get('nid')
unit_index = request.POST.get('unit_index')
unit_name = request.POST.get('unit_name')
unit_info = Unit.objects.get(id=nid)
unit_info.unit_index = unit_index
unit_info.unit_name = unit_name
unit_info.save()
return redirect('/item/unit/')
def del_unit(request):
nid = request.GET.get('nid')
unit_info = Unit.objects.filter(id=nid)
unit_info.delete()
return redirect('/item/unit/')
| from django.shortcuts import render, redirect
# Create your views here.
from item.models import Item, Unit
def str_to_bool(s):
return True if s.lower() == 'true' else False
def item(request):
if not request.session.get('is_login', None):
return redirect('/item/item')
else:
item_list = Item.objects.all()
return render(request, 'item/item.html', locals())
def add_item(request):
if request.method == 'GET':
last_item_info = Item.objects.last()
unit_list=Unit.objects.all()
return render(request, 'item/add_item.html', locals())
else:
item_index = request.POST.get('item_index')
item_chinese_name = request.POST.get('item_chinese_name')
item_english_name = request.POST.get('item_english_name')
item_method = request.POST.get('item_method')
item_unit = request.POST.get('item_unit')
is_calc = request.POST.get('is_calc')
is_use = request.POST.get('is_use')
unit_info=Unit.objects.get(id=item_unit)
new_item = Item(item_index=int(item_index), item_chinese_name=item_chinese_name,
item_english_name=item_english_name,item_method=item_method,item_unit=unit_info,is_calc=str_to_bool(is_calc),
is_use=str_to_bool(is_use))
new_item.save()
return redirect('/item/item/')
def edit_item(request):
if request.method == 'GET':
nid = request.GET.get('nid')
item_info = Item.objects.get(id=nid)
unit_list = Unit.objects.all()
return render(request, 'item/edit_item.html', locals())
else:
nid = request.GET.get('nid')
item_index = request.POST.get('item_index')
item_chinese_name = request.POST.get('item_chinese_name')
item_english_name = request.POST.get('item_english_name')
item_method = request.POST.get('item_method')
item_unit = request.POST.get('item_unit')
is_calc = request.POST.get('is_calc')
is_use = request.POST.get('is_use')
unit_info = Unit.objects.get(id=item_unit)
item_info = Item.objects.get(id=nid)
item_info.item_index = item_index
item_info.item_chinese_name = item_chinese_name
item_info.item_english_name = item_english_name
item_info.item_method = item_method
item_info.item_unit = unit_info
item_info.is_calc = str_to_bool(is_calc)
item_info.is_use = str_to_bool(is_use)
item_info.save()
return redirect('/item/item/')
def del_item(request):
    nid = request.GET.get('nid')
    item_info = Item.objects.filter(id=nid)
    item_info.delete()
    return redirect('/item/item/')
def unit(request):
if not request.session.get('is_login', None):
return redirect('/item/unit')
else:
unit_list = Unit.objects.all()
return render(request, 'item/unit.html', locals())
def add_unit(request):
if request.method == 'GET':
last_unit_info = Unit.objects.last()
return render(request, 'item/add_unit.html', locals())
else:
unit_index = request.POST.get('unit_index')
unit_name = request.POST.get('unit_name')
new_unit = Unit(unit_index=int(unit_index), unit_name=unit_name,)
new_unit.save()
return redirect('/item/unit/')
def edit_unit(request):
if request.method == 'GET':
nid = request.GET.get('nid')
unit_info = Unit.objects.get(id=nid)
return render(request, 'item/edit_unit.html', locals())
else:
nid = request.GET.get('nid')
unit_index = request.POST.get('unit_index')
unit_name = request.POST.get('unit_name')
unit_info = Unit.objects.get(id=nid)
unit_info.unit_index = unit_index
unit_info.unit_name = unit_name
unit_info.save()
return redirect('/item/unit/')
def del_unit(request):
nid = request.GET.get('nid')
unit_info = Unit.objects.filter(id=nid)
unit_info.delete()
return redirect('/item/unit/') | [
4,
5,
6,
8,
11
] |
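A hedged sketch of the URLconf these views imply; the route names mirror the
redirect targets in the code ('/item/item/', '/item/unit/'), assuming the app
is included under the 'item/' prefix in the project urls.py:

from django.urls import path
from item import views

urlpatterns = [
    path('item/', views.item),
    path('add_item/', views.add_item),
    path('edit_item/', views.edit_item),
    path('del_item/', views.del_item),
    path('unit/', views.unit),
    path('add_unit/', views.add_unit),
    path('edit_unit/', views.edit_unit),
    path('del_unit/', views.del_unit),
]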
1,984 | 532bcf8ae0ee40dc3eb4bd7170acfcb5d21cc4b9 | <mask token>
class StdIOFactory(Factory):
<mask token>
<mask token>
<mask token>
class StandardInput(LineReceiver, StandardIO):
"""
Reads stdin and writes every line received as a message to the
server. No fancy editing or anything, simple pipe.
"""
delimiter = os.linesep
def lineReceived(self, line):
return self.protocol.sendMessage(self.nick, line)
def __init__(self, nick, proto):
self.nick = nick
self.protocol = proto
def connectionLost(self, reason):
self.protocol.transport.loseConnection()
| <mask token>
class ChatClient(Protocol):
def __init__(self, done):
self.done = done
self.unpacker = msgpack.Unpacker()
def connectionLost(self, reason):
print(reason.getErrorMessage())
self.done.callback(reason)
<mask token>
<mask token>
class StdIOFactory(Factory):
def __init__(self, nick, proto):
self.nick = nick
self.proto = proto
def buildProtocol(self, addr):
return StandardInput(self.nick, self.proto)
<mask token>
class StandardInput(LineReceiver, StandardIO):
"""
Reads stdin and writes every line received as a message to the
server. No fancy editing or anything, simple pipe.
"""
delimiter = os.linesep
def lineReceived(self, line):
return self.protocol.sendMessage(self.nick, line)
def __init__(self, nick, proto):
self.nick = nick
self.protocol = proto
def connectionLost(self, reason):
self.protocol.transport.loseConnection()
| <mask token>
class ChatClient(Protocol):
def __init__(self, done):
self.done = done
self.unpacker = msgpack.Unpacker()
def connectionLost(self, reason):
print(reason.getErrorMessage())
self.done.callback(reason)
def sendMessage(self, nick, msg):
print('sending', nick, msg)
data = msgpack.packb([nick, msg])
self.transport.write(data)
<mask token>
class StdIOFactory(Factory):
def __init__(self, nick, proto):
self.nick = nick
self.proto = proto
def buildProtocol(self, addr):
return StandardInput(self.nick, self.proto)
<mask token>
class StandardInput(LineReceiver, StandardIO):
"""
Reads stdin and writes every line received as a message to the
server. No fancy editing or anything, simple pipe.
"""
delimiter = os.linesep
def lineReceived(self, line):
return self.protocol.sendMessage(self.nick, line)
def __init__(self, nick, proto):
self.nick = nick
self.protocol = proto
def connectionLost(self, reason):
self.protocol.transport.loseConnection()
| <mask token>
class ChatClient(Protocol):
def __init__(self, done):
self.done = done
self.unpacker = msgpack.Unpacker()
def connectionLost(self, reason):
print(reason.getErrorMessage())
self.done.callback(reason)
def sendMessage(self, nick, msg):
print('sending', nick, msg)
data = msgpack.packb([nick, msg])
self.transport.write(data)
def dataReceived(self, data):
self.unpacker.feed(data)
for msg in self.unpacker:
print('{}: {}'.format(*msg))
class StdIOFactory(Factory):
def __init__(self, nick, proto):
self.nick = nick
self.proto = proto
def buildProtocol(self, addr):
return StandardInput(self.nick, self.proto)
<mask token>
class StandardInput(LineReceiver, StandardIO):
"""
Reads stdin and writes every line received as a message to the
server. No fancy editing or anything, simple pipe.
"""
delimiter = os.linesep
def lineReceived(self, line):
return self.protocol.sendMessage(self.nick, line)
def __init__(self, nick, proto):
self.nick = nick
self.protocol = proto
def connectionLost(self, reason):
self.protocol.transport.loseConnection()
| from __future__ import print_function
import os
from twisted.internet.task import react
from twisted.internet.defer import Deferred, inlineCallbacks
from twisted.internet.protocol import Factory
from twisted.internet.protocol import Protocol
from twisted.internet.endpoints import TCP4ClientEndpoint, connectProtocol
from twisted.protocols.basic import LineReceiver
import msgpack
class ChatClient(Protocol):
def __init__(self, done):
self.done = done
self.unpacker = msgpack.Unpacker()
def connectionLost(self, reason):
print(reason.getErrorMessage())
self.done.callback(reason)
def sendMessage(self, nick, msg):
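        # msgpack-encode the (nick, message) pair and write it to the TCP transport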
print("sending", nick, msg)
data = msgpack.packb([nick, msg])
self.transport.write(data)
def dataReceived(self, data):
        # as with the server: feed the bytes into a streaming Unpacker so
        # "burst" traffic (several packed messages arriving in one dataReceived
        # call, or one message split across calls) is handled; the naive
        # msg = msgpack.unpackb(data) would fail on partial/concatenated data
self.unpacker.feed(data)
for msg in self.unpacker:
print("{}: {}".format(*msg))
class StdIOFactory(Factory):
def __init__(self, nick, proto):
self.nick = nick
self.proto = proto
def buildProtocol(self, addr):
return StandardInput(self.nick, self.proto)
from twisted.internet.stdio import StandardIO
class StandardInput(LineReceiver, StandardIO):
'''
Reads stdin and writes every line received as a message to the
server. No fancy editing or anything, simple pipe.
'''
delimiter = os.linesep
def lineReceived(self, line):
return self.protocol.sendMessage(self.nick, line)
def __init__(self, nick, proto):
self.nick = nick
self.protocol = proto
def connectionLost(self, reason):
self.protocol.transport.loseConnection()
| [
7,
12,
13,
14,
16
] |
1,985 | 6670295241516664e30c7db5cd3b5e2fb6c4fb05 | <mask token>
| <mask token>
class Migration(migrations.Migration):
<mask token>
<mask token>
| <mask token>
class Migration(migrations.Migration):
dependencies = [('app', '0005_alter_users_is_active')]
operations = [migrations.AlterModelManagers(name='users', managers=[])]
| from django.db import migrations
class Migration(migrations.Migration):
dependencies = [('app', '0005_alter_users_is_active')]
operations = [migrations.AlterModelManagers(name='users', managers=[])]
| # Generated by Django 3.2.7 on 2021-10-01 08:36
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('app', '0005_alter_users_is_active'),
]
operations = [
migrations.AlterModelManagers(
name='users',
managers=[
],
),
]
| [
0,
1,
2,
3,
4
] |
1,986 | 94e9d67095dde4d3bf7ddb207ac17a4c250a2bfc | from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.contrib.linkextractors import LinkExtractor
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from mp_data_scrapper.items import MpDataScrapperItem
class MininovaSpider(CrawlSpider):
name = 'mp'
allowed_domains = ['india.gov.in']
start_urls = ['http://india.gov.in/my-government/indian-parliament/lok-sabha',
'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=1',
'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=2',
'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=3',
'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=4',
'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=5',
'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=6',
'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=7',
'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=8',
'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=9',
'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=10',
'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=11',
'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=12',
'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=13',
'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=14',
'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=15',
'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=16',
'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=17',
'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=18',
'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=19',
'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=20',
'http://india.gov.in/my-government/indian-parliament/lok-sabha?page=21',
]
rules = [Rule(SgmlLinkExtractor(allow=['/my-government/indian-parliament/[^?]+'], deny=['my-government/indian-parliament/lok-sabha', 'my-government/indian-parliament/rajya-sabha'], unique=True), process_links='process_links', callback='parse_mp', follow=True)]
def parse_mp(self, response):
mp = MpDataScrapperItem()
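        # every profile field sits in a labeled <span>; read the text of the
        # following span, skipping fields missing from the page (IndexError)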
try:
mp['name'] = response.xpath("//h1/text()").extract()[0]
except IndexError:
pass
try:
mp['constituency'] = response.xpath("//span[@class='views-label views-label-field-const-name-value']/following::span[1]/text()").extract()[0]
#mp['constituency'] = response.xpath("//span[contains(concat(' ',normalize-space(@class),' '),' views-label-field-const-name-value ')]/following::span[1]/text()").extract()[0]
except IndexError:
pass
try:
mp['party'] = response.xpath("//span[@class='views-label views-label-field-party-fname-value']/following::span[1]/text()").extract()[0]
except IndexError:
pass
try:
mp['father'] = response.xpath("//span[@class='views-label views-label-field-father-name-value']/following::span[1]/text()").extract()[0]
except IndexError:
pass
try:
mp['mother'] = response.xpath("//span[@class='views-label views-label-field-mother-name-value']/following::span[1]/text()").extract()[0]
except IndexError:
pass
try:
mp['dob'] = response.xpath("//span[@class='views-label views-label-field-dob-value']/following::span[1]/text()").extract()[0]
except IndexError:
pass
try:
mp['birth_place'] = response.xpath("//span[@class='views-label views-label-field-birth-place-value']/following::span[1]/text()").extract()[0]
except IndexError:
pass
try:
mp['marital_status'] = response.xpath("//span[@class='views-label views-label-field-marital-status-value']/following::span[1]/text()").extract()[0]
except IndexError:
pass
try:
mp['spouse_name'] = response.xpath("//span[@class='views-label views-label-field-spouse-name-value']/following::span[1]/text()").extract()[0]
except IndexError:
pass
try:
mp['num_sons'] = response.xpath("//span[@class='views-label views-label-field-sons-value']/following::span[1]/text()").extract()[0]
except IndexError:
pass
try:
mp['num_daughters'] = response.xpath("//span[@class='views-label views-label-field-daughters-value']/following::span[1]/text()").extract()[0]
except IndexError:
pass
try:
mp['state'] = response.xpath("//span[@class='views-label views-label-field-state-name-value']/following::span[1]/text()").extract()[0]
except IndexError:
pass
try:
mp['permanent_address'] = response.xpath("//span[@class='views-label views-label-phpcode-1']/following::span[1]/text()").extract()[0]
except IndexError:
pass
try:
mp['present_address'] = response.xpath("//span[@class='views-label views-label-phpcode-2']/following::span[1]/text()").extract()[0]
except IndexError:
pass
try:
mp['email'] = response.xpath("//span[@class='views-label views-label-field-email-value']/following::span[1]/text()").extract()[0]
except IndexError:
pass
try:
mp['education'] = response.xpath("//span[@class='views-label views-label-phpcode-5']/following::span[1]/text()").extract()[0]
except IndexError:
pass
try:
mp['positions_held'] = response.xpath("//span[@class='views-label views-label-phpcode']/following::span[1]/text()").extract()[0]
except IndexError:
pass
try:
mp['social_cultural_activities'] = response.xpath("//span[@class='views-label views-label-phpcode-7']/following::span[1]/text()").extract()[0]
except IndexError:
pass
try:
mp['sports_clubs'] = response.xpath("//span[@class='views-label views-label-phpcode-8']/following::span[1]/text()").extract()[0]
except IndexError:
pass
try:
mp['pastimes_recreation'] = response.xpath("//span[@class='views-label views-label-phpcode-9']/following::span[1]/text()").extract()[0]
except IndexError:
pass
try:
mp['countries_visited'] = response.xpath("//span[@class='views-label views-label-phpcode-4']/following::span[1]/text()").extract()[0]
except IndexError:
pass
try:
mp['other_info'] = response.xpath("//span[@class='views-label views-label-phpcode-3']/following::span[1]/text()").extract()[0]
except IndexError:
pass
try:
mp['photo'] = 'http://india.gov.in' + response.xpath("//div[@class='views-field views-field-phpcode-10']/child::span[1]/child::img[1]/@src").extract()[0]
except IndexError:
pass
return mp
def process_links(self,links):
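        # log every extracted profile URL before Scrapy follows it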
for i, w in enumerate(links):
print w.url
#w.url = w.url.replace("http://india.gov.in/my-government/indian-parliament/lok-sabha", "http://india.gov.in")
links[i] = w
return links
| null | null | null | null | [
0
] |
1,987 | 037a02ff2c0699acdd1fefbe60098c93cd99e777 | """
help find Holly find dups in the PC's
Given a particular dir - report the dupset of each of the files so we can see
where the dups are
"""
import os, sys, re
from comms.dup_manager import DupManager
class DupFinder (DupManager):
base_archives_path = '/Volumes/archives/CommunicationsImageCollection/'
base_dedup_path = '/Volumes/cic-de-duped/'
def __init__ (self, dup_data_path):
DupManager.__init__ (self, dup_data_path)
def make_deduped_path (self, archive_path):
# return archive_path
rel_dedup_path = archive_path.replace (self.base_archives_path, '')
# Kludge for Stage / Field Projects
if rel_dedup_path.startswith('Staging'):
rel_dedup_path = rel_dedup_path.replace('Staging', 'Field Projects')
return os.path.join (self.base_dedup_path, rel_dedup_path)
def make_archives_path (self, dedup_path):
rel_archives_path = dedup_path.replace (self.base_dedup_path, '')
# Kludge for Stage / Field Projects
if rel_archives_path.startswith('Field Projects'):
rel_archives_path = rel_archives_path.replace('Field Projects', 'Staging')
return os.path.join (self.base_archives_path, rel_archives_path)
def find_dups (self, dir_path):
return self.find_dups_for_file(dir_path)
def find_dups_for_directory (self, dirpath):
dupset ={}
for filename in self.list_dir(dirpath):
path = os.path.join(dirpath, filename)
dups = self.find_dups (path)
if dups:
dupset[path] = dups
return dupset
def get_dup_display_path (self, dup_path):
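        # shorten report lines: collapse the default external-disk prefix
        # entirely, otherwise just strip the de-duped base path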
default_base_dup_display = os.path.join(self.base_dedup_path, 'CIC-ExternalDisk1/')
if dup_path.startswith (default_base_dup_display):
return dup_path.replace(default_base_dup_display, '')
else:
return dup_path.replace (self.base_dedup_path, '')
def report_dir (self, dir_path):
"""
print a list of duplicates, the one which exists on disk is marked with an asterisk
:param dir_path: The path to the directory to be reported
:return:
"""
print len(os.listdir(dir_path)), 'in archive directory'
dupset = self.find_dups_for_directory (dir_path)
keys = dupset.keys()
keys.sort()
print '- ', len(keys), 'dups found'
for key in keys:
# print '\n', key.replace(archives_base_path, '')
dedup_key_path = self.make_deduped_path(key)
# print '\n', '{}{}'.format(dedup_key_path, os.path.exists(dedup_key_path) and ' *' or '')
print '\n', '{}{}'.format(self.get_dup_display_path(dedup_key_path), os.path.exists(dedup_key_path) and ' *' or '')
dups = dupset[key]
for dup in dups:
dedup_path = self.make_deduped_path(dup)
# print ' - {}{}'.format(dedup_path, os.path.exists(dedup_path) and ' *' or '')
print ' - {}{}'.format(self.get_dup_display_path(dedup_path), os.path.exists(dedup_path) and ' *' or '')
def list_dir (self, frag):
if frag[0] == '/':
path = frag
else:
# path = os.path.join(base_path, frag)
path = os.path.join(self.base_dedup_path, frag)
print 'PATH: ', path
return os.listdir (path)
if __name__ == '__main__':
# base_path = '/Volumes/archives/CommunicationsImageCollection/Staging'
# filepath = os.path.join (archive_base_path, rel_path)
if 0: # search under CIC-ExternalDisk1
archive_base_path = '/Volumes/archives/CommunicationsImageCollection/CIC-ExternalDisk1'
deduped_base_path = None # default
rel_path = 'disc 182/Emily CoBabe Ammann'
if 0: # search under field projects
archive_base_path = '/Volumes/archives/CommunicationsImageCollection/Staging'
deduped_base_path = '/Volumes/cic-de-duped/Field Projects'
rel_path = 'Field Project-HIAPER-FP2/HIAPER 8-19-05/8-19-05'
rel_path = 'Field Project-HIAPER-FP2/HIAPER 8-19-05/8-19-05/tif&jpgs'
if 1: # search under field projects
archive_base_path = '/Volumes/archives/CommunicationsImageCollection/Staging'
rel_path = 'SOARS-3/SOARS 11-1/HIRO-mentors'
rel_path = 'Field Project-ARISTO-FP21/jpgs'
dup_data_path = '/Users/ostwald/Documents/Comms/Composite_DB/master_check_sum_dups.json'
print dup_data_path
# finder = DupFinder (dup_data_path, archive_base_path, deduped_base_path)
finder = DupFinder (dup_data_path)
dir_path = os.path.join (archive_base_path, rel_path)
print 'DIR_PATH:', dir_path
finder.report_dir(dir_path)
if 0: # test some paths
path = '/Volumes/cic-de-duped/CIC-ExternalDisk1/disc 19/HIAPER take-off/8-19-05/tif&jpgs/IMG_5820.tif'
print finder.make_deduped_path (path)
path ='/Volumes/archives/CommunicationsImageCollection/Staging/Field Project-HIAPER-FP2/HIAPER Backups/HIAPER 2/HIAPER take-off/8-19-05/jpgs/IMG_5820.jpg'
print finder.make_deduped_path(path)
| null | null | null | null | [
0
] |
1,988 | c2ba18062b8555c77b329718ec1f2ae7f326c78e | <mask token>
class DenseBlock(nn.Module):
<mask token>
def forward(self, x):
out = self.denseblock(x)
return out
| <mask token>
class BottleNeck(nn.Module):
<mask token>
def forward(self, x):
out = self.bottleneck(x)
out = torch.cat((x, out), 1)
return out
class DenseBlock(nn.Module):
def __init__(self, n_channels, growth_rate, n_DenseBlocks):
super(DenseBlock, self).__init__()
layers = []
for i in range(n_DenseBlocks):
layers.append(BottleNeck(n_channels + i * growth_rate, growth_rate)
)
self.denseblock = nn.Sequential(*layers)
def forward(self, x):
out = self.denseblock(x)
return out
| <mask token>
class BottleNeck(nn.Module):
def __init__(self, n_channels, growth_rate):
super(BottleNeck, self).__init__()
Channels = 4 * growth_rate
self.bottleneck = nn.Sequential(nn.BatchNorm2d(n_channels), nn.ReLU
(inplace=True), nn.Conv2d(n_channels, Channels, 1, bias=False),
nn.BatchNorm2d(Channels), nn.ReLU(inplace=True), nn.Conv2d(
Channels, growth_rate, 3, padding=1, bias=False))
def forward(self, x):
out = self.bottleneck(x)
out = torch.cat((x, out), 1)
return out
class DenseBlock(nn.Module):
def __init__(self, n_channels, growth_rate, n_DenseBlocks):
super(DenseBlock, self).__init__()
layers = []
for i in range(n_DenseBlocks):
layers.append(BottleNeck(n_channels + i * growth_rate, growth_rate)
)
self.denseblock = nn.Sequential(*layers)
def forward(self, x):
out = self.denseblock(x)
return out
| <mask token>
import torch
from torch import nn
class BottleNeck(nn.Module):
def __init__(self, n_channels, growth_rate):
super(BottleNeck, self).__init__()
Channels = 4 * growth_rate
self.bottleneck = nn.Sequential(nn.BatchNorm2d(n_channels), nn.ReLU
(inplace=True), nn.Conv2d(n_channels, Channels, 1, bias=False),
nn.BatchNorm2d(Channels), nn.ReLU(inplace=True), nn.Conv2d(
Channels, growth_rate, 3, padding=1, bias=False))
def forward(self, x):
out = self.bottleneck(x)
out = torch.cat((x, out), 1)
return out
class DenseBlock(nn.Module):
def __init__(self, n_channels, growth_rate, n_DenseBlocks):
super(DenseBlock, self).__init__()
layers = []
for i in range(n_DenseBlocks):
layers.append(BottleNeck(n_channels + i * growth_rate, growth_rate)
)
self.denseblock = nn.Sequential(*layers)
def forward(self, x):
out = self.denseblock(x)
return out
| # -*- coding: utf-8 -*-
"""
@File : densenet_block.py
@Time : 12/11/20 9:59 PM
@Author : Mingqiang Ning
@Email : [email protected]
@Modify Time @Version @Description
------------ -------- -----------
12/11/20 9:59 PM 1.0 None
# @Software: PyCharm
"""
import torch
from torch import nn
class BottleNeck(nn.Module):
def __init__(self,n_channels,growth_rate):
super(BottleNeck,self).__init__()
Channels=4*growth_rate
self.bottleneck=nn.Sequential(
nn.BatchNorm2d(n_channels),
nn.ReLU(inplace=True),
nn.Conv2d(n_channels,Channels,1,bias=False),
nn.BatchNorm2d(Channels),
nn.ReLU(inplace=True),
nn.Conv2d(Channels, growth_rate, 3,padding=1, bias=False)
)
def forward(self,x):
out=self.bottleneck(x)
out=torch.cat((x,out),1)
return out
class DenseBlock(nn.Module):
def __init__(self, n_channels, growth_rate,n_DenseBlocks):
super(DenseBlock, self).__init__()
layers=[]
for i in range(n_DenseBlocks):
layers.append(BottleNeck(n_channels+i*growth_rate,growth_rate))
self.denseblock=nn.Sequential(*layers)
def forward(self, x):
out=self.denseblock(x)
return out
| [
2,
5,
6,
7,
8
] |
1,989 | cae0aeea2ebd0a429cf6ecc9acab8f5f103e9669 | <mask token>
def main():
cv2.namedWindow('image')
cv2.setMouseCallback('image', movemouse)
cv2.waitKey()
cv2.destroyAllWindows()
<mask token>
| <mask token>
def movemouse(event, x, y, flags, param):
global img
img2 = img.copy()
if event == cv2.EVENT_MOUSEMOVE:
font = cv2.FONT_HERSHEY_SIMPLEX
message = '{}'.format(img2[y, x])
cv2.putText(img2, message, (int(w / 2.5), int(h / 16)), font, 0.5,
(255, 255, 255), 1)
cv2.circle(img2, (x, y), 1, (0, 0, 255), -1)
cv2.imshow('image', img2)
def main():
cv2.namedWindow('image')
cv2.setMouseCallback('image', movemouse)
cv2.waitKey()
cv2.destroyAllWindows()
<mask token>
| <mask token>
def movemouse(event, x, y, flags, param):
global img
img2 = img.copy()
if event == cv2.EVENT_MOUSEMOVE:
font = cv2.FONT_HERSHEY_SIMPLEX
message = '{}'.format(img2[y, x])
cv2.putText(img2, message, (int(w / 2.5), int(h / 16)), font, 0.5,
(255, 255, 255), 1)
cv2.circle(img2, (x, y), 1, (0, 0, 255), -1)
cv2.imshow('image', img2)
def main():
cv2.namedWindow('image')
cv2.setMouseCallback('image', movemouse)
cv2.waitKey()
cv2.destroyAllWindows()
if __name__ == '__main__':
img = cv2.imread('./2.jpg')
img_size = img.shape
h, w = img_size[0:2]
main()
| import cv2
def movemouse(event, x, y, flags, param):
global img
img2 = img.copy()
if event == cv2.EVENT_MOUSEMOVE:
font = cv2.FONT_HERSHEY_SIMPLEX
message = '{}'.format(img2[y, x])
cv2.putText(img2, message, (int(w / 2.5), int(h / 16)), font, 0.5,
(255, 255, 255), 1)
cv2.circle(img2, (x, y), 1, (0, 0, 255), -1)
cv2.imshow('image', img2)
def main():
cv2.namedWindow('image')
cv2.setMouseCallback('image', movemouse)
cv2.waitKey()
cv2.destroyAllWindows()
if __name__ == '__main__':
img = cv2.imread('./2.jpg')
img_size = img.shape
h, w = img_size[0:2]
main()
| import cv2
def movemouse(event, x, y, flags, param):
global img
img2 = img.copy()
# img2 = cv2.cvtColor(img2, cv2.COLOR_BGR2HSV)
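    # img2[y, x] below is the raw BGR value; re-enable the line above to inspect HSV instead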
if event == cv2.EVENT_MOUSEMOVE:
font = cv2.FONT_HERSHEY_SIMPLEX
message = '{}'.format(img2[y, x])
cv2.putText(img2, message, (int(w / 2.5), int(h / 16)),font, 0.5, (255, 255, 255), 1)
cv2.circle(img2, (x, y), 1, (0, 0, 255), -1)
cv2.imshow('image', img2)
def main():
cv2.namedWindow("image")
cv2.setMouseCallback("image", movemouse)
cv2.waitKey()
cv2.destroyAllWindows()
if __name__ == '__main__':
img = cv2.imread('./2.jpg')
img_size = img.shape
h, w = img_size[0:2]
main()
| [
1,
2,
3,
4,
5
] |
1,990 | cb2e2ef70935a22854c70fedf4f4a6715b089291 | <mask token>
class Prog(Emp):
def __init__(self):
super().__init__()
        print("it's the child constructor")
    def takeBreath(self):
        super().takeBreath()
        print('I am a programmer and breathing++.')
a = 0
<mask token>
| <mask token>
class Emp(Person):
def takeBreath(self):
        print('Yes I am EMP and I am also breathing.')
class Prog(Emp):
    def __init__(self):
        super().__init__()
        print("it's the child constructor")
    def takeBreath(self):
        super().takeBreath()
        print('I am a programmer and breathing++.')
a = 0
<mask token>
| class Person:
<mask token>
<mask token>
def takeBreath(self):
        print('Yes I am breathing.')
class Emp(Person):
    def takeBreath(self):
        print('Yes I am EMP and I am also breathing.')
class Prog(Emp):
    def __init__(self):
        super().__init__()
        print("it's the child constructor")
    def takeBreath(self):
        super().takeBreath()
        print('I am a programmer and breathing++.')
a = 0
<mask token>
| class Person:
country = 'INDIA'
def __init__(self):
        print("it's the base constructor")
    def takeBreath(self):
        print('Yes I am breathing.')
class Emp(Person):
    def takeBreath(self):
        print('Yes I am EMP and I am also breathing.')
class Prog(Emp):
    def __init__(self):
        super().__init__()
        print("it's the child constructor")
    def takeBreath(self):
        super().takeBreath()
        print('I am a programmer and breathing++.')
a = 0
<mask token>
p.takeBreath()
<mask token>
e.takeBreath()
<mask token>
pr.takeBreath()
| class Person:
country = "INDIA"
def __init__(self):
print("its base constructor")
def takeBreath(self):
print("Yes Iam breathing.")
class Emp(Person): # inherits person
def takeBreath(self):
print("Yes Iam EMP and Iam also breathing.")
class Prog(Emp):
def __init__(self):
super().__init__() # CALLS BASE CLASS CONTRUCTOR
print("its child constructor")
def takeBreath(self):
super().takeBreath() # calls previous class's method
print("Iam a programmer and breathing++.")
a=0
p = Person()
p.takeBreath()
e = Emp()
e.takeBreath()
pr = Prog()
pr.takeBreath() | [
4,
6,
8,
11,
13
] |
1,991 | 94056e8920d265831da67bd1d999330a47a7ef0d | <mask token>
| <mask token>
print(dir(math))
| import math
print(dir(math))
| import math
print(dir(math))
# Prints a list of entities residing in the math module | null | [
0,
1,
2,
3
] |
1,992 | 728af8b07bc391b496709e54926f3f1f49897176 | <mask token>
| include_rules = ['+apps', '+components/live_caption',
'+services/device/public', '+components/device_reauth', '+remoting/host']
specific_include_rules = {'.*test.*': ['+chrome/browser/ui/views/frame',
'+components/captive_portal', '+components/web_package',
'+skia/public/mojom/bitmap.mojom.h'], 'tls_socket_unittest\\.cc': [
'+services/network/network_context.h'], 'tcp_socket_unittest\\.cc': [
'+services/network/network_context.h'], 'udp_socket_unittest\\.cc': [
'+services/network/network_context.h']}
| include_rules = [
"+apps",
"+components/live_caption",
"+services/device/public",
"+components/device_reauth",
# Enable remote assistance on Chrome OS
"+remoting/host",
]
specific_include_rules = {
".*test.*": [
"+chrome/browser/ui/views/frame",
"+components/captive_portal",
"+components/web_package",
"+skia/public/mojom/bitmap.mojom.h",
],
"tls_socket_unittest\.cc": [
"+services/network/network_context.h",
],
"tcp_socket_unittest\.cc": [
"+services/network/network_context.h",
],
"udp_socket_unittest\.cc": [
"+services/network/network_context.h",
],
}
| null | null | [
0,
1,
2
] |
1,993 | ed35a9bc3dd267c9a5fe76ccbb1b4ac5261fc3c8 | <mask token>
def get_model(num_feat=294, lr=0.001, drop_out=0.1, layer_dims=''):
model = Sequential()
act_fn = 'relu'
if len(layer_dims) == 0:
layer_dims = [10, 5, 0.2]
else:
layer_dims = [float(d) for d in layer_dims.split('-')]
model.add(Dense(int(num_feat * layer_dims[0]), input_dim=num_feat,
kernel_initializer='normal'))
model.add(Activation(act_fn))
model.add(BatchNormalization())
model.add(Dropout(drop_out))
for layer_dim in layer_dims[1:-1]:
model.add(Dense(int(num_feat * layer_dim)))
model.add(Activation(act_fn))
model.add(BatchNormalization())
model.add(Dropout(drop_out))
model.add(Dense(int(num_feat * layer_dims[-1])))
model.add(Activation(act_fn))
model.add(Dropout(drop_out))
model.add(Dense(1))
adam = Adam(lr=lr)
model.compile(loss='logcosh', optimizer=adam)
return model
<mask token>
| <mask token>
np.random.seed(seed)
def get_model(num_feat=294, lr=0.001, drop_out=0.1, layer_dims=''):
model = Sequential()
act_fn = 'relu'
if len(layer_dims) == 0:
layer_dims = [10, 5, 0.2]
else:
layer_dims = [float(d) for d in layer_dims.split('-')]
model.add(Dense(int(num_feat * layer_dims[0]), input_dim=num_feat,
kernel_initializer='normal'))
model.add(Activation(act_fn))
model.add(BatchNormalization())
model.add(Dropout(drop_out))
for layer_dim in layer_dims[1:-1]:
model.add(Dense(int(num_feat * layer_dim)))
model.add(Activation(act_fn))
model.add(BatchNormalization())
model.add(Dropout(drop_out))
model.add(Dense(int(num_feat * layer_dims[-1])))
model.add(Activation(act_fn))
model.add(Dropout(drop_out))
model.add(Dense(1))
adam = Adam(lr=lr)
model.compile(loss='logcosh', optimizer=adam)
return model
<mask token>
def generate_training_input(mol_file):
"""
:param mol_file: str
:return: pd.DataFrame
"""
ifs = oechem.oemolistream(mol_file)
training_data = []
for mol in ifs.GetOEGraphMols():
energy = float(oechem.OEGetSDData(mol, ENERGY_KEY))
sf_elements = get_sf_elements(mol)
dihe_inchi = get_dihedral_inchi_key(mol)
data = [dihe_inchi, energy]
data.extend(sf_elements)
training_data.append(data)
ifs.close()
columns = [INCHI_KEY, ENERGY_KEY]
num_sf_elements = len(training_data[0]) - 2
sf_columns = [('sf_%d' % (i + 1)) for i in range(num_sf_elements)]
columns.extend(sf_columns)
df = pd.DataFrame(training_data, columns=columns)
grouped = df.loc[:, [INCHI_KEY, ENERGY_KEY]].groupby(INCHI_KEY)
df2 = grouped.transform(lambda x: x - x.min())
df[ENERGY_KEY] = df2[ENERGY_KEY]
return df
if __name__ == '__main__':
parser = argparse.ArgumentParser(description=
'Train neural network model to predict torsional relative energy')
parser.add_argument('--input', type=str, help=
        'sd file containing MM structures along with sd properties with torsion atom indices and QM energy'
)
parser.add_argument('--num_epoch', default=5000, type=int, help=
        'number of epochs (default: 5000)')
parser.add_argument('--batch_size', default=256, type=int, help=
'batch size (default: 256)')
parser.add_argument('--layer_dims', default='10-5-1-0.2', type=str,
help='layer dimensions')
parser.add_argument('--lr', default=0.0001, type=float, help=
        'learning rate (default: 1e-4)')
parser.add_argument('--dropout', default=0.2, type=float, help=
'dropout (default: 0.2)')
parser.add_argument('--val_split', default=0.1, type=float, help=
'validation split (default: 0.1)')
parser.add_argument('--scalar', default='scaler.pkl', type=str, help=
'output file with standard scaler')
parser.add_argument('--model', default='model.h5', type=str, help=
'output file with trained model')
parser.add_argument('-v', '--verbose', action='count', default=0)
args = parser.parse_args()
input_file = args.input
num_epoch = args.num_epoch
batch_size = args.batch_size
lr = args.lr
dropout = args.dropout
layer_dims = args.layer_dims
df = generate_training_input(input_file)
tmp_idx = df.ENERGY > 30
df.ENERGY[tmp_idx] = 30.0 + np.exp(30 - df.ENERGY[tmp_idx])
dihe_inchis = df[INCHI_KEY].unique()
print('Number of profiles: %d' % len(dihe_inchis))
desc_bgn_idx = df.columns.get_loc('sf_1')
Xtrain = df.as_matrix(columns=df.columns[desc_bgn_idx:])
ytrain = df.ENERGY
scaler = StandardScaler().fit(Xtrain)
Xtrain = scaler.transform(Xtrain)
print('Xtrain.shape ', Xtrain.shape)
with open(args.scalar, 'wb') as fptr:
pickle.dump(scaler, fptr)
_, num_feat = Xtrain.shape
earlystop = EarlyStopping(monitor='val_loss', min_delta=0.001, patience
=100, verbose=1, mode='auto')
model_file = args.model
model = get_model(num_feat, lr, dropout, layer_dims)
print(model.summary())
checkpointer = ModelCheckpoint(filepath=model_file, verbose=1,
save_best_only=True)
callbacks_list = [checkpointer]
model.fit(Xtrain, ytrain, epochs=num_epoch, batch_size=batch_size,
validation_split=args.val_split, callbacks=callbacks_list, verbose=1)
print('Training complete')
    print('Standard scaler is saved in %s' % args.scalar)
print('Model is saved in %s' % args.model)
| <mask token>
seed = 7
np.random.seed(seed)
def get_model(num_feat=294, lr=0.001, drop_out=0.1, layer_dims=''):
model = Sequential()
act_fn = 'relu'
if len(layer_dims) == 0:
layer_dims = [10, 5, 0.2]
else:
layer_dims = [float(d) for d in layer_dims.split('-')]
model.add(Dense(int(num_feat * layer_dims[0]), input_dim=num_feat,
kernel_initializer='normal'))
model.add(Activation(act_fn))
model.add(BatchNormalization())
model.add(Dropout(drop_out))
for layer_dim in layer_dims[1:-1]:
model.add(Dense(int(num_feat * layer_dim)))
model.add(Activation(act_fn))
model.add(BatchNormalization())
model.add(Dropout(drop_out))
model.add(Dense(int(num_feat * layer_dims[-1])))
model.add(Activation(act_fn))
model.add(Dropout(drop_out))
model.add(Dense(1))
adam = Adam(lr=lr)
model.compile(loss='logcosh', optimizer=adam)
return model
ENERGY_KEY = 'ENERGY'
INCHI_KEY = 'Inchi'
def generate_training_input(mol_file):
"""
:param mol_file: str
:return: pd.DataFrame
"""
ifs = oechem.oemolistream(mol_file)
training_data = []
for mol in ifs.GetOEGraphMols():
energy = float(oechem.OEGetSDData(mol, ENERGY_KEY))
sf_elements = get_sf_elements(mol)
dihe_inchi = get_dihedral_inchi_key(mol)
data = [dihe_inchi, energy]
data.extend(sf_elements)
training_data.append(data)
ifs.close()
columns = [INCHI_KEY, ENERGY_KEY]
num_sf_elements = len(training_data[0]) - 2
sf_columns = [('sf_%d' % (i + 1)) for i in range(num_sf_elements)]
columns.extend(sf_columns)
df = pd.DataFrame(training_data, columns=columns)
grouped = df.loc[:, [INCHI_KEY, ENERGY_KEY]].groupby(INCHI_KEY)
df2 = grouped.transform(lambda x: x - x.min())
df[ENERGY_KEY] = df2[ENERGY_KEY]
return df
if __name__ == '__main__':
parser = argparse.ArgumentParser(description=
'Train neural network model to predict torsional relative energy')
parser.add_argument('--input', type=str, help=
        'sd file containing MM structures along with sd properties with torsion atom indices and QM energy'
)
parser.add_argument('--num_epoch', default=5000, type=int, help=
        'number of epochs (default: 5000)')
parser.add_argument('--batch_size', default=256, type=int, help=
'batch size (default: 256)')
parser.add_argument('--layer_dims', default='10-5-1-0.2', type=str,
help='layer dimensions')
parser.add_argument('--lr', default=0.0001, type=float, help=
        'learning rate (default: 1e-4)')
parser.add_argument('--dropout', default=0.2, type=float, help=
'dropout (default: 0.2)')
parser.add_argument('--val_split', default=0.1, type=float, help=
'validation split (default: 0.1)')
parser.add_argument('--scalar', default='scaler.pkl', type=str, help=
'output file with standard scaler')
parser.add_argument('--model', default='model.h5', type=str, help=
'output file with trained model')
parser.add_argument('-v', '--verbose', action='count', default=0)
args = parser.parse_args()
input_file = args.input
num_epoch = args.num_epoch
batch_size = args.batch_size
lr = args.lr
dropout = args.dropout
layer_dims = args.layer_dims
df = generate_training_input(input_file)
tmp_idx = df.ENERGY > 30
df.ENERGY[tmp_idx] = 30.0 + np.exp(30 - df.ENERGY[tmp_idx])
dihe_inchis = df[INCHI_KEY].unique()
print('Number of profiles: %d' % len(dihe_inchis))
desc_bgn_idx = df.columns.get_loc('sf_1')
Xtrain = df.as_matrix(columns=df.columns[desc_bgn_idx:])
ytrain = df.ENERGY
scaler = StandardScaler().fit(Xtrain)
Xtrain = scaler.transform(Xtrain)
print('Xtrain.shape ', Xtrain.shape)
with open(args.scalar, 'wb') as fptr:
pickle.dump(scaler, fptr)
_, num_feat = Xtrain.shape
earlystop = EarlyStopping(monitor='val_loss', min_delta=0.001, patience
=100, verbose=1, mode='auto')
model_file = args.model
model = get_model(num_feat, lr, dropout, layer_dims)
print(model.summary())
checkpointer = ModelCheckpoint(filepath=model_file, verbose=1,
save_best_only=True)
callbacks_list = [checkpointer]
model.fit(Xtrain, ytrain, epochs=num_epoch, batch_size=batch_size,
validation_split=args.val_split, callbacks=callbacks_list, verbose=1)
print('Training complete')
    print('Standard scaler is saved in %s' % args.scalar)
print('Model is saved in %s' % args.model)
| import os, sys
import math
import argparse
import shutil
import numpy as np
import pandas as pd
from sklearn.preprocessing import StandardScaler
from sklearn.ensemble import RandomForestRegressor
from sklearn.model_selection import KFold
from keras.models import Sequential
from keras.layers import Dense, Dropout, LocallyConnected1D, Activation, GaussianNoise, GaussianDropout
from keras.layers.normalization import BatchNormalization
from keras.wrappers.scikit_learn import KerasRegressor
from keras.utils import multi_gpu_model
from keras.callbacks import EarlyStopping
from keras.callbacks import ModelCheckpoint
from keras.optimizers import Adam
from keras.models import load_model
from keras.callbacks import Callback
import timeit
import pickle
from openeye import oechem
from torsion.model import get_sf_elements
from torsion.analysis import get_dihedral_inchi_key
import matplotlib.pyplot as plt
seed = 7
np.random.seed(seed)
def get_model(num_feat=294, lr=0.001, drop_out=0.1, layer_dims=''):
model = Sequential()
act_fn = 'relu'
if len(layer_dims) == 0:
layer_dims = [10, 5, 0.2]
else:
layer_dims = [float(d) for d in layer_dims.split('-')]
model.add(Dense(int(num_feat * layer_dims[0]), input_dim=num_feat,
kernel_initializer='normal'))
model.add(Activation(act_fn))
model.add(BatchNormalization())
model.add(Dropout(drop_out))
for layer_dim in layer_dims[1:-1]:
model.add(Dense(int(num_feat * layer_dim)))
model.add(Activation(act_fn))
model.add(BatchNormalization())
model.add(Dropout(drop_out))
model.add(Dense(int(num_feat * layer_dims[-1])))
model.add(Activation(act_fn))
model.add(Dropout(drop_out))
model.add(Dense(1))
adam = Adam(lr=lr)
model.compile(loss='logcosh', optimizer=adam)
return model
ENERGY_KEY = 'ENERGY'
INCHI_KEY = 'Inchi'
def generate_training_input(mol_file):
"""
:param mol_file: str
:return: pd.DataFrame
"""
ifs = oechem.oemolistream(mol_file)
training_data = []
for mol in ifs.GetOEGraphMols():
energy = float(oechem.OEGetSDData(mol, ENERGY_KEY))
sf_elements = get_sf_elements(mol)
dihe_inchi = get_dihedral_inchi_key(mol)
data = [dihe_inchi, energy]
data.extend(sf_elements)
training_data.append(data)
ifs.close()
columns = [INCHI_KEY, ENERGY_KEY]
num_sf_elements = len(training_data[0]) - 2
sf_columns = [('sf_%d' % (i + 1)) for i in range(num_sf_elements)]
columns.extend(sf_columns)
df = pd.DataFrame(training_data, columns=columns)
grouped = df.loc[:, [INCHI_KEY, ENERGY_KEY]].groupby(INCHI_KEY)
df2 = grouped.transform(lambda x: x - x.min())
df[ENERGY_KEY] = df2[ENERGY_KEY]
return df
if __name__ == '__main__':
parser = argparse.ArgumentParser(description=
'Train neural network model to predict torsional relative energy')
parser.add_argument('--input', type=str, help=
        'sd file containing MM structures along with sd properties with torsion atom indices and QM energy'
)
parser.add_argument('--num_epoch', default=5000, type=int, help=
        'number of epochs (default: 5000)')
parser.add_argument('--batch_size', default=256, type=int, help=
'batch size (default: 256)')
parser.add_argument('--layer_dims', default='10-5-1-0.2', type=str,
help='layer dimensions')
parser.add_argument('--lr', default=0.0001, type=float, help=
        'learning rate (default: 1e-4)')
parser.add_argument('--dropout', default=0.2, type=float, help=
'dropout (default: 0.2)')
parser.add_argument('--val_split', default=0.1, type=float, help=
'validation split (default: 0.1)')
parser.add_argument('--scalar', default='scaler.pkl', type=str, help=
'output file with standard scaler')
parser.add_argument('--model', default='model.h5', type=str, help=
'output file with trained model')
parser.add_argument('-v', '--verbose', action='count', default=0)
args = parser.parse_args()
input_file = args.input
num_epoch = args.num_epoch
batch_size = args.batch_size
lr = args.lr
dropout = args.dropout
layer_dims = args.layer_dims
df = generate_training_input(input_file)
tmp_idx = df.ENERGY > 30
df.ENERGY[tmp_idx] = 30.0 + np.exp(30 - df.ENERGY[tmp_idx])
dihe_inchis = df[INCHI_KEY].unique()
print('Number of profiles: %d' % len(dihe_inchis))
desc_bgn_idx = df.columns.get_loc('sf_1')
Xtrain = df.as_matrix(columns=df.columns[desc_bgn_idx:])
ytrain = df.ENERGY
scaler = StandardScaler().fit(Xtrain)
Xtrain = scaler.transform(Xtrain)
print('Xtrain.shape ', Xtrain.shape)
with open(args.scalar, 'wb') as fptr:
pickle.dump(scaler, fptr)
_, num_feat = Xtrain.shape
earlystop = EarlyStopping(monitor='val_loss', min_delta=0.001, patience
=100, verbose=1, mode='auto')
model_file = args.model
model = get_model(num_feat, lr, dropout, layer_dims)
print(model.summary())
checkpointer = ModelCheckpoint(filepath=model_file, verbose=1,
save_best_only=True)
callbacks_list = [checkpointer]
model.fit(Xtrain, ytrain, epochs=num_epoch, batch_size=batch_size,
validation_split=args.val_split, callbacks=callbacks_list, verbose=1)
print('Training complete')
    print('Standard scaler is saved in %s' % args.scalar)
print('Model is saved in %s' % args.model)
| import os, sys
import math
import argparse
import shutil
import numpy as np
import pandas as pd
from sklearn.preprocessing import StandardScaler
from sklearn.ensemble import RandomForestRegressor
from sklearn.model_selection import KFold
from keras.models import Sequential
from keras.layers import Dense, Dropout, LocallyConnected1D, Activation, \
GaussianNoise, GaussianDropout
from keras.layers.normalization import BatchNormalization
from keras.wrappers.scikit_learn import KerasRegressor
from keras.utils import multi_gpu_model
from keras.callbacks import EarlyStopping
from keras.callbacks import ModelCheckpoint
from keras.optimizers import Adam
from keras.models import load_model
from keras.callbacks import Callback
import timeit
import pickle
from openeye import oechem
from torsion.model import get_sf_elements
from torsion.analysis import get_dihedral_inchi_key
import matplotlib.pyplot as plt
# fix random seed for reproducibility
seed = 7
np.random.seed(seed)
def get_model(num_feat=294, lr=1e-3, drop_out=0.1, layer_dims=''):
model = Sequential()
act_fn = 'relu'
if len(layer_dims) == 0:
layer_dims = [10, 5, 0.2]
else:
layer_dims = [float(d) for d in layer_dims.split('-')]
model.add(
Dense(
int(num_feat * layer_dims[0]), input_dim=num_feat,
kernel_initializer='normal'))
model.add(Activation(act_fn))
model.add(BatchNormalization())
model.add(Dropout(drop_out))
for layer_dim in layer_dims[1:-1]:
model.add(Dense(int(num_feat * layer_dim)))
model.add(Activation(act_fn))
model.add(BatchNormalization())
model.add(Dropout(drop_out))
model.add(Dense(int(num_feat * layer_dims[-1])))
model.add(Activation(act_fn))
model.add(Dropout(drop_out))
model.add(Dense(1))
adam = Adam(lr=lr)
model.compile(loss='logcosh', optimizer=adam)
return model
ENERGY_KEY = 'ENERGY'
INCHI_KEY = 'Inchi'
def generate_training_input(mol_file):
'''
:param mol_file: str
:return: pd.DataFrame
'''
ifs = oechem.oemolistream(mol_file)
training_data = []
for mol in ifs.GetOEGraphMols():
energy = float(oechem.OEGetSDData(mol, ENERGY_KEY))
sf_elements = get_sf_elements(mol)
dihe_inchi = get_dihedral_inchi_key(mol)
data = [dihe_inchi, energy]
data.extend(sf_elements)
training_data.append(data)
ifs.close()
columns = [INCHI_KEY, ENERGY_KEY]
num_sf_elements = len(training_data[0]) - 2
sf_columns = ['sf_%d'%(i+1) for i in range(num_sf_elements)]
columns.extend(sf_columns)
df = pd.DataFrame(training_data, columns=columns)
# calculate relative energy for each profile
grouped = df.loc[:,[INCHI_KEY, ENERGY_KEY]].groupby(INCHI_KEY)
df2 = grouped.transform(lambda x: x - x.min())
df[ENERGY_KEY] = df2[ENERGY_KEY]
return df
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Train neural network model to predict torsional relative energy')
    parser.add_argument('--input', type=str, help='sd file containing MM structures along with '
'sd properties with torsion atom indices and QM energy')
    parser.add_argument('--num_epoch', default=5000, type=int, help='number of epochs (default: 5000)')
parser.add_argument('--batch_size', default=256, type=int, help='batch size (default: 256)')
parser.add_argument('--layer_dims', default='10-5-1-0.2', type=str, help='layer dimensions')
    parser.add_argument('--lr', default=0.0001, type=float, help='learning rate (default: 1e-4)')
parser.add_argument('--dropout', default=0.2, type=float, help='dropout (default: 0.2)')
parser.add_argument('--val_split', default=0.1, type=float, help='validation split (default: 0.1)')
parser.add_argument('--scalar', default='scaler.pkl', type=str, help='output file with standard scaler')
parser.add_argument('--model', default='model.h5', type=str, help='output file with trained model')
parser.add_argument('-v', '--verbose', action='count', default=0)
args = parser.parse_args()
input_file = args.input
num_epoch = args.num_epoch
batch_size = args.batch_size
lr = args.lr
dropout = args.dropout
layer_dims = args.layer_dims
# generate training data using the molecules in the input file
# for each molecule in the input file, extract the QM energy from SD property "ENERGY"
# and generate symmetry function elements around the specified torsion (SD property "TORSION_ATOMS_FRAGMENT")
df = generate_training_input(input_file)
# cap the relative energy
tmp_idx = df.ENERGY > 30
df.ENERGY[tmp_idx] = 30.0 + np.exp(30 - df.ENERGY[tmp_idx])
dihe_inchis = df[INCHI_KEY].unique()
print('Number of profiles: %d' % len(dihe_inchis))
desc_bgn_idx = df.columns.get_loc('sf_1')
Xtrain = df.as_matrix(columns=df.columns[desc_bgn_idx:])
ytrain = df.ENERGY
# feature transformation
scaler = StandardScaler().fit(Xtrain)
Xtrain = scaler.transform(Xtrain)
print('Xtrain.shape ', Xtrain.shape)
# save feature transformation
with open(args.scalar, 'wb') as fptr:
pickle.dump(scaler, fptr)
_, num_feat = Xtrain.shape
# early stopping criteria
earlystop = EarlyStopping(monitor='val_loss', min_delta=0.001, patience=100, \
verbose=1, mode='auto')
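    # note: earlystop is created above but never added to callbacks_list,
    # so only the checkpoint callback is actually active during training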
model_file = args.model
# create DNN model
model = get_model(num_feat, lr, dropout, layer_dims)
print(model.summary())
checkpointer = ModelCheckpoint(
filepath=model_file, verbose=1, save_best_only=True)
callbacks_list = [checkpointer]
# train DNN model
model.fit(
Xtrain,
ytrain,
epochs=num_epoch,
batch_size=batch_size,
validation_split=args.val_split,
callbacks=callbacks_list,
verbose=1)
print('Training complete')
print('Standard scalar is saved in %s' % args.scalar)
print('Model is saved in %s' % args.model)
| [
1,
3,
4,
5,
6
] |
1,994 | 4df9af863a857c3bbc3c266d745a49b6ef78ba9b | <mask token>
class Calculator(QWidget):
<mask token>
def accept_button_value(self, number):
if number == 'Clean':
self.number_str = ''
elif number == 'Backspace':
self.number_str = list(self.number_str)
if len(self.number_str) > 0:
self.number_str.pop()
self.number_str = ''.join(self.number_str)
else:
self.number_str = ''
elif number == 'Infor':
self.number_str = self.version
else:
line_edit_content = self.lineEdit.text()
if line_edit_content.find(self.version) >= 0:
self.number_str = ''
self.number_str = str(self.number_str) + str(number)
print(self.number_str)
self.lineEdit.setText(str(self.number_str))
def calculation_results(self, infor):
line_edit_content = self.lineEdit.text()
print(line_edit_content)
result = 0.0
if line_edit_content.find(self.version) >= 0:
            self.lineEdit.setText('Input error')
else:
try:
result = eval(line_edit_content)
except:
                result = 'Calculation error, please check the input'
self.lineEdit.setText(str(result))
<mask token>
| <mask token>
class Calculator(QWidget):
def __init__(self):
self.number_str = ''
        self.version = 'Xiaoshu Calculator V1.0'
super().__init__()
self.resize(400, 400)
from PyQt5.uic import loadUi
loadUi('calculator_gui.ui', self)
self.Button_0.clicked.connect(lambda : self.accept_button_value(0))
self.Button_1.clicked.connect(lambda : self.accept_button_value(1))
self.Button_2.clicked.connect(lambda : self.accept_button_value(2))
self.Button_3.clicked.connect(lambda : self.accept_button_value(3))
self.Button_4.clicked.connect(lambda : self.accept_button_value(4))
self.Button_5.clicked.connect(lambda : self.accept_button_value(5))
self.Button_6.clicked.connect(lambda : self.accept_button_value(6))
self.Button_7.clicked.connect(lambda : self.accept_button_value(7))
self.Button_8.clicked.connect(lambda : self.accept_button_value(8))
self.Button_9.clicked.connect(lambda : self.accept_button_value(9))
self.Button_addition.clicked.connect(lambda : self.
accept_button_value('+'))
self.Button_subtraction.clicked.connect(lambda : self.
accept_button_value('-'))
self.Button_multiplication.clicked.connect(lambda : self.
accept_button_value('*'))
self.Button_division.clicked.connect(lambda : self.
accept_button_value('/'))
self.Button_Backspace.clicked.connect(lambda : self.
accept_button_value('Backspace'))
self.Button_Clean.clicked.connect(lambda : self.accept_button_value
('Clean'))
self.Button_Infor.clicked.connect(lambda : self.accept_button_value
('Infor'))
self.Button_L_par.clicked.connect(lambda : self.accept_button_value
('('))
self.Button_R_par.clicked.connect(lambda : self.accept_button_value
(')'))
self.Button_result.clicked.connect(lambda : self.
            calculation_results('calculate result'))
def accept_button_value(self, number):
if number == 'Clean':
self.number_str = ''
elif number == 'Backspace':
self.number_str = list(self.number_str)
if len(self.number_str) > 0:
self.number_str.pop()
self.number_str = ''.join(self.number_str)
else:
self.number_str = ''
elif number == 'Infor':
self.number_str = self.version
else:
line_edit_content = self.lineEdit.text()
if line_edit_content.find(self.version) >= 0:
self.number_str = ''
self.number_str = str(self.number_str) + str(number)
print(self.number_str)
self.lineEdit.setText(str(self.number_str))
def calculation_results(self, infor):
line_edit_content = self.lineEdit.text()
print(line_edit_content)
result = 0.0
if line_edit_content.find(self.version) >= 0:
            self.lineEdit.setText('Input error')
else:
try:
result = eval(line_edit_content)
except:
                result = 'Calculation error, please check the input'
self.lineEdit.setText(str(result))
<mask token>
| <mask token>
class Calculator(QWidget):
def __init__(self):
self.number_str = ''
        self.version = 'Xiaoshu Calculator V1.0'
super().__init__()
self.resize(400, 400)
from PyQt5.uic import loadUi
loadUi('calculator_gui.ui', self)
self.Button_0.clicked.connect(lambda : self.accept_button_value(0))
self.Button_1.clicked.connect(lambda : self.accept_button_value(1))
self.Button_2.clicked.connect(lambda : self.accept_button_value(2))
self.Button_3.clicked.connect(lambda : self.accept_button_value(3))
self.Button_4.clicked.connect(lambda : self.accept_button_value(4))
self.Button_5.clicked.connect(lambda : self.accept_button_value(5))
self.Button_6.clicked.connect(lambda : self.accept_button_value(6))
self.Button_7.clicked.connect(lambda : self.accept_button_value(7))
self.Button_8.clicked.connect(lambda : self.accept_button_value(8))
self.Button_9.clicked.connect(lambda : self.accept_button_value(9))
self.Button_addition.clicked.connect(lambda : self.
accept_button_value('+'))
self.Button_subtraction.clicked.connect(lambda : self.
accept_button_value('-'))
self.Button_multiplication.clicked.connect(lambda : self.
accept_button_value('*'))
self.Button_division.clicked.connect(lambda : self.
accept_button_value('/'))
self.Button_Backspace.clicked.connect(lambda : self.
accept_button_value('Backspace'))
self.Button_Clean.clicked.connect(lambda : self.accept_button_value
('Clean'))
self.Button_Infor.clicked.connect(lambda : self.accept_button_value
('Infor'))
self.Button_L_par.clicked.connect(lambda : self.accept_button_value
('('))
self.Button_R_par.clicked.connect(lambda : self.accept_button_value
(')'))
self.Button_result.clicked.connect(lambda : self.
            calculation_results('calculate result'))
def accept_button_value(self, number):
if number == 'Clean':
self.number_str = ''
elif number == 'Backspace':
self.number_str = list(self.number_str)
if len(self.number_str) > 0:
self.number_str.pop()
self.number_str = ''.join(self.number_str)
else:
self.number_str = ''
elif number == 'Infor':
self.number_str = self.version
else:
line_edit_content = self.lineEdit.text()
if line_edit_content.find(self.version) >= 0:
self.number_str = ''
self.number_str = str(self.number_str) + str(number)
print(self.number_str)
self.lineEdit.setText(str(self.number_str))
def calculation_results(self, infor):
line_edit_content = self.lineEdit.text()
print(line_edit_content)
result = 0.0
if line_edit_content.find(self.version) >= 0:
            self.lineEdit.setText('Input error')
else:
try:
result = eval(line_edit_content)
except:
                result = 'Calculation error, please check the input'
self.lineEdit.setText(str(result))
if __name__ == '__main__':
app = QApplication(sys.argv)
w = Calculator()
w.show()
sys.exit(app.exec_())
| from PyQt5.QtWidgets import QApplication, QWidget
import sys
class Calculator(QWidget):
def __init__(self):
self.number_str = ''
        self.version = 'Xiaoshu Calculator V1.0'
super().__init__()
self.resize(400, 400)
from PyQt5.uic import loadUi
loadUi('calculator_gui.ui', self)
self.Button_0.clicked.connect(lambda : self.accept_button_value(0))
self.Button_1.clicked.connect(lambda : self.accept_button_value(1))
self.Button_2.clicked.connect(lambda : self.accept_button_value(2))
self.Button_3.clicked.connect(lambda : self.accept_button_value(3))
self.Button_4.clicked.connect(lambda : self.accept_button_value(4))
self.Button_5.clicked.connect(lambda : self.accept_button_value(5))
self.Button_6.clicked.connect(lambda : self.accept_button_value(6))
self.Button_7.clicked.connect(lambda : self.accept_button_value(7))
self.Button_8.clicked.connect(lambda : self.accept_button_value(8))
self.Button_9.clicked.connect(lambda : self.accept_button_value(9))
self.Button_addition.clicked.connect(lambda : self.
accept_button_value('+'))
self.Button_subtraction.clicked.connect(lambda : self.
accept_button_value('-'))
self.Button_multiplication.clicked.connect(lambda : self.
accept_button_value('*'))
self.Button_division.clicked.connect(lambda : self.
accept_button_value('/'))
self.Button_Backspace.clicked.connect(lambda : self.
accept_button_value('Backspace'))
self.Button_Clean.clicked.connect(lambda : self.accept_button_value
('Clean'))
self.Button_Infor.clicked.connect(lambda : self.accept_button_value
('Infor'))
self.Button_L_par.clicked.connect(lambda : self.accept_button_value
('('))
self.Button_R_par.clicked.connect(lambda : self.accept_button_value
(')'))
self.Button_result.clicked.connect(lambda : self.
            calculation_results('calculate result'))
def accept_button_value(self, number):
if number == 'Clean':
self.number_str = ''
elif number == 'Backspace':
self.number_str = list(self.number_str)
if len(self.number_str) > 0:
self.number_str.pop()
self.number_str = ''.join(self.number_str)
else:
self.number_str = ''
elif number == 'Infor':
self.number_str = self.version
else:
line_edit_content = self.lineEdit.text()
if line_edit_content.find(self.version) >= 0:
self.number_str = ''
self.number_str = str(self.number_str) + str(number)
print(self.number_str)
self.lineEdit.setText(str(self.number_str))
def calculation_results(self, infor):
line_edit_content = self.lineEdit.text()
print(line_edit_content)
result = 0.0
if line_edit_content.find(self.version) >= 0:
            self.lineEdit.setText('Input error')
else:
try:
result = eval(line_edit_content)
except:
                result = 'Calculation error, please check the input'
self.lineEdit.setText(str(result))
if __name__ == '__main__':
app = QApplication(sys.argv)
w = Calculator()
w.show()
sys.exit(app.exec_())
| from PyQt5.QtWidgets import QApplication, QWidget
import sys
class Calculator(QWidget):
def __init__(self):
self.number_str = ""
self.version = "小树计算器 V1.0"
super().__init__()
self.resize(400,400)
        from PyQt5.uic import loadUi  # module needed to load .ui files
        #loadUi("record.ui", self)  # load the UI file
        loadUi("calculator_gui.ui", self)  # load the UI file
        # signal/slot wiring ---- digit button handlers
self.Button_0.clicked.connect(lambda: self.accept_button_value(0))
self.Button_1.clicked.connect(lambda: self.accept_button_value(1))
self.Button_2.clicked.connect(lambda: self.accept_button_value(2))
self.Button_3.clicked.connect(lambda: self.accept_button_value(3))
self.Button_4.clicked.connect(lambda: self.accept_button_value(4))
self.Button_5.clicked.connect(lambda: self.accept_button_value(5))
self.Button_6.clicked.connect(lambda: self.accept_button_value(6))
self.Button_7.clicked.connect(lambda: self.accept_button_value(7))
self.Button_8.clicked.connect(lambda: self.accept_button_value(8))
self.Button_9.clicked.connect(lambda: self.accept_button_value(9))
        # function-button handlers
self.Button_addition.clicked.connect(lambda: self.accept_button_value("+"))
self.Button_subtraction.clicked.connect(lambda: self.accept_button_value("-"))
self.Button_multiplication.clicked.connect(lambda: self.accept_button_value("*"))
self.Button_division.clicked.connect(lambda: self.accept_button_value("/"))
self.Button_Backspace.clicked.connect(lambda: self.accept_button_value("Backspace"))
self.Button_Clean.clicked.connect(lambda: self.accept_button_value("Clean"))
self.Button_Infor.clicked.connect(lambda: self.accept_button_value("Infor"))
self.Button_L_par.clicked.connect(lambda: self.accept_button_value("("))
self.Button_R_par.clicked.connect(lambda: self.accept_button_value(")"))
        self.Button_result.clicked.connect(lambda: self.calculation_results("calculate result"))
def accept_button_value(self, number):
if number == "Clean":
self.number_str = ""
elif number == "Backspace":
self.number_str = list(self.number_str)
if len(self.number_str) > 0:
self.number_str.pop()
self.number_str = "".join(self.number_str)
else:
self.number_str = ""
elif number == "Infor":
self.number_str = self.version
else:
line_edit_content = self.lineEdit.text()
if line_edit_content.find(self.version) >= 0:
self.number_str = ""
self.number_str = str(self.number_str) + str(number)
print(self.number_str)
self.lineEdit.setText(str(self.number_str))
def calculation_results(self, infor):
line_edit_content = self.lineEdit.text()
print(line_edit_content)
result = 0.0
if line_edit_content.find(self.version) >= 0:
self.lineEdit.setText("输入错误")
else:
try:
result = eval(line_edit_content)
except:
result = "计算错误,请检查输入"
self.lineEdit.setText(str(result))
if __name__=='__main__':
app=QApplication(sys.argv)
w=Calculator()
w.show()
sys.exit(app.exec_())
| [
3,
4,
5,
6,
7
] |
1,995 | be06a0ad22f4ae9ab4c0acea6a7c601c14a90fc4 | <mask token>
class Dot(Sprite):
<mask token>
def update(self, dt):
arena = self.parent.parent
snake = arena.snake
self.check_kill(snake)
for s in arena.enemies:
self.check_kill(s)
<mask token>
| <mask token>
class Dot(Sprite):
def __init__(self, pos=None, color=None):
if color is None:
color = random.choice(define.ALL_COLOR)
super(Dot, self).__init__('circle.png', color=color)
self.killed = False
if pos is None:
self.position = random.randint(40, define.WIDTH - 40
), random.randint(40, define.HEIGHT - 40)
self.is_big = False
self.scale = 0.8
else:
self.position = pos[0] + random.random() * 32 - 16, pos[1
] + random.random() * 32 - 16
self.is_big = True
self.schedule_interval(self.update, random.random() * 0.2 + 0.1)
def update(self, dt):
arena = self.parent.parent
snake = arena.snake
self.check_kill(snake)
for s in arena.enemies:
self.check_kill(s)
def check_kill(self, snake):
if (not self.killed and not snake.is_dead) and (abs(snake.x - self.
x) < 32 and abs(snake.y - self.y) < 32):
self.killed = True
self.killer = snake
self.do(MoveTo(snake.position, 0.1) + CallFuncS(kill))
| <mask token>
def kill(spr):
spr.unschedule(spr.update)
arena = spr.parent.parent
if not spr.is_big:
arena.batch.add(Dot())
spr.killer.add_score()
else:
spr.killer.add_score(2)
arena.batch.remove(spr)
if not spr.killer.is_enemy:
arena.parent.update_score()
del spr
class Dot(Sprite):
def __init__(self, pos=None, color=None):
if color is None:
color = random.choice(define.ALL_COLOR)
super(Dot, self).__init__('circle.png', color=color)
self.killed = False
if pos is None:
self.position = random.randint(40, define.WIDTH - 40
), random.randint(40, define.HEIGHT - 40)
self.is_big = False
self.scale = 0.8
else:
self.position = pos[0] + random.random() * 32 - 16, pos[1
] + random.random() * 32 - 16
self.is_big = True
self.schedule_interval(self.update, random.random() * 0.2 + 0.1)
def update(self, dt):
arena = self.parent.parent
snake = arena.snake
self.check_kill(snake)
for s in arena.enemies:
self.check_kill(s)
def check_kill(self, snake):
if (not self.killed and not snake.is_dead) and (abs(snake.x - self.
x) < 32 and abs(snake.y - self.y) < 32):
self.killed = True
self.killer = snake
self.do(MoveTo(snake.position, 0.1) + CallFuncS(kill))
| import random
from cocos.actions import MoveTo, CallFuncS
from cocos.sprite import Sprite
import define
def kill(spr):
spr.unschedule(spr.update)
arena = spr.parent.parent
if not spr.is_big:
arena.batch.add(Dot())
spr.killer.add_score()
else:
spr.killer.add_score(2)
arena.batch.remove(spr)
if not spr.killer.is_enemy:
arena.parent.update_score()
del spr
class Dot(Sprite):
def __init__(self, pos=None, color=None):
if color is None:
color = random.choice(define.ALL_COLOR)
super(Dot, self).__init__('circle.png', color=color)
self.killed = False
if pos is None:
self.position = random.randint(40, define.WIDTH - 40
), random.randint(40, define.HEIGHT - 40)
self.is_big = False
self.scale = 0.8
else:
self.position = pos[0] + random.random() * 32 - 16, pos[1
] + random.random() * 32 - 16
self.is_big = True
self.schedule_interval(self.update, random.random() * 0.2 + 0.1)
def update(self, dt):
arena = self.parent.parent
snake = arena.snake
self.check_kill(snake)
for s in arena.enemies:
self.check_kill(s)
def check_kill(self, snake):
if (not self.killed and not snake.is_dead) and (abs(snake.x - self.
x) < 32 and abs(snake.y - self.y) < 32):
self.killed = True
self.killer = snake
self.do(MoveTo(snake.position, 0.1) + CallFuncS(kill))
| # -*- coding: utf-8 -*-
import random
from cocos.actions import MoveTo, CallFuncS
from cocos.sprite import Sprite
import define
def kill(spr):
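    # Consume the dot: credit the killer, respawn a replacement when a regular
    # (small) dot is eaten, and refresh the HUD score if the player snake scored.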
spr.unschedule(spr.update)
arena = spr.parent.parent
if not spr.is_big:
arena.batch.add(Dot())
spr.killer.add_score()
else:
spr.killer.add_score(2)
arena.batch.remove(spr)
if not spr.killer.is_enemy:
arena.parent.update_score()
del spr
class Dot(Sprite):
def __init__(self, pos=None, color=None):
if color is None:
color = random.choice(define.ALL_COLOR)
super(Dot, self).__init__('circle.png', color=color)
self.killed = False
if pos is None:
self.position = (random.randint(40, define.WIDTH - 40),
random.randint(40, define.HEIGHT - 40))
self.is_big = False
self.scale = 0.8
else:
self.position = (pos[0] + random.random() * 32 - 16,
pos[1] + random.random() * 32 - 16)
self.is_big = True
self.schedule_interval(self.update, random.random() * 0.2 + 0.1)
def update(self, dt):
arena = self.parent.parent
snake = arena.snake
self.check_kill(snake)
for s in arena.enemies:
self.check_kill(s)
def check_kill(self, snake):
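        # Cheap pickup test: a 32 px axis-aligned box around the snake head
        # stands in for a true circle-circle collision check.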
if (not self.killed and not snake.is_dead) and (
abs(snake.x - self.x) < 32 and abs(snake.y - self.y) < 32
):
self.killed = True
self.killer = snake
self.do(MoveTo(snake.position, 0.1) + CallFuncS(kill))
| [
2,
4,
5,
6,
7
] |
1,996 | 4bbfb35e4b03e2bfd46dd0fe5bfd54fb01ba11df | <mask token>
class TestResumeParser(TestCase):
<mask token>
<mask token>
def generate_counter(self, resume_name):
json_file = self.load_resume(resume_name)
return self.convert_to_counter(json_file)
<mask token>
def generate_email(self, resume_name):
json_file = self.load_resume(resume_name)
return json_file['email']
<mask token>
<mask token>
def test_parse_tariq_ali_profile_email(self):
expected_email = '[email protected]'
actual_email = self.generate_email('TariqAliProfile')
self.assertEqual(expected_email, actual_email)
def test_parse_second_tariq_ali_profile_counter(self):
expected_counter = Counter({'Ruby': 15, 'Rails': 5, 'WordPress': 3,
'Angular': 3, 'Sinatra': 2, 'jQuery': 2, 'JavaScript': 2, 'C++':
2, 'Twitter': 2, 'Javascript': 2, 'Bootstrap': 2, 'GitHub': 1,
'.NET': 1, 'RSpec': 1, 'blockchain': 1, 'Ethereum': 1,
'Capistrano': 1, 'AWS': 1, 'C#': 1, 'React': 1})
actual_counter = self.generate_counter('Tariq_Ali')
self.assertEqual(expected_counter, actual_counter)
def test_parse_second_tariq_ali_profile_name(self):
expected_name = 'Tariq\xa0Ali'
actual_name = self.generate_name('Tariq_Ali')
self.assertEqual(expected_name, actual_name)
<mask token>
def test_parse_dan_bernier_profile_counter(self):
expected_counter = Counter({'Ruby': 7, 'Processing': 4, 'C#': 3,
'Rails': 2, 'Javascript': 1, '.NET': 1, 'JavaScript': 1,
'Scheme': 1})
actual_counter = self.generate_counter('DanBernierProfile')
self.assertEqual(expected_counter, actual_counter)
<mask token>
<mask token>
<mask token>
def test_parse_dylan_hirschkorn_profile_name(self):
expected_name = 'Dylan Hirschkorn'
actual_name = self.generate_name('DylanHirschkornProfile')
self.assertEqual(expected_name, actual_name)
def test_parse_dylan_hirschkorn_profile_email(self):
expected_email = ''
actual_email = self.generate_email('DylanHirschkornProfile')
self.assertEqual(expected_email, actual_email)
<mask token>
def test_parse_sean_dugan_murphy_profile_name(self):
expected_name = 'Sean Dugan'
actual_name = self.generate_name('SeanDuganMurphyProfile')
self.assertEqual(expected_name, actual_name)
<mask token>
<mask token>
<mask token>
<mask token>
| <mask token>
class TestResumeParser(TestCase):
def load_resume(self, resume_name):
path_to_directory = 'generator/fixtures/{resume_name}.pdf'.format(
resume_name=resume_name)
file_path = os.path.abspath(path_to_directory)
json_string = resume_parser.convert(file_path)
json_file = json.loads(json_string)
return json_file
def convert_to_counter(self, json_file):
counter = json_file['counter']
return Counter(counter)
def generate_counter(self, resume_name):
json_file = self.load_resume(resume_name)
return self.convert_to_counter(json_file)
def generate_name(self, resume_name):
json_file = self.load_resume(resume_name)
return json_file['name']
def generate_email(self, resume_name):
json_file = self.load_resume(resume_name)
return json_file['email']
def test_parse_tariq_ali_profile_counter(self):
expected_counter = Counter({'Ruby': 8, 'Rails': 5, 'WordPress': 3,
'Bootstrap': 2, 'JavaScript': 1, 'jQuery': 1, '.NET': 1, 'C#':
1, 'RSpec': 1, 'Sinatra': 1, 'C++': 1, 'Angular': 1,
'Javascript': 1, 'Ethereum': 1, 'blockchain': 1})
actual_counter = self.generate_counter('TariqAliProfile')
self.assertEqual(expected_counter, actual_counter)
def test_parse_tariq_ali_profile_name(self):
expected_name = 'Tariq Ali'
actual_name = self.generate_name('TariqAliProfile')
self.assertEqual(expected_name, actual_name)
def test_parse_tariq_ali_profile_email(self):
expected_email = '[email protected]'
actual_email = self.generate_email('TariqAliProfile')
self.assertEqual(expected_email, actual_email)
def test_parse_second_tariq_ali_profile_counter(self):
expected_counter = Counter({'Ruby': 15, 'Rails': 5, 'WordPress': 3,
'Angular': 3, 'Sinatra': 2, 'jQuery': 2, 'JavaScript': 2, 'C++':
2, 'Twitter': 2, 'Javascript': 2, 'Bootstrap': 2, 'GitHub': 1,
'.NET': 1, 'RSpec': 1, 'blockchain': 1, 'Ethereum': 1,
'Capistrano': 1, 'AWS': 1, 'C#': 1, 'React': 1})
actual_counter = self.generate_counter('Tariq_Ali')
self.assertEqual(expected_counter, actual_counter)
def test_parse_second_tariq_ali_profile_name(self):
expected_name = 'Tariq\xa0Ali'
actual_name = self.generate_name('Tariq_Ali')
self.assertEqual(expected_name, actual_name)
def test_parse_second_tariq_ali_profile_email(self):
expected_email = '[email protected]'
actual_email = self.generate_email('Tariq_Ali')
self.assertEqual(expected_email, actual_email)
def test_parse_dan_bernier_profile_counter(self):
expected_counter = Counter({'Ruby': 7, 'Processing': 4, 'C#': 3,
'Rails': 2, 'Javascript': 1, '.NET': 1, 'JavaScript': 1,
'Scheme': 1})
actual_counter = self.generate_counter('DanBernierProfile')
self.assertEqual(expected_counter, actual_counter)
def test_parse_dan_bernier_profile_name(self):
expected_name = 'Dan Bernier'
actual_name = self.generate_name('DanBernierProfile')
self.assertEqual(expected_name, actual_name)
<mask token>
def test_parse_dylan_hirschkorn_profile_counter(self):
expected_counter = Counter({'Dylan': 3, 'Visual Basic': 3, 'BASIC':
3, 'C#': 2, 'Swift': 1})
actual_counter = self.generate_counter('DylanHirschkornProfile')
self.assertEqual(expected_counter, actual_counter)
def test_parse_dylan_hirschkorn_profile_name(self):
expected_name = 'Dylan Hirschkorn'
actual_name = self.generate_name('DylanHirschkornProfile')
self.assertEqual(expected_name, actual_name)
def test_parse_dylan_hirschkorn_profile_email(self):
expected_email = ''
actual_email = self.generate_email('DylanHirschkornProfile')
self.assertEqual(expected_email, actual_email)
def test_parse_sean_dugan_murphy_profile_counter(self):
expected_counter = Counter({'Swift': 11, 'Twitter': 3,
'Objective-C': 3, 'Facebook': 3, 'GitHub': 2, 'YouTube': 2,
'CSS': 1, 'C#': 1})
actual_counter = self.generate_counter('SeanDuganMurphyProfile')
self.assertEqual(expected_counter, actual_counter)
def test_parse_sean_dugan_murphy_profile_name(self):
expected_name = 'Sean Dugan'
actual_name = self.generate_name('SeanDuganMurphyProfile')
self.assertEqual(expected_name, actual_name)
def test_parse_sean_dugan_murphy_profile_email(self):
expected_email = ''
actual_email = self.generate_email('SeanDuganMurphyProfile')
self.assertEqual(expected_email, actual_email)
def test_parse_christopher_salat_ceev_counter(self):
expected_counter = Counter({'YouTube': 5, 'PHP': 2, 'Scratch': 1})
actual_counter = self.generate_counter('Christopher_Salat_Ceev')
self.assertEqual(expected_counter, actual_counter)
<mask token>
def test_parse_christopher_salat_ceev_email(self):
expected_email = '[email protected]'
actual_email = self.generate_email('Christopher_Salat_Ceev')
self.assertEqual(expected_email, actual_email)
| <mask token>
class TestResumeParser(TestCase):
def load_resume(self, resume_name):
path_to_directory = 'generator/fixtures/{resume_name}.pdf'.format(
resume_name=resume_name)
file_path = os.path.abspath(path_to_directory)
json_string = resume_parser.convert(file_path)
json_file = json.loads(json_string)
return json_file
def convert_to_counter(self, json_file):
counter = json_file['counter']
return Counter(counter)
def generate_counter(self, resume_name):
json_file = self.load_resume(resume_name)
return self.convert_to_counter(json_file)
def generate_name(self, resume_name):
json_file = self.load_resume(resume_name)
return json_file['name']
def generate_email(self, resume_name):
json_file = self.load_resume(resume_name)
return json_file['email']
def test_parse_tariq_ali_profile_counter(self):
expected_counter = Counter({'Ruby': 8, 'Rails': 5, 'WordPress': 3,
'Bootstrap': 2, 'JavaScript': 1, 'jQuery': 1, '.NET': 1, 'C#':
1, 'RSpec': 1, 'Sinatra': 1, 'C++': 1, 'Angular': 1,
'Javascript': 1, 'Ethereum': 1, 'blockchain': 1})
actual_counter = self.generate_counter('TariqAliProfile')
self.assertEqual(expected_counter, actual_counter)
def test_parse_tariq_ali_profile_name(self):
expected_name = 'Tariq Ali'
actual_name = self.generate_name('TariqAliProfile')
self.assertEqual(expected_name, actual_name)
def test_parse_tariq_ali_profile_email(self):
expected_email = '[email protected]'
actual_email = self.generate_email('TariqAliProfile')
self.assertEqual(expected_email, actual_email)
def test_parse_second_tariq_ali_profile_counter(self):
expected_counter = Counter({'Ruby': 15, 'Rails': 5, 'WordPress': 3,
'Angular': 3, 'Sinatra': 2, 'jQuery': 2, 'JavaScript': 2, 'C++':
2, 'Twitter': 2, 'Javascript': 2, 'Bootstrap': 2, 'GitHub': 1,
'.NET': 1, 'RSpec': 1, 'blockchain': 1, 'Ethereum': 1,
'Capistrano': 1, 'AWS': 1, 'C#': 1, 'React': 1})
actual_counter = self.generate_counter('Tariq_Ali')
self.assertEqual(expected_counter, actual_counter)
def test_parse_second_tariq_ali_profile_name(self):
expected_name = 'Tariq\xa0Ali'
actual_name = self.generate_name('Tariq_Ali')
self.assertEqual(expected_name, actual_name)
def test_parse_second_tariq_ali_profile_email(self):
expected_email = '[email protected]'
actual_email = self.generate_email('Tariq_Ali')
self.assertEqual(expected_email, actual_email)
def test_parse_dan_bernier_profile_counter(self):
expected_counter = Counter({'Ruby': 7, 'Processing': 4, 'C#': 3,
'Rails': 2, 'Javascript': 1, '.NET': 1, 'JavaScript': 1,
'Scheme': 1})
actual_counter = self.generate_counter('DanBernierProfile')
self.assertEqual(expected_counter, actual_counter)
def test_parse_dan_bernier_profile_name(self):
expected_name = 'Dan Bernier'
actual_name = self.generate_name('DanBernierProfile')
self.assertEqual(expected_name, actual_name)
def test_parse_dan_bernier_profile_email(self):
expected_email = '[email protected]'
actual_email = self.generate_email('DanBernierProfile')
self.assertEqual(expected_email, actual_email)
def test_parse_dylan_hirschkorn_profile_counter(self):
expected_counter = Counter({'Dylan': 3, 'Visual Basic': 3, 'BASIC':
3, 'C#': 2, 'Swift': 1})
actual_counter = self.generate_counter('DylanHirschkornProfile')
self.assertEqual(expected_counter, actual_counter)
def test_parse_dylan_hirschkorn_profile_name(self):
expected_name = 'Dylan Hirschkorn'
actual_name = self.generate_name('DylanHirschkornProfile')
self.assertEqual(expected_name, actual_name)
def test_parse_dylan_hirschkorn_profile_email(self):
expected_email = ''
actual_email = self.generate_email('DylanHirschkornProfile')
self.assertEqual(expected_email, actual_email)
def test_parse_sean_dugan_murphy_profile_counter(self):
expected_counter = Counter({'Swift': 11, 'Twitter': 3,
'Objective-C': 3, 'Facebook': 3, 'GitHub': 2, 'YouTube': 2,
'CSS': 1, 'C#': 1})
actual_counter = self.generate_counter('SeanDuganMurphyProfile')
self.assertEqual(expected_counter, actual_counter)
def test_parse_sean_dugan_murphy_profile_name(self):
expected_name = 'Sean Dugan'
actual_name = self.generate_name('SeanDuganMurphyProfile')
self.assertEqual(expected_name, actual_name)
def test_parse_sean_dugan_murphy_profile_email(self):
expected_email = ''
actual_email = self.generate_email('SeanDuganMurphyProfile')
self.assertEqual(expected_email, actual_email)
def test_parse_christopher_salat_ceev_counter(self):
expected_counter = Counter({'YouTube': 5, 'PHP': 2, 'Scratch': 1})
actual_counter = self.generate_counter('Christopher_Salat_Ceev')
self.assertEqual(expected_counter, actual_counter)
def test_parse_christopher_salat_ceev_name(self):
expected_name = 'Christopher Salat'
actual_name = self.generate_name('Christopher_Salat_Ceev')
self.assertEqual(expected_name, actual_name)
def test_parse_christopher_salat_ceev_email(self):
expected_email = '[email protected]'
actual_email = self.generate_email('Christopher_Salat_Ceev')
self.assertEqual(expected_email, actual_email)
| from __future__ import unicode_literals
from django.test import TestCase
from collections import Counter
import generator.resume_parser as resume_parser
import os
import json
class TestResumeParser(TestCase):
def load_resume(self, resume_name):
path_to_directory = 'generator/fixtures/{resume_name}.pdf'.format(
resume_name=resume_name)
file_path = os.path.abspath(path_to_directory)
json_string = resume_parser.convert(file_path)
json_file = json.loads(json_string)
return json_file
def convert_to_counter(self, json_file):
counter = json_file['counter']
return Counter(counter)
def generate_counter(self, resume_name):
json_file = self.load_resume(resume_name)
return self.convert_to_counter(json_file)
def generate_name(self, resume_name):
json_file = self.load_resume(resume_name)
return json_file['name']
def generate_email(self, resume_name):
json_file = self.load_resume(resume_name)
return json_file['email']
def test_parse_tariq_ali_profile_counter(self):
expected_counter = Counter({'Ruby': 8, 'Rails': 5, 'WordPress': 3,
'Bootstrap': 2, 'JavaScript': 1, 'jQuery': 1, '.NET': 1, 'C#':
1, 'RSpec': 1, 'Sinatra': 1, 'C++': 1, 'Angular': 1,
'Javascript': 1, 'Ethereum': 1, 'blockchain': 1})
actual_counter = self.generate_counter('TariqAliProfile')
self.assertEqual(expected_counter, actual_counter)
def test_parse_tariq_ali_profile_name(self):
expected_name = 'Tariq Ali'
actual_name = self.generate_name('TariqAliProfile')
self.assertEqual(expected_name, actual_name)
def test_parse_tariq_ali_profile_email(self):
expected_email = '[email protected]'
actual_email = self.generate_email('TariqAliProfile')
self.assertEqual(expected_email, actual_email)
def test_parse_second_tariq_ali_profile_counter(self):
expected_counter = Counter({'Ruby': 15, 'Rails': 5, 'WordPress': 3,
'Angular': 3, 'Sinatra': 2, 'jQuery': 2, 'JavaScript': 2, 'C++':
2, 'Twitter': 2, 'Javascript': 2, 'Bootstrap': 2, 'GitHub': 1,
'.NET': 1, 'RSpec': 1, 'blockchain': 1, 'Ethereum': 1,
'Capistrano': 1, 'AWS': 1, 'C#': 1, 'React': 1})
actual_counter = self.generate_counter('Tariq_Ali')
self.assertEqual(expected_counter, actual_counter)
def test_parse_second_tariq_ali_profile_name(self):
expected_name = 'Tariq\xa0Ali'
actual_name = self.generate_name('Tariq_Ali')
self.assertEqual(expected_name, actual_name)
def test_parse_second_tariq_ali_profile_email(self):
expected_email = '[email protected]'
actual_email = self.generate_email('Tariq_Ali')
self.assertEqual(expected_email, actual_email)
def test_parse_dan_bernier_profile_counter(self):
expected_counter = Counter({'Ruby': 7, 'Processing': 4, 'C#': 3,
'Rails': 2, 'Javascript': 1, '.NET': 1, 'JavaScript': 1,
'Scheme': 1})
actual_counter = self.generate_counter('DanBernierProfile')
self.assertEqual(expected_counter, actual_counter)
def test_parse_dan_bernier_profile_name(self):
expected_name = 'Dan Bernier'
actual_name = self.generate_name('DanBernierProfile')
self.assertEqual(expected_name, actual_name)
def test_parse_dan_bernier_profile_email(self):
expected_email = '[email protected]'
actual_email = self.generate_email('DanBernierProfile')
self.assertEqual(expected_email, actual_email)
def test_parse_dylan_hirschkorn_profile_counter(self):
expected_counter = Counter({'Dylan': 3, 'Visual Basic': 3, 'BASIC':
3, 'C#': 2, 'Swift': 1})
actual_counter = self.generate_counter('DylanHirschkornProfile')
self.assertEqual(expected_counter, actual_counter)
def test_parse_dylan_hirschkorn_profile_name(self):
expected_name = 'Dylan Hirschkorn'
actual_name = self.generate_name('DylanHirschkornProfile')
self.assertEqual(expected_name, actual_name)
def test_parse_dylan_hirschkorn_profile_email(self):
expected_email = ''
actual_email = self.generate_email('DylanHirschkornProfile')
self.assertEqual(expected_email, actual_email)
def test_parse_sean_dugan_murphy_profile_counter(self):
expected_counter = Counter({'Swift': 11, 'Twitter': 3,
'Objective-C': 3, 'Facebook': 3, 'GitHub': 2, 'YouTube': 2,
'CSS': 1, 'C#': 1})
actual_counter = self.generate_counter('SeanDuganMurphyProfile')
self.assertEqual(expected_counter, actual_counter)
def test_parse_sean_dugan_murphy_profile_name(self):
expected_name = 'Sean Dugan'
actual_name = self.generate_name('SeanDuganMurphyProfile')
self.assertEqual(expected_name, actual_name)
def test_parse_sean_dugan_murphy_profile_email(self):
expected_email = ''
actual_email = self.generate_email('SeanDuganMurphyProfile')
self.assertEqual(expected_email, actual_email)
def test_parse_christopher_salat_ceev_counter(self):
expected_counter = Counter({'YouTube': 5, 'PHP': 2, 'Scratch': 1})
actual_counter = self.generate_counter('Christopher_Salat_Ceev')
self.assertEqual(expected_counter, actual_counter)
def test_parse_christopher_salat_ceev_name(self):
expected_name = 'Christopher Salat'
actual_name = self.generate_name('Christopher_Salat_Ceev')
self.assertEqual(expected_name, actual_name)
def test_parse_christopher_salat_ceev_email(self):
expected_email = '[email protected]'
actual_email = self.generate_email('Christopher_Salat_Ceev')
self.assertEqual(expected_email, actual_email)
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.test import TestCase
from collections import Counter
import generator.resume_parser as resume_parser
import os
import json
class TestResumeParser(TestCase):
def load_resume(self, resume_name):
path_to_directory = "generator/fixtures/{resume_name}.pdf".format(resume_name=resume_name)
file_path = os.path.abspath(path_to_directory)
json_string = resume_parser.convert(file_path)
json_file = json.loads(json_string)
return json_file
def convert_to_counter(self, json_file):
counter = json_file["counter"]
return Counter(counter)
def generate_counter(self, resume_name):
json_file = self.load_resume(resume_name)
return self.convert_to_counter(json_file)
def generate_name(self, resume_name):
json_file = self.load_resume(resume_name)
return json_file["name"]
def generate_email(self, resume_name):
json_file = self.load_resume(resume_name)
return json_file["email"]
def test_parse_tariq_ali_profile_counter(self):
expected_counter = Counter({'Ruby': 8, 'Rails': 5, 'WordPress': 3, 'Bootstrap': 2, 'JavaScript': 1, 'jQuery': 1, '.NET': 1, 'C#': 1, 'RSpec': 1, 'Sinatra': 1, 'C++': 1, 'Angular': 1, 'Javascript': 1, 'Ethereum': 1, 'blockchain': 1})
actual_counter = self.generate_counter("TariqAliProfile")
self.assertEqual(expected_counter, actual_counter)
def test_parse_tariq_ali_profile_name(self):
expected_name = "Tariq Ali"
actual_name = self.generate_name("TariqAliProfile")
self.assertEqual(expected_name, actual_name)
def test_parse_tariq_ali_profile_email(self):
expected_email = "[email protected]"
actual_email = self.generate_email("TariqAliProfile")
self.assertEqual(expected_email, actual_email)
def test_parse_second_tariq_ali_profile_counter(self):
expected_counter = Counter({'Ruby': 15, 'Rails': 5, 'WordPress': 3, 'Angular': 3, 'Sinatra': 2, 'jQuery': 2, 'JavaScript': 2, 'C++': 2, 'Twitter': 2, 'Javascript': 2, 'Bootstrap': 2, 'GitHub': 1, '.NET': 1, 'RSpec': 1, 'blockchain': 1, 'Ethereum': 1, 'Capistrano': 1, 'AWS': 1, 'C#': 1, 'React': 1})
actual_counter = self.generate_counter("Tariq_Ali")
self.assertEqual(expected_counter, actual_counter)
def test_parse_second_tariq_ali_profile_name(self):
expected_name = "Tariq\xa0Ali"
actual_name = self.generate_name("Tariq_Ali")
self.assertEqual(expected_name, actual_name)
def test_parse_second_tariq_ali_profile_email(self):
expected_email = "[email protected]"
actual_email = self.generate_email("Tariq_Ali")
self.assertEqual(expected_email, actual_email)
def test_parse_dan_bernier_profile_counter(self):
expected_counter = Counter({'Ruby': 7, 'Processing': 4, 'C#': 3, 'Rails': 2, 'Javascript': 1, '.NET': 1, 'JavaScript': 1, 'Scheme': 1})
actual_counter = self.generate_counter("DanBernierProfile")
self.assertEqual(expected_counter, actual_counter)
def test_parse_dan_bernier_profile_name(self):
expected_name = "Dan Bernier"
actual_name = self.generate_name("DanBernierProfile")
self.assertEqual(expected_name, actual_name)
def test_parse_dan_bernier_profile_email(self):
expected_email = "[email protected]"
actual_email = self.generate_email("DanBernierProfile")
self.assertEqual(expected_email, actual_email)
def test_parse_dylan_hirschkorn_profile_counter(self):
expected_counter = Counter({'Dylan': 3, 'Visual Basic': 3, 'BASIC': 3, 'C#': 2, 'Swift': 1})
# This is a bug, Dylan only mentioned "Visual Basic", not "Basic" on his resume. However, I do not know of a good way of fixing this specific edge case. Also, Dylan is the name of a programming language, which is why Dylan shows up in the counter.
actual_counter = self.generate_counter("DylanHirschkornProfile")
self.assertEqual(expected_counter, actual_counter)
def test_parse_dylan_hirschkorn_profile_name(self):
expected_name = "Dylan Hirschkorn"
actual_name = self.generate_name("DylanHirschkornProfile")
self.assertEqual(expected_name, actual_name)
def test_parse_dylan_hirschkorn_profile_email(self):
expected_email = ""
actual_email = self.generate_email("DylanHirschkornProfile")
self.assertEqual(expected_email, actual_email)
def test_parse_sean_dugan_murphy_profile_counter(self):
expected_counter = Counter({'Swift': 11, 'Twitter': 3, 'Objective-C': 3, 'Facebook': 3, 'GitHub': 2, 'YouTube': 2, 'CSS': 1, 'C#': 1})
actual_counter = self.generate_counter("SeanDuganMurphyProfile")
self.assertEqual(expected_counter, actual_counter)
def test_parse_sean_dugan_murphy_profile_name(self):
# The full name of the candidate is Sean Dugan Murphy. However we assume that a candidate only has a first and last name...and ignore the edge case where a candidate has a middle name.
expected_name = "Sean Dugan"
actual_name = self.generate_name("SeanDuganMurphyProfile")
self.assertEqual(expected_name, actual_name)
def test_parse_sean_dugan_murphy_profile_email(self):
expected_email = ""
actual_email = self.generate_email("SeanDuganMurphyProfile")
self.assertEqual(expected_email, actual_email)
def test_parse_christopher_salat_ceev_counter(self):
# Note that Christopher Salat does not actually know either PHP or Scratch. He links to several websites that end with the .php extension and he serves as a Scratch DJ. This indicates a problem with relying solely on keywords detached from the context.
expected_counter = Counter({'YouTube': 5, 'PHP': 2, 'Scratch': 1})
actual_counter = self.generate_counter("Christopher_Salat_Ceev")
self.assertEqual(expected_counter, actual_counter)
def test_parse_christopher_salat_ceev_name(self):
expected_name = "Christopher Salat"
actual_name = self.generate_name("Christopher_Salat_Ceev")
self.assertEqual(expected_name, actual_name)
def test_parse_christopher_salat_ceev_email(self):
expected_email = "[email protected]"
actual_email = self.generate_email("Christopher_Salat_Ceev")
self.assertEqual(expected_email, actual_email)
| [
10,
22,
24,
25,
26
] |
1,997 | 4e02edcf8a512060fa92ede11f33993978584147 |
#!/usr/bin/env python
"""
Author: Adam White, Matthew Schlegel, Mohammad M. Ajallooeian, Sina Ghiassian
Purpose: Skeleton code for Monte Carlo Exploring Starts Control Agent
for use on A3 of Reinforcement learning course University of Alberta Fall 2017
"""
"""
/*
* Copyright (c) HAOTIAN ZHU ,COMPUT301,University Of Alberta All Rights Reserved.
* You May Use, Distribute Or Modify This Code Under Term And
* Condition Of Code Of Students Behavior At University Of Alberta.
*
*
* Author: Haotian Zhu
* If You Have Any Question Please contact [email protected].
*
*/
"""
import numpy as np
import pickle
from importlib import import_module
tile = import_module("tiles3")
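# tiles3 provides Sutton's tile-coding utilities (the IHT index hash table and tiles()).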
iht = tile.IHT(3000)
w = None
currentState = None
lastState = None
alpha = 0.01/50
gamma = 1.0
x = None
def agent_init():
global w,currentState,lastState,x
w = np.zeros(1200)
currentState = np.zeros(1)
lastState = np.zeros(1)
return
def agent_start(state):
global w,currentState,lastState,x
currentState[0] = float(state[0]/200.0)
lastState[0] = currentState[0]
action = chooseAction(state[0])
return action
def agent_step(reward, state):
global w,currentState,lastState,x
state1 = np.zeros(1200)
state2 = np.zeros(1200)
currentState[0] = float(state[0]/200.0)
currentx = tile.tiles(iht,50,currentState)
lastx = tile.tiles(iht,50,lastState)
for index in currentx:
state1[index] = 1
for index in lastx:
state2[index] = 1
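    # Semi-gradient TD(0) update: w <- w + alpha * (r + gamma * w.x(s') - w.x(s)) * x(s),
    # where x(.) is the binary tile-coded feature vector of a state.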
w = w + alpha*(reward+gamma*np.dot(w,state1) - np.dot(w,state2))*state2
lastState[0] = currentState[0]
action = chooseAction(state[0])
return action
def agent_end(reward):
global w,currentState,lastState,x
state2 = np.zeros(1200)
lastx = tile.tiles(iht,50,lastState)
for index in lastx:
state2[index] = 1
w = w + alpha*(reward- np.dot(w,state2))*state2
return
def agent_cleanup():
"""
This function is not used
"""
# clean up
return
def agent_message(in_message): # in_message: string
    global w
    """
    Arguments: in_message: string
    returns: the value function as a numpy array (for the 'ValueFunction' message), or an error string otherwise.
This function is complete. You do not need to add code here.
"""
# should not need to modify this function. Modify at your own risk
if (in_message == 'ValueFunction'):
out = np.zeros(1000)
for i in range(1000):
x = tile.tiles(iht,50,[float(i/200.0)])
state = np.zeros(1200)
for index in x:
state[index] = 1
out[i] = np.dot(w,state)
return out
else:
return "I don't know what to return!!"
def chooseAction(state):
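    # Behaviour policy for the 1-D random walk: step +-(1..100), clipped so the
    # next state stays inside [0, 1000].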
    if np.random.randint(2):  # half the time, take a positive step
result = np.random.randint(100)+1
if result+state>=1000:
return 1000-state
else:
return result
else:
result = (np.random.randint(100)+1)*(-1)
if result+state<=0:
return state*(-1)
else:
return result
| null | null | null | null | [
0
] |
1,998 | 1133d3cf900e31278dc491565c99969a116e6c83 | <mask token>
class TorchData(Dataset):
<mask token>
<mask token>
<mask token>
<mask token>
| <mask token>
def create_gt_mask(vocal_spec, bg_spec):
"""
Take in log spectrogram and return a mask map for TF bins
1 if the vocal sound is dominated in the TF-bin, while 0 for not
"""
vocal_spec = vocal_spec.numpy()
bg_spec = bg_spec.numpy()
return np.array(vocal_spec > bg_spec, dtype=np.float32)
class TorchData(Dataset):
def __init__(self, dataset_path):
"""
Take the h5py dataset
"""
super(TorchData, self).__init__()
self.dataset = h5py.File(dataset_path, 'r')
self.bg = self.dataset['bg']
self.vocal = self.dataset['vocal']
self.mix = self.dataset['mix']
self.len = self.bg.shape[0]
def __len__(self):
return self.len
def __getitem__(self, index):
bg = self.bg[index].astype(np.float32)
vocal = self.vocal[index].astype(np.float32)
mix = self.mix[index].astype(np.float32)
mix = torch.from_numpy(mix)
bg = torch.from_numpy(bg)
vocal = torch.from_numpy(vocal)
target = torch.from_numpy(create_gt_mask(vocal, bg))
sample = {'vocal': vocal, 'bg': bg, 'mix': mix, 'target': target}
return sample
def torch_dataset_loader(dataset, batch_size, shuffle, kwargs):
"""
take the h5py dataset
"""
loader = DataLoader(TorchData(dataset), batch_size=batch_size, shuffle=
shuffle, **kwargs)
return loader
<mask token>
| <mask token>
def create_gt_mask(vocal_spec, bg_spec):
"""
Take in log spectrogram and return a mask map for TF bins
1 if the vocal sound is dominated in the TF-bin, while 0 for not
"""
vocal_spec = vocal_spec.numpy()
bg_spec = bg_spec.numpy()
return np.array(vocal_spec > bg_spec, dtype=np.float32)
class TorchData(Dataset):
def __init__(self, dataset_path):
"""
Take the h5py dataset
"""
super(TorchData, self).__init__()
self.dataset = h5py.File(dataset_path, 'r')
self.bg = self.dataset['bg']
self.vocal = self.dataset['vocal']
self.mix = self.dataset['mix']
self.len = self.bg.shape[0]
def __len__(self):
return self.len
def __getitem__(self, index):
bg = self.bg[index].astype(np.float32)
vocal = self.vocal[index].astype(np.float32)
mix = self.mix[index].astype(np.float32)
mix = torch.from_numpy(mix)
bg = torch.from_numpy(bg)
vocal = torch.from_numpy(vocal)
target = torch.from_numpy(create_gt_mask(vocal, bg))
sample = {'vocal': vocal, 'bg': bg, 'mix': mix, 'target': target}
return sample
def torch_dataset_loader(dataset, batch_size, shuffle, kwargs):
"""
take the h5py dataset
"""
loader = DataLoader(TorchData(dataset), batch_size=batch_size, shuffle=
shuffle, **kwargs)
return loader
train_loader = torch_dataset_loader(PARAS.TRAIN_DATA_PATH, PARAS.BATCH_SIZE,
True, PARAS.kwargs)
validation_loader = torch_dataset_loader(PARAS.VAL_DATA_PATH, PARAS.
BATCH_SIZE, False, PARAS.kwargs)
test_loader = torch_dataset_loader(PARAS.TEST_DATA_PATH, PARAS.BATCH_SIZE,
False, PARAS.kwargs)
if __name__ == '__main__':
for index, data_item in enumerate(test_loader):
print(data_item['vocal'].shape)
print(data_item['bg'].shape)
print(data_item['mix'].shape)
print(data_item['target'].shape)
break
| import torch
import numpy as np
import h5py
from torch.utils.data import Dataset, DataLoader
from config import PARAS
<mask token>
def create_gt_mask(vocal_spec, bg_spec):
"""
Take in log spectrogram and return a mask map for TF bins
1 if the vocal sound is dominated in the TF-bin, while 0 for not
"""
vocal_spec = vocal_spec.numpy()
bg_spec = bg_spec.numpy()
return np.array(vocal_spec > bg_spec, dtype=np.float32)
class TorchData(Dataset):
def __init__(self, dataset_path):
"""
Take the h5py dataset
"""
super(TorchData, self).__init__()
self.dataset = h5py.File(dataset_path, 'r')
self.bg = self.dataset['bg']
self.vocal = self.dataset['vocal']
self.mix = self.dataset['mix']
self.len = self.bg.shape[0]
def __len__(self):
return self.len
def __getitem__(self, index):
bg = self.bg[index].astype(np.float32)
vocal = self.vocal[index].astype(np.float32)
mix = self.mix[index].astype(np.float32)
mix = torch.from_numpy(mix)
bg = torch.from_numpy(bg)
vocal = torch.from_numpy(vocal)
target = torch.from_numpy(create_gt_mask(vocal, bg))
sample = {'vocal': vocal, 'bg': bg, 'mix': mix, 'target': target}
return sample
def torch_dataset_loader(dataset, batch_size, shuffle, kwargs):
"""
take the h5py dataset
"""
loader = DataLoader(TorchData(dataset), batch_size=batch_size, shuffle=
shuffle, **kwargs)
return loader
train_loader = torch_dataset_loader(PARAS.TRAIN_DATA_PATH, PARAS.BATCH_SIZE,
True, PARAS.kwargs)
validation_loader = torch_dataset_loader(PARAS.VAL_DATA_PATH, PARAS.
BATCH_SIZE, False, PARAS.kwargs)
test_loader = torch_dataset_loader(PARAS.TEST_DATA_PATH, PARAS.BATCH_SIZE,
False, PARAS.kwargs)
if __name__ == '__main__':
for index, data_item in enumerate(test_loader):
print(data_item['vocal'].shape)
print(data_item['bg'].shape)
print(data_item['mix'].shape)
print(data_item['target'].shape)
break
| import torch
import numpy as np
import h5py
from torch.utils.data import Dataset, DataLoader
from config import PARAS
"""
Be careful:
We use log mel-spectrogram for training,
while the mask generated is for power mel-spectrogram
"""
def create_gt_mask(vocal_spec, bg_spec):
"""
Take in log spectrogram and return a mask map for TF bins
1 if the vocal sound is dominated in the TF-bin, while 0 for not
"""
vocal_spec = vocal_spec.numpy()
bg_spec = bg_spec.numpy()
return np.array(vocal_spec > bg_spec, dtype=np.float32)
class TorchData(Dataset):
def __init__(self, dataset_path):
"""
Take the h5py dataset
"""
super(TorchData, self).__init__()
self.dataset = h5py.File(dataset_path, 'r')
self.bg = self.dataset['bg']
self.vocal = self.dataset['vocal']
self.mix = self.dataset['mix']
self.len = self.bg.shape[0]
def __len__(self):
return self.len
def __getitem__(self, index):
bg = self.bg[index].astype(np.float32)
vocal = self.vocal[index].astype(np.float32)
mix = self.mix[index].astype(np.float32)
mix = torch.from_numpy(mix)
bg = torch.from_numpy(bg)
vocal = torch.from_numpy(vocal)
target = torch.from_numpy(create_gt_mask(vocal, bg))
sample = {
'vocal': vocal, # this is used for test
'bg': bg, # this is used for test
'mix': mix,
'target': target,
}
return sample
# define the data loaders
def torch_dataset_loader(dataset, batch_size, shuffle, kwargs):
"""
take the h5py dataset
"""
loader = DataLoader(TorchData(dataset),
batch_size=batch_size,
shuffle=shuffle,
**kwargs)
return loader
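# The split loaders below are built eagerly at import time from the dataset paths in PARAS.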
train_loader = torch_dataset_loader(PARAS.TRAIN_DATA_PATH, PARAS.BATCH_SIZE, True, PARAS.kwargs)
validation_loader = torch_dataset_loader(PARAS.VAL_DATA_PATH, PARAS.BATCH_SIZE, False, PARAS.kwargs)
test_loader = torch_dataset_loader(PARAS.TEST_DATA_PATH, PARAS.BATCH_SIZE, False, PARAS.kwargs)
if __name__ == '__main__':
for index, data_item in enumerate(test_loader):
print(data_item['vocal'].shape)
print(data_item['bg'].shape)
print(data_item['mix'].shape)
print(data_item['target'].shape)
break
| [
1,
6,
8,
9,
10
] |
1,999 | 41681a80807800efc06b3912533d739dab2cd085 | <mask token>
class AttachmentTestCase(BaseTestCase):
def set_up(self):
BaseTestCase.set_up(self)
self.init_data = dict(content='Important attachment content.',
file_name='test_file1.txt', description='A test file.', size=14,
author='user1', time=None)
def test_init(self):
att = AttachmentWrapper(**self.init_data)
for attr_name, exp_value in self.init_data.iteritems():
self.assert_equal(getattr(att, attr_name), exp_value)
def test_create_from_trac_data(self):
file_name = 'test_file1.txt'
description = 'A test file.'
size = len(file_name)
time = datetime
author = 'user1'
trac_data = file_name, description, size, time, author
att = AttachmentWrapper.create_from_trac_data(trac_data)
self.init_data['content'] = None
self.init_data['time'] = time
for attr_name, exp_value in self.init_data.iteritems():
self.assert_equal(getattr(att, attr_name), exp_value)
def test_get_base64_data_for_upload(self):
test_str = 'This is a string for base64 conversion testing.'
self.init_data['content'] = test_str
exp_conv = Base64Converter.encode_string(test_str)
att = AttachmentWrapper(**self.init_data)
self.assert_equal(att.get_base64_data_for_upload(), exp_conv)
test_stream = StringIO(
'This is a stream for base64 conversion testing.')
exp_conv = Base64Converter.encode_stream(test_stream)
self.init_data['content'] = test_stream
att = AttachmentWrapper(**self.init_data)
self.assert_equal(att.get_base64_data_for_upload(), exp_conv)
file_map = dict(file1='test stream 1', file2='test stream 2')
zip_stream = StringIO()
archive = zipfile.ZipFile(zip_stream, 'a', zipfile.ZIP_DEFLATED, False)
for fn, content in file_map.iteritems():
archive.writestr(fn, content)
for zfile in archive.filelist:
zfile.create_system = 0
archive.close()
zip_stream.seek(0)
exp_conv = Base64Converter.encode_zip_stream(zip_stream)
self.init_data['content'] = file_map
att = AttachmentWrapper(**self.init_data)
self.assert_equal(att.get_base64_data_for_upload(), exp_conv)
self.init_data['content'] = 1
att = AttachmentWrapper(**self.init_data)
self.assert_raises(TypeError, att.get_base64_data_for_upload)
| <mask token>
class Base64ConverterTestCase(BaseTestCase):
<mask token>
<mask token>
def test_encode_zip_stream(self):
zip_stream = StringIO()
archive = zipfile.ZipFile(zip_stream, 'a', zipfile.ZIP_DEFLATED, False)
archive.writestr('file1', 'test stream 1')
archive.writestr('file2', 'test stream 2')
for zfile in archive.filelist:
zfile.create_system = 0
archive.close()
zip_stream.seek(0)
exp_conv = Binary(zip_stream.getvalue())
self.assert_equal(Base64Converter.encode_zip_stream(zip_stream),
exp_conv)
zip_stream.close()
def test_decode_string(self):
test_str = 'This is a string for base64 conversion testing.'
conv = Base64Converter.encode_string(test_str)
self.assert_equal(Base64Converter.decode_to_string(conv), test_str)
def test_decode_stream(self):
test_stream = StringIO(
'This is a stream for base64 conversion testing.')
conv = Base64Converter.encode_stream(test_stream)
decoded_conv = Base64Converter.decode_to_stream(conv)
decoded_cont = decoded_conv.read()
test_stream.seek(0)
exp_cont = test_stream.read()
self.assert_equal(decoded_cont, exp_cont)
def test_decode_zip_file_data(self):
zip_stream = StringIO()
archive = zipfile.ZipFile(zip_stream, 'a', zipfile.ZIP_DEFLATED, False)
archive.writestr('file1', 'test stream 1')
archive.writestr('file2', 'test stream 2')
for zfile in archive.filelist:
zfile.create_system = 0
archive.close()
zip_stream.seek(0)
conv = Base64Converter.encode_zip_stream(zip_stream)
decoded_conv = Base64Converter.decode_to_stream(conv)
ret_archive = zipfile.ZipFile(decoded_conv, 'a', zipfile.
ZIP_DEFLATED, False)
content1 = None
content2 = None
self.assert_equal(len(ret_archive.namelist()), 2)
for file_name in ret_archive.namelist():
if file_name == 'file1':
content1 = ret_archive.read(file_name)
self.assert_equal(content1, 'test stream 1')
self.assert_not_equal(content2, 'test stream 2')
else:
content2 = ret_archive.read(file_name)
self.assert_equal(content2, 'test stream 2')
self.assert_not_equal(content2, 'test stream 1')
class AttachmentTestCase(BaseTestCase):
def set_up(self):
BaseTestCase.set_up(self)
self.init_data = dict(content='Important attachment content.',
file_name='test_file1.txt', description='A test file.', size=14,
author='user1', time=None)
def test_init(self):
att = AttachmentWrapper(**self.init_data)
for attr_name, exp_value in self.init_data.iteritems():
self.assert_equal(getattr(att, attr_name), exp_value)
def test_create_from_trac_data(self):
file_name = 'test_file1.txt'
description = 'A test file.'
size = len(file_name)
time = datetime
author = 'user1'
trac_data = file_name, description, size, time, author
att = AttachmentWrapper.create_from_trac_data(trac_data)
self.init_data['content'] = None
self.init_data['time'] = time
for attr_name, exp_value in self.init_data.iteritems():
self.assert_equal(getattr(att, attr_name), exp_value)
def test_get_base64_data_for_upload(self):
test_str = 'This is a string for base64 conversion testing.'
self.init_data['content'] = test_str
exp_conv = Base64Converter.encode_string(test_str)
att = AttachmentWrapper(**self.init_data)
self.assert_equal(att.get_base64_data_for_upload(), exp_conv)
test_stream = StringIO(
'This is a stream for base64 conversion testing.')
exp_conv = Base64Converter.encode_stream(test_stream)
self.init_data['content'] = test_stream
att = AttachmentWrapper(**self.init_data)
self.assert_equal(att.get_base64_data_for_upload(), exp_conv)
file_map = dict(file1='test stream 1', file2='test stream 2')
zip_stream = StringIO()
archive = zipfile.ZipFile(zip_stream, 'a', zipfile.ZIP_DEFLATED, False)
for fn, content in file_map.iteritems():
archive.writestr(fn, content)
for zfile in archive.filelist:
zfile.create_system = 0
archive.close()
zip_stream.seek(0)
exp_conv = Base64Converter.encode_zip_stream(zip_stream)
self.init_data['content'] = file_map
att = AttachmentWrapper(**self.init_data)
self.assert_equal(att.get_base64_data_for_upload(), exp_conv)
self.init_data['content'] = 1
att = AttachmentWrapper(**self.init_data)
self.assert_raises(TypeError, att.get_base64_data_for_upload)
| <mask token>
class Base64ConverterTestCase(BaseTestCase):
<mask token>
def test_encode_stream(self):
test_stream = StringIO(
'This is a stream for base64 conversion testing.')
exp_conv = Binary(test_stream.read())
self.assert_equal(Base64Converter.encode_stream(test_stream), exp_conv)
test_stream.close()
def test_encode_zip_stream(self):
zip_stream = StringIO()
archive = zipfile.ZipFile(zip_stream, 'a', zipfile.ZIP_DEFLATED, False)
archive.writestr('file1', 'test stream 1')
archive.writestr('file2', 'test stream 2')
for zfile in archive.filelist:
zfile.create_system = 0
archive.close()
zip_stream.seek(0)
exp_conv = Binary(zip_stream.getvalue())
self.assert_equal(Base64Converter.encode_zip_stream(zip_stream),
exp_conv)
zip_stream.close()
def test_decode_string(self):
test_str = 'This is a string for base64 conversion testing.'
conv = Base64Converter.encode_string(test_str)
self.assert_equal(Base64Converter.decode_to_string(conv), test_str)
def test_decode_stream(self):
test_stream = StringIO(
'This is a stream for base64 conversion testing.')
conv = Base64Converter.encode_stream(test_stream)
decoded_conv = Base64Converter.decode_to_stream(conv)
decoded_cont = decoded_conv.read()
test_stream.seek(0)
exp_cont = test_stream.read()
self.assert_equal(decoded_cont, exp_cont)
def test_decode_zip_file_data(self):
zip_stream = StringIO()
archive = zipfile.ZipFile(zip_stream, 'a', zipfile.ZIP_DEFLATED, False)
archive.writestr('file1', 'test stream 1')
archive.writestr('file2', 'test stream 2')
for zfile in archive.filelist:
zfile.create_system = 0
archive.close()
zip_stream.seek(0)
conv = Base64Converter.encode_zip_stream(zip_stream)
decoded_conv = Base64Converter.decode_to_stream(conv)
ret_archive = zipfile.ZipFile(decoded_conv, 'a', zipfile.
ZIP_DEFLATED, False)
content1 = None
content2 = None
self.assert_equal(len(ret_archive.namelist()), 2)
for file_name in ret_archive.namelist():
if file_name == 'file1':
content1 = ret_archive.read(file_name)
self.assert_equal(content1, 'test stream 1')
self.assert_not_equal(content2, 'test stream 2')
else:
content2 = ret_archive.read(file_name)
self.assert_equal(content2, 'test stream 2')
self.assert_not_equal(content2, 'test stream 1')
class AttachmentTestCase(BaseTestCase):
def set_up(self):
BaseTestCase.set_up(self)
self.init_data = dict(content='Important attachment content.',
file_name='test_file1.txt', description='A test file.', size=14,
author='user1', time=None)
def test_init(self):
att = AttachmentWrapper(**self.init_data)
for attr_name, exp_value in self.init_data.iteritems():
self.assert_equal(getattr(att, attr_name), exp_value)
def test_create_from_trac_data(self):
file_name = 'test_file1.txt'
description = 'A test file.'
size = len(file_name)
time = datetime
author = 'user1'
trac_data = file_name, description, size, time, author
att = AttachmentWrapper.create_from_trac_data(trac_data)
self.init_data['content'] = None
self.init_data['time'] = time
for attr_name, exp_value in self.init_data.iteritems():
self.assert_equal(getattr(att, attr_name), exp_value)
def test_get_base64_data_for_upload(self):
test_str = 'This is a string for base64 conversion testing.'
self.init_data['content'] = test_str
exp_conv = Base64Converter.encode_string(test_str)
att = AttachmentWrapper(**self.init_data)
self.assert_equal(att.get_base64_data_for_upload(), exp_conv)
test_stream = StringIO(
'This is a stream for base64 conversion testing.')
exp_conv = Base64Converter.encode_stream(test_stream)
self.init_data['content'] = test_stream
att = AttachmentWrapper(**self.init_data)
self.assert_equal(att.get_base64_data_for_upload(), exp_conv)
file_map = dict(file1='test stream 1', file2='test stream 2')
zip_stream = StringIO()
archive = zipfile.ZipFile(zip_stream, 'a', zipfile.ZIP_DEFLATED, False)
for fn, content in file_map.iteritems():
archive.writestr(fn, content)
for zfile in archive.filelist:
zfile.create_system = 0
archive.close()
zip_stream.seek(0)
exp_conv = Base64Converter.encode_zip_stream(zip_stream)
self.init_data['content'] = file_map
att = AttachmentWrapper(**self.init_data)
self.assert_equal(att.get_base64_data_for_upload(), exp_conv)
self.init_data['content'] = 1
att = AttachmentWrapper(**self.init_data)
self.assert_raises(TypeError, att.get_base64_data_for_upload)
| <mask token>
class Base64ConverterTestCase(BaseTestCase):
def test_encode_string(self):
test_str = 'This is a string for base64 conversion testing.'
exp_conv = Binary(test_str)
self.assert_equal(Base64Converter.encode_string(test_str), exp_conv)
def test_encode_stream(self):
test_stream = StringIO(
'This is a stream for base64 conversion testing.')
exp_conv = Binary(test_stream.read())
self.assert_equal(Base64Converter.encode_stream(test_stream), exp_conv)
test_stream.close()
def test_encode_zip_stream(self):
zip_stream = StringIO()
archive = zipfile.ZipFile(zip_stream, 'a', zipfile.ZIP_DEFLATED, False)
archive.writestr('file1', 'test stream 1')
archive.writestr('file2', 'test stream 2')
for zfile in archive.filelist:
zfile.create_system = 0
archive.close()
zip_stream.seek(0)
exp_conv = Binary(zip_stream.getvalue())
self.assert_equal(Base64Converter.encode_zip_stream(zip_stream),
exp_conv)
zip_stream.close()
def test_decode_string(self):
test_str = 'This is a string for base64 conversion testing.'
conv = Base64Converter.encode_string(test_str)
self.assert_equal(Base64Converter.decode_to_string(conv), test_str)
def test_decode_stream(self):
test_stream = StringIO(
'This is a stream for base64 conversion testing.')
conv = Base64Converter.encode_stream(test_stream)
decoded_conv = Base64Converter.decode_to_stream(conv)
decoded_cont = decoded_conv.read()
test_stream.seek(0)
exp_cont = test_stream.read()
self.assert_equal(decoded_cont, exp_cont)
def test_decode_zip_file_data(self):
zip_stream = StringIO()
archive = zipfile.ZipFile(zip_stream, 'a', zipfile.ZIP_DEFLATED, False)
archive.writestr('file1', 'test stream 1')
archive.writestr('file2', 'test stream 2')
for zfile in archive.filelist:
zfile.create_system = 0
archive.close()
zip_stream.seek(0)
conv = Base64Converter.encode_zip_stream(zip_stream)
decoded_conv = Base64Converter.decode_to_stream(conv)
ret_archive = zipfile.ZipFile(decoded_conv, 'a', zipfile.
ZIP_DEFLATED, False)
content1 = None
content2 = None
self.assert_equal(len(ret_archive.namelist()), 2)
for file_name in ret_archive.namelist():
if file_name == 'file1':
content1 = ret_archive.read(file_name)
self.assert_equal(content1, 'test stream 1')
self.assert_not_equal(content2, 'test stream 2')
else:
content2 = ret_archive.read(file_name)
self.assert_equal(content2, 'test stream 2')
self.assert_not_equal(content2, 'test stream 1')
class AttachmentTestCase(BaseTestCase):
def set_up(self):
BaseTestCase.set_up(self)
self.init_data = dict(content='Important attachment content.',
file_name='test_file1.txt', description='A test file.', size=14,
author='user1', time=None)
def test_init(self):
att = AttachmentWrapper(**self.init_data)
for attr_name, exp_value in self.init_data.iteritems():
self.assert_equal(getattr(att, attr_name), exp_value)
def test_create_from_trac_data(self):
file_name = 'test_file1.txt'
description = 'A test file.'
size = len(file_name)
time = datetime
author = 'user1'
trac_data = file_name, description, size, time, author
att = AttachmentWrapper.create_from_trac_data(trac_data)
self.init_data['content'] = None
self.init_data['time'] = time
for attr_name, exp_value in self.init_data.iteritems():
self.assert_equal(getattr(att, attr_name), exp_value)
def test_get_base64_data_for_upload(self):
test_str = 'This is a string for base64 conversion testing.'
self.init_data['content'] = test_str
exp_conv = Base64Converter.encode_string(test_str)
att = AttachmentWrapper(**self.init_data)
self.assert_equal(att.get_base64_data_for_upload(), exp_conv)
test_stream = StringIO(
'This is a stream for base64 conversion testing.')
exp_conv = Base64Converter.encode_stream(test_stream)
self.init_data['content'] = test_stream
att = AttachmentWrapper(**self.init_data)
self.assert_equal(att.get_base64_data_for_upload(), exp_conv)
file_map = dict(file1='test stream 1', file2='test stream 2')
zip_stream = StringIO()
archive = zipfile.ZipFile(zip_stream, 'a', zipfile.ZIP_DEFLATED, False)
for fn, content in file_map.iteritems():
archive.writestr(fn, content)
for zfile in archive.filelist:
zfile.create_system = 0
archive.close()
zip_stream.seek(0)
exp_conv = Base64Converter.encode_zip_stream(zip_stream)
self.init_data['content'] = file_map
att = AttachmentWrapper(**self.init_data)
self.assert_equal(att.get_base64_data_for_upload(), exp_conv)
self.init_data['content'] = 1
att = AttachmentWrapper(**self.init_data)
self.assert_raises(TypeError, att.get_base64_data_for_upload)
| """
This file is part of the tractor library.
See LICENSE.txt for licensing, CONTRIBUTORS.txt for contributor information.
Created on Jan 06, 2012.
"""
from StringIO import StringIO
from datetime import datetime
from tractor.attachment import AttachmentWrapper
from tractor.attachment import Base64Converter
from tractor.tests.base import BaseTestCase
from xmlrpclib import Binary
import zipfile
class Base64ConverterTestCase(BaseTestCase):
def test_encode_string(self):
test_str = 'This is a string for base64 conversion testing.'
exp_conv = Binary(test_str)
self.assert_equal(Base64Converter.encode_string(test_str), exp_conv)
def test_encode_stream(self):
test_stream = StringIO('This is a stream for base64 conversion testing.')
exp_conv = Binary(test_stream.read())
self.assert_equal(Base64Converter.encode_stream(test_stream), exp_conv)
test_stream.close()
def test_encode_zip_stream(self):
zip_stream = StringIO()
archive = zipfile.ZipFile(zip_stream, 'a', zipfile.ZIP_DEFLATED, False)
archive.writestr('file1', 'test stream 1')
archive.writestr('file2', 'test stream 2')
for zfile in archive.filelist: zfile.create_system = 0
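        # create_system = 0 marks entries as MS-DOS/FAT-created so the archive's
        # external attributes stay platform-neutral.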
archive.close()
zip_stream.seek(0)
exp_conv = Binary(zip_stream.getvalue())
self.assert_equal(Base64Converter.encode_zip_stream(zip_stream),
exp_conv)
zip_stream.close()
def test_decode_string(self):
test_str = 'This is a string for base64 conversion testing.'
conv = Base64Converter.encode_string(test_str)
self.assert_equal(Base64Converter.decode_to_string(conv), test_str)
def test_decode_stream(self):
test_stream = StringIO('This is a stream for base64 conversion testing.')
conv = Base64Converter.encode_stream(test_stream)
decoded_conv = Base64Converter.decode_to_stream(conv)
decoded_cont = decoded_conv.read()
test_stream.seek(0)
exp_cont = test_stream.read()
self.assert_equal(decoded_cont, exp_cont)
def test_decode_zip_file_data(self):
zip_stream = StringIO()
archive = zipfile.ZipFile(zip_stream, 'a', zipfile.ZIP_DEFLATED, False)
archive.writestr('file1', 'test stream 1')
archive.writestr('file2', 'test stream 2')
for zfile in archive.filelist: zfile.create_system = 0
archive.close()
zip_stream.seek(0)
conv = Base64Converter.encode_zip_stream(zip_stream)
decoded_conv = Base64Converter.decode_to_stream(conv)
ret_archive = zipfile.ZipFile(decoded_conv, 'a', zipfile.ZIP_DEFLATED,
False)
content1 = None
content2 = None
self.assert_equal(len(ret_archive.namelist()), 2)
for file_name in ret_archive.namelist():
if file_name == 'file1':
content1 = ret_archive.read(file_name)
self.assert_equal(content1, 'test stream 1')
self.assert_not_equal(content2, 'test stream 2')
else:
content2 = ret_archive.read(file_name)
self.assert_equal(content2, 'test stream 2')
self.assert_not_equal(content2, 'test stream 1')
class AttachmentTestCase(BaseTestCase):
def set_up(self):
BaseTestCase.set_up(self)
self.init_data = dict(content='Important attachment content.',
file_name='test_file1.txt',
description='A test file.',
size=14,
author='user1',
time=None)
def test_init(self):
att = AttachmentWrapper(**self.init_data)
for attr_name, exp_value in self.init_data.iteritems():
self.assert_equal(getattr(att, attr_name), exp_value)
def test_create_from_trac_data(self):
file_name = 'test_file1.txt'
description = 'A test file.'
size = len(file_name)
time = datetime
author = 'user1'
trac_data = (file_name, description, size, time, author)
att = AttachmentWrapper.create_from_trac_data(trac_data)
self.init_data['content'] = None
self.init_data['time'] = time
for attr_name, exp_value in self.init_data.iteritems():
self.assert_equal(getattr(att, attr_name), exp_value)
def test_get_base64_data_for_upload(self):
# Test string
test_str = 'This is a string for base64 conversion testing.'
self.init_data['content'] = test_str
exp_conv = Base64Converter.encode_string(test_str)
att = AttachmentWrapper(**self.init_data)
self.assert_equal(att.get_base64_data_for_upload(), exp_conv)
# Test stream
test_stream = StringIO('This is a stream for base64 conversion testing.')
exp_conv = Base64Converter.encode_stream(test_stream)
self.init_data['content'] = test_stream
att = AttachmentWrapper(**self.init_data)
self.assert_equal(att.get_base64_data_for_upload(), exp_conv)
# Test file map
file_map = dict(file1='test stream 1', file2='test stream 2')
zip_stream = StringIO()
archive = zipfile.ZipFile(zip_stream, 'a', zipfile.ZIP_DEFLATED, False)
for fn, content in file_map.iteritems(): archive.writestr(fn, content)
for zfile in archive.filelist: zfile.create_system = 0
archive.close()
zip_stream.seek(0)
exp_conv = Base64Converter.encode_zip_stream(zip_stream)
self.init_data['content'] = file_map
att = AttachmentWrapper(**self.init_data)
self.assert_equal(att.get_base64_data_for_upload(), exp_conv)
# Test error raising
self.init_data['content'] = 1
att = AttachmentWrapper(**self.init_data)
self.assert_raises(TypeError, att.get_base64_data_for_upload)
| [
5,
10,
11,
12,
14
] |